Dataset schema (four string-valued columns; the ranges are the minimum and maximum string lengths reported by the dataset viewer):
- code: 75 to 104k characters (the original source)
- code_sememe: 47 to 309k characters (an AST-style structural dump of the source)
- token_type: 215 to 214k characters (the source's lexemes tagged as keyword/identifier/literal)
- code_dependency: 75 to 155k characters (the source reprinted with control/data dependency comments)
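Each row below pairs the original source (code) with three derived views of it: the structural dump (code_sememe), the lexeme-class tagging (token_type), and the dependency-annotated reprint (code_dependency). A minimal loading sketch with the Hugging Face datasets library follows; the Hub id used here is a hypothetical placeholder, since this dump does not name the dataset.

# Minimal sketch; "user/code-representations" is a made-up dataset id.
from datasets import load_dataset

ds = load_dataset("user/code-representations", split="train")
row = ds[0]
for col in ("code", "code_sememe", "token_type", "code_dependency"):
    print(col, "->", row[col][:80])  # peek at each parallel representation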
def rule_expand(component, text): '''expand one rule component''' global rline_mpstate if component[0] == '<' and component[-1] == '>': return component[1:-1].split('|') if component in rline_mpstate.completion_functions: return rline_mpstate.completion_functions[component](text) return [component]
def function[rule_expand, parameter[component, text]]: constant[expand one rule component] <ast.Global object at 0x7da18ede4910> if <ast.BoolOp object at 0x7da18ede7400> begin[:] return[call[call[name[component]][<ast.Slice object at 0x7da18ede7eb0>].split, parameter[constant[|]]]] if compare[name[component] in name[rline_mpstate].completion_functions] begin[:] return[call[call[name[rline_mpstate].completion_functions][name[component]], parameter[name[text]]]] return[list[[<ast.Name object at 0x7da18ede4310>]]]
keyword[def] identifier[rule_expand] ( identifier[component] , identifier[text] ): literal[string] keyword[global] identifier[rline_mpstate] keyword[if] identifier[component] [ literal[int] ]== literal[string] keyword[and] identifier[component] [- literal[int] ]== literal[string] : keyword[return] identifier[component] [ literal[int] :- literal[int] ]. identifier[split] ( literal[string] ) keyword[if] identifier[component] keyword[in] identifier[rline_mpstate] . identifier[completion_functions] : keyword[return] identifier[rline_mpstate] . identifier[completion_functions] [ identifier[component] ]( identifier[text] ) keyword[return] [ identifier[component] ]
def rule_expand(component, text): """expand one rule component""" global rline_mpstate if component[0] == '<' and component[-1] == '>': return component[1:-1].split('|') # depends on [control=['if'], data=[]] if component in rline_mpstate.completion_functions: return rline_mpstate.completion_functions[component](text) # depends on [control=['if'], data=['component']] return [component]
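The code_sememe field above is a structural dump of the code field, with function[...]/name[...]/constant[...] markers and unhandled nodes falling back to <ast.X object at 0x...> reprs. The exact serializer is not shown in this dump; as a rough analogue only, Python's standard ast module yields the same kind of structural view:

# Rough stdlib analogue of code_sememe; not the dataset's own serializer.
import ast

src = '''
def rule_expand(component, text):
    if component[0] == '<' and component[-1] == '>':
        return component[1:-1].split('|')
    return [component]
'''
tree = ast.parse(src)
# node kinds, names, and constants, like the markers in code_sememe
print(ast.dump(tree, indent=2))  # indent= requires Python 3.9+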
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): """ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.s3.connection.S3Connection` :return: A connection to Amazon's S3 """ from botornado.s3.connection import AsyncS3Connection return AsyncS3Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
def function[connect_s3, parameter[aws_access_key_id, aws_secret_access_key]]: constant[ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.s3.connection.S3Connection` :return: A connection to Amazon's S3 ] from relative_module[botornado.s3.connection] import module[AsyncS3Connection] return[call[name[AsyncS3Connection], parameter[name[aws_access_key_id], name[aws_secret_access_key]]]]
keyword[def] identifier[connect_s3] ( identifier[aws_access_key_id] = keyword[None] , identifier[aws_secret_access_key] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[botornado] . identifier[s3] . identifier[connection] keyword[import] identifier[AsyncS3Connection] keyword[return] identifier[AsyncS3Connection] ( identifier[aws_access_key_id] , identifier[aws_secret_access_key] ,** identifier[kwargs] )
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): """ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.s3.connection.S3Connection` :return: A connection to Amazon's S3 """ from botornado.s3.connection import AsyncS3Connection return AsyncS3Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
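The token_type field tags every lexeme of the source as keyword, identifier, or literal. A rough reconstruction of that view with the standard tokenize module; the dataset's own tagger may classify some tokens differently:

# Approximate the token_type column with stdlib tokenization.
import io
import keyword
import token
import tokenize

def tag_tokens(src):
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if tok.type == token.NAME:
            # names are either language keywords or user identifiers
            yield ('keyword' if keyword.iskeyword(tok.string) else 'identifier', tok.string)
        elif tok.type in (token.NUMBER, token.STRING):
            yield ('literal', tok.string)

print(list(tag_tokens('return AsyncS3Connection(key, secret, **kwargs)\n')))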
def get_bgp_neighbors(self): """BGP neighbor information. Currently no VRF support. Supports both IPv4 and IPv6. """ supported_afi = ['ipv4', 'ipv6'] bgp_neighbor_data = dict() bgp_neighbor_data['global'] = {} # get summary output from device cmd_bgp_all_sum = 'show bgp all summary' summary_output = self._send_command(cmd_bgp_all_sum).strip() # get neighbor output from device neighbor_output = '' for afi in supported_afi: cmd_bgp_neighbor = 'show bgp %s unicast neighbors' % afi neighbor_output += self._send_command(cmd_bgp_neighbor).strip() # trailing newline required for parsing neighbor_output += "\n" # Regular expressions used for parsing BGP summary parse_summary = { 'patterns': [ # For address family: IPv4 Unicast {'regexp': re.compile(r'^For address family: (?P<afi>\S+) '), 'record': False}, # Capture router_id and local_as values, e.g.: # BGP router identifier 10.0.1.1, local AS number 65000 {'regexp': re.compile(r'^.* router identifier (?P<router_id>{}), ' r'local AS number (?P<local_as>{})'.format( IPV4_ADDR_REGEX, ASN_REGEX )), 'record': False}, # Match neighbor summary row, capturing useful details and # discarding the 5 columns that we don't care about, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 10.0.0.2 4 65000 1336020 64337701 1011343614 0 0 8w0d 3143 {'regexp': re.compile(r'^\*?(?P<remote_addr>({})|({}))' r'\s+\d+\s+(?P<remote_as>{})(\s+\S+){{5}}\s+' r'(?P<uptime>(never)|\d+\S+)' r'\s+(?P<accepted_prefixes>\d+)'.format( IPV4_ADDR_REGEX, IPV6_ADDR_REGEX, ASN_REGEX )), 'record': True}, # Same as above, but for peers that are not Established, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 192.168.0.2 4 65002 0 0 1 0 0 never Active {'regexp': re.compile(r'^\*?(?P<remote_addr>({})|({}))' r'\s+\d+\s+(?P<remote_as>{})(\s+\S+){{5}}\s+' r'(?P<uptime>(never)|\d+\S+)\s+(?P<state>\D.*)'.format( IPV4_ADDR_REGEX, IPV6_ADDR_REGEX, ASN_REGEX )), 'record': True}, # ipv6 peers often break across rows because of the longer peer address, # match as above, but in separate expressions, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 2001:DB8::4 # 4 65004 9900690 612449 155362939 0 0 26w6d 36391 {'regexp': re.compile(r'^\*?(?P<remote_addr>({})|({}))'.format( IPV4_ADDR_REGEX, IPV6_ADDR_REGEX )), 'record': False}, {'regexp': re.compile(r'^\s+\d+\s+(?P<remote_as>{})(\s+\S+){{5}}\s+' r'(?P<uptime>(never)|\d+\S+)' r'\s+(?P<accepted_prefixes>\d+)'.format( ASN_REGEX )), 'record': True}, # Same as above, but for peers that are not Established, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 2001:DB8::3 # 4 65003 0 0 1 0 0 never Idle (Admin) {'regexp': re.compile(r'^\s+\d+\s+(?P<remote_as>{})(\s+\S+){{5}}\s+' r'(?P<uptime>(never)|\d+\S+)\s+(?P<state>\D.*)'.format( ASN_REGEX )), 'record': True} ], 'no_fill_fields': ['accepted_prefixes', 'state', 'uptime', 'remote_as', 'remote_addr'] } parse_neighbors = { 'patterns': [ # Capture BGP neighbor is 10.0.0.2, remote AS 65000, internal link {'regexp': re.compile(r'^BGP neighbor is (?P<remote_addr>({})|({})),' r'\s+remote AS (?P<remote_as>{}).*'.format( IPV4_ADDR_REGEX, IPV6_ADDR_REGEX, ASN_REGEX )), 'record': False}, # Capture description {'regexp': re.compile(r'^\s+Description: (?P<description>.+)'), 'record': False}, # Capture remote_id, e.g.: # BGP version 4, remote router ID 10.0.1.2 {'regexp': re.compile(r'^\s+BGP version \d+, remote router ID ' r'(?P<remote_id>{})'.format(IPV4_ADDR_REGEX)), 'record': False}, # Capture AFI and SAFI names, e.g.: # For address family: IPv4 Unicast {'regexp': re.compile(r'^\s+For address family: (?P<afi>\S+) '), 'record': False}, # Capture current sent and accepted prefixes, e.g.: # Prefixes Current: 637213 3142 (Consumes 377040 bytes) {'regexp': re.compile(r'^\s+Prefixes Current:\s+(?P<sent_prefixes>\d+)\s+' r'(?P<accepted_prefixes>\d+).*'), 'record': False}, # Capture received_prefixes if soft-reconfig is enabled for the peer {'regexp': re.compile(r'^\s+Saved (soft-reconfig):.+(?P<received_prefixes>\d+).*'), 'record': True}, # Otherwise, use the following as an end of row marker {'regexp': re.compile(r'^\s+Local Policy Denied Prefixes:.+'), 'record': True} ], # fields that should not be "filled down" across table rows 'no_fill_fields': ['received_prefixes', 'accepted_prefixes', 'sent_prefixes'] } # Parse outputs into a list of dicts summary_data = [] summary_data_entry = {} for line in summary_output.splitlines(): # check for matches against each pattern for item in parse_summary['patterns']: match = item['regexp'].match(line) if match: # a match was found, so update the temp entry with the match's groupdict summary_data_entry.update(match.groupdict()) if item['record']: # Record indicates the last piece of data has been obtained; move # on to next entry summary_data.append(copy.deepcopy(summary_data_entry)) # remove keys that are listed in no_fill_fields before the next pass for field in parse_summary['no_fill_fields']: try: del summary_data_entry[field] except KeyError: pass break neighbor_data = [] neighbor_data_entry = {} for line in neighbor_output.splitlines(): # check for matches against each pattern for item in parse_neighbors['patterns']: match = item['regexp'].match(line) if match: # a match was found, so update the temp entry with the match's groupdict neighbor_data_entry.update(match.groupdict()) if item['record']: # Record indicates the last piece of data has been obtained; move # on to next entry neighbor_data.append(copy.deepcopy(neighbor_data_entry)) # remove keys that are listed in no_fill_fields before the next pass for field in parse_neighbors['no_fill_fields']: try: del neighbor_data_entry[field] except KeyError: pass break router_id = None for entry in summary_data: if not router_id: router_id = entry['router_id'] elif entry['router_id'] != router_id: raise ValueError # check the router_id looks like an ipv4 address router_id = napalm_base.helpers.ip(router_id, version=4) # add parsed data to output dict bgp_neighbor_data['global']['router_id'] = router_id bgp_neighbor_data['global']['peers'] = {} for entry in summary_data: remote_addr = napalm_base.helpers.ip(entry['remote_addr']) afi = entry['afi'].lower() # check that we're looking at a supported afi if afi not in supported_afi: continue # get neighbor_entry out of neighbor data neighbor_entry = None for neighbor in neighbor_data: if (neighbor['afi'].lower() == afi and napalm_base.helpers.ip(neighbor['remote_addr']) == remote_addr): neighbor_entry = neighbor break if not isinstance(neighbor_entry, dict): raise ValueError(msg="Couldn't find neighbor data for %s in afi %s" % (remote_addr, afi)) # check for admin down state try: if "(Admin)" in entry['state']: is_enabled = False else: is_enabled = True except KeyError: is_enabled = True # parse uptime value uptime = self.bgp_time_conversion(entry['uptime']) # Uptime should be -1 if BGP session not up is_up = True if uptime >= 0 else False # check whether session is up for address family and get prefix count try: accepted_prefixes = int(entry['accepted_prefixes']) except (ValueError, KeyError): accepted_prefixes = -1 # Only parse neighbor detailed data if BGP session is up if is_up: try: # override accepted_prefixes with neighbor data if possible (since that's newer) accepted_prefixes = int(neighbor_entry['accepted_prefixes']) except (ValueError, KeyError): pass # try to get received prefix count, otherwise set to accepted_prefixes received_prefixes = neighbor_entry.get('received_prefixes', accepted_prefixes) # try to get sent prefix count and convert to int, otherwise set to -1 sent_prefixes = int(neighbor_entry.get('sent_prefixes', -1)) else: received_prefixes = -1 sent_prefixes = -1 # get description try: description = py23_compat.text_type(neighbor_entry['description']) except KeyError: description = '' # check the remote router_id looks like an ipv4 address remote_id = napalm_base.helpers.ip(neighbor_entry['remote_id'], version=4) if remote_addr not in bgp_neighbor_data['global']['peers']: bgp_neighbor_data['global']['peers'][remote_addr] = { 'local_as': napalm_base.helpers.as_number(entry['local_as']), 'remote_as': napalm_base.helpers.as_number(entry['remote_as']), 'remote_id': remote_id, 'is_up': is_up, 'is_enabled': is_enabled, 'description': description, 'uptime': uptime, 'address_family': { afi: { 'received_prefixes': received_prefixes, 'accepted_prefixes': accepted_prefixes, 'sent_prefixes': sent_prefixes } } } else: # found previous data for matching remote_addr, but for different afi existing = bgp_neighbor_data['global']['peers'][remote_addr] assert afi not in existing['address_family'] # compare with existing values and croak if they don't match assert existing['local_as'] == napalm_base.helpers.as_number(entry['local_as']) assert existing['remote_as'] == napalm_base.helpers.as_number(entry['remote_as']) assert existing['remote_id'] == remote_id assert existing['is_enabled'] == is_enabled assert existing['description'] == description # merge other values in a sane manner existing['is_up'] = existing['is_up'] or is_up existing['uptime'] = max(existing['uptime'], uptime) existing['address_family'][afi] = { 'received_prefixes': received_prefixes, 'accepted_prefixes': accepted_prefixes, 'sent_prefixes': sent_prefixes } return bgp_neighbor_data
def function[get_bgp_neighbors, parameter[self]]: constant[BGP neighbor information. Currently no VRF support. Supports both IPv4 and IPv6. ] variable[supported_afi] assign[=] list[[<ast.Constant object at 0x7da18dc99660>, <ast.Constant object at 0x7da18dc9a230>]] variable[bgp_neighbor_data] assign[=] call[name[dict], parameter[]] call[name[bgp_neighbor_data]][constant[global]] assign[=] dictionary[[], []] variable[cmd_bgp_all_sum] assign[=] constant[show bgp all summary] variable[summary_output] assign[=] call[call[name[self]._send_command, parameter[name[cmd_bgp_all_sum]]].strip, parameter[]] variable[neighbor_output] assign[=] constant[] for taget[name[afi]] in starred[name[supported_afi]] begin[:] variable[cmd_bgp_neighbor] assign[=] binary_operation[constant[show bgp %s unicast neighbors] <ast.Mod object at 0x7da2590d6920> name[afi]] <ast.AugAssign object at 0x7da2041da500> <ast.AugAssign object at 0x7da2041d8d60> variable[parse_summary] assign[=] dictionary[[<ast.Constant object at 0x7da2041db100>, <ast.Constant object at 0x7da2041d8580>], [<ast.List object at 0x7da2041d8a60>, <ast.List object at 0x7da1b10d6380>]] variable[parse_neighbors] assign[=] dictionary[[<ast.Constant object at 0x7da1b10d48b0>, <ast.Constant object at 0x7da1b10d6b60>], [<ast.List object at 0x7da1b10d7340>, <ast.List object at 0x7da18dc9afe0>]] variable[summary_data] assign[=] list[[]] variable[summary_data_entry] assign[=] dictionary[[], []] for taget[name[line]] in starred[call[name[summary_output].splitlines, parameter[]]] begin[:] for taget[name[item]] in starred[call[name[parse_summary]][constant[patterns]]] begin[:] variable[match] assign[=] call[call[name[item]][constant[regexp]].match, parameter[name[line]]] if name[match] begin[:] call[name[summary_data_entry].update, parameter[call[name[match].groupdict, parameter[]]]] if call[name[item]][constant[record]] begin[:] call[name[summary_data].append, parameter[call[name[copy].deepcopy, parameter[name[summary_data_entry]]]]] for taget[name[field]] in starred[call[name[parse_summary]][constant[no_fill_fields]]] begin[:] <ast.Try object at 0x7da20c76f3d0> break variable[neighbor_data] assign[=] list[[]] variable[neighbor_data_entry] assign[=] dictionary[[], []] for taget[name[line]] in starred[call[name[neighbor_output].splitlines, parameter[]]] begin[:] for taget[name[item]] in starred[call[name[parse_neighbors]][constant[patterns]]] begin[:] variable[match] assign[=] call[call[name[item]][constant[regexp]].match, parameter[name[line]]] if name[match] begin[:] call[name[neighbor_data_entry].update, parameter[call[name[match].groupdict, parameter[]]]] if call[name[item]][constant[record]] begin[:] call[name[neighbor_data].append, parameter[call[name[copy].deepcopy, parameter[name[neighbor_data_entry]]]]] for taget[name[field]] in starred[call[name[parse_neighbors]][constant[no_fill_fields]]] begin[:] <ast.Try object at 0x7da1b26acb80> break variable[router_id] assign[=] constant[None] for taget[name[entry]] in starred[name[summary_data]] begin[:] if <ast.UnaryOp object at 0x7da1b26ac5e0> begin[:] variable[router_id] assign[=] call[name[entry]][constant[router_id]] variable[router_id] assign[=] call[name[napalm_base].helpers.ip, parameter[name[router_id]]] call[call[name[bgp_neighbor_data]][constant[global]]][constant[router_id]] assign[=] name[router_id] call[call[name[bgp_neighbor_data]][constant[global]]][constant[peers]] assign[=] dictionary[[], []] for taget[name[entry]] in starred[name[summary_data]] begin[:] variable[remote_addr] assign[=] call[name[napalm_base].helpers.ip, parameter[call[name[entry]][constant[remote_addr]]]] variable[afi] assign[=] call[call[name[entry]][constant[afi]].lower, parameter[]] if compare[name[afi] <ast.NotIn object at 0x7da2590d7190> name[supported_afi]] begin[:] continue variable[neighbor_entry] assign[=] constant[None] for taget[name[neighbor]] in starred[name[neighbor_data]] begin[:] if <ast.BoolOp object at 0x7da1b26af2b0> begin[:] variable[neighbor_entry] assign[=] name[neighbor] break if <ast.UnaryOp object at 0x7da1b26ad3f0> begin[:] <ast.Raise object at 0x7da1b26ae4a0> <ast.Try object at 0x7da1b26af640> variable[uptime] assign[=] call[name[self].bgp_time_conversion, parameter[call[name[entry]][constant[uptime]]]] variable[is_up] assign[=] <ast.IfExp object at 0x7da1b26ad8a0> <ast.Try object at 0x7da1b26aefb0> if name[is_up] begin[:] <ast.Try object at 0x7da1b26ae830> variable[received_prefixes] assign[=] call[name[neighbor_entry].get, parameter[constant[received_prefixes], name[accepted_prefixes]]] variable[sent_prefixes] assign[=] call[name[int], parameter[call[name[neighbor_entry].get, parameter[constant[sent_prefixes], <ast.UnaryOp object at 0x7da1b26ad060>]]]] <ast.Try object at 0x7da1b26ae080> variable[remote_id] assign[=] call[name[napalm_base].helpers.ip, parameter[call[name[neighbor_entry]][constant[remote_id]]]] if compare[name[remote_addr] <ast.NotIn object at 0x7da2590d7190> call[call[name[bgp_neighbor_data]][constant[global]]][constant[peers]]] begin[:] call[call[call[name[bgp_neighbor_data]][constant[global]]][constant[peers]]][name[remote_addr]] assign[=] dictionary[[<ast.Constant object at 0x7da207f03910>, <ast.Constant object at 0x7da207f00c40>, <ast.Constant object at 0x7da207f00a90>, <ast.Constant object at 0x7da207f014e0>, <ast.Constant object at 0x7da207f00190>, <ast.Constant object at 0x7da207f01f60>, <ast.Constant object at 0x7da207f02f20>, <ast.Constant object at 0x7da207f03160>], [<ast.Call object at 0x7da207f03430>, <ast.Call object at 0x7da207f03a30>, <ast.Name object at 0x7da207f017e0>, <ast.Name object at 0x7da207f02a40>, <ast.Name object at 0x7da207f027a0>, <ast.Name object at 0x7da207f03fd0>, <ast.Name object at 0x7da207f01c30>, <ast.Dict object at 0x7da207f01b70>]] return[name[bgp_neighbor_data]]
keyword[def] identifier[get_bgp_neighbors] ( identifier[self] ): literal[string] identifier[supported_afi] =[ literal[string] , literal[string] ] identifier[bgp_neighbor_data] = identifier[dict] () identifier[bgp_neighbor_data] [ literal[string] ]={} identifier[cmd_bgp_all_sum] = literal[string] identifier[summary_output] = identifier[self] . identifier[_send_command] ( identifier[cmd_bgp_all_sum] ). identifier[strip] () identifier[neighbor_output] = literal[string] keyword[for] identifier[afi] keyword[in] identifier[supported_afi] : identifier[cmd_bgp_neighbor] = literal[string] % identifier[afi] identifier[neighbor_output] += identifier[self] . identifier[_send_command] ( identifier[cmd_bgp_neighbor] ). identifier[strip] () identifier[neighbor_output] += literal[string] identifier[parse_summary] ={ literal[string] :[ { literal[string] : identifier[re] . identifier[compile] ( literal[string] ), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] . identifier[format] ( identifier[IPV4_ADDR_REGEX] , identifier[ASN_REGEX] )), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[IPV4_ADDR_REGEX] , identifier[IPV6_ADDR_REGEX] , identifier[ASN_REGEX] )), literal[string] : keyword[True] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[IPV4_ADDR_REGEX] , identifier[IPV6_ADDR_REGEX] , identifier[ASN_REGEX] )), literal[string] : keyword[True] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] . identifier[format] ( identifier[IPV4_ADDR_REGEX] , identifier[IPV6_ADDR_REGEX] )), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[ASN_REGEX] )), literal[string] : keyword[True] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] . identifier[format] ( identifier[ASN_REGEX] )), literal[string] : keyword[True] } ], literal[string] :[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] } identifier[parse_neighbors] ={ literal[string] :[ { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] . identifier[format] ( identifier[IPV4_ADDR_REGEX] , identifier[IPV6_ADDR_REGEX] , identifier[ASN_REGEX] )), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] ), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] . identifier[format] ( identifier[IPV4_ADDR_REGEX] )), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] ), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] literal[string] ), literal[string] : keyword[False] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] ), literal[string] : keyword[True] }, { literal[string] : identifier[re] . identifier[compile] ( literal[string] ), literal[string] : keyword[True] } ], literal[string] :[ literal[string] , literal[string] , literal[string] ] } identifier[summary_data] =[] identifier[summary_data_entry] ={} keyword[for] identifier[line] keyword[in] identifier[summary_output] . identifier[splitlines] (): keyword[for] identifier[item] keyword[in] identifier[parse_summary] [ literal[string] ]: identifier[match] = identifier[item] [ literal[string] ]. identifier[match] ( identifier[line] ) keyword[if] identifier[match] : identifier[summary_data_entry] . identifier[update] ( identifier[match] . identifier[groupdict] ()) keyword[if] identifier[item] [ literal[string] ]: identifier[summary_data] . identifier[append] ( identifier[copy] . identifier[deepcopy] ( identifier[summary_data_entry] )) keyword[for] identifier[field] keyword[in] identifier[parse_summary] [ literal[string] ]: keyword[try] : keyword[del] identifier[summary_data_entry] [ identifier[field] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[break] identifier[neighbor_data] =[] identifier[neighbor_data_entry] ={} keyword[for] identifier[line] keyword[in] identifier[neighbor_output] . identifier[splitlines] (): keyword[for] identifier[item] keyword[in] identifier[parse_neighbors] [ literal[string] ]: identifier[match] = identifier[item] [ literal[string] ]. identifier[match] ( identifier[line] ) keyword[if] identifier[match] : identifier[neighbor_data_entry] . identifier[update] ( identifier[match] . identifier[groupdict] ()) keyword[if] identifier[item] [ literal[string] ]: identifier[neighbor_data] . identifier[append] ( identifier[copy] . identifier[deepcopy] ( identifier[neighbor_data_entry] )) keyword[for] identifier[field] keyword[in] identifier[parse_neighbors] [ literal[string] ]: keyword[try] : keyword[del] identifier[neighbor_data_entry] [ identifier[field] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[break] identifier[router_id] = keyword[None] keyword[for] identifier[entry] keyword[in] identifier[summary_data] : keyword[if] keyword[not] identifier[router_id] : identifier[router_id] = identifier[entry] [ literal[string] ] keyword[elif] identifier[entry] [ literal[string] ]!= identifier[router_id] : keyword[raise] identifier[ValueError] identifier[router_id] = identifier[napalm_base] . identifier[helpers] . identifier[ip] ( identifier[router_id] , identifier[version] = literal[int] ) identifier[bgp_neighbor_data] [ literal[string] ][ literal[string] ]= identifier[router_id] identifier[bgp_neighbor_data] [ literal[string] ][ literal[string] ]={} keyword[for] identifier[entry] keyword[in] identifier[summary_data] : identifier[remote_addr] = identifier[napalm_base] . identifier[helpers] . identifier[ip] ( identifier[entry] [ literal[string] ]) identifier[afi] = identifier[entry] [ literal[string] ]. identifier[lower] () keyword[if] identifier[afi] keyword[not] keyword[in] identifier[supported_afi] : keyword[continue] identifier[neighbor_entry] = keyword[None] keyword[for] identifier[neighbor] keyword[in] identifier[neighbor_data] : keyword[if] ( identifier[neighbor] [ literal[string] ]. identifier[lower] ()== identifier[afi] keyword[and] identifier[napalm_base] . identifier[helpers] . identifier[ip] ( identifier[neighbor] [ literal[string] ])== identifier[remote_addr] ): identifier[neighbor_entry] = identifier[neighbor] keyword[break] keyword[if] keyword[not] identifier[isinstance] ( identifier[neighbor_entry] , identifier[dict] ): keyword[raise] identifier[ValueError] ( identifier[msg] = literal[string] % ( identifier[remote_addr] , identifier[afi] )) keyword[try] : keyword[if] literal[string] keyword[in] identifier[entry] [ literal[string] ]: identifier[is_enabled] = keyword[False] keyword[else] : identifier[is_enabled] = keyword[True] keyword[except] identifier[KeyError] : identifier[is_enabled] = keyword[True] identifier[uptime] = identifier[self] . identifier[bgp_time_conversion] ( identifier[entry] [ literal[string] ]) identifier[is_up] = keyword[True] keyword[if] identifier[uptime] >= literal[int] keyword[else] keyword[False] keyword[try] : identifier[accepted_prefixes] = identifier[int] ( identifier[entry] [ literal[string] ]) keyword[except] ( identifier[ValueError] , identifier[KeyError] ): identifier[accepted_prefixes] =- literal[int] keyword[if] identifier[is_up] : keyword[try] : identifier[accepted_prefixes] = identifier[int] ( identifier[neighbor_entry] [ literal[string] ]) keyword[except] ( identifier[ValueError] , identifier[KeyError] ): keyword[pass] identifier[received_prefixes] = identifier[neighbor_entry] . identifier[get] ( literal[string] , identifier[accepted_prefixes] ) identifier[sent_prefixes] = identifier[int] ( identifier[neighbor_entry] . identifier[get] ( literal[string] ,- literal[int] )) keyword[else] : identifier[received_prefixes] =- literal[int] identifier[sent_prefixes] =- literal[int] keyword[try] : identifier[description] = identifier[py23_compat] . identifier[text_type] ( identifier[neighbor_entry] [ literal[string] ]) keyword[except] identifier[KeyError] : identifier[description] = literal[string] identifier[remote_id] = identifier[napalm_base] . identifier[helpers] . identifier[ip] ( identifier[neighbor_entry] [ literal[string] ], identifier[version] = literal[int] ) keyword[if] identifier[remote_addr] keyword[not] keyword[in] identifier[bgp_neighbor_data] [ literal[string] ][ literal[string] ]: identifier[bgp_neighbor_data] [ literal[string] ][ literal[string] ][ identifier[remote_addr] ]={ literal[string] : identifier[napalm_base] . identifier[helpers] . identifier[as_number] ( identifier[entry] [ literal[string] ]), literal[string] : identifier[napalm_base] . identifier[helpers] . identifier[as_number] ( identifier[entry] [ literal[string] ]), literal[string] : identifier[remote_id] , literal[string] : identifier[is_up] , literal[string] : identifier[is_enabled] , literal[string] : identifier[description] , literal[string] : identifier[uptime] , literal[string] :{ identifier[afi] :{ literal[string] : identifier[received_prefixes] , literal[string] : identifier[accepted_prefixes] , literal[string] : identifier[sent_prefixes] } } } keyword[else] : identifier[existing] = identifier[bgp_neighbor_data] [ literal[string] ][ literal[string] ][ identifier[remote_addr] ] keyword[assert] identifier[afi] keyword[not] keyword[in] identifier[existing] [ literal[string] ] keyword[assert] identifier[existing] [ literal[string] ]== identifier[napalm_base] . identifier[helpers] . identifier[as_number] ( identifier[entry] [ literal[string] ]) keyword[assert] identifier[existing] [ literal[string] ]== identifier[napalm_base] . identifier[helpers] . identifier[as_number] ( identifier[entry] [ literal[string] ]) keyword[assert] identifier[existing] [ literal[string] ]== identifier[remote_id] keyword[assert] identifier[existing] [ literal[string] ]== identifier[is_enabled] keyword[assert] identifier[existing] [ literal[string] ]== identifier[description] identifier[existing] [ literal[string] ]= identifier[existing] [ literal[string] ] keyword[or] identifier[is_up] identifier[existing] [ literal[string] ]= identifier[max] ( identifier[existing] [ literal[string] ], identifier[uptime] ) identifier[existing] [ literal[string] ][ identifier[afi] ]={ literal[string] : identifier[received_prefixes] , literal[string] : identifier[accepted_prefixes] , literal[string] : identifier[sent_prefixes] } keyword[return] identifier[bgp_neighbor_data]
def get_bgp_neighbors(self): """BGP neighbor information. Currently no VRF support. Supports both IPv4 and IPv6. """ supported_afi = ['ipv4', 'ipv6'] bgp_neighbor_data = dict() bgp_neighbor_data['global'] = {} # get summary output from device cmd_bgp_all_sum = 'show bgp all summary' summary_output = self._send_command(cmd_bgp_all_sum).strip() # get neighbor output from device neighbor_output = '' for afi in supported_afi: cmd_bgp_neighbor = 'show bgp %s unicast neighbors' % afi neighbor_output += self._send_command(cmd_bgp_neighbor).strip() # trailing newline required for parsing neighbor_output += '\n' # depends on [control=['for'], data=['afi']] # Regular expressions used for parsing BGP summary # For address family: IPv4 Unicast # Capture router_id and local_as values, e.g.: # BGP router identifier 10.0.1.1, local AS number 65000 # Match neighbor summary row, capturing useful details and # discarding the 5 columns that we don't care about, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 10.0.0.2 4 65000 1336020 64337701 1011343614 0 0 8w0d 3143 # Same as above, but for peers that are not Established, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 192.168.0.2 4 65002 0 0 1 0 0 never Active # ipv6 peers often break across rows because of the longer peer address, # match as above, but in separate expressions, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 2001:DB8::4 # 4 65004 9900690 612449 155362939 0 0 26w6d 36391 # Same as above, but for peers that are not Established, e.g.: # Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd # 2001:DB8::3 # 4 65003 0 0 1 0 0 never Idle (Admin) parse_summary = {'patterns': [{'regexp': re.compile('^For address family: (?P<afi>\\S+) '), 'record': False}, {'regexp': re.compile('^.* router identifier (?P<router_id>{}), local AS number (?P<local_as>{})'.format(IPV4_ADDR_REGEX, ASN_REGEX)), 'record': False}, {'regexp': re.compile('^\\*?(?P<remote_addr>({})|({}))\\s+\\d+\\s+(?P<remote_as>{})(\\s+\\S+){{5}}\\s+(?P<uptime>(never)|\\d+\\S+)\\s+(?P<accepted_prefixes>\\d+)'.format(IPV4_ADDR_REGEX, IPV6_ADDR_REGEX, ASN_REGEX)), 'record': True}, {'regexp': re.compile('^\\*?(?P<remote_addr>({})|({}))\\s+\\d+\\s+(?P<remote_as>{})(\\s+\\S+){{5}}\\s+(?P<uptime>(never)|\\d+\\S+)\\s+(?P<state>\\D.*)'.format(IPV4_ADDR_REGEX, IPV6_ADDR_REGEX, ASN_REGEX)), 'record': True}, {'regexp': re.compile('^\\*?(?P<remote_addr>({})|({}))'.format(IPV4_ADDR_REGEX, IPV6_ADDR_REGEX)), 'record': False}, {'regexp': re.compile('^\\s+\\d+\\s+(?P<remote_as>{})(\\s+\\S+){{5}}\\s+(?P<uptime>(never)|\\d+\\S+)\\s+(?P<accepted_prefixes>\\d+)'.format(ASN_REGEX)), 'record': True}, {'regexp': re.compile('^\\s+\\d+\\s+(?P<remote_as>{})(\\s+\\S+){{5}}\\s+(?P<uptime>(never)|\\d+\\S+)\\s+(?P<state>\\D.*)'.format(ASN_REGEX)), 'record': True}], 'no_fill_fields': ['accepted_prefixes', 'state', 'uptime', 'remote_as', 'remote_addr']} # Capture BGP neighbor is 10.0.0.2, remote AS 65000, internal link # Capture description # Capture remote_id, e.g.: # BGP version 4, remote router ID 10.0.1.2 # Capture AFI and SAFI names, e.g.: # For address family: IPv4 Unicast # Capture current sent and accepted prefixes, e.g.: # Prefixes Current: 637213 3142 (Consumes 377040 bytes) # Capture received_prefixes if soft-reconfig is enabled for the peer # Otherwise, use the following as an end of row marker # fields that should not be "filled down" across table rows parse_neighbors = {'patterns': [{'regexp': re.compile('^BGP neighbor is (?P<remote_addr>({})|({})),\\s+remote AS (?P<remote_as>{}).*'.format(IPV4_ADDR_REGEX, IPV6_ADDR_REGEX, ASN_REGEX)), 'record': False}, {'regexp': re.compile('^\\s+Description: (?P<description>.+)'), 'record': False}, {'regexp': re.compile('^\\s+BGP version \\d+, remote router ID (?P<remote_id>{})'.format(IPV4_ADDR_REGEX)), 'record': False}, {'regexp': re.compile('^\\s+For address family: (?P<afi>\\S+) '), 'record': False}, {'regexp': re.compile('^\\s+Prefixes Current:\\s+(?P<sent_prefixes>\\d+)\\s+(?P<accepted_prefixes>\\d+).*'), 'record': False}, {'regexp': re.compile('^\\s+Saved (soft-reconfig):.+(?P<received_prefixes>\\d+).*'), 'record': True}, {'regexp': re.compile('^\\s+Local Policy Denied Prefixes:.+'), 'record': True}], 'no_fill_fields': ['received_prefixes', 'accepted_prefixes', 'sent_prefixes']} # Parse outputs into a list of dicts summary_data = [] summary_data_entry = {} for line in summary_output.splitlines(): # check for matches against each pattern for item in parse_summary['patterns']: match = item['regexp'].match(line) if match: # a match was found, so update the temp entry with the match's groupdict summary_data_entry.update(match.groupdict()) if item['record']: # Record indicates the last piece of data has been obtained; move # on to next entry summary_data.append(copy.deepcopy(summary_data_entry)) # remove keys that are listed in no_fill_fields before the next pass for field in parse_summary['no_fill_fields']: try: del summary_data_entry[field] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['line']] neighbor_data = [] neighbor_data_entry = {} for line in neighbor_output.splitlines(): # check for matches against each pattern for item in parse_neighbors['patterns']: match = item['regexp'].match(line) if match: # a match was found, so update the temp entry with the match's groupdict neighbor_data_entry.update(match.groupdict()) if item['record']: # Record indicates the last piece of data has been obtained; move # on to next entry neighbor_data.append(copy.deepcopy(neighbor_data_entry)) # remove keys that are listed in no_fill_fields before the next pass for field in parse_neighbors['no_fill_fields']: try: del neighbor_data_entry[field] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['line']] router_id = None for entry in summary_data: if not router_id: router_id = entry['router_id'] # depends on [control=['if'], data=[]] elif entry['router_id'] != router_id: raise ValueError # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']] # check the router_id looks like an ipv4 address router_id = napalm_base.helpers.ip(router_id, version=4) # add parsed data to output dict bgp_neighbor_data['global']['router_id'] = router_id bgp_neighbor_data['global']['peers'] = {} for entry in summary_data: remote_addr = napalm_base.helpers.ip(entry['remote_addr']) afi = entry['afi'].lower() # check that we're looking at a supported afi if afi not in supported_afi: continue # depends on [control=['if'], data=[]] # get neighbor_entry out of neighbor data neighbor_entry = None for neighbor in neighbor_data: if neighbor['afi'].lower() == afi and napalm_base.helpers.ip(neighbor['remote_addr']) == remote_addr: neighbor_entry = neighbor break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['neighbor']] if not isinstance(neighbor_entry, dict): raise ValueError(msg="Couldn't find neighbor data for %s in afi %s" % (remote_addr, afi)) # depends on [control=['if'], data=[]] # check for admin down state try: if '(Admin)' in entry['state']: is_enabled = False # depends on [control=['if'], data=[]] else: is_enabled = True # depends on [control=['try'], data=[]] except KeyError: is_enabled = True # depends on [control=['except'], data=[]] # parse uptime value uptime = self.bgp_time_conversion(entry['uptime']) # Uptime should be -1 if BGP session not up is_up = True if uptime >= 0 else False # check whether session is up for address family and get prefix count try: accepted_prefixes = int(entry['accepted_prefixes']) # depends on [control=['try'], data=[]] except (ValueError, KeyError): accepted_prefixes = -1 # depends on [control=['except'], data=[]] # Only parse neighbor detailed data if BGP session is up if is_up: try: # override accepted_prefixes with neighbor data if possible (since that's newer) accepted_prefixes = int(neighbor_entry['accepted_prefixes']) # depends on [control=['try'], data=[]] except (ValueError, KeyError): pass # depends on [control=['except'], data=[]] # try to get received prefix count, otherwise set to accepted_prefixes received_prefixes = neighbor_entry.get('received_prefixes', accepted_prefixes) # try to get sent prefix count and convert to int, otherwise set to -1 sent_prefixes = int(neighbor_entry.get('sent_prefixes', -1)) # depends on [control=['if'], data=[]] else: received_prefixes = -1 sent_prefixes = -1 # get description try: description = py23_compat.text_type(neighbor_entry['description']) # depends on [control=['try'], data=[]] except KeyError: description = '' # depends on [control=['except'], data=[]] # check the remote router_id looks like an ipv4 address remote_id = napalm_base.helpers.ip(neighbor_entry['remote_id'], version=4) if remote_addr not in bgp_neighbor_data['global']['peers']: bgp_neighbor_data['global']['peers'][remote_addr] = {'local_as': napalm_base.helpers.as_number(entry['local_as']), 'remote_as': napalm_base.helpers.as_number(entry['remote_as']), 'remote_id': remote_id, 'is_up': is_up, 'is_enabled': is_enabled, 'description': description, 'uptime': uptime, 'address_family': {afi: {'received_prefixes': received_prefixes, 'accepted_prefixes': accepted_prefixes, 'sent_prefixes': sent_prefixes}}} # depends on [control=['if'], data=['remote_addr']] else: # found previous data for matching remote_addr, but for different afi existing = bgp_neighbor_data['global']['peers'][remote_addr] assert afi not in existing['address_family'] # compare with existing values and croak if they don't match assert existing['local_as'] == napalm_base.helpers.as_number(entry['local_as']) assert existing['remote_as'] == napalm_base.helpers.as_number(entry['remote_as']) assert existing['remote_id'] == remote_id assert existing['is_enabled'] == is_enabled assert existing['description'] == description # merge other values in a sane manner existing['is_up'] = existing['is_up'] or is_up existing['uptime'] = max(existing['uptime'], uptime) existing['address_family'][afi] = {'received_prefixes': received_prefixes, 'accepted_prefixes': accepted_prefixes, 'sent_prefixes': sent_prefixes} # depends on [control=['for'], data=['entry']] return bgp_neighbor_data
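The code_dependency field reprints the source with trailing comments such as # depends on [control=['if'], data=['component']], naming the control construct each block sits under. A sketch of how the control part of such tags could be derived, by recording each statement's nearest enclosing control node; the dataset's actual annotator, including its data=[...] component, is not shown here.

# Sketch only: derive an enclosing-control tag per statement from the AST.
import ast

CONTROL = (ast.If, ast.For, ast.While, ast.Try, ast.With)

def control_tags(src):
    tree = ast.parse(src)
    for parent in ast.walk(tree):
        for child in ast.iter_child_nodes(parent):
            child.parent = parent  # attach back-edges for upward walks
    for node in ast.walk(tree):
        if isinstance(node, ast.stmt):
            enclosing = getattr(node, 'parent', None)
            while enclosing is not None and not isinstance(enclosing, CONTROL):
                enclosing = getattr(enclosing, 'parent', None)
            if enclosing is not None:
                yield node.lineno, type(enclosing).__name__.lower()

print(list(control_tags('for x in xs:\n    if x:\n        y = x\n')))  # [(2, 'for'), (3, 'if')]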
def _parseSCPDVariableTypes(self, variableListElement, variableTypes): """Internal method to parse the SCPD definitions. :param variableListElement: the xml root node of the variable list :type variableListElement: xml.etree.ElementTree.Element :param dict variableTypes: a container to store the variables """ # iterate through all variables for variableElement in variableListElement.getchildren(): variable = {} # iterate through the variable definition for inVariableElement in variableElement.getchildren(): tagName = inVariableElement.tag.lower() if tagName.endswith("name"): variable["name"] = inVariableElement.text elif tagName.endswith("datatype"): variable["dataType"] = inVariableElement.text elif tagName.endswith("defaultvalue"): variable["defaultValue"] = inVariableElement.text if "name" not in variable.keys(): raise ValueError("Variable has no name defined.") if "dataType" not in variable.keys(): raise ValueError("No dataType was defined by variable: " + variable["name"]) if variable["name"] in variableTypes.keys(): raise ValueError("Variable has been defined multiple times: " + variable["name"]) variableTypes[variable["name"]] = variable
def function[_parseSCPDVariableTypes, parameter[self, variableListElement, variableTypes]]: constant[Internal method to parse the SCPD definitions. :param variableListElement: the xml root node of the variable list :type variableListElement: xml.etree.ElementTree.Element :param dict variableTypes: a container to store the variables ] for taget[name[variableElement]] in starred[call[name[variableListElement].getchildren, parameter[]]] begin[:] variable[variable] assign[=] dictionary[[], []] for taget[name[inVariableElement]] in starred[call[name[variableElement].getchildren, parameter[]]] begin[:] variable[tagName] assign[=] call[name[inVariableElement].tag.lower, parameter[]] if call[name[tagName].endswith, parameter[constant[name]]] begin[:] call[name[variable]][constant[name]] assign[=] name[inVariableElement].text if compare[constant[name] <ast.NotIn object at 0x7da2590d7190> call[name[variable].keys, parameter[]]] begin[:] <ast.Raise object at 0x7da20e9b1d20> if compare[constant[dataType] <ast.NotIn object at 0x7da2590d7190> call[name[variable].keys, parameter[]]] begin[:] <ast.Raise object at 0x7da20e9b0a00> if compare[call[name[variable]][constant[name]] in call[name[variableTypes].keys, parameter[]]] begin[:] <ast.Raise object at 0x7da20e9b0760> call[name[variableTypes]][call[name[variable]][constant[name]]] assign[=] name[variable]
keyword[def] identifier[_parseSCPDVariableTypes] ( identifier[self] , identifier[variableListElement] , identifier[variableTypes] ): literal[string] keyword[for] identifier[variableElement] keyword[in] identifier[variableListElement] . identifier[getchildren] (): identifier[variable] ={} keyword[for] identifier[inVariableElement] keyword[in] identifier[variableElement] . identifier[getchildren] (): identifier[tagName] = identifier[inVariableElement] . identifier[tag] . identifier[lower] () keyword[if] identifier[tagName] . identifier[endswith] ( literal[string] ): identifier[variable] [ literal[string] ]= identifier[inVariableElement] . identifier[text] keyword[elif] identifier[tagName] . identifier[endswith] ( literal[string] ): identifier[variable] [ literal[string] ]= identifier[inVariableElement] . identifier[text] keyword[elif] identifier[tagName] . identifier[endswith] ( literal[string] ): identifier[variable] [ literal[string] ]= identifier[inVariableElement] . identifier[text] keyword[if] literal[string] keyword[not] keyword[in] identifier[variable] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[variable] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] + identifier[variable] [ literal[string] ]) keyword[if] identifier[variable] [ literal[string] ] keyword[in] identifier[variableTypes] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] + identifier[variable] [ literal[string] ]) identifier[variableTypes] [ identifier[variable] [ literal[string] ]]= identifier[variable]
def _parseSCPDVariableTypes(self, variableListElement, variableTypes): """Internal method to parse the SCPD definitions. :param variableListElement: the xml root node of the variable list :type variableListElement: xml.etree.ElementTree.Element :param dict variableTypes: a container to store the variables """ # iterate through all variables for variableElement in variableListElement.getchildren(): variable = {} # iterate through the variable definition for inVariableElement in variableElement.getchildren(): tagName = inVariableElement.tag.lower() if tagName.endswith('name'): variable['name'] = inVariableElement.text # depends on [control=['if'], data=[]] elif tagName.endswith('datatype'): variable['dataType'] = inVariableElement.text # depends on [control=['if'], data=[]] elif tagName.endswith('defaultvalue'): variable['defaultValue'] = inVariableElement.text # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inVariableElement']] if 'name' not in variable.keys(): raise ValueError('Variable has no name defined.') # depends on [control=['if'], data=[]] if 'dataType' not in variable.keys(): raise ValueError('No dataType was defined by variable: ' + variable['name']) # depends on [control=['if'], data=[]] if variable['name'] in variableTypes.keys(): raise ValueError('Variable has been defined multiple times: ' + variable['name']) # depends on [control=['if'], data=[]] variableTypes[variable['name']] = variable # depends on [control=['for'], data=['variableElement']]
def reset(self, reset_type=None): """! @brief Reset the core. The reset method is selectable via the reset_type parameter as well as the reset_type session option. If the reset_type parameter is not specified or None, then the reset_type option will be used. If the option is not set, or if it is set to a value of 'default', then the core's default_reset_type property value is used. So, the session option overrides the core's default, while the parameter overrides everything. Note that only v7-M cores support the `VECTRESET` software reset method. If this method is chosen but the core doesn't support it, then the reset method will fall back to an emulated software reset. After a call to this function, the core is running. """ self.notify(Notification(event=Target.EVENT_PRE_RESET, source=self)) reset_type = self._get_actual_reset_type(reset_type) self._run_token += 1 # Give the delegate a chance to override reset. If the delegate returns True, then it # handled the reset on its own. if not self.call_delegate('will_reset', core=self, reset_type=reset_type): self._perform_reset(reset_type) self.call_delegate('did_reset', core=self, reset_type=reset_type) # Now wait for the system to come out of reset. Keep reading the DHCSR until # we get a good response with S_RESET_ST cleared, or we time out. with timeout.Timeout(2.0) as t_o: while t_o.check(): try: dhcsr = self.read32(CortexM.DHCSR) if (dhcsr & CortexM.S_RESET_ST) == 0: break except exceptions.TransferError: self.flush() sleep(0.01) self.notify(Notification(event=Target.EVENT_POST_RESET, source=self))
def function[reset, parameter[self, reset_type]]: constant[! @brief Reset the core. The reset method is selectable via the reset_type parameter as well as the reset_type session option. If the reset_type parameter is not specified or None, then the reset_type option will be used. If the option is not set, or if it is set to a value of 'default', then the core's default_reset_type property value is used. So, the session option overrides the core's default, while the parameter overrides everything. Note that only v7-M cores support the `VECTRESET` software reset method. If this method is chosen but the core doesn't support it, then the reset method will fall back to an emulated software reset. After a call to this function, the core is running. ] call[name[self].notify, parameter[call[name[Notification], parameter[]]]] variable[reset_type] assign[=] call[name[self]._get_actual_reset_type, parameter[name[reset_type]]] <ast.AugAssign object at 0x7da1b18ae320> if <ast.UnaryOp object at 0x7da1b18ad1e0> begin[:] call[name[self]._perform_reset, parameter[name[reset_type]]] call[name[self].call_delegate, parameter[constant[did_reset]]] with call[name[timeout].Timeout, parameter[constant[2.0]]] begin[:] while call[name[t_o].check, parameter[]] begin[:] <ast.Try object at 0x7da1b18dcac0> call[name[self].notify, parameter[call[name[Notification], parameter[]]]]
keyword[def] identifier[reset] ( identifier[self] , identifier[reset_type] = keyword[None] ): literal[string] identifier[self] . identifier[notify] ( identifier[Notification] ( identifier[event] = identifier[Target] . identifier[EVENT_PRE_RESET] , identifier[source] = identifier[self] )) identifier[reset_type] = identifier[self] . identifier[_get_actual_reset_type] ( identifier[reset_type] ) identifier[self] . identifier[_run_token] += literal[int] keyword[if] keyword[not] identifier[self] . identifier[call_delegate] ( literal[string] , identifier[core] = identifier[self] , identifier[reset_type] = identifier[reset_type] ): identifier[self] . identifier[_perform_reset] ( identifier[reset_type] ) identifier[self] . identifier[call_delegate] ( literal[string] , identifier[core] = identifier[self] , identifier[reset_type] = identifier[reset_type] ) keyword[with] identifier[timeout] . identifier[Timeout] ( literal[int] ) keyword[as] identifier[t_o] : keyword[while] identifier[t_o] . identifier[check] (): keyword[try] : identifier[dhcsr] = identifier[self] . identifier[read32] ( identifier[CortexM] . identifier[DHCSR] ) keyword[if] ( identifier[dhcsr] & identifier[CortexM] . identifier[S_RESET_ST] )== literal[int] : keyword[break] keyword[except] identifier[exceptions] . identifier[TransferError] : identifier[self] . identifier[flush] () identifier[sleep] ( literal[int] ) identifier[self] . identifier[notify] ( identifier[Notification] ( identifier[event] = identifier[Target] . identifier[EVENT_POST_RESET] , identifier[source] = identifier[self] ))
def reset(self, reset_type=None): """! @brief Reset the core. The reset method is selectable via the reset_type parameter as well as the reset_type session option. If the reset_type parameter is not specified or None, then the reset_type option will be used. If the option is not set, or if it is set to a value of 'default', then the core's default_reset_type property value is used. So, the session option overrides the core's default, while the parameter overrides everything. Note that only v7-M cores support the `VECTRESET` software reset method. If this method is chosen but the core doesn't support it, then the reset method will fall back to an emulated software reset. After a call to this function, the core is running. """ self.notify(Notification(event=Target.EVENT_PRE_RESET, source=self)) reset_type = self._get_actual_reset_type(reset_type) self._run_token += 1 # Give the delegate a chance to override reset. If the delegate returns True, then it # handled the reset on its own. if not self.call_delegate('will_reset', core=self, reset_type=reset_type): self._perform_reset(reset_type) # depends on [control=['if'], data=[]] self.call_delegate('did_reset', core=self, reset_type=reset_type) # Now wait for the system to come out of reset. Keep reading the DHCSR until # we get a good response with S_RESET_ST cleared, or we time out. with timeout.Timeout(2.0) as t_o: while t_o.check(): try: dhcsr = self.read32(CortexM.DHCSR) if dhcsr & CortexM.S_RESET_ST == 0: break # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except exceptions.TransferError: self.flush() sleep(0.01) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['t_o']] self.notify(Notification(event=Target.EVENT_POST_RESET, source=self))
def get_perceel_by_id(self, id): ''' Retrieve a `Perceel` by the Id. :param string id: the Id of the `Perceel` :rtype: :class:`Perceel` ''' def creator(): res = crab_gateway_request( self.client, 'GetPerceelByIdentificatorPerceel', id ) if res == None: raise GatewayResourceNotFoundException() return Perceel( res.IdentificatorPerceel, (res.CenterX, res.CenterY), Metadata( res.BeginDatum, res.BeginTijd, self.get_bewerking(res.BeginBewerking), self.get_organisatie(res.BeginOrganisatie) ) ) if self.caches['short'].is_configured: key = 'GetPerceelByIdentificatorPerceel#%s' % (id) perceel = self.caches['short'].get_or_create(key, creator) else: perceel = creator() perceel.set_gateway(self) return perceel
def function[get_perceel_by_id, parameter[self, id]]: constant[ Retrieve a `Perceel` by the Id. :param string id: the Id of the `Perceel` :rtype: :class:`Perceel` ] def function[creator, parameter[]]: variable[res] assign[=] call[name[crab_gateway_request], parameter[name[self].client, constant[GetPerceelByIdentificatorPerceel], name[id]]] if compare[name[res] equal[==] constant[None]] begin[:] <ast.Raise object at 0x7da1b0b6c100> return[call[name[Perceel], parameter[name[res].IdentificatorPerceel, tuple[[<ast.Attribute object at 0x7da1b0b6d2a0>, <ast.Attribute object at 0x7da1b0b6c070>]], call[name[Metadata], parameter[name[res].BeginDatum, name[res].BeginTijd, call[name[self].get_bewerking, parameter[name[res].BeginBewerking]], call[name[self].get_organisatie, parameter[name[res].BeginOrganisatie]]]]]]] if call[name[self].caches][constant[short]].is_configured begin[:] variable[key] assign[=] binary_operation[constant[GetPerceelByIdentificatorPerceel#%s] <ast.Mod object at 0x7da2590d6920> name[id]] variable[perceel] assign[=] call[call[name[self].caches][constant[short]].get_or_create, parameter[name[key], name[creator]]] call[name[perceel].set_gateway, parameter[name[self]]] return[name[perceel]]
keyword[def] identifier[get_perceel_by_id] ( identifier[self] , identifier[id] ): literal[string] keyword[def] identifier[creator] (): identifier[res] = identifier[crab_gateway_request] ( identifier[self] . identifier[client] , literal[string] , identifier[id] ) keyword[if] identifier[res] == keyword[None] : keyword[raise] identifier[GatewayResourceNotFoundException] () keyword[return] identifier[Perceel] ( identifier[res] . identifier[IdentificatorPerceel] , ( identifier[res] . identifier[CenterX] , identifier[res] . identifier[CenterY] ), identifier[Metadata] ( identifier[res] . identifier[BeginDatum] , identifier[res] . identifier[BeginTijd] , identifier[self] . identifier[get_bewerking] ( identifier[res] . identifier[BeginBewerking] ), identifier[self] . identifier[get_organisatie] ( identifier[res] . identifier[BeginOrganisatie] ) ) ) keyword[if] identifier[self] . identifier[caches] [ literal[string] ]. identifier[is_configured] : identifier[key] = literal[string] %( identifier[id] ) identifier[perceel] = identifier[self] . identifier[caches] [ literal[string] ]. identifier[get_or_create] ( identifier[key] , identifier[creator] ) keyword[else] : identifier[perceel] = identifier[creator] () identifier[perceel] . identifier[set_gateway] ( identifier[self] ) keyword[return] identifier[perceel]
def get_perceel_by_id(self, id): """ Retrieve a `Perceel` by the Id. :param string id: the Id of the `Perceel` :rtype: :class:`Perceel` """ def creator(): res = crab_gateway_request(self.client, 'GetPerceelByIdentificatorPerceel', id) if res == None: raise GatewayResourceNotFoundException() # depends on [control=['if'], data=[]] return Perceel(res.IdentificatorPerceel, (res.CenterX, res.CenterY), Metadata(res.BeginDatum, res.BeginTijd, self.get_bewerking(res.BeginBewerking), self.get_organisatie(res.BeginOrganisatie))) if self.caches['short'].is_configured: key = 'GetPerceelByIdentificatorPerceel#%s' % id perceel = self.caches['short'].get_or_create(key, creator) # depends on [control=['if'], data=[]] else: perceel = creator() perceel.set_gateway(self) return perceel
def splitkeyurl(url): ''' Splits a Send url into key, urlid and 'prefix' for the Send server Should handle any hostname, but will break on key & id length changes ''' key = url[-22:] urlid = url[-34:-24] service = url[:-43] return service, urlid, key
def function[splitkeyurl, parameter[url]]: constant[ Splits a Send url into key, urlid and 'prefix' for the Send server Should handle any hostname, but will brake on key & id length changes ] variable[key] assign[=] call[name[url]][<ast.Slice object at 0x7da1b11f6230>] variable[urlid] assign[=] call[name[url]][<ast.Slice object at 0x7da1b11f4be0>] variable[service] assign[=] call[name[url]][<ast.Slice object at 0x7da20c76c2e0>] return[tuple[[<ast.Name object at 0x7da1b120a5f0>, <ast.Name object at 0x7da1b1209600>, <ast.Name object at 0x7da1b120a5c0>]]]
keyword[def] identifier[splitkeyurl] ( identifier[url] ): literal[string] identifier[key] = identifier[url] [- literal[int] :] identifier[urlid] = identifier[url] [- literal[int] :- literal[int] ] identifier[service] = identifier[url] [:- literal[int] ] keyword[return] identifier[service] , identifier[urlid] , identifier[key]
def splitkeyurl(url):
    """
    Splits a Send URL into key, urlid and 'prefix' for the Send server.
    Should handle any hostname, but will break on key & id length changes
    """
    key = url[-22:]
    urlid = url[-34:-24]
    service = url[:-43]
    return (service, urlid, key)
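Given the fixed offsets above, a Send link decomposes as service prefix + 9 path characters + 10-character id + 2-character separator + 22-character key. A quick check with a made-up URL whose host, id and key are placeholders sized to those widths:

url = 'https://send.firefox.com/download/0123456789/#AAAAAAAAAAAAAAAAAAAAAA'
service, urlid, key = splitkeyurl(url)
assert key == 'AAAAAAAAAAAAAAAAAAAAAA'           # url[-22:]
assert urlid == '0123456789'                     # url[-34:-24]
assert service == 'https://send.firefox.com/'    # url[:-43]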
def render_from_tag(
        cls, context, max_levels=None, use_specific=None,
        apply_active_classes=True, allow_repeating_parents=True,
        use_absolute_page_urls=False, add_sub_menus_inline=None,
        template_name='', **kwargs
):
    """
    A template tag should call this method to render a menu.
    The ``Context`` instance and option values provided are used to get or
    create a relevant menu instance, prepare it, then render it and its
    menu items to an appropriate template.

    It shouldn't be necessary to override this method, as any new option
    values will be available as a dict in `opt_vals.extra`, and there are
    more specific methods for overriding certain behaviour at different
    stages of rendering, such as:

    * get_from_collected_values() (if the class is a Django model), OR
    * create_from_collected_values() (if it isn't)
    * prepare_to_render()
    * get_context_data()
    * render_to_template()
    """
    instance = cls._get_render_prepared_object(
        context,
        max_levels=max_levels,
        use_specific=use_specific,
        apply_active_classes=apply_active_classes,
        allow_repeating_parents=allow_repeating_parents,
        use_absolute_page_urls=use_absolute_page_urls,
        add_sub_menus_inline=add_sub_menus_inline,
        template_name=template_name,
        **kwargs
    )
    if not instance:
        return ''
    return instance.render_to_template()
def function[render_from_tag, parameter[cls, context, max_levels, use_specific, apply_active_classes, allow_repeating_parents, use_absolute_page_urls, add_sub_menus_inline, template_name]]: constant[ A template tag should call this method to render a menu. The ``Context`` instance and option values provided are used to get or create a relevant menu instance, prepare it, then render it and it's menu items to an appropriate template. It shouldn't be neccessary to override this method, as any new option values will be available as a dict in `opt_vals.extra`, and there are more specific methods for overriding certain behaviour at different stages of rendering, such as: * get_from_collected_values() (if the class is a Django model), OR * create_from_collected_values() (if it isn't) * prepare_to_render() * get_context_data() * render_to_template() ] variable[instance] assign[=] call[name[cls]._get_render_prepared_object, parameter[name[context]]] if <ast.UnaryOp object at 0x7da1b1115d80> begin[:] return[constant[]] return[call[name[instance].render_to_template, parameter[]]]
keyword[def] identifier[render_from_tag] ( identifier[cls] , identifier[context] , identifier[max_levels] = keyword[None] , identifier[use_specific] = keyword[None] , identifier[apply_active_classes] = keyword[True] , identifier[allow_repeating_parents] = keyword[True] , identifier[use_absolute_page_urls] = keyword[False] , identifier[add_sub_menus_inline] = keyword[None] , identifier[template_name] = literal[string] ,** identifier[kwargs] ): literal[string] identifier[instance] = identifier[cls] . identifier[_get_render_prepared_object] ( identifier[context] , identifier[max_levels] = identifier[max_levels] , identifier[use_specific] = identifier[use_specific] , identifier[apply_active_classes] = identifier[apply_active_classes] , identifier[allow_repeating_parents] = identifier[allow_repeating_parents] , identifier[use_absolute_page_urls] = identifier[use_absolute_page_urls] , identifier[add_sub_menus_inline] = identifier[add_sub_menus_inline] , identifier[template_name] = identifier[template_name] , ** identifier[kwargs] ) keyword[if] keyword[not] identifier[instance] : keyword[return] literal[string] keyword[return] identifier[instance] . identifier[render_to_template] ()
def render_from_tag(cls, context, max_levels=None, use_specific=None, apply_active_classes=True, allow_repeating_parents=True, use_absolute_page_urls=False, add_sub_menus_inline=None, template_name='', **kwargs):
    """
    A template tag should call this method to render a menu.
    The ``Context`` instance and option values provided are used to get or
    create a relevant menu instance, prepare it, then render it and its
    menu items to an appropriate template.

    It shouldn't be necessary to override this method, as any new option
    values will be available as a dict in `opt_vals.extra`, and there are
    more specific methods for overriding certain behaviour at different
    stages of rendering, such as:

    * get_from_collected_values() (if the class is a Django model), OR
    * create_from_collected_values() (if it isn't)
    * prepare_to_render()
    * get_context_data()
    * render_to_template()
    """
    instance = cls._get_render_prepared_object(context, max_levels=max_levels, use_specific=use_specific, apply_active_classes=apply_active_classes, allow_repeating_parents=allow_repeating_parents, use_absolute_page_urls=use_absolute_page_urls, add_sub_menus_inline=add_sub_menus_inline, template_name=template_name, **kwargs)
    if not instance:
        return '' # depends on [control=['if'], data=[]]
    return instance.render_to_template()
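On the calling side this is typically wrapped by a template tag that forwards its context and options; a hedged sketch (the tag name is illustrative and the MainMenu import path is an assumption):

from django import template
from wagtailmenus.models import MainMenu   # assumed import path

register = template.Library()

@register.simple_tag(takes_context=True)
def example_main_menu(context, max_levels=None, **kwargs):
    # render_from_tag returns '' when no menu instance can be prepared.
    return MainMenu.render_from_tag(context, max_levels=max_levels, **kwargs)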
def dist(self, src, tar): """Return the NCD between two strings using BWT plus RLE. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison Returns ------- float Compression distance Examples -------- >>> cmp = NCDbwtrle() >>> cmp.dist('cat', 'hat') 0.75 >>> cmp.dist('Niall', 'Neil') 0.8333333333333334 >>> cmp.dist('aluminum', 'Catalan') 1.0 >>> cmp.dist('ATCG', 'TAGC') 0.8 """ if src == tar: return 0.0 src_comp = self._rle.encode(self._bwt.encode(src)) tar_comp = self._rle.encode(self._bwt.encode(tar)) concat_comp = self._rle.encode(self._bwt.encode(src + tar)) concat_comp2 = self._rle.encode(self._bwt.encode(tar + src)) return ( min(len(concat_comp), len(concat_comp2)) - min(len(src_comp), len(tar_comp)) ) / max(len(src_comp), len(tar_comp))
def function[dist, parameter[self, src, tar]]: constant[Return the NCD between two strings using BWT plus RLE. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison Returns ------- float Compression distance Examples -------- >>> cmp = NCDbwtrle() >>> cmp.dist('cat', 'hat') 0.75 >>> cmp.dist('Niall', 'Neil') 0.8333333333333334 >>> cmp.dist('aluminum', 'Catalan') 1.0 >>> cmp.dist('ATCG', 'TAGC') 0.8 ] if compare[name[src] equal[==] name[tar]] begin[:] return[constant[0.0]] variable[src_comp] assign[=] call[name[self]._rle.encode, parameter[call[name[self]._bwt.encode, parameter[name[src]]]]] variable[tar_comp] assign[=] call[name[self]._rle.encode, parameter[call[name[self]._bwt.encode, parameter[name[tar]]]]] variable[concat_comp] assign[=] call[name[self]._rle.encode, parameter[call[name[self]._bwt.encode, parameter[binary_operation[name[src] + name[tar]]]]]] variable[concat_comp2] assign[=] call[name[self]._rle.encode, parameter[call[name[self]._bwt.encode, parameter[binary_operation[name[tar] + name[src]]]]]] return[binary_operation[binary_operation[call[name[min], parameter[call[name[len], parameter[name[concat_comp]]], call[name[len], parameter[name[concat_comp2]]]]] - call[name[min], parameter[call[name[len], parameter[name[src_comp]]], call[name[len], parameter[name[tar_comp]]]]]] / call[name[max], parameter[call[name[len], parameter[name[src_comp]]], call[name[len], parameter[name[tar_comp]]]]]]]
keyword[def] identifier[dist] ( identifier[self] , identifier[src] , identifier[tar] ): literal[string] keyword[if] identifier[src] == identifier[tar] : keyword[return] literal[int] identifier[src_comp] = identifier[self] . identifier[_rle] . identifier[encode] ( identifier[self] . identifier[_bwt] . identifier[encode] ( identifier[src] )) identifier[tar_comp] = identifier[self] . identifier[_rle] . identifier[encode] ( identifier[self] . identifier[_bwt] . identifier[encode] ( identifier[tar] )) identifier[concat_comp] = identifier[self] . identifier[_rle] . identifier[encode] ( identifier[self] . identifier[_bwt] . identifier[encode] ( identifier[src] + identifier[tar] )) identifier[concat_comp2] = identifier[self] . identifier[_rle] . identifier[encode] ( identifier[self] . identifier[_bwt] . identifier[encode] ( identifier[tar] + identifier[src] )) keyword[return] ( identifier[min] ( identifier[len] ( identifier[concat_comp] ), identifier[len] ( identifier[concat_comp2] )) - identifier[min] ( identifier[len] ( identifier[src_comp] ), identifier[len] ( identifier[tar_comp] )) )/ identifier[max] ( identifier[len] ( identifier[src_comp] ), identifier[len] ( identifier[tar_comp] ))
def dist(self, src, tar): """Return the NCD between two strings using BWT plus RLE. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison Returns ------- float Compression distance Examples -------- >>> cmp = NCDbwtrle() >>> cmp.dist('cat', 'hat') 0.75 >>> cmp.dist('Niall', 'Neil') 0.8333333333333334 >>> cmp.dist('aluminum', 'Catalan') 1.0 >>> cmp.dist('ATCG', 'TAGC') 0.8 """ if src == tar: return 0.0 # depends on [control=['if'], data=[]] src_comp = self._rle.encode(self._bwt.encode(src)) tar_comp = self._rle.encode(self._bwt.encode(tar)) concat_comp = self._rle.encode(self._bwt.encode(src + tar)) concat_comp2 = self._rle.encode(self._bwt.encode(tar + src)) return (min(len(concat_comp), len(concat_comp2)) - min(len(src_comp), len(tar_comp))) / max(len(src_comp), len(tar_comp))
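The return expression is the standard normalized compression distance, NCD(x, y) = (C(xy) - min(C(x), C(y))) / max(C(x), C(y)), taking the better of the two concatenation orders for C(xy). A minimal sketch of the same formula with zlib standing in for the BWT+RLE compressor (an assumption made purely for illustration, so the values differ from the doctests above):

import zlib

def ncd(src, tar):
    if src == tar:
        return 0.0   # identical inputs short-circuit, mirroring the method above
    def c(s):
        return len(zlib.compress(s.encode('utf-8')))
    concat = min(c(src + tar), c(tar + src))   # better of the two concatenation orders
    return (concat - min(c(src), c(tar))) / max(c(src), c(tar))

print(ncd('Niall', 'Neil'))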
def asyncPipeRename(context=None, _INPUT=None, conf=None, **kwargs): """An operator that asynchronously renames or copies fields in the input source. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'RULE': [ { 'op': {'value': 'rename or copy'}, 'field': {'value': 'old field'}, 'newval': {'value': 'new field'} } ] } kwargs : other inputs, e.g., to feed terminals for rule values Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ splits = yield asyncGetSplits(_INPUT, conf['RULE'], **cdicts(opts, kwargs)) _OUTPUT = yield maybeDeferred(parse_results, splits, **kwargs) returnValue(_OUTPUT)
def function[asyncPipeRename, parameter[context, _INPUT, conf]]: constant[An operator that asynchronously renames or copies fields in the input source. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'RULE': [ { 'op': {'value': 'rename or copy'}, 'field': {'value': 'old field'}, 'newval': {'value': 'new field'} } ] } kwargs : other inputs, e.g., to feed terminals for rule values Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items ] variable[splits] assign[=] <ast.Yield object at 0x7da1b0693d90> variable[_OUTPUT] assign[=] <ast.Yield object at 0x7da1b0692590> call[name[returnValue], parameter[name[_OUTPUT]]]
keyword[def] identifier[asyncPipeRename] ( identifier[context] = keyword[None] , identifier[_INPUT] = keyword[None] , identifier[conf] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[splits] = keyword[yield] identifier[asyncGetSplits] ( identifier[_INPUT] , identifier[conf] [ literal[string] ],** identifier[cdicts] ( identifier[opts] , identifier[kwargs] )) identifier[_OUTPUT] = keyword[yield] identifier[maybeDeferred] ( identifier[parse_results] , identifier[splits] ,** identifier[kwargs] ) identifier[returnValue] ( identifier[_OUTPUT] )
def asyncPipeRename(context=None, _INPUT=None, conf=None, **kwargs): """An operator that asynchronously renames or copies fields in the input source. Not loopable. Parameters ---------- context : pipe2py.Context object _INPUT : asyncPipe like object (twisted Deferred iterable of items) conf : { 'RULE': [ { 'op': {'value': 'rename or copy'}, 'field': {'value': 'old field'}, 'newval': {'value': 'new field'} } ] } kwargs : other inputs, e.g., to feed terminals for rule values Returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of items """ splits = (yield asyncGetSplits(_INPUT, conf['RULE'], **cdicts(opts, kwargs))) _OUTPUT = (yield maybeDeferred(parse_results, splits, **kwargs)) returnValue(_OUTPUT)
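The yield/returnValue pairing only works inside a generator decorated with Twisted's inlineCallbacks, which presumably decorates this function in the original source (the decorator is not shown in the row). A minimal sketch of the pattern:

from twisted.internet import defer

@defer.inlineCallbacks
def double_later(deferred_value):
    value = yield deferred_value    # suspends until the Deferred fires
    defer.returnValue(value * 2)    # hands the result back through the caller's Deferred

d = double_later(defer.succeed(21))
d.addCallback(print)                # prints 42 once the chain runs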
def is_internal_attribute(obj, attr):
    """Test if the attribute given is an internal python attribute.  For
    example this function returns `True` for the `func_code` attribute of
    python objects.  This is useful if the environment method
    :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.

    >>> from jinja2.sandbox import is_internal_attribute
    >>> is_internal_attribute(lambda: None, "func_code")
    True
    >>> is_internal_attribute((lambda x:x).func_code, 'co_code')
    True
    >>> is_internal_attribute(str, "upper")
    False
    """
    if isinstance(obj, FunctionType):
        if attr in UNSAFE_FUNCTION_ATTRIBUTES:
            return True
    elif isinstance(obj, MethodType):
        if attr in UNSAFE_FUNCTION_ATTRIBUTES or \
           attr in UNSAFE_METHOD_ATTRIBUTES:
            return True
    elif isinstance(obj, type):
        if attr == 'mro':
            return True
    elif isinstance(obj, (CodeType, TracebackType, FrameType)):
        return True
    elif isinstance(obj, GeneratorType):
        if attr == 'gi_frame':
            return True
    return attr.startswith('__')
def function[is_internal_attribute, parameter[obj, attr]]: constant[Test if the attribute given is an internal python attribute. For example this function returns `True` for the `func_code` attribute of python objects. This is useful if the environment method :meth:`~SandboxedEnvironment.is_safe_attribute` is overriden. >>> from jinja2.sandbox import is_internal_attribute >>> is_internal_attribute(lambda: None, "func_code") True >>> is_internal_attribute((lambda x:x).func_code, 'co_code') True >>> is_internal_attribute(str, "upper") False ] if call[name[isinstance], parameter[name[obj], name[FunctionType]]] begin[:] if compare[name[attr] in name[UNSAFE_FUNCTION_ATTRIBUTES]] begin[:] return[constant[True]] return[call[name[attr].startswith, parameter[constant[__]]]]
keyword[def] identifier[is_internal_attribute] ( identifier[obj] , identifier[attr] ): literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[FunctionType] ): keyword[if] identifier[attr] keyword[in] identifier[UNSAFE_FUNCTION_ATTRIBUTES] : keyword[return] keyword[True] keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[MethodType] ): keyword[if] identifier[attr] keyword[in] identifier[UNSAFE_FUNCTION_ATTRIBUTES] keyword[or] identifier[attr] keyword[in] identifier[UNSAFE_METHOD_ATTRIBUTES] : keyword[return] keyword[True] keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[type] ): keyword[if] identifier[attr] == literal[string] : keyword[return] keyword[True] keyword[elif] identifier[isinstance] ( identifier[obj] ,( identifier[CodeType] , identifier[TracebackType] , identifier[FrameType] )): keyword[return] keyword[True] keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[GeneratorType] ): keyword[if] identifier[attr] == literal[string] : keyword[return] keyword[True] keyword[return] identifier[attr] . identifier[startswith] ( literal[string] )
def is_internal_attribute(obj, attr):
    """Test if the attribute given is an internal python attribute.  For
    example this function returns `True` for the `func_code` attribute of
    python objects.  This is useful if the environment method
    :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.

    >>> from jinja2.sandbox import is_internal_attribute
    >>> is_internal_attribute(lambda: None, "func_code")
    True
    >>> is_internal_attribute((lambda x:x).func_code, 'co_code')
    True
    >>> is_internal_attribute(str, "upper")
    False
    """
    if isinstance(obj, FunctionType):
        if attr in UNSAFE_FUNCTION_ATTRIBUTES:
            return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    elif isinstance(obj, MethodType):
        if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
            return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    elif isinstance(obj, type):
        if attr == 'mro':
            return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    elif isinstance(obj, (CodeType, TracebackType, FrameType)):
        return True # depends on [control=['if'], data=[]]
    elif isinstance(obj, GeneratorType):
        if attr == 'gi_frame':
            return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    return attr.startswith('__')
def format_help(help): """Formats the help string.""" help = help.replace("Options:", str(crayons.normal("Options:", bold=True))) help = help.replace( "Usage: pipenv", str("Usage: {0}".format(crayons.normal("pipenv", bold=True))) ) help = help.replace(" check", str(crayons.red(" check", bold=True))) help = help.replace(" clean", str(crayons.red(" clean", bold=True))) help = help.replace(" graph", str(crayons.red(" graph", bold=True))) help = help.replace(" install", str(crayons.magenta(" install", bold=True))) help = help.replace(" lock", str(crayons.green(" lock", bold=True))) help = help.replace(" open", str(crayons.red(" open", bold=True))) help = help.replace(" run", str(crayons.yellow(" run", bold=True))) help = help.replace(" shell", str(crayons.yellow(" shell", bold=True))) help = help.replace(" sync", str(crayons.green(" sync", bold=True))) help = help.replace(" uninstall", str(crayons.magenta(" uninstall", bold=True))) help = help.replace(" update", str(crayons.green(" update", bold=True))) additional_help = """ Usage Examples: Create a new project using Python 3.7, specifically: $ {1} Remove project virtualenv (inferred from current directory): $ {9} Install all dependencies for a project (including dev): $ {2} Create a lockfile containing pre-releases: $ {6} Show a graph of your installed dependencies: $ {4} Check your installed dependencies for security vulnerabilities: $ {7} Install a local setup.py into your virtual environment/Pipfile: $ {5} Use a lower-level pip command: $ {8} Commands:""".format( crayons.red("pipenv --three"), crayons.red("pipenv --python 3.7"), crayons.red("pipenv install --dev"), crayons.red("pipenv lock"), crayons.red("pipenv graph"), crayons.red("pipenv install -e ."), crayons.red("pipenv lock --pre"), crayons.red("pipenv check"), crayons.red("pipenv run pip freeze"), crayons.red("pipenv --rm"), ) help = help.replace("Commands:", additional_help) return help
def function[format_help, parameter[help]]:
    constant[Formats the help string.]
    variable[help] assign[=] call[name[help].replace, parameter[constant[Options:], call[name[str], parameter[call[name[crayons].normal, parameter[constant[Options:]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[Usage: pipenv], call[name[str], parameter[call[constant[Usage: {0}].format, parameter[call[name[crayons].normal, parameter[constant[pipenv]]]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   check], call[name[str], parameter[call[name[crayons].red, parameter[constant[   check]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   clean], call[name[str], parameter[call[name[crayons].red, parameter[constant[   clean]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   graph], call[name[str], parameter[call[name[crayons].red, parameter[constant[   graph]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   install], call[name[str], parameter[call[name[crayons].magenta, parameter[constant[   install]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   lock], call[name[str], parameter[call[name[crayons].green, parameter[constant[   lock]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   open], call[name[str], parameter[call[name[crayons].red, parameter[constant[   open]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   run], call[name[str], parameter[call[name[crayons].yellow, parameter[constant[   run]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   shell], call[name[str], parameter[call[name[crayons].yellow, parameter[constant[   shell]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   sync], call[name[str], parameter[call[name[crayons].green, parameter[constant[   sync]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   uninstall], call[name[str], parameter[call[name[crayons].magenta, parameter[constant[   uninstall]]]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[   update], call[name[str], parameter[call[name[crayons].green, parameter[constant[   update]]]]]]]
    variable[additional_help] assign[=] call[constant[
Usage Examples:
   Create a new project using Python 3.7, specifically:
   $ {1}

   Remove project virtualenv (inferred from current directory):
   $ {9}

   Install all dependencies for a project (including dev):
   $ {2}

   Create a lockfile containing pre-releases:
   $ {6}

   Show a graph of your installed dependencies:
   $ {4}

   Check your installed dependencies for security vulnerabilities:
   $ {7}

   Install a local setup.py into your virtual environment/Pipfile:
   $ {5}

   Use a lower-level pip command:
   $ {8}

Commands:].format, parameter[call[name[crayons].red, parameter[constant[pipenv --three]]], call[name[crayons].red, parameter[constant[pipenv --python 3.7]]], call[name[crayons].red, parameter[constant[pipenv install --dev]]], call[name[crayons].red, parameter[constant[pipenv lock]]], call[name[crayons].red, parameter[constant[pipenv graph]]], call[name[crayons].red, parameter[constant[pipenv install -e .]]], call[name[crayons].red, parameter[constant[pipenv lock --pre]]], call[name[crayons].red, parameter[constant[pipenv check]]], call[name[crayons].red, parameter[constant[pipenv run pip freeze]]], call[name[crayons].red, parameter[constant[pipenv --rm]]]]]
    variable[help] assign[=] call[name[help].replace, parameter[constant[Commands:], name[additional_help]]]
    return[name[help]]
keyword[def] identifier[format_help] ( identifier[help] ): literal[string] identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[normal] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( literal[string] . identifier[format] ( identifier[crayons] . identifier[normal] ( literal[string] , identifier[bold] = keyword[True] ))) ) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[red] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[red] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[red] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[magenta] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[green] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[red] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[yellow] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[yellow] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[green] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[magenta] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[str] ( identifier[crayons] . identifier[green] ( literal[string] , identifier[bold] = keyword[True] ))) identifier[additional_help] = literal[string] . identifier[format] ( identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), identifier[crayons] . identifier[red] ( literal[string] ), ) identifier[help] = identifier[help] . identifier[replace] ( literal[string] , identifier[additional_help] ) keyword[return] identifier[help]
def format_help(help): """Formats the help string.""" help = help.replace('Options:', str(crayons.normal('Options:', bold=True))) help = help.replace('Usage: pipenv', str('Usage: {0}'.format(crayons.normal('pipenv', bold=True)))) help = help.replace(' check', str(crayons.red(' check', bold=True))) help = help.replace(' clean', str(crayons.red(' clean', bold=True))) help = help.replace(' graph', str(crayons.red(' graph', bold=True))) help = help.replace(' install', str(crayons.magenta(' install', bold=True))) help = help.replace(' lock', str(crayons.green(' lock', bold=True))) help = help.replace(' open', str(crayons.red(' open', bold=True))) help = help.replace(' run', str(crayons.yellow(' run', bold=True))) help = help.replace(' shell', str(crayons.yellow(' shell', bold=True))) help = help.replace(' sync', str(crayons.green(' sync', bold=True))) help = help.replace(' uninstall', str(crayons.magenta(' uninstall', bold=True))) help = help.replace(' update', str(crayons.green(' update', bold=True))) additional_help = '\nUsage Examples:\n Create a new project using Python 3.7, specifically:\n $ {1}\n\n Remove project virtualenv (inferred from current directory):\n $ {9}\n\n Install all dependencies for a project (including dev):\n $ {2}\n\n Create a lockfile containing pre-releases:\n $ {6}\n\n Show a graph of your installed dependencies:\n $ {4}\n\n Check your installed dependencies for security vulnerabilities:\n $ {7}\n\n Install a local setup.py into your virtual environment/Pipfile:\n $ {5}\n\n Use a lower-level pip command:\n $ {8}\n\nCommands:'.format(crayons.red('pipenv --three'), crayons.red('pipenv --python 3.7'), crayons.red('pipenv install --dev'), crayons.red('pipenv lock'), crayons.red('pipenv graph'), crayons.red('pipenv install -e .'), crayons.red('pipenv lock --pre'), crayons.red('pipenv check'), crayons.red('pipenv run pip freeze'), crayons.red('pipenv --rm')) help = help.replace('Commands:', additional_help) return help
def do_it(self, dbg): ''' Create an XML for console output, error and more (true/false) <xml> <output message=output_message></output> <error message=error_message></error> <more>true/false</more> </xml> ''' try: frame = dbg.find_frame(self.thread_id, self.frame_id) if frame is not None: console_message = pydevd_console.execute_console_command( frame, self.thread_id, self.frame_id, self.line, self.buffer_output) cmd = dbg.cmd_factory.make_send_console_message(self.sequence, console_message.to_xml()) else: from _pydevd_bundle.pydevd_console import ConsoleMessage console_message = ConsoleMessage() console_message.add_console_message( pydevd_console.CONSOLE_ERROR, "Select the valid frame in the debug view (thread: %s, frame: %s invalid)" % (self.thread_id, self.frame_id), ) cmd = dbg.cmd_factory.make_error_message(self.sequence, console_message.to_xml()) except: exc = get_exception_traceback_str() cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating expression " + exc) dbg.writer.add_command(cmd)
def function[do_it, parameter[self, dbg]]: constant[ Create an XML for console output, error and more (true/false) <xml> <output message=output_message></output> <error message=error_message></error> <more>true/false</more> </xml> ] <ast.Try object at 0x7da18fe929e0> call[name[dbg].writer.add_command, parameter[name[cmd]]]
keyword[def] identifier[do_it] ( identifier[self] , identifier[dbg] ): literal[string] keyword[try] : identifier[frame] = identifier[dbg] . identifier[find_frame] ( identifier[self] . identifier[thread_id] , identifier[self] . identifier[frame_id] ) keyword[if] identifier[frame] keyword[is] keyword[not] keyword[None] : identifier[console_message] = identifier[pydevd_console] . identifier[execute_console_command] ( identifier[frame] , identifier[self] . identifier[thread_id] , identifier[self] . identifier[frame_id] , identifier[self] . identifier[line] , identifier[self] . identifier[buffer_output] ) identifier[cmd] = identifier[dbg] . identifier[cmd_factory] . identifier[make_send_console_message] ( identifier[self] . identifier[sequence] , identifier[console_message] . identifier[to_xml] ()) keyword[else] : keyword[from] identifier[_pydevd_bundle] . identifier[pydevd_console] keyword[import] identifier[ConsoleMessage] identifier[console_message] = identifier[ConsoleMessage] () identifier[console_message] . identifier[add_console_message] ( identifier[pydevd_console] . identifier[CONSOLE_ERROR] , literal[string] %( identifier[self] . identifier[thread_id] , identifier[self] . identifier[frame_id] ), ) identifier[cmd] = identifier[dbg] . identifier[cmd_factory] . identifier[make_error_message] ( identifier[self] . identifier[sequence] , identifier[console_message] . identifier[to_xml] ()) keyword[except] : identifier[exc] = identifier[get_exception_traceback_str] () identifier[cmd] = identifier[dbg] . identifier[cmd_factory] . identifier[make_error_message] ( identifier[self] . identifier[sequence] , literal[string] + identifier[exc] ) identifier[dbg] . identifier[writer] . identifier[add_command] ( identifier[cmd] )
def do_it(self, dbg): """ Create an XML for console output, error and more (true/false) <xml> <output message=output_message></output> <error message=error_message></error> <more>true/false</more> </xml> """ try: frame = dbg.find_frame(self.thread_id, self.frame_id) if frame is not None: console_message = pydevd_console.execute_console_command(frame, self.thread_id, self.frame_id, self.line, self.buffer_output) cmd = dbg.cmd_factory.make_send_console_message(self.sequence, console_message.to_xml()) # depends on [control=['if'], data=['frame']] else: from _pydevd_bundle.pydevd_console import ConsoleMessage console_message = ConsoleMessage() console_message.add_console_message(pydevd_console.CONSOLE_ERROR, 'Select the valid frame in the debug view (thread: %s, frame: %s invalid)' % (self.thread_id, self.frame_id)) cmd = dbg.cmd_factory.make_error_message(self.sequence, console_message.to_xml()) # depends on [control=['try'], data=[]] except: exc = get_exception_traceback_str() cmd = dbg.cmd_factory.make_error_message(self.sequence, 'Error evaluating expression ' + exc) # depends on [control=['except'], data=[]] dbg.writer.add_command(cmd)
def perform_exit():
    """perform_exit

    Handling at-the-exit events
    ---------------------------

    This will clean up each worker process, which
    could be in the middle of a request/sleep/block action.

    This has been tested on python 3 with Celery
    and single processes.

    """
    if SPLUNK_DEBUG:
        print('{} -------------------------------'.format(
            rnow()))
        print('{} splunkpub: atexit.register - start'.format(
            rnow()))

    worked = True
    for instance in instances:
        try:
            if SPLUNK_DEBUG:
                print('{} - shutting down instance={} - start'.format(
                    rnow(),
                    instance))
            instance.shutdown()
            if SPLUNK_DEBUG:
                print('{} - shutting down instance={} - done'.format(
                    rnow(),
                    instance))
        except Exception as e:
            worked = False
            if SPLUNK_DEBUG:
                print(
                    '{} - shutting down instance={} '
                    '- hit ex={} during shutdown'.format(
                        rnow(),
                        instance,
                        e))
        # end of try/ex

    if not worked:
        if SPLUNK_DEBUG:
            print('{} Failed exiting'.format(
                rnow()))

    if SPLUNK_DEBUG:
        print('{} splunkpub: atexit.register - done'.format(
            rnow()))
        print('{} -------------------------------'.format(
            rnow()))
def function[perform_exit, parameter[]]: constant[perform_exit Handling at-the-exit events --------------------------- This will cleanup each worker process which could be in the middle of a request/sleep/block action. This has been tested on python 3 with Celery and single processes. ] if name[SPLUNK_DEBUG] begin[:] call[name[print], parameter[call[constant[{} -------------------------------].format, parameter[call[name[rnow], parameter[]]]]]] call[name[print], parameter[call[constant[{} splunkpub: atexit.register - start].format, parameter[call[name[rnow], parameter[]]]]]] variable[worked] assign[=] constant[True] for taget[name[instance]] in starred[name[instances]] begin[:] <ast.Try object at 0x7da1b26af4c0> if <ast.UnaryOp object at 0x7da1b26ae680> begin[:] if name[SPLUNK_DEBUG] begin[:] call[name[print], parameter[call[constant[{} Failed exiting].format, parameter[call[name[rnow], parameter[]]]]]] if name[SPLUNK_DEBUG] begin[:] call[name[print], parameter[call[constant[{} splunkpub: atexit.register - done].format, parameter[call[name[rnow], parameter[]]]]]] call[name[print], parameter[call[constant[{} -------------------------------].format, parameter[call[name[rnow], parameter[]]]]]]
keyword[def] identifier[perform_exit] (): literal[string] keyword[if] identifier[SPLUNK_DEBUG] : identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] ())) identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] ())) identifier[worked] = keyword[True] keyword[for] identifier[instance] keyword[in] identifier[instances] : keyword[try] : keyword[if] identifier[SPLUNK_DEBUG] : identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] (), identifier[instance] )) identifier[instance] . identifier[shutdown] () keyword[if] identifier[SPLUNK_DEBUG] : identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] (), identifier[instance] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[worked] = keyword[False] keyword[if] identifier[SPLUNK_DEBUG] : identifier[print] ( literal[string] literal[string] . identifier[format] ( identifier[rnow] (), identifier[instance] , identifier[e] )) keyword[if] keyword[not] identifier[worked] : keyword[if] identifier[SPLUNK_DEBUG] : identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] ())) keyword[if] identifier[SPLUNK_DEBUG] : identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] ())) identifier[print] ( literal[string] . identifier[format] ( identifier[rnow] ()))
def perform_exit():
    """perform_exit

    Handling at-the-exit events
    ---------------------------

    This will clean up each worker process, which could be in the middle of a request/sleep/block action.

    This has been tested on python 3 with Celery and single processes.
    """
    if SPLUNK_DEBUG:
        print('{} -------------------------------'.format(rnow()))
        print('{} splunkpub: atexit.register - start'.format(rnow())) # depends on [control=['if'], data=[]]
    worked = True
    for instance in instances:
        try:
            if SPLUNK_DEBUG:
                print('{} - shutting down instance={} - start'.format(rnow(), instance)) # depends on [control=['if'], data=[]]
            instance.shutdown()
            if SPLUNK_DEBUG:
                print('{} - shutting down instance={} - done'.format(rnow(), instance)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
        except Exception as e:
            worked = False
            if SPLUNK_DEBUG:
                print('{} - shutting down instance={} - hit ex={} during shutdown'.format(rnow(), instance, e)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['instance']]
    # end of try/ex
    if not worked:
        if SPLUNK_DEBUG:
            print('{} Failed exiting'.format(rnow())) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    if SPLUNK_DEBUG:
        print('{} splunkpub: atexit.register - done'.format(rnow()))
        print('{} -------------------------------'.format(rnow())) # depends on [control=['if'], data=[]]
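As the docstring says, the hook is meant to be registered with atexit; a minimal sketch of the wiring (instances is assumed to be module-level state holding objects that expose shutdown()):

import atexit

instances = []                  # workers append themselves here when constructed (assumed)
atexit.register(perform_exit)   # runs the cleanup loop above at interpreter shutdown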
def graph_png(self): """ Export a graph of the data in png format using graphviz/dot. """ if not self.out_file: ui.error(c.MESSAGES["png_missing_out"]) sys.exit(1) cli_flags = "-Gsize='{0}' -Gdpi='{1}' {2} ".format(self.size, self.dpi, self.flags) cli_flags += "-o {0}".format(self.out_file) (out, err) = utils.capture_shell( "ansigenome export -t graph -f dot | dot -Tpng {0}" .format(cli_flags)) if err: ui.error(err)
def function[graph_png, parameter[self]]: constant[ Export a graph of the data in png format using graphviz/dot. ] if <ast.UnaryOp object at 0x7da1b0b36200> begin[:] call[name[ui].error, parameter[call[name[c].MESSAGES][constant[png_missing_out]]]] call[name[sys].exit, parameter[constant[1]]] variable[cli_flags] assign[=] call[constant[-Gsize='{0}' -Gdpi='{1}' {2} ].format, parameter[name[self].size, name[self].dpi, name[self].flags]] <ast.AugAssign object at 0x7da1b0b37040> <ast.Tuple object at 0x7da1b0b364d0> assign[=] call[name[utils].capture_shell, parameter[call[constant[ansigenome export -t graph -f dot | dot -Tpng {0}].format, parameter[name[cli_flags]]]]] if name[err] begin[:] call[name[ui].error, parameter[name[err]]]
keyword[def] identifier[graph_png] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[out_file] : identifier[ui] . identifier[error] ( identifier[c] . identifier[MESSAGES] [ literal[string] ]) identifier[sys] . identifier[exit] ( literal[int] ) identifier[cli_flags] = literal[string] . identifier[format] ( identifier[self] . identifier[size] , identifier[self] . identifier[dpi] , identifier[self] . identifier[flags] ) identifier[cli_flags] += literal[string] . identifier[format] ( identifier[self] . identifier[out_file] ) ( identifier[out] , identifier[err] )= identifier[utils] . identifier[capture_shell] ( literal[string] . identifier[format] ( identifier[cli_flags] )) keyword[if] identifier[err] : identifier[ui] . identifier[error] ( identifier[err] )
def graph_png(self): """ Export a graph of the data in png format using graphviz/dot. """ if not self.out_file: ui.error(c.MESSAGES['png_missing_out']) sys.exit(1) # depends on [control=['if'], data=[]] cli_flags = "-Gsize='{0}' -Gdpi='{1}' {2} ".format(self.size, self.dpi, self.flags) cli_flags += '-o {0}'.format(self.out_file) (out, err) = utils.capture_shell('ansigenome export -t graph -f dot | dot -Tpng {0}'.format(cli_flags)) if err: ui.error(err) # depends on [control=['if'], data=[]]
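For concreteness, with size='8,5', dpi=130, empty flags and out_file='graph.png', the two format() calls assemble the flags below; -Gsize and -Gdpi set Graphviz graph attributes, and the double space is where the empty flags string lands:

size, dpi, flags, out_file = '8,5', 130, '', 'graph.png'
cli_flags = "-Gsize='{0}' -Gdpi='{1}' {2} ".format(size, dpi, flags)
cli_flags += "-o {0}".format(out_file)
print(cli_flags)   # -Gsize='8,5' -Gdpi='130'  -o graph.png
# full pipeline: ansigenome export -t graph -f dot | dot -Tpng -Gsize='8,5' -Gdpi='130'  -o graph.png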
def setup_app(self, app, add_context_processor=True): # pragma: no cover ''' This method has been deprecated. Please use :meth:`LoginManager.init_app` instead. ''' warnings.warn('Warning setup_app is deprecated. Please use init_app.', DeprecationWarning) self.init_app(app, add_context_processor)
def function[setup_app, parameter[self, app, add_context_processor]]: constant[ This method has been deprecated. Please use :meth:`LoginManager.init_app` instead. ] call[name[warnings].warn, parameter[constant[Warning setup_app is deprecated. Please use init_app.], name[DeprecationWarning]]] call[name[self].init_app, parameter[name[app], name[add_context_processor]]]
keyword[def] identifier[setup_app] ( identifier[self] , identifier[app] , identifier[add_context_processor] = keyword[True] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] ) identifier[self] . identifier[init_app] ( identifier[app] , identifier[add_context_processor] )
def setup_app(self, app, add_context_processor=True): # pragma: no cover '\n This method has been deprecated. Please use\n :meth:`LoginManager.init_app` instead.\n ' warnings.warn('Warning setup_app is deprecated. Please use init_app.', DeprecationWarning) self.init_app(app, add_context_processor)
def get_cats(self):
    '''Get top keyword categories'''
    start_url = 'http://top.taobao.com/index.php?from=tbsy'
    rs = self.fetch(start_url)
    if not rs:
        return None
    soup = BeautifulSoup(rs.content, convertEntities=BeautifulSoup.HTML_ENTITIES, markupMassage=hexentityMassage)
    cats = [{'id':'TR_%s'%li['id'].encode('utf-8').upper(), 'title':li.a.text.encode('utf-8').strip()}
            for li in soup.find('div', id='nav').findAll('li') if li['id']!='index']
    threadPool = ThreadPool(len(cats) if len(cats)<=5 else 5)
    for cat in cats:
        threadPool.run(self.get_cats_thread, callback=None, cat=cat)
    cats = threadPool.killAllWorkers(None)
    return cats
def function[get_cats, parameter[self]]: constant[Get top keywords categories] variable[start_url] assign[=] constant[http://top.taobao.com/index.php?from=tbsy] variable[rs] assign[=] call[name[self].fetch, parameter[name[start_url]]] if <ast.UnaryOp object at 0x7da1b25adcf0> begin[:] return[constant[None]] variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[rs].content]] variable[cats] assign[=] <ast.ListComp object at 0x7da1b25ac520> variable[threadPool] assign[=] call[name[ThreadPool], parameter[<ast.IfExp object at 0x7da1b25ad030>]] for taget[name[cat]] in starred[name[cats]] begin[:] call[name[threadPool].run, parameter[name[self].get_cats_thread]] variable[cats] assign[=] call[name[threadPool].killAllWorkers, parameter[constant[None]]] return[name[cats]]
keyword[def] identifier[get_cats] ( identifier[self] ): literal[string] identifier[start_url] = literal[string] identifier[rs] = identifier[self] . identifier[fetch] ( identifier[start_url] ) keyword[if] keyword[not] identifier[rs] : keyword[return] keyword[None] identifier[soup] = identifier[BeautifulSoup] ( identifier[rs] . identifier[content] , identifier[convertEntities] = identifier[BeautifulSoup] . identifier[HTML_ENTITIES] , identifier[markupMassage] = identifier[hexentityMassage] ) identifier[cats] =[{ literal[string] : literal[string] % identifier[li] [ literal[string] ]. identifier[encode] ( literal[string] ). identifier[upper] (), literal[string] : identifier[li] . identifier[a] . identifier[text] . identifier[encode] ( literal[string] ). identifier[strip] ()} keyword[for] identifier[li] keyword[in] identifier[soup] . identifier[find] ( literal[string] , identifier[id] = literal[string] ). identifier[findAll] ( literal[string] ) keyword[if] identifier[li] [ literal[string] ]!= literal[string] ] identifier[threadPool] = identifier[ThreadPool] ( identifier[len] ( identifier[cats] ) keyword[if] identifier[len] ( identifier[cats] )<= literal[int] keyword[else] literal[int] ) keyword[for] identifier[cat] keyword[in] identifier[cats] : identifier[threadPool] . identifier[run] ( identifier[self] . identifier[get_cats_thread] , identifier[callback] = keyword[None] , identifier[cat] = identifier[cat] ) identifier[cats] = identifier[threadPool] . identifier[killAllWorkers] ( keyword[None] ) keyword[return] identifier[cats]
def get_cats(self):
    """Get top keyword categories"""
    start_url = 'http://top.taobao.com/index.php?from=tbsy'
    rs = self.fetch(start_url)
    if not rs:
        return None # depends on [control=['if'], data=[]]
    soup = BeautifulSoup(rs.content, convertEntities=BeautifulSoup.HTML_ENTITIES, markupMassage=hexentityMassage)
    cats = [{'id': 'TR_%s' % li['id'].encode('utf-8').upper(), 'title': li.a.text.encode('utf-8').strip()} for li in soup.find('div', id='nav').findAll('li') if li['id'] != 'index']
    threadPool = ThreadPool(len(cats) if len(cats) <= 5 else 5)
    for cat in cats:
        threadPool.run(self.get_cats_thread, callback=None, cat=cat) # depends on [control=['for'], data=['cat']]
    cats = threadPool.killAllWorkers(None)
    return cats
def text(self, prompt, default=None): """Prompts the user for some text, with optional default""" prompt = prompt if prompt is not None else 'Enter some text' prompt += " [{0}]: ".format(default) if default is not None else ': ' return self.input(curry(filter_text, default=default), prompt)
def function[text, parameter[self, prompt, default]]: constant[Prompts the user for some text, with optional default] variable[prompt] assign[=] <ast.IfExp object at 0x7da20c6c7d60> <ast.AugAssign object at 0x7da20c6c5d80> return[call[name[self].input, parameter[call[name[curry], parameter[name[filter_text]]], name[prompt]]]]
keyword[def] identifier[text] ( identifier[self] , identifier[prompt] , identifier[default] = keyword[None] ): literal[string] identifier[prompt] = identifier[prompt] keyword[if] identifier[prompt] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] identifier[prompt] += literal[string] . identifier[format] ( identifier[default] ) keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] keyword[return] identifier[self] . identifier[input] ( identifier[curry] ( identifier[filter_text] , identifier[default] = identifier[default] ), identifier[prompt] )
def text(self, prompt, default=None): """Prompts the user for some text, with optional default""" prompt = prompt if prompt is not None else 'Enter some text' prompt += ' [{0}]: '.format(default) if default is not None else ': ' return self.input(curry(filter_text, default=default), prompt)
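curry here presumably behaves like functools.partial, pre-binding the default before the validator sees the user's input; filter_text's exact shape is an assumption, sketched below:

from functools import partial

def filter_text(value, default=None):
    # assumed shape: empty input falls back to the bound default
    return value if value else default

validator = partial(filter_text, default='anonymous')
validator('')        # 'anonymous'
validator('alice')   # 'alice'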
def update(self, request, key): """Set an email address as primary address.""" request.UPDATE = http.QueryDict(request.body) email_addr = request.UPDATE.get('email') user_id = request.UPDATE.get('user') if not email_addr: return http.HttpResponseBadRequest() try: email = EmailAddress.objects.get(address=email_addr, user_id=user_id) except EmailAddress.DoesNotExist: raise http.Http404 email.user.email = email_addr email.user.save() return http.HttpResponse(status=204)
def function[update, parameter[self, request, key]]: constant[Set an email address as primary address.] name[request].UPDATE assign[=] call[name[http].QueryDict, parameter[name[request].body]] variable[email_addr] assign[=] call[name[request].UPDATE.get, parameter[constant[email]]] variable[user_id] assign[=] call[name[request].UPDATE.get, parameter[constant[user]]] if <ast.UnaryOp object at 0x7da1b0f2f790> begin[:] return[call[name[http].HttpResponseBadRequest, parameter[]]] <ast.Try object at 0x7da1b0f2d630> name[email].user.email assign[=] name[email_addr] call[name[email].user.save, parameter[]] return[call[name[http].HttpResponse, parameter[]]]
keyword[def] identifier[update] ( identifier[self] , identifier[request] , identifier[key] ): literal[string] identifier[request] . identifier[UPDATE] = identifier[http] . identifier[QueryDict] ( identifier[request] . identifier[body] ) identifier[email_addr] = identifier[request] . identifier[UPDATE] . identifier[get] ( literal[string] ) identifier[user_id] = identifier[request] . identifier[UPDATE] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[email_addr] : keyword[return] identifier[http] . identifier[HttpResponseBadRequest] () keyword[try] : identifier[email] = identifier[EmailAddress] . identifier[objects] . identifier[get] ( identifier[address] = identifier[email_addr] , identifier[user_id] = identifier[user_id] ) keyword[except] identifier[EmailAddress] . identifier[DoesNotExist] : keyword[raise] identifier[http] . identifier[Http404] identifier[email] . identifier[user] . identifier[email] = identifier[email_addr] identifier[email] . identifier[user] . identifier[save] () keyword[return] identifier[http] . identifier[HttpResponse] ( identifier[status] = literal[int] )
def update(self, request, key): """Set an email address as primary address.""" request.UPDATE = http.QueryDict(request.body) email_addr = request.UPDATE.get('email') user_id = request.UPDATE.get('user') if not email_addr: return http.HttpResponseBadRequest() # depends on [control=['if'], data=[]] try: email = EmailAddress.objects.get(address=email_addr, user_id=user_id) # depends on [control=['try'], data=[]] except EmailAddress.DoesNotExist: raise http.Http404 # depends on [control=['except'], data=[]] email.user.email = email_addr email.user.save() return http.HttpResponse(status=204)
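The PUT-style body is parsed with Django's QueryDict; a quick sketch of what the two get() calls see for a urlencoded body (the address and id are made up):

from django.http import QueryDict

update = QueryDict(b'email=alice%40example.com&user=42', encoding='utf-8')
update.get('email')   # 'alice@example.com'
update.get('user')    # '42' - still a string; the ORM coerces it in the filter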
def check_response_code(resp):
    """
    check if query quota has been surpassed or other errors occurred
    :param resp: json response
    :return:
    """
    if resp["status"] == "OK" or resp["status"] == "ZERO_RESULTS":
        return

    if resp["status"] == "REQUEST_DENIED":
        raise Exception("Google Places " + resp["status"],
                        "Request was denied, the API key is invalid.")

    if resp["status"] == "OVER_QUERY_LIMIT":
        raise Exception("Google Places " + resp["status"],
                        "You exceeded your Query Limit for Google Places API Web Service, "
                        "check https://developers.google.com/places/web-service/usage "
                        "to upgrade your quota.")

    if resp["status"] == "INVALID_REQUEST":
        raise Exception("Google Places " + resp["status"],
                        "The query string is malformed, "
                        "check if your formatting for lat/lng and radius is correct.")

    raise Exception("Google Places " + resp["status"],
                    "Unidentified error with the Places API, please check the response code")
def function[check_response_code, parameter[resp]]: constant[ check if query quota has been surpassed or other errors occured :param resp: json response :return: ] if <ast.BoolOp object at 0x7da18f720af0> begin[:] return[None] if compare[call[name[resp]][constant[status]] equal[==] constant[REQUEST_DENIED]] begin[:] <ast.Raise object at 0x7da18f00e050> if compare[call[name[resp]][constant[status]] equal[==] constant[OVER_QUERY_LIMIT]] begin[:] <ast.Raise object at 0x7da18f00cfa0> if compare[call[name[resp]][constant[status]] equal[==] constant[INVALID_REQUEST]] begin[:] <ast.Raise object at 0x7da18f00df00> <ast.Raise object at 0x7da18f00c310>
keyword[def] identifier[check_response_code] ( identifier[resp] ): literal[string] keyword[if] identifier[resp] [ literal[string] ]== literal[string] keyword[or] identifier[resp] [ literal[string] ]== literal[string] : keyword[return] keyword[if] identifier[resp] [ literal[string] ]== literal[string] : keyword[raise] identifier[Exception] ( literal[string] + identifier[resp] [ literal[string] ], literal[string] ) keyword[if] identifier[resp] [ literal[string] ]== literal[string] : keyword[raise] identifier[Exception] ( literal[string] + identifier[resp] [ literal[string] ], literal[string] literal[string] literal[string] ) keyword[if] identifier[resp] [ literal[string] ]== literal[string] : keyword[raise] identifier[Exception] ( literal[string] + identifier[resp] [ literal[string] ], literal[string] literal[string] ) keyword[raise] identifier[Exception] ( literal[string] + identifier[resp] [ literal[string] ], literal[string] )
def check_response_code(resp):
    """
    check if query quota has been surpassed or other errors occurred
    :param resp: json response
    :return:
    """
    if resp['status'] == 'OK' or resp['status'] == 'ZERO_RESULTS':
        return # depends on [control=['if'], data=[]]
    if resp['status'] == 'REQUEST_DENIED':
        raise Exception('Google Places ' + resp['status'], 'Request was denied, the API key is invalid.') # depends on [control=['if'], data=[]]
    if resp['status'] == 'OVER_QUERY_LIMIT':
        raise Exception('Google Places ' + resp['status'], 'You exceeded your Query Limit for Google Places API Web Service, check https://developers.google.com/places/web-service/usage to upgrade your quota.') # depends on [control=['if'], data=[]]
    if resp['status'] == 'INVALID_REQUEST':
        raise Exception('Google Places ' + resp['status'], 'The query string is malformed, check if your formatting for lat/lng and radius is correct.') # depends on [control=['if'], data=[]]
    raise Exception('Google Places ' + resp['status'], 'Unidentified error with the Places API, please check the response code')
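The contract at a glance: OK and ZERO_RESULTS pass silently (an empty result set is not an error), everything else raises; the payloads below are made up:

check_response_code({'status': 'OK'})             # returns None
check_response_code({'status': 'ZERO_RESULTS'})   # returns None
try:
    check_response_code({'status': 'OVER_QUERY_LIMIT'})
except Exception as exc:
    print(exc.args[0])   # Google Places OVER_QUERY_LIMIT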
def concatenate_children(node, concatenate_with, strategy): """ Concatenate children of node according to https://ocr-d.github.io/page#consistency-of-text-results-on-different-levels """ _, _, getter, concatenate_with = [x for x in _HIERARCHY if isinstance(node, x[0])][0] tokens = [get_text(x, strategy) for x in getattr(node, getter)()] return concatenate_with.join(tokens).strip()
def function[concatenate_children, parameter[node, concatenate_with, strategy]]: constant[ Concatenate children of node according to https://ocr-d.github.io/page#consistency-of-text-results-on-different-levels ] <ast.Tuple object at 0x7da1b0383730> assign[=] call[<ast.ListComp object at 0x7da1b0382860>][constant[0]] variable[tokens] assign[=] <ast.ListComp object at 0x7da1b0381f90> return[call[call[name[concatenate_with].join, parameter[name[tokens]]].strip, parameter[]]]
keyword[def] identifier[concatenate_children] ( identifier[node] , identifier[concatenate_with] , identifier[strategy] ): literal[string] identifier[_] , identifier[_] , identifier[getter] , identifier[concatenate_with] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[_HIERARCHY] keyword[if] identifier[isinstance] ( identifier[node] , identifier[x] [ literal[int] ])][ literal[int] ] identifier[tokens] =[ identifier[get_text] ( identifier[x] , identifier[strategy] ) keyword[for] identifier[x] keyword[in] identifier[getattr] ( identifier[node] , identifier[getter] )()] keyword[return] identifier[concatenate_with] . identifier[join] ( identifier[tokens] ). identifier[strip] ()
def concatenate_children(node, concatenate_with, strategy): """ Concatenate children of node according to https://ocr-d.github.io/page#consistency-of-text-results-on-different-levels """ (_, _, getter, concatenate_with) = [x for x in _HIERARCHY if isinstance(node, x[0])][0] tokens = [get_text(x, strategy) for x in getattr(node, getter)()] return concatenate_with.join(tokens).strip()
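The lookup walks a module-level _HIERARCHY table; a plausible shape for it, matching the four-tuple unpack above (the PAGE class names, the second slot and the getter names are assumptions from the generateDS bindings):

from ocrd_models.ocrd_page import TextRegionType, TextLineType, WordType

_HIERARCHY = [
    (TextRegionType, 'region', 'get_TextLine', '\n'),   # region text = lines joined by newlines
    (TextLineType,   'line',   'get_Word',     ' '),    # line text = words joined by spaces
    (WordType,       'word',   'get_Glyph',    ''),     # word text = glyphs fused together
]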
def remove_uri(self, image):
    '''remove_uri will return just the image name, stripping the
       scheme prefix plus stray leading/trailing dashes and a
       trailing slash.
    '''
    image = image or ''
    uri = self.get_uri(image) or ''
    image = image.replace('%s://' % uri, '', 1)
    return image.strip('-').rstrip('/')
def function[remove_uri, parameter[self, image]]: constant[remove_image_uri will return just the image name. this will also remove all spaces from the uri. ] variable[image] assign[=] <ast.BoolOp object at 0x7da1b040cc40> variable[uri] assign[=] <ast.BoolOp object at 0x7da1b040f670> variable[image] assign[=] call[name[image].replace, parameter[binary_operation[constant[%s://] <ast.Mod object at 0x7da2590d6920> name[uri]], constant[], constant[1]]] return[call[call[name[image].strip, parameter[constant[-]]].rstrip, parameter[constant[/]]]]
keyword[def] identifier[remove_uri] ( identifier[self] , identifier[image] ): literal[string] identifier[image] = identifier[image] keyword[or] literal[string] identifier[uri] = identifier[self] . identifier[get_uri] ( identifier[image] ) keyword[or] literal[string] identifier[image] = identifier[image] . identifier[replace] ( literal[string] % identifier[uri] , literal[string] , literal[int] ) keyword[return] identifier[image] . identifier[strip] ( literal[string] ). identifier[rstrip] ( literal[string] )
def remove_uri(self, image):
    """remove_uri will return just the image name, stripping the
       scheme prefix plus stray leading/trailing dashes and a
       trailing slash.
    """
    image = image or ''
    uri = self.get_uri(image) or ''
    image = image.replace('%s://' % uri, '', 1)
    return image.strip('-').rstrip('/')
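Assuming get_uri returns the bare scheme (e.g. 'docker' for a docker:// reference), the method reduces a URI to the image name and trims stray dashes and a trailing slash; illustrative calls (client is a placeholder for the object carrying these methods):

client.remove_uri('docker://ubuntu:14.04')   # 'ubuntu:14.04'
client.remove_uri('shub://vsoch/hello/')     # 'vsoch/hello'  (trailing slash trimmed)
client.remove_uri(None)                      # ''  (a falsy image is coerced to '')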
def avail_images(call=None): ''' Return a dict of all available VM images on the cloud provider. ''' if call == 'action': raise SaltCloudSystemExit( 'The avail_images function must be called with ' '-f or --function, or with the --list-images option' ) ret = {} conn = get_conn(service='SoftLayer_Product_Package') for category in conn.getCategories(id=50): if category['categoryCode'] != 'os': continue for group in category['groups']: for price in group['prices']: ret[price['id']] = price['item'].copy() del ret[price['id']]['id'] return ret
def function[avail_images, parameter[call]]: constant[ Return a dict of all available VM images on the cloud provider. ] if compare[name[call] equal[==] constant[action]] begin[:] <ast.Raise object at 0x7da20c7ca380> variable[ret] assign[=] dictionary[[], []] variable[conn] assign[=] call[name[get_conn], parameter[]] for taget[name[category]] in starred[call[name[conn].getCategories, parameter[]]] begin[:] if compare[call[name[category]][constant[categoryCode]] not_equal[!=] constant[os]] begin[:] continue for taget[name[group]] in starred[call[name[category]][constant[groups]]] begin[:] for taget[name[price]] in starred[call[name[group]][constant[prices]]] begin[:] call[name[ret]][call[name[price]][constant[id]]] assign[=] call[call[name[price]][constant[item]].copy, parameter[]] <ast.Delete object at 0x7da20cabf430> return[name[ret]]
keyword[def] identifier[avail_images] ( identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] == literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] ) identifier[ret] ={} identifier[conn] = identifier[get_conn] ( identifier[service] = literal[string] ) keyword[for] identifier[category] keyword[in] identifier[conn] . identifier[getCategories] ( identifier[id] = literal[int] ): keyword[if] identifier[category] [ literal[string] ]!= literal[string] : keyword[continue] keyword[for] identifier[group] keyword[in] identifier[category] [ literal[string] ]: keyword[for] identifier[price] keyword[in] identifier[group] [ literal[string] ]: identifier[ret] [ identifier[price] [ literal[string] ]]= identifier[price] [ literal[string] ]. identifier[copy] () keyword[del] identifier[ret] [ identifier[price] [ literal[string] ]][ literal[string] ] keyword[return] identifier[ret]
def avail_images(call=None): """ Return a dict of all available VM images on the cloud provider. """ if call == 'action': raise SaltCloudSystemExit('The avail_images function must be called with -f or --function, or with the --list-images option') # depends on [control=['if'], data=[]] ret = {} conn = get_conn(service='SoftLayer_Product_Package') for category in conn.getCategories(id=50): if category['categoryCode'] != 'os': continue # depends on [control=['if'], data=[]] for group in category['groups']: for price in group['prices']: ret[price['id']] = price['item'].copy() del ret[price['id']]['id'] # depends on [control=['for'], data=['price']] # depends on [control=['for'], data=['group']] # depends on [control=['for'], data=['category']] return ret
def analyze_one(self, nm, importernm=None, imptyp=0, level=-1): """ break the name being imported up so we get: a.b.c -> [a, b, c] ; ..z -> ['', '', z] """ #print '## analyze_one', nm, importernm, imptyp, level if not nm: nm = importernm importernm = None level = 0 nmparts = nm.split('.') if level < 0: # behaviour up to Python 2.4 (and default in Python 2.5) # first see if we could be importing a relative name contexts = [None] if importernm: if self.ispackage(importernm): contexts.insert(0, importernm) else: pkgnm = ".".join(importernm.split(".")[:-1]) if pkgnm: contexts.insert(0, pkgnm) elif level == 0: # absolute import, do not try relative importernm = None contexts = [None] elif level > 0: # relative import, do not try absolute if self.ispackage(importernm): level -= 1 if level > 0: importernm = ".".join(importernm.split('.')[:-level]) contexts = [importernm, None] importernm = None _all = None assert contexts # so contexts is [pkgnm, None] or just [None] if nmparts[-1] == '*': del nmparts[-1] _all = [] nms = [] for context in contexts: ctx = context for i, nm in enumerate(nmparts): if ctx: fqname = ctx + '.' + nm else: fqname = nm mod = self.modules.get(fqname, UNTRIED) if mod is UNTRIED: logger.debug('Analyzing %s', fqname) mod = self.doimport(nm, ctx, fqname) if mod: nms.append(mod.__name__) ctx = fqname else: break else: # no break, point i beyond end i = i + 1 if i: break # now nms is the list of modules that went into sys.modules # just as result of the structure of the name being imported # however, each mod has been scanned and that list is in mod.imports if i<len(nmparts): if ctx: if hasattr(self.modules[ctx], nmparts[i]): return nms if not self.ispackage(ctx): return nms self.warnings["W: no module named %s (%s import by %s)" % (fqname, imptyps[imptyp], importernm or "__main__")] = 1 if fqname in self.modules: del self.modules[fqname] return nms if _all is None: return nms bottommod = self.modules[ctx] if bottommod.ispackage(): for nm in bottommod._all: if not hasattr(bottommod, nm): mod = self.doimport(nm, ctx, ctx+'.'+nm) if mod: nms.append(mod.__name__) else: bottommod.warnings.append("W: name %s not found" % nm) return nms
def function[analyze_one, parameter[self, nm, importernm, imptyp, level]]: constant[ break the name being imported up so we get: a.b.c -> [a, b, c] ; ..z -> ['', '', z] ] if <ast.UnaryOp object at 0x7da1b0e27ca0> begin[:] variable[nm] assign[=] name[importernm] variable[importernm] assign[=] constant[None] variable[level] assign[=] constant[0] variable[nmparts] assign[=] call[name[nm].split, parameter[constant[.]]] if compare[name[level] less[<] constant[0]] begin[:] variable[contexts] assign[=] list[[<ast.Constant object at 0x7da1b0e27820>]] if name[importernm] begin[:] if call[name[self].ispackage, parameter[name[importernm]]] begin[:] call[name[contexts].insert, parameter[constant[0], name[importernm]]] variable[_all] assign[=] constant[None] assert[name[contexts]] if compare[call[name[nmparts]][<ast.UnaryOp object at 0x7da1b0e26680>] equal[==] constant[*]] begin[:] <ast.Delete object at 0x7da1b0e265f0> variable[_all] assign[=] list[[]] variable[nms] assign[=] list[[]] for taget[name[context]] in starred[name[contexts]] begin[:] variable[ctx] assign[=] name[context] for taget[tuple[[<ast.Name object at 0x7da1b0e26230>, <ast.Name object at 0x7da1b0e26200>]]] in starred[call[name[enumerate], parameter[name[nmparts]]]] begin[:] if name[ctx] begin[:] variable[fqname] assign[=] binary_operation[binary_operation[name[ctx] + constant[.]] + name[nm]] variable[mod] assign[=] call[name[self].modules.get, parameter[name[fqname], name[UNTRIED]]] if compare[name[mod] is name[UNTRIED]] begin[:] call[name[logger].debug, parameter[constant[Analyzing %s], name[fqname]]] variable[mod] assign[=] call[name[self].doimport, parameter[name[nm], name[ctx], name[fqname]]] if name[mod] begin[:] call[name[nms].append, parameter[name[mod].__name__]] variable[ctx] assign[=] name[fqname] if name[i] begin[:] break if compare[name[i] less[<] call[name[len], parameter[name[nmparts]]]] begin[:] if name[ctx] begin[:] if call[name[hasattr], parameter[call[name[self].modules][name[ctx]], call[name[nmparts]][name[i]]]] begin[:] return[name[nms]] if <ast.UnaryOp object at 0x7da1b0e25180> begin[:] return[name[nms]] call[name[self].warnings][binary_operation[constant[W: no module named %s (%s import by %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e24eb0>, <ast.Subscript object at 0x7da1b0e24e80>, <ast.BoolOp object at 0x7da1b0e24df0>]]]] assign[=] constant[1] if compare[name[fqname] in name[self].modules] begin[:] <ast.Delete object at 0x7da1b0e24c40> return[name[nms]] if compare[name[_all] is constant[None]] begin[:] return[name[nms]] variable[bottommod] assign[=] call[name[self].modules][name[ctx]] if call[name[bottommod].ispackage, parameter[]] begin[:] for taget[name[nm]] in starred[name[bottommod]._all] begin[:] if <ast.UnaryOp object at 0x7da1b0ed25f0> begin[:] variable[mod] assign[=] call[name[self].doimport, parameter[name[nm], name[ctx], binary_operation[binary_operation[name[ctx] + constant[.]] + name[nm]]]] if name[mod] begin[:] call[name[nms].append, parameter[name[mod].__name__]] return[name[nms]]
keyword[def] identifier[analyze_one] ( identifier[self] , identifier[nm] , identifier[importernm] = keyword[None] , identifier[imptyp] = literal[int] , identifier[level] =- literal[int] ): literal[string] keyword[if] keyword[not] identifier[nm] : identifier[nm] = identifier[importernm] identifier[importernm] = keyword[None] identifier[level] = literal[int] identifier[nmparts] = identifier[nm] . identifier[split] ( literal[string] ) keyword[if] identifier[level] < literal[int] : identifier[contexts] =[ keyword[None] ] keyword[if] identifier[importernm] : keyword[if] identifier[self] . identifier[ispackage] ( identifier[importernm] ): identifier[contexts] . identifier[insert] ( literal[int] , identifier[importernm] ) keyword[else] : identifier[pkgnm] = literal[string] . identifier[join] ( identifier[importernm] . identifier[split] ( literal[string] )[:- literal[int] ]) keyword[if] identifier[pkgnm] : identifier[contexts] . identifier[insert] ( literal[int] , identifier[pkgnm] ) keyword[elif] identifier[level] == literal[int] : identifier[importernm] = keyword[None] identifier[contexts] =[ keyword[None] ] keyword[elif] identifier[level] > literal[int] : keyword[if] identifier[self] . identifier[ispackage] ( identifier[importernm] ): identifier[level] -= literal[int] keyword[if] identifier[level] > literal[int] : identifier[importernm] = literal[string] . identifier[join] ( identifier[importernm] . identifier[split] ( literal[string] )[:- identifier[level] ]) identifier[contexts] =[ identifier[importernm] , keyword[None] ] identifier[importernm] = keyword[None] identifier[_all] = keyword[None] keyword[assert] identifier[contexts] keyword[if] identifier[nmparts] [- literal[int] ]== literal[string] : keyword[del] identifier[nmparts] [- literal[int] ] identifier[_all] =[] identifier[nms] =[] keyword[for] identifier[context] keyword[in] identifier[contexts] : identifier[ctx] = identifier[context] keyword[for] identifier[i] , identifier[nm] keyword[in] identifier[enumerate] ( identifier[nmparts] ): keyword[if] identifier[ctx] : identifier[fqname] = identifier[ctx] + literal[string] + identifier[nm] keyword[else] : identifier[fqname] = identifier[nm] identifier[mod] = identifier[self] . identifier[modules] . identifier[get] ( identifier[fqname] , identifier[UNTRIED] ) keyword[if] identifier[mod] keyword[is] identifier[UNTRIED] : identifier[logger] . identifier[debug] ( literal[string] , identifier[fqname] ) identifier[mod] = identifier[self] . identifier[doimport] ( identifier[nm] , identifier[ctx] , identifier[fqname] ) keyword[if] identifier[mod] : identifier[nms] . identifier[append] ( identifier[mod] . identifier[__name__] ) identifier[ctx] = identifier[fqname] keyword[else] : keyword[break] keyword[else] : identifier[i] = identifier[i] + literal[int] keyword[if] identifier[i] : keyword[break] keyword[if] identifier[i] < identifier[len] ( identifier[nmparts] ): keyword[if] identifier[ctx] : keyword[if] identifier[hasattr] ( identifier[self] . identifier[modules] [ identifier[ctx] ], identifier[nmparts] [ identifier[i] ]): keyword[return] identifier[nms] keyword[if] keyword[not] identifier[self] . identifier[ispackage] ( identifier[ctx] ): keyword[return] identifier[nms] identifier[self] . identifier[warnings] [ literal[string] %( identifier[fqname] , identifier[imptyps] [ identifier[imptyp] ], identifier[importernm] keyword[or] literal[string] )]= literal[int] keyword[if] identifier[fqname] keyword[in] identifier[self] . identifier[modules] : keyword[del] identifier[self] . identifier[modules] [ identifier[fqname] ] keyword[return] identifier[nms] keyword[if] identifier[_all] keyword[is] keyword[None] : keyword[return] identifier[nms] identifier[bottommod] = identifier[self] . identifier[modules] [ identifier[ctx] ] keyword[if] identifier[bottommod] . identifier[ispackage] (): keyword[for] identifier[nm] keyword[in] identifier[bottommod] . identifier[_all] : keyword[if] keyword[not] identifier[hasattr] ( identifier[bottommod] , identifier[nm] ): identifier[mod] = identifier[self] . identifier[doimport] ( identifier[nm] , identifier[ctx] , identifier[ctx] + literal[string] + identifier[nm] ) keyword[if] identifier[mod] : identifier[nms] . identifier[append] ( identifier[mod] . identifier[__name__] ) keyword[else] : identifier[bottommod] . identifier[warnings] . identifier[append] ( literal[string] % identifier[nm] ) keyword[return] identifier[nms]
def analyze_one(self, nm, importernm=None, imptyp=0, level=-1): """ break the name being imported up so we get: a.b.c -> [a, b, c] ; ..z -> ['', '', z] """ #print '## analyze_one', nm, importernm, imptyp, level if not nm: nm = importernm importernm = None level = 0 # depends on [control=['if'], data=[]] nmparts = nm.split('.') if level < 0: # behaviour up to Python 2.4 (and default in Python 2.5) # first see if we could be importing a relative name contexts = [None] if importernm: if self.ispackage(importernm): contexts.insert(0, importernm) # depends on [control=['if'], data=[]] else: pkgnm = '.'.join(importernm.split('.')[:-1]) if pkgnm: contexts.insert(0, pkgnm) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif level == 0: # absolute import, do not try relative importernm = None contexts = [None] # depends on [control=['if'], data=[]] elif level > 0: # relative import, do not try absolute if self.ispackage(importernm): level -= 1 # depends on [control=['if'], data=[]] if level > 0: importernm = '.'.join(importernm.split('.')[:-level]) # depends on [control=['if'], data=['level']] contexts = [importernm, None] importernm = None # depends on [control=['if'], data=['level']] _all = None assert contexts # so contexts is [pkgnm, None] or just [None] if nmparts[-1] == '*': del nmparts[-1] _all = [] # depends on [control=['if'], data=[]] nms = [] for context in contexts: ctx = context for (i, nm) in enumerate(nmparts): if ctx: fqname = ctx + '.' + nm # depends on [control=['if'], data=[]] else: fqname = nm mod = self.modules.get(fqname, UNTRIED) if mod is UNTRIED: logger.debug('Analyzing %s', fqname) mod = self.doimport(nm, ctx, fqname) # depends on [control=['if'], data=['mod']] if mod: nms.append(mod.__name__) ctx = fqname # depends on [control=['if'], data=[]] else: break # depends on [control=['for'], data=[]] else: # no break, point i beyond end i = i + 1 if i: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['context']] # now nms is the list of modules that went into sys.modules # just as result of the structure of the name being imported # however, each mod has been scanned and that list is in mod.imports if i < len(nmparts): if ctx: if hasattr(self.modules[ctx], nmparts[i]): return nms # depends on [control=['if'], data=[]] if not self.ispackage(ctx): return nms # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.warnings['W: no module named %s (%s import by %s)' % (fqname, imptyps[imptyp], importernm or '__main__')] = 1 if fqname in self.modules: del self.modules[fqname] # depends on [control=['if'], data=['fqname']] return nms # depends on [control=['if'], data=['i']] if _all is None: return nms # depends on [control=['if'], data=[]] bottommod = self.modules[ctx] if bottommod.ispackage(): for nm in bottommod._all: if not hasattr(bottommod, nm): mod = self.doimport(nm, ctx, ctx + '.' + nm) if mod: nms.append(mod.__name__) # depends on [control=['if'], data=[]] else: bottommod.warnings.append('W: name %s not found' % nm) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['nm']] # depends on [control=['if'], data=[]] return nms
def refresh(self): """Update list of files, if there are changes. Calls underlying list_rtn for the particular science instrument. Typically, these routines search in the pysat provided path, pysat_data_dir/platform/name/tag/, where pysat_data_dir is set by pysat.utils.set_data_dir(path=path). """ output_str = '{platform} {name} {tag} {sat_id}' output_str = output_str.format(platform=self._sat.platform, name=self._sat.name, tag=self._sat.tag, sat_id=self._sat.sat_id) output_str = " ".join(("pysat is searching for", output_str, "files.")) output_str = " ".join(output_str.split()) print (output_str) info = self._sat._list_rtn(tag=self._sat.tag, sat_id=self._sat.sat_id, data_path=self.data_path, format_str=self.file_format) if not info.empty: print('Found {ll:d} of them.'.format(ll=len(info))) else: estr = "Unable to find any files that match the supplied template. " estr += "If you have the necessary files please check pysat " estr += "settings and file locations (e.g. pysat.pysat_dir)." print(estr) info = self._remove_data_dir_path(info) self._attach_files(info) self._store()
def function[refresh, parameter[self]]: constant[Update list of files, if there are changes. Calls underlying list_rtn for the particular science instrument. Typically, these routines search in the pysat provided path, pysat_data_dir/platform/name/tag/, where pysat_data_dir is set by pysat.utils.set_data_dir(path=path). ] variable[output_str] assign[=] constant[{platform} {name} {tag} {sat_id}] variable[output_str] assign[=] call[name[output_str].format, parameter[]] variable[output_str] assign[=] call[constant[ ].join, parameter[tuple[[<ast.Constant object at 0x7da1b0f504f0>, <ast.Name object at 0x7da1b0f50550>, <ast.Constant object at 0x7da1b0f50850>]]]] variable[output_str] assign[=] call[constant[ ].join, parameter[call[name[output_str].split, parameter[]]]] call[name[print], parameter[name[output_str]]] variable[info] assign[=] call[name[self]._sat._list_rtn, parameter[]] if <ast.UnaryOp object at 0x7da1b0f50fd0> begin[:] call[name[print], parameter[call[constant[Found {ll:d} of them.].format, parameter[]]]] variable[info] assign[=] call[name[self]._remove_data_dir_path, parameter[name[info]]] call[name[self]._attach_files, parameter[name[info]]] call[name[self]._store, parameter[]]
keyword[def] identifier[refresh] ( identifier[self] ): literal[string] identifier[output_str] = literal[string] identifier[output_str] = identifier[output_str] . identifier[format] ( identifier[platform] = identifier[self] . identifier[_sat] . identifier[platform] , identifier[name] = identifier[self] . identifier[_sat] . identifier[name] , identifier[tag] = identifier[self] . identifier[_sat] . identifier[tag] , identifier[sat_id] = identifier[self] . identifier[_sat] . identifier[sat_id] ) identifier[output_str] = literal[string] . identifier[join] (( literal[string] , identifier[output_str] , literal[string] )) identifier[output_str] = literal[string] . identifier[join] ( identifier[output_str] . identifier[split] ()) identifier[print] ( identifier[output_str] ) identifier[info] = identifier[self] . identifier[_sat] . identifier[_list_rtn] ( identifier[tag] = identifier[self] . identifier[_sat] . identifier[tag] , identifier[sat_id] = identifier[self] . identifier[_sat] . identifier[sat_id] , identifier[data_path] = identifier[self] . identifier[data_path] , identifier[format_str] = identifier[self] . identifier[file_format] ) keyword[if] keyword[not] identifier[info] . identifier[empty] : identifier[print] ( literal[string] . identifier[format] ( identifier[ll] = identifier[len] ( identifier[info] ))) keyword[else] : identifier[estr] = literal[string] identifier[estr] += literal[string] identifier[estr] += literal[string] identifier[print] ( identifier[estr] ) identifier[info] = identifier[self] . identifier[_remove_data_dir_path] ( identifier[info] ) identifier[self] . identifier[_attach_files] ( identifier[info] ) identifier[self] . identifier[_store] ()
def refresh(self): """Update list of files, if there are changes. Calls underlying list_rtn for the particular science instrument. Typically, these routines search in the pysat provided path, pysat_data_dir/platform/name/tag/, where pysat_data_dir is set by pysat.utils.set_data_dir(path=path). """ output_str = '{platform} {name} {tag} {sat_id}' output_str = output_str.format(platform=self._sat.platform, name=self._sat.name, tag=self._sat.tag, sat_id=self._sat.sat_id) output_str = ' '.join(('pysat is searching for', output_str, 'files.')) output_str = ' '.join(output_str.split()) print(output_str) info = self._sat._list_rtn(tag=self._sat.tag, sat_id=self._sat.sat_id, data_path=self.data_path, format_str=self.file_format) if not info.empty: print('Found {ll:d} of them.'.format(ll=len(info))) # depends on [control=['if'], data=[]] else: estr = 'Unable to find any files that match the supplied template. ' estr += 'If you have the necessary files please check pysat ' estr += 'settings and file locations (e.g. pysat.pysat_dir).' print(estr) info = self._remove_data_dir_path(info) self._attach_files(info) self._store()
def tempo(ref, est, **kwargs): r'''Tempo evaluation Parameters ---------- ref : jams.Annotation Reference annotation object est : jams.Annotation Estimated annotation object kwargs Additional keyword arguments Returns ------- scores : dict Dictionary of scores, where the key is the metric name (str) and the value is the (float) score achieved. See Also -------- mir_eval.tempo.evaluate Examples -------- >>> # Load in the JAMS objects >>> ref_jam = jams.load('reference.jams') >>> est_jam = jams.load('estimated.jams') >>> # Select the first relevant annotations >>> ref_ann = ref_jam.search(namespace='tempo')[0] >>> est_ann = est_jam.search(namespace='tempo')[0] >>> scores = jams.eval.tempo(ref_ann, est_ann) ''' ref = coerce_annotation(ref, 'tempo') est = coerce_annotation(est, 'tempo') ref_tempi = np.asarray([o.value for o in ref]) ref_weight = ref.data[0].confidence est_tempi = np.asarray([o.value for o in est]) return mir_eval.tempo.evaluate(ref_tempi, ref_weight, est_tempi, **kwargs)
def function[tempo, parameter[ref, est]]: constant[Tempo evaluation Parameters ---------- ref : jams.Annotation Reference annotation object est : jams.Annotation Estimated annotation object kwargs Additional keyword arguments Returns ------- scores : dict Dictionary of scores, where the key is the metric name (str) and the value is the (float) score achieved. See Also -------- mir_eval.tempo.evaluate Examples -------- >>> # Load in the JAMS objects >>> ref_jam = jams.load('reference.jams') >>> est_jam = jams.load('estimated.jams') >>> # Select the first relevant annotations >>> ref_ann = ref_jam.search(namespace='tempo')[0] >>> est_ann = est_jam.search(namespace='tempo')[0] >>> scores = jams.eval.tempo(ref_ann, est_ann) ] variable[ref] assign[=] call[name[coerce_annotation], parameter[name[ref], constant[tempo]]] variable[est] assign[=] call[name[coerce_annotation], parameter[name[est], constant[tempo]]] variable[ref_tempi] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da204621e40>]] variable[ref_weight] assign[=] call[name[ref].data][constant[0]].confidence variable[est_tempi] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da20c6c5db0>]] return[call[name[mir_eval].tempo.evaluate, parameter[name[ref_tempi], name[ref_weight], name[est_tempi]]]]
keyword[def] identifier[tempo] ( identifier[ref] , identifier[est] ,** identifier[kwargs] ): literal[string] identifier[ref] = identifier[coerce_annotation] ( identifier[ref] , literal[string] ) identifier[est] = identifier[coerce_annotation] ( identifier[est] , literal[string] ) identifier[ref_tempi] = identifier[np] . identifier[asarray] ([ identifier[o] . identifier[value] keyword[for] identifier[o] keyword[in] identifier[ref] ]) identifier[ref_weight] = identifier[ref] . identifier[data] [ literal[int] ]. identifier[confidence] identifier[est_tempi] = identifier[np] . identifier[asarray] ([ identifier[o] . identifier[value] keyword[for] identifier[o] keyword[in] identifier[est] ]) keyword[return] identifier[mir_eval] . identifier[tempo] . identifier[evaluate] ( identifier[ref_tempi] , identifier[ref_weight] , identifier[est_tempi] ,** identifier[kwargs] )
def tempo(ref, est, **kwargs): """Tempo evaluation Parameters ---------- ref : jams.Annotation Reference annotation object est : jams.Annotation Estimated annotation object kwargs Additional keyword arguments Returns ------- scores : dict Dictionary of scores, where the key is the metric name (str) and the value is the (float) score achieved. See Also -------- mir_eval.tempo.evaluate Examples -------- >>> # Load in the JAMS objects >>> ref_jam = jams.load('reference.jams') >>> est_jam = jams.load('estimated.jams') >>> # Select the first relevant annotations >>> ref_ann = ref_jam.search(namespace='tempo')[0] >>> est_ann = est_jam.search(namespace='tempo')[0] >>> scores = jams.eval.tempo(ref_ann, est_ann) """ ref = coerce_annotation(ref, 'tempo') est = coerce_annotation(est, 'tempo') ref_tempi = np.asarray([o.value for o in ref]) ref_weight = ref.data[0].confidence est_tempi = np.asarray([o.value for o in est]) return mir_eval.tempo.evaluate(ref_tempi, ref_weight, est_tempi, **kwargs)
def factory(self, data, manager=None): """Factory func for filters. data - policy config for filters manager - resource type manager (ec2, s3, etc) """ # Make the syntax a little nicer for common cases. if isinstance(data, dict) and len(data) == 1 and 'type' not in data: op = list(data.keys())[0] if op == 'or': return Or(data, self, manager) elif op == 'and': return And(data, self, manager) elif op == 'not': return Not(data, self, manager) return ValueFilter(data, manager) if isinstance(data, six.string_types): filter_type = data data = {'type': data} else: filter_type = data.get('type') if not filter_type: raise PolicyValidationError( "%s Invalid Filter %s" % ( self.plugin_type, data)) filter_class = self.get(filter_type) if filter_class is not None: return filter_class(data, manager) else: raise PolicyValidationError( "%s Invalid filter type %s" % ( self.plugin_type, data))
def function[factory, parameter[self, data, manager]]: constant[Factory func for filters. data - policy config for filters manager - resource type manager (ec2, s3, etc) ] if <ast.BoolOp object at 0x7da1b1fdee90> begin[:] variable[op] assign[=] call[call[name[list], parameter[call[name[data].keys, parameter[]]]]][constant[0]] if compare[name[op] equal[==] constant[or]] begin[:] return[call[name[Or], parameter[name[data], name[self], name[manager]]]] return[call[name[ValueFilter], parameter[name[data], name[manager]]]] if call[name[isinstance], parameter[name[data], name[six].string_types]] begin[:] variable[filter_type] assign[=] name[data] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1fdffa0>], [<ast.Name object at 0x7da1b1fdfee0>]] if <ast.UnaryOp object at 0x7da1b1fded10> begin[:] <ast.Raise object at 0x7da1b1fded40> variable[filter_class] assign[=] call[name[self].get, parameter[name[filter_type]]] if compare[name[filter_class] is_not constant[None]] begin[:] return[call[name[filter_class], parameter[name[data], name[manager]]]]
keyword[def] identifier[factory] ( identifier[self] , identifier[data] , identifier[manager] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ) keyword[and] identifier[len] ( identifier[data] )== literal[int] keyword[and] literal[string] keyword[not] keyword[in] identifier[data] : identifier[op] = identifier[list] ( identifier[data] . identifier[keys] ())[ literal[int] ] keyword[if] identifier[op] == literal[string] : keyword[return] identifier[Or] ( identifier[data] , identifier[self] , identifier[manager] ) keyword[elif] identifier[op] == literal[string] : keyword[return] identifier[And] ( identifier[data] , identifier[self] , identifier[manager] ) keyword[elif] identifier[op] == literal[string] : keyword[return] identifier[Not] ( identifier[data] , identifier[self] , identifier[manager] ) keyword[return] identifier[ValueFilter] ( identifier[data] , identifier[manager] ) keyword[if] identifier[isinstance] ( identifier[data] , identifier[six] . identifier[string_types] ): identifier[filter_type] = identifier[data] identifier[data] ={ literal[string] : identifier[data] } keyword[else] : identifier[filter_type] = identifier[data] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[filter_type] : keyword[raise] identifier[PolicyValidationError] ( literal[string] %( identifier[self] . identifier[plugin_type] , identifier[data] )) identifier[filter_class] = identifier[self] . identifier[get] ( identifier[filter_type] ) keyword[if] identifier[filter_class] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[filter_class] ( identifier[data] , identifier[manager] ) keyword[else] : keyword[raise] identifier[PolicyValidationError] ( literal[string] %( identifier[self] . identifier[plugin_type] , identifier[data] ))
def factory(self, data, manager=None): """Factory func for filters. data - policy config for filters manager - resource type manager (ec2, s3, etc) """ # Make the syntax a little nicer for common cases. if isinstance(data, dict) and len(data) == 1 and ('type' not in data): op = list(data.keys())[0] if op == 'or': return Or(data, self, manager) # depends on [control=['if'], data=[]] elif op == 'and': return And(data, self, manager) # depends on [control=['if'], data=[]] elif op == 'not': return Not(data, self, manager) # depends on [control=['if'], data=[]] return ValueFilter(data, manager) # depends on [control=['if'], data=[]] if isinstance(data, six.string_types): filter_type = data data = {'type': data} # depends on [control=['if'], data=[]] else: filter_type = data.get('type') if not filter_type: raise PolicyValidationError('%s Invalid Filter %s' % (self.plugin_type, data)) # depends on [control=['if'], data=[]] filter_class = self.get(filter_type) if filter_class is not None: return filter_class(data, manager) # depends on [control=['if'], data=['filter_class']] else: raise PolicyValidationError('%s Invalid filter type %s' % (self.plugin_type, data))
def _set_mixed_moments_to_zero(self, closed_central_moments, n_counter): r""" In univariate case, set the cross-terms to 0. :param closed_central_moments: matrix of closed central moment :param n_counter: a list of :class:`~means.core.descriptors.Moment`\s representing central moments :type n_counter: list[:class:`~means.core.descriptors.Moment`] :return: a matrix of new closed central moments with cross-terms equal to 0 """ positive_n_counter = [n for n in n_counter if n.order > 1] if self.is_multivariate: return closed_central_moments else: return [0 if n.is_mixed else ccm for n,ccm in zip(positive_n_counter, closed_central_moments)]
def function[_set_mixed_moments_to_zero, parameter[self, closed_central_moments, n_counter]]: constant[ In univariate case, set the cross-terms to 0. :param closed_central_moments: matrix of closed central moment :param n_counter: a list of :class:`~means.core.descriptors.Moment`\s representing central moments :type n_counter: list[:class:`~means.core.descriptors.Moment`] :return: a matrix of new closed central moments with cross-terms equal to 0 ] variable[positive_n_counter] assign[=] <ast.ListComp object at 0x7da20c7ca0b0> if name[self].is_multivariate begin[:] return[name[closed_central_moments]]
keyword[def] identifier[_set_mixed_moments_to_zero] ( identifier[self] , identifier[closed_central_moments] , identifier[n_counter] ): literal[string] identifier[positive_n_counter] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[n_counter] keyword[if] identifier[n] . identifier[order] > literal[int] ] keyword[if] identifier[self] . identifier[is_multivariate] : keyword[return] identifier[closed_central_moments] keyword[else] : keyword[return] [ literal[int] keyword[if] identifier[n] . identifier[is_mixed] keyword[else] identifier[ccm] keyword[for] identifier[n] , identifier[ccm] keyword[in] identifier[zip] ( identifier[positive_n_counter] , identifier[closed_central_moments] )]
def _set_mixed_moments_to_zero(self, closed_central_moments, n_counter): """ In univariate case, set the cross-terms to 0. :param closed_central_moments: matrix of closed central moment :param n_counter: a list of :class:`~means.core.descriptors.Moment`\\s representing central moments :type n_counter: list[:class:`~means.core.descriptors.Moment`] :return: a matrix of new closed central moments with cross-terms equal to 0 """ positive_n_counter = [n for n in n_counter if n.order > 1] if self.is_multivariate: return closed_central_moments # depends on [control=['if'], data=[]] else: return [0 if n.is_mixed else ccm for (n, ccm) in zip(positive_n_counter, closed_central_moments)]
def add_acquisition_source( self, method, date=None, submission_number=None, internal_uid=None, email=None, orcid=None, source=None, datetime=None, ): """Add acquisition source. :type submission_number: integer :type email: integer :type source: string :param date: UTC date in isoformat .. deprecated:: 30.1.0 Use ``datetime`` instead. :type date: string :param method: method of acquisition for the suggested document :type method: string :param orcid: orcid of the user that is creating the record :type orcid: string :param internal_uid: id of the user that is creating the record :type internal_uid: string :param datetime: UTC datetime in ISO 8601 format :type datetime: string """ if date is not None: if datetime is not None: raise ValueError("Conflicting args: 'date' and 'datetime'") warnings.warn("Use 'datetime', not 'date'", DeprecationWarning) datetime = date acquisition_source = self._sourced_dict(source) acquisition_source['submission_number'] = str(submission_number) for key in ('datetime', 'email', 'method', 'orcid', 'internal_uid'): if locals()[key] is not None: acquisition_source[key] = locals()[key] self.record['acquisition_source'] = acquisition_source
def function[add_acquisition_source, parameter[self, method, date, submission_number, internal_uid, email, orcid, source, datetime]]: constant[Add acquisition source. :type submission_number: integer :type email: integer :type source: string :param date: UTC date in isoformat .. deprecated:: 30.1.0 Use ``datetime`` instead. :type date: string :param method: method of acquisition for the suggested document :type method: string :param orcid: orcid of the user that is creating the record :type orcid: string :param internal_uid: id of the user that is creating the record :type internal_uid: string :param datetime: UTC datetime in ISO 8601 format :type datetime: string ] if compare[name[date] is_not constant[None]] begin[:] if compare[name[datetime] is_not constant[None]] begin[:] <ast.Raise object at 0x7da1b24af430> call[name[warnings].warn, parameter[constant[Use 'datetime', not 'date'], name[DeprecationWarning]]] variable[datetime] assign[=] name[date] variable[acquisition_source] assign[=] call[name[self]._sourced_dict, parameter[name[source]]] call[name[acquisition_source]][constant[submission_number]] assign[=] call[name[str], parameter[name[submission_number]]] for taget[name[key]] in starred[tuple[[<ast.Constant object at 0x7da1b24ac160>, <ast.Constant object at 0x7da1b24afe20>, <ast.Constant object at 0x7da1b24af220>, <ast.Constant object at 0x7da1b24ae3e0>, <ast.Constant object at 0x7da1b24ae350>]]] begin[:] if compare[call[call[name[locals], parameter[]]][name[key]] is_not constant[None]] begin[:] call[name[acquisition_source]][name[key]] assign[=] call[call[name[locals], parameter[]]][name[key]] call[name[self].record][constant[acquisition_source]] assign[=] name[acquisition_source]
keyword[def] identifier[add_acquisition_source] ( identifier[self] , identifier[method] , identifier[date] = keyword[None] , identifier[submission_number] = keyword[None] , identifier[internal_uid] = keyword[None] , identifier[email] = keyword[None] , identifier[orcid] = keyword[None] , identifier[source] = keyword[None] , identifier[datetime] = keyword[None] , ): literal[string] keyword[if] identifier[date] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[datetime] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] ) identifier[datetime] = identifier[date] identifier[acquisition_source] = identifier[self] . identifier[_sourced_dict] ( identifier[source] ) identifier[acquisition_source] [ literal[string] ]= identifier[str] ( identifier[submission_number] ) keyword[for] identifier[key] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[if] identifier[locals] ()[ identifier[key] ] keyword[is] keyword[not] keyword[None] : identifier[acquisition_source] [ identifier[key] ]= identifier[locals] ()[ identifier[key] ] identifier[self] . identifier[record] [ literal[string] ]= identifier[acquisition_source]
def add_acquisition_source(self, method, date=None, submission_number=None, internal_uid=None, email=None, orcid=None, source=None, datetime=None): """Add acquisition source. :type submission_number: integer :type email: integer :type source: string :param date: UTC date in isoformat .. deprecated:: 30.1.0 Use ``datetime`` instead. :type date: string :param method: method of acquisition for the suggested document :type method: string :param orcid: orcid of the user that is creating the record :type orcid: string :param internal_uid: id of the user that is creating the record :type internal_uid: string :param datetime: UTC datetime in ISO 8601 format :type datetime: string """ if date is not None: if datetime is not None: raise ValueError("Conflicting args: 'date' and 'datetime'") # depends on [control=['if'], data=[]] warnings.warn("Use 'datetime', not 'date'", DeprecationWarning) datetime = date # depends on [control=['if'], data=['date']] acquisition_source = self._sourced_dict(source) acquisition_source['submission_number'] = str(submission_number) for key in ('datetime', 'email', 'method', 'orcid', 'internal_uid'): if locals()[key] is not None: acquisition_source[key] = locals()[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] self.record['acquisition_source'] = acquisition_source
def interface_endpoints(self): """Instance depends on the API version: * 2018-08-01: :class:`InterfaceEndpointsOperations<azure.mgmt.network.v2018_08_01.operations.InterfaceEndpointsOperations>` """ api_version = self._get_api_version('interface_endpoints') if api_version == '2018-08-01': from .v2018_08_01.operations import InterfaceEndpointsOperations as OperationClass else: raise NotImplementedError("APIVersion {} is not available".format(api_version)) return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
def function[interface_endpoints, parameter[self]]: constant[Instance depends on the API version: * 2018-08-01: :class:`InterfaceEndpointsOperations<azure.mgmt.network.v2018_08_01.operations.InterfaceEndpointsOperations>` ] variable[api_version] assign[=] call[name[self]._get_api_version, parameter[constant[interface_endpoints]]] if compare[name[api_version] equal[==] constant[2018-08-01]] begin[:] from relative_module[v2018_08_01.operations] import module[InterfaceEndpointsOperations] return[call[name[OperationClass], parameter[name[self]._client, name[self].config, call[name[Serializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]], call[name[Deserializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]]]]]
keyword[def] identifier[interface_endpoints] ( identifier[self] ): literal[string] identifier[api_version] = identifier[self] . identifier[_get_api_version] ( literal[string] ) keyword[if] identifier[api_version] == literal[string] : keyword[from] . identifier[v2018_08_01] . identifier[operations] keyword[import] identifier[InterfaceEndpointsOperations] keyword[as] identifier[OperationClass] keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[api_version] )) keyword[return] identifier[OperationClass] ( identifier[self] . identifier[_client] , identifier[self] . identifier[config] , identifier[Serializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] )), identifier[Deserializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] )))
def interface_endpoints(self): """Instance depends on the API version: * 2018-08-01: :class:`InterfaceEndpointsOperations<azure.mgmt.network.v2018_08_01.operations.InterfaceEndpointsOperations>` """ api_version = self._get_api_version('interface_endpoints') if api_version == '2018-08-01': from .v2018_08_01.operations import InterfaceEndpointsOperations as OperationClass # depends on [control=['if'], data=[]] else: raise NotImplementedError('APIVersion {} is not available'.format(api_version)) return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
def get_areas(self, area_id=None, **kwargs): """ Alias for get_elements() but filter the result by Area :param area_id: The Id of the area :type area_id: Integer :return: List of elements """ return self.get_elements(Area, elem_id=area_id, **kwargs)
def function[get_areas, parameter[self, area_id]]: constant[ Alias for get_elements() but filter the result by Area :param area_id: The Id of the area :type area_id: Integer :return: List of elements ] return[call[name[self].get_elements, parameter[name[Area]]]]
keyword[def] identifier[get_areas] ( identifier[self] , identifier[area_id] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[get_elements] ( identifier[Area] , identifier[elem_id] = identifier[area_id] ,** identifier[kwargs] )
def get_areas(self, area_id=None, **kwargs): """ Alias for get_elements() but filter the result by Area :param area_id: The Id of the area :type area_id: Integer :return: List of elements """ return self.get_elements(Area, elem_id=area_id, **kwargs)
def touch(path, content="", encoding="utf-8", overwrite=False): """Create a file at the given path if it does not already exists. Args: path (str): Path to the file. content (str): Optional content that will be written in the file. encoding (str): Encoding in which to write the content. Default: ``utf-8`` overwrite (bool): Overwrite the file if exists. Returns: bool: True if the operation is successful, False otherwise. """ path = os.path.abspath(path) if not overwrite and os.path.exists(path): logger.warning('touch: "%s" already exists', path) return False try: logger.info("touch: %s", path) with io.open(path, "wb") as f: if not isinstance(content, six.binary_type): content = content.encode(encoding) f.write(content) return True except Exception as e: logger.error("touch: %s failed. Error: %s", path, e) return False
def function[touch, parameter[path, content, encoding, overwrite]]: constant[Create a file at the given path if it does not already exists. Args: path (str): Path to the file. content (str): Optional content that will be written in the file. encoding (str): Encoding in which to write the content. Default: ``utf-8`` overwrite (bool): Overwrite the file if exists. Returns: bool: True if the operation is successful, False otherwise. ] variable[path] assign[=] call[name[os].path.abspath, parameter[name[path]]] if <ast.BoolOp object at 0x7da1b1093d00> begin[:] call[name[logger].warning, parameter[constant[touch: "%s" already exists], name[path]]] return[constant[False]] <ast.Try object at 0x7da1b11a7a60>
keyword[def] identifier[touch] ( identifier[path] , identifier[content] = literal[string] , identifier[encoding] = literal[string] , identifier[overwrite] = keyword[False] ): literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ) keyword[if] keyword[not] identifier[overwrite] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ): identifier[logger] . identifier[warning] ( literal[string] , identifier[path] ) keyword[return] keyword[False] keyword[try] : identifier[logger] . identifier[info] ( literal[string] , identifier[path] ) keyword[with] identifier[io] . identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : keyword[if] keyword[not] identifier[isinstance] ( identifier[content] , identifier[six] . identifier[binary_type] ): identifier[content] = identifier[content] . identifier[encode] ( identifier[encoding] ) identifier[f] . identifier[write] ( identifier[content] ) keyword[return] keyword[True] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logger] . identifier[error] ( literal[string] , identifier[path] , identifier[e] ) keyword[return] keyword[False]
def touch(path, content='', encoding='utf-8', overwrite=False): """Create a file at the given path if it does not already exists. Args: path (str): Path to the file. content (str): Optional content that will be written in the file. encoding (str): Encoding in which to write the content. Default: ``utf-8`` overwrite (bool): Overwrite the file if exists. Returns: bool: True if the operation is successful, False otherwise. """ path = os.path.abspath(path) if not overwrite and os.path.exists(path): logger.warning('touch: "%s" already exists', path) return False # depends on [control=['if'], data=[]] try: logger.info('touch: %s', path) with io.open(path, 'wb') as f: if not isinstance(content, six.binary_type): content = content.encode(encoding) # depends on [control=['if'], data=[]] f.write(content) # depends on [control=['with'], data=['f']] return True # depends on [control=['try'], data=[]] except Exception as e: logger.error('touch: %s failed. Error: %s', path, e) return False # depends on [control=['except'], data=['e']]
def _IDW(self, latitude, longitude, radius=1): """ Return the interpolated elevation at a point. Load the correct tile for latitude and longitude given. If the tile doesn't exist, return None. Otherwise, call the tile's Inverse Distance Weighted function and return the elevation. Args: latitude: float with the latitude in decimal degrees longitude: float with the longitude in decimal degrees radius: int of 1 or 2 indicating the approximate radius of adjacent cells to include Returns: a float of the interpolated elevation with the same unit as the .hgt file (meters) """ tile = self.get_file(latitude, longitude) if tile is None: return None return tile._InverseDistanceWeighted(latitude, longitude, radius)
def function[_IDW, parameter[self, latitude, longitude, radius]]: constant[ Return the interpolated elevation at a point. Load the correct tile for latitude and longitude given. If the tile doesn't exist, return None. Otherwise, call the tile's Inverse Distance Weighted function and return the elevation. Args: latitude: float with the latitude in decimal degrees longitude: float with the longitude in decimal degrees radius: int of 1 or 2 indicating the approximate radius of adjacent cells to include Returns: a float of the interpolated elevation with the same unit as the .hgt file (meters) ] variable[tile] assign[=] call[name[self].get_file, parameter[name[latitude], name[longitude]]] if compare[name[tile] is constant[None]] begin[:] return[constant[None]] return[call[name[tile]._InverseDistanceWeighted, parameter[name[latitude], name[longitude], name[radius]]]]
keyword[def] identifier[_IDW] ( identifier[self] , identifier[latitude] , identifier[longitude] , identifier[radius] = literal[int] ): literal[string] identifier[tile] = identifier[self] . identifier[get_file] ( identifier[latitude] , identifier[longitude] ) keyword[if] identifier[tile] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[return] identifier[tile] . identifier[_InverseDistanceWeighted] ( identifier[latitude] , identifier[longitude] , identifier[radius] )
def _IDW(self, latitude, longitude, radius=1): """ Return the interpolated elevation at a point. Load the correct tile for latitude and longitude given. If the tile doesn't exist, return None. Otherwise, call the tile's Inverse Distance Weighted function and return the elevation. Args: latitude: float with the latitude in decimal degrees longitude: float with the longitude in decimal degrees radius: int of 1 or 2 indicating the approximate radius of adjacent cells to include Returns: a float of the interpolated elevation with the same unit as the .hgt file (meters) """ tile = self.get_file(latitude, longitude) if tile is None: return None # depends on [control=['if'], data=[]] return tile._InverseDistanceWeighted(latitude, longitude, radius)
def version(self) -> str: '''Show the version number of Android Debug Bridge.''' output, _ = self._execute('version') return output.splitlines()[0].split()[-1]
def function[version, parameter[self]]: constant[Show the version number of Android Debug Bridge.] <ast.Tuple object at 0x7da18f8117e0> assign[=] call[name[self]._execute, parameter[constant[version]]] return[call[call[call[call[name[output].splitlines, parameter[]]][constant[0]].split, parameter[]]][<ast.UnaryOp object at 0x7da20c6c5a80>]]
keyword[def] identifier[version] ( identifier[self] )-> identifier[str] : literal[string] identifier[output] , identifier[_] = identifier[self] . identifier[_execute] ( literal[string] ) keyword[return] identifier[output] . identifier[splitlines] ()[ literal[int] ]. identifier[split] ()[- literal[int] ]
def version(self) -> str: """Show the version number of Android Debug Bridge.""" (output, _) = self._execute('version') return output.splitlines()[0].split()[-1]
def load(description, add_arguments_cb = lambda x: None, postprocess_conf_cb = lambda x: None): """Loads the global Conf object from command line arguments. Encode the next argument after +plugin to ensure that it does not start with a prefix_char """ argparser = ArgumentParser( description = description, prefix_chars = '-+' ) argparser.add_argument( '--version', dest = 'PRINT_VERSION', action = 'store_true', help = 'Print version and exit' ) add_arguments_cb(argparser) # set up plugin argument argparser plugin_argparser = argparser.add_argument_group('Plugins') plugins = {} def load_plugin_group(group): """Load all plugins from the given plugin_group.""" for entry_point in iter_entry_points(group = group): name = str(entry_point).split(' =',1)[0] plugin = entry_point.load() if isclass(plugin) \ and not plugin in Conf.SUPPORTED_PLUGIN_INTERFACES \ and any([ issubclass(plugin, supported_plugin_interface) for supported_plugin_interface in Conf.SUPPORTED_PLUGIN_INTERFACES ]): plugin_argparser.add_argument( '+{}'.format(name), dest = 'PLUGIN_{}'.format(name), type = str, nargs = '?', default = DEFAULT, metavar = 'args'.format(name), help = make_argparse_help_safe( call_plugin( plugin, 'help' ) ) ) # register plugin plugins[name] = plugin else: warning('Plugin not supported: {}'.format(name)) load_plugin_group(Conf.PLUGIN_GROUP_BASE) if Conf.LOAD_PLUGINS: load_plugin_group(Conf.PLUGIN_GROUP) conf = vars( argparser.parse_args([ v if i == 0 or v[0] == '+' or Conf.ARGS[i-1][0] != '+' else b32encode(v.encode()).decode() for i, v in enumerate(Conf.ARGS) ]) ) postprocess_conf_cb(conf) # apply configuration Conf.set(conf) if Conf.PRINT_VERSION: print( 'pdml2flow version {}'.format( Conf.VERSION ), file = Conf.OUT ) sys.exit(0) # initialize plugins Conf.PLUGINS = [] for conf_name, args in conf.items(): if conf_name.startswith('PLUGIN_') and args != DEFAULT: plugin_name = conf_name[7:] Conf.PLUGINS.append( # instantiate plugin plugins[plugin_name]( *split( b32decode(args.encode()).decode() if args is not None else '' ) ) )
def function[load, parameter[description, add_arguments_cb, postprocess_conf_cb]]: constant[Loads the global Conf object from command line arguments. Encode the next argument after +plugin to ensure that it does not start with a prefix_char ] variable[argparser] assign[=] call[name[ArgumentParser], parameter[]] call[name[argparser].add_argument, parameter[constant[--version]]] call[name[add_arguments_cb], parameter[name[argparser]]] variable[plugin_argparser] assign[=] call[name[argparser].add_argument_group, parameter[constant[Plugins]]] variable[plugins] assign[=] dictionary[[], []] def function[load_plugin_group, parameter[group]]: constant[Load all plugins from the given plugin_group.] for taget[name[entry_point]] in starred[call[name[iter_entry_points], parameter[]]] begin[:] variable[name] assign[=] call[call[call[name[str], parameter[name[entry_point]]].split, parameter[constant[ =], constant[1]]]][constant[0]] variable[plugin] assign[=] call[name[entry_point].load, parameter[]] if <ast.BoolOp object at 0x7da18bc71630> begin[:] call[name[plugin_argparser].add_argument, parameter[call[constant[+{}].format, parameter[name[name]]]]] call[name[plugins]][name[name]] assign[=] name[plugin] call[name[load_plugin_group], parameter[name[Conf].PLUGIN_GROUP_BASE]] if name[Conf].LOAD_PLUGINS begin[:] call[name[load_plugin_group], parameter[name[Conf].PLUGIN_GROUP]] variable[conf] assign[=] call[name[vars], parameter[call[name[argparser].parse_args, parameter[<ast.ListComp object at 0x7da1b1b1a440>]]]] call[name[postprocess_conf_cb], parameter[name[conf]]] call[name[Conf].set, parameter[name[conf]]] if name[Conf].PRINT_VERSION begin[:] call[name[print], parameter[call[constant[pdml2flow version {}].format, parameter[name[Conf].VERSION]]]] call[name[sys].exit, parameter[constant[0]]] name[Conf].PLUGINS assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1be7b20>, <ast.Name object at 0x7da1b1be5a20>]]] in starred[call[name[conf].items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b1be7670> begin[:] variable[plugin_name] assign[=] call[name[conf_name]][<ast.Slice object at 0x7da1b1be6a70>] call[name[Conf].PLUGINS.append, parameter[call[call[name[plugins]][name[plugin_name]], parameter[<ast.Starred object at 0x7da1b1be4c40>]]]]
keyword[def] identifier[load] ( identifier[description] , identifier[add_arguments_cb] = keyword[lambda] identifier[x] : keyword[None] , identifier[postprocess_conf_cb] = keyword[lambda] identifier[x] : keyword[None] ): literal[string] identifier[argparser] = identifier[ArgumentParser] ( identifier[description] = identifier[description] , identifier[prefix_chars] = literal[string] ) identifier[argparser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[add_arguments_cb] ( identifier[argparser] ) identifier[plugin_argparser] = identifier[argparser] . identifier[add_argument_group] ( literal[string] ) identifier[plugins] ={} keyword[def] identifier[load_plugin_group] ( identifier[group] ): literal[string] keyword[for] identifier[entry_point] keyword[in] identifier[iter_entry_points] ( identifier[group] = identifier[group] ): identifier[name] = identifier[str] ( identifier[entry_point] ). identifier[split] ( literal[string] , literal[int] )[ literal[int] ] identifier[plugin] = identifier[entry_point] . identifier[load] () keyword[if] identifier[isclass] ( identifier[plugin] ) keyword[and] keyword[not] identifier[plugin] keyword[in] identifier[Conf] . identifier[SUPPORTED_PLUGIN_INTERFACES] keyword[and] identifier[any] ([ identifier[issubclass] ( identifier[plugin] , identifier[supported_plugin_interface] ) keyword[for] identifier[supported_plugin_interface] keyword[in] identifier[Conf] . identifier[SUPPORTED_PLUGIN_INTERFACES] ]): identifier[plugin_argparser] . identifier[add_argument] ( literal[string] . identifier[format] ( identifier[name] ), identifier[dest] = literal[string] . identifier[format] ( identifier[name] ), identifier[type] = identifier[str] , identifier[nargs] = literal[string] , identifier[default] = identifier[DEFAULT] , identifier[metavar] = literal[string] . identifier[format] ( identifier[name] ), identifier[help] = identifier[make_argparse_help_safe] ( identifier[call_plugin] ( identifier[plugin] , literal[string] ) ) ) identifier[plugins] [ identifier[name] ]= identifier[plugin] keyword[else] : identifier[warning] ( literal[string] . identifier[format] ( identifier[name] )) identifier[load_plugin_group] ( identifier[Conf] . identifier[PLUGIN_GROUP_BASE] ) keyword[if] identifier[Conf] . identifier[LOAD_PLUGINS] : identifier[load_plugin_group] ( identifier[Conf] . identifier[PLUGIN_GROUP] ) identifier[conf] = identifier[vars] ( identifier[argparser] . identifier[parse_args] ([ identifier[v] keyword[if] identifier[i] == literal[int] keyword[or] identifier[v] [ literal[int] ]== literal[string] keyword[or] identifier[Conf] . identifier[ARGS] [ identifier[i] - literal[int] ][ literal[int] ]!= literal[string] keyword[else] identifier[b32encode] ( identifier[v] . identifier[encode] ()). identifier[decode] () keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[Conf] . identifier[ARGS] ) ]) ) identifier[postprocess_conf_cb] ( identifier[conf] ) identifier[Conf] . identifier[set] ( identifier[conf] ) keyword[if] identifier[Conf] . identifier[PRINT_VERSION] : identifier[print] ( literal[string] . identifier[format] ( identifier[Conf] . identifier[VERSION] ), identifier[file] = identifier[Conf] . identifier[OUT] ) identifier[sys] . identifier[exit] ( literal[int] ) identifier[Conf] . identifier[PLUGINS] =[] keyword[for] identifier[conf_name] , identifier[args] keyword[in] identifier[conf] . identifier[items] (): keyword[if] identifier[conf_name] . identifier[startswith] ( literal[string] ) keyword[and] identifier[args] != identifier[DEFAULT] : identifier[plugin_name] = identifier[conf_name] [ literal[int] :] identifier[Conf] . identifier[PLUGINS] . identifier[append] ( identifier[plugins] [ identifier[plugin_name] ]( * identifier[split] ( identifier[b32decode] ( identifier[args] . identifier[encode] ()). identifier[decode] () keyword[if] identifier[args] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ) ) )
def load(description, add_arguments_cb=lambda x: None, postprocess_conf_cb=lambda x: None): """Loads the global Conf object from command line arguments. Encode the next argument after +plugin to ensure that it does not start with a prefix_char """ argparser = ArgumentParser(description=description, prefix_chars='-+') argparser.add_argument('--version', dest='PRINT_VERSION', action='store_true', help='Print version and exit') add_arguments_cb(argparser) # set up plugin argument argparser plugin_argparser = argparser.add_argument_group('Plugins') plugins = {} def load_plugin_group(group): """Load all plugins from the given plugin_group.""" for entry_point in iter_entry_points(group=group): name = str(entry_point).split(' =', 1)[0] plugin = entry_point.load() if isclass(plugin) and (not plugin in Conf.SUPPORTED_PLUGIN_INTERFACES) and any([issubclass(plugin, supported_plugin_interface) for supported_plugin_interface in Conf.SUPPORTED_PLUGIN_INTERFACES]): plugin_argparser.add_argument('+{}'.format(name), dest='PLUGIN_{}'.format(name), type=str, nargs='?', default=DEFAULT, metavar='args'.format(name), help=make_argparse_help_safe(call_plugin(plugin, 'help'))) # register plugin plugins[name] = plugin # depends on [control=['if'], data=[]] else: warning('Plugin not supported: {}'.format(name)) # depends on [control=['for'], data=['entry_point']] load_plugin_group(Conf.PLUGIN_GROUP_BASE) if Conf.LOAD_PLUGINS: load_plugin_group(Conf.PLUGIN_GROUP) # depends on [control=['if'], data=[]] conf = vars(argparser.parse_args([v if i == 0 or v[0] == '+' or Conf.ARGS[i - 1][0] != '+' else b32encode(v.encode()).decode() for (i, v) in enumerate(Conf.ARGS)])) postprocess_conf_cb(conf) # apply configuration Conf.set(conf) if Conf.PRINT_VERSION: print('pdml2flow version {}'.format(Conf.VERSION), file=Conf.OUT) sys.exit(0) # depends on [control=['if'], data=[]] # initialize plugins Conf.PLUGINS = [] for (conf_name, args) in conf.items(): if conf_name.startswith('PLUGIN_') and args != DEFAULT: plugin_name = conf_name[7:] # instantiate plugin Conf.PLUGINS.append(plugins[plugin_name](*split(b32decode(args.encode()).decode() if args is not None else ''))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
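A hedged usage sketch for the load frontend above. The callback names and the --debug flag are illustrative, not part of the library, and Conf.ARGS is assumed to already hold the raw argument list.

def add_arguments(argparser):
    # hypothetical extra flag registered on the ArgumentParser built inside load()
    argparser.add_argument('--debug', dest='DEBUG', action='store_true',
                           help='Enable debug output')

def postprocess(conf):
    # hypothetical post-processing of the parsed configuration dict
    conf['DEBUG'] = bool(conf.get('DEBUG'))

load('example frontend', add_arguments_cb=add_arguments, postprocess_conf_cb=postprocess)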
def _update_projects(self): '''Check project update''' now = time.time() if ( not self._force_update_project and self._last_update_project + self.UPDATE_PROJECT_INTERVAL > now ): return for project in self.projectdb.check_update(self._last_update_project): self._update_project(project) logger.debug("project: %s updated.", project['name']) self._force_update_project = False self._last_update_project = now
def function[_update_projects, parameter[self]]: constant[Check project update] variable[now] assign[=] call[name[time].time, parameter[]] if <ast.BoolOp object at 0x7da1b208ee30> begin[:] return[None] for taget[name[project]] in starred[call[name[self].projectdb.check_update, parameter[name[self]._last_update_project]]] begin[:] call[name[self]._update_project, parameter[name[project]]] call[name[logger].debug, parameter[constant[project: %s updated.], call[name[project]][constant[name]]]] name[self]._force_update_project assign[=] constant[False] name[self]._last_update_project assign[=] name[now]
keyword[def] identifier[_update_projects] ( identifier[self] ): literal[string] identifier[now] = identifier[time] . identifier[time] () keyword[if] ( keyword[not] identifier[self] . identifier[_force_update_project] keyword[and] identifier[self] . identifier[_last_update_project] + identifier[self] . identifier[UPDATE_PROJECT_INTERVAL] > identifier[now] ): keyword[return] keyword[for] identifier[project] keyword[in] identifier[self] . identifier[projectdb] . identifier[check_update] ( identifier[self] . identifier[_last_update_project] ): identifier[self] . identifier[_update_project] ( identifier[project] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[project] [ literal[string] ]) identifier[self] . identifier[_force_update_project] = keyword[False] identifier[self] . identifier[_last_update_project] = identifier[now]
def _update_projects(self): """Check project update""" now = time.time() if not self._force_update_project and self._last_update_project + self.UPDATE_PROJECT_INTERVAL > now: return # depends on [control=['if'], data=[]] for project in self.projectdb.check_update(self._last_update_project): self._update_project(project) logger.debug('project: %s updated.', project['name']) # depends on [control=['for'], data=['project']] self._force_update_project = False self._last_update_project = now
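The early return above is the whole throttling trick: skip the projectdb poll unless forced or the interval has elapsed. A minimal standalone sketch of that guard; the 5-minute interval is an assumed value, not taken from the source.

import time

UPDATE_PROJECT_INTERVAL = 5 * 60  # assumed interval, in seconds

def needs_refresh(last_update, force, now=None):
    # mirrors the guard in _update_projects
    now = time.time() if now is None else now
    return force or last_update + UPDATE_PROJECT_INTERVAL <= now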
def osm_polygon_download(query, limit=1, polygon_geojson=1): """ Geocode a place and download its boundary geometry from OSM's Nominatim API. Parameters ---------- query : string or dict query string or structured query dict to geocode/download limit : int max number of results to return polygon_geojson : int request the boundary geometry polygon from the API, 0=no, 1=yes Returns ------- dict """ # define the parameters params = OrderedDict() params['format'] = 'json' params['limit'] = limit params['dedupe'] = 0 #this prevents OSM from de-duping results so we're guaranteed to get precisely 'limit' number of results params['polygon_geojson'] = polygon_geojson # add the structured query dict (if provided) to params, otherwise query # with place name string if isinstance(query, str): params['q'] = query elif isinstance(query, dict): # add the query keys in alphabetical order so the URL is the same string # each time, for caching purposes for key in sorted(list(query.keys())): params[key] = query[key] else: raise TypeError('query must be a dict or a string') # request the URL, return the JSON response_json = nominatim_request(params=params, timeout=30) return response_json
def function[osm_polygon_download, parameter[query, limit, polygon_geojson]]: constant[ Geocode a place and download its boundary geometry from OSM's Nominatim API. Parameters ---------- query : string or dict query string or structured query dict to geocode/download limit : int max number of results to return polygon_geojson : int request the boundary geometry polygon from the API, 0=no, 1=yes Returns ------- dict ] variable[params] assign[=] call[name[OrderedDict], parameter[]] call[name[params]][constant[format]] assign[=] constant[json] call[name[params]][constant[limit]] assign[=] name[limit] call[name[params]][constant[dedupe]] assign[=] constant[0] call[name[params]][constant[polygon_geojson]] assign[=] name[polygon_geojson] if call[name[isinstance], parameter[name[query], name[str]]] begin[:] call[name[params]][constant[q]] assign[=] name[query] variable[response_json] assign[=] call[name[nominatim_request], parameter[]] return[name[response_json]]
keyword[def] identifier[osm_polygon_download] ( identifier[query] , identifier[limit] = literal[int] , identifier[polygon_geojson] = literal[int] ): literal[string] identifier[params] = identifier[OrderedDict] () identifier[params] [ literal[string] ]= literal[string] identifier[params] [ literal[string] ]= identifier[limit] identifier[params] [ literal[string] ]= literal[int] identifier[params] [ literal[string] ]= identifier[polygon_geojson] keyword[if] identifier[isinstance] ( identifier[query] , identifier[str] ): identifier[params] [ literal[string] ]= identifier[query] keyword[elif] identifier[isinstance] ( identifier[query] , identifier[dict] ): keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[list] ( identifier[query] . identifier[keys] ())): identifier[params] [ identifier[key] ]= identifier[query] [ identifier[key] ] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[response_json] = identifier[nominatim_request] ( identifier[params] = identifier[params] , identifier[timeout] = literal[int] ) keyword[return] identifier[response_json]
def osm_polygon_download(query, limit=1, polygon_geojson=1): """ Geocode a place and download its boundary geometry from OSM's Nominatim API. Parameters ---------- query : string or dict query string or structured query dict to geocode/download limit : int max number of results to return polygon_geojson : int request the boundary geometry polygon from the API, 0=no, 1=yes Returns ------- dict """ # define the parameters params = OrderedDict() params['format'] = 'json' params['limit'] = limit params['dedupe'] = 0 #this prevents OSM from de-duping results so we're guaranteed to get precisely 'limit' number of results params['polygon_geojson'] = polygon_geojson # add the structured query dict (if provided) to params, otherwise query # with place name string if isinstance(query, str): params['q'] = query # depends on [control=['if'], data=[]] elif isinstance(query, dict): # add the query keys in alphabetical order so the URL is the same string # each time, for caching purposes for key in sorted(list(query.keys())): params[key] = query[key] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] else: raise TypeError('query must be a dict or a string') # request the URL, return the JSON response_json = nominatim_request(params=params, timeout=30) return response_json
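Two illustrative calls, assuming osm_polygon_download and its nominatim_request helper are importable; the place names are arbitrary.

# free-form place-name query
result = osm_polygon_download('Berkeley, California, USA')

# structured query; keys are added in sorted order so the cached URL stays stable
result = osm_polygon_download({'city': 'Berkeley',
                               'state': 'California',
                               'country': 'USA'}, limit=2)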
def update_field(uid, post_id=None, tag_id=None, par_id=None): ''' Update the field of post2tag. ''' if post_id: entry = TabPost2Tag.update( post_id=post_id ).where(TabPost2Tag.uid == uid) entry.execute() if tag_id: entry2 = TabPost2Tag.update( par_id=tag_id[:2] + '00', tag_id=tag_id, ).where(TabPost2Tag.uid == uid) entry2.execute() if par_id: entry2 = TabPost2Tag.update( par_id=par_id ).where(TabPost2Tag.uid == uid) entry2.execute()
def function[update_field, parameter[uid, post_id, tag_id, par_id]]: constant[ Update the field of post2tag. ] if name[post_id] begin[:] variable[entry] assign[=] call[call[name[TabPost2Tag].update, parameter[]].where, parameter[compare[name[TabPost2Tag].uid equal[==] name[uid]]]] call[name[entry].execute, parameter[]] if name[tag_id] begin[:] variable[entry2] assign[=] call[call[name[TabPost2Tag].update, parameter[]].where, parameter[compare[name[TabPost2Tag].uid equal[==] name[uid]]]] call[name[entry2].execute, parameter[]] if name[par_id] begin[:] variable[entry2] assign[=] call[call[name[TabPost2Tag].update, parameter[]].where, parameter[compare[name[TabPost2Tag].uid equal[==] name[uid]]]] call[name[entry2].execute, parameter[]]
keyword[def] identifier[update_field] ( identifier[uid] , identifier[post_id] = keyword[None] , identifier[tag_id] = keyword[None] , identifier[par_id] = keyword[None] ): literal[string] keyword[if] identifier[post_id] : identifier[entry] = identifier[TabPost2Tag] . identifier[update] ( identifier[post_id] = identifier[post_id] ). identifier[where] ( identifier[TabPost2Tag] . identifier[uid] == identifier[uid] ) identifier[entry] . identifier[execute] () keyword[if] identifier[tag_id] : identifier[entry2] = identifier[TabPost2Tag] . identifier[update] ( identifier[par_id] = identifier[tag_id] [: literal[int] ]+ literal[string] , identifier[tag_id] = identifier[tag_id] , ). identifier[where] ( identifier[TabPost2Tag] . identifier[uid] == identifier[uid] ) identifier[entry2] . identifier[execute] () keyword[if] identifier[par_id] : identifier[entry2] = identifier[TabPost2Tag] . identifier[update] ( identifier[par_id] = identifier[par_id] ). identifier[where] ( identifier[TabPost2Tag] . identifier[uid] == identifier[uid] ) identifier[entry2] . identifier[execute] ()
def update_field(uid, post_id=None, tag_id=None, par_id=None): """ Update the field of post2tag. """ if post_id: entry = TabPost2Tag.update(post_id=post_id).where(TabPost2Tag.uid == uid) entry.execute() # depends on [control=['if'], data=[]] if tag_id: entry2 = TabPost2Tag.update(par_id=tag_id[:2] + '00', tag_id=tag_id).where(TabPost2Tag.uid == uid) entry2.execute() # depends on [control=['if'], data=[]] if par_id: entry2 = TabPost2Tag.update(par_id=par_id).where(TabPost2Tag.uid == uid) entry2.execute() # depends on [control=['if'], data=[]]
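A hedged usage sketch; the uid and tag values below are fabricated, and TabPost2Tag is the peewee-style model referenced above.

# re-point the relation at tag '0203'; par_id is derived as '0200' automatically
update_field('f47ac10b', tag_id='0203')

# change only the parent id
update_field('f47ac10b', par_id='0100')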
def unload(action, action_space, unload_id): """Unload a unit from a transport/bunker/nydus/etc.""" del action_space action.action_ui.cargo_panel.unit_index = unload_id
def function[unload, parameter[action, action_space, unload_id]]: constant[Unload a unit from a transport/bunker/nydus/etc.] <ast.Delete object at 0x7da18f00c3d0> name[action].action_ui.cargo_panel.unit_index assign[=] name[unload_id]
keyword[def] identifier[unload] ( identifier[action] , identifier[action_space] , identifier[unload_id] ): literal[string] keyword[del] identifier[action_space] identifier[action] . identifier[action_ui] . identifier[cargo_panel] . identifier[unit_index] = identifier[unload_id]
def unload(action, action_space, unload_id): """Unload a unit from a transport/bunker/nydus/etc.""" del action_space action.action_ui.cargo_panel.unit_index = unload_id
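Because unload only mutates one nested attribute, it can be exercised with stand-ins; the classes below are stubs, not the real pysc2 action objects.

class _CargoPanel:
    unit_index = 0

class _ActionUI:
    def __init__(self):
        self.cargo_panel = _CargoPanel()

class _Action:
    def __init__(self):
        self.action_ui = _ActionUI()

act = _Action()
unload(act, action_space=None, unload_id=3)  # action_space is deliberately discarded
assert act.action_ui.cargo_panel.unit_index == 3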
def load_config_file(self, suppress_errors=True): """Load the config file. By default, errors in loading config are handled, and a warning printed on screen. For testing, the suppress_errors option is set to False, so errors will make tests fail. """ self.log.debug("Searching path %s for config files", self.config_file_paths) base_config = 'ipython_config.py' self.log.debug("Attempting to load config file: %s" % base_config) try: Application.load_config_file( self, base_config, path=self.config_file_paths ) except ConfigFileNotFound: # ignore errors loading parent self.log.debug("Config file %s not found", base_config) pass if self.config_file_name == base_config: # don't load secondary config return self.log.debug("Attempting to load config file: %s" % self.config_file_name) try: Application.load_config_file( self, self.config_file_name, path=self.config_file_paths ) except ConfigFileNotFound: # Only warn if the default config file was NOT being used. if self.config_file_specified: msg = self.log.warn else: msg = self.log.debug msg("Config file not found, skipping: %s", self.config_file_name) except: # For testing purposes. if not suppress_errors: raise self.log.warn("Error loading config file: %s" % self.config_file_name, exc_info=True)
def function[load_config_file, parameter[self, suppress_errors]]: constant[Load the config file. By default, errors in loading config are handled, and a warning printed on screen. For testing, the suppress_errors option is set to False, so errors will make tests fail. ] call[name[self].log.debug, parameter[constant[Searching path %s for config files], name[self].config_file_paths]] variable[base_config] assign[=] constant[ipython_config.py] call[name[self].log.debug, parameter[binary_operation[constant[Attempting to load config file: %s] <ast.Mod object at 0x7da2590d6920> name[base_config]]]] <ast.Try object at 0x7da18ede4220> if compare[name[self].config_file_name equal[==] name[base_config]] begin[:] return[None] call[name[self].log.debug, parameter[binary_operation[constant[Attempting to load config file: %s] <ast.Mod object at 0x7da2590d6920> name[self].config_file_name]]] <ast.Try object at 0x7da18ede6170>
keyword[def] identifier[load_config_file] ( identifier[self] , identifier[suppress_errors] = keyword[True] ): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[config_file_paths] ) identifier[base_config] = literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] % identifier[base_config] ) keyword[try] : identifier[Application] . identifier[load_config_file] ( identifier[self] , identifier[base_config] , identifier[path] = identifier[self] . identifier[config_file_paths] ) keyword[except] identifier[ConfigFileNotFound] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[base_config] ) keyword[pass] keyword[if] identifier[self] . identifier[config_file_name] == identifier[base_config] : keyword[return] identifier[self] . identifier[log] . identifier[debug] ( literal[string] % identifier[self] . identifier[config_file_name] ) keyword[try] : identifier[Application] . identifier[load_config_file] ( identifier[self] , identifier[self] . identifier[config_file_name] , identifier[path] = identifier[self] . identifier[config_file_paths] ) keyword[except] identifier[ConfigFileNotFound] : keyword[if] identifier[self] . identifier[config_file_specified] : identifier[msg] = identifier[self] . identifier[log] . identifier[warn] keyword[else] : identifier[msg] = identifier[self] . identifier[log] . identifier[debug] identifier[msg] ( literal[string] , identifier[self] . identifier[config_file_name] ) keyword[except] : keyword[if] keyword[not] identifier[suppress_errors] : keyword[raise] identifier[self] . identifier[log] . identifier[warn] ( literal[string] % identifier[self] . identifier[config_file_name] , identifier[exc_info] = keyword[True] )
def load_config_file(self, suppress_errors=True): """Load the config file. By default, errors in loading config are handled, and a warning printed on screen. For testing, the suppress_errors option is set to False, so errors will make tests fail. """ self.log.debug('Searching path %s for config files', self.config_file_paths) base_config = 'ipython_config.py' self.log.debug('Attempting to load config file: %s' % base_config) try: Application.load_config_file(self, base_config, path=self.config_file_paths) # depends on [control=['try'], data=[]] except ConfigFileNotFound: # ignore errors loading parent self.log.debug('Config file %s not found', base_config) pass # depends on [control=['except'], data=[]] if self.config_file_name == base_config: # don't load secondary config return # depends on [control=['if'], data=[]] self.log.debug('Attempting to load config file: %s' % self.config_file_name) try: Application.load_config_file(self, self.config_file_name, path=self.config_file_paths) # depends on [control=['try'], data=[]] except ConfigFileNotFound: # Only warn if the default config file was NOT being used. if self.config_file_specified: msg = self.log.warn # depends on [control=['if'], data=[]] else: msg = self.log.debug msg('Config file not found, skipping: %s', self.config_file_name) # depends on [control=['except'], data=[]] except: # For testing purposes. if not suppress_errors: raise # depends on [control=['if'], data=[]] self.log.warn('Error loading config file: %s' % self.config_file_name, exc_info=True) # depends on [control=['except'], data=[]]
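A sketch of the testing hook the docstring mentions; MyApp is a hypothetical Application subclass, and the surrounding traitlets machinery (log, config_file_paths, ...) is assumed to be in place.

class MyApp(Application):  # hypothetical subclass, for illustration only
    config_file_name = 'ipython_config.py'
    config_file_specified = False

app = MyApp()
app.load_config_file(suppress_errors=False)  # in tests, a broken config now raises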
def _gen_labels_columns(self, list_columns): """ Auto generates pretty label_columns from list of columns """ for col in list_columns: if not self.label_columns.get(col): self.label_columns[col] = self._prettify_column(col)
def function[_gen_labels_columns, parameter[self, list_columns]]: constant[ Auto generates pretty label_columns from list of columns ] for taget[name[col]] in starred[name[list_columns]] begin[:] if <ast.UnaryOp object at 0x7da207f02560> begin[:] call[name[self].label_columns][name[col]] assign[=] call[name[self]._prettify_column, parameter[name[col]]]
keyword[def] identifier[_gen_labels_columns] ( identifier[self] , identifier[list_columns] ): literal[string] keyword[for] identifier[col] keyword[in] identifier[list_columns] : keyword[if] keyword[not] identifier[self] . identifier[label_columns] . identifier[get] ( identifier[col] ): identifier[self] . identifier[label_columns] [ identifier[col] ]= identifier[self] . identifier[_prettify_column] ( identifier[col] )
def _gen_labels_columns(self, list_columns): """ Auto generates pretty label_columns from list of columns """ for col in list_columns: if not self.label_columns.get(col): self.label_columns[col] = self._prettify_column(col) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['col']]
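A self-contained sketch of the effect; the prettifier behavior ('user_name' -> 'User Name') is an assumption about _prettify_column, which is not shown above.

class LabelDemo:
    def __init__(self):
        self.label_columns = {}

    def _prettify_column(self, col):
        # assumed prettifier: underscores to spaces, then title case
        return col.replace('_', ' ').title()

    _gen_labels_columns = _gen_labels_columns  # reuse the function defined above

demo = LabelDemo()
demo._gen_labels_columns(['user_name', 'created_on'])
# demo.label_columns == {'user_name': 'User Name', 'created_on': 'Created On'}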
def allocate_hosting_port(self, context, router_id, port_db, network_type,
                          hosting_device_id):
    """Allocates a hosting port for a logical port.

    We create a hosting port for the router port
    """
    l3admin_tenant_id = self._dev_mgr.l3_tenant_id()
    hostingport_name = 'hostingport_' + port_db['id'][:8]
    p_spec = {'port': {
        'tenant_id': l3admin_tenant_id,
        'admin_state_up': True,
        'name': hostingport_name,
        'network_id': port_db['network_id'],
        'mac_address': bc.constants.ATTR_NOT_SPECIFIED,
        'fixed_ips': [],
        'device_id': '',
        'device_owner': '',
        'port_security_enabled': False}}
    try:
        hosting_port = self._core_plugin.create_port(context, p_spec)
    except n_exc.NeutronException as e:
        LOG.error('Error %s when creating hosting port. '
                  'Cleaning up.', e)
        self.delete_hosting_device_resources(
            context, l3admin_tenant_id, hosting_port)
        hosting_port = None
    finally:
        if hosting_port:
            return {'allocated_port_id': hosting_port['id'],
                    'allocated_vlan': None}
        else:
            return None
def function[allocate_hosting_port, parameter[self, context, router_id, port_db, network_type, hosting_device_id]]: constant[Allocates a hosting port for a logical port. We create a hosting port for the router port ] variable[l3admin_tenant_id] assign[=] call[name[self]._dev_mgr.l3_tenant_id, parameter[]] variable[hostingport_name] assign[=] binary_operation[constant[hostingport_] + call[call[name[port_db]][constant[id]]][<ast.Slice object at 0x7da2041d8190>]] variable[p_spec] assign[=] dictionary[[<ast.Constant object at 0x7da2041dbc40>], [<ast.Dict object at 0x7da2041d8af0>]] <ast.Try object at 0x7da2041d8550>
keyword[def] identifier[allocate_hosting_port] ( identifier[self] , identifier[context] , identifier[router_id] , identifier[port_db] , identifier[network_type] , identifier[hosting_device_id] ): literal[string] identifier[l3admin_tenant_id] = identifier[self] . identifier[_dev_mgr] . identifier[l3_tenant_id] () identifier[hostingport_name] = literal[string] + identifier[port_db] [ literal[string] ][: literal[int] ] identifier[p_spec] ={ literal[string] :{ literal[string] : identifier[l3admin_tenant_id] , literal[string] : keyword[True] , literal[string] : identifier[hostingport_name] , literal[string] : identifier[port_db] [ literal[string] ], literal[string] : identifier[bc] . identifier[constants] . identifier[ATTR_NOT_SPECIFIED] , literal[string] :[], literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[False] }} keyword[try] : identifier[hosting_port] = identifier[self] . identifier[_core_plugin] . identifier[create_port] ( identifier[context] , identifier[p_spec] ) keyword[except] identifier[n_exc] . identifier[NeutronException] keyword[as] identifier[e] : identifier[LOG] . identifier[error] ( literal[string] literal[string] , identifier[e] ) identifier[self] . identifier[delete_hosting_device_resources] ( identifier[context] , identifier[l3admin_tenant_id] , identifier[hosting_port] ) identifier[hosting_port] = keyword[None] keyword[finally] : keyword[if] identifier[hosting_port] : keyword[return] { literal[string] : identifier[hosting_port] [ literal[string] ], literal[string] : keyword[None] } keyword[else] : keyword[return] keyword[None]
def allocate_hosting_port(self, context, router_id, port_db, network_type, hosting_device_id): """Allocates a hosting port for a logical port. We create a hosting port for the router port """ l3admin_tenant_id = self._dev_mgr.l3_tenant_id() hostingport_name = 'hostingport_' + port_db['id'][:8] p_spec = {'port': {'tenant_id': l3admin_tenant_id, 'admin_state_up': True, 'name': hostingport_name, 'network_id': port_db['network_id'], 'mac_address': bc.constants.ATTR_NOT_SPECIFIED, 'fixed_ips': [], 'device_id': '', 'device_owner': '', 'port_security_enabled': False}} try: hosting_port = self._core_plugin.create_port(context, p_spec) # depends on [control=['try'], data=[]] except n_exc.NeutronException as e: LOG.error('Error %s when creating hosting portCleaning up.', e) self.delete_hosting_device_resources(context, l3admin_tenant_id, hosting_port) hosting_port = None # depends on [control=['except'], data=['e']] finally: if hosting_port: return {'allocated_port_id': hosting_port['id'], 'allocated_vlan': None} # depends on [control=['if'], data=[]] else: return None
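The caller-facing contract, sketched; driver, context and the ids are fabricated.

result = driver.allocate_hosting_port(
    context, router_id='r1',
    port_db={'id': 'abcdef12-0000-0000-0000-000000000000', 'network_id': 'net1'},
    network_type='vlan', hosting_device_id='hd1')
# success -> {'allocated_port_id': '<new port uuid>', 'allocated_vlan': None}
# failure -> the NeutronException is logged, resources are cleaned up, None is returned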
def can_mark_block_complete_on_view(self, block): """ Returns True if the xblock can be marked complete on view. This is true of any non-customized, non-scorable, completable block. """ return ( XBlockCompletionMode.get_mode(block) == XBlockCompletionMode.COMPLETABLE and not getattr(block, 'has_custom_completion', False) and not getattr(block, 'has_score', False) )
def function[can_mark_block_complete_on_view, parameter[self, block]]: constant[ Returns True if the xblock can be marked complete on view. This is true of any non-customized, non-scorable, completable block. ] return[<ast.BoolOp object at 0x7da20c6c54e0>]
keyword[def] identifier[can_mark_block_complete_on_view] ( identifier[self] , identifier[block] ): literal[string] keyword[return] ( identifier[XBlockCompletionMode] . identifier[get_mode] ( identifier[block] )== identifier[XBlockCompletionMode] . identifier[COMPLETABLE] keyword[and] keyword[not] identifier[getattr] ( identifier[block] , literal[string] , keyword[False] ) keyword[and] keyword[not] identifier[getattr] ( identifier[block] , literal[string] , keyword[False] ) )
def can_mark_block_complete_on_view(self, block): """ Returns True if the xblock can be marked complete on view. This is true of any non-customized, non-scorable, completable block. """ return XBlockCompletionMode.get_mode(block) == XBlockCompletionMode.COMPLETABLE and (not getattr(block, 'has_custom_completion', False)) and (not getattr(block, 'has_score', False))
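A truth-table sketch; the stub's attributes mirror the getattr probes above, and XBlockCompletionMode.get_mode is assumed to classify it as COMPLETABLE.

class _PlainHtmlBlock:
    # neither scorable nor custom-completable, so completion-on-view applies
    has_custom_completion = False
    has_score = False

# _PlainHtmlBlock               -> True  (completable, unscored, no custom completion)
# a scored block (has_score)    -> False (completion is driven by scoring)
# a custom block (e.g. video)   -> False (the block reports its own completion)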
def assign_moving_mean_variance( mean_var, variance_var, value, decay, name=None): """Compute exponentially weighted moving {mean,variance} of a streaming value. The `value` updated exponentially weighted moving `mean_var` and `variance_var` are given by the following recurrence relations: ```python variance_var = decay * (variance_var + (1 - decay) * (value - mean_var)**2) mean_var = decay * mean_var + (1 - decay) * value ``` Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses the lag-1 mean. For derivation justification, see [Finch (2009; Eq. 143)][1]. Parameterization: Finch's `alpha` is `1 - decay`. Args: mean_var: `float`-like `Variable` representing the exponentially weighted moving mean. Same shape as `variance_var` and `value`. variance_var: `float`-like `Variable` representing the exponentially weighted moving variance. Same shape as `mean_var` and `value`. value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`. decay: A `float`-like `Tensor`. The moving mean decay. Typically close to `1.`, e.g., `0.999`. name: Optional name of the returned operation. Returns: mean_var: `Variable` representing the `value`-updated exponentially weighted moving mean. variance_var: `Variable` representing the `value`-updated exponentially weighted moving variance. Raises: TypeError: if `mean_var` does not have float type `dtype`. TypeError: if `mean_var`, `variance_var`, `value`, `decay` have different `base_dtype`. #### References [1]: Tony Finch. Incremental calculation of weighted mean and variance. _Technical Report_, 2009. http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf """ with tf.compat.v1.name_scope(name, "assign_moving_mean_variance", [variance_var, mean_var, value, decay]): with tf.compat.v1.colocate_with(variance_var): with tf.compat.v1.colocate_with(mean_var): base_dtype = mean_var.dtype.base_dtype if not base_dtype.is_floating: raise TypeError( "mean_var.base_dtype({}) does not have float type " "`dtype`.".format(base_dtype.name)) if base_dtype != variance_var.dtype.base_dtype: raise TypeError( "mean_var.base_dtype({}) != variance_var.base_dtype({})".format( base_dtype.name, variance_var.dtype.base_dtype.name)) value = tf.convert_to_tensor( value=value, dtype=base_dtype, name="value") decay = tf.convert_to_tensor( value=decay, dtype=base_dtype, name="decay") delta = value - mean_var with tf.control_dependencies([delta]): # We want mean_{t+1} = decay * mean_t + (1. - decay) * value # We compute mean += decay * mean_t - mean_t + (1. - decay) * value = # = (1. - decay) * (value - mean_t) mean_var = mean_var.assign_add((1. - decay) * delta) # We want variance_{t+1} = decay * (variance_t + # + (1 - decay) * (value - mean_var)**2). # We compute variance -= variance_t - decay * (variance_t + # + (1 - decay) * (value - mean_var)**2) = # = (1 - decay) * variance_t # - decay * (1 - decay) * (value - mean_var)**2 # = (1 - decay) * (variance_t - decay * (value - mean_var)**2). variance_var = variance_var.assign_sub( (1. - decay) * (variance_var - decay * tf.square(delta))) return mean_var, variance_var
def function[assign_moving_mean_variance, parameter[mean_var, variance_var, value, decay, name]]: constant[Compute exponentially weighted moving {mean,variance} of a streaming value. The `value` updated exponentially weighted moving `mean_var` and `variance_var` are given by the following recurrence relations: ```python variance_var = decay * (variance_var + (1 - decay) * (value - mean_var)**2) mean_var = decay * mean_var + (1 - decay) * value ``` Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses the lag-1 mean. For derivation justification, see [Finch (2009; Eq. 143)][1]. Parameterization: Finch's `alpha` is `1 - decay`. Args: mean_var: `float`-like `Variable` representing the exponentially weighted moving mean. Same shape as `variance_var` and `value`. variance_var: `float`-like `Variable` representing the exponentially weighted moving variance. Same shape as `mean_var` and `value`. value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`. decay: A `float`-like `Tensor`. The moving mean decay. Typically close to `1.`, e.g., `0.999`. name: Optional name of the returned operation. Returns: mean_var: `Variable` representing the `value`-updated exponentially weighted moving mean. variance_var: `Variable` representing the `value`-updated exponentially weighted moving variance. Raises: TypeError: if `mean_var` does not have float type `dtype`. TypeError: if `mean_var`, `variance_var`, `value`, `decay` have different `base_dtype`. #### References [1]: Tony Finch. Incremental calculation of weighted mean and variance. _Technical Report_, 2009. http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf ] with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[assign_moving_mean_variance], list[[<ast.Name object at 0x7da1b03e37c0>, <ast.Name object at 0x7da1b03e2380>, <ast.Name object at 0x7da1b03e3010>, <ast.Name object at 0x7da1b03e3280>]]]] begin[:] with call[name[tf].compat.v1.colocate_with, parameter[name[variance_var]]] begin[:] with call[name[tf].compat.v1.colocate_with, parameter[name[mean_var]]] begin[:] variable[base_dtype] assign[=] name[mean_var].dtype.base_dtype if <ast.UnaryOp object at 0x7da1b0529720> begin[:] <ast.Raise object at 0x7da1b052ae60> if compare[name[base_dtype] not_equal[!=] name[variance_var].dtype.base_dtype] begin[:] <ast.Raise object at 0x7da1b0529fc0> variable[value] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[decay] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[delta] assign[=] binary_operation[name[value] - name[mean_var]] with call[name[tf].control_dependencies, parameter[list[[<ast.Name object at 0x7da1b0373c70>]]]] begin[:] variable[mean_var] assign[=] call[name[mean_var].assign_add, parameter[binary_operation[binary_operation[constant[1.0] - name[decay]] * name[delta]]]] variable[variance_var] assign[=] call[name[variance_var].assign_sub, parameter[binary_operation[binary_operation[constant[1.0] - name[decay]] * binary_operation[name[variance_var] - binary_operation[name[decay] * call[name[tf].square, parameter[name[delta]]]]]]]] return[tuple[[<ast.Name object at 0x7da1b03e23e0>, <ast.Name object at 0x7da1b03e24d0>]]]
keyword[def] identifier[assign_moving_mean_variance] ( identifier[mean_var] , identifier[variance_var] , identifier[value] , identifier[decay] , identifier[name] = keyword[None] ): literal[string] keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name] , literal[string] , [ identifier[variance_var] , identifier[mean_var] , identifier[value] , identifier[decay] ]): keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[colocate_with] ( identifier[variance_var] ): keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[colocate_with] ( identifier[mean_var] ): identifier[base_dtype] = identifier[mean_var] . identifier[dtype] . identifier[base_dtype] keyword[if] keyword[not] identifier[base_dtype] . identifier[is_floating] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[base_dtype] . identifier[name] )) keyword[if] identifier[base_dtype] != identifier[variance_var] . identifier[dtype] . identifier[base_dtype] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[base_dtype] . identifier[name] , identifier[variance_var] . identifier[dtype] . identifier[base_dtype] . identifier[name] )) identifier[value] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[value] , identifier[dtype] = identifier[base_dtype] , identifier[name] = literal[string] ) identifier[decay] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[decay] , identifier[dtype] = identifier[base_dtype] , identifier[name] = literal[string] ) identifier[delta] = identifier[value] - identifier[mean_var] keyword[with] identifier[tf] . identifier[control_dependencies] ([ identifier[delta] ]): identifier[mean_var] = identifier[mean_var] . identifier[assign_add] (( literal[int] - identifier[decay] )* identifier[delta] ) identifier[variance_var] = identifier[variance_var] . identifier[assign_sub] ( ( literal[int] - identifier[decay] )*( identifier[variance_var] - identifier[decay] * identifier[tf] . identifier[square] ( identifier[delta] ))) keyword[return] identifier[mean_var] , identifier[variance_var]
def assign_moving_mean_variance(mean_var, variance_var, value, decay, name=None): """Compute exponentially weighted moving {mean,variance} of a streaming value. The `value` updated exponentially weighted moving `mean_var` and `variance_var` are given by the following recurrence relations: ```python variance_var = decay * (variance_var + (1 - decay) * (value - mean_var)**2) mean_var = decay * mean_var + (1 - decay) * value ``` Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses the lag-1 mean. For derivation justification, see [Finch (2009; Eq. 143)][1]. Parameterization: Finch's `alpha` is `1 - decay`. Args: mean_var: `float`-like `Variable` representing the exponentially weighted moving mean. Same shape as `variance_var` and `value`. variance_var: `float`-like `Variable` representing the exponentially weighted moving variance. Same shape as `mean_var` and `value`. value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`. decay: A `float`-like `Tensor`. The moving mean decay. Typically close to `1.`, e.g., `0.999`. name: Optional name of the returned operation. Returns: mean_var: `Variable` representing the `value`-updated exponentially weighted moving mean. variance_var: `Variable` representing the `value`-updated exponentially weighted moving variance. Raises: TypeError: if `mean_var` does not have float type `dtype`. TypeError: if `mean_var`, `variance_var`, `value`, `decay` have different `base_dtype`. #### References [1]: Tony Finch. Incremental calculation of weighted mean and variance. _Technical Report_, 2009. http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf """ with tf.compat.v1.name_scope(name, 'assign_moving_mean_variance', [variance_var, mean_var, value, decay]): with tf.compat.v1.colocate_with(variance_var): with tf.compat.v1.colocate_with(mean_var): base_dtype = mean_var.dtype.base_dtype if not base_dtype.is_floating: raise TypeError('mean_var.base_dtype({}) does not have float type `dtype`.'.format(base_dtype.name)) # depends on [control=['if'], data=[]] if base_dtype != variance_var.dtype.base_dtype: raise TypeError('mean_var.base_dtype({}) != variance_var.base_dtype({})'.format(base_dtype.name, variance_var.dtype.base_dtype.name)) # depends on [control=['if'], data=['base_dtype']] value = tf.convert_to_tensor(value=value, dtype=base_dtype, name='value') decay = tf.convert_to_tensor(value=decay, dtype=base_dtype, name='decay') delta = value - mean_var with tf.control_dependencies([delta]): # We want mean_{t+1} = decay * mean_t + (1. - decay) * value # We compute mean += decay * mean_t - mean_t + (1. - decay) * value = # = (1. - decay) * (value - mean_t) mean_var = mean_var.assign_add((1.0 - decay) * delta) # We want variance_{t+1} = decay * (variance_t + # + (1 - decay) * (value - mean_var)**2). # We compute variance -= variance_t - decay * (variance_t + # + (1 - decay) * (value - mean_var)**2) = # = (1 - decay) * variance_t # - decay * (1 - decay) * (value - mean_var)**2 # = (1 - decay) * (variance_t - decay * (value - mean_var)**2). variance_var = variance_var.assign_sub((1.0 - decay) * (variance_var - decay * tf.square(delta))) # depends on [control=['with'], data=[]] return (mean_var, variance_var) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]]
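A plain-NumPy restatement of the two recurrences, useful for sanity-checking the TF version; note the variance update deliberately uses the lag-1 mean, matching the docstring.

import numpy as np

def moving_mean_variance_step(mean, variance, value, decay):
    # delta is measured against the *previous* mean (lag-1)
    delta = value - mean
    variance = decay * (variance + (1. - decay) * delta ** 2)
    mean = decay * mean + (1. - decay) * value
    return mean, variance

mean, var = 0.0, 1.0
for v in np.random.standard_normal(1000):
    mean, var = moving_mean_variance_step(mean, var, v, decay=0.999)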
def cache_get(key):
    """
    Wrapper for ``cache.get``. The expiry time for the cache entry
    is stored with the entry. If the expiry time has passed, put the
    stale entry back into cache, and don't return it to trigger a
    fake cache miss.
    """
    packed = cache.get(_hashed_key(key))
    if packed is None:
        return None
    value, refresh_time, refreshed = packed
    if (time() > refresh_time) and not refreshed:
        cache_set(key, value, settings.CACHE_SET_DELAY_SECONDS, True)
        return None
    return value
def function[cache_get, parameter[key]]: constant[
    Wrapper for ``cache.get``. The expiry time for the cache entry
    is stored with the entry. If the expiry time has passed, put the
    stale entry back into cache, and don't return it to trigger a
    fake cache miss.
    ]
variable[packed] assign[=] call[name[cache].get, parameter[call[name[_hashed_key], parameter[name[key]]]]]
if compare[name[packed] is constant[None]] begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da204566740> assign[=] name[packed]
if <ast.BoolOp object at 0x7da1b23440d0> begin[:]
call[name[cache_set], parameter[name[key], name[value], name[settings].CACHE_SET_DELAY_SECONDS, constant[True]]]
return[constant[None]]
return[name[value]]
keyword[def] identifier[cache_get] ( identifier[key] ): literal[string] identifier[packed] = identifier[cache] . identifier[get] ( identifier[_hashed_key] ( identifier[key] )) keyword[if] identifier[packed] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[value] , identifier[refresh_time] , identifier[refreshed] = identifier[packed] keyword[if] ( identifier[time] ()> identifier[refresh_time] ) keyword[and] keyword[not] identifier[refreshed] : identifier[cache_set] ( identifier[key] , identifier[value] , identifier[settings] . identifier[CACHE_SET_DELAY_SECONDS] , keyword[True] ) keyword[return] keyword[None] keyword[return] identifier[value]
def cache_get(key):
    """
    Wrapper for ``cache.get``. The expiry time for the cache entry
    is stored with the entry. If the expiry time has passed, put the
    stale entry back into cache, and don't return it to trigger a
    fake cache miss.
    """
    packed = cache.get(_hashed_key(key))
    if packed is None:
        return None # depends on [control=['if'], data=[]]
    (value, refresh_time, refreshed) = packed
    if time() > refresh_time and (not refreshed):
        cache_set(key, value, settings.CACHE_SET_DELAY_SECONDS, True)
        return None # depends on [control=['if'], data=[]]
    return value
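The intended caller pattern, sketched; compute() is a placeholder, and the companion cache_set is assumed to store (value, expiry, refreshed) tuples in the shape this wrapper unpacks.

value = cache_get('expensive:key')
if value is None:
    # a true miss, or a stale entry that was just re-queued for the grace window
    value = compute()  # placeholder for the expensive work
    cache_set('expensive:key', value, timeout=300)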
def process_event(self, name, subject, data): """ Process a single event. :param name: :param subject: :param data: """ method_mapping = Registry.get_event(name) if not method_mapping: log.info('@{}.process_event no subscriber for event `{}`' .format(self.__class__.__name__, name)) return for event, methods in method_mapping.items(): event_instance = event(subject, data) log.info('@{}.process_event `{}` for subject `{}`'.format( self.__class__.__name__, event_instance.__class__.__name__, subject )) for method in methods: with self._context_manager: log.info('>> Calling subscriber `{}`' .format(method.__name__)) method(event_instance)
def function[process_event, parameter[self, name, subject, data]]: constant[ Process a single event. :param name: :param subject: :param data: ] variable[method_mapping] assign[=] call[name[Registry].get_event, parameter[name[name]]] if <ast.UnaryOp object at 0x7da18dc05e70> begin[:] call[name[log].info, parameter[call[constant[@{}.process_event no subscriber for event `{}`].format, parameter[name[self].__class__.__name__, name[name]]]]] return[None] for taget[tuple[[<ast.Name object at 0x7da18dc05de0>, <ast.Name object at 0x7da18dc07b50>]]] in starred[call[name[method_mapping].items, parameter[]]] begin[:] variable[event_instance] assign[=] call[name[event], parameter[name[subject], name[data]]] call[name[log].info, parameter[call[constant[@{}.process_event `{}` for subject `{}`].format, parameter[name[self].__class__.__name__, name[event_instance].__class__.__name__, name[subject]]]]] for taget[name[method]] in starred[name[methods]] begin[:] with name[self]._context_manager begin[:] call[name[log].info, parameter[call[constant[>> Calling subscriber `{}`].format, parameter[name[method].__name__]]]] call[name[method], parameter[name[event_instance]]]
keyword[def] identifier[process_event] ( identifier[self] , identifier[name] , identifier[subject] , identifier[data] ): literal[string] identifier[method_mapping] = identifier[Registry] . identifier[get_event] ( identifier[name] ) keyword[if] keyword[not] identifier[method_mapping] : identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[name] )) keyword[return] keyword[for] identifier[event] , identifier[methods] keyword[in] identifier[method_mapping] . identifier[items] (): identifier[event_instance] = identifier[event] ( identifier[subject] , identifier[data] ) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[event_instance] . identifier[__class__] . identifier[__name__] , identifier[subject] )) keyword[for] identifier[method] keyword[in] identifier[methods] : keyword[with] identifier[self] . identifier[_context_manager] : identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[method] . identifier[__name__] )) identifier[method] ( identifier[event_instance] )
def process_event(self, name, subject, data): """ Process a single event. :param name: :param subject: :param data: """ method_mapping = Registry.get_event(name) if not method_mapping: log.info('@{}.process_event no subscriber for event `{}`'.format(self.__class__.__name__, name)) return # depends on [control=['if'], data=[]] for (event, methods) in method_mapping.items(): event_instance = event(subject, data) log.info('@{}.process_event `{}` for subject `{}`'.format(self.__class__.__name__, event_instance.__class__.__name__, subject)) for method in methods: with self._context_manager: log.info('>> Calling subscriber `{}`'.format(method.__name__)) method(event_instance) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['method']] # depends on [control=['for'], data=[]]
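An illustrative wiring, assuming Registry.get_event returns a {EventClass: [subscribers]} mapping in the shape the loop above expects; every name here is made up.

class UserCreated:
    def __init__(self, subject, data):
        self.subject, self.data = subject, data

def on_user_created(event):
    # hypothetical subscriber; receives the instantiated event object
    print(event.subject, event.data)

# If Registry.get_event('user.created') returned {UserCreated: [on_user_created]},
# this call would build UserCreated('user:42', {...}) and invoke the subscriber:
dispatcher.process_event('user.created', 'user:42', {'email': 'a@example.com'})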
def get(self): """Reloads the check with its current values.""" new = self.manager.get(self) if new: self._add_details(new._info)
def function[get, parameter[self]]: constant[Reloads the check with its current values.] variable[new] assign[=] call[name[self].manager.get, parameter[name[self]]] if name[new] begin[:] call[name[self]._add_details, parameter[name[new]._info]]
keyword[def] identifier[get] ( identifier[self] ): literal[string] identifier[new] = identifier[self] . identifier[manager] . identifier[get] ( identifier[self] ) keyword[if] identifier[new] : identifier[self] . identifier[_add_details] ( identifier[new] . identifier[_info] )
def get(self): """Reloads the check with its current values.""" new = self.manager.get(self) if new: self._add_details(new._info) # depends on [control=['if'], data=[]]
def get_assessment_part_item_design_session_for_bank(self, bank_id, proxy): """Gets the ``OsidSession`` associated with the assessment part item design service for the given bank. arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` return: (osid.assessment.authoring.AssessmentPartItemDesignSession) - an ``AssessmentPartItemDesignSession`` raise: NotFound - no ``Bank`` found by the given ``Id`` raise: NullArgument - ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_assessment_part_item_design()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_assessment_part_item_design()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_assessment_part_lookup(): # This is kludgy, but only until Tom fixes spec raise errors.Unimplemented() # Also include check to see if the catalog Id is found otherwise raise errors.NotFound # pylint: disable=no-member return sessions.AssessmentPartItemDesignSession(bank_id, proxy=proxy, runtime=self._runtime)
def function[get_assessment_part_item_design_session_for_bank, parameter[self, bank_id, proxy]]: constant[Gets the ``OsidSession`` associated with the assessment part item design service for the given bank. arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` return: (osid.assessment.authoring.AssessmentPartItemDesignSession) - an ``AssessmentPartItemDesignSession`` raise: NotFound - no ``Bank`` found by the given ``Id`` raise: NullArgument - ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_assessment_part_item_design()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_assessment_part_item_design()`` and ``supports_visible_federation()`` are ``true``.* ] if <ast.UnaryOp object at 0x7da1b0a22fe0> begin[:] <ast.Raise object at 0x7da1b0a22d10> return[call[name[sessions].AssessmentPartItemDesignSession, parameter[name[bank_id]]]]
keyword[def] identifier[get_assessment_part_item_design_session_for_bank] ( identifier[self] , identifier[bank_id] , identifier[proxy] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[supports_assessment_part_lookup] (): keyword[raise] identifier[errors] . identifier[Unimplemented] () keyword[return] identifier[sessions] . identifier[AssessmentPartItemDesignSession] ( identifier[bank_id] , identifier[proxy] = identifier[proxy] , identifier[runtime] = identifier[self] . identifier[_runtime] )
def get_assessment_part_item_design_session_for_bank(self, bank_id, proxy): """Gets the ``OsidSession`` associated with the assessment part item design service for the given bank. arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` return: (osid.assessment.authoring.AssessmentPartItemDesignSession) - an ``AssessmentPartItemDesignSession`` raise: NotFound - no ``Bank`` found by the given ``Id`` raise: NullArgument - ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_assessment_part_item_design()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_assessment_part_item_design()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_assessment_part_lookup(): # This is kludgy, but only until Tom fixes spec raise errors.Unimplemented() # depends on [control=['if'], data=[]] # Also include check to see if the catalog Id is found otherwise raise errors.NotFound # pylint: disable=no-member return sessions.AssessmentPartItemDesignSession(bank_id, proxy=proxy, runtime=self._runtime)
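Caller-side sketch; manager, bank_id and proxy are fabricated, and the capability check mirrors the kludgy guard inside the method.

if manager.supports_assessment_part_lookup():
    session = manager.get_assessment_part_item_design_session_for_bank(bank_id, proxy)
else:
    # the method itself would raise errors.Unimplemented() here
    session = None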
def on_shutdown(self, broker): """Request the slave gracefully shut itself down.""" LOG.debug('%r closing CALL_FUNCTION channel', self) self._send( mitogen.core.Message( src_id=mitogen.context_id, dst_id=self.remote_id, handle=mitogen.core.SHUTDOWN, ) )
def function[on_shutdown, parameter[self, broker]]: constant[Request the slave gracefully shut itself down.] call[name[LOG].debug, parameter[constant[%r closing CALL_FUNCTION channel], name[self]]] call[name[self]._send, parameter[call[name[mitogen].core.Message, parameter[]]]]
keyword[def] identifier[on_shutdown] ( identifier[self] , identifier[broker] ): literal[string] identifier[LOG] . identifier[debug] ( literal[string] , identifier[self] ) identifier[self] . identifier[_send] ( identifier[mitogen] . identifier[core] . identifier[Message] ( identifier[src_id] = identifier[mitogen] . identifier[context_id] , identifier[dst_id] = identifier[self] . identifier[remote_id] , identifier[handle] = identifier[mitogen] . identifier[core] . identifier[SHUTDOWN] , ) )
def on_shutdown(self, broker): """Request the slave gracefully shut itself down.""" LOG.debug('%r closing CALL_FUNCTION channel', self) self._send(mitogen.core.Message(src_id=mitogen.context_id, dst_id=self.remote_id, handle=mitogen.core.SHUTDOWN))
def ensure_bytes(str_or_bytes, encoding='utf-8', errors='strict'): """Ensures an input is bytes, encoding if it is a string. """ if isinstance(str_or_bytes, six.text_type): return str_or_bytes.encode(encoding, errors) return str_or_bytes
def function[ensure_bytes, parameter[str_or_bytes, encoding, errors]]: constant[Ensures an input is bytes, encoding if it is a string. ] if call[name[isinstance], parameter[name[str_or_bytes], name[six].text_type]] begin[:] return[call[name[str_or_bytes].encode, parameter[name[encoding], name[errors]]]] return[name[str_or_bytes]]
keyword[def] identifier[ensure_bytes] ( identifier[str_or_bytes] , identifier[encoding] = literal[string] , identifier[errors] = literal[string] ): literal[string] keyword[if] identifier[isinstance] ( identifier[str_or_bytes] , identifier[six] . identifier[text_type] ): keyword[return] identifier[str_or_bytes] . identifier[encode] ( identifier[encoding] , identifier[errors] ) keyword[return] identifier[str_or_bytes]
def ensure_bytes(str_or_bytes, encoding='utf-8', errors='strict'): """Ensures an input is bytes, encoding if it is a string. """ if isinstance(str_or_bytes, six.text_type): return str_or_bytes.encode(encoding, errors) # depends on [control=['if'], data=[]] return str_or_bytes
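Quick illustrative calls (under Python 3, where six.text_type is str):

ensure_bytes('héllo')                     # -> b'h\xc3\xa9llo' (utf-8 encoded)
ensure_bytes(b'already-bytes')            # -> b'already-bytes' (returned unchanged)
ensure_bytes('héllo', 'ascii', 'ignore')  # -> b'hllo' (lossy: non-ascii dropped)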
def fit(
        self,
        img_data,
        gamma=1.0,
        save_freq=-1,
        pic_freq=-1,
        n_epochs=100,
        batch_size=50,
        weight_decay=True,
        model_path='./VAEGAN_training_model/',
        img_path='./VAEGAN_training_images/',
        img_out_width=10,
        mirroring=False
):
    '''Fit the VAE/GAN model to the image data.

    Parameters
    ----------
    img_data : array-like shape (n_images, n_colors, image_width, image_height)
        Images used to fit VAE model.

    gamma [optional] : float
        Sets the multiplicative factor that weights the relative importance of
        reconstruction loss vs. ability to fool the discriminator. Higher weight
        means greater focus on faithful reconstruction.

    save_freq [optional] : int
        Sets the number of epochs to wait before saving the model and optimizer
        states. Also saves image files of randomly generated images using those
        states in a separate directory. Does not save if negative valued.

    pic_freq [optional] : int
        Sets the number of batches to wait before displaying a picture of
        randomly generated images using the current model state. Does not
        display if negative valued.

    n_epochs [optional] : int
        Gives the number of training epochs to run through for the fitting
        process.

    batch_size [optional] : int
        The size of the batch to use when training. Note: generally larger
        batch sizes will result in faster epoch iteration, but at the cost of
        lower granularity when updating the layer weights.

    weight_decay [optional] : bool
        Flag that controls adding weight decay hooks to the optimizer.

    model_path [optional] : str
        Directory where the model and optimizer state files will be saved.

    img_path [optional] : str
        Directory where the end of epoch training image files will be saved.

    img_out_width : int
        Controls the number of randomly generated images per row in the output
        saved images.

    mirroring [optional] : bool
        Controls whether images are randomly mirrored along the vertical axis
        with a .5 probability. Artificially increases image variance for the
        training set.
    '''
    width = img_out_width
    self.enc_opt.setup(self.enc)
    self.dec_opt.setup(self.dec)
    self.disc_opt.setup(self.disc)

    if weight_decay:
        self.enc_opt.add_hook(chainer.optimizer.WeightDecay(0.00001))
        self.dec_opt.add_hook(chainer.optimizer.WeightDecay(0.00001))
        self.disc_opt.add_hook(chainer.optimizer.WeightDecay(0.00001))

    n_data = img_data.shape[0]

    batch_iter = list(range(0, n_data, batch_size))
    n_batches = len(batch_iter)

    c_samples = np.random.standard_normal((width, self.latent_width)).astype(np.float32)
    save_counter = 0

    for epoch in range(1, n_epochs + 1):
        print('epoch: %i' % epoch)
        t1 = time.time()
        indexes = np.random.permutation(n_data)
        sum_l_enc = 0.
        sum_l_dec = 0.
        sum_l_disc = 0.

        sum_l_gan = 0.
        sum_l_like = 0.
        sum_l_prior = 0.

        count = 0
        for i in tqdm.tqdm(batch_iter):
            x = img_data[indexes[i: i + batch_size]]
            size = x.shape[0]
            if mirroring:
                for j in range(size):
                    if np.random.randint(2):
                        x[j, :, :, :] = x[j, :, :, ::-1]
            x_batch = Variable(x)
            zeros = Variable(np.zeros(size, dtype=np.int32))
            ones = Variable(np.ones(size, dtype=np.int32))

            if self.flag_gpu:
                x_batch.to_gpu()
                zeros.to_gpu()
                ones.to_gpu()

            kl_loss, dif_l, disc_rec, disc_batch, disc_samp = self._forward(x_batch)

            L_batch_GAN = F.softmax_cross_entropy(disc_batch, ones)
            L_rec_GAN = F.softmax_cross_entropy(disc_rec, zeros)
            L_samp_GAN = F.softmax_cross_entropy(disc_samp, zeros)

            l_gan = (L_batch_GAN + L_rec_GAN + L_samp_GAN)/3.
            l_like = dif_l
            l_prior = kl_loss

            enc_loss = self.kl_ratio*l_prior + l_like
            dec_loss = gamma*l_like - l_gan
            disc_loss = l_gan

            self.enc_opt.zero_grads()
            enc_loss.backward()
            self.enc_opt.update()

            self.dec_opt.zero_grads()
            dec_loss.backward()
            self.dec_opt.update()

            self.disc_opt.zero_grads()
            disc_loss.backward()
            self.disc_opt.update()

            sum_l_enc += enc_loss.data
            sum_l_dec += dec_loss.data
            sum_l_disc += disc_loss.data

            sum_l_gan += l_gan.data
            sum_l_like += l_like.data
            sum_l_prior += l_prior.data

            count += 1

            plot_data = img_data[indexes[:width]]

            if pic_freq > 0:
                assert type(pic_freq) == int, "pic_freq must be an integer."
                if count % pic_freq == 0:
                    fig = self._plot_img(
                        plot_data,
                        c_samples,
                        img_path=img_path,
                        epoch=epoch
                    )
                    display(fig)

        if save_freq > 0:
            save_counter += 1
            assert type(save_freq) == int, "save_freq must be an integer."
            if epoch % save_freq == 0:
                name = "vaegan_epoch%s" % str(epoch)
                if save_counter == 1:
                    save_meta = True
                else:
                    save_meta = False
                self.save(model_path, name, save_meta=save_meta)

                fig = self._plot_img(
                    plot_data,
                    c_samples,
                    img_path=img_path,
                    epoch=epoch,
                    batch=n_batches,
                    save_pic=True
                )

        sum_l_enc /= n_batches
        sum_l_dec /= n_batches
        sum_l_disc /= n_batches

        sum_l_gan /= n_batches
        sum_l_like /= n_batches
        sum_l_prior /= n_batches

        msg = "enc_loss = {0}, dec_loss = {1}, disc_loss = {2}"
        msg2 = "gan_loss = {0}, sim_loss = {1}, kl_loss = {2}"
        print(msg.format(sum_l_enc, sum_l_dec, sum_l_disc))
        print(msg2.format(sum_l_gan, sum_l_like, sum_l_prior))
        t_diff = time.time()-t1
        print("time: %f\n\n" % t_diff)
def function[fit, parameter[self, img_data, gamma, save_freq, pic_freq, n_epochs, batch_size, weight_decay, model_path, img_path, img_out_width, mirroring]]: constant[Fit the VAE/GAN model to the image data.

    Parameters
    ----------
    img_data : array-like shape (n_images, n_colors, image_width, image_height)
        Images used to fit VAE model.

    gamma [optional] : float
        Sets the multiplicative factor that weights the relative importance of
        reconstruction loss vs. ability to fool the discriminator. Higher weight
        means greater focus on faithful reconstruction.

    save_freq [optional] : int
        Sets the number of epochs to wait before saving the model and optimizer
        states. Also saves image files of randomly generated images using those
        states in a separate directory. Does not save if negative valued.

    pic_freq [optional] : int
        Sets the number of batches to wait before displaying a picture of
        randomly generated images using the current model state. Does not
        display if negative valued.

    n_epochs [optional] : int
        Gives the number of training epochs to run through for the fitting
        process.

    batch_size [optional] : int
        The size of the batch to use when training. Note: generally larger
        batch sizes will result in faster epoch iteration, but at the cost of
        lower granularity when updating the layer weights.

    weight_decay [optional] : bool
        Flag that controls adding weight decay hooks to the optimizer.

    model_path [optional] : str
        Directory where the model and optimizer state files will be saved.

    img_path [optional] : str
        Directory where the end of epoch training image files will be saved.

    img_out_width : int
        Controls the number of randomly generated images per row in the output
        saved images.

    mirroring [optional] : bool
        Controls whether images are randomly mirrored along the vertical axis
        with a .5 probability. Artificially increases image variance for the
        training set.
] variable[width] assign[=] name[img_out_width] call[name[self].enc_opt.setup, parameter[name[self].enc]] call[name[self].dec_opt.setup, parameter[name[self].dec]] call[name[self].disc_opt.setup, parameter[name[self].disc]] if name[weight_decay] begin[:] call[name[self].enc_opt.add_hook, parameter[call[name[chainer].optimizer.WeightDecay, parameter[constant[1e-05]]]]] call[name[self].dec_opt.add_hook, parameter[call[name[chainer].optimizer.WeightDecay, parameter[constant[1e-05]]]]] call[name[self].disc_opt.add_hook, parameter[call[name[chainer].optimizer.WeightDecay, parameter[constant[1e-05]]]]] variable[n_data] assign[=] call[name[img_data].shape][constant[0]] variable[batch_iter] assign[=] call[name[list], parameter[call[name[range], parameter[constant[0], name[n_data], name[batch_size]]]]] variable[n_batches] assign[=] call[name[len], parameter[name[batch_iter]]] variable[c_samples] assign[=] call[call[name[np].random.standard_normal, parameter[tuple[[<ast.Name object at 0x7da18eb56b00>, <ast.Attribute object at 0x7da18eb555a0>]]]].astype, parameter[name[np].float32]] variable[save_counter] assign[=] constant[0] for taget[name[epoch]] in starred[call[name[range], parameter[constant[1], binary_operation[name[n_epochs] + constant[1]]]]] begin[:] call[name[print], parameter[binary_operation[constant[epoch: %i] <ast.Mod object at 0x7da2590d6920> name[epoch]]]] variable[t1] assign[=] call[name[time].time, parameter[]] variable[indexes] assign[=] call[name[np].random.permutation, parameter[name[n_data]]] variable[sum_l_enc] assign[=] constant[0.0] variable[sum_l_dec] assign[=] constant[0.0] variable[sum_l_disc] assign[=] constant[0.0] variable[sum_l_gan] assign[=] constant[0.0] variable[sum_l_like] assign[=] constant[0.0] variable[sum_l_prior] assign[=] constant[0.0] variable[count] assign[=] constant[0] for taget[name[i]] in starred[call[name[tqdm].tqdm, parameter[name[batch_iter]]]] begin[:] variable[x] assign[=] call[name[img_data]][call[name[indexes]][<ast.Slice object at 0x7da18eb552a0>]] variable[size] assign[=] call[name[x].shape][constant[0]] if name[mirroring] begin[:] for taget[name[j]] in starred[call[name[range], parameter[name[size]]]] begin[:] if call[name[np].random.randint, parameter[constant[2]]] begin[:] call[name[x]][tuple[[<ast.Name object at 0x7da18eb56cb0>, <ast.Slice object at 0x7da18eb57f70>, <ast.Slice object at 0x7da18eb572e0>, <ast.Slice object at 0x7da18eb57010>]]] assign[=] call[name[x]][tuple[[<ast.Name object at 0x7da18eb54af0>, <ast.Slice object at 0x7da18eb55db0>, <ast.Slice object at 0x7da18eb543a0>, <ast.Slice object at 0x7da18eb541c0>]]] variable[x_batch] assign[=] call[name[Variable], parameter[name[x]]] variable[zeros] assign[=] call[name[Variable], parameter[call[name[np].zeros, parameter[name[size]]]]] variable[ones] assign[=] call[name[Variable], parameter[call[name[np].ones, parameter[name[size]]]]] if name[self].flag_gpu begin[:] call[name[x_batch].to_gpu, parameter[]] call[name[zeros].to_gpu, parameter[]] call[name[ones].to_gpu, parameter[]] <ast.Tuple object at 0x7da18eb540d0> assign[=] call[name[self]._forward, parameter[name[x_batch]]] variable[L_batch_GAN] assign[=] call[name[F].softmax_cross_entropy, parameter[name[disc_batch], name[ones]]] variable[L_rec_GAN] assign[=] call[name[F].softmax_cross_entropy, parameter[name[disc_rec], name[zeros]]] variable[L_samp_GAN] assign[=] call[name[F].softmax_cross_entropy, parameter[name[disc_samp], name[zeros]]] variable[l_gan] assign[=] binary_operation[binary_operation[binary_operation[name[L_batch_GAN] 
+ name[L_rec_GAN]] + name[L_samp_GAN]] / constant[3.0]] variable[l_like] assign[=] name[dif_l] variable[l_prior] assign[=] name[kl_loss] variable[enc_loss] assign[=] binary_operation[binary_operation[name[self].kl_ratio * name[l_prior]] + name[l_like]] variable[dec_loss] assign[=] binary_operation[binary_operation[name[gamma] * name[l_like]] - name[l_gan]] variable[disc_loss] assign[=] name[l_gan] call[name[self].enc_opt.zero_grads, parameter[]] call[name[enc_loss].backward, parameter[]] call[name[self].enc_opt.update, parameter[]] call[name[self].dec_opt.zero_grads, parameter[]] call[name[dec_loss].backward, parameter[]] call[name[self].dec_opt.update, parameter[]] call[name[self].disc_opt.zero_grads, parameter[]] call[name[disc_loss].backward, parameter[]] call[name[self].disc_opt.update, parameter[]] <ast.AugAssign object at 0x7da18eb54430> <ast.AugAssign object at 0x7da18eb57c40> <ast.AugAssign object at 0x7da18eb57be0> <ast.AugAssign object at 0x7da207f99db0> <ast.AugAssign object at 0x7da207f9aa10> <ast.AugAssign object at 0x7da207f993f0> <ast.AugAssign object at 0x7da207f9a8f0> variable[plot_data] assign[=] call[name[img_data]][call[name[indexes]][<ast.Slice object at 0x7da207f99030>]] if compare[name[pic_freq] greater[>] constant[0]] begin[:] assert[compare[call[name[type], parameter[name[pic_freq]]] equal[==] name[int]]] if compare[binary_operation[name[count] <ast.Mod object at 0x7da2590d6920> name[pic_freq]] equal[==] constant[0]] begin[:] variable[fig] assign[=] call[name[self]._plot_img, parameter[name[plot_data], name[c_samples]]] call[name[display], parameter[name[fig]]] if compare[name[save_freq] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da207f994e0> assert[compare[call[name[type], parameter[name[save_freq]]] equal[==] name[int]]] if compare[binary_operation[name[epoch] <ast.Mod object at 0x7da2590d6920> name[save_freq]] equal[==] constant[0]] begin[:] variable[name] assign[=] binary_operation[constant[vaegan_epoch%s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[epoch]]]] if compare[name[save_counter] equal[==] constant[1]] begin[:] variable[save_meta] assign[=] constant[True] call[name[self].save, parameter[name[model_path], name[name]]] variable[fig] assign[=] call[name[self]._plot_img, parameter[name[plot_data], name[c_samples]]] <ast.AugAssign object at 0x7da207f99330> <ast.AugAssign object at 0x7da207f9a560> <ast.AugAssign object at 0x7da207f981c0> <ast.AugAssign object at 0x7da207f9b3d0> <ast.AugAssign object at 0x7da207f9b220> <ast.AugAssign object at 0x7da20e9b25c0> variable[msg] assign[=] constant[enc_loss = {0}, dec_loss = {1} , disc_loss = {2}] variable[msg2] assign[=] constant[gan_loss = {0}, sim_loss = {1}, kl_loss = {2}] call[name[print], parameter[call[name[msg].format, parameter[name[sum_l_enc], name[sum_l_dec], name[sum_l_disc]]]]] call[name[print], parameter[call[name[msg2].format, parameter[name[sum_l_gan], name[sum_l_like], name[sum_l_prior]]]]] variable[t_diff] assign[=] binary_operation[call[name[time].time, parameter[]] - name[t1]] call[name[print], parameter[binary_operation[constant[time: %f ] <ast.Mod object at 0x7da2590d6920> name[t_diff]]]]
keyword[def] identifier[fit] ( identifier[self] , identifier[img_data] , identifier[gamma] = literal[int] , identifier[save_freq] =- literal[int] , identifier[pic_freq] =- literal[int] , identifier[n_epochs] = literal[int] , identifier[batch_size] = literal[int] , identifier[weight_decay] = keyword[True] , identifier[model_path] = literal[string] , identifier[img_path] = literal[string] , identifier[img_out_width] = literal[int] , identifier[mirroring] = keyword[False] ): literal[string] identifier[width] = identifier[img_out_width] identifier[self] . identifier[enc_opt] . identifier[setup] ( identifier[self] . identifier[enc] ) identifier[self] . identifier[dec_opt] . identifier[setup] ( identifier[self] . identifier[dec] ) identifier[self] . identifier[disc_opt] . identifier[setup] ( identifier[self] . identifier[disc] ) keyword[if] identifier[weight_decay] : identifier[self] . identifier[enc_opt] . identifier[add_hook] ( identifier[chainer] . identifier[optimizer] . identifier[WeightDecay] ( literal[int] )) identifier[self] . identifier[dec_opt] . identifier[add_hook] ( identifier[chainer] . identifier[optimizer] . identifier[WeightDecay] ( literal[int] )) identifier[self] . identifier[disc_opt] . identifier[add_hook] ( identifier[chainer] . identifier[optimizer] . identifier[WeightDecay] ( literal[int] )) identifier[n_data] = identifier[img_data] . identifier[shape] [ literal[int] ] identifier[batch_iter] = identifier[list] ( identifier[range] ( literal[int] , identifier[n_data] , identifier[batch_size] )) identifier[n_batches] = identifier[len] ( identifier[batch_iter] ) identifier[c_samples] = identifier[np] . identifier[random] . identifier[standard_normal] (( identifier[width] , identifier[self] . identifier[latent_width] )). identifier[astype] ( identifier[np] . identifier[float32] ) identifier[save_counter] = literal[int] keyword[for] identifier[epoch] keyword[in] identifier[range] ( literal[int] , identifier[n_epochs] + literal[int] ): identifier[print] ( literal[string] % identifier[epoch] ) identifier[t1] = identifier[time] . identifier[time] () identifier[indexes] = identifier[np] . identifier[random] . identifier[permutation] ( identifier[n_data] ) identifier[sum_l_enc] = literal[int] identifier[sum_l_dec] = literal[int] identifier[sum_l_disc] = literal[int] identifier[sum_l_gan] = literal[int] identifier[sum_l_like] = literal[int] identifier[sum_l_prior] = literal[int] identifier[count] = literal[int] keyword[for] identifier[i] keyword[in] identifier[tqdm] . identifier[tqdm] ( identifier[batch_iter] ): identifier[x] = identifier[img_data] [ identifier[indexes] [ identifier[i] : identifier[i] + identifier[batch_size] ]] identifier[size] = identifier[x] . identifier[shape] [ literal[int] ] keyword[if] identifier[mirroring] : keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[size] ): keyword[if] identifier[np] . identifier[random] . identifier[randint] ( literal[int] ): identifier[x] [ identifier[j] ,:,:,:]= identifier[x] [ identifier[j] ,:,:,::- literal[int] ] identifier[x_batch] = identifier[Variable] ( identifier[x] ) identifier[zeros] = identifier[Variable] ( identifier[np] . identifier[zeros] ( identifier[size] , identifier[dtype] = identifier[np] . identifier[int32] )) identifier[ones] = identifier[Variable] ( identifier[np] . identifier[ones] ( identifier[size] , identifier[dtype] = identifier[np] . identifier[int32] )) keyword[if] identifier[self] . identifier[flag_gpu] : identifier[x_batch] . identifier[to_gpu] () identifier[zeros] . 
identifier[to_gpu] () identifier[ones] . identifier[to_gpu] () identifier[kl_loss] , identifier[dif_l] , identifier[disc_rec] , identifier[disc_batch] , identifier[disc_samp] = identifier[self] . identifier[_forward] ( identifier[x_batch] ) identifier[L_batch_GAN] = identifier[F] . identifier[softmax_cross_entropy] ( identifier[disc_batch] , identifier[ones] ) identifier[L_rec_GAN] = identifier[F] . identifier[softmax_cross_entropy] ( identifier[disc_rec] , identifier[zeros] ) identifier[L_samp_GAN] = identifier[F] . identifier[softmax_cross_entropy] ( identifier[disc_samp] , identifier[zeros] ) identifier[l_gan] =( identifier[L_batch_GAN] + identifier[L_rec_GAN] + identifier[L_samp_GAN] )/ literal[int] identifier[l_like] = identifier[dif_l] identifier[l_prior] = identifier[kl_loss] identifier[enc_loss] = identifier[self] . identifier[kl_ratio] * identifier[l_prior] + identifier[l_like] identifier[dec_loss] = identifier[gamma] * identifier[l_like] - identifier[l_gan] identifier[disc_loss] = identifier[l_gan] identifier[self] . identifier[enc_opt] . identifier[zero_grads] () identifier[enc_loss] . identifier[backward] () identifier[self] . identifier[enc_opt] . identifier[update] () identifier[self] . identifier[dec_opt] . identifier[zero_grads] () identifier[dec_loss] . identifier[backward] () identifier[self] . identifier[dec_opt] . identifier[update] () identifier[self] . identifier[disc_opt] . identifier[zero_grads] () identifier[disc_loss] . identifier[backward] () identifier[self] . identifier[disc_opt] . identifier[update] () identifier[sum_l_enc] += identifier[enc_loss] . identifier[data] identifier[sum_l_dec] += identifier[dec_loss] . identifier[data] identifier[sum_l_disc] += identifier[disc_loss] . identifier[data] identifier[sum_l_gan] += identifier[l_gan] . identifier[data] identifier[sum_l_like] += identifier[l_like] . identifier[data] identifier[sum_l_prior] += identifier[l_prior] . identifier[data] identifier[count] += literal[int] identifier[plot_data] = identifier[img_data] [ identifier[indexes] [: identifier[width] ]] keyword[if] identifier[pic_freq] > literal[int] : keyword[assert] identifier[type] ( identifier[pic_freq] )== identifier[int] , literal[string] keyword[if] identifier[count] % identifier[pic_freq] == literal[int] : identifier[fig] = identifier[self] . identifier[_plot_img] ( identifier[plot_data] , identifier[c_samples] , identifier[img_path] = identifier[img_path] , identifier[epoch] = identifier[epoch] ) identifier[display] ( identifier[fig] ) keyword[if] identifier[save_freq] > literal[int] : identifier[save_counter] += literal[int] keyword[assert] identifier[type] ( identifier[save_freq] )== identifier[int] , literal[string] keyword[if] identifier[epoch] % identifier[save_freq] == literal[int] : identifier[name] = literal[string] % identifier[str] ( identifier[epoch] ) keyword[if] identifier[save_counter] == literal[int] : identifier[save_meta] = keyword[True] keyword[else] : identifier[save_meta] = keyword[False] identifier[self] . identifier[save] ( identifier[model_path] , identifier[name] , identifier[save_meta] = identifier[save_meta] ) identifier[fig] = identifier[self] . 
identifier[_plot_img] ( identifier[plot_data] , identifier[c_samples] , identifier[img_path] = identifier[img_path] , identifier[epoch] = identifier[epoch] , identifier[batch] = identifier[n_batches] , identifier[save_pic] = keyword[True] ) identifier[sum_l_enc] /= identifier[n_batches] identifier[sum_l_dec] /= identifier[n_batches] identifier[sum_l_disc] /= identifier[n_batches] identifier[sum_l_gan] /= identifier[n_batches] identifier[sum_l_like] /= identifier[n_batches] identifier[sum_l_prior] /= identifier[n_batches] identifier[msg] = literal[string] identifier[msg2] = literal[string] identifier[print] ( identifier[msg] . identifier[format] ( identifier[sum_l_enc] , identifier[sum_l_dec] , identifier[sum_l_disc] )) identifier[print] ( identifier[msg2] . identifier[format] ( identifier[sum_l_gan] , identifier[sum_l_like] , identifier[sum_l_prior] )) identifier[t_diff] = identifier[time] . identifier[time] ()- identifier[t1] identifier[print] ( literal[string] % identifier[t_diff] )
def fit(self, img_data, gamma=1.0, save_freq=-1, pic_freq=-1, n_epochs=100, batch_size=50, weight_decay=True, model_path='./VAEGAN_training_model/', img_path='./VAEGAN_training_images/', img_out_width=10, mirroring=False): """Fit the VAE/GAN model to the image data. Parameters ---------- img_data : array-like shape (n_images, n_colors, image_width, image_height) Images used to fit VAE model. gamma [optional] : float Sets the multiplicative factor that weights the relative importance of reconstruction loss vs. ability to fool the discriminator. Higher weight means greater focus on faithful reconstruction. save_freq [optional] : int Sets the number of epochs to wait before saving the model and optimizer states. Also saves image files of randomly generated images using those states in a separate directory. Does not save if negative valued. pic_freq [optional] : int Sets the number of batches to wait before displaying a picture of randomly generated images using the current model state. Does not display if negative valued. n_epochs [optional] : int Gives the number of training epochs to run through for the fitting process. batch_size [optional] : int The size of the batch to use when training. Note: generally larger batch sizes will result in faster epoch iteration, but at the cost of lower granularity when updating the layer weights. weight_decay [optional] : bool Flag that controls adding weight decay hooks to the optimizer. model_path [optional] : str Directory where the model and optimizer state files will be saved. img_path [optional] : str Directory where the end of epoch training image files will be saved. img_out_width : int Controls the number of randomly generated images per row in the output saved images. mirroring [optional] : bool Controls whether images are randomly mirrored along the vertical axis with a .5 probability. Artificially increases image variance for the training set.
""" width = img_out_width self.enc_opt.setup(self.enc) self.dec_opt.setup(self.dec) self.disc_opt.setup(self.disc) if weight_decay: self.enc_opt.add_hook(chainer.optimizer.WeightDecay(1e-05)) self.dec_opt.add_hook(chainer.optimizer.WeightDecay(1e-05)) self.disc_opt.add_hook(chainer.optimizer.WeightDecay(1e-05)) # depends on [control=['if'], data=[]] n_data = img_data.shape[0] batch_iter = list(range(0, n_data, batch_size)) n_batches = len(batch_iter) c_samples = np.random.standard_normal((width, self.latent_width)).astype(np.float32) save_counter = 0 for epoch in range(1, n_epochs + 1): print('epoch: %i' % epoch) t1 = time.time() indexes = np.random.permutation(n_data) sum_l_enc = 0.0 sum_l_dec = 0.0 sum_l_disc = 0.0 sum_l_gan = 0.0 sum_l_like = 0.0 sum_l_prior = 0.0 count = 0 for i in tqdm.tqdm(batch_iter): x = img_data[indexes[i:i + batch_size]] size = x.shape[0] if mirroring: for j in range(size): if np.random.randint(2): x[j, :, :, :] = x[j, :, :, ::-1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]] x_batch = Variable(x) zeros = Variable(np.zeros(size, dtype=np.int32)) ones = Variable(np.ones(size, dtype=np.int32)) if self.flag_gpu: x_batch.to_gpu() zeros.to_gpu() ones.to_gpu() # depends on [control=['if'], data=[]] (kl_loss, dif_l, disc_rec, disc_batch, disc_samp) = self._forward(x_batch) L_batch_GAN = F.softmax_cross_entropy(disc_batch, ones) L_rec_GAN = F.softmax_cross_entropy(disc_rec, zeros) L_samp_GAN = F.softmax_cross_entropy(disc_samp, zeros) l_gan = (L_batch_GAN + L_rec_GAN + L_samp_GAN) / 3.0 l_like = dif_l l_prior = kl_loss enc_loss = self.kl_ratio * l_prior + l_like dec_loss = gamma * l_like - l_gan disc_loss = l_gan self.enc_opt.zero_grads() enc_loss.backward() self.enc_opt.update() self.dec_opt.zero_grads() dec_loss.backward() self.dec_opt.update() self.disc_opt.zero_grads() disc_loss.backward() self.disc_opt.update() sum_l_enc += enc_loss.data sum_l_dec += dec_loss.data sum_l_disc += disc_loss.data sum_l_gan += l_gan.data sum_l_like += l_like.data sum_l_prior += l_prior.data count += 1 plot_data = img_data[indexes[:width]] if pic_freq > 0: assert type(pic_freq) == int, 'pic_freq must be an integer.' if count % pic_freq == 0: fig = self._plot_img(plot_data, c_samples, img_path=img_path, epoch=epoch) display(fig) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['pic_freq']] # depends on [control=['for'], data=['i']] if save_freq > 0: save_counter += 1 assert type(save_freq) == int, 'save_freq must be an integer.' if epoch % save_freq == 0: name = 'vaegan_epoch%s' % str(epoch) if save_counter == 1: save_meta = True # depends on [control=['if'], data=[]] else: save_meta = False self.save(model_path, name, save_meta=save_meta) fig = self._plot_img(plot_data, c_samples, img_path=img_path, epoch=epoch, batch=n_batches, save_pic=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['save_freq']] sum_l_enc /= n_batches sum_l_dec /= n_batches sum_l_disc /= n_batches sum_l_gan /= n_batches sum_l_like /= n_batches sum_l_prior /= n_batches msg = 'enc_loss = {0}, dec_loss = {1} , disc_loss = {2}' msg2 = 'gan_loss = {0}, sim_loss = {1}, kl_loss = {2}' print(msg.format(sum_l_enc, sum_l_dec, sum_l_disc)) print(msg2.format(sum_l_gan, sum_l_like, sum_l_prior)) t_diff = time.time() - t1 print('time: %f\n\n' % t_diff) # depends on [control=['for'], data=['epoch']]
def app_class(): """Create Flask application class. Invenio-Files-REST needs to patch the Werkzeug form parsing in order to support streaming large file uploads. This is done by subclassing the Flask application class. """ try: pkg_resources.get_distribution('invenio-files-rest') from invenio_files_rest.app import Flask as FlaskBase except pkg_resources.DistributionNotFound: from flask import Flask as FlaskBase # Add Host header validation via APP_ALLOWED_HOSTS configuration variable. class Request(TrustedHostsMixin, FlaskBase.request_class): pass class Flask(FlaskBase): request_class = Request return Flask
def function[app_class, parameter[]]: constant[Create Flask application class. Invenio-Files-REST needs to patch the Werkzeug form parsing in order to support streaming large file uploads. This is done by subclassing the Flask application class. ] <ast.Try object at 0x7da1afea9720> class class[Request, parameter[]] begin[:] pass class class[Flask, parameter[]] begin[:] variable[request_class] assign[=] name[Request] return[name[Flask]]
keyword[def] identifier[app_class] (): literal[string] keyword[try] : identifier[pkg_resources] . identifier[get_distribution] ( literal[string] ) keyword[from] identifier[invenio_files_rest] . identifier[app] keyword[import] identifier[Flask] keyword[as] identifier[FlaskBase] keyword[except] identifier[pkg_resources] . identifier[DistributionNotFound] : keyword[from] identifier[flask] keyword[import] identifier[Flask] keyword[as] identifier[FlaskBase] keyword[class] identifier[Request] ( identifier[TrustedHostsMixin] , identifier[FlaskBase] . identifier[request_class] ): keyword[pass] keyword[class] identifier[Flask] ( identifier[FlaskBase] ): identifier[request_class] = identifier[Request] keyword[return] identifier[Flask]
def app_class(): """Create Flask application class. Invenio-Files-REST needs to patch the Werkzeug form parsing in order to support streaming large file uploads. This is done by subclassing the Flask application class. """ try: pkg_resources.get_distribution('invenio-files-rest') from invenio_files_rest.app import Flask as FlaskBase # depends on [control=['try'], data=[]] except pkg_resources.DistributionNotFound: from flask import Flask as FlaskBase # depends on [control=['except'], data=[]] # Add Host header validation via APP_ALLOWED_HOSTS configuration variable. class Request(TrustedHostsMixin, FlaskBase.request_class): pass class Flask(FlaskBase): request_class = Request return Flask
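The pattern here is worth seeing in isolation: pick a base Flask class depending on whether an optional package is installed, then graft a mixin onto its request class. A hedged sketch, assuming only that Flask is installed; the TrustedHostsMixin below is a stand-in for the real one, not its implementation:

from flask import Flask, Request as BaseRequest

class TrustedHostsMixin:
    # Stand-in: Werkzeug consults ``trusted_hosts`` on the request object
    # when it validates the incoming Host header.
    trusted_hosts = ['example.org']

class Request(TrustedHostsMixin, BaseRequest):
    pass

class App(Flask):
    request_class = Request  # Flask builds every incoming request from this

app = App(__name__)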
def resetPassword(self, userId): ''' Changes a user's password to a system-generated value. ''' self._setHeaders('resetPassword') return self._sforce.service.resetPassword(userId)
def function[resetPassword, parameter[self, userId]]: constant[ Changes a user's password to a system-generated value. ] call[name[self]._setHeaders, parameter[constant[resetPassword]]] return[call[name[self]._sforce.service.resetPassword, parameter[name[userId]]]]
keyword[def] identifier[resetPassword] ( identifier[self] , identifier[userId] ): literal[string] identifier[self] . identifier[_setHeaders] ( literal[string] ) keyword[return] identifier[self] . identifier[_sforce] . identifier[service] . identifier[resetPassword] ( identifier[userId] )
def resetPassword(self, userId): """ Changes a user's password to a system-generated value. """ self._setHeaders('resetPassword') return self._sforce.service.resetPassword(userId)
def transitive_closure(self): """Compute the transitive closure of the matrix.""" data = [[1 if j else 0 for j in i] for i in self.data] for k in range(self.rows): for i in range(self.rows): for j in range(self.rows): if data[i][k] and data[k][j]: data[i][j] = 1 return data
def function[transitive_closure, parameter[self]]: constant[Compute the transitive closure of the matrix.] variable[data] assign[=] <ast.ListComp object at 0x7da1b20f9d20> for taget[name[k]] in starred[call[name[range], parameter[name[self].rows]]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[self].rows]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[name[self].rows]]] begin[:] if <ast.BoolOp object at 0x7da1b20fb5b0> begin[:] call[call[name[data]][name[i]]][name[j]] assign[=] constant[1] return[name[data]]
keyword[def] identifier[transitive_closure] ( identifier[self] ): literal[string] identifier[data] =[[ literal[int] keyword[if] identifier[j] keyword[else] literal[int] keyword[for] identifier[j] keyword[in] identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[data] ] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[self] . identifier[rows] ): keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[rows] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[rows] ): keyword[if] identifier[data] [ identifier[i] ][ identifier[k] ] keyword[and] identifier[data] [ identifier[k] ][ identifier[j] ]: identifier[data] [ identifier[i] ][ identifier[j] ]= literal[int] keyword[return] identifier[data]
def transitive_closure(self): """Compute the transitive closure of the matrix.""" data = [[1 if j else 0 for j in i] for i in self.data] for k in range(self.rows): for i in range(self.rows): for j in range(self.rows): if data[i][k] and data[k][j]: data[i][j] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['k']] return data
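The triple loop is Warshall's algorithm: after pass k, data[i][j] is 1 whenever i can reach j using only intermediate nodes numbered at most k. A standalone run on a 3-node chain, with plain lists standing in for self.data and self.rows:

# Edges 0->1 and 1->2; the closure must add the implied edge 0->2.
data = [[0, 1, 0],
        [0, 0, 1],
        [0, 0, 0]]
rows = len(data)
closure = [[1 if v else 0 for v in row] for row in data]
for k in range(rows):
    for i in range(rows):
        for j in range(rows):
            if closure[i][k] and closure[k][j]:
                closure[i][j] = 1
print(closure)  # [[0, 1, 1], [0, 0, 1], [0, 0, 0]]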
def minimum_spanning_subtree(self): '''Returns the (undirected) minimum spanning tree subgraph.''' dist = self.matrix('dense', copy=True) dist[dist==0] = np.inf np.fill_diagonal(dist, 0) mst = ssc.minimum_spanning_tree(dist) return self.__class__.from_adj_matrix(mst + mst.T)
def function[minimum_spanning_subtree, parameter[self]]: constant[Returns the (undirected) minimum spanning tree subgraph.] variable[dist] assign[=] call[name[self].matrix, parameter[constant[dense]]] call[name[dist]][compare[name[dist] equal[==] constant[0]]] assign[=] name[np].inf call[name[np].fill_diagonal, parameter[name[dist], constant[0]]] variable[mst] assign[=] call[name[ssc].minimum_spanning_tree, parameter[name[dist]]] return[call[name[self].__class__.from_adj_matrix, parameter[binary_operation[name[mst] + name[mst].T]]]]
keyword[def] identifier[minimum_spanning_subtree] ( identifier[self] ): literal[string] identifier[dist] = identifier[self] . identifier[matrix] ( literal[string] , identifier[copy] = keyword[True] ) identifier[dist] [ identifier[dist] == literal[int] ]= identifier[np] . identifier[inf] identifier[np] . identifier[fill_diagonal] ( identifier[dist] , literal[int] ) identifier[mst] = identifier[ssc] . identifier[minimum_spanning_tree] ( identifier[dist] ) keyword[return] identifier[self] . identifier[__class__] . identifier[from_adj_matrix] ( identifier[mst] + identifier[mst] . identifier[T] )
def minimum_spanning_subtree(self): """Returns the (undirected) minimum spanning tree subgraph.""" dist = self.matrix('dense', copy=True) dist[dist == 0] = np.inf np.fill_diagonal(dist, 0) mst = ssc.minimum_spanning_tree(dist) return self.__class__.from_adj_matrix(mst + mst.T)
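Assuming ssc refers to scipy.sparse.csgraph (an inference from the call, not stated in the row), a self-contained run of the same recipe: recode absent edges, build the tree, then symmetrize the upper-triangular result with mst + mst.T:

import numpy as np
from scipy.sparse import csgraph as ssc

# Dense symmetric weights; 0 marks an absent edge, as in the method above.
dist = np.array([[0., 1., 4.],
                 [1., 0., 2.],
                 [4., 2., 0.]])
dist[dist == 0] = np.inf   # absent edges must never be selected
np.fill_diagonal(dist, 0)  # ...but each node is at distance 0 from itself
mst = ssc.minimum_spanning_tree(dist)
print((mst + mst.T).toarray())  # keeps edges 0-1 and 1-2, drops the heavier 0-2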
def get_utt_regions(self): """ Return the regions of all utterances, assuming all utterances are concatenated. A region is defined by offset, length (num-frames) and a list of references to the utterance datasets in the containers. Returns: list: List with a tuple for every utterance containing the region info. """ regions = [] current_offset = 0 for utt_idx, utt_data in zip(self.data.info.utt_ids, self.data.utt_data): offset = current_offset num_frames = [] refs = [] for part in utt_data: num_frames.append(part.shape[0]) refs.append(part) if len(set(num_frames)) != 1: raise ValueError('Utterance {} does not have the same number of frames in all containers!'.format(utt_idx)) num_chunks = math.ceil(num_frames[0] / float(self.frames_per_chunk)) region = (offset, num_chunks, refs) regions.append(region) # Sets the offset for the next utterance current_offset += num_chunks return regions
def function[get_utt_regions, parameter[self]]: constant[ Return the regions of all utterances, assuming all utterances are concatenated. A region is defined by offset, length (num-frames) and a list of references to the utterance datasets in the containers. Returns: list: List with a tuple for every utterance containing the region info. ] variable[regions] assign[=] list[[]] variable[current_offset] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b0b51ba0>, <ast.Name object at 0x7da1b0b516f0>]]] in starred[call[name[zip], parameter[name[self].data.info.utt_ids, name[self].data.utt_data]]] begin[:] variable[offset] assign[=] name[current_offset] variable[num_frames] assign[=] list[[]] variable[refs] assign[=] list[[]] for taget[name[part]] in starred[name[utt_data]] begin[:] call[name[num_frames].append, parameter[call[name[part].shape][constant[0]]]] call[name[refs].append, parameter[name[part]]] if compare[call[name[len], parameter[call[name[set], parameter[name[num_frames]]]]] not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b0ba7250> variable[num_chunks] assign[=] call[name[math].ceil, parameter[binary_operation[call[name[num_frames]][constant[0]] / call[name[float], parameter[name[self].frames_per_chunk]]]]] variable[region] assign[=] tuple[[<ast.Name object at 0x7da1b0ba7970>, <ast.Name object at 0x7da1b0ba7100>, <ast.Name object at 0x7da1b0ba7670>]] call[name[regions].append, parameter[name[region]]] <ast.AugAssign object at 0x7da1b0b50cd0> return[name[regions]]
keyword[def] identifier[get_utt_regions] ( identifier[self] ): literal[string] identifier[regions] =[] identifier[current_offset] = literal[int] keyword[for] identifier[utt_idx] , identifier[utt_data] keyword[in] identifier[zip] ( identifier[self] . identifier[data] . identifier[info] . identifier[utt_ids] , identifier[self] . identifier[data] . identifier[utt_data] ): identifier[offset] = identifier[current_offset] identifier[num_frames] =[] identifier[refs] =[] keyword[for] identifier[part] keyword[in] identifier[utt_data] : identifier[num_frames] . identifier[append] ( identifier[part] . identifier[shape] [ literal[int] ]) identifier[refs] . identifier[append] ( identifier[part] ) keyword[if] identifier[len] ( identifier[set] ( identifier[num_frames] ))!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[utt_idx] )) identifier[num_chunks] = identifier[math] . identifier[ceil] ( identifier[num_frames] [ literal[int] ]/ identifier[float] ( identifier[self] . identifier[frames_per_chunk] )) identifier[region] =( identifier[offset] , identifier[num_chunks] , identifier[refs] ) identifier[regions] . identifier[append] ( identifier[region] ) identifier[current_offset] += identifier[num_chunks] keyword[return] identifier[regions]
def get_utt_regions(self): """ Return the regions of all utterances, assuming all utterances are concatenated. A region is defined by offset, length (num-frames) and a list of references to the utterance datasets in the containers. Returns: list: List with a tuple for every utterance containing the region info. """ regions = [] current_offset = 0 for (utt_idx, utt_data) in zip(self.data.info.utt_ids, self.data.utt_data): offset = current_offset num_frames = [] refs = [] for part in utt_data: num_frames.append(part.shape[0]) refs.append(part) # depends on [control=['for'], data=['part']] if len(set(num_frames)) != 1: raise ValueError('Utterance {} does not have the same number of frames in all containers!'.format(utt_idx)) # depends on [control=['if'], data=[]] num_chunks = math.ceil(num_frames[0] / float(self.frames_per_chunk)) region = (offset, num_chunks, refs) regions.append(region) # Sets the offset for the next utterance current_offset += num_chunks # depends on [control=['for'], data=[]] return regions
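The bookkeeping is easier to follow without the container plumbing: each utterance occupies ceil(frames / frames_per_chunk) chunks, and offsets simply accumulate. A reduced sketch with assumed frame counts:

import math

frames_per_chunk = 100            # assumed configuration value
utt_num_frames = [250, 100, 30]   # per-utterance frame counts (all containers agree)

regions = []
current_offset = 0
for n in utt_num_frames:
    num_chunks = math.ceil(n / float(frames_per_chunk))
    regions.append((current_offset, num_chunks))
    current_offset += num_chunks  # next utterance starts after these chunks
print(regions)  # [(0, 3), (3, 1), (4, 1)]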
def get(self, name, **kwargs): """Get the variable given a name if one exists or create a new one if missing. Parameters ---------- name : str name of the variable **kwargs : more arguments that are passed to symbol.Variable """ name = self._prefix + name if name not in self._params: self._params[name] = symbol.Variable(name, **kwargs) return self._params[name]
def function[get, parameter[self, name]]: constant[Get the variable given a name if one exists or create a new one if missing. Parameters ---------- name : str name of the variable **kwargs : more arguments that are passed to symbol.Variable ] variable[name] assign[=] binary_operation[name[self]._prefix + name[name]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._params] begin[:] call[name[self]._params][name[name]] assign[=] call[name[symbol].Variable, parameter[name[name]]] return[call[name[self]._params][name[name]]]
keyword[def] identifier[get] ( identifier[self] , identifier[name] ,** identifier[kwargs] ): literal[string] identifier[name] = identifier[self] . identifier[_prefix] + identifier[name] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_params] : identifier[self] . identifier[_params] [ identifier[name] ]= identifier[symbol] . identifier[Variable] ( identifier[name] ,** identifier[kwargs] ) keyword[return] identifier[self] . identifier[_params] [ identifier[name] ]
def get(self, name, **kwargs): """Get the variable given a name if one exists or create a new one if missing. Parameters ---------- name : str name of the variable **kwargs : more arguments that are passed to symbol.Variable """ name = self._prefix + name if name not in self._params: self._params[name] = symbol.Variable(name, **kwargs) # depends on [control=['if'], data=['name']] return self._params[name]
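symbol.Variable here looks like MXNet's symbol API, though that is an assumption; the getter itself is a plain prefix-plus-memoization pattern that can be sketched without the framework:

class ParamDict:
    """Pure-Python stand-in for the prefixed, memoizing getter above."""

    def __init__(self, prefix=''):
        self._prefix = prefix
        self._params = {}

    def get(self, name, **kwargs):
        name = self._prefix + name
        if name not in self._params:
            # The real code constructs symbol.Variable(name, **kwargs) here.
            self._params[name] = object()
        return self._params[name]

pd = ParamDict(prefix='rnn_')
assert pd.get('weight') is pd.get('weight')  # repeat lookups hit the cache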
def get_info(self): ''' Get info regarding the current template state :return: info dictionary ''' self.render() info = super(Template, self).get_info() res = {} res['name'] = self.get_name() res['mutation'] = { 'current_index': self._current_index, 'total_number': self.num_mutations() } res['value'] = { 'rendered': { 'base64': b64encode(self._current_rendered.tobytes()).decode(), 'length_in_bytes': len(self._current_rendered.tobytes()), } } res['hash'] = self.hash() res['field'] = info return res
def function[get_info, parameter[self]]: constant[ Get info regarding the current template state :return: info dictionary ] call[name[self].render, parameter[]] variable[info] assign[=] call[call[name[super], parameter[name[Template], name[self]]].get_info, parameter[]] variable[res] assign[=] dictionary[[], []] call[name[res]][constant[name]] assign[=] call[name[self].get_name, parameter[]] call[name[res]][constant[mutation]] assign[=] dictionary[[<ast.Constant object at 0x7da18dc05ed0>, <ast.Constant object at 0x7da18dc06830>], [<ast.Attribute object at 0x7da18dc05300>, <ast.Call object at 0x7da18dc065c0>]] call[name[res]][constant[value]] assign[=] dictionary[[<ast.Constant object at 0x7da18dc071c0>], [<ast.Dict object at 0x7da18dc04430>]] call[name[res]][constant[hash]] assign[=] call[name[self].hash, parameter[]] call[name[res]][constant[field]] assign[=] name[info] return[name[res]]
keyword[def] identifier[get_info] ( identifier[self] ): literal[string] identifier[self] . identifier[render] () identifier[info] = identifier[super] ( identifier[Template] , identifier[self] ). identifier[get_info] () identifier[res] ={} identifier[res] [ literal[string] ]= identifier[self] . identifier[get_name] () identifier[res] [ literal[string] ]={ literal[string] : identifier[self] . identifier[_current_index] , literal[string] : identifier[self] . identifier[num_mutations] () } identifier[res] [ literal[string] ]={ literal[string] :{ literal[string] : identifier[b64encode] ( identifier[self] . identifier[_current_rendered] . identifier[tobytes] ()). identifier[decode] (), literal[string] : identifier[len] ( identifier[self] . identifier[_current_rendered] . identifier[tobytes] ()), } } identifier[res] [ literal[string] ]= identifier[self] . identifier[hash] () identifier[res] [ literal[string] ]= identifier[info] keyword[return] identifier[res]
def get_info(self): """ Get info regarding the current template state :return: info dictionary """ self.render() info = super(Template, self).get_info() res = {} res['name'] = self.get_name() res['mutation'] = {'current_index': self._current_index, 'total_number': self.num_mutations()} res['value'] = {'rendered': {'base64': b64encode(self._current_rendered.tobytes()).decode(), 'length_in_bytes': len(self._current_rendered.tobytes())}} res['hash'] = self.hash() res['field'] = info return res
def copy_file(self, filepath): """ Returns flag which says to copy rather than link a file. """ copy_file = False try: copy_file = self.data[filepath]['copy'] except KeyError: return False return copy_file
def function[copy_file, parameter[self, filepath]]: constant[ Returns flag which says to copy rather than link a file. ] variable[copy_file] assign[=] constant[False] <ast.Try object at 0x7da1b0283910> return[name[copy_file]]
keyword[def] identifier[copy_file] ( identifier[self] , identifier[filepath] ): literal[string] identifier[copy_file] = keyword[False] keyword[try] : identifier[copy_file] = identifier[self] . identifier[data] [ identifier[filepath] ][ literal[string] ] keyword[except] identifier[KeyError] : keyword[return] keyword[False] keyword[return] identifier[copy_file]
def copy_file(self, filepath): """ Returns flag which says to copy rather than link a file. """ copy_file = False try: copy_file = self.data[filepath]['copy'] # depends on [control=['try'], data=[]] except KeyError: return False # depends on [control=['except'], data=[]] return copy_file
def command(execute=None): # noqa: E501 """Execute a Command Execute a command # noqa: E501 :param execute: The data needed to execute this command :type execute: dict | bytes :rtype: Response """ if connexion.request.is_json: execute = Execute.from_dict(connexion.request.get_json()) # noqa: E501 if(not hasAccess()): return redirectUnauthorized() try: connector = None parameters = {} if (execute.command.parameters): parameters = execute.command.parameters credentials = Credentials() options = Options(debug=execute.command.options['debug'], sensitive=execute.command.options['sensitive']) if (execute.auth): credentials = mapUserAuthToCredentials(execute.auth, credentials) if (not execute.auth.api_token): options.sensitive = True connector = Connector(options=options, credentials=credentials, command=execute.command.command, parameters=parameters) commandHandler = connector.execute() response = Response(status=commandHandler.getRequest().getResponseStatusCode(), body=json.loads(commandHandler.getRequest().getResponseBody())) if (execute.command.options['debug']): response.log = connector.logBuffer return response except: State.log.error(traceback.format_exc()) if ('debug' in execute.command.options and execute.command.options['debug']): return ErrorResponse(status=500, message="Uncaught exception occurred during processing. To get a larger stack trace, visit the logs.", state=traceback.format_exc(3)) else: return ErrorResponse(status=500, message="")
def function[command, parameter[execute]]: constant[Execute a Command Execute a command # noqa: E501 :param execute: The data needed to execute this command :type execute: dict | bytes :rtype: Response ] if name[connexion].request.is_json begin[:] variable[execute] assign[=] call[name[Execute].from_dict, parameter[call[name[connexion].request.get_json, parameter[]]]] if <ast.UnaryOp object at 0x7da18f58cd60> begin[:] return[call[name[redirectUnauthorized], parameter[]]] <ast.Try object at 0x7da18f58f1f0>
keyword[def] identifier[command] ( identifier[execute] = keyword[None] ): literal[string] keyword[if] identifier[connexion] . identifier[request] . identifier[is_json] : identifier[execute] = identifier[Execute] . identifier[from_dict] ( identifier[connexion] . identifier[request] . identifier[get_json] ()) keyword[if] ( keyword[not] identifier[hasAccess] ()): keyword[return] identifier[redirectUnauthorized] () keyword[try] : identifier[connector] = keyword[None] identifier[parameters] ={} keyword[if] ( identifier[execute] . identifier[command] . identifier[parameters] ): identifier[parameters] = identifier[execute] . identifier[command] . identifier[parameters] identifier[credentials] = identifier[Credentials] () identifier[options] = identifier[Options] ( identifier[debug] = identifier[execute] . identifier[command] . identifier[options] [ literal[string] ], identifier[sensitive] = identifier[execute] . identifier[command] . identifier[options] [ literal[string] ]) keyword[if] ( identifier[execute] . identifier[auth] ): identifier[credentials] = identifier[mapUserAuthToCredentials] ( identifier[execute] . identifier[auth] , identifier[credentials] ) keyword[if] ( keyword[not] identifier[execute] . identifier[auth] . identifier[api_token] ): identifier[options] . identifier[sensitive] = keyword[True] identifier[connector] = identifier[Connector] ( identifier[options] = identifier[options] , identifier[credentials] = identifier[credentials] , identifier[command] = identifier[execute] . identifier[command] . identifier[command] , identifier[parameters] = identifier[parameters] ) identifier[commandHandler] = identifier[connector] . identifier[execute] () identifier[response] = identifier[Response] ( identifier[status] = identifier[commandHandler] . identifier[getRequest] (). identifier[getResponseStatusCode] (), identifier[body] = identifier[json] . identifier[loads] ( identifier[commandHandler] . identifier[getRequest] (). identifier[getResponseBody] ())) keyword[if] ( identifier[execute] . identifier[command] . identifier[options] [ literal[string] ]): identifier[response] . identifier[log] = identifier[connector] . identifier[logBuffer] keyword[return] identifier[response] keyword[except] : identifier[State] . identifier[log] . identifier[error] ( identifier[traceback] . identifier[format_exc] ()) keyword[if] ( literal[string] keyword[in] identifier[execute] . identifier[command] . identifier[options] keyword[and] identifier[execute] . identifier[command] . identifier[options] [ literal[string] ]): keyword[return] identifier[ErrorResponse] ( identifier[status] = literal[int] , identifier[message] = literal[string] , identifier[state] = identifier[traceback] . identifier[format_exc] ( literal[int] )) keyword[else] : keyword[return] identifier[ErrorResponse] ( identifier[status] = literal[int] , identifier[message] = literal[string] )
def command(execute=None): # noqa: E501 'Execute a Command\n\n Execute a command # noqa: E501\n\n :param execute: The data needed to execute this command\n :type execute: dict | bytes\n\n :rtype: Response\n ' if connexion.request.is_json: execute = Execute.from_dict(connexion.request.get_json()) # noqa: E501 # depends on [control=['if'], data=[]] if not hasAccess(): return redirectUnauthorized() # depends on [control=['if'], data=[]] try: connector = None parameters = {} if execute.command.parameters: parameters = execute.command.parameters # depends on [control=['if'], data=[]] credentials = Credentials() options = Options(debug=execute.command.options['debug'], sensitive=execute.command.options['sensitive']) if execute.auth: credentials = mapUserAuthToCredentials(execute.auth, credentials) # depends on [control=['if'], data=[]] if not execute.auth.api_token: options.sensitive = True # depends on [control=['if'], data=[]] connector = Connector(options=options, credentials=credentials, command=execute.command.command, parameters=parameters) commandHandler = connector.execute() response = Response(status=commandHandler.getRequest().getResponseStatusCode(), body=json.loads(commandHandler.getRequest().getResponseBody())) if execute.command.options['debug']: response.log = connector.logBuffer # depends on [control=['if'], data=[]] return response # depends on [control=['try'], data=[]] except: State.log.error(traceback.format_exc()) if 'debug' in execute.command.options and execute.command.options['debug']: return ErrorResponse(status=500, message='Uncaught exception occurred during processing. To get a larger stack trace, visit the logs.', state=traceback.format_exc(3)) # depends on [control=['if'], data=[]] else: return ErrorResponse(status=500, message='') # depends on [control=['except'], data=[]]
def compare_enums(autogen_context, upgrade_ops, schema_names): """ Walk the declared SQLAlchemy schema for every referenced Enum, walk the PG schema for every defined Enum, then generate SyncEnumValuesOp migrations for each defined enum that has grown new entries when compared to its declared version. Enums that don't exist in the database yet are ignored, since SQLAlchemy/Alembic will create them as part of the usual migration process. """ to_add = set() for schema in schema_names: default = autogen_context.dialect.default_schema_name if schema is None: schema = default defined = get_defined_enums(autogen_context.connection, schema) declared = get_declared_enums(autogen_context.metadata, schema, default) for name, new_values in declared.items(): old_values = defined.get(name) # Alembic will handle creation of the type in this migration, so # skip undefined names. if name in defined and new_values.difference(old_values): to_add.add((schema, name, old_values, new_values)) for schema, name, old_values, new_values in sorted(to_add): op = SyncEnumValuesOp(schema, name, old_values, new_values) upgrade_ops.ops.append(op)
def function[compare_enums, parameter[autogen_context, upgrade_ops, schema_names]]: constant[ Walk the declared SQLAlchemy schema for every referenced Enum, walk the PG schema for every defined Enum, then generate SyncEnumValuesOp migrations for each defined enum that has grown new entries when compared to its declared version. Enums that don't exist in the database yet are ignored, since SQLAlchemy/Alembic will create them as part of the usual migration process. ] variable[to_add] assign[=] call[name[set], parameter[]] for taget[name[schema]] in starred[name[schema_names]] begin[:] variable[default] assign[=] name[autogen_context].dialect.default_schema_name if compare[name[schema] is constant[None]] begin[:] variable[schema] assign[=] name[default] variable[defined] assign[=] call[name[get_defined_enums], parameter[name[autogen_context].connection, name[schema]]] variable[declared] assign[=] call[name[get_declared_enums], parameter[name[autogen_context].metadata, name[schema], name[default]]] for taget[tuple[[<ast.Name object at 0x7da1b23479d0>, <ast.Name object at 0x7da1b2346470>]]] in starred[call[name[declared].items, parameter[]]] begin[:] variable[old_values] assign[=] call[name[defined].get, parameter[name[name]]] if <ast.BoolOp object at 0x7da1b049be50> begin[:] call[name[to_add].add, parameter[tuple[[<ast.Name object at 0x7da1b049b910>, <ast.Name object at 0x7da1b049a4d0>, <ast.Name object at 0x7da1b049b2b0>, <ast.Name object at 0x7da1b04989d0>]]]] for taget[tuple[[<ast.Name object at 0x7da1b049af50>, <ast.Name object at 0x7da1b0499c90>, <ast.Name object at 0x7da1b0498970>, <ast.Name object at 0x7da1b04997e0>]]] in starred[call[name[sorted], parameter[name[to_add]]]] begin[:] variable[op] assign[=] call[name[SyncEnumValuesOp], parameter[name[schema], name[name], name[old_values], name[new_values]]] call[name[upgrade_ops].ops.append, parameter[name[op]]]
keyword[def] identifier[compare_enums] ( identifier[autogen_context] , identifier[upgrade_ops] , identifier[schema_names] ): literal[string] identifier[to_add] = identifier[set] () keyword[for] identifier[schema] keyword[in] identifier[schema_names] : identifier[default] = identifier[autogen_context] . identifier[dialect] . identifier[default_schema_name] keyword[if] identifier[schema] keyword[is] keyword[None] : identifier[schema] = identifier[default] identifier[defined] = identifier[get_defined_enums] ( identifier[autogen_context] . identifier[connection] , identifier[schema] ) identifier[declared] = identifier[get_declared_enums] ( identifier[autogen_context] . identifier[metadata] , identifier[schema] , identifier[default] ) keyword[for] identifier[name] , identifier[new_values] keyword[in] identifier[declared] . identifier[items] (): identifier[old_values] = identifier[defined] . identifier[get] ( identifier[name] ) keyword[if] identifier[name] keyword[in] identifier[defined] keyword[and] identifier[new_values] . identifier[difference] ( identifier[old_values] ): identifier[to_add] . identifier[add] (( identifier[schema] , identifier[name] , identifier[old_values] , identifier[new_values] )) keyword[for] identifier[schema] , identifier[name] , identifier[old_values] , identifier[new_values] keyword[in] identifier[sorted] ( identifier[to_add] ): identifier[op] = identifier[SyncEnumValuesOp] ( identifier[schema] , identifier[name] , identifier[old_values] , identifier[new_values] ) identifier[upgrade_ops] . identifier[ops] . identifier[append] ( identifier[op] )
def compare_enums(autogen_context, upgrade_ops, schema_names): """ Walk the declared SQLAlchemy schema for every referenced Enum, walk the PG schema for every defined Enum, then generate SyncEnumValuesOp migrations for each defined enum that has grown new entries when compared to its declared version. Enums that don't exist in the database yet are ignored, since SQLAlchemy/Alembic will create them as part of the usual migration process. """ to_add = set() for schema in schema_names: default = autogen_context.dialect.default_schema_name if schema is None: schema = default # depends on [control=['if'], data=['schema']] defined = get_defined_enums(autogen_context.connection, schema) declared = get_declared_enums(autogen_context.metadata, schema, default) for (name, new_values) in declared.items(): old_values = defined.get(name) # Alembic will handle creation of the type in this migration, so # skip undefined names. if name in defined and new_values.difference(old_values): to_add.add((schema, name, old_values, new_values)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['schema']] for (schema, name, old_values, new_values) in sorted(to_add): op = SyncEnumValuesOp(schema, name, old_values, new_values) upgrade_ops.ops.append(op) # depends on [control=['for'], data=[]]
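The detection step reduces to a dictionary walk with a set difference; names absent from the database are skipped because Alembic will create those types itself. A self-contained sketch with hypothetical snapshots of the two sides:

# Hypothetical snapshots: what the database defines vs. what models declare.
defined = {'status': frozenset({'new', 'done'})}
declared = {'status': frozenset({'new', 'done', 'failed'}),
            'kind': frozenset({'a', 'b'})}   # brand-new type: left to Alembic

to_add = set()
for name, new_values in declared.items():
    old_values = defined.get(name)
    if name in defined and new_values.difference(old_values):
        to_add.add((name, old_values, new_values))
print(sorted(to_add))  # only 'status' needs a value-sync migration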
def get_default_realms(self, client_key, request): """Default realms of the client.""" log.debug('Get realms for %r', client_key) if not request.client: request.client = self._clientgetter(client_key=client_key) client = request.client if hasattr(client, 'default_realms'): return client.default_realms return []
def function[get_default_realms, parameter[self, client_key, request]]: constant[Default realms of the client.] call[name[log].debug, parameter[constant[Get realms for %r], name[client_key]]] if <ast.UnaryOp object at 0x7da1b0382fb0> begin[:] name[request].client assign[=] call[name[self]._clientgetter, parameter[]] variable[client] assign[=] name[request].client if call[name[hasattr], parameter[name[client], constant[default_realms]]] begin[:] return[name[client].default_realms] return[list[[]]]
keyword[def] identifier[get_default_realms] ( identifier[self] , identifier[client_key] , identifier[request] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[client_key] ) keyword[if] keyword[not] identifier[request] . identifier[client] : identifier[request] . identifier[client] = identifier[self] . identifier[_clientgetter] ( identifier[client_key] = identifier[client_key] ) identifier[client] = identifier[request] . identifier[client] keyword[if] identifier[hasattr] ( identifier[client] , literal[string] ): keyword[return] identifier[client] . identifier[default_realms] keyword[return] []
def get_default_realms(self, client_key, request): """Default realms of the client.""" log.debug('Get realms for %r', client_key) if not request.client: request.client = self._clientgetter(client_key=client_key) # depends on [control=['if'], data=[]] client = request.client if hasattr(client, 'default_realms'): return client.default_realms # depends on [control=['if'], data=[]] return []
def configuration_import(config_file, rules=None, file_format='xml', **kwargs): ''' .. versionadded:: 2017.7 Imports Zabbix configuration specified in file to Zabbix server. :param config_file: File with Zabbix config (local or remote) :param rules: Optional - Rules that have to be different from default (defaults are the same as in Zabbix web UI.) :param file_format: Config file format (default: xml) :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) CLI Example: .. code-block:: bash salt '*' zabbix.configuration_import salt://zabbix/config/zabbix_templates.xml \ "{'screens': {'createMissing': True, 'updateExisting': True}}" ''' if rules is None: rules = {} default_rules = {'applications': {'createMissing': True, 'updateExisting': False, 'deleteMissing': False}, 'discoveryRules': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'graphs': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'groups': {'createMissing': True}, 'hosts': {'createMissing': False, 'updateExisting': False}, 'images': {'createMissing': False, 'updateExisting': False}, 'items': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'maps': {'createMissing': False, 'updateExisting': False}, 'screens': {'createMissing': False, 'updateExisting': False}, 'templateLinkage': {'createMissing': True}, 'templates': {'createMissing': True, 'updateExisting': True}, 'templateScreens': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'triggers': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'valueMaps': {'createMissing': True, 'updateExisting': False}} new_rules = dict(default_rules) if rules: for rule in rules: if rule in new_rules: new_rules[rule].update(rules[rule]) else: new_rules[rule] = rules[rule] if 'salt://' in config_file: tmpfile = salt.utils.files.mkstemp() cfile = __salt__['cp.get_file'](config_file, tmpfile) if not cfile or os.path.getsize(cfile) == 0: return {'name': config_file, 'result': False, 'message': 'Failed to fetch config file.'} else: cfile = config_file if not os.path.isfile(cfile): return {'name': config_file, 'result': False, 'message': 'Invalid file path.'} with salt.utils.files.fopen(cfile, mode='r') as fp_: xml = fp_.read() if 'salt://' in config_file: salt.utils.files.safe_rm(cfile) params = {'format': file_format, 'rules': new_rules, 'source': xml} log.info('CONFIGURATION IMPORT: rules: %s', six.text_type(params['rules'])) try: run_query('configuration.import', params, **kwargs) return {'name': config_file, 'result': True, 'message': 'Zabbix API "configuration.import" method ' 'called successfully.'} except SaltException as exc: return {'name': config_file, 'result': False, 'message': six.text_type(exc)}
def function[configuration_import, parameter[config_file, rules, file_format]]: constant[ .. versionadded:: 2017.7 Imports Zabbix configuration specified in file to Zabbix server. :param config_file: File with Zabbix config (local or remote) :param rules: Optional - Rules that have to be different from default (defaults are the same as in Zabbix web UI.) :param file_format: Config file format (default: xml) :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) CLI Example: .. code-block:: bash salt '*' zabbix.configuration_import salt://zabbix/config/zabbix_templates.xml "{'screens': {'createMissing': True, 'updateExisting': True}}" ] if compare[name[rules] is constant[None]] begin[:] variable[rules] assign[=] dictionary[[], []] variable[default_rules] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9210>, <ast.Constant object at 0x7da20c6aa590>, <ast.Constant object at 0x7da20c6aaaa0>, <ast.Constant object at 0x7da20c6ab430>, <ast.Constant object at 0x7da20c6aa7a0>, <ast.Constant object at 0x7da20c6abb80>, <ast.Constant object at 0x7da20c6a86d0>, <ast.Constant object at 0x7da20c6a94e0>, <ast.Constant object at 0x7da20c6a89d0>, <ast.Constant object at 0x7da20c6a8be0>, <ast.Constant object at 0x7da20c6a8ee0>, <ast.Constant object at 0x7da20c6ab460>, <ast.Constant object at 0x7da20c6aa830>, <ast.Constant object at 0x7da20c6a8790>], [<ast.Dict object at 0x7da20c6a96f0>, <ast.Dict object at 0x7da20c6aac50>, <ast.Dict object at 0x7da20c6ab4f0>, <ast.Dict object at 0x7da20c6aab00>, <ast.Dict object at 0x7da20c6aada0>, <ast.Dict object at 0x7da20c6ab2e0>, <ast.Dict object at 0x7da20c6ab9a0>, <ast.Dict object at 0x7da2043466e0>, <ast.Dict object at 0x7da204347cd0>, <ast.Dict object at 0x7da2043448b0>, <ast.Dict object at 0x7da204346dd0>, <ast.Dict object at 0x7da2043471c0>, <ast.Dict object at 0x7da2043478e0>, <ast.Dict object at 0x7da204344850>]] variable[new_rules] assign[=] call[name[dict], parameter[name[default_rules]]] if name[rules] begin[:] for taget[name[rule]] in starred[name[rules]] begin[:] if compare[name[rule] in name[new_rules]] begin[:] call[call[name[new_rules]][name[rule]].update, parameter[call[name[rules]][name[rule]]]] if compare[constant[salt://] in name[config_file]] begin[:] variable[tmpfile] assign[=] call[name[salt].utils.files.mkstemp, parameter[]] variable[cfile] assign[=] call[call[name[__salt__]][constant[cp.get_file]], parameter[name[config_file], name[tmpfile]]] if <ast.BoolOp object at 0x7da2043463b0> begin[:] return[dictionary[[<ast.Constant object at 0x7da204344250>, <ast.Constant object at 0x7da204344670>, <ast.Constant object at 0x7da204346a10>], [<ast.Name object at 0x7da204346b60>, <ast.Constant object at 0x7da204347b20>, <ast.Constant object at 0x7da204344d00>]]] with call[name[salt].utils.files.fopen, parameter[name[cfile]]] begin[:] variable[xml] assign[=] call[name[fp_].read, parameter[]] if compare[constant[salt://] in name[config_file]] begin[:] call[name[salt].utils.files.safe_rm, parameter[name[cfile]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204345de0>, <ast.Constant object at 0x7da2043466b0>, <ast.Constant object at 0x7da204346320>], [<ast.Name object at 0x7da204347c70>, <ast.Name object at 0x7da2043461a0>, <ast.Name object at 
0x7da204344e50>]] call[name[log].info, parameter[constant[CONFIGURATION IMPORT: rules: %s], call[name[six].text_type, parameter[call[name[params]][constant[rules]]]]]] <ast.Try object at 0x7da204347a90>
keyword[def] identifier[configuration_import] ( identifier[config_file] , identifier[rules] = keyword[None] , identifier[file_format] = literal[string] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[rules] keyword[is] keyword[None] : identifier[rules] ={} identifier[default_rules] ={ literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[False] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] }, literal[string] :{ literal[string] : keyword[False] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[False] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[False] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[False] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[True] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : keyword[False] }, literal[string] :{ literal[string] : keyword[True] , literal[string] : keyword[False] }} identifier[new_rules] = identifier[dict] ( identifier[default_rules] ) keyword[if] identifier[rules] : keyword[for] identifier[rule] keyword[in] identifier[rules] : keyword[if] identifier[rule] keyword[in] identifier[new_rules] : identifier[new_rules] [ identifier[rule] ]. identifier[update] ( identifier[rules] [ identifier[rule] ]) keyword[else] : identifier[new_rules] [ identifier[rule] ]= identifier[rules] [ identifier[rule] ] keyword[if] literal[string] keyword[in] identifier[config_file] : identifier[tmpfile] = identifier[salt] . identifier[utils] . identifier[files] . identifier[mkstemp] () identifier[cfile] = identifier[__salt__] [ literal[string] ]( identifier[config_file] , identifier[tmpfile] ) keyword[if] keyword[not] identifier[cfile] keyword[or] identifier[os] . identifier[path] . identifier[getsize] ( identifier[cfile] )== literal[int] : keyword[return] { literal[string] : identifier[config_file] , literal[string] : keyword[False] , literal[string] : literal[string] } keyword[else] : identifier[cfile] = identifier[config_file] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[cfile] ): keyword[return] { literal[string] : identifier[config_file] , literal[string] : keyword[False] , literal[string] : literal[string] } keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[cfile] , identifier[mode] = literal[string] ) keyword[as] identifier[fp_] : identifier[xml] = identifier[fp_] . identifier[read] () keyword[if] literal[string] keyword[in] identifier[config_file] : identifier[salt] . identifier[utils] . identifier[files] . identifier[safe_rm] ( identifier[cfile] ) identifier[params] ={ literal[string] : identifier[file_format] , literal[string] : identifier[new_rules] , literal[string] : identifier[xml] } identifier[log] . 
identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[params] [ literal[string] ])) keyword[try] : identifier[run_query] ( literal[string] , identifier[params] ,** identifier[kwargs] ) keyword[return] { literal[string] : identifier[config_file] , literal[string] : keyword[True] , literal[string] : literal[string] literal[string] } keyword[except] identifier[SaltException] keyword[as] identifier[exc] : keyword[return] { literal[string] : identifier[config_file] , literal[string] : keyword[False] , literal[string] : identifier[six] . identifier[text_type] ( identifier[exc] )}
def configuration_import(config_file, rules=None, file_format='xml', **kwargs): """ .. versionadded:: 2017.7 Imports Zabbix configuration specified in file to Zabbix server. :param config_file: File with Zabbix config (local or remote) :param rules: Optional - Rules that have to be different from default (defaults are the same as in Zabbix web UI.) :param file_format: Config file format (default: xml) :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) CLI Example: .. code-block:: bash salt '*' zabbix.configuration_import salt://zabbix/config/zabbix_templates.xml "{'screens': {'createMissing': True, 'updateExisting': True}}" """ if rules is None: rules = {} # depends on [control=['if'], data=['rules']] default_rules = {'applications': {'createMissing': True, 'updateExisting': False, 'deleteMissing': False}, 'discoveryRules': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'graphs': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'groups': {'createMissing': True}, 'hosts': {'createMissing': False, 'updateExisting': False}, 'images': {'createMissing': False, 'updateExisting': False}, 'items': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'maps': {'createMissing': False, 'updateExisting': False}, 'screens': {'createMissing': False, 'updateExisting': False}, 'templateLinkage': {'createMissing': True}, 'templates': {'createMissing': True, 'updateExisting': True}, 'templateScreens': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'triggers': {'createMissing': True, 'updateExisting': True, 'deleteMissing': False}, 'valueMaps': {'createMissing': True, 'updateExisting': False}} new_rules = dict(default_rules) if rules: for rule in rules: if rule in new_rules: new_rules[rule].update(rules[rule]) # depends on [control=['if'], data=['rule', 'new_rules']] else: new_rules[rule] = rules[rule] # depends on [control=['for'], data=['rule']] # depends on [control=['if'], data=[]] if 'salt://' in config_file: tmpfile = salt.utils.files.mkstemp() cfile = __salt__['cp.get_file'](config_file, tmpfile) if not cfile or os.path.getsize(cfile) == 0: return {'name': config_file, 'result': False, 'message': 'Failed to fetch config file.'} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config_file']] else: cfile = config_file if not os.path.isfile(cfile): return {'name': config_file, 'result': False, 'message': 'Invalid file path.'} # depends on [control=['if'], data=[]] with salt.utils.files.fopen(cfile, mode='r') as fp_: xml = fp_.read() # depends on [control=['with'], data=['fp_']] if 'salt://' in config_file: salt.utils.files.safe_rm(cfile) # depends on [control=['if'], data=[]] params = {'format': file_format, 'rules': new_rules, 'source': xml} log.info('CONFIGURATION IMPORT: rules: %s', six.text_type(params['rules'])) try: run_query('configuration.import', params, **kwargs) return {'name': config_file, 'result': True, 'message': 'Zabbix API "configuration.import" method called successfully.'} # depends on [control=['try'], data=[]] except SaltException as exc: return {'name': config_file, 'result': False, 'message': six.text_type(exc)} # depends on [control=['except'], data=['exc']]
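A usage sketch from the Python side rather than the CLI; the credentials and frontend URL are placeholders, and it assumes the module is loaded under `__salt__` on a minion that can reach the Zabbix frontend:

ret = __salt__['zabbix.configuration_import'](
    'salt://zabbix/config/zabbix_templates.xml',
    rules={'screens': {'createMissing': True, 'updateExisting': True}},
    _connection_user='Admin',          # placeholder credentials
    _connection_password='zabbix',
    _connection_url='http://zbx.example.com/api_jsonrpc.php')
assert ret['result'], ret['message']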
def add_link_type_vlan(enode, portlbl, name, vlan_id, shell=None):
    """
    Add a new virtual link with the type set to VLAN.

    Creates a new vlan device {name} on device {port}.
    Will raise an exception if the device name is already assigned.

    :param enode: Engine node to communicate with.
    :type enode: topology.platforms.base.BaseNode
    :param str portlbl: Port label to configure. Port label will be mapped
     automatically.
    :param str name: specifies the name of the new virtual device.
    :param str vlan_id: specifies the VLAN identifier.
    :param str shell: Shell name to execute commands.
     If ``None``, use the Engine Node default shell.
    """
    assert name
    if name in enode.ports:
        raise ValueError('Port {name} already exists'.format(name=name))

    assert portlbl
    assert vlan_id
    port = enode.ports[portlbl]

    cmd = 'ip link add link {dev} name {name} type vlan id {vlan_id}'.format(
        dev=port, name=name, vlan_id=vlan_id)

    response = enode(cmd, shell=shell)
    assert not response, 'Cannot add virtual link {name}'.format(name=name)

    enode.ports[name] = name
def function[add_link_type_vlan, parameter[enode, portlbl, name, vlan_id, shell]]: constant[ Add a new virtual link with the type set to VLAN. Creates a new vlan device {name} on device {port}. Will raise an exception if value is already assigned. :param enode: Engine node to communicate with. :type enode: topology.platforms.base.BaseNode :param str portlbl: Port label to configure. Port label will be mapped automatically. :param str name: specifies the name of the new virtual device. :param str vlan_id: specifies the VLAN identifier. :param str shell: Shell name to execute commands. If ``None``, use the Engine Node default shell. ] assert[name[name]] if compare[name[name] in name[enode].ports] begin[:] <ast.Raise object at 0x7da2054a5270> assert[name[portlbl]] assert[name[vlan_id]] variable[port] assign[=] call[name[enode].ports][name[portlbl]] variable[cmd] assign[=] call[constant[ip link add link {dev} name {name} type vlan id {vlan_id}].format, parameter[]] variable[response] assign[=] call[name[enode], parameter[name[cmd]]] assert[<ast.UnaryOp object at 0x7da2054a7430>] call[name[enode].ports][name[name]] assign[=] name[name]
keyword[def] identifier[add_link_type_vlan] ( identifier[enode] , identifier[portlbl] , identifier[name] , identifier[vlan_id] , identifier[shell] = keyword[None] ): literal[string] keyword[assert] identifier[name] keyword[if] identifier[name] keyword[in] identifier[enode] . identifier[ports] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] = identifier[name] )) keyword[assert] identifier[portlbl] keyword[assert] identifier[vlan_id] identifier[port] = identifier[enode] . identifier[ports] [ identifier[portlbl] ] identifier[cmd] = literal[string] . identifier[format] ( identifier[dev] = identifier[port] , identifier[name] = identifier[name] , identifier[vlan_id] = identifier[vlan_id] ) identifier[response] = identifier[enode] ( identifier[cmd] , identifier[shell] = identifier[shell] ) keyword[assert] keyword[not] identifier[response] , literal[string] . identifier[format] ( identifier[name] = identifier[name] ) identifier[enode] . identifier[ports] [ identifier[name] ]= identifier[name]
def add_link_type_vlan(enode, portlbl, name, vlan_id, shell=None): """ Add a new virtual link with the type set to VLAN. Creates a new vlan device {name} on device {port}. Will raise an exception if value is already assigned. :param enode: Engine node to communicate with. :type enode: topology.platforms.base.BaseNode :param str portlbl: Port label to configure. Port label will be mapped automatically. :param str name: specifies the name of the new virtual device. :param str vlan_id: specifies the VLAN identifier. :param str shell: Shell name to execute commands. If ``None``, use the Engine Node default shell. """ assert name if name in enode.ports: raise ValueError('Port {name} already exists'.format(name=name)) # depends on [control=['if'], data=['name']] assert portlbl assert vlan_id port = enode.ports[portlbl] cmd = 'ip link add link {dev} name {name} type vlan id {vlan_id}'.format(dev=port, name=name, vlan_id=vlan_id) response = enode(cmd, shell=shell) assert not response, 'Cannot add virtual link {name}'.format(name=name) enode.ports[name] = name
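A minimal usage sketch, assuming `sw1` is an engine node from an already built topology and that port label '1' is mapped:

add_link_type_vlan(sw1, '1', name='vlan100', vlan_id='100')
# the new device is registered in the node's port table
assert sw1.ports['vlan100'] == 'vlan100'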
def right(self, expand=None):
    """ Returns a new Region right of the current region with a width of
    ``expand`` pixels. Does not include the current region. If ``expand``
    is omitted, the new region reaches to the right border of the screen.
    The new region has the same height and y-position as the current region. """
    if expand is None:
        x = self.x + self.w
        y = self.y
        w = self.getScreen().getBounds()[2] - x
        h = self.h
    else:
        x = self.x + self.w
        y = self.y
        w = expand
        h = self.h
    return Region(x, y, w, h).clipRegionToScreen()
def function[right, parameter[self, expand]]: constant[ Returns a new Region right of the current region with a width of ``expand`` pixels. Does not include the current region. If range is omitted, it reaches to the right border of the screen. The new region has the same height and y-position as the current region. ] if compare[name[expand] equal[==] constant[None]] begin[:] variable[x] assign[=] binary_operation[name[self].x + name[self].w] variable[y] assign[=] name[self].y variable[w] assign[=] binary_operation[call[call[call[name[self].getScreen, parameter[]].getBounds, parameter[]]][constant[2]] - name[x]] variable[h] assign[=] name[self].h return[call[call[name[Region], parameter[name[x], name[y], name[w], name[h]]].clipRegionToScreen, parameter[]]]
keyword[def] identifier[right] ( identifier[self] , identifier[expand] = keyword[None] ): literal[string] keyword[if] identifier[expand] == keyword[None] : identifier[x] = identifier[self] . identifier[x] + identifier[self] . identifier[w] identifier[y] = identifier[self] . identifier[y] identifier[w] = identifier[self] . identifier[getScreen] (). identifier[getBounds] ()[ literal[int] ]- identifier[x] identifier[h] = identifier[self] . identifier[h] keyword[else] : identifier[x] = identifier[self] . identifier[x] + identifier[self] . identifier[w] identifier[y] = identifier[self] . identifier[y] identifier[w] = identifier[expand] identifier[h] = identifier[self] . identifier[h] keyword[return] identifier[Region] ( identifier[x] , identifier[y] , identifier[w] , identifier[h] ). identifier[clipRegionToScreen] ()
def right(self, expand=None): """ Returns a new Region right of the current region with a width of ``expand`` pixels. Does not include the current region. If range is omitted, it reaches to the right border of the screen. The new region has the same height and y-position as the current region. """ if expand == None: x = self.x + self.w y = self.y w = self.getScreen().getBounds()[2] - x h = self.h # depends on [control=['if'], data=[]] else: x = self.x + self.w y = self.y w = expand h = self.h return Region(x, y, w, h).clipRegionToScreen()
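A usage sketch, assuming `region` is an existing Region instance:

strip = region.right(50)   # 50 px wide strip immediately right of the region
rest = region.right()      # from the region's right edge to the screen border
assert strip.x == region.x + region.w and strip.h == region.h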
def decode_struct_tree(self, data_type, obj): """ The data_type argument must be a StructTree. See json_compat_obj_decode() for argument descriptions. """ subtype = self.determine_struct_tree_subtype(data_type, obj) return self.decode_struct(subtype, obj)
def function[decode_struct_tree, parameter[self, data_type, obj]]: constant[ The data_type argument must be a StructTree. See json_compat_obj_decode() for argument descriptions. ] variable[subtype] assign[=] call[name[self].determine_struct_tree_subtype, parameter[name[data_type], name[obj]]] return[call[name[self].decode_struct, parameter[name[subtype], name[obj]]]]
keyword[def] identifier[decode_struct_tree] ( identifier[self] , identifier[data_type] , identifier[obj] ): literal[string] identifier[subtype] = identifier[self] . identifier[determine_struct_tree_subtype] ( identifier[data_type] , identifier[obj] ) keyword[return] identifier[self] . identifier[decode_struct] ( identifier[subtype] , identifier[obj] )
def decode_struct_tree(self, data_type, obj): """ The data_type argument must be a StructTree. See json_compat_obj_decode() for argument descriptions. """ subtype = self.determine_struct_tree_subtype(data_type, obj) return self.decode_struct(subtype, obj)
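A hedged sketch of the call pattern; the '.tag' discriminator follows the Stone serialization convention, but `MetadataType`, `decoder`, and the field names here are hypothetical stand-ins:

# determine_struct_tree_subtype() reads the discriminator before the
# plain struct decode runs on the resolved subtype.
obj = {'.tag': 'file', 'name': 'report.txt', 'size': 512}
metadata = decoder.decode_struct_tree(MetadataType, obj)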
def estimate(items, batch, config): """Estimate heterogeneity for a pair of tumor/normal samples. Run in parallel. """ hetcallers = {"theta": theta.run, "phylowgs": phylowgs.run, "bubbletree": bubbletree.run} paired = vcfutils.get_paired_bams([dd.get_align_bam(d) for d in items], items) calls = _get_calls(paired.tumor_data) variants = get_variants(paired.tumor_data) het_info = [] for hetcaller in _get_hetcallers(items): try: hetfn = hetcallers[hetcaller] except KeyError: hetfn = None print("%s not yet implemented" % hetcaller) if hetfn: hetout = hetfn(variants[0], calls, paired) if hetout: het_info.append(hetout) out = [] for data in items: if batch == _get_batches(data)[0]: if dd.get_sample_name(data) == paired.tumor_name: if het_info: data["heterogeneity"] = het_info out.append([data]) return out
def function[estimate, parameter[items, batch, config]]: constant[Estimate heterogeneity for a pair of tumor/normal samples. Run in parallel. ] variable[hetcallers] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc85b0>, <ast.Constant object at 0x7da18bcc9a20>, <ast.Constant object at 0x7da18bcc8b50>], [<ast.Attribute object at 0x7da18bcc8190>, <ast.Attribute object at 0x7da18bccb7c0>, <ast.Attribute object at 0x7da18bcc9d20>]] variable[paired] assign[=] call[name[vcfutils].get_paired_bams, parameter[<ast.ListComp object at 0x7da18bcc9c30>, name[items]]] variable[calls] assign[=] call[name[_get_calls], parameter[name[paired].tumor_data]] variable[variants] assign[=] call[name[get_variants], parameter[name[paired].tumor_data]] variable[het_info] assign[=] list[[]] for taget[name[hetcaller]] in starred[call[name[_get_hetcallers], parameter[name[items]]]] begin[:] <ast.Try object at 0x7da18bccab00> if name[hetfn] begin[:] variable[hetout] assign[=] call[name[hetfn], parameter[call[name[variants]][constant[0]], name[calls], name[paired]]] if name[hetout] begin[:] call[name[het_info].append, parameter[name[hetout]]] variable[out] assign[=] list[[]] for taget[name[data]] in starred[name[items]] begin[:] if compare[name[batch] equal[==] call[call[name[_get_batches], parameter[name[data]]]][constant[0]]] begin[:] if compare[call[name[dd].get_sample_name, parameter[name[data]]] equal[==] name[paired].tumor_name] begin[:] if name[het_info] begin[:] call[name[data]][constant[heterogeneity]] assign[=] name[het_info] call[name[out].append, parameter[list[[<ast.Name object at 0x7da1b18aa8c0>]]]] return[name[out]]
keyword[def] identifier[estimate] ( identifier[items] , identifier[batch] , identifier[config] ): literal[string] identifier[hetcallers] ={ literal[string] : identifier[theta] . identifier[run] , literal[string] : identifier[phylowgs] . identifier[run] , literal[string] : identifier[bubbletree] . identifier[run] } identifier[paired] = identifier[vcfutils] . identifier[get_paired_bams] ([ identifier[dd] . identifier[get_align_bam] ( identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[items] ], identifier[items] ) identifier[calls] = identifier[_get_calls] ( identifier[paired] . identifier[tumor_data] ) identifier[variants] = identifier[get_variants] ( identifier[paired] . identifier[tumor_data] ) identifier[het_info] =[] keyword[for] identifier[hetcaller] keyword[in] identifier[_get_hetcallers] ( identifier[items] ): keyword[try] : identifier[hetfn] = identifier[hetcallers] [ identifier[hetcaller] ] keyword[except] identifier[KeyError] : identifier[hetfn] = keyword[None] identifier[print] ( literal[string] % identifier[hetcaller] ) keyword[if] identifier[hetfn] : identifier[hetout] = identifier[hetfn] ( identifier[variants] [ literal[int] ], identifier[calls] , identifier[paired] ) keyword[if] identifier[hetout] : identifier[het_info] . identifier[append] ( identifier[hetout] ) identifier[out] =[] keyword[for] identifier[data] keyword[in] identifier[items] : keyword[if] identifier[batch] == identifier[_get_batches] ( identifier[data] )[ literal[int] ]: keyword[if] identifier[dd] . identifier[get_sample_name] ( identifier[data] )== identifier[paired] . identifier[tumor_name] : keyword[if] identifier[het_info] : identifier[data] [ literal[string] ]= identifier[het_info] identifier[out] . identifier[append] ([ identifier[data] ]) keyword[return] identifier[out]
def estimate(items, batch, config): """Estimate heterogeneity for a pair of tumor/normal samples. Run in parallel. """ hetcallers = {'theta': theta.run, 'phylowgs': phylowgs.run, 'bubbletree': bubbletree.run} paired = vcfutils.get_paired_bams([dd.get_align_bam(d) for d in items], items) calls = _get_calls(paired.tumor_data) variants = get_variants(paired.tumor_data) het_info = [] for hetcaller in _get_hetcallers(items): try: hetfn = hetcallers[hetcaller] # depends on [control=['try'], data=[]] except KeyError: hetfn = None print('%s not yet implemented' % hetcaller) # depends on [control=['except'], data=[]] if hetfn: hetout = hetfn(variants[0], calls, paired) if hetout: het_info.append(hetout) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['hetcaller']] out = [] for data in items: if batch == _get_batches(data)[0]: if dd.get_sample_name(data) == paired.tumor_name: if het_info: data['heterogeneity'] = het_info # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] out.append([data]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['data']] return out
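A usage sketch; `items` is assumed to be the bcbio sample dictionaries (tumor plus matched normal) sharing one batch name:

out = estimate(items, batch='tumor-batch1', config=config)
for (data,) in out:
    # one entry per heterogeneity caller that produced output
    print(data.get('heterogeneity', []))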
def _sanity_check_files(item, files):
    """Ensure input files correspond with supported approaches.

    Handles BAM, fastqs, plus split fastqs.
    """
    msg = None
    file_types = set([("bam" if x.endswith(".bam") else "fastq") for x in files if x])
    if len(file_types) > 1:
        msg = "Found multiple file types (BAM and fastq)"
    file_type = file_types.pop()
    if file_type == "bam":
        if len(files) != 1:
            msg = "Expect a single BAM file as input"
    elif file_type == "fastq":
        if len(files) not in [1, 2] and item["analysis"].lower() != "scrna-seq":
            pair_types = set([len(xs) for xs in fastq.combine_pairs(files)])
            if len(pair_types) != 1 or pair_types.pop() not in [1, 2]:
                msg = "Expect either 1 (single end) or 2 (paired end) fastq inputs"
        if len(files) == 2 and files[0] == files[1]:
            msg = "Expect both fastq files to not be the same"
    if msg:
        raise ValueError("%s for %s: %s" % (msg, item.get("description", ""), files))
def function[_sanity_check_files, parameter[item, files]]: constant[Ensure input files correspond with supported approaches. Handles BAM, fastqs, plus split fastqs. ] variable[msg] assign[=] constant[None] variable[file_types] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b17bae60>]] if compare[call[name[len], parameter[name[file_types]]] greater[>] constant[1]] begin[:] variable[msg] assign[=] constant[Found multiple file types (BAM and fastq)] variable[file_type] assign[=] call[name[file_types].pop, parameter[]] if compare[name[file_type] equal[==] constant[bam]] begin[:] if compare[call[name[len], parameter[name[files]]] not_equal[!=] constant[1]] begin[:] variable[msg] assign[=] constant[Expect a single BAM file input as input] if name[msg] begin[:] <ast.Raise object at 0x7da1b18fbbe0>
keyword[def] identifier[_sanity_check_files] ( identifier[item] , identifier[files] ): literal[string] identifier[msg] = keyword[None] identifier[file_types] = identifier[set] ([( literal[string] keyword[if] identifier[x] . identifier[endswith] ( literal[string] ) keyword[else] literal[string] ) keyword[for] identifier[x] keyword[in] identifier[files] keyword[if] identifier[x] ]) keyword[if] identifier[len] ( identifier[file_types] )> literal[int] : identifier[msg] = literal[string] identifier[file_type] = identifier[file_types] . identifier[pop] () keyword[if] identifier[file_type] == literal[string] : keyword[if] identifier[len] ( identifier[files] )!= literal[int] : identifier[msg] = literal[string] keyword[elif] identifier[file_type] == literal[string] : keyword[if] identifier[len] ( identifier[files] ) keyword[not] keyword[in] [ literal[int] , literal[int] ] keyword[and] identifier[item] [ literal[string] ]. identifier[lower] ()!= literal[string] : identifier[pair_types] = identifier[set] ([ identifier[len] ( identifier[xs] ) keyword[for] identifier[xs] keyword[in] identifier[fastq] . identifier[combine_pairs] ( identifier[files] )]) keyword[if] identifier[len] ( identifier[pair_types] )!= literal[int] keyword[or] identifier[pair_types] . identifier[pop] () keyword[not] keyword[in] [ literal[int] , literal[int] ]: identifier[msg] = literal[string] keyword[if] identifier[len] ( identifier[files] )== literal[int] keyword[and] identifier[files] [ literal[int] ]== identifier[files] [ literal[int] ]: identifier[msg] = literal[string] keyword[if] identifier[msg] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[msg] , identifier[item] . identifier[get] ( literal[string] , literal[string] ), identifier[files] ))
def _sanity_check_files(item, files): """Ensure input files correspond with supported approaches. Handles BAM, fastqs, plus split fastqs. """ msg = None file_types = set(['bam' if x.endswith('.bam') else 'fastq' for x in files if x]) if len(file_types) > 1: msg = 'Found multiple file types (BAM and fastq)' # depends on [control=['if'], data=[]] file_type = file_types.pop() if file_type == 'bam': if len(files) != 1: msg = 'Expect a single BAM file input as input' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif file_type == 'fastq': if len(files) not in [1, 2] and item['analysis'].lower() != 'scrna-seq': pair_types = set([len(xs) for xs in fastq.combine_pairs(files)]) if len(pair_types) != 1 or pair_types.pop() not in [1, 2]: msg = 'Expect either 1 (single end) or 2 (paired end) fastq inputs' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if len(files) == 2 and files[0] == files[1]: msg = 'Expect both fastq files to not be the same' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if msg: raise ValueError('%s for %s: %s' % (msg, item.get('description', ''), files)) # depends on [control=['if'], data=[]]
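A sketch of one failure mode the check guards against: passing the same fastq twice raises a ValueError with the sample description attached.

item = {'description': 'sample1', 'analysis': 'variant2'}
try:
    _sanity_check_files(item, ['s1_R1.fastq.gz', 's1_R1.fastq.gz'])
except ValueError as err:
    print(err)   # Expect both fastq files to not be the same for sample1: ...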
def split_by_fname_file(self, fname:PathOrStr, path:PathOrStr=None)->'ItemLists': "Split the data by using the names in `fname` for the validation set. `path` will override `self.path`." path = Path(ifnone(path, self.path)) valid_names = loadtxt_str(path/fname) return self.split_by_files(valid_names)
def function[split_by_fname_file, parameter[self, fname, path]]: constant[Split the data by using the names in `fname` for the validation set. `path` will override `self.path`.] variable[path] assign[=] call[name[Path], parameter[call[name[ifnone], parameter[name[path], name[self].path]]]] variable[valid_names] assign[=] call[name[loadtxt_str], parameter[binary_operation[name[path] / name[fname]]]] return[call[name[self].split_by_files, parameter[name[valid_names]]]]
keyword[def] identifier[split_by_fname_file] ( identifier[self] , identifier[fname] : identifier[PathOrStr] , identifier[path] : identifier[PathOrStr] = keyword[None] )-> literal[string] : literal[string] identifier[path] = identifier[Path] ( identifier[ifnone] ( identifier[path] , identifier[self] . identifier[path] )) identifier[valid_names] = identifier[loadtxt_str] ( identifier[path] / identifier[fname] ) keyword[return] identifier[self] . identifier[split_by_files] ( identifier[valid_names] )
def split_by_fname_file(self, fname: PathOrStr, path: PathOrStr=None) -> 'ItemLists': """Split the data by using the names in `fname` for the validation set. `path` will override `self.path`.""" path = Path(ifnone(path, self.path)) valid_names = loadtxt_str(path / fname) return self.split_by_files(valid_names)
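A usage sketch in the fastai item-list style; `valid.txt` is assumed to hold one validation filename per line, relative to the list's path:

ils = (ImageList.from_folder(path)
       .split_by_fname_file('valid.txt'))   # listed names become the validation set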
def file2json(self, jsonfile=None):
    """ Convert entire lte file into json like format

    USAGE: 1: kwsdictstr = file2json()
           2: kwsdictstr = file2json(jsonfile = 'somefile')

    show pretty format with pipeline: | jshon, or | pjson
    if jsonfile is defined, dump to defined file before returning json string

    :param jsonfile: filename to dump json strings
    """
    kwslist = self.detectAllKws()
    kwsdict = {}
    for kw in sorted(kwslist, key=str.lower):
        tdict = self.getKwAsDict(kw)
        self.rpn2val(tdict)
        kwsdict.update(tdict)
        if kw not in self.ctrlconf_dict:
            ctrlconf = self.getKwCtrlConf(kw, fmt='dict')
            if ctrlconf is not None:
                self.ctrlconf_dict.update({kw: ctrlconf})
    kwsdict.update(self.prestrdict)
    # all epics control config kept under the '_epics' key
    kwsdict.update({'_epics': self.ctrlconf_dict})
    if jsonfile is not None:
        try:
            with open(os.path.expanduser(jsonfile), 'w') as outfile:
                json.dump(kwsdict, outfile)
        except (IOError, OSError):
            pass
    return json.dumps(kwsdict)
def function[file2json, parameter[self, jsonfile]]: constant[ Convert entire lte file into json like format USAGE: 1: kwsdictstr = file2json() 2: kwsdictstr = file2json(jsonfile = 'somefile') show pretty format with pipeline: | jshon, or | pjson if jsonfile is defined, dump to defined file before returning json string :param jsonfile: filename to dump json strings ] variable[kwslist] assign[=] call[name[self].detectAllKws, parameter[]] variable[kwsdict] assign[=] dictionary[[], []] variable[idx] assign[=] constant[0] for taget[name[kw]] in starred[call[name[sorted], parameter[name[kwslist]]]] begin[:] <ast.AugAssign object at 0x7da1b094b0a0> variable[tdict] assign[=] call[name[self].getKwAsDict, parameter[name[kw]]] call[name[self].rpn2val, parameter[name[tdict]]] call[name[kwsdict].update, parameter[name[tdict]]] if compare[name[kw] <ast.NotIn object at 0x7da2590d7190> name[self].ctrlconf_dict] begin[:] variable[ctrlconf] assign[=] call[name[self].getKwCtrlConf, parameter[name[kw]]] if compare[name[ctrlconf] is_not constant[None]] begin[:] call[name[self].ctrlconf_dict.update, parameter[dictionary[[<ast.Name object at 0x7da1b0804790>], [<ast.Name object at 0x7da1b0804670>]]]] call[name[kwsdict].update, parameter[name[self].prestrdict]] variable[ctrlconfdict] assign[=] dictionary[[<ast.Constant object at 0x7da1b095d210>], [<ast.Attribute object at 0x7da1b095e260>]] call[name[kwsdict].update, parameter[name[ctrlconfdict]]] <ast.Try object at 0x7da1b095c190> return[call[name[json].dumps, parameter[name[kwsdict]]]]
keyword[def] identifier[file2json] ( identifier[self] , identifier[jsonfile] = keyword[None] ): literal[string] identifier[kwslist] = identifier[self] . identifier[detectAllKws] () identifier[kwsdict] ={} identifier[idx] = literal[int] keyword[for] identifier[kw] keyword[in] identifier[sorted] ( identifier[kwslist] , identifier[key] = identifier[str] . identifier[lower] ): identifier[idx] += literal[int] identifier[tdict] = identifier[self] . identifier[getKwAsDict] ( identifier[kw] ) identifier[self] . identifier[rpn2val] ( identifier[tdict] ) identifier[kwsdict] . identifier[update] ( identifier[tdict] ) keyword[if] identifier[kw] keyword[not] keyword[in] identifier[self] . identifier[ctrlconf_dict] : identifier[ctrlconf] = identifier[self] . identifier[getKwCtrlConf] ( identifier[kw] , identifier[fmt] = literal[string] ) keyword[if] identifier[ctrlconf] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[ctrlconf_dict] . identifier[update] ({ identifier[kw] : identifier[ctrlconf] }) identifier[kwsdict] . identifier[update] ( identifier[self] . identifier[prestrdict] ) identifier[ctrlconfdict] ={ literal[string] : identifier[self] . identifier[ctrlconf_dict] } identifier[kwsdict] . identifier[update] ( identifier[ctrlconfdict] ) keyword[try] : keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[jsonfile] ), literal[string] ) keyword[as] identifier[outfile] : identifier[json] . identifier[dump] ( identifier[kwsdict] , identifier[outfile] ) keyword[except] : keyword[pass] keyword[return] identifier[json] . identifier[dumps] ( identifier[kwsdict] )
def file2json(self, jsonfile=None): """ Convert entire lte file into json like format USAGE: 1: kwsdictstr = file2json() 2: kwsdictstr = file2json(jsonfile = 'somefile') show pretty format with pipeline: | jshon, or | pjson if jsonfile is defined, dump to defined file before returning json string :param jsonfile: filename to dump json strings """ kwslist = self.detectAllKws() kwsdict = {} idx = 0 for kw in sorted(kwslist, key=str.lower): # print kw idx += 1 tdict = self.getKwAsDict(kw) self.rpn2val(tdict) kwsdict.update(tdict) if kw not in self.ctrlconf_dict: ctrlconf = self.getKwCtrlConf(kw, fmt='dict') if ctrlconf is not None: self.ctrlconf_dict.update({kw: ctrlconf}) # depends on [control=['if'], data=['ctrlconf']] # depends on [control=['if'], data=['kw']] # depends on [control=['for'], data=['kw']] kwsdict.update(self.prestrdict) ctrlconfdict = {'_epics': self.ctrlconf_dict} # all epics contrl config in self.ctrlconfdict kwsdict.update(ctrlconfdict) try: with open(os.path.expanduser(jsonfile), 'w') as outfile: json.dump(kwsdict, outfile) # depends on [control=['with'], data=['outfile']] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] return json.dumps(kwsdict)
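A usage sketch, assuming `lte` is an already constructed parser instance for a lattice file:

import json

jstr = lte.file2json(jsonfile='~/lattice.json')   # dumped to file, then returned
data = json.loads(jstr)
print(sorted(data)[:5])   # element names plus the '_epics' control block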
def decode_bridged_message(rx_data): """Decode a (multi-)bridged command. rx_data: the received message as bytestring Returns the decoded message as bytestring """ while array('B', rx_data)[5] == constants.CMDID_SEND_MESSAGE: rsp = create_message(constants.NETFN_APP + 1, constants.CMDID_SEND_MESSAGE, None) decode_message(rsp, rx_data[6:]) check_completion_code(rsp.completion_code) rx_data = rx_data[7:-1] if len(rx_data) < 6: break return rx_data
def function[decode_bridged_message, parameter[rx_data]]: constant[Decode a (multi-)bridged command. rx_data: the received message as bytestring Returns the decoded message as bytestring ] while compare[call[call[name[array], parameter[constant[B], name[rx_data]]]][constant[5]] equal[==] name[constants].CMDID_SEND_MESSAGE] begin[:] variable[rsp] assign[=] call[name[create_message], parameter[binary_operation[name[constants].NETFN_APP + constant[1]], name[constants].CMDID_SEND_MESSAGE, constant[None]]] call[name[decode_message], parameter[name[rsp], call[name[rx_data]][<ast.Slice object at 0x7da1b0791750>]]] call[name[check_completion_code], parameter[name[rsp].completion_code]] variable[rx_data] assign[=] call[name[rx_data]][<ast.Slice object at 0x7da1b07904f0>] if compare[call[name[len], parameter[name[rx_data]]] less[<] constant[6]] begin[:] break return[name[rx_data]]
keyword[def] identifier[decode_bridged_message] ( identifier[rx_data] ): literal[string] keyword[while] identifier[array] ( literal[string] , identifier[rx_data] )[ literal[int] ]== identifier[constants] . identifier[CMDID_SEND_MESSAGE] : identifier[rsp] = identifier[create_message] ( identifier[constants] . identifier[NETFN_APP] + literal[int] , identifier[constants] . identifier[CMDID_SEND_MESSAGE] , keyword[None] ) identifier[decode_message] ( identifier[rsp] , identifier[rx_data] [ literal[int] :]) identifier[check_completion_code] ( identifier[rsp] . identifier[completion_code] ) identifier[rx_data] = identifier[rx_data] [ literal[int] :- literal[int] ] keyword[if] identifier[len] ( identifier[rx_data] )< literal[int] : keyword[break] keyword[return] identifier[rx_data]
def decode_bridged_message(rx_data): """Decode a (multi-)bridged command. rx_data: the received message as bytestring Returns the decoded message as bytestring """ while array('B', rx_data)[5] == constants.CMDID_SEND_MESSAGE: rsp = create_message(constants.NETFN_APP + 1, constants.CMDID_SEND_MESSAGE, None) decode_message(rsp, rx_data[6:]) check_completion_code(rsp.completion_code) rx_data = rx_data[7:-1] if len(rx_data) < 6: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return rx_data
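A sketch of where this sits in a receive path; `raw` stands in for a response bytestring whose NetFn/Cmd bytes mark one or more Send Message wrappers:

payload = decode_bridged_message(raw)
# payload is now the innermost response: each bridge level had its
# completion code checked before its wrapper bytes were stripped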
def _set_properties(self, data):
    """
    set the properties of the app model by the given data dict
    """
    for prop, value in data.items():
        # only copy keys that match an existing attribute on the model
        if prop in vars(self):
            setattr(self, prop, value)
def function[_set_properties, parameter[self, data]]: constant[ set the properties of the app model by the given data dict ] for taget[name[property]] in starred[call[name[data].keys, parameter[]]] begin[:] if compare[name[property] in call[name[vars], parameter[name[self]]]] begin[:] call[name[setattr], parameter[name[self], name[property], call[name[data]][name[property]]]]
keyword[def] identifier[_set_properties] ( identifier[self] , identifier[data] ): literal[string] keyword[for] identifier[property] keyword[in] identifier[data] . identifier[keys] (): keyword[if] identifier[property] keyword[in] identifier[vars] ( identifier[self] ): identifier[setattr] ( identifier[self] , identifier[property] , identifier[data] [ identifier[property] ])
def _set_properties(self, data): """ set the properties of the app model by the given data dict """ for property in data.keys(): if property in vars(self): setattr(self, property, data[property]) # depends on [control=['if'], data=['property']] # depends on [control=['for'], data=['property']]
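A behavior sketch with a hypothetical model class: keys without a matching attribute are silently ignored.

class App(object):
    def __init__(self):
        self.name = None

app = App()
app._set_properties({'name': 'demo', 'unknown': 1})
print(app.name)                 # 'demo'
print(hasattr(app, 'unknown'))  # False -- unmatched keys are dropped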
def GetDeviceStringProperty(dev_ref, key): """Reads string property from the HID device.""" cf_key = CFStr(key) type_ref = iokit.IOHIDDeviceGetProperty(dev_ref, cf_key) cf.CFRelease(cf_key) if not type_ref: return None if cf.CFGetTypeID(type_ref) != cf.CFStringGetTypeID(): raise errors.OsHidError('Expected string type, got {}'.format( cf.CFGetTypeID(type_ref))) type_ref = ctypes.cast(type_ref, CF_STRING_REF) out = ctypes.create_string_buffer(DEVICE_STRING_PROPERTY_BUFFER_SIZE) ret = cf.CFStringGetCString(type_ref, out, DEVICE_STRING_PROPERTY_BUFFER_SIZE, K_CF_STRING_ENCODING_UTF8) if not ret: return None return out.value.decode('utf8')
def function[GetDeviceStringProperty, parameter[dev_ref, key]]: constant[Reads string property from the HID device.] variable[cf_key] assign[=] call[name[CFStr], parameter[name[key]]] variable[type_ref] assign[=] call[name[iokit].IOHIDDeviceGetProperty, parameter[name[dev_ref], name[cf_key]]] call[name[cf].CFRelease, parameter[name[cf_key]]] if <ast.UnaryOp object at 0x7da204622650> begin[:] return[constant[None]] if compare[call[name[cf].CFGetTypeID, parameter[name[type_ref]]] not_equal[!=] call[name[cf].CFStringGetTypeID, parameter[]]] begin[:] <ast.Raise object at 0x7da204621cf0> variable[type_ref] assign[=] call[name[ctypes].cast, parameter[name[type_ref], name[CF_STRING_REF]]] variable[out] assign[=] call[name[ctypes].create_string_buffer, parameter[name[DEVICE_STRING_PROPERTY_BUFFER_SIZE]]] variable[ret] assign[=] call[name[cf].CFStringGetCString, parameter[name[type_ref], name[out], name[DEVICE_STRING_PROPERTY_BUFFER_SIZE], name[K_CF_STRING_ENCODING_UTF8]]] if <ast.UnaryOp object at 0x7da204623400> begin[:] return[constant[None]] return[call[name[out].value.decode, parameter[constant[utf8]]]]
keyword[def] identifier[GetDeviceStringProperty] ( identifier[dev_ref] , identifier[key] ): literal[string] identifier[cf_key] = identifier[CFStr] ( identifier[key] ) identifier[type_ref] = identifier[iokit] . identifier[IOHIDDeviceGetProperty] ( identifier[dev_ref] , identifier[cf_key] ) identifier[cf] . identifier[CFRelease] ( identifier[cf_key] ) keyword[if] keyword[not] identifier[type_ref] : keyword[return] keyword[None] keyword[if] identifier[cf] . identifier[CFGetTypeID] ( identifier[type_ref] )!= identifier[cf] . identifier[CFStringGetTypeID] (): keyword[raise] identifier[errors] . identifier[OsHidError] ( literal[string] . identifier[format] ( identifier[cf] . identifier[CFGetTypeID] ( identifier[type_ref] ))) identifier[type_ref] = identifier[ctypes] . identifier[cast] ( identifier[type_ref] , identifier[CF_STRING_REF] ) identifier[out] = identifier[ctypes] . identifier[create_string_buffer] ( identifier[DEVICE_STRING_PROPERTY_BUFFER_SIZE] ) identifier[ret] = identifier[cf] . identifier[CFStringGetCString] ( identifier[type_ref] , identifier[out] , identifier[DEVICE_STRING_PROPERTY_BUFFER_SIZE] , identifier[K_CF_STRING_ENCODING_UTF8] ) keyword[if] keyword[not] identifier[ret] : keyword[return] keyword[None] keyword[return] identifier[out] . identifier[value] . identifier[decode] ( literal[string] )
def GetDeviceStringProperty(dev_ref, key): """Reads string property from the HID device.""" cf_key = CFStr(key) type_ref = iokit.IOHIDDeviceGetProperty(dev_ref, cf_key) cf.CFRelease(cf_key) if not type_ref: return None # depends on [control=['if'], data=[]] if cf.CFGetTypeID(type_ref) != cf.CFStringGetTypeID(): raise errors.OsHidError('Expected string type, got {}'.format(cf.CFGetTypeID(type_ref))) # depends on [control=['if'], data=[]] type_ref = ctypes.cast(type_ref, CF_STRING_REF) out = ctypes.create_string_buffer(DEVICE_STRING_PROPERTY_BUFFER_SIZE) ret = cf.CFStringGetCString(type_ref, out, DEVICE_STRING_PROPERTY_BUFFER_SIZE, K_CF_STRING_ENCODING_UTF8) if not ret: return None # depends on [control=['if'], data=[]] return out.value.decode('utf8')
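A usage sketch inside a device-enumeration loop; `dev_ref` is assumed to come from an IOHIDManager device copy, and the 'Product' key mirrors kIOHIDProductKey:

product = GetDeviceStringProperty(dev_ref, 'Product')
if product is not None:
    print('found HID device: %s' % product)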
def visitInlineShapeAnd(self, ctx: ShExDocParser.InlineShapeAndContext): """ inlineShapeAnd: inlineShapeNot (KW_AND inlineShapeNot)* """ if len(ctx.inlineShapeNot()) > 1: self.expr = ShapeAnd(id=self.label, shapeExprs=[]) for sa in ctx.inlineShapeNot(): sep = ShexShapeExpressionParser(self.context) sep.visit(sa) self._and_collapser(self.expr, sep.expr) else: self.visit(ctx.inlineShapeNot(0))
def function[visitInlineShapeAnd, parameter[self, ctx]]: constant[ inlineShapeAnd: inlineShapeNot (KW_AND inlineShapeNot)* ] if compare[call[name[len], parameter[call[name[ctx].inlineShapeNot, parameter[]]]] greater[>] constant[1]] begin[:] name[self].expr assign[=] call[name[ShapeAnd], parameter[]] for taget[name[sa]] in starred[call[name[ctx].inlineShapeNot, parameter[]]] begin[:] variable[sep] assign[=] call[name[ShexShapeExpressionParser], parameter[name[self].context]] call[name[sep].visit, parameter[name[sa]]] call[name[self]._and_collapser, parameter[name[self].expr, name[sep].expr]]
keyword[def] identifier[visitInlineShapeAnd] ( identifier[self] , identifier[ctx] : identifier[ShExDocParser] . identifier[InlineShapeAndContext] ): literal[string] keyword[if] identifier[len] ( identifier[ctx] . identifier[inlineShapeNot] ())> literal[int] : identifier[self] . identifier[expr] = identifier[ShapeAnd] ( identifier[id] = identifier[self] . identifier[label] , identifier[shapeExprs] =[]) keyword[for] identifier[sa] keyword[in] identifier[ctx] . identifier[inlineShapeNot] (): identifier[sep] = identifier[ShexShapeExpressionParser] ( identifier[self] . identifier[context] ) identifier[sep] . identifier[visit] ( identifier[sa] ) identifier[self] . identifier[_and_collapser] ( identifier[self] . identifier[expr] , identifier[sep] . identifier[expr] ) keyword[else] : identifier[self] . identifier[visit] ( identifier[ctx] . identifier[inlineShapeNot] ( literal[int] ))
def visitInlineShapeAnd(self, ctx: ShExDocParser.InlineShapeAndContext): """ inlineShapeAnd: inlineShapeNot (KW_AND inlineShapeNot)* """ if len(ctx.inlineShapeNot()) > 1: self.expr = ShapeAnd(id=self.label, shapeExprs=[]) for sa in ctx.inlineShapeNot(): sep = ShexShapeExpressionParser(self.context) sep.visit(sa) self._and_collapser(self.expr, sep.expr) # depends on [control=['for'], data=['sa']] # depends on [control=['if'], data=[]] else: self.visit(ctx.inlineShapeNot(0))
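A hypothetical illustration of the flattening `_and_collapser` is expected to perform, assuming the ShapeAnd class from the surrounding module: nested ANDs fold into one flat shapeExprs list instead of a tree.

def and_collapse(target, expr):
    # merge a nested ShapeAnd into the parent rather than nesting it
    if isinstance(expr, ShapeAnd):
        target.shapeExprs.extend(expr.shapeExprs)
    else:
        target.shapeExprs.append(expr)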
def zip(self, *args):
    """ Zip together multiple lists into a single array -- elements that share
        an index go together.
    """
    args = list(args)
    args.insert(0, self.obj)
    maxLen = _(args).chain().collect(lambda x, *args: len(x)).max().value()
    for i, v in enumerate(args):
        l = len(args[i])
        if l < maxLen:
            # pad shorter lists with None up to the longest length
            for x in range(maxLen - l):
                args[i].append(None)
    return self._wrap(zip(*args))
def function[zip, parameter[self]]: constant[ Zip together multiple lists into a single array -- elements that share an index go together. ] variable[args] assign[=] call[name[list], parameter[name[args]]] call[name[args].insert, parameter[constant[0], name[self].obj]] variable[maxLen] assign[=] call[call[call[call[call[name[_], parameter[name[args]]].chain, parameter[]].collect, parameter[<ast.Lambda object at 0x7da18fe92da0>]].max, parameter[]].value, parameter[]] for taget[tuple[[<ast.Name object at 0x7da18fe92740>, <ast.Name object at 0x7da18fe937f0>]]] in starred[call[name[enumerate], parameter[name[args]]]] begin[:] variable[l] assign[=] call[name[len], parameter[call[name[args]][name[i]]]] if compare[name[l] less[<] name[maxLen]] begin[:] call[name[args]][name[i]] for taget[name[x]] in starred[call[name[range], parameter[binary_operation[name[maxLen] - name[l]]]]] begin[:] call[call[name[args]][name[i]].append, parameter[constant[None]]] return[call[name[self]._wrap, parameter[call[name[zip], parameter[<ast.Starred object at 0x7da18fe90a90>]]]]]
keyword[def] identifier[zip] ( identifier[self] ,* identifier[args] ): literal[string] identifier[args] = identifier[list] ( identifier[args] ) identifier[args] . identifier[insert] ( literal[int] , identifier[self] . identifier[obj] ) identifier[maxLen] = identifier[_] ( identifier[args] ). identifier[chain] (). identifier[collect] ( keyword[lambda] identifier[x] ,* identifier[args] : identifier[len] ( identifier[x] )). identifier[max] (). identifier[value] () keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[args] ): identifier[l] = identifier[len] ( identifier[args] [ identifier[i] ]) keyword[if] identifier[l] < identifier[maxLen] : identifier[args] [ identifier[i] ] keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[maxLen] - identifier[l] ): identifier[args] [ identifier[i] ]. identifier[append] ( keyword[None] ) keyword[return] identifier[self] . identifier[_wrap] ( identifier[zip] (* identifier[args] ))
def zip(self, *args): """ Zip together multiple lists into a single array -- elements that share an index go together. """ args = list(args) args.insert(0, self.obj) maxLen = _(args).chain().collect(lambda x, *args: len(x)).max().value() for (i, v) in enumerate(args): l = len(args[i]) if l < maxLen: args[i] # depends on [control=['if'], data=[]] for x in range(maxLen - l): args[i].append(None) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return self._wrap(zip(*args))
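A behavior sketch in the underscore.py style the method mirrors; shorter inputs are padded with None before zipping:

print(list(_([1, 2, 3]).zip(['a', 'b']).value()))
# -> [(1, 'a'), (2, 'b'), (3, None)]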
def add(self, function, kind='add', at_pos='end', *args, **kwargs):
    """Add a function to custom processing queue.

    Custom functions are applied automatically to associated
    pysat instrument whenever instrument.load command called.

    Parameters
    ----------
    function : string or function object
        name of function or function object to be added to queue
    kind : {'add', 'modify', 'pass'}
        add
            Adds data returned from function to instrument object.
            A copy of pysat instrument object supplied to routine.
        modify
            pysat instrument object supplied to routine. Any and all
            changes to object are retained.
        pass
            A copy of pysat object is passed to function. No
            data is accepted from return.
    at_pos : string or int
        insert at position. (default, insert at end).
    args : extra arguments
        extra arguments are passed to the custom function (once)
    kwargs : extra keyword arguments
        extra keyword args are passed to the custom function (once)

    Note
    ----
    Allowed `add` function returns:

    - {'data' : pandas Series/DataFrame/array_like,
      'units' : string/array_like of strings,
      'long_name' : string/array_like of strings,
      'name' : string/array_like of strings (iff data array_like)}

    - pandas DataFrame, names of columns are used

    - pandas Series, .name required

    - (string/list of strings, numpy array/list of arrays)

    """
    if isinstance(function, str):
        # convert string to function object; assumes trusted input
        function = eval(function)

    if (at_pos == 'end') or (at_pos == len(self._functions)):
        # store function object
        self._functions.append(function)
        self._args.append(args)
        self._kwargs.append(kwargs)
        self._kind.append(kind.lower())
    elif at_pos < len(self._functions):
        # user picked a specific location to insert
        self._functions.insert(at_pos, function)
        self._args.insert(at_pos, args)
        self._kwargs.insert(at_pos, kwargs)
        self._kind.insert(at_pos, kind.lower())
    else:
        raise TypeError('Must enter an index between 0 and %i' %
                        len(self._functions))
def function[add, parameter[self, function, kind, at_pos]]: constant[Add a function to custom processing queue. Custom functions are applied automatically to associated pysat instrument whenever instrument.load command called. Parameters ---------- function : string or function object name of function or function object to be added to queue kind : {'add', 'modify', 'pass} add Adds data returned from function to instrument object. A copy of pysat instrument object supplied to routine. modify pysat instrument object supplied to routine. Any and all changes to object are retained. pass A copy of pysat object is passed to function. No data is accepted from return. at_pos : string or int insert at position. (default, insert at end). args : extra arguments extra arguments are passed to the custom function (once) kwargs : extra keyword arguments extra keyword args are passed to the custom function (once) Note ---- Allowed `add` function returns: - {'data' : pandas Series/DataFrame/array_like, 'units' : string/array_like of strings, 'long_name' : string/array_like of strings, 'name' : string/array_like of strings (iff data array_like)} - pandas DataFrame, names of columns are used - pandas Series, .name required - (string/list of strings, numpy array/list of arrays) ] if call[name[isinstance], parameter[name[function], name[str]]] begin[:] variable[function] assign[=] call[name[eval], parameter[name[function]]] if binary_operation[compare[name[at_pos] equal[==] constant[end]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[at_pos] equal[==] call[name[len], parameter[name[self]._functions]]]] begin[:] call[name[self]._functions.append, parameter[name[function]]] call[name[self]._args.append, parameter[name[args]]] call[name[self]._kwargs.append, parameter[name[kwargs]]] call[name[self]._kind.append, parameter[call[name[kind].lower, parameter[]]]]
keyword[def] identifier[add] ( identifier[self] , identifier[function] , identifier[kind] = literal[string] , identifier[at_pos] = literal[string] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[isinstance] ( identifier[function] , identifier[str] ): identifier[function] = identifier[eval] ( identifier[function] ) keyword[if] ( identifier[at_pos] == literal[string] )|( identifier[at_pos] == identifier[len] ( identifier[self] . identifier[_functions] )): identifier[self] . identifier[_functions] . identifier[append] ( identifier[function] ) identifier[self] . identifier[_args] . identifier[append] ( identifier[args] ) identifier[self] . identifier[_kwargs] . identifier[append] ( identifier[kwargs] ) identifier[self] . identifier[_kind] . identifier[append] ( identifier[kind] . identifier[lower] ()) keyword[elif] identifier[at_pos] < identifier[len] ( identifier[self] . identifier[_functions] ): identifier[self] . identifier[_functions] . identifier[insert] ( identifier[at_pos] , identifier[function] ) identifier[self] . identifier[_args] . identifier[insert] ( identifier[at_pos] , identifier[args] ) identifier[self] . identifier[_kwargs] . identifier[insert] ( identifier[at_pos] , identifier[kwargs] ) identifier[self] . identifier[_kind] . identifier[insert] ( identifier[at_pos] , identifier[kind] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[len] ( identifier[self] . identifier[_functions] ))
def add(self, function, kind='add', at_pos='end', *args, **kwargs): """Add a function to custom processing queue. Custom functions are applied automatically to associated pysat instrument whenever instrument.load command called. Parameters ---------- function : string or function object name of function or function object to be added to queue kind : {'add', 'modify', 'pass} add Adds data returned from function to instrument object. A copy of pysat instrument object supplied to routine. modify pysat instrument object supplied to routine. Any and all changes to object are retained. pass A copy of pysat object is passed to function. No data is accepted from return. at_pos : string or int insert at position. (default, insert at end). args : extra arguments extra arguments are passed to the custom function (once) kwargs : extra keyword arguments extra keyword args are passed to the custom function (once) Note ---- Allowed `add` function returns: - {'data' : pandas Series/DataFrame/array_like, 'units' : string/array_like of strings, 'long_name' : string/array_like of strings, 'name' : string/array_like of strings (iff data array_like)} - pandas DataFrame, names of columns are used - pandas Series, .name required - (string/list of strings, numpy array/list of arrays) """ if isinstance(function, str): # convert string to function object function = eval(function) # depends on [control=['if'], data=[]] if (at_pos == 'end') | (at_pos == len(self._functions)): # store function object self._functions.append(function) self._args.append(args) self._kwargs.append(kwargs) self._kind.append(kind.lower()) # depends on [control=['if'], data=[]] elif at_pos < len(self._functions): # user picked a specific location to insert self._functions.insert(at_pos, function) self._args.insert(at_pos, args) self._kwargs.insert(at_pos, kwargs) self._kind.insert(at_pos, kind) # depends on [control=['if'], data=['at_pos']] else: raise TypeError('Must enter an index between 0 and %i' % len(self._functions))
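A usage sketch, assuming `inst` is a pysat.Instrument whose data exposes an 'mlt' variable:

def double_mlt(inst):
    inst['double_mlt'] = 2.0 * inst['mlt']

inst.custom.add(double_mlt, 'modify')   # applied automatically on every inst.load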
def QA_SU_save_stock_min(engine, client=DATABASE): """save stock_min Arguments: engine {[type]} -- [description] Keyword Arguments: client {[type]} -- [description] (default: {DATABASE}) """ engine = select_save_engine(engine) engine.QA_SU_save_stock_min(client=client)
def function[QA_SU_save_stock_min, parameter[engine, client]]: constant[save stock_min Arguments: engine {[type]} -- [description] Keyword Arguments: client {[type]} -- [description] (default: {DATABASE}) ] variable[engine] assign[=] call[name[select_save_engine], parameter[name[engine]]] call[name[engine].QA_SU_save_stock_min, parameter[]]
keyword[def] identifier[QA_SU_save_stock_min] ( identifier[engine] , identifier[client] = identifier[DATABASE] ): literal[string] identifier[engine] = identifier[select_save_engine] ( identifier[engine] ) identifier[engine] . identifier[QA_SU_save_stock_min] ( identifier[client] = identifier[client] )
def QA_SU_save_stock_min(engine, client=DATABASE): """save stock_min Arguments: engine {[type]} -- [description] Keyword Arguments: client {[type]} -- [description] (default: {DATABASE}) """ engine = select_save_engine(engine) engine.QA_SU_save_stock_min(client=client)
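A usage sketch; 'tdx' names one of the QUANTAXIS fetch engines, and the default client writes into the configured MongoDB:

QA_SU_save_stock_min('tdx')   # pull minute bars via the tdx engine and persist them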
def back(self): """ Goes to the previous page for this wizard. """ try: pageId = self._navigation[-2] last_page = self.page(pageId) except IndexError: return curr_page = self.page(self._navigation.pop()) if not (last_page and curr_page): return self._currentId = pageId y = curr_page.y() last_page.move(-last_page.width(), y) last_page.show() # animate the last page in anim_in = QtCore.QPropertyAnimation(self) anim_in.setTargetObject(last_page) anim_in.setPropertyName('pos') anim_in.setStartValue(last_page.pos()) anim_in.setEndValue(curr_page.pos()) anim_in.setDuration(self.animationSpeed()) anim_in.setEasingCurve(QtCore.QEasingCurve.Linear) # animate the current page out anim_out = QtCore.QPropertyAnimation(self) anim_out.setTargetObject(curr_page) anim_out.setPropertyName('pos') anim_out.setStartValue(curr_page.pos()) anim_out.setEndValue(QtCore.QPoint(self.width()+curr_page.width(), y)) anim_out.setDuration(self.animationSpeed()) anim_out.setEasingCurve(QtCore.QEasingCurve.Linear) # create the anim group anim_grp = QtCore.QParallelAnimationGroup(self) anim_grp.addAnimation(anim_in) anim_grp.addAnimation(anim_out) anim_grp.finished.connect(curr_page.hide) anim_grp.finished.connect(anim_grp.deleteLater) # update the button states self._buttons[self.WizardButton.BackButton].setVisible(self.canGoBack()) self._buttons[self.WizardButton.NextButton].setVisible(True) self._buttons[self.WizardButton.RetryButton].setVisible(self.canRetry()) self._buttons[self.WizardButton.CommitButton].setVisible(last_page.isCommitPage()) self._buttons[self.WizardButton.FinishButton].setVisible(last_page.isFinalPage()) self.adjustSize() self.currentIdChanged.emit(pageId) anim_grp.start()
def function[back, parameter[self]]: constant[ Goes to the previous page for this wizard. ] <ast.Try object at 0x7da204347310> variable[curr_page] assign[=] call[name[self].page, parameter[call[name[self]._navigation.pop, parameter[]]]] if <ast.UnaryOp object at 0x7da20cabc550> begin[:] return[None] name[self]._currentId assign[=] name[pageId] variable[y] assign[=] call[name[curr_page].y, parameter[]] call[name[last_page].move, parameter[<ast.UnaryOp object at 0x7da20cabc790>, name[y]]] call[name[last_page].show, parameter[]] variable[anim_in] assign[=] call[name[QtCore].QPropertyAnimation, parameter[name[self]]] call[name[anim_in].setTargetObject, parameter[name[last_page]]] call[name[anim_in].setPropertyName, parameter[constant[pos]]] call[name[anim_in].setStartValue, parameter[call[name[last_page].pos, parameter[]]]] call[name[anim_in].setEndValue, parameter[call[name[curr_page].pos, parameter[]]]] call[name[anim_in].setDuration, parameter[call[name[self].animationSpeed, parameter[]]]] call[name[anim_in].setEasingCurve, parameter[name[QtCore].QEasingCurve.Linear]] variable[anim_out] assign[=] call[name[QtCore].QPropertyAnimation, parameter[name[self]]] call[name[anim_out].setTargetObject, parameter[name[curr_page]]] call[name[anim_out].setPropertyName, parameter[constant[pos]]] call[name[anim_out].setStartValue, parameter[call[name[curr_page].pos, parameter[]]]] call[name[anim_out].setEndValue, parameter[call[name[QtCore].QPoint, parameter[binary_operation[call[name[self].width, parameter[]] + call[name[curr_page].width, parameter[]]], name[y]]]]] call[name[anim_out].setDuration, parameter[call[name[self].animationSpeed, parameter[]]]] call[name[anim_out].setEasingCurve, parameter[name[QtCore].QEasingCurve.Linear]] variable[anim_grp] assign[=] call[name[QtCore].QParallelAnimationGroup, parameter[name[self]]] call[name[anim_grp].addAnimation, parameter[name[anim_in]]] call[name[anim_grp].addAnimation, parameter[name[anim_out]]] call[name[anim_grp].finished.connect, parameter[name[curr_page].hide]] call[name[anim_grp].finished.connect, parameter[name[anim_grp].deleteLater]] call[call[name[self]._buttons][name[self].WizardButton.BackButton].setVisible, parameter[call[name[self].canGoBack, parameter[]]]] call[call[name[self]._buttons][name[self].WizardButton.NextButton].setVisible, parameter[constant[True]]] call[call[name[self]._buttons][name[self].WizardButton.RetryButton].setVisible, parameter[call[name[self].canRetry, parameter[]]]] call[call[name[self]._buttons][name[self].WizardButton.CommitButton].setVisible, parameter[call[name[last_page].isCommitPage, parameter[]]]] call[call[name[self]._buttons][name[self].WizardButton.FinishButton].setVisible, parameter[call[name[last_page].isFinalPage, parameter[]]]] call[name[self].adjustSize, parameter[]] call[name[self].currentIdChanged.emit, parameter[name[pageId]]] call[name[anim_grp].start, parameter[]]
keyword[def] identifier[back] ( identifier[self] ): literal[string] keyword[try] : identifier[pageId] = identifier[self] . identifier[_navigation] [- literal[int] ] identifier[last_page] = identifier[self] . identifier[page] ( identifier[pageId] ) keyword[except] identifier[IndexError] : keyword[return] identifier[curr_page] = identifier[self] . identifier[page] ( identifier[self] . identifier[_navigation] . identifier[pop] ()) keyword[if] keyword[not] ( identifier[last_page] keyword[and] identifier[curr_page] ): keyword[return] identifier[self] . identifier[_currentId] = identifier[pageId] identifier[y] = identifier[curr_page] . identifier[y] () identifier[last_page] . identifier[move] (- identifier[last_page] . identifier[width] (), identifier[y] ) identifier[last_page] . identifier[show] () identifier[anim_in] = identifier[QtCore] . identifier[QPropertyAnimation] ( identifier[self] ) identifier[anim_in] . identifier[setTargetObject] ( identifier[last_page] ) identifier[anim_in] . identifier[setPropertyName] ( literal[string] ) identifier[anim_in] . identifier[setStartValue] ( identifier[last_page] . identifier[pos] ()) identifier[anim_in] . identifier[setEndValue] ( identifier[curr_page] . identifier[pos] ()) identifier[anim_in] . identifier[setDuration] ( identifier[self] . identifier[animationSpeed] ()) identifier[anim_in] . identifier[setEasingCurve] ( identifier[QtCore] . identifier[QEasingCurve] . identifier[Linear] ) identifier[anim_out] = identifier[QtCore] . identifier[QPropertyAnimation] ( identifier[self] ) identifier[anim_out] . identifier[setTargetObject] ( identifier[curr_page] ) identifier[anim_out] . identifier[setPropertyName] ( literal[string] ) identifier[anim_out] . identifier[setStartValue] ( identifier[curr_page] . identifier[pos] ()) identifier[anim_out] . identifier[setEndValue] ( identifier[QtCore] . identifier[QPoint] ( identifier[self] . identifier[width] ()+ identifier[curr_page] . identifier[width] (), identifier[y] )) identifier[anim_out] . identifier[setDuration] ( identifier[self] . identifier[animationSpeed] ()) identifier[anim_out] . identifier[setEasingCurve] ( identifier[QtCore] . identifier[QEasingCurve] . identifier[Linear] ) identifier[anim_grp] = identifier[QtCore] . identifier[QParallelAnimationGroup] ( identifier[self] ) identifier[anim_grp] . identifier[addAnimation] ( identifier[anim_in] ) identifier[anim_grp] . identifier[addAnimation] ( identifier[anim_out] ) identifier[anim_grp] . identifier[finished] . identifier[connect] ( identifier[curr_page] . identifier[hide] ) identifier[anim_grp] . identifier[finished] . identifier[connect] ( identifier[anim_grp] . identifier[deleteLater] ) identifier[self] . identifier[_buttons] [ identifier[self] . identifier[WizardButton] . identifier[BackButton] ]. identifier[setVisible] ( identifier[self] . identifier[canGoBack] ()) identifier[self] . identifier[_buttons] [ identifier[self] . identifier[WizardButton] . identifier[NextButton] ]. identifier[setVisible] ( keyword[True] ) identifier[self] . identifier[_buttons] [ identifier[self] . identifier[WizardButton] . identifier[RetryButton] ]. identifier[setVisible] ( identifier[self] . identifier[canRetry] ()) identifier[self] . identifier[_buttons] [ identifier[self] . identifier[WizardButton] . identifier[CommitButton] ]. identifier[setVisible] ( identifier[last_page] . identifier[isCommitPage] ()) identifier[self] . identifier[_buttons] [ identifier[self] . identifier[WizardButton] . identifier[FinishButton] ]. identifier[setVisible] ( identifier[last_page] . identifier[isFinalPage] ()) identifier[self] . identifier[adjustSize] () identifier[self] . identifier[currentIdChanged] . identifier[emit] ( identifier[pageId] ) identifier[anim_grp] . identifier[start] ()
def back(self): """ Goes to the previous page for this wizard. """ try: pageId = self._navigation[-2] last_page = self.page(pageId) # depends on [control=['try'], data=[]] except IndexError: return # depends on [control=['except'], data=[]] curr_page = self.page(self._navigation.pop()) if not (last_page and curr_page): return # depends on [control=['if'], data=[]] self._currentId = pageId y = curr_page.y() last_page.move(-last_page.width(), y) last_page.show() # animate the last page in anim_in = QtCore.QPropertyAnimation(self) anim_in.setTargetObject(last_page) anim_in.setPropertyName('pos') anim_in.setStartValue(last_page.pos()) anim_in.setEndValue(curr_page.pos()) anim_in.setDuration(self.animationSpeed()) anim_in.setEasingCurve(QtCore.QEasingCurve.Linear) # animate the current page out anim_out = QtCore.QPropertyAnimation(self) anim_out.setTargetObject(curr_page) anim_out.setPropertyName('pos') anim_out.setStartValue(curr_page.pos()) anim_out.setEndValue(QtCore.QPoint(self.width() + curr_page.width(), y)) anim_out.setDuration(self.animationSpeed()) anim_out.setEasingCurve(QtCore.QEasingCurve.Linear) # create the anim group anim_grp = QtCore.QParallelAnimationGroup(self) anim_grp.addAnimation(anim_in) anim_grp.addAnimation(anim_out) anim_grp.finished.connect(curr_page.hide) anim_grp.finished.connect(anim_grp.deleteLater) # update the button states self._buttons[self.WizardButton.BackButton].setVisible(self.canGoBack()) self._buttons[self.WizardButton.NextButton].setVisible(True) self._buttons[self.WizardButton.RetryButton].setVisible(self.canRetry()) self._buttons[self.WizardButton.CommitButton].setVisible(last_page.isCommitPage()) self._buttons[self.WizardButton.FinishButton].setVisible(last_page.isFinalPage()) self.adjustSize() self.currentIdChanged.emit(pageId) anim_grp.start()
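A minimal usage sketch for back(): it pops the navigation stack and animates the previous page in. All names below are hypothetical, and a running Qt event loop plus a wizard subclass exposing this method are assumed:

    wiz = SetupWizard()   # hypothetical subclass providing back()
    wiz.show()
    wiz.next()            # hypothetical forward step; leaves two ids on _navigation
    wiz.back()            # animates the previous page in and emits currentIdChanged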
def track_count(self, unique_identifier, metric, inc_amt=1, **kwargs):
    """
    Tracks a metric just by count. If you track a metric this way, you won't
    be able to query the metric by day, week or month.

    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param inc_amt: The amount you want to increment the ``metric`` for the
    ``unique_identifier``
    :return: ``True`` if successful, ``False`` otherwise
    """
    return self._analytics_backend.incr(self._prefix + ":" + "analy:%s:count:%s" % (unique_identifier, metric), inc_amt)
def function[track_count, parameter[self, unique_identifier, metric, inc_amt]]: constant[ Tracks a metric just by count. If you track a metric this way, you won't be able to query the metric by day, week or month. :param unique_identifier: Unique string identifying the object this metric is for :param metric: A unique name for the metric you want to track :param inc_amt: The amount you want to increment the ``metric`` for the ``unique_identifier`` :return: ``True`` if successful, ``False`` otherwise ] return[call[name[self]._analytics_backend.incr, parameter[binary_operation[binary_operation[name[self]._prefix + constant[:]] + binary_operation[constant[analy:%s:count:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e9b1870>, <ast.Name object at 0x7da20e9b2500>]]]], name[inc_amt]]]]
keyword[def] identifier[track_count] ( identifier[self] , identifier[unique_identifier] , identifier[metric] , identifier[inc_amt] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_analytics_backend] . identifier[incr] ( identifier[self] . identifier[_prefix] + literal[string] + literal[string] %( identifier[unique_identifier] , identifier[metric] ), identifier[inc_amt] )
def track_count(self, unique_identifier, metric, inc_amt=1, **kwargs):
    """
    Tracks a metric just by count. If you track a metric this way, you won't
    be able to query the metric by day, week or month.

    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param inc_amt: The amount you want to increment the ``metric`` for the
    ``unique_identifier``
    :return: ``True`` if successful, ``False`` otherwise
    """
    return self._analytics_backend.incr(self._prefix + ':' + 'analy:%s:count:%s' % (unique_identifier, metric), inc_amt)
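A hedged usage sketch for track_count(); the analytics object and its Redis-like backend are assumptions, not shown above:

    analytics.track_count("user:42", "logins")                # increments by the default of 1
    analytics.track_count("user:42", "api_calls", inc_amt=5)  # bulk increment
    # the counter lives under a key shaped like "<prefix>:analy:user:42:count:logins"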
def __add_variables(self, *args, **kwargs): """ Adds given variables to __variables attribute. :param \*args: Variables. :type \*args: \* :param \*\*kwargs: Variables : Values. :type \*\*kwargs: \* """ for variable in args: self.__variables[variable] = None self.__variables.update(kwargs)
def function[__add_variables, parameter[self]]: constant[ Adds given variables to __variables attribute. :param \*args: Variables. :type \*args: \* :param \*\*kwargs: Variables : Values. :type \*\*kwargs: \* ] for taget[name[variable]] in starred[name[args]] begin[:] call[name[self].__variables][name[variable]] assign[=] constant[None] call[name[self].__variables.update, parameter[name[kwargs]]]
keyword[def] identifier[__add_variables] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[variable] keyword[in] identifier[args] : identifier[self] . identifier[__variables] [ identifier[variable] ]= keyword[None] identifier[self] . identifier[__variables] . identifier[update] ( identifier[kwargs] )
def __add_variables(self, *args, **kwargs): """ Adds given variables to __variables attribute. :param \\*args: Variables. :type \\*args: \\* :param \\*\\*kwargs: Variables : Values. :type \\*\\*kwargs: \\* """ for variable in args: self.__variables[variable] = None # depends on [control=['for'], data=['variable']] self.__variables.update(kwargs)
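Because of name mangling, __add_variables is normally called from inside its own class; a minimal sketch (class context assumed):

    self.__add_variables("width", "height")      # each variable registered with value None
    self.__add_variables(color="red", depth=3)   # registered with the given values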
def progressive(image_field, alt_text=''): """ Used as a Jinja2 filter, this function returns a safe HTML chunk. Usage (in the HTML template): {{ obj.image|progressive }} :param django.db.models.fields.files.ImageFieldFile image_field: image :param str alt_text: str :return: a safe HTML template ready to be rendered """ if not isinstance(image_field, ImageFieldFile): raise ValueError('"image_field" argument must be an ImageField.') for engine in engines.all(): if isinstance(engine, BaseEngine) and hasattr(engine, 'env'): env = engine.env if isinstance(env, Environment): context = render_progressive_field(image_field, alt_text) template = env.get_template( 'progressiveimagefield/render_field.html' ) rendered = template.render(**context) return Markup(rendered) return ''
def function[progressive, parameter[image_field, alt_text]]: constant[ Used as a Jinja2 filter, this function returns a safe HTML chunk. Usage (in the HTML template): {{ obj.image|progressive }} :param django.db.models.fields.files.ImageFieldFile image_field: image :param str alt_text: str :return: a safe HTML template ready to be rendered ] if <ast.UnaryOp object at 0x7da1b25efdf0> begin[:] <ast.Raise object at 0x7da1b25ec2b0> for taget[name[engine]] in starred[call[name[engines].all, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b25ed000> begin[:] variable[env] assign[=] name[engine].env if call[name[isinstance], parameter[name[env], name[Environment]]] begin[:] variable[context] assign[=] call[name[render_progressive_field], parameter[name[image_field], name[alt_text]]] variable[template] assign[=] call[name[env].get_template, parameter[constant[progressiveimagefield/render_field.html]]] variable[rendered] assign[=] call[name[template].render, parameter[]] return[call[name[Markup], parameter[name[rendered]]]] return[constant[]]
keyword[def] identifier[progressive] ( identifier[image_field] , identifier[alt_text] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[image_field] , identifier[ImageFieldFile] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[engine] keyword[in] identifier[engines] . identifier[all] (): keyword[if] identifier[isinstance] ( identifier[engine] , identifier[BaseEngine] ) keyword[and] identifier[hasattr] ( identifier[engine] , literal[string] ): identifier[env] = identifier[engine] . identifier[env] keyword[if] identifier[isinstance] ( identifier[env] , identifier[Environment] ): identifier[context] = identifier[render_progressive_field] ( identifier[image_field] , identifier[alt_text] ) identifier[template] = identifier[env] . identifier[get_template] ( literal[string] ) identifier[rendered] = identifier[template] . identifier[render] (** identifier[context] ) keyword[return] identifier[Markup] ( identifier[rendered] ) keyword[return] literal[string]
def progressive(image_field, alt_text=''): """ Used as a Jinja2 filter, this function returns a safe HTML chunk. Usage (in the HTML template): {{ obj.image|progressive }} :param django.db.models.fields.files.ImageFieldFile image_field: image :param str alt_text: str :return: a safe HTML template ready to be rendered """ if not isinstance(image_field, ImageFieldFile): raise ValueError('"image_field" argument must be an ImageField.') # depends on [control=['if'], data=[]] for engine in engines.all(): if isinstance(engine, BaseEngine) and hasattr(engine, 'env'): env = engine.env if isinstance(env, Environment): context = render_progressive_field(image_field, alt_text) template = env.get_template('progressiveimagefield/render_field.html') rendered = template.render(**context) return Markup(rendered) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['engine']] return ''
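To be usable as {{ obj.image|progressive }}, the function must be registered on the Jinja2 environment; a sketch, assuming env is the jinja2.Environment behind one of Django's configured template engines:

    env.filters["progressive"] = progressive
    # then, in a template rendered by that engine:
    #   {{ obj.image|progressive("descriptive alt text") }}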
def getMappingsOnThingTypeForLogicalInterface(self, thingTypeId, logicalInterfaceId, draft=False): """ Gets the mappings for a logical interface from a thing type. Parameters: - thingTypeId (string) - the thing type - logicalInterfaceId (string) - the platform returned id of the logical interface Throws APIException on failure. """ if draft: req = ApiClient.oneThingTypeMappingUrl % (self.host, "/draft", thingTypeId, logicalInterfaceId) else: req = ApiClient.oneThingTypeMappingUrl % (self.host, "", thingTypeId, logicalInterfaceId) resp = requests.get(req, auth=self.credentials, verify=self.verify) if resp.status_code == 200: self.logger.debug("Mappings retrieved from the thing type") else: raise ibmiotf.APIException(resp.status_code, "HTTP error getting mappings for a logical interface from a thing type", resp) return resp.json()
def function[getMappingsOnThingTypeForLogicalInterface, parameter[self, thingTypeId, logicalInterfaceId, draft]]: constant[ Gets the mappings for a logical interface from a thing type. Parameters: - thingTypeId (string) - the thing type - logicalInterfaceId (string) - the platform returned id of the logical interface Throws APIException on failure. ] if name[draft] begin[:] variable[req] assign[=] binary_operation[name[ApiClient].oneThingTypeMappingUrl <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da2041dacb0>, <ast.Constant object at 0x7da2041da890>, <ast.Name object at 0x7da2041da530>, <ast.Name object at 0x7da2041d9c90>]]] variable[resp] assign[=] call[name[requests].get, parameter[name[req]]] if compare[name[resp].status_code equal[==] constant[200]] begin[:] call[name[self].logger.debug, parameter[constant[Mappings retrieved from the thing type]]] return[call[name[resp].json, parameter[]]]
keyword[def] identifier[getMappingsOnThingTypeForLogicalInterface] ( identifier[self] , identifier[thingTypeId] , identifier[logicalInterfaceId] , identifier[draft] = keyword[False] ): literal[string] keyword[if] identifier[draft] : identifier[req] = identifier[ApiClient] . identifier[oneThingTypeMappingUrl] %( identifier[self] . identifier[host] , literal[string] , identifier[thingTypeId] , identifier[logicalInterfaceId] ) keyword[else] : identifier[req] = identifier[ApiClient] . identifier[oneThingTypeMappingUrl] %( identifier[self] . identifier[host] , literal[string] , identifier[thingTypeId] , identifier[logicalInterfaceId] ) identifier[resp] = identifier[requests] . identifier[get] ( identifier[req] , identifier[auth] = identifier[self] . identifier[credentials] , identifier[verify] = identifier[self] . identifier[verify] ) keyword[if] identifier[resp] . identifier[status_code] == literal[int] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) keyword[else] : keyword[raise] identifier[ibmiotf] . identifier[APIException] ( identifier[resp] . identifier[status_code] , literal[string] , identifier[resp] ) keyword[return] identifier[resp] . identifier[json] ()
def getMappingsOnThingTypeForLogicalInterface(self, thingTypeId, logicalInterfaceId, draft=False): """ Gets the mappings for a logical interface from a thing type. Parameters: - thingTypeId (string) - the thing type - logicalInterfaceId (string) - the platform returned id of the logical interface Throws APIException on failure. """ if draft: req = ApiClient.oneThingTypeMappingUrl % (self.host, '/draft', thingTypeId, logicalInterfaceId) # depends on [control=['if'], data=[]] else: req = ApiClient.oneThingTypeMappingUrl % (self.host, '', thingTypeId, logicalInterfaceId) resp = requests.get(req, auth=self.credentials, verify=self.verify) if resp.status_code == 200: self.logger.debug('Mappings retrieved from the thing type') # depends on [control=['if'], data=[]] else: raise ibmiotf.APIException(resp.status_code, 'HTTP error getting mappings for a logical interface from a thing type', resp) return resp.json()
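A usage sketch; the ApiClient construction and both ids are hypothetical:

    api = ApiClient(options)   # hypothetical: options holds org, API key/token, etc.
    mappings = api.getMappingsOnThingTypeForLogicalInterface(
        "myThingType", "5846cd7c6522050001db0e12", draft=True)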
def gng_importer(self, corpus_file): """Fill in self.ngcorpus from a Google NGram corpus file. Parameters ---------- corpus_file : file The Google NGram file from which to initialize the n-gram corpus """ with c_open(corpus_file, 'r', encoding='utf-8') as gng: for line in gng: line = line.rstrip().split('\t') words = line[0].split() self._add_to_ngcorpus(self.ngcorpus, words, int(line[2]))
def function[gng_importer, parameter[self, corpus_file]]: constant[Fill in self.ngcorpus from a Google NGram corpus file. Parameters ---------- corpus_file : file The Google NGram file from which to initialize the n-gram corpus ] with call[name[c_open], parameter[name[corpus_file], constant[r]]] begin[:] for taget[name[line]] in starred[name[gng]] begin[:] variable[line] assign[=] call[call[name[line].rstrip, parameter[]].split, parameter[constant[ ]]] variable[words] assign[=] call[call[name[line]][constant[0]].split, parameter[]] call[name[self]._add_to_ngcorpus, parameter[name[self].ngcorpus, name[words], call[name[int], parameter[call[name[line]][constant[2]]]]]]
keyword[def] identifier[gng_importer] ( identifier[self] , identifier[corpus_file] ): literal[string] keyword[with] identifier[c_open] ( identifier[corpus_file] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[gng] : keyword[for] identifier[line] keyword[in] identifier[gng] : identifier[line] = identifier[line] . identifier[rstrip] (). identifier[split] ( literal[string] ) identifier[words] = identifier[line] [ literal[int] ]. identifier[split] () identifier[self] . identifier[_add_to_ngcorpus] ( identifier[self] . identifier[ngcorpus] , identifier[words] , identifier[int] ( identifier[line] [ literal[int] ]))
def gng_importer(self, corpus_file): """Fill in self.ngcorpus from a Google NGram corpus file. Parameters ---------- corpus_file : file The Google NGram file from which to initialize the n-gram corpus """ with c_open(corpus_file, 'r', encoding='utf-8') as gng: for line in gng: line = line.rstrip().split('\t') words = line[0].split() self._add_to_ngcorpus(self.ngcorpus, words, int(line[2])) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['gng']]
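Note that despite the docstring's "file" wording, the method opens corpus_file itself via c_open, so in practice a path is passed. A sketch with a hypothetical corpus object and file name:

    # columns are tab-separated: ngram, year, match_count, volume_count
    corpus.gng_importer("googlebooks-eng-1gram-sample.tsv")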
def download(ctx, help: bool, symbol: str, namespace: str, agent: str, currency: str): """ Download the latest prices """ if help: click.echo(ctx.get_help()) ctx.exit() app = PriceDbApplication() app.logger = logger if currency: currency = currency.strip() currency = currency.upper() # Otherwise download the prices for securities listed in the database. app.download_prices(currency=currency, agent=agent, symbol=symbol, namespace=namespace)
def function[download, parameter[ctx, help, symbol, namespace, agent, currency]]: constant[ Download the latest prices ] if name[help] begin[:] call[name[click].echo, parameter[call[name[ctx].get_help, parameter[]]]] call[name[ctx].exit, parameter[]] variable[app] assign[=] call[name[PriceDbApplication], parameter[]] name[app].logger assign[=] name[logger] if name[currency] begin[:] variable[currency] assign[=] call[name[currency].strip, parameter[]] variable[currency] assign[=] call[name[currency].upper, parameter[]] call[name[app].download_prices, parameter[]]
keyword[def] identifier[download] ( identifier[ctx] , identifier[help] : identifier[bool] , identifier[symbol] : identifier[str] , identifier[namespace] : identifier[str] , identifier[agent] : identifier[str] , identifier[currency] : identifier[str] ): literal[string] keyword[if] identifier[help] : identifier[click] . identifier[echo] ( identifier[ctx] . identifier[get_help] ()) identifier[ctx] . identifier[exit] () identifier[app] = identifier[PriceDbApplication] () identifier[app] . identifier[logger] = identifier[logger] keyword[if] identifier[currency] : identifier[currency] = identifier[currency] . identifier[strip] () identifier[currency] = identifier[currency] . identifier[upper] () identifier[app] . identifier[download_prices] ( identifier[currency] = identifier[currency] , identifier[agent] = identifier[agent] , identifier[symbol] = identifier[symbol] , identifier[namespace] = identifier[namespace] )
def download(ctx, help: bool, symbol: str, namespace: str, agent: str, currency: str): """ Download the latest prices """ if help: click.echo(ctx.get_help()) ctx.exit() # depends on [control=['if'], data=[]] app = PriceDbApplication() app.logger = logger if currency: currency = currency.strip() currency = currency.upper() # depends on [control=['if'], data=[]] # Otherwise download the prices for securities listed in the database. app.download_prices(currency=currency, agent=agent, symbol=symbol, namespace=namespace)
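download is a Click command, so it is normally driven from the CLI; a programmatic sketch via Click's test runner (option names and values are assumptions inferred from the parameters):

    from click.testing import CliRunner
    result = CliRunner().invoke(download, ["--currency", "eur", "--agent", "yahoo_finance"])
    print(result.output)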
def from_robot(cls, robot, **kwargs): """Construct a Nearest Neighbor forward model from an existing dataset.""" m = cls(len(robot.m_feats), len(robot.s_feats), **kwargs) return m
def function[from_robot, parameter[cls, robot]]: constant[Construct a Nearest Neighbor forward model from an existing dataset.] variable[m] assign[=] call[name[cls], parameter[call[name[len], parameter[name[robot].m_feats]], call[name[len], parameter[name[robot].s_feats]]]] return[name[m]]
keyword[def] identifier[from_robot] ( identifier[cls] , identifier[robot] ,** identifier[kwargs] ): literal[string] identifier[m] = identifier[cls] ( identifier[len] ( identifier[robot] . identifier[m_feats] ), identifier[len] ( identifier[robot] . identifier[s_feats] ),** identifier[kwargs] ) keyword[return] identifier[m]
def from_robot(cls, robot, **kwargs): """Construct a Nearest Neighbor forward model from an existing dataset.""" m = cls(len(robot.m_feats), len(robot.s_feats), **kwargs) return m
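A sketch of the alternate constructor; the class name, the kwarg, and the robot object (anything exposing m_feats and s_feats sequences) are all hypothetical:

    model = NearestNeighborModel.from_robot(robot, k=5)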
def _estimate_count(self): """ Update the count number using the estimation of the unset ratio """ if self.estimate_z == 0: self.estimate_z = (1.0 / self.nbr_bits) self.estimate_z = min(self.estimate_z, 0.999999) self.count = int(-(self.nbr_bits / self.nbr_slices) * np.log(1 - self.estimate_z))
def function[_estimate_count, parameter[self]]: constant[ Update the count number using the estimation of the unset ratio ] if compare[name[self].estimate_z equal[==] constant[0]] begin[:] name[self].estimate_z assign[=] binary_operation[constant[1.0] / name[self].nbr_bits] name[self].estimate_z assign[=] call[name[min], parameter[name[self].estimate_z, constant[0.999999]]] name[self].count assign[=] call[name[int], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b196b250> * call[name[np].log, parameter[binary_operation[constant[1] - name[self].estimate_z]]]]]]
keyword[def] identifier[_estimate_count] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[estimate_z] == literal[int] : identifier[self] . identifier[estimate_z] =( literal[int] / identifier[self] . identifier[nbr_bits] ) identifier[self] . identifier[estimate_z] = identifier[min] ( identifier[self] . identifier[estimate_z] , literal[int] ) identifier[self] . identifier[count] = identifier[int] (-( identifier[self] . identifier[nbr_bits] / identifier[self] . identifier[nbr_slices] )* identifier[np] . identifier[log] ( literal[int] - identifier[self] . identifier[estimate_z] ))
def _estimate_count(self): """ Update the count number using the estimation of the unset ratio """ if self.estimate_z == 0: self.estimate_z = 1.0 / self.nbr_bits # depends on [control=['if'], data=[]] self.estimate_z = min(self.estimate_z, 0.999999) self.count = int(-(self.nbr_bits / self.nbr_slices) * np.log(1 - self.estimate_z))
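The update is the standard linear-counting estimate, count ~= -(m / k) * ln(1 - z), with m = nbr_bits, k = nbr_slices and z the estimated fill ratio; z is floored at 1/m and capped just below 1 so the logarithm stays finite. A quick numeric check with illustrative values:

    import numpy as np
    nbr_bits, nbr_slices, z = 1024, 4, 0.30
    print(int(-(nbr_bits / nbr_slices) * np.log(1 - z)))   # -> 91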
def disconnect(self, forced=False):
    """
    Given the pipeline topology disconnects ``Pipers`` in the order output ->
    input. This also disconnects inputs. See ``Dagger.connect``,
    ``Piper.connect`` and ``Piper.disconnect``. If "forced" is ``True``,
    ``NuMap`` instances will be emptied.

    Arguments:

    - forced(``bool``) [default: ``False``] If set ``True``, all tasks from all
      ``NuMaps`` instances used in the ``Dagger`` will be removed even if they
      did not belong to this ``Dagger``.

    """
    reversed_postorder = reversed(self.postorder())
    self.log.debug('%s trying to disconnect in the order %s' % \
                   (repr(self), repr(reversed_postorder)))
    for piper in reversed_postorder:
        if piper.connected: # we don't want to trigger an exception
            piper.disconnect(forced)
    self.log.debug('%s successfully disconnected' % repr(self))
def function[disconnect, parameter[self, forced]]: constant[ Given the pipeline topology disconnects ``Pipers`` in the order output -> input. This also disconnects inputs. See ``Dagger.connect``, ``Piper.connect`` and ``Piper.disconnect``. If "forced" is ``True``, ``NuMap`` instances will be emptied. Arguments: - forced(``bool``) [default: ``False``] If set ``True``, all tasks from all ``NuMaps`` instances used in the ``Dagger`` will be removed even if they did not belong to this ``Dagger``. ] variable[reversed_postorder] assign[=] call[name[reversed], parameter[call[name[self].postorder, parameter[]]]] call[name[self].log.debug, parameter[binary_operation[constant[%s trying to disconnect in the order %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b257cca0>, <ast.Call object at 0x7da1b257e410>]]]]] for taget[name[piper]] in starred[name[reversed_postorder]] begin[:] if name[piper].connected begin[:] call[name[piper].disconnect, parameter[name[forced]]] call[name[self].log.debug, parameter[binary_operation[constant[%s successfully disconnected] <ast.Mod object at 0x7da2590d6920> call[name[repr], parameter[name[self]]]]]]
keyword[def] identifier[disconnect] ( identifier[self] , identifier[forced] = keyword[False] ): literal[string] identifier[reversed_postorder] = identifier[reversed] ( identifier[self] . identifier[postorder] ()) identifier[self] . identifier[log] . identifier[debug] ( literal[string] %( identifier[repr] ( identifier[self] ), identifier[repr] ( identifier[reversed_postorder] ))) keyword[for] identifier[piper] keyword[in] identifier[reversed_postorder] : keyword[if] identifier[piper] . identifier[connected] : identifier[piper] . identifier[disconnect] ( identifier[forced] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] % identifier[repr] ( identifier[self] ))
def disconnect(self, forced=False):
    """
    Given the pipeline topology disconnects ``Pipers`` in the order output ->
    input. This also disconnects inputs. See ``Dagger.connect``,
    ``Piper.connect`` and ``Piper.disconnect``. If "forced" is ``True``,
    ``NuMap`` instances will be emptied.

    Arguments:

    - forced(``bool``) [default: ``False``] If set ``True``, all tasks from all
      ``NuMaps`` instances used in the ``Dagger`` will be removed even if they
      did not belong to this ``Dagger``.

    """
    reversed_postorder = reversed(self.postorder())
    self.log.debug('%s trying to disconnect in the order %s' % (repr(self), repr(reversed_postorder)))
    for piper in reversed_postorder:
        if piper.connected: # we don't want to trigger an exception
            piper.disconnect(forced) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['piper']]
    self.log.debug('%s successfully disconnected' % repr(self))
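A usage sketch; dagger is a hypothetical Dagger whose pipers were connected earlier:

    dagger.disconnect()             # ordinary teardown, output -> input order
    dagger.disconnect(forced=True)  # additionally empties the shared NuMap task queues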
def ParseForwardedIps(self, forwarded_ips): """Parse and validate forwarded IP addresses. Args: forwarded_ips: list, the IP address strings to parse. Returns: list, the valid IP address strings. """ addresses = [] forwarded_ips = forwarded_ips or [] for ip in forwarded_ips: if ip and (IP_REGEX.match(ip) or IP_ALIAS_REGEX.match(ip)): addresses.extend([str(addr) for addr in list(netaddr.IPNetwork(ip))]) else: self.logger.warning('Could not parse IP address: "%s".', ip) return addresses
def function[ParseForwardedIps, parameter[self, forwarded_ips]]: constant[Parse and validate forwarded IP addresses. Args: forwarded_ips: list, the IP address strings to parse. Returns: list, the valid IP address strings. ] variable[addresses] assign[=] list[[]] variable[forwarded_ips] assign[=] <ast.BoolOp object at 0x7da1b1736e00> for taget[name[ip]] in starred[name[forwarded_ips]] begin[:] if <ast.BoolOp object at 0x7da1b1734970> begin[:] call[name[addresses].extend, parameter[<ast.ListComp object at 0x7da2047eb820>]] return[name[addresses]]
keyword[def] identifier[ParseForwardedIps] ( identifier[self] , identifier[forwarded_ips] ): literal[string] identifier[addresses] =[] identifier[forwarded_ips] = identifier[forwarded_ips] keyword[or] [] keyword[for] identifier[ip] keyword[in] identifier[forwarded_ips] : keyword[if] identifier[ip] keyword[and] ( identifier[IP_REGEX] . identifier[match] ( identifier[ip] ) keyword[or] identifier[IP_ALIAS_REGEX] . identifier[match] ( identifier[ip] )): identifier[addresses] . identifier[extend] ([ identifier[str] ( identifier[addr] ) keyword[for] identifier[addr] keyword[in] identifier[list] ( identifier[netaddr] . identifier[IPNetwork] ( identifier[ip] ))]) keyword[else] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] , identifier[ip] ) keyword[return] identifier[addresses]
def ParseForwardedIps(self, forwarded_ips): """Parse and validate forwarded IP addresses. Args: forwarded_ips: list, the IP address strings to parse. Returns: list, the valid IP address strings. """ addresses = [] forwarded_ips = forwarded_ips or [] for ip in forwarded_ips: if ip and (IP_REGEX.match(ip) or IP_ALIAS_REGEX.match(ip)): addresses.extend([str(addr) for addr in list(netaddr.IPNetwork(ip))]) # depends on [control=['if'], data=[]] else: self.logger.warning('Could not parse IP address: "%s".', ip) # depends on [control=['for'], data=['ip']] return addresses
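A sketch showing how CIDR-style aliases expand into individual addresses (an instance with a configured logger is assumed):

    ips = handler.ParseForwardedIps(["10.0.0.5", "10.0.0.8/31", "not-an-ip"])
    # -> ["10.0.0.5", "10.0.0.8", "10.0.0.9"]; the malformed entry is logged and skipped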
def run(align_bams, items, ref_file, assoc_files, region=None, out_file=None): """Run tumor only smCounter2 calling. """ paired = vcfutils.get_paired_bams(align_bams, items) assert paired and not paired.normal_bam, ("smCounter2 supports tumor-only variant calling: %s" % (",".join([dd.get_sample_name(d) for d in items]))) vrs = bedutils.population_variant_regions(items) target = shared.subset_variant_regions(vrs, region, out_file, items=items, do_merge=True) out_file = out_file.replace(".vcf.gz", ".vcf") out_prefix = utils.splitext_plus(os.path.basename(out_file))[0] if not utils.file_exists(out_file) and not utils.file_exists(out_file + ".gz"): with file_transaction(paired.tumor_data, out_file) as tx_out_file: cmd = ["smCounter2", "--runPath", os.path.dirname(tx_out_file), "--outPrefix", out_prefix, "--bedTarget", target, "--refGenome", ref_file, "--bamFile", paired.tumor_bam, "--bamType", "consensus", "--nCPU", dd.get_num_cores(paired.tumor_data)] do.run(cmd, "smcounter2 variant calling") for fname in glob.glob(os.path.join(os.path.dirname(tx_out_file), "*.smCounter*")): shutil.move(fname, os.path.join(os.path.dirname(out_file), os.path.basename(fname))) utils.symlink_plus(os.path.join(os.path.dirname(out_file), "%s.smCounter.cut.vcf" % out_prefix), out_file) return vcfutils.bgzip_and_index(out_file, paired.tumor_data["config"], remove_orig=False, prep_cmd="sed 's#FORMAT\t%s#FORMAT\t%s#' | %s" % (out_prefix, dd.get_sample_name(paired.tumor_data), vcfutils.add_contig_to_header_cl(dd.get_ref_file(paired.tumor_data), out_file)))
def function[run, parameter[align_bams, items, ref_file, assoc_files, region, out_file]]: constant[Run tumor only smCounter2 calling. ] variable[paired] assign[=] call[name[vcfutils].get_paired_bams, parameter[name[align_bams], name[items]]] assert[<ast.BoolOp object at 0x7da1b18d8880>] variable[vrs] assign[=] call[name[bedutils].population_variant_regions, parameter[name[items]]] variable[target] assign[=] call[name[shared].subset_variant_regions, parameter[name[vrs], name[region], name[out_file]]] variable[out_file] assign[=] call[name[out_file].replace, parameter[constant[.vcf.gz], constant[.vcf]]] variable[out_prefix] assign[=] call[call[name[utils].splitext_plus, parameter[call[name[os].path.basename, parameter[name[out_file]]]]]][constant[0]] if <ast.BoolOp object at 0x7da1b18da2f0> begin[:] with call[name[file_transaction], parameter[name[paired].tumor_data, name[out_file]]] begin[:] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b18d9690>, <ast.Constant object at 0x7da1b18dacb0>, <ast.Call object at 0x7da1b18d9150>, <ast.Constant object at 0x7da1b18da4a0>, <ast.Name object at 0x7da1b18da530>, <ast.Constant object at 0x7da1b18d9030>, <ast.Name object at 0x7da1b18d9a50>, <ast.Constant object at 0x7da1b18d84f0>, <ast.Name object at 0x7da1b18da320>, <ast.Constant object at 0x7da1b18d9a20>, <ast.Attribute object at 0x7da1b18da6b0>, <ast.Constant object at 0x7da1b18d8c40>, <ast.Constant object at 0x7da1b18d8460>, <ast.Constant object at 0x7da1b18d9bd0>, <ast.Call object at 0x7da1b18d9e10>]] call[name[do].run, parameter[name[cmd], constant[smcounter2 variant calling]]] for taget[name[fname]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[tx_out_file]]], constant[*.smCounter*]]]]]] begin[:] call[name[shutil].move, parameter[name[fname], call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[out_file]]], call[name[os].path.basename, parameter[name[fname]]]]]]] call[name[utils].symlink_plus, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[out_file]]], binary_operation[constant[%s.smCounter.cut.vcf] <ast.Mod object at 0x7da2590d6920> name[out_prefix]]]], name[out_file]]] return[call[name[vcfutils].bgzip_and_index, parameter[name[out_file], call[name[paired].tumor_data][constant[config]]]]]
keyword[def] identifier[run] ( identifier[align_bams] , identifier[items] , identifier[ref_file] , identifier[assoc_files] , identifier[region] = keyword[None] , identifier[out_file] = keyword[None] ): literal[string] identifier[paired] = identifier[vcfutils] . identifier[get_paired_bams] ( identifier[align_bams] , identifier[items] ) keyword[assert] identifier[paired] keyword[and] keyword[not] identifier[paired] . identifier[normal_bam] ,( literal[string] % ( literal[string] . identifier[join] ([ identifier[dd] . identifier[get_sample_name] ( identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[items] ]))) identifier[vrs] = identifier[bedutils] . identifier[population_variant_regions] ( identifier[items] ) identifier[target] = identifier[shared] . identifier[subset_variant_regions] ( identifier[vrs] , identifier[region] , identifier[out_file] , identifier[items] = identifier[items] , identifier[do_merge] = keyword[True] ) identifier[out_file] = identifier[out_file] . identifier[replace] ( literal[string] , literal[string] ) identifier[out_prefix] = identifier[utils] . identifier[splitext_plus] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[out_file] ))[ literal[int] ] keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] ) keyword[and] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] + literal[string] ): keyword[with] identifier[file_transaction] ( identifier[paired] . identifier[tumor_data] , identifier[out_file] ) keyword[as] identifier[tx_out_file] : identifier[cmd] =[ literal[string] , literal[string] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[tx_out_file] ), literal[string] , identifier[out_prefix] , literal[string] , identifier[target] , literal[string] , identifier[ref_file] , literal[string] , identifier[paired] . identifier[tumor_bam] , literal[string] , literal[string] , literal[string] , identifier[dd] . identifier[get_num_cores] ( identifier[paired] . identifier[tumor_data] )] identifier[do] . identifier[run] ( identifier[cmd] , literal[string] ) keyword[for] identifier[fname] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[tx_out_file] ), literal[string] )): identifier[shutil] . identifier[move] ( identifier[fname] , identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[out_file] ), identifier[os] . identifier[path] . identifier[basename] ( identifier[fname] ))) identifier[utils] . identifier[symlink_plus] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[out_file] ), literal[string] % identifier[out_prefix] ), identifier[out_file] ) keyword[return] identifier[vcfutils] . identifier[bgzip_and_index] ( identifier[out_file] , identifier[paired] . identifier[tumor_data] [ literal[string] ], identifier[remove_orig] = keyword[False] , identifier[prep_cmd] = literal[string] % ( identifier[out_prefix] , identifier[dd] . identifier[get_sample_name] ( identifier[paired] . identifier[tumor_data] ), identifier[vcfutils] . identifier[add_contig_to_header_cl] ( identifier[dd] . identifier[get_ref_file] ( identifier[paired] . identifier[tumor_data] ), identifier[out_file] )))
def run(align_bams, items, ref_file, assoc_files, region=None, out_file=None): """Run tumor only smCounter2 calling. """ paired = vcfutils.get_paired_bams(align_bams, items) assert paired and (not paired.normal_bam), 'smCounter2 supports tumor-only variant calling: %s' % ','.join([dd.get_sample_name(d) for d in items]) vrs = bedutils.population_variant_regions(items) target = shared.subset_variant_regions(vrs, region, out_file, items=items, do_merge=True) out_file = out_file.replace('.vcf.gz', '.vcf') out_prefix = utils.splitext_plus(os.path.basename(out_file))[0] if not utils.file_exists(out_file) and (not utils.file_exists(out_file + '.gz')): with file_transaction(paired.tumor_data, out_file) as tx_out_file: cmd = ['smCounter2', '--runPath', os.path.dirname(tx_out_file), '--outPrefix', out_prefix, '--bedTarget', target, '--refGenome', ref_file, '--bamFile', paired.tumor_bam, '--bamType', 'consensus', '--nCPU', dd.get_num_cores(paired.tumor_data)] do.run(cmd, 'smcounter2 variant calling') for fname in glob.glob(os.path.join(os.path.dirname(tx_out_file), '*.smCounter*')): shutil.move(fname, os.path.join(os.path.dirname(out_file), os.path.basename(fname))) # depends on [control=['for'], data=['fname']] utils.symlink_plus(os.path.join(os.path.dirname(out_file), '%s.smCounter.cut.vcf' % out_prefix), out_file) # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]] return vcfutils.bgzip_and_index(out_file, paired.tumor_data['config'], remove_orig=False, prep_cmd="sed 's#FORMAT\t%s#FORMAT\t%s#' | %s" % (out_prefix, dd.get_sample_name(paired.tumor_data), vcfutils.add_contig_to_header_cl(dd.get_ref_file(paired.tumor_data), out_file)))
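A hedged invocation sketch inside a bcbio-style pipeline; align_bams, items, ref_file and assoc_files come from the surrounding framework and are only gestured at here, and the region/out_file values are illustrative:

    out_vcf = run(align_bams, items, ref_file, assoc_files,
                  region=("chr1", 0, 2000000),
                  out_file="smcounter2/sample-chr1.vcf.gz")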
def assert_valid(self,
                 name,             # type: str
                 value,            # type: Any
                 error_type=None,  # type: Type[ValidationError]
                 help_msg=None,    # type: str
                 **kw_context_args):
    """
    Asserts that the provided named value is valid with respect to the inner base validation functions. It returns
    silently in case of success, and raises a `ValidationError` or a subclass in case of failure. This corresponds
    to a 'Defensive programming' (sometimes known as 'Offensive programming') mode.

    By default this raises instances of `ValidationError` with a default message, in case of failure. There are
    two ways that you can customize this behaviour:

    * if you set `help_msg` in this method or in `Validator` constructor, instances of `ValidationError` created
    will be customized with the provided help message.

    * if you set `error_type` in this method or in `Validator` constructor, instances of your custom class will be
    created. Note that you may still provide a `help_msg`.

    It is recommended that Users define their own validation error types (case 2 above), so as to provide a unique
    error type for each kind of applicative error. This eases the process of error handling at app-level.

    :param name: the name of the variable to validate (for error messages)
    :param value: the value to validate
    :param error_type: a subclass of `ValidationError` to raise in case of validation failure. By default a
    `ValidationError` will be raised with the provided `help_msg`
    :param help_msg: an optional help message to be used in the raised error in case of validation failure.
    :param kw_context_args: optional contextual information to store in the exception, and that may be also used
    to format the help message
    :return: nothing in case of success. Otherwise, raises a ValidationError
    """
    try:
        # perform validation
        res = self.main_function(value)
    except Exception as e:
        # caught any exception: raise ValidationError or subclass with that exception in the details
        # --old bad idea: first wrap into a failure ==> NO !!! I tried and it was making it far too messy/verbose
        # note: we do not have to 'raise x from e' or `raise_from` since the ValidationError constructor already
        # sets the __cause__ so we can safely take the same handling as for non-exception failures.
        res = e

    # check the result
    if not result_is_success(res):
        raise_(self._create_validation_error(name, value, validation_outcome=res, error_type=error_type,
                                             help_msg=help_msg, **kw_context_args))
def function[assert_valid, parameter[self, name, value, error_type, help_msg]]: constant[ Asserts that the provided named value is valid with respect to the inner base validation functions. It returns silently in case of success, and raises a `ValidationError` or a subclass in case of failure. This corresponds to a 'Defensive programming' (sometimes known as 'Offensive programming') mode. By default this raises instances of `ValidationError` with a default message, in case of failure. There are two ways that you can customize this behaviour: * if you set `help_msg` in this method or in `Validator` constructor, instances of `ValidationError` created will be customized with the provided help message. * if you set `error_type` in this method or in `Validator` constructor, instances of your custom class will be created. Note that you may still provide a `help_msg`. It is recommended that Users define their own validation error types (case 2 above), so as to provide a unique error type for each kind of applicative error. This eases the process of error handling at app-level. :param name: the name of the variable to validate (for error messages) :param value: the value to validate :param error_type: a subclass of `ValidationError` to raise in case of validation failure. By default a `ValidationError` will be raised with the provided `help_msg` :param help_msg: an optional help message to be used in the raised error in case of validation failure. :param kw_context_args: optional contextual information to store in the exception, and that may be also used to format the help message :return: nothing in case of success. Otherwise, raises a ValidationError ] <ast.Try object at 0x7da1b0fe45e0> if <ast.UnaryOp object at 0x7da1b0fe5780> begin[:] call[name[raise_], parameter[call[name[self]._create_validation_error, parameter[name[name], name[value]]]]]
keyword[def] identifier[assert_valid] ( identifier[self] , identifier[name] , identifier[value] , identifier[error_type] = keyword[None] , identifier[help_msg] = keyword[None] , ** identifier[kw_context_args] ): literal[string] keyword[try] : identifier[res] = identifier[self] . identifier[main_function] ( identifier[value] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[res] = identifier[e] keyword[if] keyword[not] identifier[result_is_success] ( identifier[res] ): identifier[raise_] ( identifier[self] . identifier[_create_validation_error] ( identifier[name] , identifier[value] , identifier[validation_outcome] = identifier[res] , identifier[error_type] = identifier[error_type] , identifier[help_msg] = identifier[help_msg] ,** identifier[kw_context_args] ))
def assert_valid(self, name, value, error_type=None, help_msg=None, **kw_context_args): # type: str # type: Any # type: Type[ValidationError] # type: str
    "\n Asserts that the provided named value is valid with respect to the inner base validation functions. It returns\n silently in case of success, and raises a `ValidationError` or a subclass in case of failure. This corresponds\n to a 'Defensive programming' (sometimes known as 'Offensive programming') mode.\n\n By default this raises instances of `ValidationError` with a default message, in case of failure. There are\n two ways that you can customize this behaviour:\n\n * if you set `help_msg` in this method or in `Validator` constructor, instances of `ValidationError` created\n will be customized with the provided help message.\n\n * if you set `error_type` in this method or in `Validator` constructor, instances of your custom class will be\n created. Note that you may still provide a `help_msg`.\n\n It is recommended that Users define their own validation error types (case 2 above), so as to provide a unique\n error type for each kind of applicative error. This eases the process of error handling at app-level.\n\n :param name: the name of the variable to validate (for error messages)\n :param value: the value to validate\n :param error_type: a subclass of `ValidationError` to raise in case of validation failure. By default a\n `ValidationError` will be raised with the provided `help_msg`\n :param help_msg: an optional help message to be used in the raised error in case of validation failure.\n :param kw_context_args: optional contextual information to store in the exception, and that may be also used\n to format the help message\n :return: nothing in case of success. Otherwise, raises a ValidationError\n "
    try:
        # perform validation
        res = self.main_function(value) # depends on [control=['try'], data=[]]
    except Exception as e:
        # caught any exception: raise ValidationError or subclass with that exception in the details
        # --old bad idea: first wrap into a failure ==> NO !!! I tried and it was making it far too messy/verbose
        # note: we do not have to 'raise x from e' or `raise_from` since the ValidationError constructor already
        # sets the __cause__ so we can safely take the same handling as for non-exception failures.
        res = e # depends on [control=['except'], data=['e']]
    # check the result
    if not result_is_success(res):
        raise_(self._create_validation_error(name, value, validation_outcome=res, error_type=error_type, help_msg=help_msg, **kw_context_args)) # depends on [control=['if'], data=[]]
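A usage sketch with a custom error type, as the docstring recommends; the Validator construction and the {var_value} template field are assumptions sketched from the docstring:

    class InvalidSurface(ValidationError):
        help_msg = "surface should be positive, got {var_value}"

    v = Validator(lambda x: x > 0)                            # hypothetical construction
    v.assert_valid("surface", 12)                             # returns silently
    v.assert_valid("surface", -1, error_type=InvalidSurface)  # raises InvalidSurface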
def pipeline_getter(self): "For duck-typing with *Spec types" if not self.derivable: raise ArcanaUsageError( "There is no pipeline getter for {} because it doesn't " "fallback to a derived spec".format(self)) return self._fallback.pipeline_getter
def function[pipeline_getter, parameter[self]]: constant[For duck-typing with *Spec types] if <ast.UnaryOp object at 0x7da204565600> begin[:] <ast.Raise object at 0x7da204564430> return[name[self]._fallback.pipeline_getter]
keyword[def] identifier[pipeline_getter] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[derivable] : keyword[raise] identifier[ArcanaUsageError] ( literal[string] literal[string] . identifier[format] ( identifier[self] )) keyword[return] identifier[self] . identifier[_fallback] . identifier[pipeline_getter]
def pipeline_getter(self): """For duck-typing with *Spec types""" if not self.derivable: raise ArcanaUsageError("There is no pipeline getter for {} because it doesn't fallback to a derived spec".format(self)) # depends on [control=['if'], data=[]] return self._fallback.pipeline_getter
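A sketch of the guard in action; spec is a hypothetical spec object:

    try:
        getter = spec.pipeline_getter   # delegates to the fallback's getter
    except ArcanaUsageError:
        pass                            # spec has no derived fallback to fall back to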
def regression(): """ Run regression testing - lint and then run all tests. """ # HACK: Start using hitchbuildpy to get around this. Command("touch", DIR.project.joinpath("pathquery", "__init__.py").abspath()).run() storybook = _storybook({}).only_uninherited() #storybook.with_params(**{"python version": "2.7.10"})\ #.ordered_by_name().play() Command("touch", DIR.project.joinpath("pathquery", "__init__.py").abspath()).run() storybook.with_params(**{"python version": "3.5.0"}).ordered_by_name().play() lint()
def function[regression, parameter[]]: constant[ Run regression testing - lint and then run all tests. ] call[call[name[Command], parameter[constant[touch], call[call[name[DIR].project.joinpath, parameter[constant[pathquery], constant[__init__.py]]].abspath, parameter[]]]].run, parameter[]] variable[storybook] assign[=] call[call[name[_storybook], parameter[dictionary[[], []]]].only_uninherited, parameter[]] call[call[name[Command], parameter[constant[touch], call[call[name[DIR].project.joinpath, parameter[constant[pathquery], constant[__init__.py]]].abspath, parameter[]]]].run, parameter[]] call[call[call[name[storybook].with_params, parameter[]].ordered_by_name, parameter[]].play, parameter[]] call[name[lint], parameter[]]
keyword[def] identifier[regression] (): literal[string] identifier[Command] ( literal[string] , identifier[DIR] . identifier[project] . identifier[joinpath] ( literal[string] , literal[string] ). identifier[abspath] ()). identifier[run] () identifier[storybook] = identifier[_storybook] ({}). identifier[only_uninherited] () identifier[Command] ( literal[string] , identifier[DIR] . identifier[project] . identifier[joinpath] ( literal[string] , literal[string] ). identifier[abspath] ()). identifier[run] () identifier[storybook] . identifier[with_params] (**{ literal[string] : literal[string] }). identifier[ordered_by_name] (). identifier[play] () identifier[lint] ()
def regression(): """ Run regression testing - lint and then run all tests. """ # HACK: Start using hitchbuildpy to get around this. Command('touch', DIR.project.joinpath('pathquery', '__init__.py').abspath()).run() storybook = _storybook({}).only_uninherited() #storybook.with_params(**{"python version": "2.7.10"})\ #.ordered_by_name().play() Command('touch', DIR.project.joinpath('pathquery', '__init__.py').abspath()).run() storybook.with_params(**{'python version': '3.5.0'}).ordered_by_name().play() lint()
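regression() is a task entry point rather than a library function; a sketch of driving it directly (hitch projects usually expose such tasks through their own runner):

    regression()   # touches __init__.py, replays the 3.5.0 storybook, then lints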
def set_extent_location(self, new_location, tag_location=None): # type: (int, int) -> None ''' A method to set the location of this UDF Terminating Descriptor. Parameters: new_location - The new extent this UDF Terminating Descriptor should be located at. tag_location - The tag location to set for this UDF Terminator Descriptor. Returns: Nothing. ''' if not self._initialized: raise pycdlibexception.PyCdlibInternalError('UDF Terminating Descriptor not initialized') self.new_extent_loc = new_location if tag_location is None: tag_location = new_location self.desc_tag.tag_location = tag_location
def function[set_extent_location, parameter[self, new_location, tag_location]]: constant[ A method to set the location of this UDF Terminating Descriptor. Parameters: new_location - The new extent this UDF Terminating Descriptor should be located at. tag_location - The tag location to set for this UDF Terminator Descriptor. Returns: Nothing. ] if <ast.UnaryOp object at 0x7da20c6c6f80> begin[:] <ast.Raise object at 0x7da20c6c77c0> name[self].new_extent_loc assign[=] name[new_location] if compare[name[tag_location] is constant[None]] begin[:] variable[tag_location] assign[=] name[new_location] name[self].desc_tag.tag_location assign[=] name[tag_location]
keyword[def] identifier[set_extent_location] ( identifier[self] , identifier[new_location] , identifier[tag_location] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_initialized] : keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] ) identifier[self] . identifier[new_extent_loc] = identifier[new_location] keyword[if] identifier[tag_location] keyword[is] keyword[None] : identifier[tag_location] = identifier[new_location] identifier[self] . identifier[desc_tag] . identifier[tag_location] = identifier[tag_location]
def set_extent_location(self, new_location, tag_location=None): # type: (int, int) -> None '\n A method to set the location of this UDF Terminating Descriptor.\n\n Parameters:\n new_location - The new extent this UDF Terminating Descriptor should be located at.\n tag_location - The tag location to set for this UDF Terminator Descriptor.\n Returns:\n Nothing.\n ' if not self._initialized: raise pycdlibexception.PyCdlibInternalError('UDF Terminating Descriptor not initialized') # depends on [control=['if'], data=[]] self.new_extent_loc = new_location if tag_location is None: tag_location = new_location # depends on [control=['if'], data=['tag_location']] self.desc_tag.tag_location = tag_location
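A relocation sketch on an initialized descriptor (extent numbers are illustrative):

    term.set_extent_location(261)                    # tag location defaults to 261 too
    term.set_extent_location(262, tag_location=260)  # override the tag location separately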
def _handle_subscribed(self, dtype, data, ts,):
    """Handles responses to subscribe() commands.

    Registers a channel id with the client and assigns a data handler to it.

    :param dtype:
    :param data:
    :param ts:
    :return:
    """
    self.log.debug("_handle_subscribed: %s - %s - %s", dtype, data, ts)
    channel_name = data.pop('channel')
    channel_id = data.pop('chanId')
    config = data

    if 'pair' in config:
        symbol = config['pair']
        if symbol.startswith('t'):
            symbol = symbol[1:]
    elif 'symbol' in config:
        symbol = config['symbol']
        if symbol.startswith('t'):
            symbol = symbol[1:]
    elif 'key' in config:
        symbol = config['key'].split(':')[2][1:]  # layout type:interval:tPair
    else:
        symbol = None

    if 'prec' in config and config['prec'].startswith('R'):
        channel_name = 'raw_' + channel_name

    self.channel_handlers[channel_id] = self._data_handlers[channel_name]

    # Create a channel_name, symbol tuple to identify channels of same type
    if 'key' in config:
        identifier = (channel_name, symbol, config['key'].split(':')[1])
    else:
        identifier = (channel_name, symbol)
    self.channel_handlers[channel_id] = identifier
    self.channel_directory[identifier] = channel_id
    self.channel_directory[channel_id] = identifier
    self.log.info("Subscription successful for channel %s", identifier)
def function[_handle_subscribed, parameter[self, dtype, data, ts]]: constant[Handles responses to subscribe() commands. Registers a channel id with the client and assigns a data handler to it. :param dtype: :param data: :param ts: :return: ] call[name[self].log.debug, parameter[constant[_handle_subscribed: %s - %s - %s], name[dtype], name[data], name[ts]]] variable[channel_name] assign[=] call[name[data].pop, parameter[constant[channel]]] variable[channel_id] assign[=] call[name[data].pop, parameter[constant[chanId]]] variable[config] assign[=] name[data] if compare[constant[pair] in name[config]] begin[:] variable[symbol] assign[=] call[name[config]][constant[pair]] if call[name[symbol].startswith, parameter[constant[t]]] begin[:] variable[symbol] assign[=] call[name[symbol]][<ast.Slice object at 0x7da18c4cdc60>] if <ast.BoolOp object at 0x7da1b26ad420> begin[:] variable[channel_name] assign[=] binary_operation[constant[raw_] + name[channel_name]] call[name[self].channel_handlers][name[channel_id]] assign[=] call[name[self]._data_handlers][name[channel_name]] if compare[constant[key] in name[config]] begin[:] variable[identifier] assign[=] tuple[[<ast.Name object at 0x7da18c4cf5b0>, <ast.Name object at 0x7da18c4cc0d0>, <ast.Subscript object at 0x7da18c4cc040>]] call[name[self].channel_handlers][name[channel_id]] assign[=] name[identifier] call[name[self].channel_directory][name[identifier]] assign[=] name[channel_id] call[name[self].channel_directory][name[channel_id]] assign[=] name[identifier] call[name[self].log.info, parameter[constant[Subscription successful for channel %s], name[identifier]]]
keyword[def] identifier[_handle_subscribed] ( identifier[self] , identifier[dtype] , identifier[data] , identifier[ts] ,): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[dtype] , identifier[data] , identifier[ts] ) identifier[channel_name] = identifier[data] . identifier[pop] ( literal[string] ) identifier[channel_id] = identifier[data] . identifier[pop] ( literal[string] ) identifier[config] = identifier[data] keyword[if] literal[string] keyword[in] identifier[config] : identifier[symbol] = identifier[config] [ literal[string] ] keyword[if] identifier[symbol] . identifier[startswith] ( literal[string] ): identifier[symbol] = identifier[symbol] [ literal[int] :] keyword[elif] literal[string] keyword[in] identifier[config] : identifier[symbol] = identifier[config] [ literal[string] ] keyword[if] identifier[symbol] . identifier[startswith] ( literal[string] ): identifier[symbol] = identifier[symbol] [ literal[int] :] keyword[elif] literal[string] keyword[in] identifier[config] : identifier[symbol] = identifier[config] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ][ literal[int] :] keyword[else] : identifier[symbol] = keyword[None] keyword[if] literal[string] keyword[in] identifier[config] keyword[and] identifier[config] [ literal[string] ]. identifier[startswith] ( literal[string] ): identifier[channel_name] = literal[string] + identifier[channel_name] identifier[self] . identifier[channel_handlers] [ identifier[channel_id] ]= identifier[self] . identifier[_data_handlers] [ identifier[channel_name] ] keyword[if] literal[string] keyword[in] identifier[config] : identifier[identifier] =( identifier[channel_name] , identifier[symbol] , identifier[config] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ]) keyword[else] : identifier[identifier] =( identifier[channel_name] , identifier[symbol] ) identifier[self] . identifier[channel_handlers] [ identifier[channel_id] ]= identifier[identifier] identifier[self] . identifier[channel_directory] [ identifier[identifier] ]= identifier[channel_id] identifier[self] . identifier[channel_directory] [ identifier[channel_id] ]= identifier[identifier] identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[identifier] )
def _handle_subscribed(self, dtype, data, ts):
    """Handles responses to subscribe() commands.

    Registers a channel id with the client and assigns a data handler to it.

    :param dtype:
    :param data:
    :param ts:
    :return:
    """
    self.log.debug('_handle_subscribed: %s - %s - %s', dtype, data, ts)
    channel_name = data.pop('channel')
    channel_id = data.pop('chanId')
    config = data
    if 'pair' in config:
        symbol = config['pair']
        if symbol.startswith('t'):
            symbol = symbol[1:] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config']]
    elif 'symbol' in config:
        symbol = config['symbol']
        if symbol.startswith('t'):
            symbol = symbol[1:] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config']]
    elif 'key' in config:
        symbol = config['key'].split(':')[2][1:]  # layout type:interval:tPair # depends on [control=['if'], data=['config']]
    else:
        symbol = None
    if 'prec' in config and config['prec'].startswith('R'):
        channel_name = 'raw_' + channel_name # depends on [control=['if'], data=[]]
    self.channel_handlers[channel_id] = self._data_handlers[channel_name]
    # Create a channel_name, symbol tuple to identify channels of same type
    if 'key' in config:
        identifier = (channel_name, symbol, config['key'].split(':')[1]) # depends on [control=['if'], data=['config']]
    else:
        identifier = (channel_name, symbol)
    self.channel_handlers[channel_id] = identifier
    self.channel_directory[identifier] = channel_id
    self.channel_directory[channel_id] = identifier
    self.log.info('Subscription successful for channel %s', identifier)
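A minimal standalone sketch (with made-up names) of the two-way channel directory built above: the same dict maps the (channel_name, symbol) identifier to the channel id and the channel id back to the identifier, so lookups work in both directions in O(1).

# Illustrative only; register() and the example values are not part of the client.
channel_directory = {}

def register(channel_id, channel_name, symbol):
    identifier = (channel_name, symbol)
    channel_directory[identifier] = channel_id   # forward: identifier -> id
    channel_directory[channel_id] = identifier   # reverse: id -> identifier

register(17, 'ticker', 'BTCUSD')
assert channel_directory[('ticker', 'BTCUSD')] == 17
assert channel_directory[17] == ('ticker', 'BTCUSD')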
def plotReceptiveFields(sp, nDim1=8, nDim2=8):
    """
    Plot 2D receptive fields for 16 randomly selected columns
    :param sp:
    :return:
    """
    columnNumber = np.product(sp.getColumnDimensions())
    fig, ax = plt.subplots(nrows=4, ncols=4)
    for rowI in range(4):
        for colI in range(4):
            col = np.random.randint(columnNumber)
            connectedSynapses = np.zeros((nDim1*nDim2,), dtype=uintType)
            sp.getConnectedSynapses(col, connectedSynapses)
            receptiveField = connectedSynapses.reshape((nDim1, nDim2))
            ax[rowI, colI].imshow(receptiveField, cmap='gray')
            ax[rowI, colI].set_title("col: {}".format(col))
def function[plotReceptiveFields, parameter[sp, nDim1, nDim2]]: constant[ Plot 2D receptive fields for 16 randomly selected columns :param sp: :return: ] variable[columnNumber] assign[=] call[name[np].product, parameter[call[name[sp].getColumnDimensions, parameter[]]]] <ast.Tuple object at 0x7da1b08605e0> assign[=] call[name[plt].subplots, parameter[]] for taget[name[rowI]] in starred[call[name[range], parameter[constant[4]]]] begin[:] for taget[name[colI]] in starred[call[name[range], parameter[constant[4]]]] begin[:] variable[col] assign[=] call[name[np].random.randint, parameter[name[columnNumber]]] variable[connectedSynapses] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da1b08622f0>]]]] call[name[sp].getConnectedSynapses, parameter[name[col], name[connectedSynapses]]] variable[receptiveField] assign[=] call[name[connectedSynapses].reshape, parameter[tuple[[<ast.Name object at 0x7da1b0862980>, <ast.Name object at 0x7da1b08605b0>]]]] call[call[name[ax]][tuple[[<ast.Name object at 0x7da1b0860a90>, <ast.Name object at 0x7da1b0862410>]]].imshow, parameter[name[receptiveField]]] call[call[name[ax]][tuple[[<ast.Name object at 0x7da1b0862950>, <ast.Name object at 0x7da1b08c6710>]]].set_title, parameter[call[constant[col: {}].format, parameter[name[col]]]]]
keyword[def] identifier[plotReceptiveFields] ( identifier[sp] , identifier[nDim1] = literal[int] , identifier[nDim2] = literal[int] ): literal[string] identifier[columnNumber] = identifier[np] . identifier[product] ( identifier[sp] . identifier[getColumnDimensions] ()) identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[nrows] = literal[int] , identifier[ncols] = literal[int] ) keyword[for] identifier[rowI] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[colI] keyword[in] identifier[range] ( literal[int] ): identifier[col] = identifier[np] . identifier[random] . identifier[randint] ( identifier[columnNumber] ) identifier[connectedSynapses] = identifier[np] . identifier[zeros] (( identifier[nDim1] * identifier[nDim2] ,), identifier[dtype] = identifier[uintType] ) identifier[sp] . identifier[getConnectedSynapses] ( identifier[col] , identifier[connectedSynapses] ) identifier[receptiveField] = identifier[connectedSynapses] . identifier[reshape] (( identifier[nDim1] , identifier[nDim2] )) identifier[ax] [ identifier[rowI] , identifier[colI] ]. identifier[imshow] ( identifier[receptiveField] , identifier[cmap] = literal[string] ) identifier[ax] [ identifier[rowI] , identifier[colI] ]. identifier[set_title] ( literal[string] . identifier[format] ( identifier[col] ))
def plotReceptiveFields(sp, nDim1=8, nDim2=8):
    """
    Plot 2D receptive fields for 16 randomly selected columns
    :param sp:
    :return:
    """
    columnNumber = np.product(sp.getColumnDimensions())
    (fig, ax) = plt.subplots(nrows=4, ncols=4)
    for rowI in range(4):
        for colI in range(4):
            col = np.random.randint(columnNumber)
            connectedSynapses = np.zeros((nDim1 * nDim2,), dtype=uintType)
            sp.getConnectedSynapses(col, connectedSynapses)
            receptiveField = connectedSynapses.reshape((nDim1, nDim2))
            ax[rowI, colI].imshow(receptiveField, cmap='gray')
            ax[rowI, colI].set_title('col: {}'.format(col)) # depends on [control=['for'], data=['colI']] # depends on [control=['for'], data=['rowI']]
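Standalone sketch of the same 4x4 grid layout, with random data standing in for a real spatial pooler; sp, getConnectedSynapses, and uintType belong to the surrounding NuPIC code and are not needed to reproduce the plotting pattern.

import numpy as np
import matplotlib.pyplot as plt

fig, ax = plt.subplots(nrows=4, ncols=4)
for rowI in range(4):
    for colI in range(4):
        # Random binary field as a stand-in for a column's receptive field.
        field = np.random.randint(0, 2, size=(8, 8))
        ax[rowI, colI].imshow(field, cmap='gray')
        ax[rowI, colI].set_title('col: {}'.format(rowI * 4 + colI))
plt.show()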
def queryAll(self, queryString):
    '''
    Retrieves data from specified objects, whether or not they have been deleted.
    '''
    self._setHeaders('queryAll')
    return self._sforce.service.queryAll(queryString)
def function[queryAll, parameter[self, queryString]]: constant[ Retrieves data from specified objects, whether or not they have been deleted. ] call[name[self]._setHeaders, parameter[constant[queryAll]]] return[call[name[self]._sforce.service.queryAll, parameter[name[queryString]]]]
keyword[def] identifier[queryAll] ( identifier[self] , identifier[queryString] ): literal[string] identifier[self] . identifier[_setHeaders] ( literal[string] ) keyword[return] identifier[self] . identifier[_sforce] . identifier[service] . identifier[queryAll] ( identifier[queryString] )
def queryAll(self, queryString):
    """
    Retrieves data from specified objects, whether or not they have been deleted.
    """
    self._setHeaders('queryAll')
    return self._sforce.service.queryAll(queryString)
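Hedged usage sketch for the record above: unlike query(), queryAll() also matches soft-deleted rows, so IsDeleted can appear in the SOQL filter. The construction of the client and the exact shape of the SOAP result depend on the surrounding Salesforce wrapper and are assumed here.

# `client` is a hypothetical instance of the class that defines queryAll();
# the .records attribute is an assumption about the SOAP result shape.
result = client.queryAll(
    "SELECT Id, Name FROM Account WHERE IsDeleted = true")
for record in result.records:
    print(record.Id, record.Name)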
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Finds, loads and returns the most recent version of the library.
    """
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    library_versions = []
    for filename in candidates:
        version = version_check_callback(filename)
        if version is not None and version >= (3, 0, 0):
            library_versions.append((version, filename))

    if not library_versions:
        return None

    library_versions.sort()
    return ctypes.CDLL(library_versions[-1][1])
def function[_load_library, parameter[library_names, library_file_extensions, library_search_paths, version_check_callback]]: constant[ Finds, loads and returns the most recent version of the library. ] variable[candidates] assign[=] call[name[_find_library_candidates], parameter[name[library_names], name[library_file_extensions], name[library_search_paths]]] variable[library_versions] assign[=] list[[]] for taget[name[filename]] in starred[name[candidates]] begin[:] variable[version] assign[=] call[name[version_check_callback], parameter[name[filename]]] if <ast.BoolOp object at 0x7da18ede4790> begin[:] call[name[library_versions].append, parameter[tuple[[<ast.Name object at 0x7da18bc70e80>, <ast.Name object at 0x7da18bc732e0>]]]] if <ast.UnaryOp object at 0x7da18bc70520> begin[:] return[constant[None]] call[name[library_versions].sort, parameter[]] return[call[name[ctypes].CDLL, parameter[call[call[name[library_versions]][<ast.UnaryOp object at 0x7da18bc718a0>]][constant[1]]]]]
keyword[def] identifier[_load_library] ( identifier[library_names] , identifier[library_file_extensions] , identifier[library_search_paths] , identifier[version_check_callback] ): literal[string] identifier[candidates] = identifier[_find_library_candidates] ( identifier[library_names] , identifier[library_file_extensions] , identifier[library_search_paths] ) identifier[library_versions] =[] keyword[for] identifier[filename] keyword[in] identifier[candidates] : identifier[version] = identifier[version_check_callback] ( identifier[filename] ) keyword[if] identifier[version] keyword[is] keyword[not] keyword[None] keyword[and] identifier[version] >=( literal[int] , literal[int] , literal[int] ): identifier[library_versions] . identifier[append] (( identifier[version] , identifier[filename] )) keyword[if] keyword[not] identifier[library_versions] : keyword[return] keyword[None] identifier[library_versions] . identifier[sort] () keyword[return] identifier[ctypes] . identifier[CDLL] ( identifier[library_versions] [- literal[int] ][ literal[int] ])
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Finds, loads and returns the most recent version of the library.
    """
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    library_versions = []
    for filename in candidates:
        version = version_check_callback(filename)
        if version is not None and version >= (3, 0, 0):
            library_versions.append((version, filename)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']]
    if not library_versions:
        return None # depends on [control=['if'], data=[]]
    library_versions.sort()
    return ctypes.CDLL(library_versions[-1][1])
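A minimal illustration of the selection logic in _load_library: version tuples sort lexicographically, so after sort() the last entry is the newest candidate that passed the (3, 0, 0) floor. The file names below are made up for the example.

library_versions = [((3, 1, 2), 'libfoo.so.3.1.2'),
                    ((3, 0, 5), 'libfoo.so.3.0.5'),
                    ((3, 10, 0), 'libfoo.so.3.10.0')]
library_versions.sort()
# Tuple comparison is elementwise, so (3, 10, 0) > (3, 1, 2).
assert library_versions[-1][1] == 'libfoo.so.3.10.0'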
def fault_and_striae_plot(ax, strikes, dips, rakes):
    """Makes a fault-and-striae plot (a.k.a. "Ball of String") for normal
    faults with the given strikes, dips, and rakes."""
    # Plot the planes
    lines = ax.plane(strikes, dips, 'k-', lw=0.5)

    # Calculate the position of the rake of the lineations, but don't plot yet
    x, y = mplstereonet.rake(strikes, dips, rakes)

    # Calculate the direction the arrows should point
    # These are all normal faults, so the arrows point away from the center
    # For thrusts, it would just be u, v = -x/mag, -y/mag
    mag = np.hypot(x, y)
    u, v = x / mag, y / mag

    # Plot the arrows at the rake locations...
    arrows = ax.quiver(x, y, u, v, width=1, headwidth=4, units='dots')
    return lines, arrows
def function[fault_and_striae_plot, parameter[ax, strikes, dips, rakes]]: constant[Makes a fault-and-striae plot (a.k.a. "Ball of String") for normal faults with the given strikes, dips, and rakes.] variable[lines] assign[=] call[name[ax].plane, parameter[name[strikes], name[dips], constant[k-]]] <ast.Tuple object at 0x7da1b184bc40> assign[=] call[name[mplstereonet].rake, parameter[name[strikes], name[dips], name[rakes]]] variable[mag] assign[=] call[name[np].hypot, parameter[name[x], name[y]]] <ast.Tuple object at 0x7da1b1848340> assign[=] tuple[[<ast.BinOp object at 0x7da1b1848910>, <ast.BinOp object at 0x7da1b184bd00>]] variable[arrows] assign[=] call[name[ax].quiver, parameter[name[x], name[y], name[u], name[v]]] return[tuple[[<ast.Name object at 0x7da1b184ba90>, <ast.Name object at 0x7da1b184b160>]]]
keyword[def] identifier[fault_and_striae_plot] ( identifier[ax] , identifier[strikes] , identifier[dips] , identifier[rakes] ): literal[string] identifier[lines] = identifier[ax] . identifier[plane] ( identifier[strikes] , identifier[dips] , literal[string] , identifier[lw] = literal[int] ) identifier[x] , identifier[y] = identifier[mplstereonet] . identifier[rake] ( identifier[strikes] , identifier[dips] , identifier[rakes] ) identifier[mag] = identifier[np] . identifier[hypot] ( identifier[x] , identifier[y] ) identifier[u] , identifier[v] = identifier[x] / identifier[mag] , identifier[y] / identifier[mag] identifier[arrows] = identifier[ax] . identifier[quiver] ( identifier[x] , identifier[y] , identifier[u] , identifier[v] , identifier[width] = literal[int] , identifier[headwidth] = literal[int] , identifier[units] = literal[string] ) keyword[return] identifier[lines] , identifier[arrows]
def fault_and_striae_plot(ax, strikes, dips, rakes):
    """Makes a fault-and-striae plot (a.k.a. "Ball of String") for normal
    faults with the given strikes, dips, and rakes."""
    # Plot the planes
    lines = ax.plane(strikes, dips, 'k-', lw=0.5)
    # Calculate the position of the rake of the lineations, but don't plot yet
    (x, y) = mplstereonet.rake(strikes, dips, rakes)
    # Calculate the direction the arrows should point
    # These are all normal faults, so the arrows point away from the center
    # For thrusts, it would just be u, v = -x/mag, -y/mag
    mag = np.hypot(x, y)
    (u, v) = (x / mag, y / mag)
    # Plot the arrows at the rake locations...
    arrows = ax.quiver(x, y, u, v, width=1, headwidth=4, units='dots')
    return (lines, arrows)
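A hedged usage sketch for fault_and_striae_plot, assuming mplstereonet is installed (it registers a stereonet projection and provides a subplots() wrapper); the orientation values are illustrative.

import numpy as np
import matplotlib.pyplot as plt
import mplstereonet

fig, ax = mplstereonet.subplots()       # stereonet axes, as the function expects
strikes = np.array([120, 315, 86])      # example fault orientations (degrees)
dips = np.array([60, 45, 30])
rakes = np.array([90, 70, 110])
fault_and_striae_plot(ax, strikes, dips, rakes)
plt.show()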
def process_event(self, c):
    """Returns a message from tick() to be displayed if game is over"""
    if c == "\x04":
        sys.exit()
    elif c in key_directions:
        self.move_entity(self.player, *vscale(self.player.speed, key_directions[c]))
    else:
        return "try arrow keys, w, a, s, d, or ctrl-D (you pressed %r)" % c
    return self.tick()
def function[process_event, parameter[self, c]]: constant[Returns a message from tick() to be displayed if game is over] if compare[name[c] equal[==] constant[]] begin[:] call[name[sys].exit, parameter[]] return[call[name[self].tick, parameter[]]]
keyword[def] identifier[process_event] ( identifier[self] , identifier[c] ): literal[string] keyword[if] identifier[c] == literal[string] : identifier[sys] . identifier[exit] () keyword[elif] identifier[c] keyword[in] identifier[key_directions] : identifier[self] . identifier[move_entity] ( identifier[self] . identifier[player] ,* identifier[vscale] ( identifier[self] . identifier[player] . identifier[speed] , identifier[key_directions] [ identifier[c] ])) keyword[else] : keyword[return] literal[string] % identifier[c] keyword[return] identifier[self] . identifier[tick] ()
def process_event(self, c):
    """Returns a message from tick() to be displayed if game is over"""
    if c == '\x04':
        sys.exit() # depends on [control=['if'], data=[]]
    elif c in key_directions:
        self.move_entity(self.player, *vscale(self.player.speed, key_directions[c])) # depends on [control=['if'], data=['c', 'key_directions']]
    else:
        return 'try arrow keys, w, a, s, d, or ctrl-D (you pressed %r)' % c
    return self.tick()
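Standalone sketch of the dispatch pattern used by process_event: a dict maps each movement key to a unit vector, and '\x04' (ctrl-D) exits. vscale and the entity model belong to the surrounding game code; the versions below are minimal stand-ins.

# key_directions and vscale as they plausibly look; illustrative only.
key_directions = {'w': (0, -1), 's': (0, 1), 'a': (-1, 0), 'd': (1, 0)}

def vscale(scalar, vec):
    # Scale every component of a direction vector by the entity's speed.
    return tuple(scalar * component for component in vec)

assert vscale(2, key_directions['d']) == (2, 0)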
def ball_and_sticks(self, ball_radius=0.05, stick_radius=0.02,
                    colorlist=None, opacity=1.0):
    """Display the system using a ball and stick representation.
    """
    # Add the spheres
    if colorlist is None:
        colorlist = [get_atom_color(t) for t in self.topology['atom_types']]
    sizes = [ball_radius] * len(self.topology['atom_types'])
    spheres = self.add_representation('spheres',
                                      {'coordinates': self.coordinates.astype('float32'),
                                       'colors': colorlist,
                                       'radii': sizes,
                                       'opacity': opacity})

    def update(self=self, spheres=spheres):
        self.update_representation(spheres,
                                   {'coordinates': self.coordinates.astype('float32')})
    self.update_callbacks.append(update)

    # Add the cylinders
    if 'bonds' in self.topology and self.topology['bonds'] is not None:
        start_idx, end_idx = zip(*self.topology['bonds'])
        # Added this so bonds don't go through atoms when opacity<1.0
        new_start_coords = []
        new_end_coords = []
        for bond_ind, bond in enumerate(self.topology['bonds']):
            trim_amt = (ball_radius**2 - stick_radius**2)**0.5 if ball_radius > stick_radius else 0
            start_coord = self.coordinates[bond[0]]
            end_coord = self.coordinates[bond[1]]
            vec = (end_coord - start_coord) / np.linalg.norm(end_coord - start_coord)
            new_start_coords.append(start_coord + vec * trim_amt)
            new_end_coords.append(end_coord - vec * trim_amt)
        cylinders = self.add_representation('cylinders',
                                            {'startCoords': np.array(new_start_coords, dtype='float32'),
                                             'endCoords': np.array(new_end_coords, dtype='float32'),
                                             'colors': [0xcccccc] * len(new_start_coords),
                                             'radii': [stick_radius] * len(new_start_coords),
                                             'opacity': opacity})

        # Update closure
        def update(self=self, rep=cylinders, start_idx=start_idx, end_idx=end_idx):
            self.update_representation(rep,
                                       {'startCoords': self.coordinates[list(start_idx)],
                                        'endCoords': self.coordinates[list(end_idx)]})
        self.update_callbacks.append(update)
    self.autozoom(self.coordinates)
def function[ball_and_sticks, parameter[self, ball_radius, stick_radius, colorlist, opacity]]: constant[Display the system using a ball and stick representation. ] if compare[name[colorlist] is constant[None]] begin[:] variable[colorlist] assign[=] <ast.ListComp object at 0x7da1b2346f20> variable[sizes] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b23474c0>]] * call[name[len], parameter[call[name[self].topology][constant[atom_types]]]]] variable[spheres] assign[=] call[name[self].add_representation, parameter[constant[spheres], dictionary[[<ast.Constant object at 0x7da1b23472e0>, <ast.Constant object at 0x7da1b2346830>, <ast.Constant object at 0x7da1b23465c0>, <ast.Constant object at 0x7da1b2344070>], [<ast.Call object at 0x7da1b2344eb0>, <ast.Name object at 0x7da1b2344220>, <ast.Name object at 0x7da1b2344d60>, <ast.Name object at 0x7da1b2347e20>]]]] def function[update, parameter[self, spheres]]: call[name[self].update_representation, parameter[name[spheres], dictionary[[<ast.Constant object at 0x7da1b2346bc0>], [<ast.Call object at 0x7da1b23476a0>]]]] call[name[self].update_callbacks.append, parameter[name[update]]] if <ast.BoolOp object at 0x7da1b23460e0> begin[:] <ast.Tuple object at 0x7da1b2347fa0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b2344730>]] variable[new_start_coords] assign[=] list[[]] variable[new_end_coords] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b2346aa0>, <ast.Name object at 0x7da1b23458a0>]]] in starred[call[name[enumerate], parameter[call[name[self].topology][constant[bonds]]]]] begin[:] variable[trim_amt] assign[=] <ast.IfExp object at 0x7da1b2345750> variable[start_coord] assign[=] call[name[self].coordinates][call[name[bond]][constant[0]]] variable[end_coord] assign[=] call[name[self].coordinates][call[name[bond]][constant[1]]] variable[vec] assign[=] binary_operation[binary_operation[name[end_coord] - name[start_coord]] / call[name[np].linalg.norm, parameter[binary_operation[name[end_coord] - name[start_coord]]]]] call[name[new_start_coords].append, parameter[binary_operation[name[start_coord] + binary_operation[name[vec] * name[trim_amt]]]]] call[name[new_end_coords].append, parameter[binary_operation[name[end_coord] - binary_operation[name[vec] * name[trim_amt]]]]] variable[cylinders] assign[=] call[name[self].add_representation, parameter[constant[cylinders], dictionary[[<ast.Constant object at 0x7da1b23457e0>, <ast.Constant object at 0x7da1b2345b10>, <ast.Constant object at 0x7da1b23440a0>, <ast.Constant object at 0x7da1b23476d0>, <ast.Constant object at 0x7da1b2346980>], [<ast.Call object at 0x7da1b23470d0>, <ast.Call object at 0x7da1b2346290>, <ast.BinOp object at 0x7da1b26af7c0>, <ast.BinOp object at 0x7da1b26ac3a0>, <ast.Name object at 0x7da1b26afa60>]]]] def function[update, parameter[self, rep, start_idx, end_idx]]: call[name[self].update_representation, parameter[name[rep], dictionary[[<ast.Constant object at 0x7da1b26ac0d0>, <ast.Constant object at 0x7da1b26af010>], [<ast.Subscript object at 0x7da1b26acd30>, <ast.Subscript object at 0x7da1b1be9d80>]]]] call[name[self].update_callbacks.append, parameter[name[update]]] call[name[self].autozoom, parameter[name[self].coordinates]]
keyword[def] identifier[ball_and_sticks] ( identifier[self] , identifier[ball_radius] = literal[int] , identifier[stick_radius] = literal[int] , identifier[colorlist] = keyword[None] , identifier[opacity] = literal[int] ): literal[string] keyword[if] identifier[colorlist] keyword[is] keyword[None] : identifier[colorlist] =[ identifier[get_atom_color] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[self] . identifier[topology] [ literal[string] ]] identifier[sizes] =[ identifier[ball_radius] ]* identifier[len] ( identifier[self] . identifier[topology] [ literal[string] ]) identifier[spheres] = identifier[self] . identifier[add_representation] ( literal[string] ,{ literal[string] : identifier[self] . identifier[coordinates] . identifier[astype] ( literal[string] ), literal[string] : identifier[colorlist] , literal[string] : identifier[sizes] , literal[string] : identifier[opacity] }) keyword[def] identifier[update] ( identifier[self] = identifier[self] , identifier[spheres] = identifier[spheres] ): identifier[self] . identifier[update_representation] ( identifier[spheres] ,{ literal[string] : identifier[self] . identifier[coordinates] . identifier[astype] ( literal[string] )}) identifier[self] . identifier[update_callbacks] . identifier[append] ( identifier[update] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[topology] keyword[and] identifier[self] . identifier[topology] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[start_idx] , identifier[end_idx] = identifier[zip] (* identifier[self] . identifier[topology] [ literal[string] ]) identifier[new_start_coords] =[] identifier[new_end_coords] =[] keyword[for] identifier[bond_ind] , identifier[bond] keyword[in] identifier[enumerate] ( identifier[self] . identifier[topology] [ literal[string] ]): identifier[trim_amt] =( identifier[ball_radius] ** literal[int] - identifier[stick_radius] ** literal[int] )** literal[int] keyword[if] identifier[ball_radius] > identifier[stick_radius] keyword[else] literal[int] identifier[start_coord] = identifier[self] . identifier[coordinates] [ identifier[bond] [ literal[int] ]] identifier[end_coord] = identifier[self] . identifier[coordinates] [ identifier[bond] [ literal[int] ]] identifier[vec] =( identifier[end_coord] - identifier[start_coord] )/ identifier[np] . identifier[linalg] . identifier[norm] ( identifier[end_coord] - identifier[start_coord] ) identifier[new_start_coords] . identifier[append] ( identifier[start_coord] + identifier[vec] * identifier[trim_amt] ) identifier[new_end_coords] . identifier[append] ( identifier[end_coord] - identifier[vec] * identifier[trim_amt] ) identifier[cylinders] = identifier[self] . identifier[add_representation] ( literal[string] ,{ literal[string] : identifier[np] . identifier[array] ( identifier[new_start_coords] , identifier[dtype] = literal[string] ), literal[string] : identifier[np] . identifier[array] ( identifier[new_end_coords] , identifier[dtype] = literal[string] ), literal[string] :[ literal[int] ]* identifier[len] ( identifier[new_start_coords] ), literal[string] :[ identifier[stick_radius] ]* identifier[len] ( identifier[new_start_coords] ), literal[string] : identifier[opacity] }) keyword[def] identifier[update] ( identifier[self] = identifier[self] , identifier[rep] = identifier[cylinders] , identifier[start_idx] = identifier[start_idx] , identifier[end_idx] = identifier[end_idx] ): identifier[self] . identifier[update_representation] ( identifier[rep] ,{ literal[string] : identifier[self] . identifier[coordinates] [ identifier[list] ( identifier[start_idx] )], literal[string] : identifier[self] . identifier[coordinates] [ identifier[list] ( identifier[end_idx] )]}) identifier[self] . identifier[update_callbacks] . identifier[append] ( identifier[update] ) identifier[self] . identifier[autozoom] ( identifier[self] . identifier[coordinates] )
def ball_and_sticks(self, ball_radius=0.05, stick_radius=0.02,
                    colorlist=None, opacity=1.0):
    """Display the system using a ball and stick representation.
    """
    # Add the spheres
    if colorlist is None:
        colorlist = [get_atom_color(t) for t in self.topology['atom_types']] # depends on [control=['if'], data=['colorlist']]
    sizes = [ball_radius] * len(self.topology['atom_types'])
    spheres = self.add_representation('spheres',
                                      {'coordinates': self.coordinates.astype('float32'),
                                       'colors': colorlist,
                                       'radii': sizes,
                                       'opacity': opacity})

    def update(self=self, spheres=spheres):
        self.update_representation(spheres,
                                   {'coordinates': self.coordinates.astype('float32')})
    self.update_callbacks.append(update)

    # Add the cylinders
    if 'bonds' in self.topology and self.topology['bonds'] is not None:
        (start_idx, end_idx) = zip(*self.topology['bonds'])
        # Added this so bonds don't go through atoms when opacity<1.0
        new_start_coords = []
        new_end_coords = []
        for (bond_ind, bond) in enumerate(self.topology['bonds']):
            trim_amt = (ball_radius ** 2 - stick_radius ** 2) ** 0.5 if ball_radius > stick_radius else 0
            start_coord = self.coordinates[bond[0]]
            end_coord = self.coordinates[bond[1]]
            vec = (end_coord - start_coord) / np.linalg.norm(end_coord - start_coord)
            new_start_coords.append(start_coord + vec * trim_amt)
            new_end_coords.append(end_coord - vec * trim_amt) # depends on [control=['for'], data=[]]
        cylinders = self.add_representation('cylinders',
                                            {'startCoords': np.array(new_start_coords, dtype='float32'),
                                             'endCoords': np.array(new_end_coords, dtype='float32'),
                                             'colors': [13421772] * len(new_start_coords),
                                             'radii': [stick_radius] * len(new_start_coords),
                                             'opacity': opacity})

        # Update closure
        def update(self=self, rep=cylinders, start_idx=start_idx, end_idx=end_idx):
            self.update_representation(rep,
                                       {'startCoords': self.coordinates[list(start_idx)],
                                        'endCoords': self.coordinates[list(end_idx)]})
        self.update_callbacks.append(update) # depends on [control=['if'], data=[]]
    self.autozoom(self.coordinates)
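The bond trimming in ball_and_sticks follows from sphere/cylinder geometry: a stick of radius r_stick meets a ball of radius r_ball at sqrt(r_ball**2 - r_stick**2) along the bond axis, so shortening each end by that amount keeps translucent sticks from poking through the atoms. A self-contained check of the same computation:

import numpy as np

r_ball, r_stick = 0.05, 0.02
# Distance from sphere center to where the cylinder surface meets the sphere.
trim = (r_ball ** 2 - r_stick ** 2) ** 0.5 if r_ball > r_stick else 0.0
start, end = np.array([0.0, 0.0, 0.0]), np.array([0.0, 0.0, 1.0])
unit = (end - start) / np.linalg.norm(end - start)
trimmed_start = start + unit * trim  # cylinder now begins at the sphere surface
trimmed_end = end - unit * trim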