code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
async def disable_digital_reporting(self, command): """ Disable Firmata reporting for a digital pin. :param command: {"method": "disable_digital_reporting", "params": [PIN]} :returns: No return message. """ pin = int(command[0]) await self.core.disable_digital_reporting(pin)
<ast.AsyncFunctionDef object at 0x7da18dc049d0>
keyword[async] keyword[def] identifier[disable_digital_reporting] ( identifier[self] , identifier[command] ): literal[string] identifier[pin] = identifier[int] ( identifier[command] [ literal[int] ]) keyword[await] identifier[self] . identifier[core] . identifier[disable_digital_reporting] ( identifier[pin] )
async def disable_digital_reporting(self, command): """ Disable Firmata reporting for a digital pin. :param command: {"method": "disable_digital_reporting", "params": [PIN]} :returns: No return message. """ pin = int(command[0]) await self.core.disable_digital_reporting(pin)
def measure(self, ndim=None): """Return the Lebesgue measure of this interval product. Parameters ---------- ndim : int, optional Dimension of the measure to apply. ``None`` is interpreted as `true_ndim`, which always results in a finite and positive result (unless the set is a single point). Examples -------- >>> min_pt, max_pt = [-1, 2.5, 0], [-0.5, 10, 0] >>> rbox = IntervalProd(min_pt, max_pt) >>> rbox.measure() 3.75 >>> rbox.measure(ndim=3) 0.0 >>> rbox.measure(ndim=3) == rbox.volume True >>> rbox.measure(ndim=1) inf >>> rbox.measure() == rbox.squeeze().volume True """ if self.true_ndim == 0: return 0.0 if ndim is None: return self.measure(ndim=self.true_ndim) elif ndim < self.true_ndim: return np.inf elif ndim > self.true_ndim: return 0.0 else: return np.prod(self.extent[self.nondegen_byaxis])
def function[measure, parameter[self, ndim]]: constant[Return the Lebesgue measure of this interval product. Parameters ---------- ndim : int, optional Dimension of the measure to apply. ``None`` is interpreted as `true_ndim`, which always results in a finite and positive result (unless the set is a single point). Examples -------- >>> min_pt, max_pt = [-1, 2.5, 0], [-0.5, 10, 0] >>> rbox = IntervalProd(min_pt, max_pt) >>> rbox.measure() 3.75 >>> rbox.measure(ndim=3) 0.0 >>> rbox.measure(ndim=3) == rbox.volume True >>> rbox.measure(ndim=1) inf >>> rbox.measure() == rbox.squeeze().volume True ] if compare[name[self].true_ndim equal[==] constant[0]] begin[:] return[constant[0.0]] if compare[name[ndim] is constant[None]] begin[:] return[call[name[self].measure, parameter[]]]
keyword[def] identifier[measure] ( identifier[self] , identifier[ndim] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[true_ndim] == literal[int] : keyword[return] literal[int] keyword[if] identifier[ndim] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[measure] ( identifier[ndim] = identifier[self] . identifier[true_ndim] ) keyword[elif] identifier[ndim] < identifier[self] . identifier[true_ndim] : keyword[return] identifier[np] . identifier[inf] keyword[elif] identifier[ndim] > identifier[self] . identifier[true_ndim] : keyword[return] literal[int] keyword[else] : keyword[return] identifier[np] . identifier[prod] ( identifier[self] . identifier[extent] [ identifier[self] . identifier[nondegen_byaxis] ])
def measure(self, ndim=None): """Return the Lebesgue measure of this interval product. Parameters ---------- ndim : int, optional Dimension of the measure to apply. ``None`` is interpreted as `true_ndim`, which always results in a finite and positive result (unless the set is a single point). Examples -------- >>> min_pt, max_pt = [-1, 2.5, 0], [-0.5, 10, 0] >>> rbox = IntervalProd(min_pt, max_pt) >>> rbox.measure() 3.75 >>> rbox.measure(ndim=3) 0.0 >>> rbox.measure(ndim=3) == rbox.volume True >>> rbox.measure(ndim=1) inf >>> rbox.measure() == rbox.squeeze().volume True """ if self.true_ndim == 0: return 0.0 # depends on [control=['if'], data=[]] if ndim is None: return self.measure(ndim=self.true_ndim) # depends on [control=['if'], data=[]] elif ndim < self.true_ndim: return np.inf # depends on [control=['if'], data=[]] elif ndim > self.true_ndim: return 0.0 # depends on [control=['if'], data=[]] else: return np.prod(self.extent[self.nondegen_byaxis])
def _handle_list_marker(self): """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``).""" markup = self._read() if markup == ";": self._context |= contexts.DL_TERM self._emit(tokens.TagOpenOpen(wiki_markup=markup)) self._emit_text(get_html_tag(markup)) self._emit(tokens.TagCloseSelfclose())
def function[_handle_list_marker, parameter[self]]: constant[Handle a list marker at the head (``#``, ``*``, ``;``, ``:``).] variable[markup] assign[=] call[name[self]._read, parameter[]] if compare[name[markup] equal[==] constant[;]] begin[:] <ast.AugAssign object at 0x7da204344490> call[name[self]._emit, parameter[call[name[tokens].TagOpenOpen, parameter[]]]] call[name[self]._emit_text, parameter[call[name[get_html_tag], parameter[name[markup]]]]] call[name[self]._emit, parameter[call[name[tokens].TagCloseSelfclose, parameter[]]]]
keyword[def] identifier[_handle_list_marker] ( identifier[self] ): literal[string] identifier[markup] = identifier[self] . identifier[_read] () keyword[if] identifier[markup] == literal[string] : identifier[self] . identifier[_context] |= identifier[contexts] . identifier[DL_TERM] identifier[self] . identifier[_emit] ( identifier[tokens] . identifier[TagOpenOpen] ( identifier[wiki_markup] = identifier[markup] )) identifier[self] . identifier[_emit_text] ( identifier[get_html_tag] ( identifier[markup] )) identifier[self] . identifier[_emit] ( identifier[tokens] . identifier[TagCloseSelfclose] ())
def _handle_list_marker(self): """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``).""" markup = self._read() if markup == ';': self._context |= contexts.DL_TERM # depends on [control=['if'], data=[]] self._emit(tokens.TagOpenOpen(wiki_markup=markup)) self._emit_text(get_html_tag(markup)) self._emit(tokens.TagCloseSelfclose())
def node_to_xml(node, output=sys.stdout, nsmap=None): """ Convert a Node object into a pretty .xml file without keeping everything in memory. If you just want the string representation use tostring(node). :param node: a Node-compatible object (ElementTree nodes are fine) :param nsmap: if given, shorten the tags with aliases """ if nsmap: for ns, prefix in nsmap.items(): if prefix: node['xmlns:' + prefix[:-1]] = ns else: node['xmlns'] = ns with StreamingXMLWriter(output, nsmap=nsmap) as w: w.serialize(node)
def function[node_to_xml, parameter[node, output, nsmap]]: constant[ Convert a Node object into a pretty .xml file without keeping everything in memory. If you just want the string representation use tostring(node). :param node: a Node-compatible object (ElementTree nodes are fine) :param nsmap: if given, shorten the tags with aliases ] if name[nsmap] begin[:] for taget[tuple[[<ast.Name object at 0x7da204621b70>, <ast.Name object at 0x7da2046234c0>]]] in starred[call[name[nsmap].items, parameter[]]] begin[:] if name[prefix] begin[:] call[name[node]][binary_operation[constant[xmlns:] + call[name[prefix]][<ast.Slice object at 0x7da2046234f0>]]] assign[=] name[ns] with call[name[StreamingXMLWriter], parameter[name[output]]] begin[:] call[name[w].serialize, parameter[name[node]]]
keyword[def] identifier[node_to_xml] ( identifier[node] , identifier[output] = identifier[sys] . identifier[stdout] , identifier[nsmap] = keyword[None] ): literal[string] keyword[if] identifier[nsmap] : keyword[for] identifier[ns] , identifier[prefix] keyword[in] identifier[nsmap] . identifier[items] (): keyword[if] identifier[prefix] : identifier[node] [ literal[string] + identifier[prefix] [:- literal[int] ]]= identifier[ns] keyword[else] : identifier[node] [ literal[string] ]= identifier[ns] keyword[with] identifier[StreamingXMLWriter] ( identifier[output] , identifier[nsmap] = identifier[nsmap] ) keyword[as] identifier[w] : identifier[w] . identifier[serialize] ( identifier[node] )
def node_to_xml(node, output=sys.stdout, nsmap=None): """ Convert a Node object into a pretty .xml file without keeping everything in memory. If you just want the string representation use tostring(node). :param node: a Node-compatible object (ElementTree nodes are fine) :param nsmap: if given, shorten the tags with aliases """ if nsmap: for (ns, prefix) in nsmap.items(): if prefix: node['xmlns:' + prefix[:-1]] = ns # depends on [control=['if'], data=[]] else: node['xmlns'] = ns # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] with StreamingXMLWriter(output, nsmap=nsmap) as w: w.serialize(node) # depends on [control=['with'], data=['w']]
def metadata_query(fpid, apikey): """Queries the Last.fm servers for metadata about a given fingerprint ID (an integer). Returns the XML response (a string). """ params = { 'method': 'track.getFingerprintMetadata', 'fingerprintid': fpid, 'api_key': apikey, } url = '%s?%s' % (URL_METADATA, urllib.urlencode(params)) try: fh = _query_wrap(urllib.urlopen, url) except urllib2.HTTPError: raise CommunicationError('metadata query failed') except httplib.BadStatusLine: raise CommunicationError('bad response in metadata query') except IOError: raise CommunicationError('metadata query failed') return fh.read()
def function[metadata_query, parameter[fpid, apikey]]: constant[Queries the Last.fm servers for metadata about a given fingerprint ID (an integer). Returns the XML response (a string). ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bb97b0>, <ast.Constant object at 0x7da1b0bb8400>, <ast.Constant object at 0x7da1b0bb8f10>], [<ast.Constant object at 0x7da1b0bb8ca0>, <ast.Name object at 0x7da1b0bbab60>, <ast.Name object at 0x7da1b0bb97e0>]] variable[url] assign[=] binary_operation[constant[%s?%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0bb8460>, <ast.Call object at 0x7da1b0bb84f0>]]] <ast.Try object at 0x7da1b0bb9780> return[call[name[fh].read, parameter[]]]
keyword[def] identifier[metadata_query] ( identifier[fpid] , identifier[apikey] ): literal[string] identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[fpid] , literal[string] : identifier[apikey] , } identifier[url] = literal[string] %( identifier[URL_METADATA] , identifier[urllib] . identifier[urlencode] ( identifier[params] )) keyword[try] : identifier[fh] = identifier[_query_wrap] ( identifier[urllib] . identifier[urlopen] , identifier[url] ) keyword[except] identifier[urllib2] . identifier[HTTPError] : keyword[raise] identifier[CommunicationError] ( literal[string] ) keyword[except] identifier[httplib] . identifier[BadStatusLine] : keyword[raise] identifier[CommunicationError] ( literal[string] ) keyword[except] identifier[IOError] : keyword[raise] identifier[CommunicationError] ( literal[string] ) keyword[return] identifier[fh] . identifier[read] ()
def metadata_query(fpid, apikey): """Queries the Last.fm servers for metadata about a given fingerprint ID (an integer). Returns the XML response (a string). """ params = {'method': 'track.getFingerprintMetadata', 'fingerprintid': fpid, 'api_key': apikey} url = '%s?%s' % (URL_METADATA, urllib.urlencode(params)) try: fh = _query_wrap(urllib.urlopen, url) # depends on [control=['try'], data=[]] except urllib2.HTTPError: raise CommunicationError('metadata query failed') # depends on [control=['except'], data=[]] except httplib.BadStatusLine: raise CommunicationError('bad response in metadata query') # depends on [control=['except'], data=[]] except IOError: raise CommunicationError('metadata query failed') # depends on [control=['except'], data=[]] return fh.read()
def _ProcessMessageHandlerRequests(self, requests): """Processes message handler requests.""" logging.debug("Leased message handler request ids: %s", ",".join(str(r.request_id) for r in requests)) grouped_requests = collection.Group(requests, lambda r: r.handler_name) for handler_name, requests_for_handler in iteritems(grouped_requests): handler_cls = handler_registry.handler_name_map.get(handler_name) if not handler_cls: logging.error("Unknown message handler: %s", handler_name) continue stats_collector_instance.Get().IncrementCounter( "well_known_flow_requests", fields=[handler_name]) try: logging.debug("Running %d messages for handler %s", len(requests_for_handler), handler_name) handler_cls(token=self.token).ProcessMessages(requests_for_handler) except Exception as e: # pylint: disable=broad-except logging.exception("Exception while processing message handler %s: %s", handler_name, e) logging.debug("Deleting message handler request ids: %s", ",".join(str(r.request_id) for r in requests)) data_store.REL_DB.DeleteMessageHandlerRequests(requests)
def function[_ProcessMessageHandlerRequests, parameter[self, requests]]: constant[Processes message handler requests.] call[name[logging].debug, parameter[constant[Leased message handler request ids: %s], call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da1b1b46e00>]]]] variable[grouped_requests] assign[=] call[name[collection].Group, parameter[name[requests], <ast.Lambda object at 0x7da1b1b46650>]] for taget[tuple[[<ast.Name object at 0x7da1b1b45330>, <ast.Name object at 0x7da1b1b45390>]]] in starred[call[name[iteritems], parameter[name[grouped_requests]]]] begin[:] variable[handler_cls] assign[=] call[name[handler_registry].handler_name_map.get, parameter[name[handler_name]]] if <ast.UnaryOp object at 0x7da1b1b44a00> begin[:] call[name[logging].error, parameter[constant[Unknown message handler: %s], name[handler_name]]] continue call[call[name[stats_collector_instance].Get, parameter[]].IncrementCounter, parameter[constant[well_known_flow_requests]]] <ast.Try object at 0x7da1b1b45e10> call[name[logging].debug, parameter[constant[Deleting message handler request ids: %s], call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da1b1b457e0>]]]] call[name[data_store].REL_DB.DeleteMessageHandlerRequests, parameter[name[requests]]]
keyword[def] identifier[_ProcessMessageHandlerRequests] ( identifier[self] , identifier[requests] ): literal[string] identifier[logging] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[str] ( identifier[r] . identifier[request_id] ) keyword[for] identifier[r] keyword[in] identifier[requests] )) identifier[grouped_requests] = identifier[collection] . identifier[Group] ( identifier[requests] , keyword[lambda] identifier[r] : identifier[r] . identifier[handler_name] ) keyword[for] identifier[handler_name] , identifier[requests_for_handler] keyword[in] identifier[iteritems] ( identifier[grouped_requests] ): identifier[handler_cls] = identifier[handler_registry] . identifier[handler_name_map] . identifier[get] ( identifier[handler_name] ) keyword[if] keyword[not] identifier[handler_cls] : identifier[logging] . identifier[error] ( literal[string] , identifier[handler_name] ) keyword[continue] identifier[stats_collector_instance] . identifier[Get] (). identifier[IncrementCounter] ( literal[string] , identifier[fields] =[ identifier[handler_name] ]) keyword[try] : identifier[logging] . identifier[debug] ( literal[string] , identifier[len] ( identifier[requests_for_handler] ), identifier[handler_name] ) identifier[handler_cls] ( identifier[token] = identifier[self] . identifier[token] ). identifier[ProcessMessages] ( identifier[requests_for_handler] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logging] . identifier[exception] ( literal[string] , identifier[handler_name] , identifier[e] ) identifier[logging] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[str] ( identifier[r] . identifier[request_id] ) keyword[for] identifier[r] keyword[in] identifier[requests] )) identifier[data_store] . identifier[REL_DB] . identifier[DeleteMessageHandlerRequests] ( identifier[requests] )
def _ProcessMessageHandlerRequests(self, requests): """Processes message handler requests.""" logging.debug('Leased message handler request ids: %s', ','.join((str(r.request_id) for r in requests))) grouped_requests = collection.Group(requests, lambda r: r.handler_name) for (handler_name, requests_for_handler) in iteritems(grouped_requests): handler_cls = handler_registry.handler_name_map.get(handler_name) if not handler_cls: logging.error('Unknown message handler: %s', handler_name) continue # depends on [control=['if'], data=[]] stats_collector_instance.Get().IncrementCounter('well_known_flow_requests', fields=[handler_name]) try: logging.debug('Running %d messages for handler %s', len(requests_for_handler), handler_name) handler_cls(token=self.token).ProcessMessages(requests_for_handler) # depends on [control=['try'], data=[]] except Exception as e: # pylint: disable=broad-except logging.exception('Exception while processing message handler %s: %s', handler_name, e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=[]] logging.debug('Deleting message handler request ids: %s', ','.join((str(r.request_id) for r in requests))) data_store.REL_DB.DeleteMessageHandlerRequests(requests)
def ROL(self, a): """ Rotates all bits of the register one place left through the C (carry) bit. This is a 9-bit rotation. source code forms: ROL Q; ROLA; ROLB CC bits "HNZVC": -aaas """ r = (a << 1) | self.C self.clear_NZVC() self.update_NZVC_8(a, a, r) return r
def function[ROL, parameter[self, a]]: constant[ Rotates all bits of the register one place left through the C (carry) bit. This is a 9-bit rotation. source code forms: ROL Q; ROLA; ROLB CC bits "HNZVC": -aaas ] variable[r] assign[=] binary_operation[binary_operation[name[a] <ast.LShift object at 0x7da2590d69e0> constant[1]] <ast.BitOr object at 0x7da2590d6aa0> name[self].C] call[name[self].clear_NZVC, parameter[]] call[name[self].update_NZVC_8, parameter[name[a], name[a], name[r]]] return[name[r]]
keyword[def] identifier[ROL] ( identifier[self] , identifier[a] ): literal[string] identifier[r] =( identifier[a] << literal[int] )| identifier[self] . identifier[C] identifier[self] . identifier[clear_NZVC] () identifier[self] . identifier[update_NZVC_8] ( identifier[a] , identifier[a] , identifier[r] ) keyword[return] identifier[r]
def ROL(self, a): """ Rotates all bits of the register one place left through the C (carry) bit. This is a 9-bit rotation. source code forms: ROL Q; ROLA; ROLB CC bits "HNZVC": -aaas """ r = a << 1 | self.C self.clear_NZVC() self.update_NZVC_8(a, a, r) return r
def boundary_maximum_exponential(graph, xxx_todo_changeme3): r""" Boundary term processing adjacent voxels maximum value using an exponential relationship. An implementation of a boundary term, suitable to be used with the `~medpy.graphcut.generate.graph_from_voxels` function. The same as `boundary_difference_exponential`, but working on the gradient image instead of the original. See there for details. Parameters ---------- graph : GCGraph The graph to add the weights to. gradient_image : ndarray The gradient image. sigma : float The sigma parameter to use in the boundary term. spacing : sequence of float or False A sequence containing the slice spacing used for weighting the computed neighbourhood weight value for different dimensions. If `False`, no distance based weighting of the graph edges is performed. Notes ----- This function requires the gradient image to be passed along. That means that `~medpy.graphcut.generate.graph_from_voxels` has to be called with ``boundary_term_args`` set to the gradient image. """ (gradient_image, sigma, spacing) = xxx_todo_changeme3 gradient_image = scipy.asarray(gradient_image) def boundary_term_exponential(intensities): """ Implementation of a exponential boundary term computation over an array. """ # apply exp-(x**2/sigma**2) intensities = scipy.power(intensities, 2) intensities /= math.pow(sigma, 2) intensities *= -1 intensities = scipy.exp(intensities) intensities[intensities <= 0] = sys.float_info.min return intensities __skeleton_maximum(graph, gradient_image, boundary_term_exponential, spacing)
def function[boundary_maximum_exponential, parameter[graph, xxx_todo_changeme3]]: constant[ Boundary term processing adjacent voxels maximum value using an exponential relationship. An implementation of a boundary term, suitable to be used with the `~medpy.graphcut.generate.graph_from_voxels` function. The same as `boundary_difference_exponential`, but working on the gradient image instead of the original. See there for details. Parameters ---------- graph : GCGraph The graph to add the weights to. gradient_image : ndarray The gradient image. sigma : float The sigma parameter to use in the boundary term. spacing : sequence of float or False A sequence containing the slice spacing used for weighting the computed neighbourhood weight value for different dimensions. If `False`, no distance based weighting of the graph edges is performed. Notes ----- This function requires the gradient image to be passed along. That means that `~medpy.graphcut.generate.graph_from_voxels` has to be called with ``boundary_term_args`` set to the gradient image. ] <ast.Tuple object at 0x7da20c991a20> assign[=] name[xxx_todo_changeme3] variable[gradient_image] assign[=] call[name[scipy].asarray, parameter[name[gradient_image]]] def function[boundary_term_exponential, parameter[intensities]]: constant[ Implementation of a exponential boundary term computation over an array. ] variable[intensities] assign[=] call[name[scipy].power, parameter[name[intensities], constant[2]]] <ast.AugAssign object at 0x7da20c990af0> <ast.AugAssign object at 0x7da20c9910f0> variable[intensities] assign[=] call[name[scipy].exp, parameter[name[intensities]]] call[name[intensities]][compare[name[intensities] less_or_equal[<=] constant[0]]] assign[=] name[sys].float_info.min return[name[intensities]] call[name[__skeleton_maximum], parameter[name[graph], name[gradient_image], name[boundary_term_exponential], name[spacing]]]
keyword[def] identifier[boundary_maximum_exponential] ( identifier[graph] , identifier[xxx_todo_changeme3] ): literal[string] ( identifier[gradient_image] , identifier[sigma] , identifier[spacing] )= identifier[xxx_todo_changeme3] identifier[gradient_image] = identifier[scipy] . identifier[asarray] ( identifier[gradient_image] ) keyword[def] identifier[boundary_term_exponential] ( identifier[intensities] ): literal[string] identifier[intensities] = identifier[scipy] . identifier[power] ( identifier[intensities] , literal[int] ) identifier[intensities] /= identifier[math] . identifier[pow] ( identifier[sigma] , literal[int] ) identifier[intensities] *=- literal[int] identifier[intensities] = identifier[scipy] . identifier[exp] ( identifier[intensities] ) identifier[intensities] [ identifier[intensities] <= literal[int] ]= identifier[sys] . identifier[float_info] . identifier[min] keyword[return] identifier[intensities] identifier[__skeleton_maximum] ( identifier[graph] , identifier[gradient_image] , identifier[boundary_term_exponential] , identifier[spacing] )
def boundary_maximum_exponential(graph, xxx_todo_changeme3): """ Boundary term processing adjacent voxels maximum value using an exponential relationship. An implementation of a boundary term, suitable to be used with the `~medpy.graphcut.generate.graph_from_voxels` function. The same as `boundary_difference_exponential`, but working on the gradient image instead of the original. See there for details. Parameters ---------- graph : GCGraph The graph to add the weights to. gradient_image : ndarray The gradient image. sigma : float The sigma parameter to use in the boundary term. spacing : sequence of float or False A sequence containing the slice spacing used for weighting the computed neighbourhood weight value for different dimensions. If `False`, no distance based weighting of the graph edges is performed. Notes ----- This function requires the gradient image to be passed along. That means that `~medpy.graphcut.generate.graph_from_voxels` has to be called with ``boundary_term_args`` set to the gradient image. """ (gradient_image, sigma, spacing) = xxx_todo_changeme3 gradient_image = scipy.asarray(gradient_image) def boundary_term_exponential(intensities): """ Implementation of a exponential boundary term computation over an array. """ # apply exp-(x**2/sigma**2) intensities = scipy.power(intensities, 2) intensities /= math.pow(sigma, 2) intensities *= -1 intensities = scipy.exp(intensities) intensities[intensities <= 0] = sys.float_info.min return intensities __skeleton_maximum(graph, gradient_image, boundary_term_exponential, spacing)
def turn_physical_off(self): """ NAME: turn_physical_off PURPOSE: turn off automatic returning of outputs in physical units INPUT: (none) OUTPUT: (none) HISTORY: 2014-06-17 - Written - Bovy (IAS) """ self._roSet= False self._voSet= False self._orb.turn_physical_off()
def function[turn_physical_off, parameter[self]]: constant[ NAME: turn_physical_off PURPOSE: turn off automatic returning of outputs in physical units INPUT: (none) OUTPUT: (none) HISTORY: 2014-06-17 - Written - Bovy (IAS) ] name[self]._roSet assign[=] constant[False] name[self]._voSet assign[=] constant[False] call[name[self]._orb.turn_physical_off, parameter[]]
keyword[def] identifier[turn_physical_off] ( identifier[self] ): literal[string] identifier[self] . identifier[_roSet] = keyword[False] identifier[self] . identifier[_voSet] = keyword[False] identifier[self] . identifier[_orb] . identifier[turn_physical_off] ()
def turn_physical_off(self): """ NAME: turn_physical_off PURPOSE: turn off automatic returning of outputs in physical units INPUT: (none) OUTPUT: (none) HISTORY: 2014-06-17 - Written - Bovy (IAS) """ self._roSet = False self._voSet = False self._orb.turn_physical_off()
def humanize_timedelta(td): """Pretty-print a timedelta in a human readable format.""" secs = int(td.total_seconds()) hours, secs = divmod(secs, 60 * 60) mins, secs = divmod(secs, 60) if hours: return '%dh %dm' % (hours, mins) if mins: return '%dm' % mins return '%ds' % secs
def function[humanize_timedelta, parameter[td]]: constant[Pretty-print a timedelta in a human readable format.] variable[secs] assign[=] call[name[int], parameter[call[name[td].total_seconds, parameter[]]]] <ast.Tuple object at 0x7da20e957940> assign[=] call[name[divmod], parameter[name[secs], binary_operation[constant[60] * constant[60]]]] <ast.Tuple object at 0x7da207f019f0> assign[=] call[name[divmod], parameter[name[secs], constant[60]]] if name[hours] begin[:] return[binary_operation[constant[%dh %dm] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f02b60>, <ast.Name object at 0x7da207f01a50>]]]] if name[mins] begin[:] return[binary_operation[constant[%dm] <ast.Mod object at 0x7da2590d6920> name[mins]]] return[binary_operation[constant[%ds] <ast.Mod object at 0x7da2590d6920> name[secs]]]
keyword[def] identifier[humanize_timedelta] ( identifier[td] ): literal[string] identifier[secs] = identifier[int] ( identifier[td] . identifier[total_seconds] ()) identifier[hours] , identifier[secs] = identifier[divmod] ( identifier[secs] , literal[int] * literal[int] ) identifier[mins] , identifier[secs] = identifier[divmod] ( identifier[secs] , literal[int] ) keyword[if] identifier[hours] : keyword[return] literal[string] %( identifier[hours] , identifier[mins] ) keyword[if] identifier[mins] : keyword[return] literal[string] % identifier[mins] keyword[return] literal[string] % identifier[secs]
def humanize_timedelta(td): """Pretty-print a timedelta in a human readable format.""" secs = int(td.total_seconds()) (hours, secs) = divmod(secs, 60 * 60) (mins, secs) = divmod(secs, 60) if hours: return '%dh %dm' % (hours, mins) # depends on [control=['if'], data=[]] if mins: return '%dm' % mins # depends on [control=['if'], data=[]] return '%ds' % secs
def generateProjectFiles(self, dir=os.getcwd(), args=[]): """ Generates IDE project files for the Unreal project in the specified directory """ # If the project is a pure Blueprint project, then we cannot generate project files if os.path.exists(os.path.join(dir, 'Source')) == False: Utility.printStderr('Pure Blueprint project, nothing to generate project files for.') return # Generate the project files genScript = self.getGenerateScript() projectFile = self.getProjectDescriptor(dir) Utility.run([genScript, '-project=' + projectFile, '-game', '-engine'] + args, cwd=os.path.dirname(genScript), raiseOnError=True)
def function[generateProjectFiles, parameter[self, dir, args]]: constant[ Generates IDE project files for the Unreal project in the specified directory ] if compare[call[name[os].path.exists, parameter[call[name[os].path.join, parameter[name[dir], constant[Source]]]]] equal[==] constant[False]] begin[:] call[name[Utility].printStderr, parameter[constant[Pure Blueprint project, nothing to generate project files for.]]] return[None] variable[genScript] assign[=] call[name[self].getGenerateScript, parameter[]] variable[projectFile] assign[=] call[name[self].getProjectDescriptor, parameter[name[dir]]] call[name[Utility].run, parameter[binary_operation[list[[<ast.Name object at 0x7da18f00d7e0>, <ast.BinOp object at 0x7da18f00e200>, <ast.Constant object at 0x7da18f00df30>, <ast.Constant object at 0x7da18f00f1c0>]] + name[args]]]]
keyword[def] identifier[generateProjectFiles] ( identifier[self] , identifier[dir] = identifier[os] . identifier[getcwd] (), identifier[args] =[]): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , literal[string] ))== keyword[False] : identifier[Utility] . identifier[printStderr] ( literal[string] ) keyword[return] identifier[genScript] = identifier[self] . identifier[getGenerateScript] () identifier[projectFile] = identifier[self] . identifier[getProjectDescriptor] ( identifier[dir] ) identifier[Utility] . identifier[run] ([ identifier[genScript] , literal[string] + identifier[projectFile] , literal[string] , literal[string] ]+ identifier[args] , identifier[cwd] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[genScript] ), identifier[raiseOnError] = keyword[True] )
def generateProjectFiles(self, dir=os.getcwd(), args=[]): """ Generates IDE project files for the Unreal project in the specified directory """ # If the project is a pure Blueprint project, then we cannot generate project files if os.path.exists(os.path.join(dir, 'Source')) == False: Utility.printStderr('Pure Blueprint project, nothing to generate project files for.') return # depends on [control=['if'], data=[]] # Generate the project files genScript = self.getGenerateScript() projectFile = self.getProjectDescriptor(dir) Utility.run([genScript, '-project=' + projectFile, '-game', '-engine'] + args, cwd=os.path.dirname(genScript), raiseOnError=True)
def get_cipher(key=None, keyfile=None): """ Get cipher object, and then you can invoke: des = get_cipher() d = des.encrpy('Hello') print des.descrpy(d) """ des_func = import_attr(settings.SECRETKEY.CIPHER_CLS) kwargs = settings.SECRETKEY.CIPHER_ARGS if not key: key = functions.get_cipher_key(keyfile) cipher = des_func(key, **kwargs) return cipher
def function[get_cipher, parameter[key, keyfile]]: constant[ Get cipher object, and then you can invoke: des = get_cipher() d = des.encrpy('Hello') print des.descrpy(d) ] variable[des_func] assign[=] call[name[import_attr], parameter[name[settings].SECRETKEY.CIPHER_CLS]] variable[kwargs] assign[=] name[settings].SECRETKEY.CIPHER_ARGS if <ast.UnaryOp object at 0x7da20c9924a0> begin[:] variable[key] assign[=] call[name[functions].get_cipher_key, parameter[name[keyfile]]] variable[cipher] assign[=] call[name[des_func], parameter[name[key]]] return[name[cipher]]
keyword[def] identifier[get_cipher] ( identifier[key] = keyword[None] , identifier[keyfile] = keyword[None] ): literal[string] identifier[des_func] = identifier[import_attr] ( identifier[settings] . identifier[SECRETKEY] . identifier[CIPHER_CLS] ) identifier[kwargs] = identifier[settings] . identifier[SECRETKEY] . identifier[CIPHER_ARGS] keyword[if] keyword[not] identifier[key] : identifier[key] = identifier[functions] . identifier[get_cipher_key] ( identifier[keyfile] ) identifier[cipher] = identifier[des_func] ( identifier[key] ,** identifier[kwargs] ) keyword[return] identifier[cipher]
def get_cipher(key=None, keyfile=None): """ Get cipher object, and then you can invoke: des = get_cipher() d = des.encrpy('Hello') print des.descrpy(d) """ des_func = import_attr(settings.SECRETKEY.CIPHER_CLS) kwargs = settings.SECRETKEY.CIPHER_ARGS if not key: key = functions.get_cipher_key(keyfile) # depends on [control=['if'], data=[]] cipher = des_func(key, **kwargs) return cipher
def _get_snpeff_cmd(cmd_name, datadir, data, out_file): """Retrieve snpEff base command line. """ resources = config_utils.get_resources("snpeff", data["config"]) jvm_opts = resources.get("jvm_opts", ["-Xms750m", "-Xmx3g"]) # scale by cores, defaulting to 2x base usage to ensure we have enough memory # for single core runs to use with human genomes. # Sets a maximum amount of memory to avoid core dumps exceeding 32Gb # We shouldn't need that much memory for snpEff, so avoid issues # https://www.elastic.co/guide/en/elasticsearch/guide/current/heap-sizing.html#compressed_oops jvm_opts = config_utils.adjust_opts(jvm_opts, {"algorithm": {"memory_adjust": {"direction": "increase", "maximum": "30000M", "magnitude": max(2, dd.get_cores(data))}}}) memory = " ".join(jvm_opts) snpeff = config_utils.get_program("snpEff", data["config"]) java_args = "-Djava.io.tmpdir=%s" % utils.safe_makedir(os.path.join(os.path.dirname(out_file), "tmp")) export = "unset JAVA_HOME && export PATH=%s:\"$PATH\" && " % (utils.get_java_binpath()) cmd = "{export} {snpeff} {memory} {java_args} {cmd_name} -dataDir {datadir}" return cmd.format(**locals())
def function[_get_snpeff_cmd, parameter[cmd_name, datadir, data, out_file]]: constant[Retrieve snpEff base command line. ] variable[resources] assign[=] call[name[config_utils].get_resources, parameter[constant[snpeff], call[name[data]][constant[config]]]] variable[jvm_opts] assign[=] call[name[resources].get, parameter[constant[jvm_opts], list[[<ast.Constant object at 0x7da1b26ad870>, <ast.Constant object at 0x7da1b26ad2d0>]]]] variable[jvm_opts] assign[=] call[name[config_utils].adjust_opts, parameter[name[jvm_opts], dictionary[[<ast.Constant object at 0x7da1b26ac1f0>], [<ast.Dict object at 0x7da1b26ac910>]]]] variable[memory] assign[=] call[constant[ ].join, parameter[name[jvm_opts]]] variable[snpeff] assign[=] call[name[config_utils].get_program, parameter[constant[snpEff], call[name[data]][constant[config]]]] variable[java_args] assign[=] binary_operation[constant[-Djava.io.tmpdir=%s] <ast.Mod object at 0x7da2590d6920> call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[out_file]]], constant[tmp]]]]]] variable[export] assign[=] binary_operation[constant[unset JAVA_HOME && export PATH=%s:"$PATH" && ] <ast.Mod object at 0x7da2590d6920> call[name[utils].get_java_binpath, parameter[]]] variable[cmd] assign[=] constant[{export} {snpeff} {memory} {java_args} {cmd_name} -dataDir {datadir}] return[call[name[cmd].format, parameter[]]]
keyword[def] identifier[_get_snpeff_cmd] ( identifier[cmd_name] , identifier[datadir] , identifier[data] , identifier[out_file] ): literal[string] identifier[resources] = identifier[config_utils] . identifier[get_resources] ( literal[string] , identifier[data] [ literal[string] ]) identifier[jvm_opts] = identifier[resources] . identifier[get] ( literal[string] ,[ literal[string] , literal[string] ]) identifier[jvm_opts] = identifier[config_utils] . identifier[adjust_opts] ( identifier[jvm_opts] ,{ literal[string] :{ literal[string] : { literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[max] ( literal[int] , identifier[dd] . identifier[get_cores] ( identifier[data] ))}}}) identifier[memory] = literal[string] . identifier[join] ( identifier[jvm_opts] ) identifier[snpeff] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ]) identifier[java_args] = literal[string] % identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[out_file] ), literal[string] )) identifier[export] = literal[string] %( identifier[utils] . identifier[get_java_binpath] ()) identifier[cmd] = literal[string] keyword[return] identifier[cmd] . identifier[format] (** identifier[locals] ())
def _get_snpeff_cmd(cmd_name, datadir, data, out_file): """Retrieve snpEff base command line. """ resources = config_utils.get_resources('snpeff', data['config']) jvm_opts = resources.get('jvm_opts', ['-Xms750m', '-Xmx3g']) # scale by cores, defaulting to 2x base usage to ensure we have enough memory # for single core runs to use with human genomes. # Sets a maximum amount of memory to avoid core dumps exceeding 32Gb # We shouldn't need that much memory for snpEff, so avoid issues # https://www.elastic.co/guide/en/elasticsearch/guide/current/heap-sizing.html#compressed_oops jvm_opts = config_utils.adjust_opts(jvm_opts, {'algorithm': {'memory_adjust': {'direction': 'increase', 'maximum': '30000M', 'magnitude': max(2, dd.get_cores(data))}}}) memory = ' '.join(jvm_opts) snpeff = config_utils.get_program('snpEff', data['config']) java_args = '-Djava.io.tmpdir=%s' % utils.safe_makedir(os.path.join(os.path.dirname(out_file), 'tmp')) export = 'unset JAVA_HOME && export PATH=%s:"$PATH" && ' % utils.get_java_binpath() cmd = '{export} {snpeff} {memory} {java_args} {cmd_name} -dataDir {datadir}' return cmd.format(**locals())
def _parse_normalization_kwargs(self, use_batch_norm, batch_norm_config, normalization_ctor, normalization_kwargs): """Sets up normalization, checking old and new flags.""" if use_batch_norm is not None: # Delete this whole block when deprecation is done. util.deprecation_warning( "`use_batch_norm` kwarg is deprecated. Change your code to instead " "specify `normalization_ctor` and `normalization_kwargs`.") if not use_batch_norm: # Explicitly set to False - normalization_{ctor,kwargs} has precedence. self._check_and_assign_normalization_members(normalization_ctor, normalization_kwargs or {}) else: # Explicitly set to true - new kwargs must not be used. if normalization_ctor is not None or normalization_kwargs is not None: raise ValueError( "if use_batch_norm is specified, normalization_ctor and " "normalization_kwargs must not be.") self._check_and_assign_normalization_members(batch_norm.BatchNorm, batch_norm_config or {}) else: # Old kwargs not set, this block will remain after removing old kwarg. self._check_and_assign_normalization_members(normalization_ctor, normalization_kwargs or {})
def function[_parse_normalization_kwargs, parameter[self, use_batch_norm, batch_norm_config, normalization_ctor, normalization_kwargs]]: constant[Sets up normalization, checking old and new flags.] if compare[name[use_batch_norm] is_not constant[None]] begin[:] call[name[util].deprecation_warning, parameter[constant[`use_batch_norm` kwarg is deprecated. Change your code to instead specify `normalization_ctor` and `normalization_kwargs`.]]] if <ast.UnaryOp object at 0x7da1b1cad690> begin[:] call[name[self]._check_and_assign_normalization_members, parameter[name[normalization_ctor], <ast.BoolOp object at 0x7da1b1cae830>]]
keyword[def] identifier[_parse_normalization_kwargs] ( identifier[self] , identifier[use_batch_norm] , identifier[batch_norm_config] , identifier[normalization_ctor] , identifier[normalization_kwargs] ): literal[string] keyword[if] identifier[use_batch_norm] keyword[is] keyword[not] keyword[None] : identifier[util] . identifier[deprecation_warning] ( literal[string] literal[string] ) keyword[if] keyword[not] identifier[use_batch_norm] : identifier[self] . identifier[_check_and_assign_normalization_members] ( identifier[normalization_ctor] , identifier[normalization_kwargs] keyword[or] {}) keyword[else] : keyword[if] identifier[normalization_ctor] keyword[is] keyword[not] keyword[None] keyword[or] identifier[normalization_kwargs] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[self] . identifier[_check_and_assign_normalization_members] ( identifier[batch_norm] . identifier[BatchNorm] , identifier[batch_norm_config] keyword[or] {}) keyword[else] : identifier[self] . identifier[_check_and_assign_normalization_members] ( identifier[normalization_ctor] , identifier[normalization_kwargs] keyword[or] {})
def _parse_normalization_kwargs(self, use_batch_norm, batch_norm_config, normalization_ctor, normalization_kwargs): """Sets up normalization, checking old and new flags.""" if use_batch_norm is not None: # Delete this whole block when deprecation is done. util.deprecation_warning('`use_batch_norm` kwarg is deprecated. Change your code to instead specify `normalization_ctor` and `normalization_kwargs`.') if not use_batch_norm: # Explicitly set to False - normalization_{ctor,kwargs} has precedence. self._check_and_assign_normalization_members(normalization_ctor, normalization_kwargs or {}) # depends on [control=['if'], data=[]] else: # Explicitly set to true - new kwargs must not be used. if normalization_ctor is not None or normalization_kwargs is not None: raise ValueError('if use_batch_norm is specified, normalization_ctor and normalization_kwargs must not be.') # depends on [control=['if'], data=[]] self._check_and_assign_normalization_members(batch_norm.BatchNorm, batch_norm_config or {}) # depends on [control=['if'], data=['use_batch_norm']] else: # Old kwargs not set, this block will remain after removing old kwarg. self._check_and_assign_normalization_members(normalization_ctor, normalization_kwargs or {})
def is_alive(self): """Checks if the current node is up and running in the cloud. It only checks the status provided by the cloud interface. Therefore a node might be running, but not yet ready to ssh into it. """ running = False if not self.instance_id: return False try: log.debug("Getting information for instance %s", self.instance_id) running = self._cloud_provider.is_instance_running( self.instance_id) except Exception as ex: log.debug("Ignoring error while looking for vm id %s: %s", self.instance_id, str(ex)) if running: log.debug("node `%s` (instance id %s) is up and running", self.name, self.instance_id) self.update_ips() else: log.debug("node `%s` (instance id `%s`) still building...", self.name, self.instance_id) return running
def function[is_alive, parameter[self]]: constant[Checks if the current node is up and running in the cloud. It only checks the status provided by the cloud interface. Therefore a node might be running, but not yet ready to ssh into it. ] variable[running] assign[=] constant[False] if <ast.UnaryOp object at 0x7da204621d50> begin[:] return[constant[False]] <ast.Try object at 0x7da204623ca0> if name[running] begin[:] call[name[log].debug, parameter[constant[node `%s` (instance id %s) is up and running], name[self].name, name[self].instance_id]] call[name[self].update_ips, parameter[]] return[name[running]]
keyword[def] identifier[is_alive] ( identifier[self] ): literal[string] identifier[running] = keyword[False] keyword[if] keyword[not] identifier[self] . identifier[instance_id] : keyword[return] keyword[False] keyword[try] : identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[instance_id] ) identifier[running] = identifier[self] . identifier[_cloud_provider] . identifier[is_instance_running] ( identifier[self] . identifier[instance_id] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[instance_id] , identifier[str] ( identifier[ex] )) keyword[if] identifier[running] : identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[instance_id] ) identifier[self] . identifier[update_ips] () keyword[else] : identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[instance_id] ) keyword[return] identifier[running]
def is_alive(self): """Checks if the current node is up and running in the cloud. It only checks the status provided by the cloud interface. Therefore a node might be running, but not yet ready to ssh into it. """ running = False if not self.instance_id: return False # depends on [control=['if'], data=[]] try: log.debug('Getting information for instance %s', self.instance_id) running = self._cloud_provider.is_instance_running(self.instance_id) # depends on [control=['try'], data=[]] except Exception as ex: log.debug('Ignoring error while looking for vm id %s: %s', self.instance_id, str(ex)) # depends on [control=['except'], data=['ex']] if running: log.debug('node `%s` (instance id %s) is up and running', self.name, self.instance_id) self.update_ips() # depends on [control=['if'], data=[]] else: log.debug('node `%s` (instance id `%s`) still building...', self.name, self.instance_id) return running
def parse(filename, MAX_TERM_COUNT=1000): """ MAX_TERM_COUNT = 10000 # There are 39,000 terms in the GO! """ with open(filename, "r") as f: termId = None name = None desc = None parents = [] termCount = 0 for l in f.readlines(): if l.startswith("id:"): termId = l.strip()[4:] if l.startswith("name:"): name = l.strip()[6:] elif l.startswith("def:"): desc = l.strip()[5:] elif l.startswith("is_a:"): pid = l.strip()[6:].split(" ", 1)[0] parents.append(pid) if len(l) == 1: # newline # save if termId is not None and name is not None: terms[termId] = {'name': name, 'desc': desc, 'parents': parents[:], 'children': []} termId = None name = None parents = [] termCount += 1 if MAX_TERM_COUNT is not None and \ termCount > MAX_TERM_COUNT: break count = 0 for tid, tdict in terms.items(): count += 1 # purely for display for p in tdict['parents']: if p in terms.keys(): terms[p]['children'].append(tid) # Get unique term IDs for Tag Groups. tagGroups = set() for tid, tdict in terms.items(): # Only create Tags for GO:terms that are 'leafs' of the tree if len(tdict['children']) == 0: for p in tdict['parents']: tagGroups.add(p) return tagGroups, terms
def function[parse, parameter[filename, MAX_TERM_COUNT]]: constant[ MAX_TERM_COUNT = 10000 # There are 39,000 terms in the GO! ] with call[name[open], parameter[name[filename], constant[r]]] begin[:] variable[termId] assign[=] constant[None] variable[name] assign[=] constant[None] variable[desc] assign[=] constant[None] variable[parents] assign[=] list[[]] variable[termCount] assign[=] constant[0] for taget[name[l]] in starred[call[name[f].readlines, parameter[]]] begin[:] if call[name[l].startswith, parameter[constant[id:]]] begin[:] variable[termId] assign[=] call[call[name[l].strip, parameter[]]][<ast.Slice object at 0x7da2054a48e0>] if call[name[l].startswith, parameter[constant[name:]]] begin[:] variable[name] assign[=] call[call[name[l].strip, parameter[]]][<ast.Slice object at 0x7da2054a4d60>] if compare[call[name[len], parameter[name[l]]] equal[==] constant[1]] begin[:] if <ast.BoolOp object at 0x7da18f09ee00> begin[:] call[name[terms]][name[termId]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fe83d0>, <ast.Constant object at 0x7da1b0fe9360>, <ast.Constant object at 0x7da1b0febfd0>, <ast.Constant object at 0x7da1b0fe9810>], [<ast.Name object at 0x7da1b0fe9ba0>, <ast.Name object at 0x7da1b0fe8a30>, <ast.Subscript object at 0x7da1b0fe8070>, <ast.List object at 0x7da1b0fe8e50>]] variable[termId] assign[=] constant[None] variable[name] assign[=] constant[None] variable[parents] assign[=] list[[]] <ast.AugAssign object at 0x7da1b0febd00> if <ast.BoolOp object at 0x7da1b0feb970> begin[:] break variable[count] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b0febb20>, <ast.Name object at 0x7da1b0fe9d20>]]] in starred[call[name[terms].items, parameter[]]] begin[:] <ast.AugAssign object at 0x7da1b0fea170> for taget[name[p]] in starred[call[name[tdict]][constant[parents]]] begin[:] if compare[name[p] in call[name[terms].keys, parameter[]]] begin[:] call[call[call[name[terms]][name[p]]][constant[children]].append, parameter[name[tid]]] variable[tagGroups] assign[=] call[name[set], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0feb7c0>, <ast.Name object at 0x7da1b0fe9cc0>]]] in starred[call[name[terms].items, parameter[]]] begin[:] if compare[call[name[len], parameter[call[name[tdict]][constant[children]]]] equal[==] constant[0]] begin[:] for taget[name[p]] in starred[call[name[tdict]][constant[parents]]] begin[:] call[name[tagGroups].add, parameter[name[p]]] return[tuple[[<ast.Name object at 0x7da1b0febca0>, <ast.Name object at 0x7da1b0febc40>]]]
keyword[def] identifier[parse] ( identifier[filename] , identifier[MAX_TERM_COUNT] = literal[int] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : identifier[termId] = keyword[None] identifier[name] = keyword[None] identifier[desc] = keyword[None] identifier[parents] =[] identifier[termCount] = literal[int] keyword[for] identifier[l] keyword[in] identifier[f] . identifier[readlines] (): keyword[if] identifier[l] . identifier[startswith] ( literal[string] ): identifier[termId] = identifier[l] . identifier[strip] ()[ literal[int] :] keyword[if] identifier[l] . identifier[startswith] ( literal[string] ): identifier[name] = identifier[l] . identifier[strip] ()[ literal[int] :] keyword[elif] identifier[l] . identifier[startswith] ( literal[string] ): identifier[desc] = identifier[l] . identifier[strip] ()[ literal[int] :] keyword[elif] identifier[l] . identifier[startswith] ( literal[string] ): identifier[pid] = identifier[l] . identifier[strip] ()[ literal[int] :]. identifier[split] ( literal[string] , literal[int] )[ literal[int] ] identifier[parents] . identifier[append] ( identifier[pid] ) keyword[if] identifier[len] ( identifier[l] )== literal[int] : keyword[if] identifier[termId] keyword[is] keyword[not] keyword[None] keyword[and] identifier[name] keyword[is] keyword[not] keyword[None] : identifier[terms] [ identifier[termId] ]={ literal[string] : identifier[name] , literal[string] : identifier[desc] , literal[string] : identifier[parents] [:], literal[string] :[]} identifier[termId] = keyword[None] identifier[name] = keyword[None] identifier[parents] =[] identifier[termCount] += literal[int] keyword[if] identifier[MAX_TERM_COUNT] keyword[is] keyword[not] keyword[None] keyword[and] identifier[termCount] > identifier[MAX_TERM_COUNT] : keyword[break] identifier[count] = literal[int] keyword[for] identifier[tid] , identifier[tdict] keyword[in] identifier[terms] . identifier[items] (): identifier[count] += literal[int] keyword[for] identifier[p] keyword[in] identifier[tdict] [ literal[string] ]: keyword[if] identifier[p] keyword[in] identifier[terms] . identifier[keys] (): identifier[terms] [ identifier[p] ][ literal[string] ]. identifier[append] ( identifier[tid] ) identifier[tagGroups] = identifier[set] () keyword[for] identifier[tid] , identifier[tdict] keyword[in] identifier[terms] . identifier[items] (): keyword[if] identifier[len] ( identifier[tdict] [ literal[string] ])== literal[int] : keyword[for] identifier[p] keyword[in] identifier[tdict] [ literal[string] ]: identifier[tagGroups] . identifier[add] ( identifier[p] ) keyword[return] identifier[tagGroups] , identifier[terms]
def parse(filename, MAX_TERM_COUNT=1000): """ MAX_TERM_COUNT = 10000 # There are 39,000 terms in the GO! """ with open(filename, 'r') as f: termId = None name = None desc = None parents = [] termCount = 0 for l in f.readlines(): if l.startswith('id:'): termId = l.strip()[4:] # depends on [control=['if'], data=[]] if l.startswith('name:'): name = l.strip()[6:] # depends on [control=['if'], data=[]] elif l.startswith('def:'): desc = l.strip()[5:] # depends on [control=['if'], data=[]] elif l.startswith('is_a:'): pid = l.strip()[6:].split(' ', 1)[0] parents.append(pid) # depends on [control=['if'], data=[]] if len(l) == 1: # newline # save if termId is not None and name is not None: terms[termId] = {'name': name, 'desc': desc, 'parents': parents[:], 'children': []} termId = None name = None parents = [] termCount += 1 if MAX_TERM_COUNT is not None and termCount > MAX_TERM_COUNT: break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['l']] # depends on [control=['with'], data=['f']] count = 0 for (tid, tdict) in terms.items(): count += 1 # purely for display for p in tdict['parents']: if p in terms.keys(): terms[p]['children'].append(tid) # depends on [control=['if'], data=['p']] # depends on [control=['for'], data=['p']] # depends on [control=['for'], data=[]] # Get unique term IDs for Tag Groups. tagGroups = set() for (tid, tdict) in terms.items(): # Only create Tags for GO:terms that are 'leafs' of the tree if len(tdict['children']) == 0: for p in tdict['parents']: tagGroups.add(p) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return (tagGroups, terms)
def _GetDirectory(self): """Retrieves a directory. Returns: TSKPartitionDirectory: a directory or None if not available. """ if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY: return None return TSKPartitionDirectory(self._file_system, self.path_spec)
def function[_GetDirectory, parameter[self]]: constant[Retrieves a directory. Returns: TSKPartitionDirectory: a directory or None if not available. ] if compare[name[self].entry_type not_equal[!=] name[definitions].FILE_ENTRY_TYPE_DIRECTORY] begin[:] return[constant[None]] return[call[name[TSKPartitionDirectory], parameter[name[self]._file_system, name[self].path_spec]]]
keyword[def] identifier[_GetDirectory] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[entry_type] != identifier[definitions] . identifier[FILE_ENTRY_TYPE_DIRECTORY] : keyword[return] keyword[None] keyword[return] identifier[TSKPartitionDirectory] ( identifier[self] . identifier[_file_system] , identifier[self] . identifier[path_spec] )
def _GetDirectory(self): """Retrieves a directory. Returns: TSKPartitionDirectory: a directory or None if not available. """ if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY: return None # depends on [control=['if'], data=[]] return TSKPartitionDirectory(self._file_system, self.path_spec)
def recompute_missing_neighbors(self): """ Recomputes statistics for missing users of the current user's network: - ``User.percent_outofnetwork_calls`` - ``User.percent_outofnetwork_texts`` - ``User.percent_outofnetwork_contacts`` - ``User.percent_outofnetwork_call_durations`` This function is automatically called from :meth:`~bandicoot.io.read_csv` when loading a network user. """ oon_records = [r for r in self.records if self.network.get( r.correspondent_id, None) is None] num_oon_calls = len( [r for r in oon_records if r.interaction == 'call']) num_oon_texts = len( [r for r in oon_records if r.interaction == 'text']) num_oon_neighbors = len(set(x.correspondent_id for x in oon_records)) oon_call_durations = sum( [r.call_duration for r in oon_records if r.interaction == 'call']) num_calls = len([r for r in self.records if r.interaction == 'call']) num_texts = len([r for r in self.records if r.interaction == 'text']) total_neighbors = len(set(x.correspondent_id for x in self.records)) total_call_durations = sum( [r.call_duration for r in self.records if r.interaction == 'call']) def _safe_div(a, b, default): return a / b if b != 0 else default # We set the percentage at 0 if no event occurs self.percent_outofnetwork_calls = _safe_div( num_oon_calls, num_calls, 0) self.percent_outofnetwork_texts = _safe_div( num_oon_texts, num_texts, 0) self.percent_outofnetwork_contacts = _safe_div( num_oon_neighbors, total_neighbors, 0) self.percent_outofnetwork_call_durations = _safe_div( oon_call_durations, total_call_durations, 0)
def function[recompute_missing_neighbors, parameter[self]]: constant[ Recomputes statistics for missing users of the current user's network: - ``User.percent_outofnetwork_calls`` - ``User.percent_outofnetwork_texts`` - ``User.percent_outofnetwork_contacts`` - ``User.percent_outofnetwork_call_durations`` This function is automatically called from :meth:`~bandicoot.io.read_csv` when loading a network user. ] variable[oon_records] assign[=] <ast.ListComp object at 0x7da1b0d3e440> variable[num_oon_calls] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b0d3dd80>]] variable[num_oon_texts] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b0d3cfd0>]] variable[num_oon_neighbors] assign[=] call[name[len], parameter[call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b0d3f010>]]]] variable[oon_call_durations] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b0d3d870>]] variable[num_calls] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b0d3db70>]] variable[num_texts] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b0d3f040>]] variable[total_neighbors] assign[=] call[name[len], parameter[call[name[set], parameter[<ast.GeneratorExp object at 0x7da18dc07370>]]]] variable[total_call_durations] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da18dc04520>]] def function[_safe_div, parameter[a, b, default]]: return[<ast.IfExp object at 0x7da18dc04160>] name[self].percent_outofnetwork_calls assign[=] call[name[_safe_div], parameter[name[num_oon_calls], name[num_calls], constant[0]]] name[self].percent_outofnetwork_texts assign[=] call[name[_safe_div], parameter[name[num_oon_texts], name[num_texts], constant[0]]] name[self].percent_outofnetwork_contacts assign[=] call[name[_safe_div], parameter[name[num_oon_neighbors], name[total_neighbors], constant[0]]] name[self].percent_outofnetwork_call_durations assign[=] call[name[_safe_div], parameter[name[oon_call_durations], name[total_call_durations], constant[0]]]
keyword[def] identifier[recompute_missing_neighbors] ( identifier[self] ): literal[string] identifier[oon_records] =[ identifier[r] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[records] keyword[if] identifier[self] . identifier[network] . identifier[get] ( identifier[r] . identifier[correspondent_id] , keyword[None] ) keyword[is] keyword[None] ] identifier[num_oon_calls] = identifier[len] ( [ identifier[r] keyword[for] identifier[r] keyword[in] identifier[oon_records] keyword[if] identifier[r] . identifier[interaction] == literal[string] ]) identifier[num_oon_texts] = identifier[len] ( [ identifier[r] keyword[for] identifier[r] keyword[in] identifier[oon_records] keyword[if] identifier[r] . identifier[interaction] == literal[string] ]) identifier[num_oon_neighbors] = identifier[len] ( identifier[set] ( identifier[x] . identifier[correspondent_id] keyword[for] identifier[x] keyword[in] identifier[oon_records] )) identifier[oon_call_durations] = identifier[sum] ( [ identifier[r] . identifier[call_duration] keyword[for] identifier[r] keyword[in] identifier[oon_records] keyword[if] identifier[r] . identifier[interaction] == literal[string] ]) identifier[num_calls] = identifier[len] ([ identifier[r] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[records] keyword[if] identifier[r] . identifier[interaction] == literal[string] ]) identifier[num_texts] = identifier[len] ([ identifier[r] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[records] keyword[if] identifier[r] . identifier[interaction] == literal[string] ]) identifier[total_neighbors] = identifier[len] ( identifier[set] ( identifier[x] . identifier[correspondent_id] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[records] )) identifier[total_call_durations] = identifier[sum] ( [ identifier[r] . identifier[call_duration] keyword[for] identifier[r] keyword[in] identifier[self] . identifier[records] keyword[if] identifier[r] . identifier[interaction] == literal[string] ]) keyword[def] identifier[_safe_div] ( identifier[a] , identifier[b] , identifier[default] ): keyword[return] identifier[a] / identifier[b] keyword[if] identifier[b] != literal[int] keyword[else] identifier[default] identifier[self] . identifier[percent_outofnetwork_calls] = identifier[_safe_div] ( identifier[num_oon_calls] , identifier[num_calls] , literal[int] ) identifier[self] . identifier[percent_outofnetwork_texts] = identifier[_safe_div] ( identifier[num_oon_texts] , identifier[num_texts] , literal[int] ) identifier[self] . identifier[percent_outofnetwork_contacts] = identifier[_safe_div] ( identifier[num_oon_neighbors] , identifier[total_neighbors] , literal[int] ) identifier[self] . identifier[percent_outofnetwork_call_durations] = identifier[_safe_div] ( identifier[oon_call_durations] , identifier[total_call_durations] , literal[int] )
def recompute_missing_neighbors(self): """ Recomputes statistics for missing users of the current user's network: - ``User.percent_outofnetwork_calls`` - ``User.percent_outofnetwork_texts`` - ``User.percent_outofnetwork_contacts`` - ``User.percent_outofnetwork_call_durations`` This function is automatically called from :meth:`~bandicoot.io.read_csv` when loading a network user. """ oon_records = [r for r in self.records if self.network.get(r.correspondent_id, None) is None] num_oon_calls = len([r for r in oon_records if r.interaction == 'call']) num_oon_texts = len([r for r in oon_records if r.interaction == 'text']) num_oon_neighbors = len(set((x.correspondent_id for x in oon_records))) oon_call_durations = sum([r.call_duration for r in oon_records if r.interaction == 'call']) num_calls = len([r for r in self.records if r.interaction == 'call']) num_texts = len([r for r in self.records if r.interaction == 'text']) total_neighbors = len(set((x.correspondent_id for x in self.records))) total_call_durations = sum([r.call_duration for r in self.records if r.interaction == 'call']) def _safe_div(a, b, default): return a / b if b != 0 else default # We set the percentage at 0 if no event occurs self.percent_outofnetwork_calls = _safe_div(num_oon_calls, num_calls, 0) self.percent_outofnetwork_texts = _safe_div(num_oon_texts, num_texts, 0) self.percent_outofnetwork_contacts = _safe_div(num_oon_neighbors, total_neighbors, 0) self.percent_outofnetwork_call_durations = _safe_div(oon_call_durations, total_call_durations, 0)
def get_s3store_instance(bucket): """Return an instance of S3Store.""" global _s3store_instances key = "%s" % bucket try: instance = _s3store_instances[key] except KeyError: instance = S3Store( bucket=bucket ) _s3store_instances[key] = instance return instance
def function[get_s3store_instance, parameter[bucket]]: constant[Return an instance of S3Store.] <ast.Global object at 0x7da20c991b70> variable[key] assign[=] binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[bucket]] <ast.Try object at 0x7da20c991600> return[name[instance]]
keyword[def] identifier[get_s3store_instance] ( identifier[bucket] ): literal[string] keyword[global] identifier[_s3store_instances] identifier[key] = literal[string] % identifier[bucket] keyword[try] : identifier[instance] = identifier[_s3store_instances] [ identifier[key] ] keyword[except] identifier[KeyError] : identifier[instance] = identifier[S3Store] ( identifier[bucket] = identifier[bucket] ) identifier[_s3store_instances] [ identifier[key] ]= identifier[instance] keyword[return] identifier[instance]
def get_s3store_instance(bucket): """Return an instance of S3Store.""" global _s3store_instances key = '%s' % bucket try: instance = _s3store_instances[key] # depends on [control=['try'], data=[]] except KeyError: instance = S3Store(bucket=bucket) _s3store_instances[key] = instance # depends on [control=['except'], data=[]] return instance
def initialize(self, symbolic_vm: LaserEVM): """Initializes the BenchmarkPlugin Introduces hooks in symbolic_vm to track the desired values :param symbolic_vm: Symbolic virtual machine to analyze """ self._reset() @symbolic_vm.laser_hook("execute_state") def execute_state_hook(_): current_time = time() - self.begin self.nr_of_executed_insns += 1 for key, value in symbolic_vm.coverage.items(): try: self.coverage[key][current_time] = sum(value[1]) * 100 / value[0] except KeyError: self.coverage[key] = {} self.coverage[key][current_time] = sum(value[1]) * 100 / value[0] @symbolic_vm.laser_hook("start_sym_exec") def start_sym_exec_hook(): self.begin = time() @symbolic_vm.laser_hook("stop_sym_exec") def stop_sym_exec_hook(): self.end = time() self._write_to_graph() self._store_report()
def function[initialize, parameter[self, symbolic_vm]]: constant[Initializes the BenchmarkPlugin Introduces hooks in symbolic_vm to track the desired values :param symbolic_vm: Symbolic virtual machine to analyze ] call[name[self]._reset, parameter[]] def function[execute_state_hook, parameter[_]]: variable[current_time] assign[=] binary_operation[call[name[time], parameter[]] - name[self].begin] <ast.AugAssign object at 0x7da1b1d36800> for taget[tuple[[<ast.Name object at 0x7da1b1d34d90>, <ast.Name object at 0x7da1b1d374c0>]]] in starred[call[name[symbolic_vm].coverage.items, parameter[]]] begin[:] <ast.Try object at 0x7da1b1d35780> def function[start_sym_exec_hook, parameter[]]: name[self].begin assign[=] call[name[time], parameter[]] def function[stop_sym_exec_hook, parameter[]]: name[self].end assign[=] call[name[time], parameter[]] call[name[self]._write_to_graph, parameter[]] call[name[self]._store_report, parameter[]]
keyword[def] identifier[initialize] ( identifier[self] , identifier[symbolic_vm] : identifier[LaserEVM] ): literal[string] identifier[self] . identifier[_reset] () @ identifier[symbolic_vm] . identifier[laser_hook] ( literal[string] ) keyword[def] identifier[execute_state_hook] ( identifier[_] ): identifier[current_time] = identifier[time] ()- identifier[self] . identifier[begin] identifier[self] . identifier[nr_of_executed_insns] += literal[int] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[symbolic_vm] . identifier[coverage] . identifier[items] (): keyword[try] : identifier[self] . identifier[coverage] [ identifier[key] ][ identifier[current_time] ]= identifier[sum] ( identifier[value] [ literal[int] ])* literal[int] / identifier[value] [ literal[int] ] keyword[except] identifier[KeyError] : identifier[self] . identifier[coverage] [ identifier[key] ]={} identifier[self] . identifier[coverage] [ identifier[key] ][ identifier[current_time] ]= identifier[sum] ( identifier[value] [ literal[int] ])* literal[int] / identifier[value] [ literal[int] ] @ identifier[symbolic_vm] . identifier[laser_hook] ( literal[string] ) keyword[def] identifier[start_sym_exec_hook] (): identifier[self] . identifier[begin] = identifier[time] () @ identifier[symbolic_vm] . identifier[laser_hook] ( literal[string] ) keyword[def] identifier[stop_sym_exec_hook] (): identifier[self] . identifier[end] = identifier[time] () identifier[self] . identifier[_write_to_graph] () identifier[self] . identifier[_store_report] ()
def initialize(self, symbolic_vm: LaserEVM): """Initializes the BenchmarkPlugin Introduces hooks in symbolic_vm to track the desired values :param symbolic_vm: Symbolic virtual machine to analyze """ self._reset() @symbolic_vm.laser_hook('execute_state') def execute_state_hook(_): current_time = time() - self.begin self.nr_of_executed_insns += 1 for (key, value) in symbolic_vm.coverage.items(): try: self.coverage[key][current_time] = sum(value[1]) * 100 / value[0] # depends on [control=['try'], data=[]] except KeyError: self.coverage[key] = {} self.coverage[key][current_time] = sum(value[1]) * 100 / value[0] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] @symbolic_vm.laser_hook('start_sym_exec') def start_sym_exec_hook(): self.begin = time() @symbolic_vm.laser_hook('stop_sym_exec') def stop_sym_exec_hook(): self.end = time() self._write_to_graph() self._store_report()
def data_path(self) -> DataPath: """Return the receiver's data path.""" dp = self.data_parent() return (dp.data_path() if dp else "") + "/" + self.iname()
def function[data_path, parameter[self]]: constant[Return the receiver's data path.] variable[dp] assign[=] call[name[self].data_parent, parameter[]] return[binary_operation[binary_operation[<ast.IfExp object at 0x7da1b052aaa0> + constant[/]] + call[name[self].iname, parameter[]]]]
keyword[def] identifier[data_path] ( identifier[self] )-> identifier[DataPath] : literal[string] identifier[dp] = identifier[self] . identifier[data_parent] () keyword[return] ( identifier[dp] . identifier[data_path] () keyword[if] identifier[dp] keyword[else] literal[string] )+ literal[string] + identifier[self] . identifier[iname] ()
def data_path(self) -> DataPath: """Return the receiver's data path.""" dp = self.data_parent() return (dp.data_path() if dp else '') + '/' + self.iname()
def ReadFileObject(self, file_object): """Reads artifact definitions from a file-like object. Args: file_object (file): file-like object to read from. Yields: ArtifactDefinition: an artifact definition. Raises: FormatError: if the format of the YAML artifact definition is not set or incorrect. """ # TODO: add try, except? yaml_generator = yaml.safe_load_all(file_object) last_artifact_definition = None for yaml_definition in yaml_generator: try: artifact_definition = self.ReadArtifactDefinitionValues(yaml_definition) except errors.FormatError as exception: error_location = 'At start' if last_artifact_definition: error_location = 'After: {0:s}'.format(last_artifact_definition.name) raise errors.FormatError( '{0:s} {1!s}'.format(error_location, exception)) yield artifact_definition last_artifact_definition = artifact_definition
def function[ReadFileObject, parameter[self, file_object]]: constant[Reads artifact definitions from a file-like object. Args: file_object (file): file-like object to read from. Yields: ArtifactDefinition: an artifact definition. Raises: FormatError: if the format of the YAML artifact definition is not set or incorrect. ] variable[yaml_generator] assign[=] call[name[yaml].safe_load_all, parameter[name[file_object]]] variable[last_artifact_definition] assign[=] constant[None] for taget[name[yaml_definition]] in starred[name[yaml_generator]] begin[:] <ast.Try object at 0x7da1b1eced40> <ast.Yield object at 0x7da1b1ecc730> variable[last_artifact_definition] assign[=] name[artifact_definition]
keyword[def] identifier[ReadFileObject] ( identifier[self] , identifier[file_object] ): literal[string] identifier[yaml_generator] = identifier[yaml] . identifier[safe_load_all] ( identifier[file_object] ) identifier[last_artifact_definition] = keyword[None] keyword[for] identifier[yaml_definition] keyword[in] identifier[yaml_generator] : keyword[try] : identifier[artifact_definition] = identifier[self] . identifier[ReadArtifactDefinitionValues] ( identifier[yaml_definition] ) keyword[except] identifier[errors] . identifier[FormatError] keyword[as] identifier[exception] : identifier[error_location] = literal[string] keyword[if] identifier[last_artifact_definition] : identifier[error_location] = literal[string] . identifier[format] ( identifier[last_artifact_definition] . identifier[name] ) keyword[raise] identifier[errors] . identifier[FormatError] ( literal[string] . identifier[format] ( identifier[error_location] , identifier[exception] )) keyword[yield] identifier[artifact_definition] identifier[last_artifact_definition] = identifier[artifact_definition]
def ReadFileObject(self, file_object): """Reads artifact definitions from a file-like object. Args: file_object (file): file-like object to read from. Yields: ArtifactDefinition: an artifact definition. Raises: FormatError: if the format of the YAML artifact definition is not set or incorrect. """ # TODO: add try, except? yaml_generator = yaml.safe_load_all(file_object) last_artifact_definition = None for yaml_definition in yaml_generator: try: artifact_definition = self.ReadArtifactDefinitionValues(yaml_definition) # depends on [control=['try'], data=[]] except errors.FormatError as exception: error_location = 'At start' if last_artifact_definition: error_location = 'After: {0:s}'.format(last_artifact_definition.name) # depends on [control=['if'], data=[]] raise errors.FormatError('{0:s} {1!s}'.format(error_location, exception)) # depends on [control=['except'], data=['exception']] yield artifact_definition last_artifact_definition = artifact_definition # depends on [control=['for'], data=['yaml_definition']]
def getReflexRuleConditionElement(self, set_idx=0, row_idx=0, element=''): """ Returns the expected value saved in the action list object. :set_idx: it is an integer with the position of the reflex rules set in the widget's list. :row_idx: is an integer with the numer of the row from the set :element: a string with the name of the element of the action to obtain: 'analysisservice', 'cond_row_idx', 'range0', 'range1', 'discreteresult', and_or """ if isinstance(set_idx, str): set_idx = int(set_idx) if isinstance(row_idx, str): row_idx = int(row_idx) cond = self.getReflexRuleElement(idx=set_idx, element='conditions') return cond[row_idx].get(element, '')
def function[getReflexRuleConditionElement, parameter[self, set_idx, row_idx, element]]: constant[ Returns the expected value saved in the action list object. :set_idx: it is an integer with the position of the reflex rules set in the widget's list. :row_idx: is an integer with the numer of the row from the set :element: a string with the name of the element of the action to obtain: 'analysisservice', 'cond_row_idx', 'range0', 'range1', 'discreteresult', and_or ] if call[name[isinstance], parameter[name[set_idx], name[str]]] begin[:] variable[set_idx] assign[=] call[name[int], parameter[name[set_idx]]] if call[name[isinstance], parameter[name[row_idx], name[str]]] begin[:] variable[row_idx] assign[=] call[name[int], parameter[name[row_idx]]] variable[cond] assign[=] call[name[self].getReflexRuleElement, parameter[]] return[call[call[name[cond]][name[row_idx]].get, parameter[name[element], constant[]]]]
keyword[def] identifier[getReflexRuleConditionElement] ( identifier[self] , identifier[set_idx] = literal[int] , identifier[row_idx] = literal[int] , identifier[element] = literal[string] ): literal[string] keyword[if] identifier[isinstance] ( identifier[set_idx] , identifier[str] ): identifier[set_idx] = identifier[int] ( identifier[set_idx] ) keyword[if] identifier[isinstance] ( identifier[row_idx] , identifier[str] ): identifier[row_idx] = identifier[int] ( identifier[row_idx] ) identifier[cond] = identifier[self] . identifier[getReflexRuleElement] ( identifier[idx] = identifier[set_idx] , identifier[element] = literal[string] ) keyword[return] identifier[cond] [ identifier[row_idx] ]. identifier[get] ( identifier[element] , literal[string] )
def getReflexRuleConditionElement(self, set_idx=0, row_idx=0, element=''): """ Returns the expected value saved in the action list object. :set_idx: it is an integer with the position of the reflex rules set in the widget's list. :row_idx: is an integer with the numer of the row from the set :element: a string with the name of the element of the action to obtain: 'analysisservice', 'cond_row_idx', 'range0', 'range1', 'discreteresult', and_or """ if isinstance(set_idx, str): set_idx = int(set_idx) # depends on [control=['if'], data=[]] if isinstance(row_idx, str): row_idx = int(row_idx) # depends on [control=['if'], data=[]] cond = self.getReflexRuleElement(idx=set_idx, element='conditions') return cond[row_idx].get(element, '')
def check_has_docstring(self, api): '''An API class must have a docstring.''' if not api.__doc__: msg = 'The Api class "{}" lacks a docstring.' return [msg.format(api.__name__)]
def function[check_has_docstring, parameter[self, api]]: constant[An API class must have a docstring.] if <ast.UnaryOp object at 0x7da20c796b00> begin[:] variable[msg] assign[=] constant[The Api class "{}" lacks a docstring.] return[list[[<ast.Call object at 0x7da20c794a90>]]]
keyword[def] identifier[check_has_docstring] ( identifier[self] , identifier[api] ): literal[string] keyword[if] keyword[not] identifier[api] . identifier[__doc__] : identifier[msg] = literal[string] keyword[return] [ identifier[msg] . identifier[format] ( identifier[api] . identifier[__name__] )]
def check_has_docstring(self, api): """An API class must have a docstring.""" if not api.__doc__: msg = 'The Api class "{}" lacks a docstring.' return [msg.format(api.__name__)] # depends on [control=['if'], data=[]]
def remaining_duration(self, time): '''Returns the remaining duration for a recording. ''' return max(0, self.end - max(self.start, time))
def function[remaining_duration, parameter[self, time]]: constant[Returns the remaining duration for a recording. ] return[call[name[max], parameter[constant[0], binary_operation[name[self].end - call[name[max], parameter[name[self].start, name[time]]]]]]]
keyword[def] identifier[remaining_duration] ( identifier[self] , identifier[time] ): literal[string] keyword[return] identifier[max] ( literal[int] , identifier[self] . identifier[end] - identifier[max] ( identifier[self] . identifier[start] , identifier[time] ))
def remaining_duration(self, time): """Returns the remaining duration for a recording. """ return max(0, self.end - max(self.start, time))
def stop_drivers(cls, maintain_default=False): """Stop all drivers except default if it should be reused :param maintain_default: True if the default driver should not be closed """ # Exclude first wrapper if the driver must be reused driver_wrappers = cls.driver_wrappers[1:] if maintain_default else cls.driver_wrappers for driver_wrapper in driver_wrappers: if not driver_wrapper.driver: continue try: driver_wrapper.driver.quit() except Exception as e: driver_wrapper.logger.warn( "Capture exceptions to avoid errors in teardown method due to session timeouts: \n %s" % e)
def function[stop_drivers, parameter[cls, maintain_default]]: constant[Stop all drivers except default if it should be reused :param maintain_default: True if the default driver should not be closed ] variable[driver_wrappers] assign[=] <ast.IfExp object at 0x7da20c76e9b0> for taget[name[driver_wrapper]] in starred[name[driver_wrappers]] begin[:] if <ast.UnaryOp object at 0x7da20c76eaa0> begin[:] continue <ast.Try object at 0x7da20c76c9d0>
keyword[def] identifier[stop_drivers] ( identifier[cls] , identifier[maintain_default] = keyword[False] ): literal[string] identifier[driver_wrappers] = identifier[cls] . identifier[driver_wrappers] [ literal[int] :] keyword[if] identifier[maintain_default] keyword[else] identifier[cls] . identifier[driver_wrappers] keyword[for] identifier[driver_wrapper] keyword[in] identifier[driver_wrappers] : keyword[if] keyword[not] identifier[driver_wrapper] . identifier[driver] : keyword[continue] keyword[try] : identifier[driver_wrapper] . identifier[driver] . identifier[quit] () keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[driver_wrapper] . identifier[logger] . identifier[warn] ( literal[string] % identifier[e] )
def stop_drivers(cls, maintain_default=False): """Stop all drivers except default if it should be reused :param maintain_default: True if the default driver should not be closed """ # Exclude first wrapper if the driver must be reused driver_wrappers = cls.driver_wrappers[1:] if maintain_default else cls.driver_wrappers for driver_wrapper in driver_wrappers: if not driver_wrapper.driver: continue # depends on [control=['if'], data=[]] try: driver_wrapper.driver.quit() # depends on [control=['try'], data=[]] except Exception as e: driver_wrapper.logger.warn('Capture exceptions to avoid errors in teardown method due to session timeouts: \n %s' % e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['driver_wrapper']]
def build_distributions(context): """Builds package distributions""" rmtree('dist', ignore_errors=True) build_package_command = 'python setup.py clean sdist bdist_wheel' result = shell.dry_run(build_package_command, context.dry_run) packages = Path('dist').files() if not context.dry_run else "nothing" if not result: raise Exception('Error building packages: %s' % result) else: log.info('Built %s' % ', '.join(packages)) return packages
def function[build_distributions, parameter[context]]: constant[Builds package distributions] call[name[rmtree], parameter[constant[dist]]] variable[build_package_command] assign[=] constant[python setup.py clean sdist bdist_wheel] variable[result] assign[=] call[name[shell].dry_run, parameter[name[build_package_command], name[context].dry_run]] variable[packages] assign[=] <ast.IfExp object at 0x7da1b05beb30> if <ast.UnaryOp object at 0x7da1b05be6b0> begin[:] <ast.Raise object at 0x7da1b05be4d0> return[name[packages]]
keyword[def] identifier[build_distributions] ( identifier[context] ): literal[string] identifier[rmtree] ( literal[string] , identifier[ignore_errors] = keyword[True] ) identifier[build_package_command] = literal[string] identifier[result] = identifier[shell] . identifier[dry_run] ( identifier[build_package_command] , identifier[context] . identifier[dry_run] ) identifier[packages] = identifier[Path] ( literal[string] ). identifier[files] () keyword[if] keyword[not] identifier[context] . identifier[dry_run] keyword[else] literal[string] keyword[if] keyword[not] identifier[result] : keyword[raise] identifier[Exception] ( literal[string] % identifier[result] ) keyword[else] : identifier[log] . identifier[info] ( literal[string] % literal[string] . identifier[join] ( identifier[packages] )) keyword[return] identifier[packages]
def build_distributions(context): """Builds package distributions""" rmtree('dist', ignore_errors=True) build_package_command = 'python setup.py clean sdist bdist_wheel' result = shell.dry_run(build_package_command, context.dry_run) packages = Path('dist').files() if not context.dry_run else 'nothing' if not result: raise Exception('Error building packages: %s' % result) # depends on [control=['if'], data=[]] else: log.info('Built %s' % ', '.join(packages)) return packages
async def crypto_verify(signer_vk: str, msg: bytes, signature: bytes) -> bool: """ Verify a signature with a verkey. Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) for specific DID. :param signer_vk: verkey of signer of the message :param msg: message that has been signed :param signature: a signature to be verified :return: valid: true - if signature is valid, false - otherwise """ logger = logging.getLogger(__name__) logger.debug("crypto_verify: >>> my_vk: %r, signed_msg: %r, signature: %r", signer_vk, msg, signature) if not hasattr(crypto_verify, "cb"): logger.debug("crypto_verify: Creating callback") crypto_verify.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_bool)) c_signer_vk = c_char_p(signer_vk.encode('utf-8')) c_msg_len = c_uint32(len(msg)) c_signature_len = c_uint32(len(signature)) res = await do_call('indy_crypto_verify', c_signer_vk, msg, c_msg_len, signature, c_signature_len, crypto_verify.cb) logger.debug("crypto_verify: <<< res: %r", res) return res
<ast.AsyncFunctionDef object at 0x7da1b1f4a7d0>
keyword[async] keyword[def] identifier[crypto_verify] ( identifier[signer_vk] : identifier[str] , identifier[msg] : identifier[bytes] , identifier[signature] : identifier[bytes] )-> identifier[bool] : literal[string] identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[signer_vk] , identifier[msg] , identifier[signature] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[crypto_verify] , literal[string] ): identifier[logger] . identifier[debug] ( literal[string] ) identifier[crypto_verify] . identifier[cb] = identifier[create_cb] ( identifier[CFUNCTYPE] ( keyword[None] , identifier[c_int32] , identifier[c_int32] , identifier[c_bool] )) identifier[c_signer_vk] = identifier[c_char_p] ( identifier[signer_vk] . identifier[encode] ( literal[string] )) identifier[c_msg_len] = identifier[c_uint32] ( identifier[len] ( identifier[msg] )) identifier[c_signature_len] = identifier[c_uint32] ( identifier[len] ( identifier[signature] )) identifier[res] = keyword[await] identifier[do_call] ( literal[string] , identifier[c_signer_vk] , identifier[msg] , identifier[c_msg_len] , identifier[signature] , identifier[c_signature_len] , identifier[crypto_verify] . identifier[cb] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[res] ) keyword[return] identifier[res]
async def crypto_verify(signer_vk: str, msg: bytes, signature: bytes) -> bool: """ Verify a signature with a verkey. Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) for specific DID. :param signer_vk: verkey of signer of the message :param msg: message that has been signed :param signature: a signature to be verified :return: valid: true - if signature is valid, false - otherwise """ logger = logging.getLogger(__name__) logger.debug('crypto_verify: >>> my_vk: %r, signed_msg: %r, signature: %r', signer_vk, msg, signature) if not hasattr(crypto_verify, 'cb'): logger.debug('crypto_verify: Creating callback') crypto_verify.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_bool)) # depends on [control=['if'], data=[]] c_signer_vk = c_char_p(signer_vk.encode('utf-8')) c_msg_len = c_uint32(len(msg)) c_signature_len = c_uint32(len(signature)) res = await do_call('indy_crypto_verify', c_signer_vk, msg, c_msg_len, signature, c_signature_len, crypto_verify.cb) logger.debug('crypto_verify: <<< res: %r', res) return res
def bust_self(self, obj): """Remove the value that is being stored on `obj` for this :class:`.cached_property` object. :param obj: The instance on which to bust the cache. """ if self.func.__name__ in obj.__dict__: delattr(obj, self.func.__name__)
def function[bust_self, parameter[self, obj]]: constant[Remove the value that is being stored on `obj` for this :class:`.cached_property` object. :param obj: The instance on which to bust the cache. ] if compare[name[self].func.__name__ in name[obj].__dict__] begin[:] call[name[delattr], parameter[name[obj], name[self].func.__name__]]
keyword[def] identifier[bust_self] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] identifier[self] . identifier[func] . identifier[__name__] keyword[in] identifier[obj] . identifier[__dict__] : identifier[delattr] ( identifier[obj] , identifier[self] . identifier[func] . identifier[__name__] )
def bust_self(self, obj): """Remove the value that is being stored on `obj` for this :class:`.cached_property` object. :param obj: The instance on which to bust the cache. """ if self.func.__name__ in obj.__dict__: delattr(obj, self.func.__name__) # depends on [control=['if'], data=[]]
def import_from_pandapower_net(network, net): """ Import network from pandapower net. This import function is not yet finished (see warning below). Parameters ---------- net : pandapower network Examples -------- >>> network.import_from_pandapower_net(net) """ logger.warning("Warning: Importing from pandapower is still in beta; not all pandapower data is supported.\nUnsupported features include: three-winding transformers, switches, in_service status, shunt impedances and tap positions of transformers.") d = {} d["Bus"] = pd.DataFrame({"v_nom" : net.bus.vn_kv.values, "v_mag_pu_set" : 1.}, index=net.bus.name) d["Load"] = pd.DataFrame({"p_set" : (net.load.scaling*net.load.p_mw).values, "q_set" : (net.load.scaling*net.load.q_mvar).values, "bus" : net.bus.name.loc[net.load.bus].values}, index=net.load.name) #deal with PV generators d["Generator"] = pd.DataFrame({"p_set" : -(net.gen.scaling*net.gen.p_mw).values, "q_set" : 0., "bus" : net.bus.name.loc[net.gen.bus].values, "control" : "PV"}, index=net.gen.name) d["Bus"].loc[net.bus.name.loc[net.gen.bus].values,"v_mag_pu_set"] = net.gen.vm_pu.values #deal with PQ "static" generators d["Generator"] = pd.concat((d["Generator"],pd.DataFrame({"p_set" : -(net.sgen.scaling*net.sgen.p_mw).values, "q_set" : -(net.sgen.scaling*net.sgen.q_mvar).values, "bus" : net.bus.name.loc[net.sgen.bus].values, "control" : "PQ"}, index=net.sgen.name)), sort=False) d["Generator"] = pd.concat((d["Generator"],pd.DataFrame({"control" : "Slack", "p_set" : 0., "q_set" : 0., "bus" : net.bus.name.loc[net.ext_grid.bus].values}, index=net.ext_grid.name.fillna("External Grid"))), sort=False) d["Bus"].loc[net.bus.name.loc[net.ext_grid.bus].values,"v_mag_pu_set"] = net.ext_grid.vm_pu.values d["Line"] = pd.DataFrame({"type" : net.line.std_type.values, "bus0" : net.bus.name.loc[net.line.from_bus].values, "bus1" : net.bus.name.loc[net.line.to_bus].values, "length" : net.line.length_km.values, "num_parallel" : net.line.parallel.values}, index=net.line.name) d["Transformer"] = pd.DataFrame({"type" : net.trafo.std_type.values, "bus0" : net.bus.name.loc[net.trafo.hv_bus].values, "bus1" : net.bus.name.loc[net.trafo.lv_bus].values, "tap_position" : net.trafo.tap_pos.values}, index=net.trafo.name) for c in ["Bus","Load","Generator","Line","Transformer"]: network.import_components_from_dataframe(d[c],c) #amalgamate buses connected by closed switches bus_switches = net.switch[(net.switch.et=="b") & net.switch.closed] bus_switches["stays"] = bus_switches.bus.map(net.bus.name) bus_switches["goes"] = bus_switches.element.map(net.bus.name) to_replace = pd.Series(bus_switches.stays.values,bus_switches.goes.values) for i in to_replace.index: network.remove("Bus",i) for c in network.iterate_components({"Load","Generator"}): c.df.bus.replace(to_replace,inplace=True) for c in network.iterate_components({"Line","Transformer"}): c.df.bus0.replace(to_replace,inplace=True) c.df.bus1.replace(to_replace,inplace=True)
def function[import_from_pandapower_net, parameter[network, net]]: constant[ Import network from pandapower net. This import function is not yet finished (see warning below). Parameters ---------- net : pandapower network Examples -------- >>> network.import_from_pandapower_net(net) ] call[name[logger].warning, parameter[constant[Warning: Importing from pandapower is still in beta; not all pandapower data is supported. Unsupported features include: three-winding transformers, switches, in_service status, shunt impedances and tap positions of transformers.]]] variable[d] assign[=] dictionary[[], []] call[name[d]][constant[Bus]] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18f58d210>, <ast.Constant object at 0x7da18f58fa90>], [<ast.Attribute object at 0x7da18f58c520>, <ast.Constant object at 0x7da18f58ee00>]]]] call[name[d]][constant[Load]] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18f58f160>, <ast.Constant object at 0x7da18f58f610>, <ast.Constant object at 0x7da18f58d180>], [<ast.Attribute object at 0x7da18f58eb00>, <ast.Attribute object at 0x7da18f58ce80>, <ast.Attribute object at 0x7da18f58ee60>]]]] call[name[d]][constant[Generator]] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18f58e740>, <ast.Constant object at 0x7da18f58d450>, <ast.Constant object at 0x7da18f58f070>, <ast.Constant object at 0x7da18f58cb20>], [<ast.UnaryOp object at 0x7da18f58c490>, <ast.Constant object at 0x7da18f58e7a0>, <ast.Attribute object at 0x7da18f58c280>, <ast.Constant object at 0x7da18f58fc40>]]]] call[call[name[d]][constant[Bus]].loc][tuple[[<ast.Attribute object at 0x7da18f58cbb0>, <ast.Constant object at 0x7da18f58dae0>]]] assign[=] name[net].gen.vm_pu.values call[name[d]][constant[Generator]] assign[=] call[name[pd].concat, parameter[tuple[[<ast.Subscript object at 0x7da18f58d390>, <ast.Call object at 0x7da18f58e020>]]]] call[name[d]][constant[Generator]] assign[=] call[name[pd].concat, parameter[tuple[[<ast.Subscript object at 0x7da18dc05c90>, <ast.Call object at 0x7da18dc06b30>]]]] call[call[name[d]][constant[Bus]].loc][tuple[[<ast.Attribute object at 0x7da18dc05570>, <ast.Constant object at 0x7da18dc05db0>]]] assign[=] name[net].ext_grid.vm_pu.values call[name[d]][constant[Line]] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18dc06650>, <ast.Constant object at 0x7da18dc04160>, <ast.Constant object at 0x7da18dc07df0>, <ast.Constant object at 0x7da18dc07a90>, <ast.Constant object at 0x7da18dc05e70>], [<ast.Attribute object at 0x7da18dc04850>, <ast.Attribute object at 0x7da18dc04340>, <ast.Attribute object at 0x7da18bccb550>, <ast.Attribute object at 0x7da18bcc9030>, <ast.Attribute object at 0x7da18bcc9e40>]]]] call[name[d]][constant[Transformer]] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18bcc8370>, <ast.Constant object at 0x7da18bcc8340>, <ast.Constant object at 0x7da18bccaf50>, <ast.Constant object at 0x7da18bcc8310>], [<ast.Attribute object at 0x7da18bccb280>, <ast.Attribute object at 0x7da18bccafe0>, <ast.Attribute object at 0x7da18bcc8ac0>, <ast.Attribute object at 0x7da18bcc9ba0>]]]] for taget[name[c]] in starred[list[[<ast.Constant object at 0x7da18bcca9b0>, <ast.Constant object at 0x7da18bccbf10>, <ast.Constant object at 0x7da18bccb6a0>, <ast.Constant object at 0x7da18bcc89d0>, <ast.Constant object at 0x7da18bcca500>]]] begin[:] call[name[network].import_components_from_dataframe, parameter[call[name[d]][name[c]], name[c]]] variable[bus_switches] assign[=] call[name[net].switch][binary_operation[compare[name[net].switch.et equal[==] constant[b]] <ast.BitAnd object at 0x7da2590d6b60> name[net].switch.closed]] call[name[bus_switches]][constant[stays]] assign[=] call[name[bus_switches].bus.map, parameter[name[net].bus.name]] call[name[bus_switches]][constant[goes]] assign[=] call[name[bus_switches].element.map, parameter[name[net].bus.name]] variable[to_replace] assign[=] call[name[pd].Series, parameter[name[bus_switches].stays.values, name[bus_switches].goes.values]] for taget[name[i]] in starred[name[to_replace].index] begin[:] call[name[network].remove, parameter[constant[Bus], name[i]]] for taget[name[c]] in starred[call[name[network].iterate_components, parameter[<ast.Set object at 0x7da20e961150>]]] begin[:] call[name[c].df.bus.replace, parameter[name[to_replace]]] for taget[name[c]] in starred[call[name[network].iterate_components, parameter[<ast.Set object at 0x7da20e9617e0>]]] begin[:] call[name[c].df.bus0.replace, parameter[name[to_replace]]] call[name[c].df.bus1.replace, parameter[name[to_replace]]]
keyword[def] identifier[import_from_pandapower_net] ( identifier[network] , identifier[net] ): literal[string] identifier[logger] . identifier[warning] ( literal[string] ) identifier[d] ={} identifier[d] [ literal[string] ]= identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[net] . identifier[bus] . identifier[vn_kv] . identifier[values] , literal[string] : literal[int] }, identifier[index] = identifier[net] . identifier[bus] . identifier[name] ) identifier[d] [ literal[string] ]= identifier[pd] . identifier[DataFrame] ({ literal[string] :( identifier[net] . identifier[load] . identifier[scaling] * identifier[net] . identifier[load] . identifier[p_mw] ). identifier[values] , literal[string] :( identifier[net] . identifier[load] . identifier[scaling] * identifier[net] . identifier[load] . identifier[q_mvar] ). identifier[values] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[load] . identifier[bus] ]. identifier[values] }, identifier[index] = identifier[net] . identifier[load] . identifier[name] ) identifier[d] [ literal[string] ]= identifier[pd] . identifier[DataFrame] ({ literal[string] :-( identifier[net] . identifier[gen] . identifier[scaling] * identifier[net] . identifier[gen] . identifier[p_mw] ). identifier[values] , literal[string] : literal[int] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[gen] . identifier[bus] ]. identifier[values] , literal[string] : literal[string] }, identifier[index] = identifier[net] . identifier[gen] . identifier[name] ) identifier[d] [ literal[string] ]. identifier[loc] [ identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[gen] . identifier[bus] ]. identifier[values] , literal[string] ]= identifier[net] . identifier[gen] . identifier[vm_pu] . identifier[values] identifier[d] [ literal[string] ]= identifier[pd] . identifier[concat] (( identifier[d] [ literal[string] ], identifier[pd] . identifier[DataFrame] ({ literal[string] :-( identifier[net] . identifier[sgen] . identifier[scaling] * identifier[net] . identifier[sgen] . identifier[p_mw] ). identifier[values] , literal[string] :-( identifier[net] . identifier[sgen] . identifier[scaling] * identifier[net] . identifier[sgen] . identifier[q_mvar] ). identifier[values] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[sgen] . identifier[bus] ]. identifier[values] , literal[string] : literal[string] }, identifier[index] = identifier[net] . identifier[sgen] . identifier[name] )), identifier[sort] = keyword[False] ) identifier[d] [ literal[string] ]= identifier[pd] . identifier[concat] (( identifier[d] [ literal[string] ], identifier[pd] . identifier[DataFrame] ({ literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[ext_grid] . identifier[bus] ]. identifier[values] }, identifier[index] = identifier[net] . identifier[ext_grid] . identifier[name] . identifier[fillna] ( literal[string] ))), identifier[sort] = keyword[False] ) identifier[d] [ literal[string] ]. identifier[loc] [ identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[ext_grid] . identifier[bus] ]. identifier[values] , literal[string] ]= identifier[net] . identifier[ext_grid] . identifier[vm_pu] . identifier[values] identifier[d] [ literal[string] ]= identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[net] . identifier[line] . identifier[std_type] . identifier[values] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[line] . identifier[from_bus] ]. identifier[values] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[line] . identifier[to_bus] ]. identifier[values] , literal[string] : identifier[net] . identifier[line] . identifier[length_km] . identifier[values] , literal[string] : identifier[net] . identifier[line] . identifier[parallel] . identifier[values] }, identifier[index] = identifier[net] . identifier[line] . identifier[name] ) identifier[d] [ literal[string] ]= identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[net] . identifier[trafo] . identifier[std_type] . identifier[values] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[trafo] . identifier[hv_bus] ]. identifier[values] , literal[string] : identifier[net] . identifier[bus] . identifier[name] . identifier[loc] [ identifier[net] . identifier[trafo] . identifier[lv_bus] ]. identifier[values] , literal[string] : identifier[net] . identifier[trafo] . identifier[tap_pos] . identifier[values] }, identifier[index] = identifier[net] . identifier[trafo] . identifier[name] ) keyword[for] identifier[c] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[network] . identifier[import_components_from_dataframe] ( identifier[d] [ identifier[c] ], identifier[c] ) identifier[bus_switches] = identifier[net] . identifier[switch] [( identifier[net] . identifier[switch] . identifier[et] == literal[string] )& identifier[net] . identifier[switch] . identifier[closed] ] identifier[bus_switches] [ literal[string] ]= identifier[bus_switches] . identifier[bus] . identifier[map] ( identifier[net] . identifier[bus] . identifier[name] ) identifier[bus_switches] [ literal[string] ]= identifier[bus_switches] . identifier[element] . identifier[map] ( identifier[net] . identifier[bus] . identifier[name] ) identifier[to_replace] = identifier[pd] . identifier[Series] ( identifier[bus_switches] . identifier[stays] . identifier[values] , identifier[bus_switches] . identifier[goes] . identifier[values] ) keyword[for] identifier[i] keyword[in] identifier[to_replace] . identifier[index] : identifier[network] . identifier[remove] ( literal[string] , identifier[i] ) keyword[for] identifier[c] keyword[in] identifier[network] . identifier[iterate_components] ({ literal[string] , literal[string] }): identifier[c] . identifier[df] . identifier[bus] . identifier[replace] ( identifier[to_replace] , identifier[inplace] = keyword[True] ) keyword[for] identifier[c] keyword[in] identifier[network] . identifier[iterate_components] ({ literal[string] , literal[string] }): identifier[c] . identifier[df] . identifier[bus0] . identifier[replace] ( identifier[to_replace] , identifier[inplace] = keyword[True] ) identifier[c] . identifier[df] . identifier[bus1] . identifier[replace] ( identifier[to_replace] , identifier[inplace] = keyword[True] )
def import_from_pandapower_net(network, net): """ Import network from pandapower net. This import function is not yet finished (see warning below). Parameters ---------- net : pandapower network Examples -------- >>> network.import_from_pandapower_net(net) """ logger.warning('Warning: Importing from pandapower is still in beta; not all pandapower data is supported.\nUnsupported features include: three-winding transformers, switches, in_service status, shunt impedances and tap positions of transformers.') d = {} d['Bus'] = pd.DataFrame({'v_nom': net.bus.vn_kv.values, 'v_mag_pu_set': 1.0}, index=net.bus.name) d['Load'] = pd.DataFrame({'p_set': (net.load.scaling * net.load.p_mw).values, 'q_set': (net.load.scaling * net.load.q_mvar).values, 'bus': net.bus.name.loc[net.load.bus].values}, index=net.load.name) #deal with PV generators d['Generator'] = pd.DataFrame({'p_set': -(net.gen.scaling * net.gen.p_mw).values, 'q_set': 0.0, 'bus': net.bus.name.loc[net.gen.bus].values, 'control': 'PV'}, index=net.gen.name) d['Bus'].loc[net.bus.name.loc[net.gen.bus].values, 'v_mag_pu_set'] = net.gen.vm_pu.values #deal with PQ "static" generators d['Generator'] = pd.concat((d['Generator'], pd.DataFrame({'p_set': -(net.sgen.scaling * net.sgen.p_mw).values, 'q_set': -(net.sgen.scaling * net.sgen.q_mvar).values, 'bus': net.bus.name.loc[net.sgen.bus].values, 'control': 'PQ'}, index=net.sgen.name)), sort=False) d['Generator'] = pd.concat((d['Generator'], pd.DataFrame({'control': 'Slack', 'p_set': 0.0, 'q_set': 0.0, 'bus': net.bus.name.loc[net.ext_grid.bus].values}, index=net.ext_grid.name.fillna('External Grid'))), sort=False) d['Bus'].loc[net.bus.name.loc[net.ext_grid.bus].values, 'v_mag_pu_set'] = net.ext_grid.vm_pu.values d['Line'] = pd.DataFrame({'type': net.line.std_type.values, 'bus0': net.bus.name.loc[net.line.from_bus].values, 'bus1': net.bus.name.loc[net.line.to_bus].values, 'length': net.line.length_km.values, 'num_parallel': net.line.parallel.values}, index=net.line.name) d['Transformer'] = pd.DataFrame({'type': net.trafo.std_type.values, 'bus0': net.bus.name.loc[net.trafo.hv_bus].values, 'bus1': net.bus.name.loc[net.trafo.lv_bus].values, 'tap_position': net.trafo.tap_pos.values}, index=net.trafo.name) for c in ['Bus', 'Load', 'Generator', 'Line', 'Transformer']: network.import_components_from_dataframe(d[c], c) # depends on [control=['for'], data=['c']] #amalgamate buses connected by closed switches bus_switches = net.switch[(net.switch.et == 'b') & net.switch.closed] bus_switches['stays'] = bus_switches.bus.map(net.bus.name) bus_switches['goes'] = bus_switches.element.map(net.bus.name) to_replace = pd.Series(bus_switches.stays.values, bus_switches.goes.values) for i in to_replace.index: network.remove('Bus', i) for c in network.iterate_components({'Load', 'Generator'}): c.df.bus.replace(to_replace, inplace=True) # depends on [control=['for'], data=['c']] for c in network.iterate_components({'Line', 'Transformer'}): c.df.bus0.replace(to_replace, inplace=True) c.df.bus1.replace(to_replace, inplace=True) # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['i']]
def _append_params_to_uri(cls, uri, params): """ :type uri: str :type params: dict[str, str] :rtype: str """ if params: return uri + cls._DELIMITER_URL_QUERY + urlencode(params) return uri
def function[_append_params_to_uri, parameter[cls, uri, params]]: constant[ :type uri: str :type params: dict[str, str] :rtype: str ] if name[params] begin[:] return[binary_operation[binary_operation[name[uri] + name[cls]._DELIMITER_URL_QUERY] + call[name[urlencode], parameter[name[params]]]]] return[name[uri]]
keyword[def] identifier[_append_params_to_uri] ( identifier[cls] , identifier[uri] , identifier[params] ): literal[string] keyword[if] identifier[params] : keyword[return] identifier[uri] + identifier[cls] . identifier[_DELIMITER_URL_QUERY] + identifier[urlencode] ( identifier[params] ) keyword[return] identifier[uri]
def _append_params_to_uri(cls, uri, params): """ :type uri: str :type params: dict[str, str] :rtype: str """ if params: return uri + cls._DELIMITER_URL_QUERY + urlencode(params) # depends on [control=['if'], data=[]] return uri
def check_solver(self, image_x, image_y, kwargs_lens): """ returns the precision of the solver to match the image position :param kwargs_lens: full lens model (including solved parameters) :param image_x: point source in image :param image_y: point source in image :return: precision of Euclidean distances between the different rays arriving at the image positions """ source_x, source_y = self._lensModel.ray_shooting(image_x, image_y, kwargs_lens) dist = np.sqrt((source_x - source_x[0]) ** 2 + (source_y - source_y[0]) ** 2) return dist
def function[check_solver, parameter[self, image_x, image_y, kwargs_lens]]: constant[ returns the precision of the solver to match the image position :param kwargs_lens: full lens model (including solved parameters) :param image_x: point source in image :param image_y: point source in image :return: precision of Euclidean distances between the different rays arriving at the image positions ] <ast.Tuple object at 0x7da1b26ad390> assign[=] call[name[self]._lensModel.ray_shooting, parameter[name[image_x], name[image_y], name[kwargs_lens]]] variable[dist] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[source_x] - call[name[source_x]][constant[0]]] ** constant[2]] + binary_operation[binary_operation[name[source_y] - call[name[source_y]][constant[0]]] ** constant[2]]]]] return[name[dist]]
keyword[def] identifier[check_solver] ( identifier[self] , identifier[image_x] , identifier[image_y] , identifier[kwargs_lens] ): literal[string] identifier[source_x] , identifier[source_y] = identifier[self] . identifier[_lensModel] . identifier[ray_shooting] ( identifier[image_x] , identifier[image_y] , identifier[kwargs_lens] ) identifier[dist] = identifier[np] . identifier[sqrt] (( identifier[source_x] - identifier[source_x] [ literal[int] ])** literal[int] +( identifier[source_y] - identifier[source_y] [ literal[int] ])** literal[int] ) keyword[return] identifier[dist]
def check_solver(self, image_x, image_y, kwargs_lens): """ returns the precision of the solver to match the image position :param kwargs_lens: full lens model (including solved parameters) :param image_x: point source in image :param image_y: point source in image :return: precision of Euclidean distances between the different rays arriving at the image positions """ (source_x, source_y) = self._lensModel.ray_shooting(image_x, image_y, kwargs_lens) dist = np.sqrt((source_x - source_x[0]) ** 2 + (source_y - source_y[0]) ** 2) return dist
def generate_tls_sni_01_cert(server_name, key_type=u'rsa', _generate_private_key=None): """ Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key. """ key = (_generate_private_key or generate_private_key)(key_type) name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')]) cert = ( x509.CertificateBuilder() .subject_name(name) .issuer_name(name) .not_valid_before(datetime.now() - timedelta(seconds=3600)) .not_valid_after(datetime.now() + timedelta(seconds=3600)) .serial_number(int(uuid.uuid4())) .public_key(key.public_key()) .add_extension( x509.SubjectAlternativeName([x509.DNSName(server_name)]), critical=False) .sign( private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) ) return (cert, key)
def function[generate_tls_sni_01_cert, parameter[server_name, key_type, _generate_private_key]]: constant[ Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key. ] variable[key] assign[=] call[<ast.BoolOp object at 0x7da20c76c610>, parameter[name[key_type]]] variable[name] assign[=] call[name[x509].Name, parameter[list[[<ast.Call object at 0x7da20c76e260>]]]] variable[cert] assign[=] call[call[call[call[call[call[call[call[call[name[x509].CertificateBuilder, parameter[]].subject_name, parameter[name[name]]].issuer_name, parameter[name[name]]].not_valid_before, parameter[binary_operation[call[name[datetime].now, parameter[]] - call[name[timedelta], parameter[]]]]].not_valid_after, parameter[binary_operation[call[name[datetime].now, parameter[]] + call[name[timedelta], parameter[]]]]].serial_number, parameter[call[name[int], parameter[call[name[uuid].uuid4, parameter[]]]]]].public_key, parameter[call[name[key].public_key, parameter[]]]].add_extension, parameter[call[name[x509].SubjectAlternativeName, parameter[list[[<ast.Call object at 0x7da20c76d150>]]]]]].sign, parameter[]] return[tuple[[<ast.Name object at 0x7da20c76d810>, <ast.Name object at 0x7da20c76e9b0>]]]
keyword[def] identifier[generate_tls_sni_01_cert] ( identifier[server_name] , identifier[key_type] = literal[string] , identifier[_generate_private_key] = keyword[None] ): literal[string] identifier[key] =( identifier[_generate_private_key] keyword[or] identifier[generate_private_key] )( identifier[key_type] ) identifier[name] = identifier[x509] . identifier[Name] ([ identifier[x509] . identifier[NameAttribute] ( identifier[NameOID] . identifier[COMMON_NAME] , literal[string] )]) identifier[cert] =( identifier[x509] . identifier[CertificateBuilder] () . identifier[subject_name] ( identifier[name] ) . identifier[issuer_name] ( identifier[name] ) . identifier[not_valid_before] ( identifier[datetime] . identifier[now] ()- identifier[timedelta] ( identifier[seconds] = literal[int] )) . identifier[not_valid_after] ( identifier[datetime] . identifier[now] ()+ identifier[timedelta] ( identifier[seconds] = literal[int] )) . identifier[serial_number] ( identifier[int] ( identifier[uuid] . identifier[uuid4] ())) . identifier[public_key] ( identifier[key] . identifier[public_key] ()) . identifier[add_extension] ( identifier[x509] . identifier[SubjectAlternativeName] ([ identifier[x509] . identifier[DNSName] ( identifier[server_name] )]), identifier[critical] = keyword[False] ) . identifier[sign] ( identifier[private_key] = identifier[key] , identifier[algorithm] = identifier[hashes] . identifier[SHA256] (), identifier[backend] = identifier[default_backend] ()) ) keyword[return] ( identifier[cert] , identifier[key] )
def generate_tls_sni_01_cert(server_name, key_type=u'rsa', _generate_private_key=None): """ Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key. """ key = (_generate_private_key or generate_private_key)(key_type) name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')]) cert = x509.CertificateBuilder().subject_name(name).issuer_name(name).not_valid_before(datetime.now() - timedelta(seconds=3600)).not_valid_after(datetime.now() + timedelta(seconds=3600)).serial_number(int(uuid.uuid4())).public_key(key.public_key()).add_extension(x509.SubjectAlternativeName([x509.DNSName(server_name)]), critical=False).sign(private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) return (cert, key)
def iterate_tracker(maxiter, max_nc, verbose=False): """Generator that breaks after maxiter, or after the same array has been sent in more max_nc times in a row. """ last_hash = None last_hash_count = 0 arr = yield for i in xrange(maxiter): arr = yield i if arr is not None: hsh = hashlib.sha1(arr.view(np.uint8)).hexdigest() if last_hash == hsh: last_hash_count += 1 else: last_hash = hsh last_hash_count = 1 if last_hash_count >= max_nc: if verbose: print('Termination. Over %d iterations without ' 'change.' % max_nc) break
def function[iterate_tracker, parameter[maxiter, max_nc, verbose]]: constant[Generator that breaks after maxiter, or after the same array has been sent in more max_nc times in a row. ] variable[last_hash] assign[=] constant[None] variable[last_hash_count] assign[=] constant[0] variable[arr] assign[=] <ast.Yield object at 0x7da1b07ae3b0> for taget[name[i]] in starred[call[name[xrange], parameter[name[maxiter]]]] begin[:] variable[arr] assign[=] <ast.Yield object at 0x7da1b07ae170> if compare[name[arr] is_not constant[None]] begin[:] variable[hsh] assign[=] call[call[name[hashlib].sha1, parameter[call[name[arr].view, parameter[name[np].uint8]]]].hexdigest, parameter[]] if compare[name[last_hash] equal[==] name[hsh]] begin[:] <ast.AugAssign object at 0x7da1b066b880> if compare[name[last_hash_count] greater_or_equal[>=] name[max_nc]] begin[:] if name[verbose] begin[:] call[name[print], parameter[binary_operation[constant[Termination. Over %d iterations without change.] <ast.Mod object at 0x7da2590d6920> name[max_nc]]]] break
keyword[def] identifier[iterate_tracker] ( identifier[maxiter] , identifier[max_nc] , identifier[verbose] = keyword[False] ): literal[string] identifier[last_hash] = keyword[None] identifier[last_hash_count] = literal[int] identifier[arr] = keyword[yield] keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[maxiter] ): identifier[arr] = keyword[yield] identifier[i] keyword[if] identifier[arr] keyword[is] keyword[not] keyword[None] : identifier[hsh] = identifier[hashlib] . identifier[sha1] ( identifier[arr] . identifier[view] ( identifier[np] . identifier[uint8] )). identifier[hexdigest] () keyword[if] identifier[last_hash] == identifier[hsh] : identifier[last_hash_count] += literal[int] keyword[else] : identifier[last_hash] = identifier[hsh] identifier[last_hash_count] = literal[int] keyword[if] identifier[last_hash_count] >= identifier[max_nc] : keyword[if] identifier[verbose] : identifier[print] ( literal[string] literal[string] % identifier[max_nc] ) keyword[break]
def iterate_tracker(maxiter, max_nc, verbose=False): """Generator that breaks after maxiter, or after the same array has been sent in more max_nc times in a row. """ last_hash = None last_hash_count = 0 arr = (yield) for i in xrange(maxiter): arr = (yield i) if arr is not None: hsh = hashlib.sha1(arr.view(np.uint8)).hexdigest() if last_hash == hsh: last_hash_count += 1 # depends on [control=['if'], data=[]] else: last_hash = hsh last_hash_count = 1 if last_hash_count >= max_nc: if verbose: print('Termination. Over %d iterations without change.' % max_nc) # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=['max_nc']] # depends on [control=['if'], data=['arr']] # depends on [control=['for'], data=['i']]
def get_course_completions(cls, user, course_key): """ Returns a dictionary mapping BlockKeys to completion values for all BlockCompletion records for the given user and course_key. Return value: dict[BlockKey] = float """ user_course_completions = cls.user_course_completion_queryset(user, course_key) return cls.completion_by_block_key(user_course_completions)
def function[get_course_completions, parameter[cls, user, course_key]]: constant[ Returns a dictionary mapping BlockKeys to completion values for all BlockCompletion records for the given user and course_key. Return value: dict[BlockKey] = float ] variable[user_course_completions] assign[=] call[name[cls].user_course_completion_queryset, parameter[name[user], name[course_key]]] return[call[name[cls].completion_by_block_key, parameter[name[user_course_completions]]]]
keyword[def] identifier[get_course_completions] ( identifier[cls] , identifier[user] , identifier[course_key] ): literal[string] identifier[user_course_completions] = identifier[cls] . identifier[user_course_completion_queryset] ( identifier[user] , identifier[course_key] ) keyword[return] identifier[cls] . identifier[completion_by_block_key] ( identifier[user_course_completions] )
def get_course_completions(cls, user, course_key): """ Returns a dictionary mapping BlockKeys to completion values for all BlockCompletion records for the given user and course_key. Return value: dict[BlockKey] = float """ user_course_completions = cls.user_course_completion_queryset(user, course_key) return cls.completion_by_block_key(user_course_completions)
def context(self, name): """Get a context. Args: name (str): Name to store the context under. Returns: `ResolvedContext` object. """ data = self._context(name) context = data.get("context") if context: return context assert self.load_path context_path = os.path.join(self.load_path, "contexts", "%s.rxt" % name) context = ResolvedContext.load(context_path) data["context"] = context data["loaded"] = True return context
def function[context, parameter[self, name]]: constant[Get a context. Args: name (str): Name to store the context under. Returns: `ResolvedContext` object. ] variable[data] assign[=] call[name[self]._context, parameter[name[name]]] variable[context] assign[=] call[name[data].get, parameter[constant[context]]] if name[context] begin[:] return[name[context]] assert[name[self].load_path] variable[context_path] assign[=] call[name[os].path.join, parameter[name[self].load_path, constant[contexts], binary_operation[constant[%s.rxt] <ast.Mod object at 0x7da2590d6920> name[name]]]] variable[context] assign[=] call[name[ResolvedContext].load, parameter[name[context_path]]] call[name[data]][constant[context]] assign[=] name[context] call[name[data]][constant[loaded]] assign[=] constant[True] return[name[context]]
keyword[def] identifier[context] ( identifier[self] , identifier[name] ): literal[string] identifier[data] = identifier[self] . identifier[_context] ( identifier[name] ) identifier[context] = identifier[data] . identifier[get] ( literal[string] ) keyword[if] identifier[context] : keyword[return] identifier[context] keyword[assert] identifier[self] . identifier[load_path] identifier[context_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[load_path] , literal[string] , literal[string] % identifier[name] ) identifier[context] = identifier[ResolvedContext] . identifier[load] ( identifier[context_path] ) identifier[data] [ literal[string] ]= identifier[context] identifier[data] [ literal[string] ]= keyword[True] keyword[return] identifier[context]
def context(self, name): """Get a context. Args: name (str): Name to store the context under. Returns: `ResolvedContext` object. """ data = self._context(name) context = data.get('context') if context: return context # depends on [control=['if'], data=[]] assert self.load_path context_path = os.path.join(self.load_path, 'contexts', '%s.rxt' % name) context = ResolvedContext.load(context_path) data['context'] = context data['loaded'] = True return context
def _list_records(self, rtype=None, name=None, content=None): """List all records. Return an empty list if no records found. ``rtype``, ``name`` and ``content`` are used to filter records.""" records = [ { 'id': record['id'], 'type': record['type'], 'name': self._full_name(record['hostname']), 'content': record['destination'], 'priority': record['priority'], 'ttl': self.zone_ttl, } for record in self._raw_records(None, rtype, name, content) ] LOGGER.debug('list_records: %s', records) return records
def function[_list_records, parameter[self, rtype, name, content]]: constant[List all records. Return an empty list if no records found. ``rtype``, ``name`` and ``content`` are used to filter records.] variable[records] assign[=] <ast.ListComp object at 0x7da1b22bb760> call[name[LOGGER].debug, parameter[constant[list_records: %s], name[records]]] return[name[records]]
keyword[def] identifier[_list_records] ( identifier[self] , identifier[rtype] = keyword[None] , identifier[name] = keyword[None] , identifier[content] = keyword[None] ): literal[string] identifier[records] =[ { literal[string] : identifier[record] [ literal[string] ], literal[string] : identifier[record] [ literal[string] ], literal[string] : identifier[self] . identifier[_full_name] ( identifier[record] [ literal[string] ]), literal[string] : identifier[record] [ literal[string] ], literal[string] : identifier[record] [ literal[string] ], literal[string] : identifier[self] . identifier[zone_ttl] , } keyword[for] identifier[record] keyword[in] identifier[self] . identifier[_raw_records] ( keyword[None] , identifier[rtype] , identifier[name] , identifier[content] ) ] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[records] ) keyword[return] identifier[records]
def _list_records(self, rtype=None, name=None, content=None): """List all records. Return an empty list if no records found. ``rtype``, ``name`` and ``content`` are used to filter records.""" records = [{'id': record['id'], 'type': record['type'], 'name': self._full_name(record['hostname']), 'content': record['destination'], 'priority': record['priority'], 'ttl': self.zone_ttl} for record in self._raw_records(None, rtype, name, content)] LOGGER.debug('list_records: %s', records) return records
def field_lookup(obj, field_path): """ Lookup django model field in similar way of django query lookup. Args: obj (instance): Django Model instance field_path (str): '__' separated field path Example: >>> from django.db import model >>> from django.contrib.auth.models import User >>> class Article(models.Model): >>> title = models.CharField('title', max_length=200) >>> author = models.ForeignKey(User, null=True, >>> related_name='permission_test_articles_author') >>> editors = models.ManyToManyField(User, >>> related_name='permission_test_articles_editors') >>> user = User.objects.create_user('test_user', 'password') >>> article = Article.objects.create(title='test_article', ... author=user) >>> article.editors.add(user) >>> assert 'test_article' == field_lookup(article, 'title') >>> assert 'test_user' == field_lookup(article, 'user__username') >>> assert ['test_user'] == list(field_lookup(article, ... 'editors__username')) """ if hasattr(obj, 'iterator'): return (field_lookup(x, field_path) for x in obj.iterator()) elif isinstance(obj, Iterable): return (field_lookup(x, field_path) for x in iter(obj)) # split the path field_path = field_path.split('__', 1) if len(field_path) == 1: return getattr(obj, field_path[0], None) return field_lookup(field_lookup(obj, field_path[0]), field_path[1])
def function[field_lookup, parameter[obj, field_path]]: constant[ Lookup django model field in similar way of django query lookup. Args: obj (instance): Django Model instance field_path (str): '__' separated field path Example: >>> from django.db import model >>> from django.contrib.auth.models import User >>> class Article(models.Model): >>> title = models.CharField('title', max_length=200) >>> author = models.ForeignKey(User, null=True, >>> related_name='permission_test_articles_author') >>> editors = models.ManyToManyField(User, >>> related_name='permission_test_articles_editors') >>> user = User.objects.create_user('test_user', 'password') >>> article = Article.objects.create(title='test_article', ... author=user) >>> article.editors.add(user) >>> assert 'test_article' == field_lookup(article, 'title') >>> assert 'test_user' == field_lookup(article, 'user__username') >>> assert ['test_user'] == list(field_lookup(article, ... 'editors__username')) ] if call[name[hasattr], parameter[name[obj], constant[iterator]]] begin[:] return[<ast.GeneratorExp object at 0x7da1b06ffdc0>] variable[field_path] assign[=] call[name[field_path].split, parameter[constant[__], constant[1]]] if compare[call[name[len], parameter[name[field_path]]] equal[==] constant[1]] begin[:] return[call[name[getattr], parameter[name[obj], call[name[field_path]][constant[0]], constant[None]]]] return[call[name[field_lookup], parameter[call[name[field_lookup], parameter[name[obj], call[name[field_path]][constant[0]]]], call[name[field_path]][constant[1]]]]]
keyword[def] identifier[field_lookup] ( identifier[obj] , identifier[field_path] ): literal[string] keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ): keyword[return] ( identifier[field_lookup] ( identifier[x] , identifier[field_path] ) keyword[for] identifier[x] keyword[in] identifier[obj] . identifier[iterator] ()) keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Iterable] ): keyword[return] ( identifier[field_lookup] ( identifier[x] , identifier[field_path] ) keyword[for] identifier[x] keyword[in] identifier[iter] ( identifier[obj] )) identifier[field_path] = identifier[field_path] . identifier[split] ( literal[string] , literal[int] ) keyword[if] identifier[len] ( identifier[field_path] )== literal[int] : keyword[return] identifier[getattr] ( identifier[obj] , identifier[field_path] [ literal[int] ], keyword[None] ) keyword[return] identifier[field_lookup] ( identifier[field_lookup] ( identifier[obj] , identifier[field_path] [ literal[int] ]), identifier[field_path] [ literal[int] ])
def field_lookup(obj, field_path): """ Lookup django model field in similar way of django query lookup. Args: obj (instance): Django Model instance field_path (str): '__' separated field path Example: >>> from django.db import model >>> from django.contrib.auth.models import User >>> class Article(models.Model): >>> title = models.CharField('title', max_length=200) >>> author = models.ForeignKey(User, null=True, >>> related_name='permission_test_articles_author') >>> editors = models.ManyToManyField(User, >>> related_name='permission_test_articles_editors') >>> user = User.objects.create_user('test_user', 'password') >>> article = Article.objects.create(title='test_article', ... author=user) >>> article.editors.add(user) >>> assert 'test_article' == field_lookup(article, 'title') >>> assert 'test_user' == field_lookup(article, 'user__username') >>> assert ['test_user'] == list(field_lookup(article, ... 'editors__username')) """ if hasattr(obj, 'iterator'): return (field_lookup(x, field_path) for x in obj.iterator()) # depends on [control=['if'], data=[]] elif isinstance(obj, Iterable): return (field_lookup(x, field_path) for x in iter(obj)) # depends on [control=['if'], data=[]] # split the path field_path = field_path.split('__', 1) if len(field_path) == 1: return getattr(obj, field_path[0], None) # depends on [control=['if'], data=[]] return field_lookup(field_lookup(obj, field_path[0]), field_path[1])
def get_nonce(block_representation, coin_symbol='btc', api_key=None): ''' Takes a block_representation and returns the nonce ''' return get_block_overview(block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['bits']
def function[get_nonce, parameter[block_representation, coin_symbol, api_key]]: constant[ Takes a block_representation and returns the nonce ] return[call[call[name[get_block_overview], parameter[]]][constant[bits]]]
keyword[def] identifier[get_nonce] ( identifier[block_representation] , identifier[coin_symbol] = literal[string] , identifier[api_key] = keyword[None] ): literal[string] keyword[return] identifier[get_block_overview] ( identifier[block_representation] = identifier[block_representation] , identifier[coin_symbol] = identifier[coin_symbol] , identifier[txn_limit] = literal[int] , identifier[api_key] = identifier[api_key] )[ literal[string] ]
def get_nonce(block_representation, coin_symbol='btc', api_key=None): """ Takes a block_representation and returns the nonce """ return get_block_overview(block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['bits']
def bdecode(bstring): ''' Bdecodes a bencoded string e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'} ''' def get_val(): i = reader.next() if i.isdigit(): str_len = get_len(i) return get_str(str_len) if i == 'd': return get_dict() if i == 'l': return get_list() if i == 'i': return get_int() if i == 'e': return None def get_len(i=''): len_str = str(i) next_char = reader.next() if next_char == 'e': # The line that collapses the dictionary return None while next_char is not ':': len_str += next_char next_char = reader.next() else: return int(len_str) def get_dict(): this_dict = {} while 1: str_len = get_len() if str_len is None: # This dict is done return this_dict key = get_str(str_len) val = get_val() this_dict[key] = val def get_int(): int_str = '' i = reader.next() while i is not 'e': int_str += i i = reader.next() else: return int(int_str) def get_str(str_len): this_str = '' for i in range(str_len): this_str += reader.next() return this_str def get_list(): this_list = [] while 1: val = get_val() if not val: return this_list this_list.append(val) reader = _readchar(bstring) dict_repr = get_val() return dict_repr
def function[bdecode, parameter[bstring]]: constant[ Bdecodes a bencoded string e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'} ] def function[get_val, parameter[]]: variable[i] assign[=] call[name[reader].next, parameter[]] if call[name[i].isdigit, parameter[]] begin[:] variable[str_len] assign[=] call[name[get_len], parameter[name[i]]] return[call[name[get_str], parameter[name[str_len]]]] if compare[name[i] equal[==] constant[d]] begin[:] return[call[name[get_dict], parameter[]]] if compare[name[i] equal[==] constant[l]] begin[:] return[call[name[get_list], parameter[]]] if compare[name[i] equal[==] constant[i]] begin[:] return[call[name[get_int], parameter[]]] if compare[name[i] equal[==] constant[e]] begin[:] return[constant[None]] def function[get_len, parameter[i]]: variable[len_str] assign[=] call[name[str], parameter[name[i]]] variable[next_char] assign[=] call[name[reader].next, parameter[]] if compare[name[next_char] equal[==] constant[e]] begin[:] return[constant[None]] while compare[name[next_char] is_not constant[:]] begin[:] <ast.AugAssign object at 0x7da1b10eb7f0> variable[next_char] assign[=] call[name[reader].next, parameter[]] def function[get_dict, parameter[]]: variable[this_dict] assign[=] dictionary[[], []] while constant[1] begin[:] variable[str_len] assign[=] call[name[get_len], parameter[]] if compare[name[str_len] is constant[None]] begin[:] return[name[this_dict]] variable[key] assign[=] call[name[get_str], parameter[name[str_len]]] variable[val] assign[=] call[name[get_val], parameter[]] call[name[this_dict]][name[key]] assign[=] name[val] def function[get_int, parameter[]]: variable[int_str] assign[=] constant[] variable[i] assign[=] call[name[reader].next, parameter[]] while compare[name[i] is_not constant[e]] begin[:] <ast.AugAssign object at 0x7da1b10e8280> variable[i] assign[=] call[name[reader].next, parameter[]] def function[get_str, parameter[str_len]]: variable[this_str] assign[=] constant[] for taget[name[i]] in starred[call[name[range], parameter[name[str_len]]]] begin[:] <ast.AugAssign object at 0x7da1b10e8040> return[name[this_str]] def function[get_list, parameter[]]: variable[this_list] assign[=] list[[]] while constant[1] begin[:] variable[val] assign[=] call[name[get_val], parameter[]] if <ast.UnaryOp object at 0x7da1b118c850> begin[:] return[name[this_list]] call[name[this_list].append, parameter[name[val]]] variable[reader] assign[=] call[name[_readchar], parameter[name[bstring]]] variable[dict_repr] assign[=] call[name[get_val], parameter[]] return[name[dict_repr]]
keyword[def] identifier[bdecode] ( identifier[bstring] ): literal[string] keyword[def] identifier[get_val] (): identifier[i] = identifier[reader] . identifier[next] () keyword[if] identifier[i] . identifier[isdigit] (): identifier[str_len] = identifier[get_len] ( identifier[i] ) keyword[return] identifier[get_str] ( identifier[str_len] ) keyword[if] identifier[i] == literal[string] : keyword[return] identifier[get_dict] () keyword[if] identifier[i] == literal[string] : keyword[return] identifier[get_list] () keyword[if] identifier[i] == literal[string] : keyword[return] identifier[get_int] () keyword[if] identifier[i] == literal[string] : keyword[return] keyword[None] keyword[def] identifier[get_len] ( identifier[i] = literal[string] ): identifier[len_str] = identifier[str] ( identifier[i] ) identifier[next_char] = identifier[reader] . identifier[next] () keyword[if] identifier[next_char] == literal[string] : keyword[return] keyword[None] keyword[while] identifier[next_char] keyword[is] keyword[not] literal[string] : identifier[len_str] += identifier[next_char] identifier[next_char] = identifier[reader] . identifier[next] () keyword[else] : keyword[return] identifier[int] ( identifier[len_str] ) keyword[def] identifier[get_dict] (): identifier[this_dict] ={} keyword[while] literal[int] : identifier[str_len] = identifier[get_len] () keyword[if] identifier[str_len] keyword[is] keyword[None] : keyword[return] identifier[this_dict] identifier[key] = identifier[get_str] ( identifier[str_len] ) identifier[val] = identifier[get_val] () identifier[this_dict] [ identifier[key] ]= identifier[val] keyword[def] identifier[get_int] (): identifier[int_str] = literal[string] identifier[i] = identifier[reader] . identifier[next] () keyword[while] identifier[i] keyword[is] keyword[not] literal[string] : identifier[int_str] += identifier[i] identifier[i] = identifier[reader] . identifier[next] () keyword[else] : keyword[return] identifier[int] ( identifier[int_str] ) keyword[def] identifier[get_str] ( identifier[str_len] ): identifier[this_str] = literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[str_len] ): identifier[this_str] += identifier[reader] . identifier[next] () keyword[return] identifier[this_str] keyword[def] identifier[get_list] (): identifier[this_list] =[] keyword[while] literal[int] : identifier[val] = identifier[get_val] () keyword[if] keyword[not] identifier[val] : keyword[return] identifier[this_list] identifier[this_list] . identifier[append] ( identifier[val] ) identifier[reader] = identifier[_readchar] ( identifier[bstring] ) identifier[dict_repr] = identifier[get_val] () keyword[return] identifier[dict_repr]
def bdecode(bstring): """ Bdecodes a bencoded string e.g., d3:cow3:moo4:spam4:eggse -> {'cow': 'moo', 'spam': 'eggs'} """ def get_val(): i = reader.next() if i.isdigit(): str_len = get_len(i) return get_str(str_len) # depends on [control=['if'], data=[]] if i == 'd': return get_dict() # depends on [control=['if'], data=[]] if i == 'l': return get_list() # depends on [control=['if'], data=[]] if i == 'i': return get_int() # depends on [control=['if'], data=[]] if i == 'e': return None # depends on [control=['if'], data=[]] def get_len(i=''): len_str = str(i) next_char = reader.next() if next_char == 'e': # The line that collapses the dictionary return None # depends on [control=['if'], data=[]] while next_char is not ':': len_str += next_char next_char = reader.next() # depends on [control=['while'], data=['next_char']] else: return int(len_str) def get_dict(): this_dict = {} while 1: str_len = get_len() if str_len is None: # This dict is done return this_dict # depends on [control=['if'], data=[]] key = get_str(str_len) val = get_val() this_dict[key] = val # depends on [control=['while'], data=[]] def get_int(): int_str = '' i = reader.next() while i is not 'e': int_str += i i = reader.next() # depends on [control=['while'], data=['i']] else: return int(int_str) def get_str(str_len): this_str = '' for i in range(str_len): this_str += reader.next() # depends on [control=['for'], data=[]] return this_str def get_list(): this_list = [] while 1: val = get_val() if not val: return this_list # depends on [control=['if'], data=[]] this_list.append(val) # depends on [control=['while'], data=[]] reader = _readchar(bstring) dict_repr = get_val() return dict_repr
def _read(self): """Read the kube config file. """ stream = self.path.read_text() data = yaml.load(stream) return data
def function[_read, parameter[self]]: constant[Read the kube config file. ] variable[stream] assign[=] call[name[self].path.read_text, parameter[]] variable[data] assign[=] call[name[yaml].load, parameter[name[stream]]] return[name[data]]
keyword[def] identifier[_read] ( identifier[self] ): literal[string] identifier[stream] = identifier[self] . identifier[path] . identifier[read_text] () identifier[data] = identifier[yaml] . identifier[load] ( identifier[stream] ) keyword[return] identifier[data]
def _read(self): """Read the kube config file. """ stream = self.path.read_text() data = yaml.load(stream) return data
def _notebook_dir_changed(self, name, old, new): """do a bit of validation of the notebook dir""" if os.path.exists(new) and not os.path.isdir(new): raise TraitError("notebook dir %r is not a directory" % new) if not os.path.exists(new): self.log.info("Creating notebook dir %s", new) try: os.mkdir(new) except: raise TraitError("Couldn't create notebook dir %r" % new)
def function[_notebook_dir_changed, parameter[self, name, old, new]]: constant[do a bit of validation of the notebook dir] if <ast.BoolOp object at 0x7da20c6ab5e0> begin[:] <ast.Raise object at 0x7da20c6a89d0> if <ast.UnaryOp object at 0x7da20c6a9660> begin[:] call[name[self].log.info, parameter[constant[Creating notebook dir %s], name[new]]] <ast.Try object at 0x7da2045664a0>
keyword[def] identifier[_notebook_dir_changed] ( identifier[self] , identifier[name] , identifier[old] , identifier[new] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[new] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[new] ): keyword[raise] identifier[TraitError] ( literal[string] % identifier[new] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[new] ): identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[new] ) keyword[try] : identifier[os] . identifier[mkdir] ( identifier[new] ) keyword[except] : keyword[raise] identifier[TraitError] ( literal[string] % identifier[new] )
def _notebook_dir_changed(self, name, old, new): """do a bit of validation of the notebook dir""" if os.path.exists(new) and (not os.path.isdir(new)): raise TraitError('notebook dir %r is not a directory' % new) # depends on [control=['if'], data=[]] if not os.path.exists(new): self.log.info('Creating notebook dir %s', new) try: os.mkdir(new) # depends on [control=['try'], data=[]] except: raise TraitError("Couldn't create notebook dir %r" % new) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def iter(self, reset=False, reverse=False): """ Yield processed files one at a time, in natural order. """ files = os.listdir(self.source) files.sort(reverse=reverse) for filename in files: try: yield self.get(filename, reset) except PostNotFound: continue
def function[iter, parameter[self, reset, reverse]]: constant[ Yield processed files one at a time, in natural order. ] variable[files] assign[=] call[name[os].listdir, parameter[name[self].source]] call[name[files].sort, parameter[]] for taget[name[filename]] in starred[name[files]] begin[:] <ast.Try object at 0x7da18f723fa0>
keyword[def] identifier[iter] ( identifier[self] , identifier[reset] = keyword[False] , identifier[reverse] = keyword[False] ): literal[string] identifier[files] = identifier[os] . identifier[listdir] ( identifier[self] . identifier[source] ) identifier[files] . identifier[sort] ( identifier[reverse] = identifier[reverse] ) keyword[for] identifier[filename] keyword[in] identifier[files] : keyword[try] : keyword[yield] identifier[self] . identifier[get] ( identifier[filename] , identifier[reset] ) keyword[except] identifier[PostNotFound] : keyword[continue]
def iter(self, reset=False, reverse=False): """ Yield processed files one at a time, in natural order. """ files = os.listdir(self.source) files.sort(reverse=reverse) for filename in files: try: yield self.get(filename, reset) # depends on [control=['try'], data=[]] except PostNotFound: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['filename']]
def _close(self): """ Close the file if it was created by the writer. If a buffer or file-like object was passed in, for example a GzipFile, then leave this file open for the caller to close. In either case, attempt to flush the file contents to ensure they are written to disk (if supported) """ # Some file-like objects might not support flush try: self._file.flush() except AttributeError: pass if self._own_file: self._file.close()
def function[_close, parameter[self]]: constant[ Close the file if it was created by the writer. If a buffer or file-like object was passed in, for example a GzipFile, then leave this file open for the caller to close. In either case, attempt to flush the file contents to ensure they are written to disk (if supported) ] <ast.Try object at 0x7da20e9b2f20> if name[self]._own_file begin[:] call[name[self]._file.close, parameter[]]
keyword[def] identifier[_close] ( identifier[self] ): literal[string] keyword[try] : identifier[self] . identifier[_file] . identifier[flush] () keyword[except] identifier[AttributeError] : keyword[pass] keyword[if] identifier[self] . identifier[_own_file] : identifier[self] . identifier[_file] . identifier[close] ()
def _close(self): """ Close the file if it was created by the writer. If a buffer or file-like object was passed in, for example a GzipFile, then leave this file open for the caller to close. In either case, attempt to flush the file contents to ensure they are written to disk (if supported) """ # Some file-like objects might not support flush try: self._file.flush() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] if self._own_file: self._file.close() # depends on [control=['if'], data=[]]
def get_transaction_details(tx_hash, coin_symbol='btc', limit=None, tx_input_offset=None, tx_output_offset=None, include_hex=False, show_confidence=False, confidence_only=False, api_key=None): """ Takes a tx_hash, coin_symbol, and limit and returns the transaction details Optional: - limit: # inputs/ouputs to include (applies to both) - tx_input_offset: input offset - tx_output_offset: output offset - include_hex: include the raw TX hex - show_confidence: adds confidence information to unconfirmed TXRefs. - confidence_only: show only the confidence statistics and don't return the rest of the endpoint details (faster) """ assert is_valid_hash(tx_hash), tx_hash assert is_valid_coin_symbol(coin_symbol), coin_symbol added = 'txs/{}{}'.format(tx_hash, '/confidence' if confidence_only else '') url = make_url(coin_symbol, added) params = {} if api_key: params['token'] = api_key if limit: params['limit'] = limit if tx_input_offset: params['inStart'] = tx_input_offset if tx_output_offset: params['outStart'] = tx_output_offset if include_hex: params['includeHex'] = 'true' if show_confidence and not confidence_only: params['includeConfidence'] = 'true' r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) response_dict = get_valid_json(r) if 'error' not in response_dict and not confidence_only: if response_dict['block_height'] > 0: response_dict['confirmed'] = parser.parse(response_dict['confirmed']) else: response_dict['block_height'] = None # Blockcypher reports fake times if it's not in a block response_dict['confirmed'] = None # format this string as a datetime object response_dict['received'] = parser.parse(response_dict['received']) return response_dict
def function[get_transaction_details, parameter[tx_hash, coin_symbol, limit, tx_input_offset, tx_output_offset, include_hex, show_confidence, confidence_only, api_key]]: constant[ Takes a tx_hash, coin_symbol, and limit and returns the transaction details Optional: - limit: # inputs/ouputs to include (applies to both) - tx_input_offset: input offset - tx_output_offset: output offset - include_hex: include the raw TX hex - show_confidence: adds confidence information to unconfirmed TXRefs. - confidence_only: show only the confidence statistics and don't return the rest of the endpoint details (faster) ] assert[call[name[is_valid_hash], parameter[name[tx_hash]]]] assert[call[name[is_valid_coin_symbol], parameter[name[coin_symbol]]]] variable[added] assign[=] call[constant[txs/{}{}].format, parameter[name[tx_hash], <ast.IfExp object at 0x7da18bccb9d0>]] variable[url] assign[=] call[name[make_url], parameter[name[coin_symbol], name[added]]] variable[params] assign[=] dictionary[[], []] if name[api_key] begin[:] call[name[params]][constant[token]] assign[=] name[api_key] if name[limit] begin[:] call[name[params]][constant[limit]] assign[=] name[limit] if name[tx_input_offset] begin[:] call[name[params]][constant[inStart]] assign[=] name[tx_input_offset] if name[tx_output_offset] begin[:] call[name[params]][constant[outStart]] assign[=] name[tx_output_offset] if name[include_hex] begin[:] call[name[params]][constant[includeHex]] assign[=] constant[true] if <ast.BoolOp object at 0x7da18bccbac0> begin[:] call[name[params]][constant[includeConfidence]] assign[=] constant[true] variable[r] assign[=] call[name[requests].get, parameter[name[url]]] variable[response_dict] assign[=] call[name[get_valid_json], parameter[name[r]]] if <ast.BoolOp object at 0x7da18bcc9f30> begin[:] if compare[call[name[response_dict]][constant[block_height]] greater[>] constant[0]] begin[:] call[name[response_dict]][constant[confirmed]] assign[=] call[name[parser].parse, parameter[call[name[response_dict]][constant[confirmed]]]] call[name[response_dict]][constant[received]] assign[=] call[name[parser].parse, parameter[call[name[response_dict]][constant[received]]]] return[name[response_dict]]
keyword[def] identifier[get_transaction_details] ( identifier[tx_hash] , identifier[coin_symbol] = literal[string] , identifier[limit] = keyword[None] , identifier[tx_input_offset] = keyword[None] , identifier[tx_output_offset] = keyword[None] , identifier[include_hex] = keyword[False] , identifier[show_confidence] = keyword[False] , identifier[confidence_only] = keyword[False] , identifier[api_key] = keyword[None] ): literal[string] keyword[assert] identifier[is_valid_hash] ( identifier[tx_hash] ), identifier[tx_hash] keyword[assert] identifier[is_valid_coin_symbol] ( identifier[coin_symbol] ), identifier[coin_symbol] identifier[added] = literal[string] . identifier[format] ( identifier[tx_hash] , literal[string] keyword[if] identifier[confidence_only] keyword[else] literal[string] ) identifier[url] = identifier[make_url] ( identifier[coin_symbol] , identifier[added] ) identifier[params] ={} keyword[if] identifier[api_key] : identifier[params] [ literal[string] ]= identifier[api_key] keyword[if] identifier[limit] : identifier[params] [ literal[string] ]= identifier[limit] keyword[if] identifier[tx_input_offset] : identifier[params] [ literal[string] ]= identifier[tx_input_offset] keyword[if] identifier[tx_output_offset] : identifier[params] [ literal[string] ]= identifier[tx_output_offset] keyword[if] identifier[include_hex] : identifier[params] [ literal[string] ]= literal[string] keyword[if] identifier[show_confidence] keyword[and] keyword[not] identifier[confidence_only] : identifier[params] [ literal[string] ]= literal[string] identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[verify] = keyword[True] , identifier[timeout] = identifier[TIMEOUT_IN_SECONDS] ) identifier[response_dict] = identifier[get_valid_json] ( identifier[r] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[response_dict] keyword[and] keyword[not] identifier[confidence_only] : keyword[if] identifier[response_dict] [ literal[string] ]> literal[int] : identifier[response_dict] [ literal[string] ]= identifier[parser] . identifier[parse] ( identifier[response_dict] [ literal[string] ]) keyword[else] : identifier[response_dict] [ literal[string] ]= keyword[None] identifier[response_dict] [ literal[string] ]= keyword[None] identifier[response_dict] [ literal[string] ]= identifier[parser] . identifier[parse] ( identifier[response_dict] [ literal[string] ]) keyword[return] identifier[response_dict]
def get_transaction_details(tx_hash, coin_symbol='btc', limit=None, tx_input_offset=None, tx_output_offset=None, include_hex=False, show_confidence=False, confidence_only=False, api_key=None): """ Takes a tx_hash, coin_symbol, and limit and returns the transaction details Optional: - limit: # inputs/ouputs to include (applies to both) - tx_input_offset: input offset - tx_output_offset: output offset - include_hex: include the raw TX hex - show_confidence: adds confidence information to unconfirmed TXRefs. - confidence_only: show only the confidence statistics and don't return the rest of the endpoint details (faster) """ assert is_valid_hash(tx_hash), tx_hash assert is_valid_coin_symbol(coin_symbol), coin_symbol added = 'txs/{}{}'.format(tx_hash, '/confidence' if confidence_only else '') url = make_url(coin_symbol, added) params = {} if api_key: params['token'] = api_key # depends on [control=['if'], data=[]] if limit: params['limit'] = limit # depends on [control=['if'], data=[]] if tx_input_offset: params['inStart'] = tx_input_offset # depends on [control=['if'], data=[]] if tx_output_offset: params['outStart'] = tx_output_offset # depends on [control=['if'], data=[]] if include_hex: params['includeHex'] = 'true' # depends on [control=['if'], data=[]] if show_confidence and (not confidence_only): params['includeConfidence'] = 'true' # depends on [control=['if'], data=[]] r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) response_dict = get_valid_json(r) if 'error' not in response_dict and (not confidence_only): if response_dict['block_height'] > 0: response_dict['confirmed'] = parser.parse(response_dict['confirmed']) # depends on [control=['if'], data=[]] else: response_dict['block_height'] = None # Blockcypher reports fake times if it's not in a block response_dict['confirmed'] = None # format this string as a datetime object response_dict['received'] = parser.parse(response_dict['received']) # depends on [control=['if'], data=[]] return response_dict
def get_valid_user_by_email(email): """ Return user instance """ user = get_user(email) if user: if user.valid is False: return Err("user not valid") return Ok(user) return Err("user not exists")
def function[get_valid_user_by_email, parameter[email]]: constant[ Return user instance ] variable[user] assign[=] call[name[get_user], parameter[name[email]]] if name[user] begin[:] if compare[name[user].valid is constant[False]] begin[:] return[call[name[Err], parameter[constant[user not valid]]]] return[call[name[Ok], parameter[name[user]]]] return[call[name[Err], parameter[constant[user not exists]]]]
keyword[def] identifier[get_valid_user_by_email] ( identifier[email] ): literal[string] identifier[user] = identifier[get_user] ( identifier[email] ) keyword[if] identifier[user] : keyword[if] identifier[user] . identifier[valid] keyword[is] keyword[False] : keyword[return] identifier[Err] ( literal[string] ) keyword[return] identifier[Ok] ( identifier[user] ) keyword[return] identifier[Err] ( literal[string] )
def get_valid_user_by_email(email): """ Return user instance """ user = get_user(email) if user: if user.valid is False: return Err('user not valid') # depends on [control=['if'], data=[]] return Ok(user) # depends on [control=['if'], data=[]] return Err('user not exists')
async def set_heating_level(self, level, duration=0): """Update heating data json.""" url = '{}/devices/{}'.format(API_URL, self.device.deviceid) # Catch bad inputs level = 10 if level < 10 else level level = 100 if level > 100 else level if self.side == 'left': data = { 'leftHeatingDuration': duration, 'leftTargetHeatingLevel': level } elif self.side == 'right': data = { 'rightHeatingDuration': duration, 'rightTargetHeatingLevel': level } set_heat = await self.device.api_put(url, data) if set_heat is None: _LOGGER.error('Unable to set eight heating level.') else: # Standard device json is returned after setting self.device.handle_device_json(set_heat['device'])
<ast.AsyncFunctionDef object at 0x7da204567bb0>
keyword[async] keyword[def] identifier[set_heating_level] ( identifier[self] , identifier[level] , identifier[duration] = literal[int] ): literal[string] identifier[url] = literal[string] . identifier[format] ( identifier[API_URL] , identifier[self] . identifier[device] . identifier[deviceid] ) identifier[level] = literal[int] keyword[if] identifier[level] < literal[int] keyword[else] identifier[level] identifier[level] = literal[int] keyword[if] identifier[level] > literal[int] keyword[else] identifier[level] keyword[if] identifier[self] . identifier[side] == literal[string] : identifier[data] ={ literal[string] : identifier[duration] , literal[string] : identifier[level] } keyword[elif] identifier[self] . identifier[side] == literal[string] : identifier[data] ={ literal[string] : identifier[duration] , literal[string] : identifier[level] } identifier[set_heat] = keyword[await] identifier[self] . identifier[device] . identifier[api_put] ( identifier[url] , identifier[data] ) keyword[if] identifier[set_heat] keyword[is] keyword[None] : identifier[_LOGGER] . identifier[error] ( literal[string] ) keyword[else] : identifier[self] . identifier[device] . identifier[handle_device_json] ( identifier[set_heat] [ literal[string] ])
async def set_heating_level(self, level, duration=0): """Update heating data json.""" url = '{}/devices/{}'.format(API_URL, self.device.deviceid) # Catch bad inputs level = 10 if level < 10 else level level = 100 if level > 100 else level if self.side == 'left': data = {'leftHeatingDuration': duration, 'leftTargetHeatingLevel': level} # depends on [control=['if'], data=[]] elif self.side == 'right': data = {'rightHeatingDuration': duration, 'rightTargetHeatingLevel': level} # depends on [control=['if'], data=[]] set_heat = await self.device.api_put(url, data) if set_heat is None: _LOGGER.error('Unable to set eight heating level.') # depends on [control=['if'], data=[]] else: # Standard device json is returned after setting self.device.handle_device_json(set_heat['device'])
def ilumin(method, target, et, fixref, abcorr, obsrvr, spoint): """ Find the illumination angles (phase, solar incidence, and emission) at a specified surface point of a target body. This routine supersedes illum. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ilumin_c.html :param method: Computation method. :type method: str :param target: Name of target body. :type target: str :param et: Epoch in ephemeris seconds past J2000. :type et: float :param fixref: Body-fixed, body-centered target body frame. :type fixref: str :param abcorr: Desired aberration correction. :type abcorr: str :param obsrvr: Name of observing body. :type obsrvr: str :param spoint: Body-fixed coordinates of a target surface point. :type spoint: 3-Element Array of floats :return: Target surface point epoch, Vector from observer to target surface point, Phase angle, Solar incidence angle, and Emission angle at the surface point. :rtype: tuple """ method = stypes.stringToCharP(method) target = stypes.stringToCharP(target) et = ctypes.c_double(et) fixref = stypes.stringToCharP(fixref) abcorr = stypes.stringToCharP(abcorr) obsrvr = stypes.stringToCharP(obsrvr) spoint = stypes.toDoubleVector(spoint) trgepc = ctypes.c_double(0) srfvec = stypes.emptyDoubleVector(3) phase = ctypes.c_double(0) solar = ctypes.c_double(0) emissn = ctypes.c_double(0) libspice.ilumin_c(method, target, et, fixref, abcorr, obsrvr, spoint, ctypes.byref(trgepc), srfvec, ctypes.byref(phase), ctypes.byref(solar), ctypes.byref(emissn)) return trgepc.value, stypes.cVectorToPython( srfvec), phase.value, solar.value, emissn.value
def function[ilumin, parameter[method, target, et, fixref, abcorr, obsrvr, spoint]]: constant[ Find the illumination angles (phase, solar incidence, and emission) at a specified surface point of a target body. This routine supersedes illum. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ilumin_c.html :param method: Computation method. :type method: str :param target: Name of target body. :type target: str :param et: Epoch in ephemeris seconds past J2000. :type et: float :param fixref: Body-fixed, body-centered target body frame. :type fixref: str :param abcorr: Desired aberration correction. :type abcorr: str :param obsrvr: Name of observing body. :type obsrvr: str :param spoint: Body-fixed coordinates of a target surface point. :type spoint: 3-Element Array of floats :return: Target surface point epoch, Vector from observer to target surface point, Phase angle, Solar incidence angle, and Emission angle at the surface point. :rtype: tuple ] variable[method] assign[=] call[name[stypes].stringToCharP, parameter[name[method]]] variable[target] assign[=] call[name[stypes].stringToCharP, parameter[name[target]]] variable[et] assign[=] call[name[ctypes].c_double, parameter[name[et]]] variable[fixref] assign[=] call[name[stypes].stringToCharP, parameter[name[fixref]]] variable[abcorr] assign[=] call[name[stypes].stringToCharP, parameter[name[abcorr]]] variable[obsrvr] assign[=] call[name[stypes].stringToCharP, parameter[name[obsrvr]]] variable[spoint] assign[=] call[name[stypes].toDoubleVector, parameter[name[spoint]]] variable[trgepc] assign[=] call[name[ctypes].c_double, parameter[constant[0]]] variable[srfvec] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]] variable[phase] assign[=] call[name[ctypes].c_double, parameter[constant[0]]] variable[solar] assign[=] call[name[ctypes].c_double, parameter[constant[0]]] variable[emissn] assign[=] call[name[ctypes].c_double, parameter[constant[0]]] call[name[libspice].ilumin_c, parameter[name[method], name[target], name[et], name[fixref], name[abcorr], name[obsrvr], name[spoint], call[name[ctypes].byref, parameter[name[trgepc]]], name[srfvec], call[name[ctypes].byref, parameter[name[phase]]], call[name[ctypes].byref, parameter[name[solar]]], call[name[ctypes].byref, parameter[name[emissn]]]]] return[tuple[[<ast.Attribute object at 0x7da18f09f790>, <ast.Call object at 0x7da18f09f100>, <ast.Attribute object at 0x7da18f09fe50>, <ast.Attribute object at 0x7da18f09f580>, <ast.Attribute object at 0x7da18f09f6a0>]]]
keyword[def] identifier[ilumin] ( identifier[method] , identifier[target] , identifier[et] , identifier[fixref] , identifier[abcorr] , identifier[obsrvr] , identifier[spoint] ): literal[string] identifier[method] = identifier[stypes] . identifier[stringToCharP] ( identifier[method] ) identifier[target] = identifier[stypes] . identifier[stringToCharP] ( identifier[target] ) identifier[et] = identifier[ctypes] . identifier[c_double] ( identifier[et] ) identifier[fixref] = identifier[stypes] . identifier[stringToCharP] ( identifier[fixref] ) identifier[abcorr] = identifier[stypes] . identifier[stringToCharP] ( identifier[abcorr] ) identifier[obsrvr] = identifier[stypes] . identifier[stringToCharP] ( identifier[obsrvr] ) identifier[spoint] = identifier[stypes] . identifier[toDoubleVector] ( identifier[spoint] ) identifier[trgepc] = identifier[ctypes] . identifier[c_double] ( literal[int] ) identifier[srfvec] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] ) identifier[phase] = identifier[ctypes] . identifier[c_double] ( literal[int] ) identifier[solar] = identifier[ctypes] . identifier[c_double] ( literal[int] ) identifier[emissn] = identifier[ctypes] . identifier[c_double] ( literal[int] ) identifier[libspice] . identifier[ilumin_c] ( identifier[method] , identifier[target] , identifier[et] , identifier[fixref] , identifier[abcorr] , identifier[obsrvr] , identifier[spoint] , identifier[ctypes] . identifier[byref] ( identifier[trgepc] ), identifier[srfvec] , identifier[ctypes] . identifier[byref] ( identifier[phase] ), identifier[ctypes] . identifier[byref] ( identifier[solar] ), identifier[ctypes] . identifier[byref] ( identifier[emissn] )) keyword[return] identifier[trgepc] . identifier[value] , identifier[stypes] . identifier[cVectorToPython] ( identifier[srfvec] ), identifier[phase] . identifier[value] , identifier[solar] . identifier[value] , identifier[emissn] . identifier[value]
def ilumin(method, target, et, fixref, abcorr, obsrvr, spoint): """ Find the illumination angles (phase, solar incidence, and emission) at a specified surface point of a target body. This routine supersedes illum. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ilumin_c.html :param method: Computation method. :type method: str :param target: Name of target body. :type target: str :param et: Epoch in ephemeris seconds past J2000. :type et: float :param fixref: Body-fixed, body-centered target body frame. :type fixref: str :param abcorr: Desired aberration correction. :type abcorr: str :param obsrvr: Name of observing body. :type obsrvr: str :param spoint: Body-fixed coordinates of a target surface point. :type spoint: 3-Element Array of floats :return: Target surface point epoch, Vector from observer to target surface point, Phase angle, Solar incidence angle, and Emission angle at the surface point. :rtype: tuple """ method = stypes.stringToCharP(method) target = stypes.stringToCharP(target) et = ctypes.c_double(et) fixref = stypes.stringToCharP(fixref) abcorr = stypes.stringToCharP(abcorr) obsrvr = stypes.stringToCharP(obsrvr) spoint = stypes.toDoubleVector(spoint) trgepc = ctypes.c_double(0) srfvec = stypes.emptyDoubleVector(3) phase = ctypes.c_double(0) solar = ctypes.c_double(0) emissn = ctypes.c_double(0) libspice.ilumin_c(method, target, et, fixref, abcorr, obsrvr, spoint, ctypes.byref(trgepc), srfvec, ctypes.byref(phase), ctypes.byref(solar), ctypes.byref(emissn)) return (trgepc.value, stypes.cVectorToPython(srfvec), phase.value, solar.value, emissn.value)
def _ready(self): """ check if ther is som data is ready to get read. :return True if there is some date :rtype bool """ # if DOUT pin is low, data is ready for reading _is_ready = GPIO.input(self._dout) == 0 logging.debug("check data ready for reading: {result}".format( result="YES" if _is_ready is True else "NO" )) return _is_ready
def function[_ready, parameter[self]]: constant[ check if ther is som data is ready to get read. :return True if there is some date :rtype bool ] variable[_is_ready] assign[=] compare[call[name[GPIO].input, parameter[name[self]._dout]] equal[==] constant[0]] call[name[logging].debug, parameter[call[constant[check data ready for reading: {result}].format, parameter[]]]] return[name[_is_ready]]
keyword[def] identifier[_ready] ( identifier[self] ): literal[string] identifier[_is_ready] = identifier[GPIO] . identifier[input] ( identifier[self] . identifier[_dout] )== literal[int] identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[result] = literal[string] keyword[if] identifier[_is_ready] keyword[is] keyword[True] keyword[else] literal[string] )) keyword[return] identifier[_is_ready]
def _ready(self): """ check if ther is som data is ready to get read. :return True if there is some date :rtype bool """ # if DOUT pin is low, data is ready for reading _is_ready = GPIO.input(self._dout) == 0 logging.debug('check data ready for reading: {result}'.format(result='YES' if _is_ready is True else 'NO')) return _is_ready
def prettify(xml_string): ''' Indent an xml string with four spaces, and add an additional line break after each node. ''' reparsed = xml.dom.minidom.parseString(xml_string) return reparsed.toprettyxml(indent=" ")
def function[prettify, parameter[xml_string]]: constant[ Indent an xml string with four spaces, and add an additional line break after each node. ] variable[reparsed] assign[=] call[name[xml].dom.minidom.parseString, parameter[name[xml_string]]] return[call[name[reparsed].toprettyxml, parameter[]]]
keyword[def] identifier[prettify] ( identifier[xml_string] ): literal[string] identifier[reparsed] = identifier[xml] . identifier[dom] . identifier[minidom] . identifier[parseString] ( identifier[xml_string] ) keyword[return] identifier[reparsed] . identifier[toprettyxml] ( identifier[indent] = literal[string] )
def prettify(xml_string): """ Indent an xml string with four spaces, and add an additional line break after each node. """ reparsed = xml.dom.minidom.parseString(xml_string) return reparsed.toprettyxml(indent=' ')
def modify(self, **kwargs): """Modify a contact. Returns status message Optional Parameters: * name -- Contact name Type: String * email -- Contact email address Type: String * cellphone -- Cellphone number, without the country code part. In some countries you are supposed to exclude leading zeroes. (Requires countrycode and countryiso) Type: String * countrycode -- Cellphone country code (Requires cellphone and countryiso) Type: String * countryiso -- Cellphone country ISO code. For example: US (USA), GB (Britain) or SE (Sweden) (Requires cellphone and countrycode) Type: String * defaultsmsprovider -- Default SMS provider Type: String ['clickatell', 'bulksms', 'esendex', 'cellsynt'] * directtwitter -- Send tweets as direct messages Type: Boolean Default: True * twitteruser -- Twitter user Type: String """ # Warn user about unhandled parameters for key in kwargs: if key not in ['email', 'cellphone', 'countrycode', 'countryiso', 'defaultsmsprovider', 'directtwitter', 'twitteruser', 'name']: sys.stderr.write("'%s'" % key + ' is not a valid argument ' + 'of <PingdomContact>.modify()\n') response = self.pingdom.request('PUT', 'notification_contacts/%s' % self.id, kwargs) return response.json()['message']
def function[modify, parameter[self]]: constant[Modify a contact. Returns status message Optional Parameters: * name -- Contact name Type: String * email -- Contact email address Type: String * cellphone -- Cellphone number, without the country code part. In some countries you are supposed to exclude leading zeroes. (Requires countrycode and countryiso) Type: String * countrycode -- Cellphone country code (Requires cellphone and countryiso) Type: String * countryiso -- Cellphone country ISO code. For example: US (USA), GB (Britain) or SE (Sweden) (Requires cellphone and countrycode) Type: String * defaultsmsprovider -- Default SMS provider Type: String ['clickatell', 'bulksms', 'esendex', 'cellsynt'] * directtwitter -- Send tweets as direct messages Type: Boolean Default: True * twitteruser -- Twitter user Type: String ] for taget[name[key]] in starred[name[kwargs]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b0fe6cb0>, <ast.Constant object at 0x7da1b0fe40a0>, <ast.Constant object at 0x7da1b0fe6a10>, <ast.Constant object at 0x7da1b0fe52d0>, <ast.Constant object at 0x7da1b0fe60e0>, <ast.Constant object at 0x7da1b0fe7070>, <ast.Constant object at 0x7da1b0fe5ba0>, <ast.Constant object at 0x7da1b0fe72e0>]]] begin[:] call[name[sys].stderr.write, parameter[binary_operation[binary_operation[binary_operation[constant['%s'] <ast.Mod object at 0x7da2590d6920> name[key]] + constant[ is not a valid argument ]] + constant[of <PingdomContact>.modify() ]]]] variable[response] assign[=] call[name[self].pingdom.request, parameter[constant[PUT], binary_operation[constant[notification_contacts/%s] <ast.Mod object at 0x7da2590d6920> name[self].id], name[kwargs]]] return[call[call[name[response].json, parameter[]]][constant[message]]]
keyword[def] identifier[modify] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[key] keyword[in] identifier[kwargs] : keyword[if] identifier[key] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] % identifier[key] + literal[string] + literal[string] ) identifier[response] = identifier[self] . identifier[pingdom] . identifier[request] ( literal[string] , literal[string] % identifier[self] . identifier[id] , identifier[kwargs] ) keyword[return] identifier[response] . identifier[json] ()[ literal[string] ]
def modify(self, **kwargs): """Modify a contact. Returns status message Optional Parameters: * name -- Contact name Type: String * email -- Contact email address Type: String * cellphone -- Cellphone number, without the country code part. In some countries you are supposed to exclude leading zeroes. (Requires countrycode and countryiso) Type: String * countrycode -- Cellphone country code (Requires cellphone and countryiso) Type: String * countryiso -- Cellphone country ISO code. For example: US (USA), GB (Britain) or SE (Sweden) (Requires cellphone and countrycode) Type: String * defaultsmsprovider -- Default SMS provider Type: String ['clickatell', 'bulksms', 'esendex', 'cellsynt'] * directtwitter -- Send tweets as direct messages Type: Boolean Default: True * twitteruser -- Twitter user Type: String """ # Warn user about unhandled parameters for key in kwargs: if key not in ['email', 'cellphone', 'countrycode', 'countryiso', 'defaultsmsprovider', 'directtwitter', 'twitteruser', 'name']: sys.stderr.write("'%s'" % key + ' is not a valid argument ' + 'of <PingdomContact>.modify()\n') # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] response = self.pingdom.request('PUT', 'notification_contacts/%s' % self.id, kwargs) return response.json()['message']
def iglob(pathname): """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. """ if not has_magic(pathname): yield pathname return dirname, basename = os.path.split(pathname) if not dirname: for name in glob1(dirname, basename): yield name return if has_magic(dirname): dirs = iglob(dirname) else: dirs = [dirname] if has_magic(basename): glob_in_dir = glob1 else: glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): yield os.path.join(dirname, name)
def function[iglob, parameter[pathname]]: constant[Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. ] if <ast.UnaryOp object at 0x7da1b08459f0> begin[:] <ast.Yield object at 0x7da1b0847520> return[None] <ast.Tuple object at 0x7da1b0846a40> assign[=] call[name[os].path.split, parameter[name[pathname]]] if <ast.UnaryOp object at 0x7da1b080bcd0> begin[:] for taget[name[name]] in starred[call[name[glob1], parameter[name[dirname], name[basename]]]] begin[:] <ast.Yield object at 0x7da1b080b6d0> return[None] if call[name[has_magic], parameter[name[dirname]]] begin[:] variable[dirs] assign[=] call[name[iglob], parameter[name[dirname]]] if call[name[has_magic], parameter[name[basename]]] begin[:] variable[glob_in_dir] assign[=] name[glob1] for taget[name[dirname]] in starred[name[dirs]] begin[:] for taget[name[name]] in starred[call[name[glob_in_dir], parameter[name[dirname], name[basename]]]] begin[:] <ast.Yield object at 0x7da1b07aa6b0>
keyword[def] identifier[iglob] ( identifier[pathname] ): literal[string] keyword[if] keyword[not] identifier[has_magic] ( identifier[pathname] ): keyword[yield] identifier[pathname] keyword[return] identifier[dirname] , identifier[basename] = identifier[os] . identifier[path] . identifier[split] ( identifier[pathname] ) keyword[if] keyword[not] identifier[dirname] : keyword[for] identifier[name] keyword[in] identifier[glob1] ( identifier[dirname] , identifier[basename] ): keyword[yield] identifier[name] keyword[return] keyword[if] identifier[has_magic] ( identifier[dirname] ): identifier[dirs] = identifier[iglob] ( identifier[dirname] ) keyword[else] : identifier[dirs] =[ identifier[dirname] ] keyword[if] identifier[has_magic] ( identifier[basename] ): identifier[glob_in_dir] = identifier[glob1] keyword[else] : identifier[glob_in_dir] = identifier[glob0] keyword[for] identifier[dirname] keyword[in] identifier[dirs] : keyword[for] identifier[name] keyword[in] identifier[glob_in_dir] ( identifier[dirname] , identifier[basename] ): keyword[yield] identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , identifier[name] )
def iglob(pathname): """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. """ if not has_magic(pathname): yield pathname return # depends on [control=['if'], data=[]] (dirname, basename) = os.path.split(pathname) if not dirname: for name in glob1(dirname, basename): yield name # depends on [control=['for'], data=['name']] return # depends on [control=['if'], data=[]] if has_magic(dirname): dirs = iglob(dirname) # depends on [control=['if'], data=[]] else: dirs = [dirname] if has_magic(basename): glob_in_dir = glob1 # depends on [control=['if'], data=[]] else: glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): yield os.path.join(dirname, name) # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=['dirname']]
def check_inlet(self, helper): """ check the Inlets of Raritan PDUs """ # walk the data try: inlet_values = self.sess.walk_oid(self.oids['oid_inlet_value']) inlet_units = self.sess.walk_oid(self.oids['oid_inlet_unit']) inlet_digits = self.sess.walk_oid(self.oids['oid_inlet_digits']) inlet_states = self.sess.walk_oid(self.oids['oid_inlet_state']) inlet_warning_uppers = self.sess.walk_oid(self.oids['oid_inlet_warning_upper']) inlet_critical_uppers = self.sess.walk_oid(self.oids['oid_inlet_critical_upper']) inlet_critical_lowers = self.sess.walk_oid(self.oids['oid_inlet_critical_lower']) inlet_warning_lowers = self.sess.walk_oid(self.oids['oid_inlet_warning_lower']) except health_monitoring_plugins.SnmpException as e: helper.exit(summary=str(e), exit_code=unknown, perfdata='') # just print the summary, that the inlet sensors are checked helper.add_summary("Inlet") # all list must have the same length, if not something went wrong. that makes it easier and we need less loops # translate the data in human readable units with help of the dicts for x in range(len(inlet_values)): inlet_unit = units[int(inlet_units[x].val)] inlet_digit = inlet_digits[x].val inlet_state = states[int(inlet_states[x].val)] inlet_value = real_value(inlet_values[x].val, inlet_digit) inlet_warning_upper = real_value(inlet_warning_uppers[x].val, inlet_digit) inlet_critical_upper = real_value(inlet_critical_uppers[x].val, inlet_digit) inlet_warning_lower = real_value(inlet_warning_lowers[x].val, inlet_digit) inlet_critical_lower = real_value(inlet_critical_lowers[x].val, inlet_digit) if inlet_state != "normal": # we don't want to use the thresholds. we rely on the state value of the device helper.add_summary("%s %s is %s" % (inlet_value, inlet_unit, inlet_state)) helper.status(critical) # we always want to see the values in the long output and in the perf data helper.add_summary("%s %s" % (inlet_value, inlet_unit)) helper.add_long_output("%s %s: %s" % (inlet_value, inlet_unit, inlet_state)) helper.add_metric("Sensor " + str(x) + " -%s-" % inlet_unit, inlet_value, inlet_warning_lower +\ ":" + inlet_warning_upper, inlet_critical_lower + ":" +\ inlet_critical_upper, "", "", "")
def function[check_inlet, parameter[self, helper]]: constant[ check the Inlets of Raritan PDUs ] <ast.Try object at 0x7da1b1b04490> call[name[helper].add_summary, parameter[constant[Inlet]]] for taget[name[x]] in starred[call[name[range], parameter[call[name[len], parameter[name[inlet_values]]]]]] begin[:] variable[inlet_unit] assign[=] call[name[units]][call[name[int], parameter[call[name[inlet_units]][name[x]].val]]] variable[inlet_digit] assign[=] call[name[inlet_digits]][name[x]].val variable[inlet_state] assign[=] call[name[states]][call[name[int], parameter[call[name[inlet_states]][name[x]].val]]] variable[inlet_value] assign[=] call[name[real_value], parameter[call[name[inlet_values]][name[x]].val, name[inlet_digit]]] variable[inlet_warning_upper] assign[=] call[name[real_value], parameter[call[name[inlet_warning_uppers]][name[x]].val, name[inlet_digit]]] variable[inlet_critical_upper] assign[=] call[name[real_value], parameter[call[name[inlet_critical_uppers]][name[x]].val, name[inlet_digit]]] variable[inlet_warning_lower] assign[=] call[name[real_value], parameter[call[name[inlet_warning_lowers]][name[x]].val, name[inlet_digit]]] variable[inlet_critical_lower] assign[=] call[name[real_value], parameter[call[name[inlet_critical_lowers]][name[x]].val, name[inlet_digit]]] if compare[name[inlet_state] not_equal[!=] constant[normal]] begin[:] call[name[helper].add_summary, parameter[binary_operation[constant[%s %s is %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bdc580>, <ast.Name object at 0x7da1b1bdc550>, <ast.Name object at 0x7da1b1bdc5b0>]]]]] call[name[helper].status, parameter[name[critical]]] call[name[helper].add_summary, parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bdeb30>, <ast.Name object at 0x7da1b1bdea10>]]]]] call[name[helper].add_long_output, parameter[binary_operation[constant[%s %s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bdc2b0>, <ast.Name object at 0x7da1b1bde560>, <ast.Name object at 0x7da1b1bde410>]]]]] call[name[helper].add_metric, parameter[binary_operation[binary_operation[constant[Sensor ] + call[name[str], parameter[name[x]]]] + binary_operation[constant[ -%s-] <ast.Mod object at 0x7da2590d6920> name[inlet_unit]]], name[inlet_value], binary_operation[binary_operation[name[inlet_warning_lower] + constant[:]] + name[inlet_warning_upper]], binary_operation[binary_operation[name[inlet_critical_lower] + constant[:]] + name[inlet_critical_upper]], constant[], constant[], constant[]]]
keyword[def] identifier[check_inlet] ( identifier[self] , identifier[helper] ): literal[string] keyword[try] : identifier[inlet_values] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_units] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_digits] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_states] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_warning_uppers] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_critical_uppers] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_critical_lowers] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) identifier[inlet_warning_lowers] = identifier[self] . identifier[sess] . identifier[walk_oid] ( identifier[self] . identifier[oids] [ literal[string] ]) keyword[except] identifier[health_monitoring_plugins] . identifier[SnmpException] keyword[as] identifier[e] : identifier[helper] . identifier[exit] ( identifier[summary] = identifier[str] ( identifier[e] ), identifier[exit_code] = identifier[unknown] , identifier[perfdata] = literal[string] ) identifier[helper] . identifier[add_summary] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[len] ( identifier[inlet_values] )): identifier[inlet_unit] = identifier[units] [ identifier[int] ( identifier[inlet_units] [ identifier[x] ]. identifier[val] )] identifier[inlet_digit] = identifier[inlet_digits] [ identifier[x] ]. identifier[val] identifier[inlet_state] = identifier[states] [ identifier[int] ( identifier[inlet_states] [ identifier[x] ]. identifier[val] )] identifier[inlet_value] = identifier[real_value] ( identifier[inlet_values] [ identifier[x] ]. identifier[val] , identifier[inlet_digit] ) identifier[inlet_warning_upper] = identifier[real_value] ( identifier[inlet_warning_uppers] [ identifier[x] ]. identifier[val] , identifier[inlet_digit] ) identifier[inlet_critical_upper] = identifier[real_value] ( identifier[inlet_critical_uppers] [ identifier[x] ]. identifier[val] , identifier[inlet_digit] ) identifier[inlet_warning_lower] = identifier[real_value] ( identifier[inlet_warning_lowers] [ identifier[x] ]. identifier[val] , identifier[inlet_digit] ) identifier[inlet_critical_lower] = identifier[real_value] ( identifier[inlet_critical_lowers] [ identifier[x] ]. identifier[val] , identifier[inlet_digit] ) keyword[if] identifier[inlet_state] != literal[string] : identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[inlet_value] , identifier[inlet_unit] , identifier[inlet_state] )) identifier[helper] . identifier[status] ( identifier[critical] ) identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[inlet_value] , identifier[inlet_unit] )) identifier[helper] . identifier[add_long_output] ( literal[string] %( identifier[inlet_value] , identifier[inlet_unit] , identifier[inlet_state] )) identifier[helper] . identifier[add_metric] ( literal[string] + identifier[str] ( identifier[x] )+ literal[string] % identifier[inlet_unit] , identifier[inlet_value] , identifier[inlet_warning_lower] + literal[string] + identifier[inlet_warning_upper] , identifier[inlet_critical_lower] + literal[string] + identifier[inlet_critical_upper] , literal[string] , literal[string] , literal[string] )
def check_inlet(self, helper): """ check the Inlets of Raritan PDUs """ # walk the data try: inlet_values = self.sess.walk_oid(self.oids['oid_inlet_value']) inlet_units = self.sess.walk_oid(self.oids['oid_inlet_unit']) inlet_digits = self.sess.walk_oid(self.oids['oid_inlet_digits']) inlet_states = self.sess.walk_oid(self.oids['oid_inlet_state']) inlet_warning_uppers = self.sess.walk_oid(self.oids['oid_inlet_warning_upper']) inlet_critical_uppers = self.sess.walk_oid(self.oids['oid_inlet_critical_upper']) inlet_critical_lowers = self.sess.walk_oid(self.oids['oid_inlet_critical_lower']) inlet_warning_lowers = self.sess.walk_oid(self.oids['oid_inlet_warning_lower']) # depends on [control=['try'], data=[]] except health_monitoring_plugins.SnmpException as e: helper.exit(summary=str(e), exit_code=unknown, perfdata='') # depends on [control=['except'], data=['e']] # just print the summary, that the inlet sensors are checked helper.add_summary('Inlet') # all list must have the same length, if not something went wrong. that makes it easier and we need less loops # translate the data in human readable units with help of the dicts for x in range(len(inlet_values)): inlet_unit = units[int(inlet_units[x].val)] inlet_digit = inlet_digits[x].val inlet_state = states[int(inlet_states[x].val)] inlet_value = real_value(inlet_values[x].val, inlet_digit) inlet_warning_upper = real_value(inlet_warning_uppers[x].val, inlet_digit) inlet_critical_upper = real_value(inlet_critical_uppers[x].val, inlet_digit) inlet_warning_lower = real_value(inlet_warning_lowers[x].val, inlet_digit) inlet_critical_lower = real_value(inlet_critical_lowers[x].val, inlet_digit) if inlet_state != 'normal': # we don't want to use the thresholds. we rely on the state value of the device helper.add_summary('%s %s is %s' % (inlet_value, inlet_unit, inlet_state)) helper.status(critical) # depends on [control=['if'], data=['inlet_state']] # we always want to see the values in the long output and in the perf data helper.add_summary('%s %s' % (inlet_value, inlet_unit)) helper.add_long_output('%s %s: %s' % (inlet_value, inlet_unit, inlet_state)) helper.add_metric('Sensor ' + str(x) + ' -%s-' % inlet_unit, inlet_value, inlet_warning_lower + ':' + inlet_warning_upper, inlet_critical_lower + ':' + inlet_critical_upper, '', '', '') # depends on [control=['for'], data=['x']]
def peek_lock_queue_message(self, queue_name, timeout='60'): ''' Automically retrieves and locks a message from a queue for processing. The message is guaranteed not to be delivered to other receivers (on the same subscription only) during the lock duration period specified in the queue description. Once the lock expires, the message will be available to other receivers. In order to complete processing of the message, the receiver should issue a delete command with the lock ID received from this operation. To abandon processing of the message and unlock it for other receivers, an Unlock Message command should be issued, or the lock duration period can expire. queue_name: Name of the queue. timeout: Optional. The timeout parameter is expressed in seconds. ''' _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'POST' request.host = self._get_host() request.path = '/' + _str(queue_name) + '/messages/head' request.query = [('timeout', _int_or_none(timeout))] request.path, request.query = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _create_message(response, self)
def function[peek_lock_queue_message, parameter[self, queue_name, timeout]]: constant[ Automically retrieves and locks a message from a queue for processing. The message is guaranteed not to be delivered to other receivers (on the same subscription only) during the lock duration period specified in the queue description. Once the lock expires, the message will be available to other receivers. In order to complete processing of the message, the receiver should issue a delete command with the lock ID received from this operation. To abandon processing of the message and unlock it for other receivers, an Unlock Message command should be issued, or the lock duration period can expire. queue_name: Name of the queue. timeout: Optional. The timeout parameter is expressed in seconds. ] call[name[_validate_not_none], parameter[constant[queue_name], name[queue_name]]] variable[request] assign[=] call[name[HTTPRequest], parameter[]] name[request].method assign[=] constant[POST] name[request].host assign[=] call[name[self]._get_host, parameter[]] name[request].path assign[=] binary_operation[binary_operation[constant[/] + call[name[_str], parameter[name[queue_name]]]] + constant[/messages/head]] name[request].query assign[=] list[[<ast.Tuple object at 0x7da204565420>]] <ast.Tuple object at 0x7da204567670> assign[=] call[name[self]._httpclient._update_request_uri_query, parameter[name[request]]] name[request].headers assign[=] call[name[self]._update_service_bus_header, parameter[name[request]]] variable[response] assign[=] call[name[self]._perform_request, parameter[name[request]]] return[call[name[_create_message], parameter[name[response], name[self]]]]
keyword[def] identifier[peek_lock_queue_message] ( identifier[self] , identifier[queue_name] , identifier[timeout] = literal[string] ): literal[string] identifier[_validate_not_none] ( literal[string] , identifier[queue_name] ) identifier[request] = identifier[HTTPRequest] () identifier[request] . identifier[method] = literal[string] identifier[request] . identifier[host] = identifier[self] . identifier[_get_host] () identifier[request] . identifier[path] = literal[string] + identifier[_str] ( identifier[queue_name] )+ literal[string] identifier[request] . identifier[query] =[( literal[string] , identifier[_int_or_none] ( identifier[timeout] ))] identifier[request] . identifier[path] , identifier[request] . identifier[query] = identifier[self] . identifier[_httpclient] . identifier[_update_request_uri_query] ( identifier[request] ) identifier[request] . identifier[headers] = identifier[self] . identifier[_update_service_bus_header] ( identifier[request] ) identifier[response] = identifier[self] . identifier[_perform_request] ( identifier[request] ) keyword[return] identifier[_create_message] ( identifier[response] , identifier[self] )
def peek_lock_queue_message(self, queue_name, timeout='60'): """ Automically retrieves and locks a message from a queue for processing. The message is guaranteed not to be delivered to other receivers (on the same subscription only) during the lock duration period specified in the queue description. Once the lock expires, the message will be available to other receivers. In order to complete processing of the message, the receiver should issue a delete command with the lock ID received from this operation. To abandon processing of the message and unlock it for other receivers, an Unlock Message command should be issued, or the lock duration period can expire. queue_name: Name of the queue. timeout: Optional. The timeout parameter is expressed in seconds. """ _validate_not_none('queue_name', queue_name) request = HTTPRequest() request.method = 'POST' request.host = self._get_host() request.path = '/' + _str(queue_name) + '/messages/head' request.query = [('timeout', _int_or_none(timeout))] (request.path, request.query) = self._httpclient._update_request_uri_query(request) # pylint: disable=protected-access request.headers = self._update_service_bus_header(request) response = self._perform_request(request) return _create_message(response, self)
def get_descriptors_in_module(mdl, submodule=True): r"""Get descriptors in module. Parameters: mdl(module): module to search submodule(bool): search recursively Returns: Iterator[Descriptor] """ __all__ = getattr(mdl, "__all__", None) if __all__ is None: __all__ = dir(mdl) all_values = (getattr(mdl, name) for name in __all__ if name[:1] != "_") if submodule: for v in all_values: if is_descriptor_class(v): yield v if isinstance(v, ModuleType): for v in get_descriptors_in_module(v, submodule=True): yield v else: for v in all_values: if is_descriptor_class(v): yield v
def function[get_descriptors_in_module, parameter[mdl, submodule]]: constant[Get descriptors in module. Parameters: mdl(module): module to search submodule(bool): search recursively Returns: Iterator[Descriptor] ] variable[__all__] assign[=] call[name[getattr], parameter[name[mdl], constant[__all__], constant[None]]] if compare[name[__all__] is constant[None]] begin[:] variable[__all__] assign[=] call[name[dir], parameter[name[mdl]]] variable[all_values] assign[=] <ast.GeneratorExp object at 0x7da1b020e380> if name[submodule] begin[:] for taget[name[v]] in starred[name[all_values]] begin[:] if call[name[is_descriptor_class], parameter[name[v]]] begin[:] <ast.Yield object at 0x7da1b020f910> if call[name[isinstance], parameter[name[v], name[ModuleType]]] begin[:] for taget[name[v]] in starred[call[name[get_descriptors_in_module], parameter[name[v]]]] begin[:] <ast.Yield object at 0x7da1b0352500>
keyword[def] identifier[get_descriptors_in_module] ( identifier[mdl] , identifier[submodule] = keyword[True] ): literal[string] identifier[__all__] = identifier[getattr] ( identifier[mdl] , literal[string] , keyword[None] ) keyword[if] identifier[__all__] keyword[is] keyword[None] : identifier[__all__] = identifier[dir] ( identifier[mdl] ) identifier[all_values] =( identifier[getattr] ( identifier[mdl] , identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[__all__] keyword[if] identifier[name] [: literal[int] ]!= literal[string] ) keyword[if] identifier[submodule] : keyword[for] identifier[v] keyword[in] identifier[all_values] : keyword[if] identifier[is_descriptor_class] ( identifier[v] ): keyword[yield] identifier[v] keyword[if] identifier[isinstance] ( identifier[v] , identifier[ModuleType] ): keyword[for] identifier[v] keyword[in] identifier[get_descriptors_in_module] ( identifier[v] , identifier[submodule] = keyword[True] ): keyword[yield] identifier[v] keyword[else] : keyword[for] identifier[v] keyword[in] identifier[all_values] : keyword[if] identifier[is_descriptor_class] ( identifier[v] ): keyword[yield] identifier[v]
def get_descriptors_in_module(mdl, submodule=True): """Get descriptors in module. Parameters: mdl(module): module to search submodule(bool): search recursively Returns: Iterator[Descriptor] """ __all__ = getattr(mdl, '__all__', None) if __all__ is None: __all__ = dir(mdl) # depends on [control=['if'], data=['__all__']] all_values = (getattr(mdl, name) for name in __all__ if name[:1] != '_') if submodule: for v in all_values: if is_descriptor_class(v): yield v # depends on [control=['if'], data=[]] if isinstance(v, ModuleType): for v in get_descriptors_in_module(v, submodule=True): yield v # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]] else: for v in all_values: if is_descriptor_class(v): yield v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['v']]
def bovy_ars(domain,isDomainFinite,abcissae,hx,hpx,nsamples=1, hxparams=(),maxn=100): """bovy_ars: Implementation of the Adaptive-Rejection Sampling algorithm by Gilks & Wild (1992): Adaptive Rejection Sampling for Gibbs Sampling, Applied Statistics, 41, 337 Based on Wild & Gilks (1993), Algorithm AS 287: Adaptive Rejection Sampling from Log-concave Density Functions, Applied Statistics, 42, 701 Input: domain - [.,.] upper and lower limit to the domain isDomainFinite - [.,.] is there a lower/upper limit to the domain? abcissae - initial list of abcissae (must lie on either side of the peak in hx if the domain is unbounded hx - function that evaluates h(x) = ln g(x) hpx - function that evaluates hp(x) = d h(x) / d x nsamples - (optional) number of desired samples (default=1) hxparams - (optional) a tuple of parameters for h(x) and h'(x) maxn - (optional) maximum number of updates to the hull (default=100) Output: list with nsamples of samples from exp(h(x)) External dependencies: math scipy scipy.stats History: 2009-05-21 - Written - Bovy (NYU) """ #First set-up the upper and lower hulls hull=setup_hull(domain,isDomainFinite,abcissae,hx,hpx,hxparams) #Then start sampling: call sampleone repeatedly out= [] nupdates= 0 for ii in range(int(nsamples)): thissample, hull, nupdates= sampleone(hull,hx,hpx,domain,isDomainFinite,maxn,nupdates,hxparams) out.append(thissample) return out
def function[bovy_ars, parameter[domain, isDomainFinite, abcissae, hx, hpx, nsamples, hxparams, maxn]]: constant[bovy_ars: Implementation of the Adaptive-Rejection Sampling algorithm by Gilks & Wild (1992): Adaptive Rejection Sampling for Gibbs Sampling, Applied Statistics, 41, 337 Based on Wild & Gilks (1993), Algorithm AS 287: Adaptive Rejection Sampling from Log-concave Density Functions, Applied Statistics, 42, 701 Input: domain - [.,.] upper and lower limit to the domain isDomainFinite - [.,.] is there a lower/upper limit to the domain? abcissae - initial list of abcissae (must lie on either side of the peak in hx if the domain is unbounded hx - function that evaluates h(x) = ln g(x) hpx - function that evaluates hp(x) = d h(x) / d x nsamples - (optional) number of desired samples (default=1) hxparams - (optional) a tuple of parameters for h(x) and h'(x) maxn - (optional) maximum number of updates to the hull (default=100) Output: list with nsamples of samples from exp(h(x)) External dependencies: math scipy scipy.stats History: 2009-05-21 - Written - Bovy (NYU) ] variable[hull] assign[=] call[name[setup_hull], parameter[name[domain], name[isDomainFinite], name[abcissae], name[hx], name[hpx], name[hxparams]]] variable[out] assign[=] list[[]] variable[nupdates] assign[=] constant[0] for taget[name[ii]] in starred[call[name[range], parameter[call[name[int], parameter[name[nsamples]]]]]] begin[:] <ast.Tuple object at 0x7da18fe93a30> assign[=] call[name[sampleone], parameter[name[hull], name[hx], name[hpx], name[domain], name[isDomainFinite], name[maxn], name[nupdates], name[hxparams]]] call[name[out].append, parameter[name[thissample]]] return[name[out]]
keyword[def] identifier[bovy_ars] ( identifier[domain] , identifier[isDomainFinite] , identifier[abcissae] , identifier[hx] , identifier[hpx] , identifier[nsamples] = literal[int] , identifier[hxparams] =(), identifier[maxn] = literal[int] ): literal[string] identifier[hull] = identifier[setup_hull] ( identifier[domain] , identifier[isDomainFinite] , identifier[abcissae] , identifier[hx] , identifier[hpx] , identifier[hxparams] ) identifier[out] =[] identifier[nupdates] = literal[int] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[int] ( identifier[nsamples] )): identifier[thissample] , identifier[hull] , identifier[nupdates] = identifier[sampleone] ( identifier[hull] , identifier[hx] , identifier[hpx] , identifier[domain] , identifier[isDomainFinite] , identifier[maxn] , identifier[nupdates] , identifier[hxparams] ) identifier[out] . identifier[append] ( identifier[thissample] ) keyword[return] identifier[out]
def bovy_ars(domain, isDomainFinite, abcissae, hx, hpx, nsamples=1, hxparams=(), maxn=100): """bovy_ars: Implementation of the Adaptive-Rejection Sampling algorithm by Gilks & Wild (1992): Adaptive Rejection Sampling for Gibbs Sampling, Applied Statistics, 41, 337 Based on Wild & Gilks (1993), Algorithm AS 287: Adaptive Rejection Sampling from Log-concave Density Functions, Applied Statistics, 42, 701 Input: domain - [.,.] upper and lower limit to the domain isDomainFinite - [.,.] is there a lower/upper limit to the domain? abcissae - initial list of abcissae (must lie on either side of the peak in hx if the domain is unbounded hx - function that evaluates h(x) = ln g(x) hpx - function that evaluates hp(x) = d h(x) / d x nsamples - (optional) number of desired samples (default=1) hxparams - (optional) a tuple of parameters for h(x) and h'(x) maxn - (optional) maximum number of updates to the hull (default=100) Output: list with nsamples of samples from exp(h(x)) External dependencies: math scipy scipy.stats History: 2009-05-21 - Written - Bovy (NYU) """ #First set-up the upper and lower hulls hull = setup_hull(domain, isDomainFinite, abcissae, hx, hpx, hxparams) #Then start sampling: call sampleone repeatedly out = [] nupdates = 0 for ii in range(int(nsamples)): (thissample, hull, nupdates) = sampleone(hull, hx, hpx, domain, isDomainFinite, maxn, nupdates, hxparams) out.append(thissample) # depends on [control=['for'], data=[]] return out
def local_minima(vector,min_distance = 4, brd_mode = "wrap"): """ Internal finder for local minima . Returns UNSORTED indices of minima in input vector. """ fits = gaussian_filter(numpy.asarray(vector,dtype=numpy.float32),1., mode=brd_mode) for ii in range(len(fits)): if fits[ii] == fits[ii-1]: fits[ii-1] = numpy.pi/2.0 minfits = minimum_filter(fits, size=min_distance, mode=brd_mode) minima_mask = fits == minfits minima = numpy.transpose(minima_mask.nonzero()) return numpy.asarray(minima)
def function[local_minima, parameter[vector, min_distance, brd_mode]]: constant[ Internal finder for local minima . Returns UNSORTED indices of minima in input vector. ] variable[fits] assign[=] call[name[gaussian_filter], parameter[call[name[numpy].asarray, parameter[name[vector]]], constant[1.0]]] for taget[name[ii]] in starred[call[name[range], parameter[call[name[len], parameter[name[fits]]]]]] begin[:] if compare[call[name[fits]][name[ii]] equal[==] call[name[fits]][binary_operation[name[ii] - constant[1]]]] begin[:] call[name[fits]][binary_operation[name[ii] - constant[1]]] assign[=] binary_operation[name[numpy].pi / constant[2.0]] variable[minfits] assign[=] call[name[minimum_filter], parameter[name[fits]]] variable[minima_mask] assign[=] compare[name[fits] equal[==] name[minfits]] variable[minima] assign[=] call[name[numpy].transpose, parameter[call[name[minima_mask].nonzero, parameter[]]]] return[call[name[numpy].asarray, parameter[name[minima]]]]
keyword[def] identifier[local_minima] ( identifier[vector] , identifier[min_distance] = literal[int] , identifier[brd_mode] = literal[string] ): literal[string] identifier[fits] = identifier[gaussian_filter] ( identifier[numpy] . identifier[asarray] ( identifier[vector] , identifier[dtype] = identifier[numpy] . identifier[float32] ), literal[int] , identifier[mode] = identifier[brd_mode] ) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[len] ( identifier[fits] )): keyword[if] identifier[fits] [ identifier[ii] ]== identifier[fits] [ identifier[ii] - literal[int] ]: identifier[fits] [ identifier[ii] - literal[int] ]= identifier[numpy] . identifier[pi] / literal[int] identifier[minfits] = identifier[minimum_filter] ( identifier[fits] , identifier[size] = identifier[min_distance] , identifier[mode] = identifier[brd_mode] ) identifier[minima_mask] = identifier[fits] == identifier[minfits] identifier[minima] = identifier[numpy] . identifier[transpose] ( identifier[minima_mask] . identifier[nonzero] ()) keyword[return] identifier[numpy] . identifier[asarray] ( identifier[minima] )
def local_minima(vector, min_distance=4, brd_mode='wrap'): """ Internal finder for local minima . Returns UNSORTED indices of minima in input vector. """ fits = gaussian_filter(numpy.asarray(vector, dtype=numpy.float32), 1.0, mode=brd_mode) for ii in range(len(fits)): if fits[ii] == fits[ii - 1]: fits[ii - 1] = numpy.pi / 2.0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ii']] minfits = minimum_filter(fits, size=min_distance, mode=brd_mode) minima_mask = fits == minfits minima = numpy.transpose(minima_mask.nonzero()) return numpy.asarray(minima)
def ignores(self, *args): """ :param args: Event objects :returns: None Any event that is ignored is acceptable but discarded """ for event in args: self._ignored.add(event.name)
def function[ignores, parameter[self]]: constant[ :param args: Event objects :returns: None Any event that is ignored is acceptable but discarded ] for taget[name[event]] in starred[name[args]] begin[:] call[name[self]._ignored.add, parameter[name[event].name]]
keyword[def] identifier[ignores] ( identifier[self] ,* identifier[args] ): literal[string] keyword[for] identifier[event] keyword[in] identifier[args] : identifier[self] . identifier[_ignored] . identifier[add] ( identifier[event] . identifier[name] )
def ignores(self, *args): """ :param args: Event objects :returns: None Any event that is ignored is acceptable but discarded """ for event in args: self._ignored.add(event.name) # depends on [control=['for'], data=['event']]
async def action_handler(self): """ Call vtep controller in sequence, merge mutiple calls if possible When a bind relationship is updated, we always send all logical ports to a logicalswitch, to make sure it recovers from some failed updates (so called idempotency). When multiple calls are pending, we only need to send the last of them. """ bind_event = VtepControllerCall.createMatcher(self._conn) event_queue = [] timeout_flag = [False] async def handle_action(): while event_queue or timeout_flag[0]: events = event_queue[:] del event_queue[:] for e in events: # every event must have physname , phyiname # physname: physical switch name - must be same with OVSDB-VTEP switch # phyiname: physical port name - must be same with the corresponding port physname = e.physname phyiname = e.phyiname if e.type == VtepControllerCall.UNBINDALL: # clear all other event info self._store_event[(physname,phyiname)] = {"all":e} elif e.type == VtepControllerCall.BIND: # bind will combine bind event before vlanid = e.vlanid if (physname,phyiname) in self._store_event: v = self._store_event[(physname,phyiname)] if vlanid in v: logicalports = e.logicalports v.update({vlanid:(e.type,e.logicalnetworkid,e.vni,logicalports)}) self._store_event[(physname,phyiname)] = v else: # new bind info , no combind event v.update({vlanid:(e.type,e.logicalnetworkid,e.vni,e.logicalports)}) self._store_event[(physname,phyiname)] = v else: self._store_event[(physname,phyiname)] = {vlanid:(e.type,e.logicalnetworkid, e.vni,e.logicalports)} elif e.type == VtepControllerCall.UNBIND: vlanid = e.vlanid if (physname,phyiname) in self._store_event: v = self._store_event[(physname,phyiname)] v.update({vlanid:(e.type,e.logicalnetworkid)}) self._store_event[(physname,phyiname)] = v else: self._store_event[(physname,phyiname)] = {vlanid:(e.type,e.logicalnetworkid)} else: self._parent._logger.warning("catch error type event %r , ignore it", exc_info=True) continue call = [] target_name = "vtepcontroller" for k,v in self._store_event.items(): if "all" in v: # send unbindall call.append(self.api(self,target_name,"unbindphysicalport", {"physicalswitch": k[0], "physicalport": k[1]}, timeout=10)) # unbindall , del it whatever del v["all"] try: await self.execute_all(call) except Exception: self._parent._logger.warning("unbindall remove call failed", exc_info=True) for k,v in self._store_event.items(): for vlanid , e in dict(v).items(): if vlanid != "all": if e[0] == VtepControllerCall.BIND: params = {"physicalswitch": k[0], "physicalport": k[1], "vlanid": vlanid, "logicalnetwork": e[1], "vni":e[2], "logicalports": e[3]} try: await self.api(self,target_name,"updatelogicalswitch", params,timeout=10) except Exception: self._parent._logger.warning("update logical switch error,try next %r",params, exc_info=True) else: del self._store_event[k][vlanid] elif e[0] == VtepControllerCall.UNBIND: params = {"logicalnetwork":e[1], "physicalswitch":k[0], "physicalport":k[1], "vlanid":vlanid} try: await self.api(self,target_name,"unbindlogicalswitch", params,timeout=10) except Exception: self._parent._logger.warning("unbind logical switch error,try next %r",params, exc_info=True) else: del self._store_event[k][vlanid] self._store_event = dict((k,v) for k,v in self._store_event.items() if v) if timeout_flag[0]: timeout_flag[0] = False def append_event(event, matcher): event_queue.append(event) while True: timeout, ev, m = await self.wait_with_timeout(10, bind_event) if not timeout: event_queue.append(ev) else: timeout_flag[0] = True await self.with_callback(handle_action(), append_event, bind_event)
<ast.AsyncFunctionDef object at 0x7da20c992980>
keyword[async] keyword[def] identifier[action_handler] ( identifier[self] ): literal[string] identifier[bind_event] = identifier[VtepControllerCall] . identifier[createMatcher] ( identifier[self] . identifier[_conn] ) identifier[event_queue] =[] identifier[timeout_flag] =[ keyword[False] ] keyword[async] keyword[def] identifier[handle_action] (): keyword[while] identifier[event_queue] keyword[or] identifier[timeout_flag] [ literal[int] ]: identifier[events] = identifier[event_queue] [:] keyword[del] identifier[event_queue] [:] keyword[for] identifier[e] keyword[in] identifier[events] : identifier[physname] = identifier[e] . identifier[physname] identifier[phyiname] = identifier[e] . identifier[phyiname] keyword[if] identifier[e] . identifier[type] == identifier[VtepControllerCall] . identifier[UNBINDALL] : identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )]={ literal[string] : identifier[e] } keyword[elif] identifier[e] . identifier[type] == identifier[VtepControllerCall] . identifier[BIND] : identifier[vlanid] = identifier[e] . identifier[vlanid] keyword[if] ( identifier[physname] , identifier[phyiname] ) keyword[in] identifier[self] . identifier[_store_event] : identifier[v] = identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )] keyword[if] identifier[vlanid] keyword[in] identifier[v] : identifier[logicalports] = identifier[e] . identifier[logicalports] identifier[v] . identifier[update] ({ identifier[vlanid] :( identifier[e] . identifier[type] , identifier[e] . identifier[logicalnetworkid] , identifier[e] . identifier[vni] , identifier[logicalports] )}) identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )]= identifier[v] keyword[else] : identifier[v] . identifier[update] ({ identifier[vlanid] :( identifier[e] . identifier[type] , identifier[e] . identifier[logicalnetworkid] , identifier[e] . identifier[vni] , identifier[e] . identifier[logicalports] )}) identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )]= identifier[v] keyword[else] : identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )]={ identifier[vlanid] :( identifier[e] . identifier[type] , identifier[e] . identifier[logicalnetworkid] , identifier[e] . identifier[vni] , identifier[e] . identifier[logicalports] )} keyword[elif] identifier[e] . identifier[type] == identifier[VtepControllerCall] . identifier[UNBIND] : identifier[vlanid] = identifier[e] . identifier[vlanid] keyword[if] ( identifier[physname] , identifier[phyiname] ) keyword[in] identifier[self] . identifier[_store_event] : identifier[v] = identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )] identifier[v] . identifier[update] ({ identifier[vlanid] :( identifier[e] . identifier[type] , identifier[e] . identifier[logicalnetworkid] )}) identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )]= identifier[v] keyword[else] : identifier[self] . identifier[_store_event] [( identifier[physname] , identifier[phyiname] )]={ identifier[vlanid] :( identifier[e] . identifier[type] , identifier[e] . identifier[logicalnetworkid] )} keyword[else] : identifier[self] . identifier[_parent] . identifier[_logger] . identifier[warning] ( literal[string] , identifier[exc_info] = keyword[True] ) keyword[continue] identifier[call] =[] identifier[target_name] = literal[string] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_store_event] . identifier[items] (): keyword[if] literal[string] keyword[in] identifier[v] : identifier[call] . identifier[append] ( identifier[self] . identifier[api] ( identifier[self] , identifier[target_name] , literal[string] , { literal[string] : identifier[k] [ literal[int] ], literal[string] : identifier[k] [ literal[int] ]}, identifier[timeout] = literal[int] )) keyword[del] identifier[v] [ literal[string] ] keyword[try] : keyword[await] identifier[self] . identifier[execute_all] ( identifier[call] ) keyword[except] identifier[Exception] : identifier[self] . identifier[_parent] . identifier[_logger] . identifier[warning] ( literal[string] , identifier[exc_info] = keyword[True] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_store_event] . identifier[items] (): keyword[for] identifier[vlanid] , identifier[e] keyword[in] identifier[dict] ( identifier[v] ). identifier[items] (): keyword[if] identifier[vlanid] != literal[string] : keyword[if] identifier[e] [ literal[int] ]== identifier[VtepControllerCall] . identifier[BIND] : identifier[params] ={ literal[string] : identifier[k] [ literal[int] ], literal[string] : identifier[k] [ literal[int] ], literal[string] : identifier[vlanid] , literal[string] : identifier[e] [ literal[int] ], literal[string] : identifier[e] [ literal[int] ], literal[string] : identifier[e] [ literal[int] ]} keyword[try] : keyword[await] identifier[self] . identifier[api] ( identifier[self] , identifier[target_name] , literal[string] , identifier[params] , identifier[timeout] = literal[int] ) keyword[except] identifier[Exception] : identifier[self] . identifier[_parent] . identifier[_logger] . identifier[warning] ( literal[string] , identifier[params] , identifier[exc_info] = keyword[True] ) keyword[else] : keyword[del] identifier[self] . identifier[_store_event] [ identifier[k] ][ identifier[vlanid] ] keyword[elif] identifier[e] [ literal[int] ]== identifier[VtepControllerCall] . identifier[UNBIND] : identifier[params] ={ literal[string] : identifier[e] [ literal[int] ], literal[string] : identifier[k] [ literal[int] ], literal[string] : identifier[k] [ literal[int] ], literal[string] : identifier[vlanid] } keyword[try] : keyword[await] identifier[self] . identifier[api] ( identifier[self] , identifier[target_name] , literal[string] , identifier[params] , identifier[timeout] = literal[int] ) keyword[except] identifier[Exception] : identifier[self] . identifier[_parent] . identifier[_logger] . identifier[warning] ( literal[string] , identifier[params] , identifier[exc_info] = keyword[True] ) keyword[else] : keyword[del] identifier[self] . identifier[_store_event] [ identifier[k] ][ identifier[vlanid] ] identifier[self] . identifier[_store_event] = identifier[dict] (( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_store_event] . identifier[items] () keyword[if] identifier[v] ) keyword[if] identifier[timeout_flag] [ literal[int] ]: identifier[timeout_flag] [ literal[int] ]= keyword[False] keyword[def] identifier[append_event] ( identifier[event] , identifier[matcher] ): identifier[event_queue] . identifier[append] ( identifier[event] ) keyword[while] keyword[True] : identifier[timeout] , identifier[ev] , identifier[m] = keyword[await] identifier[self] . identifier[wait_with_timeout] ( literal[int] , identifier[bind_event] ) keyword[if] keyword[not] identifier[timeout] : identifier[event_queue] . identifier[append] ( identifier[ev] ) keyword[else] : identifier[timeout_flag] [ literal[int] ]= keyword[True] keyword[await] identifier[self] . identifier[with_callback] ( identifier[handle_action] (), identifier[append_event] , identifier[bind_event] )
async def action_handler(self): """ Call vtep controller in sequence, merge mutiple calls if possible When a bind relationship is updated, we always send all logical ports to a logicalswitch, to make sure it recovers from some failed updates (so called idempotency). When multiple calls are pending, we only need to send the last of them. """ bind_event = VtepControllerCall.createMatcher(self._conn) event_queue = [] timeout_flag = [False] async def handle_action(): while event_queue or timeout_flag[0]: events = event_queue[:] del event_queue[:] for e in events: # every event must have physname , phyiname # physname: physical switch name - must be same with OVSDB-VTEP switch # phyiname: physical port name - must be same with the corresponding port physname = e.physname phyiname = e.phyiname if e.type == VtepControllerCall.UNBINDALL: # clear all other event info self._store_event[physname, phyiname] = {'all': e} # depends on [control=['if'], data=[]] elif e.type == VtepControllerCall.BIND: # bind will combine bind event before vlanid = e.vlanid if (physname, phyiname) in self._store_event: v = self._store_event[physname, phyiname] if vlanid in v: logicalports = e.logicalports v.update({vlanid: (e.type, e.logicalnetworkid, e.vni, logicalports)}) self._store_event[physname, phyiname] = v # depends on [control=['if'], data=['vlanid', 'v']] else: # new bind info , no combind event v.update({vlanid: (e.type, e.logicalnetworkid, e.vni, e.logicalports)}) self._store_event[physname, phyiname] = v # depends on [control=['if'], data=[]] else: self._store_event[physname, phyiname] = {vlanid: (e.type, e.logicalnetworkid, e.vni, e.logicalports)} # depends on [control=['if'], data=[]] elif e.type == VtepControllerCall.UNBIND: vlanid = e.vlanid if (physname, phyiname) in self._store_event: v = self._store_event[physname, phyiname] v.update({vlanid: (e.type, e.logicalnetworkid)}) self._store_event[physname, phyiname] = v # depends on [control=['if'], data=[]] else: self._store_event[physname, phyiname] = {vlanid: (e.type, e.logicalnetworkid)} # depends on [control=['if'], data=[]] else: self._parent._logger.warning('catch error type event %r , ignore it', exc_info=True) continue # depends on [control=['for'], data=['e']] call = [] target_name = 'vtepcontroller' for (k, v) in self._store_event.items(): if 'all' in v: # send unbindall call.append(self.api(self, target_name, 'unbindphysicalport', {'physicalswitch': k[0], 'physicalport': k[1]}, timeout=10)) # unbindall , del it whatever del v['all'] # depends on [control=['if'], data=['v']] # depends on [control=['for'], data=[]] try: await self.execute_all(call) # depends on [control=['try'], data=[]] except Exception: self._parent._logger.warning('unbindall remove call failed', exc_info=True) # depends on [control=['except'], data=[]] for (k, v) in self._store_event.items(): for (vlanid, e) in dict(v).items(): if vlanid != 'all': if e[0] == VtepControllerCall.BIND: params = {'physicalswitch': k[0], 'physicalport': k[1], 'vlanid': vlanid, 'logicalnetwork': e[1], 'vni': e[2], 'logicalports': e[3]} try: await self.api(self, target_name, 'updatelogicalswitch', params, timeout=10) # depends on [control=['try'], data=[]] except Exception: self._parent._logger.warning('update logical switch error,try next %r', params, exc_info=True) # depends on [control=['except'], data=[]] else: del self._store_event[k][vlanid] # depends on [control=['if'], data=[]] elif e[0] == VtepControllerCall.UNBIND: params = {'logicalnetwork': e[1], 'physicalswitch': k[0], 'physicalport': k[1], 'vlanid': vlanid} try: await self.api(self, target_name, 'unbindlogicalswitch', params, timeout=10) # depends on [control=['try'], data=[]] except Exception: self._parent._logger.warning('unbind logical switch error,try next %r', params, exc_info=True) # depends on [control=['except'], data=[]] else: del self._store_event[k][vlanid] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['vlanid']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] self._store_event = dict(((k, v) for (k, v) in self._store_event.items() if v)) if timeout_flag[0]: timeout_flag[0] = False # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] def append_event(event, matcher): event_queue.append(event) while True: (timeout, ev, m) = await self.wait_with_timeout(10, bind_event) if not timeout: event_queue.append(ev) # depends on [control=['if'], data=[]] else: timeout_flag[0] = True await self.with_callback(handle_action(), append_event, bind_event) # depends on [control=['while'], data=[]]
def postinit(self, value=None, slice=None): """Do some setup after initialisation. :param value: What is being indexed. :type value: NodeNG or None :param slice: The slice being used to lookup. :type slice: NodeNG or None """ self.value = value self.slice = slice
def function[postinit, parameter[self, value, slice]]: constant[Do some setup after initialisation. :param value: What is being indexed. :type value: NodeNG or None :param slice: The slice being used to lookup. :type slice: NodeNG or None ] name[self].value assign[=] name[value] name[self].slice assign[=] name[slice]
keyword[def] identifier[postinit] ( identifier[self] , identifier[value] = keyword[None] , identifier[slice] = keyword[None] ): literal[string] identifier[self] . identifier[value] = identifier[value] identifier[self] . identifier[slice] = identifier[slice]
def postinit(self, value=None, slice=None): """Do some setup after initialisation. :param value: What is being indexed. :type value: NodeNG or None :param slice: The slice being used to lookup. :type slice: NodeNG or None """ self.value = value self.slice = slice
def rename(dct, mapping): """ Rename the keys of a dictionary with the given mapping >>> rename({"a": 1, "BBB": 2}, {"a": "AAA"}) {'AAA': 1, 'BBB': 2} """ def _block(memo, key): if key in dct: memo[mapping[key]] = dct[key] return memo else: return memo return reduce(_block, mapping, omit(dct, *mapping.keys()))
def function[rename, parameter[dct, mapping]]: constant[ Rename the keys of a dictionary with the given mapping >>> rename({"a": 1, "BBB": 2}, {"a": "AAA"}) {'AAA': 1, 'BBB': 2} ] def function[_block, parameter[memo, key]]: if compare[name[key] in name[dct]] begin[:] call[name[memo]][call[name[mapping]][name[key]]] assign[=] call[name[dct]][name[key]] return[name[memo]] return[call[name[reduce], parameter[name[_block], name[mapping], call[name[omit], parameter[name[dct], <ast.Starred object at 0x7da1b25d1de0>]]]]]
keyword[def] identifier[rename] ( identifier[dct] , identifier[mapping] ): literal[string] keyword[def] identifier[_block] ( identifier[memo] , identifier[key] ): keyword[if] identifier[key] keyword[in] identifier[dct] : identifier[memo] [ identifier[mapping] [ identifier[key] ]]= identifier[dct] [ identifier[key] ] keyword[return] identifier[memo] keyword[else] : keyword[return] identifier[memo] keyword[return] identifier[reduce] ( identifier[_block] , identifier[mapping] , identifier[omit] ( identifier[dct] ,* identifier[mapping] . identifier[keys] ()))
def rename(dct, mapping): """ Rename the keys of a dictionary with the given mapping >>> rename({"a": 1, "BBB": 2}, {"a": "AAA"}) {'AAA': 1, 'BBB': 2} """ def _block(memo, key): if key in dct: memo[mapping[key]] = dct[key] return memo # depends on [control=['if'], data=['key', 'dct']] else: return memo return reduce(_block, mapping, omit(dct, *mapping.keys()))
def get_config(args): """ Method to get the correct configuration file for a set of command line arguments. Takes into account --config-file, --plugins, -- """ options, _ = defaults.DEFAULT_OPTIONS.parse(args, ignore_errors=True) read_config = not options.skip_default_config cfg = CoverageReporterConfig(read_config) for path in options.config_file: cfg.read(path) cfg.plugin_dirs.extend(options.plugin_dir) cfg.plugins.extend(options.plugin) return cfg
def function[get_config, parameter[args]]: constant[ Method to get the correct configuration file for a set of command line arguments. Takes into account --config-file, --plugins, -- ] <ast.Tuple object at 0x7da2054a4490> assign[=] call[name[defaults].DEFAULT_OPTIONS.parse, parameter[name[args]]] variable[read_config] assign[=] <ast.UnaryOp object at 0x7da18eb56650> variable[cfg] assign[=] call[name[CoverageReporterConfig], parameter[name[read_config]]] for taget[name[path]] in starred[name[options].config_file] begin[:] call[name[cfg].read, parameter[name[path]]] call[name[cfg].plugin_dirs.extend, parameter[name[options].plugin_dir]] call[name[cfg].plugins.extend, parameter[name[options].plugin]] return[name[cfg]]
keyword[def] identifier[get_config] ( identifier[args] ): literal[string] identifier[options] , identifier[_] = identifier[defaults] . identifier[DEFAULT_OPTIONS] . identifier[parse] ( identifier[args] , identifier[ignore_errors] = keyword[True] ) identifier[read_config] = keyword[not] identifier[options] . identifier[skip_default_config] identifier[cfg] = identifier[CoverageReporterConfig] ( identifier[read_config] ) keyword[for] identifier[path] keyword[in] identifier[options] . identifier[config_file] : identifier[cfg] . identifier[read] ( identifier[path] ) identifier[cfg] . identifier[plugin_dirs] . identifier[extend] ( identifier[options] . identifier[plugin_dir] ) identifier[cfg] . identifier[plugins] . identifier[extend] ( identifier[options] . identifier[plugin] ) keyword[return] identifier[cfg]
def get_config(args): """ Method to get the correct configuration file for a set of command line arguments. Takes into account --config-file, --plugins, -- """ (options, _) = defaults.DEFAULT_OPTIONS.parse(args, ignore_errors=True) read_config = not options.skip_default_config cfg = CoverageReporterConfig(read_config) for path in options.config_file: cfg.read(path) # depends on [control=['for'], data=['path']] cfg.plugin_dirs.extend(options.plugin_dir) cfg.plugins.extend(options.plugin) return cfg
def add_edge(self, agent_id1, agent_id2, edge_attr_dict=None, *edge_attrs): """ Add an edge between agent_id1 and agent_id2. agent_id1 and agent_id2 correspond to Networkx node IDs. This is a wrapper for the Networkx.Graph method `.add_edge`. Agents agent_id1 and agent_id2 will be automatically added if they are not already present in the graph. Edge attributes can be specified using keywords or passing a dictionary with key-value pairs Parameters ---------- agent_id1, agent_id2 : nodes Nodes (as defined by Networkx) can be any hashable type except NoneType edge_attr_dict : dictionary, optional (default = no attributes) Dictionary of edge attributes. Assigns values to specified keyword attributes and overwrites them if already present. edge_attrs : keyword arguments, optional Edge attributes such as labels can be assigned directly using keyowrd arguments """ if agent_id1 in self.global_topology.nodes(data=False): if agent_id2 in self.global_topology.nodes(data=False): self.global_topology.add_edge(agent_id1, agent_id2, edge_attr_dict=edge_attr_dict, *edge_attrs) else: raise ValueError('\'agent_id2\'[{}] not in list of existing agents in the network'.format(agent_id2)) else: raise ValueError('\'agent_id1\'[{}] not in list of existing agents in the network'.format(agent_id1))
def function[add_edge, parameter[self, agent_id1, agent_id2, edge_attr_dict]]: constant[ Add an edge between agent_id1 and agent_id2. agent_id1 and agent_id2 correspond to Networkx node IDs. This is a wrapper for the Networkx.Graph method `.add_edge`. Agents agent_id1 and agent_id2 will be automatically added if they are not already present in the graph. Edge attributes can be specified using keywords or passing a dictionary with key-value pairs Parameters ---------- agent_id1, agent_id2 : nodes Nodes (as defined by Networkx) can be any hashable type except NoneType edge_attr_dict : dictionary, optional (default = no attributes) Dictionary of edge attributes. Assigns values to specified keyword attributes and overwrites them if already present. edge_attrs : keyword arguments, optional Edge attributes such as labels can be assigned directly using keyowrd arguments ] if compare[name[agent_id1] in call[name[self].global_topology.nodes, parameter[]]] begin[:] if compare[name[agent_id2] in call[name[self].global_topology.nodes, parameter[]]] begin[:] call[name[self].global_topology.add_edge, parameter[name[agent_id1], name[agent_id2], <ast.Starred object at 0x7da1b25d1150>]]
keyword[def] identifier[add_edge] ( identifier[self] , identifier[agent_id1] , identifier[agent_id2] , identifier[edge_attr_dict] = keyword[None] ,* identifier[edge_attrs] ): literal[string] keyword[if] identifier[agent_id1] keyword[in] identifier[self] . identifier[global_topology] . identifier[nodes] ( identifier[data] = keyword[False] ): keyword[if] identifier[agent_id2] keyword[in] identifier[self] . identifier[global_topology] . identifier[nodes] ( identifier[data] = keyword[False] ): identifier[self] . identifier[global_topology] . identifier[add_edge] ( identifier[agent_id1] , identifier[agent_id2] , identifier[edge_attr_dict] = identifier[edge_attr_dict] ,* identifier[edge_attrs] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[agent_id2] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[agent_id1] ))
def add_edge(self, agent_id1, agent_id2, edge_attr_dict=None, *edge_attrs): """ Add an edge between agent_id1 and agent_id2. agent_id1 and agent_id2 correspond to Networkx node IDs. This is a wrapper for the Networkx.Graph method `.add_edge`. Agents agent_id1 and agent_id2 will be automatically added if they are not already present in the graph. Edge attributes can be specified using keywords or passing a dictionary with key-value pairs Parameters ---------- agent_id1, agent_id2 : nodes Nodes (as defined by Networkx) can be any hashable type except NoneType edge_attr_dict : dictionary, optional (default = no attributes) Dictionary of edge attributes. Assigns values to specified keyword attributes and overwrites them if already present. edge_attrs : keyword arguments, optional Edge attributes such as labels can be assigned directly using keyowrd arguments """ if agent_id1 in self.global_topology.nodes(data=False): if agent_id2 in self.global_topology.nodes(data=False): self.global_topology.add_edge(agent_id1, agent_id2, *edge_attrs, edge_attr_dict=edge_attr_dict) # depends on [control=['if'], data=['agent_id2']] else: raise ValueError("'agent_id2'[{}] not in list of existing agents in the network".format(agent_id2)) # depends on [control=['if'], data=['agent_id1']] else: raise ValueError("'agent_id1'[{}] not in list of existing agents in the network".format(agent_id1))
def validate(self, value): "Check if value consists only of valid emails." # Use the parent's handling of required fields, etc. super(MultiEmailField, self).validate(value) for email in value: validate_email(email)
def function[validate, parameter[self, value]]: constant[Check if value consists only of valid emails.] call[call[name[super], parameter[name[MultiEmailField], name[self]]].validate, parameter[name[value]]] for taget[name[email]] in starred[name[value]] begin[:] call[name[validate_email], parameter[name[email]]]
keyword[def] identifier[validate] ( identifier[self] , identifier[value] ): literal[string] identifier[super] ( identifier[MultiEmailField] , identifier[self] ). identifier[validate] ( identifier[value] ) keyword[for] identifier[email] keyword[in] identifier[value] : identifier[validate_email] ( identifier[email] )
def validate(self, value): """Check if value consists only of valid emails.""" # Use the parent's handling of required fields, etc. super(MultiEmailField, self).validate(value) for email in value: validate_email(email) # depends on [control=['for'], data=['email']]
def is_in(self, search_list, pair): """ If pair is in search_list, return the index. Otherwise return -1 """ index = -1 for nr, i in enumerate(search_list): if(np.all(i == pair)): return nr return index
def function[is_in, parameter[self, search_list, pair]]: constant[ If pair is in search_list, return the index. Otherwise return -1 ] variable[index] assign[=] <ast.UnaryOp object at 0x7da18bccae60> for taget[tuple[[<ast.Name object at 0x7da18bcc8940>, <ast.Name object at 0x7da18bcc8070>]]] in starred[call[name[enumerate], parameter[name[search_list]]]] begin[:] if call[name[np].all, parameter[compare[name[i] equal[==] name[pair]]]] begin[:] return[name[nr]] return[name[index]]
keyword[def] identifier[is_in] ( identifier[self] , identifier[search_list] , identifier[pair] ): literal[string] identifier[index] =- literal[int] keyword[for] identifier[nr] , identifier[i] keyword[in] identifier[enumerate] ( identifier[search_list] ): keyword[if] ( identifier[np] . identifier[all] ( identifier[i] == identifier[pair] )): keyword[return] identifier[nr] keyword[return] identifier[index]
def is_in(self, search_list, pair): """ If pair is in search_list, return the index. Otherwise return -1 """ index = -1 for (nr, i) in enumerate(search_list): if np.all(i == pair): return nr # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return index
def __split_genomic_interval_filename(fn): """ Split a filename of the format chrom:start-end.ext or chrom.ext (full chrom). :return: tuple of (chrom, start, end) -- 'start' and 'end' are None if not present in the filename. """ if fn is None or fn == "": raise ValueError("invalid filename: " + str(fn)) fn = ".".join(fn.split(".")[:-1]) parts = fn.split(":") if len(parts) == 1: return (parts[0].strip(), None, None) else: r_parts = parts[1].split("-") if len(r_parts) != 2: raise ValueError("Invalid filename: " + str(fn)) return (parts[0].strip(), int(r_parts[0]), int(r_parts[1]))
def function[__split_genomic_interval_filename, parameter[fn]]: constant[ Split a filename of the format chrom:start-end.ext or chrom.ext (full chrom). :return: tuple of (chrom, start, end) -- 'start' and 'end' are None if not present in the filename. ] if <ast.BoolOp object at 0x7da2054a6bc0> begin[:] <ast.Raise object at 0x7da2054a4ca0> variable[fn] assign[=] call[constant[.].join, parameter[call[call[name[fn].split, parameter[constant[.]]]][<ast.Slice object at 0x7da2054a47c0>]]] variable[parts] assign[=] call[name[fn].split, parameter[constant[:]]] if compare[call[name[len], parameter[name[parts]]] equal[==] constant[1]] begin[:] return[tuple[[<ast.Call object at 0x7da18eb57df0>, <ast.Constant object at 0x7da18eb54dc0>, <ast.Constant object at 0x7da18eb55de0>]]]
keyword[def] identifier[__split_genomic_interval_filename] ( identifier[fn] ): literal[string] keyword[if] identifier[fn] keyword[is] keyword[None] keyword[or] identifier[fn] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[fn] )) identifier[fn] = literal[string] . identifier[join] ( identifier[fn] . identifier[split] ( literal[string] )[:- literal[int] ]) identifier[parts] = identifier[fn] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[parts] )== literal[int] : keyword[return] ( identifier[parts] [ literal[int] ]. identifier[strip] (), keyword[None] , keyword[None] ) keyword[else] : identifier[r_parts] = identifier[parts] [ literal[int] ]. identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[r_parts] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[fn] )) keyword[return] ( identifier[parts] [ literal[int] ]. identifier[strip] (), identifier[int] ( identifier[r_parts] [ literal[int] ]), identifier[int] ( identifier[r_parts] [ literal[int] ]))
def __split_genomic_interval_filename(fn): """ Split a filename of the format chrom:start-end.ext or chrom.ext (full chrom). :return: tuple of (chrom, start, end) -- 'start' and 'end' are None if not present in the filename. """ if fn is None or fn == '': raise ValueError('invalid filename: ' + str(fn)) # depends on [control=['if'], data=[]] fn = '.'.join(fn.split('.')[:-1]) parts = fn.split(':') if len(parts) == 1: return (parts[0].strip(), None, None) # depends on [control=['if'], data=[]] else: r_parts = parts[1].split('-') if len(r_parts) != 2: raise ValueError('Invalid filename: ' + str(fn)) # depends on [control=['if'], data=[]] return (parts[0].strip(), int(r_parts[0]), int(r_parts[1]))
def pf_to_n(L, pf, R): """Returns the number of non-intersecting spheres required to achieve as close to a given packing fraction as possible, along with the actual achieved packing fraction. for a number of non-intersecting spheres. Parameters ---------- L: float array, shape (d,) System lengths. pf: float Fraction of space to be occupied by the spheres. R: float Sphere radius. Returns ------- n: integer Number of spheres required to achieve a packing fraction `pf_actual` pf_actual: Fraction of space occupied by `n` spheres. This is the closest possible fraction achievable to `pf`. """ dim = L.shape[0] n = int(round(pf * np.product(L) / sphere_volume(R, dim))) pf_actual = n_to_pf(L, n, R) return n, pf_actual
def function[pf_to_n, parameter[L, pf, R]]: constant[Returns the number of non-intersecting spheres required to achieve as close to a given packing fraction as possible, along with the actual achieved packing fraction. for a number of non-intersecting spheres. Parameters ---------- L: float array, shape (d,) System lengths. pf: float Fraction of space to be occupied by the spheres. R: float Sphere radius. Returns ------- n: integer Number of spheres required to achieve a packing fraction `pf_actual` pf_actual: Fraction of space occupied by `n` spheres. This is the closest possible fraction achievable to `pf`. ] variable[dim] assign[=] call[name[L].shape][constant[0]] variable[n] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[name[pf] * call[name[np].product, parameter[name[L]]]] / call[name[sphere_volume], parameter[name[R], name[dim]]]]]]]] variable[pf_actual] assign[=] call[name[n_to_pf], parameter[name[L], name[n], name[R]]] return[tuple[[<ast.Name object at 0x7da1b1435630>, <ast.Name object at 0x7da1b1436590>]]]
keyword[def] identifier[pf_to_n] ( identifier[L] , identifier[pf] , identifier[R] ): literal[string] identifier[dim] = identifier[L] . identifier[shape] [ literal[int] ] identifier[n] = identifier[int] ( identifier[round] ( identifier[pf] * identifier[np] . identifier[product] ( identifier[L] )/ identifier[sphere_volume] ( identifier[R] , identifier[dim] ))) identifier[pf_actual] = identifier[n_to_pf] ( identifier[L] , identifier[n] , identifier[R] ) keyword[return] identifier[n] , identifier[pf_actual]
def pf_to_n(L, pf, R): """Returns the number of non-intersecting spheres required to achieve as close to a given packing fraction as possible, along with the actual achieved packing fraction. for a number of non-intersecting spheres. Parameters ---------- L: float array, shape (d,) System lengths. pf: float Fraction of space to be occupied by the spheres. R: float Sphere radius. Returns ------- n: integer Number of spheres required to achieve a packing fraction `pf_actual` pf_actual: Fraction of space occupied by `n` spheres. This is the closest possible fraction achievable to `pf`. """ dim = L.shape[0] n = int(round(pf * np.product(L) / sphere_volume(R, dim))) pf_actual = n_to_pf(L, n, R) return (n, pf_actual)
def delete_beacon(self, name): ''' Delete a beacon item ''' if name in self._get_beacons(include_opts=False): comment = 'Cannot delete beacon item {0}, ' \ 'it is configured in pillar.'.format(name) complete = False else: if name in self.opts['beacons']: del self.opts['beacons'][name] comment = 'Deleting beacon item: {0}'.format(name) else: comment = 'Beacon item {0} not found.'.format(name) complete = True # Fire the complete event back along with updated list of beacons evt = salt.utils.event.get_event('minion', opts=self.opts) evt.fire_event({'complete': complete, 'comment': comment, 'beacons': self.opts['beacons']}, tag='/salt/minion/minion_beacon_delete_complete') return True
def function[delete_beacon, parameter[self, name]]: constant[ Delete a beacon item ] if compare[name[name] in call[name[self]._get_beacons, parameter[]]] begin[:] variable[comment] assign[=] call[constant[Cannot delete beacon item {0}, it is configured in pillar.].format, parameter[name[name]]] variable[complete] assign[=] constant[False] variable[evt] assign[=] call[name[salt].utils.event.get_event, parameter[constant[minion]]] call[name[evt].fire_event, parameter[dictionary[[<ast.Constant object at 0x7da1b20efa30>, <ast.Constant object at 0x7da1b20edf60>, <ast.Constant object at 0x7da1b20efa60>], [<ast.Name object at 0x7da1b20ef2b0>, <ast.Name object at 0x7da1b20ee4a0>, <ast.Subscript object at 0x7da1b20edd20>]]]] return[constant[True]]
keyword[def] identifier[delete_beacon] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_get_beacons] ( identifier[include_opts] = keyword[False] ): identifier[comment] = literal[string] literal[string] . identifier[format] ( identifier[name] ) identifier[complete] = keyword[False] keyword[else] : keyword[if] identifier[name] keyword[in] identifier[self] . identifier[opts] [ literal[string] ]: keyword[del] identifier[self] . identifier[opts] [ literal[string] ][ identifier[name] ] identifier[comment] = literal[string] . identifier[format] ( identifier[name] ) keyword[else] : identifier[comment] = literal[string] . identifier[format] ( identifier[name] ) identifier[complete] = keyword[True] identifier[evt] = identifier[salt] . identifier[utils] . identifier[event] . identifier[get_event] ( literal[string] , identifier[opts] = identifier[self] . identifier[opts] ) identifier[evt] . identifier[fire_event] ({ literal[string] : identifier[complete] , literal[string] : identifier[comment] , literal[string] : identifier[self] . identifier[opts] [ literal[string] ]}, identifier[tag] = literal[string] ) keyword[return] keyword[True]
def delete_beacon(self, name): """ Delete a beacon item """ if name in self._get_beacons(include_opts=False): comment = 'Cannot delete beacon item {0}, it is configured in pillar.'.format(name) complete = False # depends on [control=['if'], data=['name']] else: if name in self.opts['beacons']: del self.opts['beacons'][name] comment = 'Deleting beacon item: {0}'.format(name) # depends on [control=['if'], data=['name']] else: comment = 'Beacon item {0} not found.'.format(name) complete = True # Fire the complete event back along with updated list of beacons evt = salt.utils.event.get_event('minion', opts=self.opts) evt.fire_event({'complete': complete, 'comment': comment, 'beacons': self.opts['beacons']}, tag='/salt/minion/minion_beacon_delete_complete') return True
def start(dashboards, once, secrets): """Display a dashboard from the dashboard file(s) provided in the DASHBOARDS Paths and/or URLs for dashboards (URLs must secrets with http or https) """ if secrets is None: secrets = os.path.join(os.path.expanduser("~"), "/.doodledashboard/secrets") try: loaded_secrets = try_read_secrets_file(secrets) except InvalidSecretsException as err: click.echo(get_error_message(err, default="Secrets file is invalid"), err=True) raise click.Abort() read_configs = [""" dashboard: display: type: console """] for dashboard_file in dashboards: read_configs.append(read_file(dashboard_file)) dashboard_config = DashboardConfigReader(initialise_component_loader(), loaded_secrets) try: dashboard = read_dashboard_from_config(dashboard_config, read_configs) except YAMLError as err: click.echo(get_error_message(err, default="Dashboard configuration is invalid"), err=True) raise click.Abort() try: DashboardValidator().validate(dashboard) except ValidationException as err: click.echo(get_error_message(err, default="Dashboard configuration is invalid"), err=True) raise click.Abort() explain_dashboard(dashboard) click.echo("Dashboard running...") while True: try: DashboardRunner(dashboard).cycle() except SecretNotFound as err: click.echo(get_error_message(err, default="Datafeed didn't have required secret"), err=True) raise click.Abort() if once: break
def function[start, parameter[dashboards, once, secrets]]: constant[Display a dashboard from the dashboard file(s) provided in the DASHBOARDS Paths and/or URLs for dashboards (URLs must secrets with http or https) ] if compare[name[secrets] is constant[None]] begin[:] variable[secrets] assign[=] call[name[os].path.join, parameter[call[name[os].path.expanduser, parameter[constant[~]]], constant[/.doodledashboard/secrets]]] <ast.Try object at 0x7da1b021c970> variable[read_configs] assign[=] list[[<ast.Constant object at 0x7da1b021d2d0>]] for taget[name[dashboard_file]] in starred[name[dashboards]] begin[:] call[name[read_configs].append, parameter[call[name[read_file], parameter[name[dashboard_file]]]]] variable[dashboard_config] assign[=] call[name[DashboardConfigReader], parameter[call[name[initialise_component_loader], parameter[]], name[loaded_secrets]]] <ast.Try object at 0x7da1b021d420> <ast.Try object at 0x7da1b021d180> call[name[explain_dashboard], parameter[name[dashboard]]] call[name[click].echo, parameter[constant[Dashboard running...]]] while constant[True] begin[:] <ast.Try object at 0x7da1b021fc40> if name[once] begin[:] break
keyword[def] identifier[start] ( identifier[dashboards] , identifier[once] , identifier[secrets] ): literal[string] keyword[if] identifier[secrets] keyword[is] keyword[None] : identifier[secrets] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ), literal[string] ) keyword[try] : identifier[loaded_secrets] = identifier[try_read_secrets_file] ( identifier[secrets] ) keyword[except] identifier[InvalidSecretsException] keyword[as] identifier[err] : identifier[click] . identifier[echo] ( identifier[get_error_message] ( identifier[err] , identifier[default] = literal[string] ), identifier[err] = keyword[True] ) keyword[raise] identifier[click] . identifier[Abort] () identifier[read_configs] =[ literal[string] ] keyword[for] identifier[dashboard_file] keyword[in] identifier[dashboards] : identifier[read_configs] . identifier[append] ( identifier[read_file] ( identifier[dashboard_file] )) identifier[dashboard_config] = identifier[DashboardConfigReader] ( identifier[initialise_component_loader] (), identifier[loaded_secrets] ) keyword[try] : identifier[dashboard] = identifier[read_dashboard_from_config] ( identifier[dashboard_config] , identifier[read_configs] ) keyword[except] identifier[YAMLError] keyword[as] identifier[err] : identifier[click] . identifier[echo] ( identifier[get_error_message] ( identifier[err] , identifier[default] = literal[string] ), identifier[err] = keyword[True] ) keyword[raise] identifier[click] . identifier[Abort] () keyword[try] : identifier[DashboardValidator] (). identifier[validate] ( identifier[dashboard] ) keyword[except] identifier[ValidationException] keyword[as] identifier[err] : identifier[click] . identifier[echo] ( identifier[get_error_message] ( identifier[err] , identifier[default] = literal[string] ), identifier[err] = keyword[True] ) keyword[raise] identifier[click] . identifier[Abort] () identifier[explain_dashboard] ( identifier[dashboard] ) identifier[click] . identifier[echo] ( literal[string] ) keyword[while] keyword[True] : keyword[try] : identifier[DashboardRunner] ( identifier[dashboard] ). identifier[cycle] () keyword[except] identifier[SecretNotFound] keyword[as] identifier[err] : identifier[click] . identifier[echo] ( identifier[get_error_message] ( identifier[err] , identifier[default] = literal[string] ), identifier[err] = keyword[True] ) keyword[raise] identifier[click] . identifier[Abort] () keyword[if] identifier[once] : keyword[break]
def start(dashboards, once, secrets): """Display a dashboard from the dashboard file(s) provided in the DASHBOARDS Paths and/or URLs for dashboards (URLs must secrets with http or https) """ if secrets is None: secrets = os.path.join(os.path.expanduser('~'), '/.doodledashboard/secrets') # depends on [control=['if'], data=['secrets']] try: loaded_secrets = try_read_secrets_file(secrets) # depends on [control=['try'], data=[]] except InvalidSecretsException as err: click.echo(get_error_message(err, default='Secrets file is invalid'), err=True) raise click.Abort() # depends on [control=['except'], data=['err']] read_configs = ['\n dashboard:\n display:\n type: console\n '] for dashboard_file in dashboards: read_configs.append(read_file(dashboard_file)) # depends on [control=['for'], data=['dashboard_file']] dashboard_config = DashboardConfigReader(initialise_component_loader(), loaded_secrets) try: dashboard = read_dashboard_from_config(dashboard_config, read_configs) # depends on [control=['try'], data=[]] except YAMLError as err: click.echo(get_error_message(err, default='Dashboard configuration is invalid'), err=True) raise click.Abort() # depends on [control=['except'], data=['err']] try: DashboardValidator().validate(dashboard) # depends on [control=['try'], data=[]] except ValidationException as err: click.echo(get_error_message(err, default='Dashboard configuration is invalid'), err=True) raise click.Abort() # depends on [control=['except'], data=['err']] explain_dashboard(dashboard) click.echo('Dashboard running...') while True: try: DashboardRunner(dashboard).cycle() # depends on [control=['try'], data=[]] except SecretNotFound as err: click.echo(get_error_message(err, default="Datafeed didn't have required secret"), err=True) raise click.Abort() # depends on [control=['except'], data=['err']] if once: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def project_create_event(self, proj_info): """Create project.""" LOG.debug("Processing create %(proj)s event.", {'proj': proj_info}) proj_id = proj_info.get('resource_info') self.project_create_func(proj_id)
def function[project_create_event, parameter[self, proj_info]]: constant[Create project.] call[name[LOG].debug, parameter[constant[Processing create %(proj)s event.], dictionary[[<ast.Constant object at 0x7da2041d97e0>], [<ast.Name object at 0x7da2041d9660>]]]] variable[proj_id] assign[=] call[name[proj_info].get, parameter[constant[resource_info]]] call[name[self].project_create_func, parameter[name[proj_id]]]
keyword[def] identifier[project_create_event] ( identifier[self] , identifier[proj_info] ): literal[string] identifier[LOG] . identifier[debug] ( literal[string] ,{ literal[string] : identifier[proj_info] }) identifier[proj_id] = identifier[proj_info] . identifier[get] ( literal[string] ) identifier[self] . identifier[project_create_func] ( identifier[proj_id] )
def project_create_event(self, proj_info): """Create project.""" LOG.debug('Processing create %(proj)s event.', {'proj': proj_info}) proj_id = proj_info.get('resource_info') self.project_create_func(proj_id)
def set_pid_params(self, *args, **kwargs): '''Set PID parameters for all joints in the skeleton. Parameters for this method are passed directly to the `pid` constructor. ''' for joint in self.joints: joint.target_angles = [None] * joint.ADOF joint.controllers = [pid(*args, **kwargs) for i in range(joint.ADOF)]
def function[set_pid_params, parameter[self]]: constant[Set PID parameters for all joints in the skeleton. Parameters for this method are passed directly to the `pid` constructor. ] for taget[name[joint]] in starred[name[self].joints] begin[:] name[joint].target_angles assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b004b520>]] * name[joint].ADOF] name[joint].controllers assign[=] <ast.ListComp object at 0x7da1b004add0>
keyword[def] identifier[set_pid_params] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[joint] keyword[in] identifier[self] . identifier[joints] : identifier[joint] . identifier[target_angles] =[ keyword[None] ]* identifier[joint] . identifier[ADOF] identifier[joint] . identifier[controllers] =[ identifier[pid] (* identifier[args] ,** identifier[kwargs] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[joint] . identifier[ADOF] )]
def set_pid_params(self, *args, **kwargs): """Set PID parameters for all joints in the skeleton. Parameters for this method are passed directly to the `pid` constructor. """ for joint in self.joints: joint.target_angles = [None] * joint.ADOF joint.controllers = [pid(*args, **kwargs) for i in range(joint.ADOF)] # depends on [control=['for'], data=['joint']]
def matching_ind_und(CIJ0): ''' M0 = MATCHING_IND_UND(CIJ) computes matching index for undirected graph specified by adjacency matrix CIJ. Matching index is a measure of similarity between two nodes' connectivity profiles (excluding their mutual connection, should it exist). Parameters ---------- CIJ : NxN np.ndarray undirected adjacency matrix Returns ------- M0 : NxN np.ndarray matching index matrix ''' K = np.sum(CIJ0, axis=0) n = len(CIJ0) R = (K != 0) N = np.sum(R) xR, = np.where(R == 0) CIJ = np.delete(np.delete(CIJ0, xR, axis=0), xR, axis=1) I = np.logical_not(np.eye(N)) M = np.zeros((N, N)) for i in range(N): c1 = CIJ[i, :] use = np.logical_or(c1, CIJ) use[:, i] = 0 use *= I ncon1 = c1 * use ncon2 = c1 * CIJ ncon = np.sum(ncon1 + ncon2, axis=1) print(ncon) M[:, i] = 2 * np.sum(np.logical_and(ncon1, ncon2), axis=1) / ncon M *= I M[np.isnan(M)] = 0 M0 = np.zeros((n, n)) yR, = np.where(R) M0[np.ix_(yR, yR)] = M return M0
def function[matching_ind_und, parameter[CIJ0]]: constant[ M0 = MATCHING_IND_UND(CIJ) computes matching index for undirected graph specified by adjacency matrix CIJ. Matching index is a measure of similarity between two nodes' connectivity profiles (excluding their mutual connection, should it exist). Parameters ---------- CIJ : NxN np.ndarray undirected adjacency matrix Returns ------- M0 : NxN np.ndarray matching index matrix ] variable[K] assign[=] call[name[np].sum, parameter[name[CIJ0]]] variable[n] assign[=] call[name[len], parameter[name[CIJ0]]] variable[R] assign[=] compare[name[K] not_equal[!=] constant[0]] variable[N] assign[=] call[name[np].sum, parameter[name[R]]] <ast.Tuple object at 0x7da1b08d0100> assign[=] call[name[np].where, parameter[compare[name[R] equal[==] constant[0]]]] variable[CIJ] assign[=] call[name[np].delete, parameter[call[name[np].delete, parameter[name[CIJ0], name[xR]]], name[xR]]] variable[I] assign[=] call[name[np].logical_not, parameter[call[name[np].eye, parameter[name[N]]]]] variable[M] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b08d18a0>, <ast.Name object at 0x7da1b08d34c0>]]]] for taget[name[i]] in starred[call[name[range], parameter[name[N]]]] begin[:] variable[c1] assign[=] call[name[CIJ]][tuple[[<ast.Name object at 0x7da1b08d2620>, <ast.Slice object at 0x7da1b08d2860>]]] variable[use] assign[=] call[name[np].logical_or, parameter[name[c1], name[CIJ]]] call[name[use]][tuple[[<ast.Slice object at 0x7da1b08d2fb0>, <ast.Name object at 0x7da1b08d0e50>]]] assign[=] constant[0] <ast.AugAssign object at 0x7da1b08d0940> variable[ncon1] assign[=] binary_operation[name[c1] * name[use]] variable[ncon2] assign[=] binary_operation[name[c1] * name[CIJ]] variable[ncon] assign[=] call[name[np].sum, parameter[binary_operation[name[ncon1] + name[ncon2]]]] call[name[print], parameter[name[ncon]]] call[name[M]][tuple[[<ast.Slice object at 0x7da1b08d3ee0>, <ast.Name object at 0x7da1b08d1570>]]] assign[=] binary_operation[binary_operation[constant[2] * call[name[np].sum, parameter[call[name[np].logical_and, parameter[name[ncon1], name[ncon2]]]]]] / name[ncon]] <ast.AugAssign object at 0x7da1b08d0fa0> call[name[M]][call[name[np].isnan, parameter[name[M]]]] assign[=] constant[0] variable[M0] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b08d2d70>, <ast.Name object at 0x7da1b08d2680>]]]] <ast.Tuple object at 0x7da1b08d37c0> assign[=] call[name[np].where, parameter[name[R]]] call[name[M0]][call[name[np].ix_, parameter[name[yR], name[yR]]]] assign[=] name[M] return[name[M0]]
keyword[def] identifier[matching_ind_und] ( identifier[CIJ0] ): literal[string] identifier[K] = identifier[np] . identifier[sum] ( identifier[CIJ0] , identifier[axis] = literal[int] ) identifier[n] = identifier[len] ( identifier[CIJ0] ) identifier[R] =( identifier[K] != literal[int] ) identifier[N] = identifier[np] . identifier[sum] ( identifier[R] ) identifier[xR] ,= identifier[np] . identifier[where] ( identifier[R] == literal[int] ) identifier[CIJ] = identifier[np] . identifier[delete] ( identifier[np] . identifier[delete] ( identifier[CIJ0] , identifier[xR] , identifier[axis] = literal[int] ), identifier[xR] , identifier[axis] = literal[int] ) identifier[I] = identifier[np] . identifier[logical_not] ( identifier[np] . identifier[eye] ( identifier[N] )) identifier[M] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[N] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N] ): identifier[c1] = identifier[CIJ] [ identifier[i] ,:] identifier[use] = identifier[np] . identifier[logical_or] ( identifier[c1] , identifier[CIJ] ) identifier[use] [:, identifier[i] ]= literal[int] identifier[use] *= identifier[I] identifier[ncon1] = identifier[c1] * identifier[use] identifier[ncon2] = identifier[c1] * identifier[CIJ] identifier[ncon] = identifier[np] . identifier[sum] ( identifier[ncon1] + identifier[ncon2] , identifier[axis] = literal[int] ) identifier[print] ( identifier[ncon] ) identifier[M] [:, identifier[i] ]= literal[int] * identifier[np] . identifier[sum] ( identifier[np] . identifier[logical_and] ( identifier[ncon1] , identifier[ncon2] ), identifier[axis] = literal[int] )/ identifier[ncon] identifier[M] *= identifier[I] identifier[M] [ identifier[np] . identifier[isnan] ( identifier[M] )]= literal[int] identifier[M0] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[n] )) identifier[yR] ,= identifier[np] . identifier[where] ( identifier[R] ) identifier[M0] [ identifier[np] . identifier[ix_] ( identifier[yR] , identifier[yR] )]= identifier[M] keyword[return] identifier[M0]
def matching_ind_und(CIJ0): """ M0 = MATCHING_IND_UND(CIJ) computes matching index for undirected graph specified by adjacency matrix CIJ. Matching index is a measure of similarity between two nodes' connectivity profiles (excluding their mutual connection, should it exist). Parameters ---------- CIJ : NxN np.ndarray undirected adjacency matrix Returns ------- M0 : NxN np.ndarray matching index matrix """ K = np.sum(CIJ0, axis=0) n = len(CIJ0) R = K != 0 N = np.sum(R) (xR,) = np.where(R == 0) CIJ = np.delete(np.delete(CIJ0, xR, axis=0), xR, axis=1) I = np.logical_not(np.eye(N)) M = np.zeros((N, N)) for i in range(N): c1 = CIJ[i, :] use = np.logical_or(c1, CIJ) use[:, i] = 0 use *= I ncon1 = c1 * use ncon2 = c1 * CIJ ncon = np.sum(ncon1 + ncon2, axis=1) print(ncon) M[:, i] = 2 * np.sum(np.logical_and(ncon1, ncon2), axis=1) / ncon # depends on [control=['for'], data=['i']] M *= I M[np.isnan(M)] = 0 M0 = np.zeros((n, n)) (yR,) = np.where(R) M0[np.ix_(yR, yR)] = M return M0
def read(self, timeout=READ_TIMEOUT, raw=False): ''' Read data from the arm. Data is returned as a latin_1 encoded string, or raw bytes if 'raw' is True. ''' time.sleep(READ_SLEEP_TIME) raw_out = self.ser.read(self.ser.in_waiting) out = raw_out.decode(OUTPUT_ENCODING) time_waiting = 0 while len(out) == 0 or ending_in(out.strip(OUTPUT_STRIP_CHARS), RESPONSE_END_WORDS) is None: time.sleep(READ_SLEEP_TIME) time_waiting += READ_SLEEP_TIME raw_out += self.ser.read(self.ser.in_waiting) out = raw_out.decode(OUTPUT_ENCODING) # TODO how to handle timeouts, if they're now unexpected? if time_waiting >= timeout: break if raw: return raw_out return out
def function[read, parameter[self, timeout, raw]]: constant[ Read data from the arm. Data is returned as a latin_1 encoded string, or raw bytes if 'raw' is True. ] call[name[time].sleep, parameter[name[READ_SLEEP_TIME]]] variable[raw_out] assign[=] call[name[self].ser.read, parameter[name[self].ser.in_waiting]] variable[out] assign[=] call[name[raw_out].decode, parameter[name[OUTPUT_ENCODING]]] variable[time_waiting] assign[=] constant[0] while <ast.BoolOp object at 0x7da2044c18d0> begin[:] call[name[time].sleep, parameter[name[READ_SLEEP_TIME]]] <ast.AugAssign object at 0x7da2044c3370> <ast.AugAssign object at 0x7da2044c2230> variable[out] assign[=] call[name[raw_out].decode, parameter[name[OUTPUT_ENCODING]]] if compare[name[time_waiting] greater_or_equal[>=] name[timeout]] begin[:] break if name[raw] begin[:] return[name[raw_out]] return[name[out]]
keyword[def] identifier[read] ( identifier[self] , identifier[timeout] = identifier[READ_TIMEOUT] , identifier[raw] = keyword[False] ): literal[string] identifier[time] . identifier[sleep] ( identifier[READ_SLEEP_TIME] ) identifier[raw_out] = identifier[self] . identifier[ser] . identifier[read] ( identifier[self] . identifier[ser] . identifier[in_waiting] ) identifier[out] = identifier[raw_out] . identifier[decode] ( identifier[OUTPUT_ENCODING] ) identifier[time_waiting] = literal[int] keyword[while] identifier[len] ( identifier[out] )== literal[int] keyword[or] identifier[ending_in] ( identifier[out] . identifier[strip] ( identifier[OUTPUT_STRIP_CHARS] ), identifier[RESPONSE_END_WORDS] ) keyword[is] keyword[None] : identifier[time] . identifier[sleep] ( identifier[READ_SLEEP_TIME] ) identifier[time_waiting] += identifier[READ_SLEEP_TIME] identifier[raw_out] += identifier[self] . identifier[ser] . identifier[read] ( identifier[self] . identifier[ser] . identifier[in_waiting] ) identifier[out] = identifier[raw_out] . identifier[decode] ( identifier[OUTPUT_ENCODING] ) keyword[if] identifier[time_waiting] >= identifier[timeout] : keyword[break] keyword[if] identifier[raw] : keyword[return] identifier[raw_out] keyword[return] identifier[out]
def read(self, timeout=READ_TIMEOUT, raw=False): """ Read data from the arm. Data is returned as a latin_1 encoded string, or raw bytes if 'raw' is True. """ time.sleep(READ_SLEEP_TIME) raw_out = self.ser.read(self.ser.in_waiting) out = raw_out.decode(OUTPUT_ENCODING) time_waiting = 0 while len(out) == 0 or ending_in(out.strip(OUTPUT_STRIP_CHARS), RESPONSE_END_WORDS) is None: time.sleep(READ_SLEEP_TIME) time_waiting += READ_SLEEP_TIME raw_out += self.ser.read(self.ser.in_waiting) out = raw_out.decode(OUTPUT_ENCODING) # TODO how to handle timeouts, if they're now unexpected? if time_waiting >= timeout: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] if raw: return raw_out # depends on [control=['if'], data=[]] return out
def qteMacroData(self, widgetObj: QtGui.QWidget=None): """ Retrieve ``widgetObj`` specific data previously saved with ``qteSaveMacroData``. If no data has been stored previously then **None** is returned. If ``widgetObj`` is **None** then the calling widget ``self.qteWidget`` will be used. |Args| * ``widgetObj`` (**QWidget**): the widget/applet with which the data should be associated. |Returns| * **object**: the previously stored data. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. * **QtmacsOtherError** if ``widgetObj`` was not added with ``qteAddWidget`` method. """ # Check type of input arguments. if not hasattr(widgetObj, '_qteAdmin') and (widgetObj is not None): msg = '<widgetObj> was probably not added with <qteAddWidget>' msg += ' method because it lacks the <_qteAdmin> attribute.' raise QtmacsOtherError(msg) # If no widget was specified then use the calling widget. if not widgetObj: widgetObj = self.qteWidget # Retrieve the data structure. try: _ = widgetObj._qteAdmin.macroData[self.qteMacroName()] except KeyError: # If the entry does not exist then this is a bug; create # an empty entry for next time. widgetObj._qteAdmin.macroData[self.qteMacroName()] = None # Return the data. return widgetObj._qteAdmin.macroData[self.qteMacroName()]
def function[qteMacroData, parameter[self, widgetObj]]: constant[ Retrieve ``widgetObj`` specific data previously saved with ``qteSaveMacroData``. If no data has been stored previously then **None** is returned. If ``widgetObj`` is **None** then the calling widget ``self.qteWidget`` will be used. |Args| * ``widgetObj`` (**QWidget**): the widget/applet with which the data should be associated. |Returns| * **object**: the previously stored data. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. * **QtmacsOtherError** if ``widgetObj`` was not added with ``qteAddWidget`` method. ] if <ast.BoolOp object at 0x7da18eb54130> begin[:] variable[msg] assign[=] constant[<widgetObj> was probably not added with <qteAddWidget>] <ast.AugAssign object at 0x7da18eb549d0> <ast.Raise object at 0x7da18eb57010> if <ast.UnaryOp object at 0x7da18eb57e50> begin[:] variable[widgetObj] assign[=] name[self].qteWidget <ast.Try object at 0x7da2044c2a10> return[call[name[widgetObj]._qteAdmin.macroData][call[name[self].qteMacroName, parameter[]]]]
keyword[def] identifier[qteMacroData] ( identifier[self] , identifier[widgetObj] : identifier[QtGui] . identifier[QWidget] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[widgetObj] , literal[string] ) keyword[and] ( identifier[widgetObj] keyword[is] keyword[not] keyword[None] ): identifier[msg] = literal[string] identifier[msg] += literal[string] keyword[raise] identifier[QtmacsOtherError] ( identifier[msg] ) keyword[if] keyword[not] identifier[widgetObj] : identifier[widgetObj] = identifier[self] . identifier[qteWidget] keyword[try] : identifier[_] = identifier[widgetObj] . identifier[_qteAdmin] . identifier[macroData] [ identifier[self] . identifier[qteMacroName] ()] keyword[except] identifier[KeyError] : identifier[widgetObj] . identifier[_qteAdmin] . identifier[macroData] [ identifier[self] . identifier[qteMacroName] ()]= keyword[None] keyword[return] identifier[widgetObj] . identifier[_qteAdmin] . identifier[macroData] [ identifier[self] . identifier[qteMacroName] ()]
def qteMacroData(self, widgetObj: QtGui.QWidget=None): """ Retrieve ``widgetObj`` specific data previously saved with ``qteSaveMacroData``. If no data has been stored previously then **None** is returned. If ``widgetObj`` is **None** then the calling widget ``self.qteWidget`` will be used. |Args| * ``widgetObj`` (**QWidget**): the widget/applet with which the data should be associated. |Returns| * **object**: the previously stored data. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. * **QtmacsOtherError** if ``widgetObj`` was not added with ``qteAddWidget`` method. """ # Check type of input arguments. if not hasattr(widgetObj, '_qteAdmin') and widgetObj is not None: msg = '<widgetObj> was probably not added with <qteAddWidget>' msg += ' method because it lacks the <_qteAdmin> attribute.' raise QtmacsOtherError(msg) # depends on [control=['if'], data=[]] # If no widget was specified then use the calling widget. if not widgetObj: widgetObj = self.qteWidget # depends on [control=['if'], data=[]] # Retrieve the data structure. try: _ = widgetObj._qteAdmin.macroData[self.qteMacroName()] # depends on [control=['try'], data=[]] except KeyError: # If the entry does not exist then this is a bug; create # an empty entry for next time. widgetObj._qteAdmin.macroData[self.qteMacroName()] = None # depends on [control=['except'], data=[]] # Return the data. return widgetObj._qteAdmin.macroData[self.qteMacroName()]
def proximal_convex_conj_l1(space, lam=1, g=None): r"""Proximal operator factory of the L1 norm/distance convex conjugate. Implements the proximal operator of the convex conjugate of the functional :: F(x) = lam ||x - g||_1 with ``x`` and ``g`` elements in ``space``, and scaling factor ``lam``. Parameters ---------- space : `LinearSpace` or `ProductSpace` of `LinearSpace` spaces Domain of the functional F lam : positive float, optional Scaling factor or regularization parameter. g : ``space`` element, optional Element to which the L1 distance is taken. Default: ``space.zero``. Returns ------- prox_factory : function Factory for the proximal operator to be initialized. Notes ----- The convex conjugate :math:`F^*` of the functional .. math:: F(x) = \lambda \|x - g\|_1. is in the case of scalar-valued functions given by .. math:: F^*(y) = \iota_{B_\infty} \big( \lambda^{-1}\, y \big) + \left\langle \lambda^{-1}\, y,\: g \right\rangle, where :math:`\iota_{B_\infty}` is the indicator function of the unit ball with respect to :math:`\|\cdot\|_\infty`. For vector-valued functions, the convex conjugate is .. math:: F^*(y) = \sum_{k=1}^d F^*(y_k) due to separability of the (non-isotropic) 1-norm. For a step size :math:`\sigma`, the proximal operator of :math:`\sigma F^*` is given by .. math:: \mathrm{prox}_{\sigma F^*}(y) = \frac{\lambda (y - \sigma g)}{ \max(\lambda, |y - \sigma g|)} Here, all operations are to be read pointwise. For vector-valued :math:`x` and :math:`g`, the (non-isotropic) proximal operator is the component-wise scalar proximal: .. math:: \mathrm{prox}_{\sigma F^*}(x) = \left( \mathrm{prox}_{\sigma F^*}(x_1), \dots, \mathrm{prox}_{\sigma F^*}(x_d) \right), where :math:`d` is the number of components of :math:`x`. See Also -------- proximal_convex_conj_l1_l2 : isotropic variant for vector-valued functions proximal_l1 : proximal without convex conjugate """ # Fix for rounding errors dtype = getattr(space, 'dtype', float) eps = np.finfo(dtype).resolution * 10 lam = float(lam * (1 - eps)) if g is not None and g not in space: raise TypeError('{!r} is not an element of {!r}'.format(g, space)) class ProximalConvexConjL1(Operator): """Proximal operator of the L1 norm/distance convex conjugate.""" def __init__(self, sigma): """Initialize a new instance. Parameters ---------- sigma : positive float or pointwise positive space.element Step size parameter. If scalar, it contains a global stepsize, otherwise the space.element defines a stepsize for each point. """ super(ProximalConvexConjL1, self).__init__( domain=space, range=space, linear=False) if np.isscalar(sigma): self.sigma = float(sigma) else: self.sigma = space.element(sigma) def _call(self, x, out): """Return ``self(x, out=out)``.""" # lam * (x - sig * g) / max(lam, |x - sig * g|) # diff = x - sig * g if g is not None: diff = self.domain.element() diff.lincomb(1, x, -self.sigma, g) else: if x is out: # Handle aliased `x` and `out` # This is necessary since we write to both `diff` and # `out`. diff = x.copy() else: diff = x # out = max( |x-sig*g|, lam ) / lam diff.ufuncs.absolute(out=out) out.ufuncs.maximum(lam, out=out) out /= lam # out = diff / ... diff.divide(out, out=out) return ProximalConvexConjL1
def function[proximal_convex_conj_l1, parameter[space, lam, g]]: constant[Proximal operator factory of the L1 norm/distance convex conjugate. Implements the proximal operator of the convex conjugate of the functional :: F(x) = lam ||x - g||_1 with ``x`` and ``g`` elements in ``space``, and scaling factor ``lam``. Parameters ---------- space : `LinearSpace` or `ProductSpace` of `LinearSpace` spaces Domain of the functional F lam : positive float, optional Scaling factor or regularization parameter. g : ``space`` element, optional Element to which the L1 distance is taken. Default: ``space.zero``. Returns ------- prox_factory : function Factory for the proximal operator to be initialized. Notes ----- The convex conjugate :math:`F^*` of the functional .. math:: F(x) = \lambda \|x - g\|_1. is in the case of scalar-valued functions given by .. math:: F^*(y) = \iota_{B_\infty} \big( \lambda^{-1}\, y \big) + \left\langle \lambda^{-1}\, y,\: g \right\rangle, where :math:`\iota_{B_\infty}` is the indicator function of the unit ball with respect to :math:`\|\cdot\|_\infty`. For vector-valued functions, the convex conjugate is .. math:: F^*(y) = \sum_{k=1}^d F^*(y_k) due to separability of the (non-isotropic) 1-norm. For a step size :math:`\sigma`, the proximal operator of :math:`\sigma F^*` is given by .. math:: \mathrm{prox}_{\sigma F^*}(y) = \frac{\lambda (y - \sigma g)}{ \max(\lambda, |y - \sigma g|)} Here, all operations are to be read pointwise. For vector-valued :math:`x` and :math:`g`, the (non-isotropic) proximal operator is the component-wise scalar proximal: .. math:: \mathrm{prox}_{\sigma F^*}(x) = \left( \mathrm{prox}_{\sigma F^*}(x_1), \dots, \mathrm{prox}_{\sigma F^*}(x_d) \right), where :math:`d` is the number of components of :math:`x`. See Also -------- proximal_convex_conj_l1_l2 : isotropic variant for vector-valued functions proximal_l1 : proximal without convex conjugate ] variable[dtype] assign[=] call[name[getattr], parameter[name[space], constant[dtype], name[float]]] variable[eps] assign[=] binary_operation[call[name[np].finfo, parameter[name[dtype]]].resolution * constant[10]] variable[lam] assign[=] call[name[float], parameter[binary_operation[name[lam] * binary_operation[constant[1] - name[eps]]]]] if <ast.BoolOp object at 0x7da1b1e97760> begin[:] <ast.Raise object at 0x7da1b1e94700> class class[ProximalConvexConjL1, parameter[]] begin[:] constant[Proximal operator of the L1 norm/distance convex conjugate.] def function[__init__, parameter[self, sigma]]: constant[Initialize a new instance. Parameters ---------- sigma : positive float or pointwise positive space.element Step size parameter. If scalar, it contains a global stepsize, otherwise the space.element defines a stepsize for each point. ] call[call[name[super], parameter[name[ProximalConvexConjL1], name[self]]].__init__, parameter[]] if call[name[np].isscalar, parameter[name[sigma]]] begin[:] name[self].sigma assign[=] call[name[float], parameter[name[sigma]]] def function[_call, parameter[self, x, out]]: constant[Return ``self(x, out=out)``.] if compare[name[g] is_not constant[None]] begin[:] variable[diff] assign[=] call[name[self].domain.element, parameter[]] call[name[diff].lincomb, parameter[constant[1], name[x], <ast.UnaryOp object at 0x7da1b1ec5cf0>, name[g]]] call[name[diff].ufuncs.absolute, parameter[]] call[name[out].ufuncs.maximum, parameter[name[lam]]] <ast.AugAssign object at 0x7da1b1ec6e30> call[name[diff].divide, parameter[name[out]]] return[name[ProximalConvexConjL1]]
keyword[def] identifier[proximal_convex_conj_l1] ( identifier[space] , identifier[lam] = literal[int] , identifier[g] = keyword[None] ): literal[string] identifier[dtype] = identifier[getattr] ( identifier[space] , literal[string] , identifier[float] ) identifier[eps] = identifier[np] . identifier[finfo] ( identifier[dtype] ). identifier[resolution] * literal[int] identifier[lam] = identifier[float] ( identifier[lam] *( literal[int] - identifier[eps] )) keyword[if] identifier[g] keyword[is] keyword[not] keyword[None] keyword[and] identifier[g] keyword[not] keyword[in] identifier[space] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[g] , identifier[space] )) keyword[class] identifier[ProximalConvexConjL1] ( identifier[Operator] ): literal[string] keyword[def] identifier[__init__] ( identifier[self] , identifier[sigma] ): literal[string] identifier[super] ( identifier[ProximalConvexConjL1] , identifier[self] ). identifier[__init__] ( identifier[domain] = identifier[space] , identifier[range] = identifier[space] , identifier[linear] = keyword[False] ) keyword[if] identifier[np] . identifier[isscalar] ( identifier[sigma] ): identifier[self] . identifier[sigma] = identifier[float] ( identifier[sigma] ) keyword[else] : identifier[self] . identifier[sigma] = identifier[space] . identifier[element] ( identifier[sigma] ) keyword[def] identifier[_call] ( identifier[self] , identifier[x] , identifier[out] ): literal[string] keyword[if] identifier[g] keyword[is] keyword[not] keyword[None] : identifier[diff] = identifier[self] . identifier[domain] . identifier[element] () identifier[diff] . identifier[lincomb] ( literal[int] , identifier[x] ,- identifier[self] . identifier[sigma] , identifier[g] ) keyword[else] : keyword[if] identifier[x] keyword[is] identifier[out] : identifier[diff] = identifier[x] . identifier[copy] () keyword[else] : identifier[diff] = identifier[x] identifier[diff] . identifier[ufuncs] . identifier[absolute] ( identifier[out] = identifier[out] ) identifier[out] . identifier[ufuncs] . identifier[maximum] ( identifier[lam] , identifier[out] = identifier[out] ) identifier[out] /= identifier[lam] identifier[diff] . identifier[divide] ( identifier[out] , identifier[out] = identifier[out] ) keyword[return] identifier[ProximalConvexConjL1]
def proximal_convex_conj_l1(space, lam=1, g=None): """Proximal operator factory of the L1 norm/distance convex conjugate. Implements the proximal operator of the convex conjugate of the functional :: F(x) = lam ||x - g||_1 with ``x`` and ``g`` elements in ``space``, and scaling factor ``lam``. Parameters ---------- space : `LinearSpace` or `ProductSpace` of `LinearSpace` spaces Domain of the functional F lam : positive float, optional Scaling factor or regularization parameter. g : ``space`` element, optional Element to which the L1 distance is taken. Default: ``space.zero``. Returns ------- prox_factory : function Factory for the proximal operator to be initialized. Notes ----- The convex conjugate :math:`F^*` of the functional .. math:: F(x) = \\lambda \\|x - g\\|_1. is in the case of scalar-valued functions given by .. math:: F^*(y) = \\iota_{B_\\infty} \\big( \\lambda^{-1}\\, y \\big) + \\left\\langle \\lambda^{-1}\\, y,\\: g \\right\\rangle, where :math:`\\iota_{B_\\infty}` is the indicator function of the unit ball with respect to :math:`\\|\\cdot\\|_\\infty`. For vector-valued functions, the convex conjugate is .. math:: F^*(y) = \\sum_{k=1}^d F^*(y_k) due to separability of the (non-isotropic) 1-norm. For a step size :math:`\\sigma`, the proximal operator of :math:`\\sigma F^*` is given by .. math:: \\mathrm{prox}_{\\sigma F^*}(y) = \\frac{\\lambda (y - \\sigma g)}{ \\max(\\lambda, |y - \\sigma g|)} Here, all operations are to be read pointwise. For vector-valued :math:`x` and :math:`g`, the (non-isotropic) proximal operator is the component-wise scalar proximal: .. math:: \\mathrm{prox}_{\\sigma F^*}(x) = \\left( \\mathrm{prox}_{\\sigma F^*}(x_1), \\dots, \\mathrm{prox}_{\\sigma F^*}(x_d) \\right), where :math:`d` is the number of components of :math:`x`. See Also -------- proximal_convex_conj_l1_l2 : isotropic variant for vector-valued functions proximal_l1 : proximal without convex conjugate """ # Fix for rounding errors dtype = getattr(space, 'dtype', float) eps = np.finfo(dtype).resolution * 10 lam = float(lam * (1 - eps)) if g is not None and g not in space: raise TypeError('{!r} is not an element of {!r}'.format(g, space)) # depends on [control=['if'], data=[]] class ProximalConvexConjL1(Operator): """Proximal operator of the L1 norm/distance convex conjugate.""" def __init__(self, sigma): """Initialize a new instance. Parameters ---------- sigma : positive float or pointwise positive space.element Step size parameter. If scalar, it contains a global stepsize, otherwise the space.element defines a stepsize for each point. """ super(ProximalConvexConjL1, self).__init__(domain=space, range=space, linear=False) if np.isscalar(sigma): self.sigma = float(sigma) # depends on [control=['if'], data=[]] else: self.sigma = space.element(sigma) def _call(self, x, out): """Return ``self(x, out=out)``.""" # lam * (x - sig * g) / max(lam, |x - sig * g|) # diff = x - sig * g if g is not None: diff = self.domain.element() diff.lincomb(1, x, -self.sigma, g) # depends on [control=['if'], data=['g']] elif x is out: # Handle aliased `x` and `out` # This is necessary since we write to both `diff` and # `out`. diff = x.copy() # depends on [control=['if'], data=['x']] else: diff = x # out = max( |x-sig*g|, lam ) / lam diff.ufuncs.absolute(out=out) out.ufuncs.maximum(lam, out=out) out /= lam # out = diff / ... diff.divide(out, out=out) return ProximalConvexConjL1
def token_meta(opt): """Generates metadata for a token""" meta = { 'via': 'aomi', 'operation': opt.operation, 'hostname': socket.gethostname() } if 'USER' in os.environ: meta['unix_user'] = os.environ['USER'] if opt.metadata: meta_bits = opt.metadata.split(',') for meta_bit in meta_bits: key, value = meta_bit.split('=') if key not in meta: meta[key] = value for key, value in meta.items(): LOG.debug("Token metadata %s %s", key, value) return meta
def function[token_meta, parameter[opt]]: constant[Generates metadata for a token] variable[meta] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bef3a0>, <ast.Constant object at 0x7da1b1bee980>, <ast.Constant object at 0x7da1b1beca30>], [<ast.Constant object at 0x7da1b1bedf90>, <ast.Attribute object at 0x7da1b1beea10>, <ast.Call object at 0x7da1b1bede10>]] if compare[constant[USER] in name[os].environ] begin[:] call[name[meta]][constant[unix_user]] assign[=] call[name[os].environ][constant[USER]] if name[opt].metadata begin[:] variable[meta_bits] assign[=] call[name[opt].metadata.split, parameter[constant[,]]] for taget[name[meta_bit]] in starred[name[meta_bits]] begin[:] <ast.Tuple object at 0x7da1b1bece20> assign[=] call[name[meta_bit].split, parameter[constant[=]]] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[meta]] begin[:] call[name[meta]][name[key]] assign[=] name[value] for taget[tuple[[<ast.Name object at 0x7da1b1bef190>, <ast.Name object at 0x7da1b1bee9e0>]]] in starred[call[name[meta].items, parameter[]]] begin[:] call[name[LOG].debug, parameter[constant[Token metadata %s %s], name[key], name[value]]] return[name[meta]]
keyword[def] identifier[token_meta] ( identifier[opt] ): literal[string] identifier[meta] ={ literal[string] : literal[string] , literal[string] : identifier[opt] . identifier[operation] , literal[string] : identifier[socket] . identifier[gethostname] () } keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] : identifier[meta] [ literal[string] ]= identifier[os] . identifier[environ] [ literal[string] ] keyword[if] identifier[opt] . identifier[metadata] : identifier[meta_bits] = identifier[opt] . identifier[metadata] . identifier[split] ( literal[string] ) keyword[for] identifier[meta_bit] keyword[in] identifier[meta_bits] : identifier[key] , identifier[value] = identifier[meta_bit] . identifier[split] ( literal[string] ) keyword[if] identifier[key] keyword[not] keyword[in] identifier[meta] : identifier[meta] [ identifier[key] ]= identifier[value] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[meta] . identifier[items] (): identifier[LOG] . identifier[debug] ( literal[string] , identifier[key] , identifier[value] ) keyword[return] identifier[meta]
def token_meta(opt): """Generates metadata for a token""" meta = {'via': 'aomi', 'operation': opt.operation, 'hostname': socket.gethostname()} if 'USER' in os.environ: meta['unix_user'] = os.environ['USER'] # depends on [control=['if'], data=[]] if opt.metadata: meta_bits = opt.metadata.split(',') for meta_bit in meta_bits: (key, value) = meta_bit.split('=') # depends on [control=['for'], data=['meta_bit']] if key not in meta: meta[key] = value # depends on [control=['if'], data=['key', 'meta']] # depends on [control=['if'], data=[]] for (key, value) in meta.items(): LOG.debug('Token metadata %s %s', key, value) # depends on [control=['for'], data=[]] return meta
def plot_distance_landscape_projection(self, x_axis, y_axis, ax=None, *args, **kwargs): """ Plots the distance landscape jointly-generated from all the results :param x_axis: symbol to plot on x axis :param y_axis: symbol to plot on y axis :param ax: axis object to plot onto :param args: arguments to pass to :func:`matplotlib.pyplot.contourf` :param kwargs: keyword arguments to pass to :func:`matplotlib.pyplot.contourf` :return: """ # Gather all x, y, z's to plot first as this would make the gradient landscape better x_all, y_all, z_all = [], [], [] for result in self.results: x, y, z = result.distance_landscape_as_3d_data(x_axis, y_axis) x_all.extend(x) y_all.extend(y) z_all.extend(z) # Now plot the contour for x_all, y_all and z_all plot_contour(x_all, y_all, z_all, x_axis, y_axis, ax=ax, *args, **kwargs)
def function[plot_distance_landscape_projection, parameter[self, x_axis, y_axis, ax]]: constant[ Plots the distance landscape jointly-generated from all the results :param x_axis: symbol to plot on x axis :param y_axis: symbol to plot on y axis :param ax: axis object to plot onto :param args: arguments to pass to :func:`matplotlib.pyplot.contourf` :param kwargs: keyword arguments to pass to :func:`matplotlib.pyplot.contourf` :return: ] <ast.Tuple object at 0x7da207f00820> assign[=] tuple[[<ast.List object at 0x7da207f00b50>, <ast.List object at 0x7da207f02d10>, <ast.List object at 0x7da207f010c0>]] for taget[name[result]] in starred[name[self].results] begin[:] <ast.Tuple object at 0x7da207f030a0> assign[=] call[name[result].distance_landscape_as_3d_data, parameter[name[x_axis], name[y_axis]]] call[name[x_all].extend, parameter[name[x]]] call[name[y_all].extend, parameter[name[y]]] call[name[z_all].extend, parameter[name[z]]] call[name[plot_contour], parameter[name[x_all], name[y_all], name[z_all], name[x_axis], name[y_axis], <ast.Starred object at 0x7da1b26adf30>]]
keyword[def] identifier[plot_distance_landscape_projection] ( identifier[self] , identifier[x_axis] , identifier[y_axis] , identifier[ax] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[x_all] , identifier[y_all] , identifier[z_all] =[],[],[] keyword[for] identifier[result] keyword[in] identifier[self] . identifier[results] : identifier[x] , identifier[y] , identifier[z] = identifier[result] . identifier[distance_landscape_as_3d_data] ( identifier[x_axis] , identifier[y_axis] ) identifier[x_all] . identifier[extend] ( identifier[x] ) identifier[y_all] . identifier[extend] ( identifier[y] ) identifier[z_all] . identifier[extend] ( identifier[z] ) identifier[plot_contour] ( identifier[x_all] , identifier[y_all] , identifier[z_all] , identifier[x_axis] , identifier[y_axis] , identifier[ax] = identifier[ax] ,* identifier[args] ,** identifier[kwargs] )
def plot_distance_landscape_projection(self, x_axis, y_axis, ax=None, *args, **kwargs): """ Plots the distance landscape jointly-generated from all the results :param x_axis: symbol to plot on x axis :param y_axis: symbol to plot on y axis :param ax: axis object to plot onto :param args: arguments to pass to :func:`matplotlib.pyplot.contourf` :param kwargs: keyword arguments to pass to :func:`matplotlib.pyplot.contourf` :return: """ # Gather all x, y, z's to plot first as this would make the gradient landscape better (x_all, y_all, z_all) = ([], [], []) for result in self.results: (x, y, z) = result.distance_landscape_as_3d_data(x_axis, y_axis) x_all.extend(x) y_all.extend(y) z_all.extend(z) # depends on [control=['for'], data=['result']] # Now plot the contour for x_all, y_all and z_all plot_contour(x_all, y_all, z_all, x_axis, y_axis, *args, ax=ax, **kwargs)
def post_event_cancel(self, id, **data): """ POST /events/:id/cancel/ Cancels an event if it has not already been deleted. In order for cancel to be permitted, there must be no pending or completed orders. Returns a boolean indicating success or failure of the cancel. """ return self.post("/events/{0}/cancel/".format(id), data=data)
def function[post_event_cancel, parameter[self, id]]: constant[ POST /events/:id/cancel/ Cancels an event if it has not already been deleted. In order for cancel to be permitted, there must be no pending or completed orders. Returns a boolean indicating success or failure of the cancel. ] return[call[name[self].post, parameter[call[constant[/events/{0}/cancel/].format, parameter[name[id]]]]]]
keyword[def] identifier[post_event_cancel] ( identifier[self] , identifier[id] ,** identifier[data] ): literal[string] keyword[return] identifier[self] . identifier[post] ( literal[string] . identifier[format] ( identifier[id] ), identifier[data] = identifier[data] )
def post_event_cancel(self, id, **data): """ POST /events/:id/cancel/ Cancels an event if it has not already been deleted. In order for cancel to be permitted, there must be no pending or completed orders. Returns a boolean indicating success or failure of the cancel. """ return self.post('/events/{0}/cancel/'.format(id), data=data)
def get_table(self, table, retry=DEFAULT_RETRY): """Fetch the table referenced by ``table``. Args: table (Union[ \ :class:`~google.cloud.bigquery.table.Table`, \ :class:`~google.cloud.bigquery.table.TableReference`, \ str, \ ]): A reference to the table to fetch from the BigQuery API. If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. retry (:class:`google.api_core.retry.Retry`): (Optional) How to retry the RPC. Returns: google.cloud.bigquery.table.Table: A ``Table`` instance. """ table_ref = _table_arg_to_table_ref(table, default_project=self.project) api_response = self._call_api(retry, method="GET", path=table_ref.path) return Table.from_api_repr(api_response)
def function[get_table, parameter[self, table, retry]]: constant[Fetch the table referenced by ``table``. Args: table (Union[ :class:`~google.cloud.bigquery.table.Table`, :class:`~google.cloud.bigquery.table.TableReference`, str, ]): A reference to the table to fetch from the BigQuery API. If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. retry (:class:`google.api_core.retry.Retry`): (Optional) How to retry the RPC. Returns: google.cloud.bigquery.table.Table: A ``Table`` instance. ] variable[table_ref] assign[=] call[name[_table_arg_to_table_ref], parameter[name[table]]] variable[api_response] assign[=] call[name[self]._call_api, parameter[name[retry]]] return[call[name[Table].from_api_repr, parameter[name[api_response]]]]
keyword[def] identifier[get_table] ( identifier[self] , identifier[table] , identifier[retry] = identifier[DEFAULT_RETRY] ): literal[string] identifier[table_ref] = identifier[_table_arg_to_table_ref] ( identifier[table] , identifier[default_project] = identifier[self] . identifier[project] ) identifier[api_response] = identifier[self] . identifier[_call_api] ( identifier[retry] , identifier[method] = literal[string] , identifier[path] = identifier[table_ref] . identifier[path] ) keyword[return] identifier[Table] . identifier[from_api_repr] ( identifier[api_response] )
def get_table(self, table, retry=DEFAULT_RETRY): """Fetch the table referenced by ``table``. Args: table (Union[ :class:`~google.cloud.bigquery.table.Table`, :class:`~google.cloud.bigquery.table.TableReference`, str, ]): A reference to the table to fetch from the BigQuery API. If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. retry (:class:`google.api_core.retry.Retry`): (Optional) How to retry the RPC. Returns: google.cloud.bigquery.table.Table: A ``Table`` instance. """ table_ref = _table_arg_to_table_ref(table, default_project=self.project) api_response = self._call_api(retry, method='GET', path=table_ref.path) return Table.from_api_repr(api_response)
def add_job_from_json(self, job_json, destructive=False): """ Construct a new Job from an imported JSON spec. """ logger.debug('Importing job from JSON document: {0}'.format(job_json)) rec = self.backend.decode_import_json(job_json) if destructive: try: self.delete_job(rec['name']) except DagobahError: # expected if no job with this name pass self._add_job_from_spec(rec, use_job_id=False) self.commit(cascade=True)
def function[add_job_from_json, parameter[self, job_json, destructive]]: constant[ Construct a new Job from an imported JSON spec. ] call[name[logger].debug, parameter[call[constant[Importing job from JSON document: {0}].format, parameter[name[job_json]]]]] variable[rec] assign[=] call[name[self].backend.decode_import_json, parameter[name[job_json]]] if name[destructive] begin[:] <ast.Try object at 0x7da1b0b592d0> call[name[self]._add_job_from_spec, parameter[name[rec]]] call[name[self].commit, parameter[]]
keyword[def] identifier[add_job_from_json] ( identifier[self] , identifier[job_json] , identifier[destructive] = keyword[False] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[job_json] )) identifier[rec] = identifier[self] . identifier[backend] . identifier[decode_import_json] ( identifier[job_json] ) keyword[if] identifier[destructive] : keyword[try] : identifier[self] . identifier[delete_job] ( identifier[rec] [ literal[string] ]) keyword[except] identifier[DagobahError] : keyword[pass] identifier[self] . identifier[_add_job_from_spec] ( identifier[rec] , identifier[use_job_id] = keyword[False] ) identifier[self] . identifier[commit] ( identifier[cascade] = keyword[True] )
def add_job_from_json(self, job_json, destructive=False): """ Construct a new Job from an imported JSON spec. """ logger.debug('Importing job from JSON document: {0}'.format(job_json)) rec = self.backend.decode_import_json(job_json) if destructive: try: self.delete_job(rec['name']) # depends on [control=['try'], data=[]] except DagobahError: # expected if no job with this name pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] self._add_job_from_spec(rec, use_job_id=False) self.commit(cascade=True)
def _fix_ctx(m2_ctx, issuer=None): ''' This is part of an ugly hack to fix an ancient bug in M2Crypto https://bugzilla.osafoundation.org/show_bug.cgi?id=7530#c13 ''' ctx = _Ctx.from_address(int(m2_ctx)) # pylint: disable=no-member ctx.flags = 0 ctx.subject_cert = None ctx.subject_req = None ctx.crl = None if issuer is None: ctx.issuer_cert = None else: ctx.issuer_cert = int(issuer.x509)
def function[_fix_ctx, parameter[m2_ctx, issuer]]: constant[ This is part of an ugly hack to fix an ancient bug in M2Crypto https://bugzilla.osafoundation.org/show_bug.cgi?id=7530#c13 ] variable[ctx] assign[=] call[name[_Ctx].from_address, parameter[call[name[int], parameter[name[m2_ctx]]]]] name[ctx].flags assign[=] constant[0] name[ctx].subject_cert assign[=] constant[None] name[ctx].subject_req assign[=] constant[None] name[ctx].crl assign[=] constant[None] if compare[name[issuer] is constant[None]] begin[:] name[ctx].issuer_cert assign[=] constant[None]
keyword[def] identifier[_fix_ctx] ( identifier[m2_ctx] , identifier[issuer] = keyword[None] ): literal[string] identifier[ctx] = identifier[_Ctx] . identifier[from_address] ( identifier[int] ( identifier[m2_ctx] )) identifier[ctx] . identifier[flags] = literal[int] identifier[ctx] . identifier[subject_cert] = keyword[None] identifier[ctx] . identifier[subject_req] = keyword[None] identifier[ctx] . identifier[crl] = keyword[None] keyword[if] identifier[issuer] keyword[is] keyword[None] : identifier[ctx] . identifier[issuer_cert] = keyword[None] keyword[else] : identifier[ctx] . identifier[issuer_cert] = identifier[int] ( identifier[issuer] . identifier[x509] )
def _fix_ctx(m2_ctx, issuer=None): """ This is part of an ugly hack to fix an ancient bug in M2Crypto https://bugzilla.osafoundation.org/show_bug.cgi?id=7530#c13 """ ctx = _Ctx.from_address(int(m2_ctx)) # pylint: disable=no-member ctx.flags = 0 ctx.subject_cert = None ctx.subject_req = None ctx.crl = None if issuer is None: ctx.issuer_cert = None # depends on [control=['if'], data=[]] else: ctx.issuer_cert = int(issuer.x509)
def remove_internal_subnet(self, context_id, subnet_id): """Remove an internal subnet from a tunnel context. :param int context_id: The id-value representing the context instance. :param int subnet_id: The id-value representing the internal subnet. :return bool: True if internal subnet removal was successful. """ return self.context.removePrivateSubnetFromNetworkTunnel(subnet_id, id=context_id)
def function[remove_internal_subnet, parameter[self, context_id, subnet_id]]: constant[Remove an internal subnet from a tunnel context. :param int context_id: The id-value representing the context instance. :param int subnet_id: The id-value representing the internal subnet. :return bool: True if internal subnet removal was successful. ] return[call[name[self].context.removePrivateSubnetFromNetworkTunnel, parameter[name[subnet_id]]]]
keyword[def] identifier[remove_internal_subnet] ( identifier[self] , identifier[context_id] , identifier[subnet_id] ): literal[string] keyword[return] identifier[self] . identifier[context] . identifier[removePrivateSubnetFromNetworkTunnel] ( identifier[subnet_id] , identifier[id] = identifier[context_id] )
def remove_internal_subnet(self, context_id, subnet_id): """Remove an internal subnet from a tunnel context. :param int context_id: The id-value representing the context instance. :param int subnet_id: The id-value representing the internal subnet. :return bool: True if internal subnet removal was successful. """ return self.context.removePrivateSubnetFromNetworkTunnel(subnet_id, id=context_id)
def create_parser(): """Creat a commandline parser for epubcheck :return Argumentparser: """ parser = ArgumentParser( prog='epubcheck', description="EpubCheck v%s - Validate your ebooks" % __version__ ) # Arguments parser.add_argument( 'path', nargs='?', default=getcwd(), help="Path to EPUB-file or folder for batch validation. " "The current directory will be processed if this argument " "is not specified." ) # Options parser.add_argument( '-x', '--xls', nargs='?', type=FileType(mode='wb'), const='epubcheck_report.xls', help='Create a detailed Excel report.' ) parser.add_argument( '-c', '--csv', nargs='?', type=FileType(mode='wb'), const='epubcheck_report.csv', help='Create a CSV report.' ) parser.add_argument( '-r', '--recursive', action='store_true', help='Recurse into subfolders.' ) return parser
def function[create_parser, parameter[]]: constant[Creat a commandline parser for epubcheck :return Argumentparser: ] variable[parser] assign[=] call[name[ArgumentParser], parameter[]] call[name[parser].add_argument, parameter[constant[path]]] call[name[parser].add_argument, parameter[constant[-x], constant[--xls]]] call[name[parser].add_argument, parameter[constant[-c], constant[--csv]]] call[name[parser].add_argument, parameter[constant[-r], constant[--recursive]]] return[name[parser]]
keyword[def] identifier[create_parser] (): literal[string] identifier[parser] = identifier[ArgumentParser] ( identifier[prog] = literal[string] , identifier[description] = literal[string] % identifier[__version__] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[default] = identifier[getcwd] (), identifier[help] = literal[string] literal[string] literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[string] , identifier[type] = identifier[FileType] ( identifier[mode] = literal[string] ), identifier[const] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[nargs] = literal[string] , identifier[type] = identifier[FileType] ( identifier[mode] = literal[string] ), identifier[const] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) keyword[return] identifier[parser]
def create_parser(): """Creat a commandline parser for epubcheck :return Argumentparser: """ parser = ArgumentParser(prog='epubcheck', description='EpubCheck v%s - Validate your ebooks' % __version__) # Arguments parser.add_argument('path', nargs='?', default=getcwd(), help='Path to EPUB-file or folder for batch validation. The current directory will be processed if this argument is not specified.') # Options parser.add_argument('-x', '--xls', nargs='?', type=FileType(mode='wb'), const='epubcheck_report.xls', help='Create a detailed Excel report.') parser.add_argument('-c', '--csv', nargs='?', type=FileType(mode='wb'), const='epubcheck_report.csv', help='Create a CSV report.') parser.add_argument('-r', '--recursive', action='store_true', help='Recurse into subfolders.') return parser
def databoxes(ds, xscript=0, yscript=1, eyscript=None, exscript=None, g=None, plotter=xy_data, transpose=False, **kwargs): """ Plots the listed databox objects with the specified scripts. ds list of databoxes xscript script for x data yscript script for y data eyscript script for y error exscript script for x error plotter function used to do the plotting transpose applies databox.transpose() prior to plotting g optional dictionary of globals for the supplied scripts **kwargs are sent to plotter() """ if not _fun.is_iterable(ds): ds = [ds] if 'xlabel' not in kwargs: kwargs['xlabel'] = str(xscript) if 'ylabel' not in kwargs: kwargs['ylabel'] = str(yscript) # First make sure everything is a list of scripts (or None's) if not _fun.is_iterable(xscript): xscript = [xscript] if not _fun.is_iterable(yscript): yscript = [yscript] if not _fun.is_iterable(exscript): exscript = [exscript] if not _fun.is_iterable(eyscript): eyscript = [eyscript] # make sure exscript matches shape with xscript (and the same for y) if len(exscript) < len(xscript): for n in range(len(xscript)-1): exscript.append(exscript[0]) if len(eyscript) < len(yscript): for n in range(len(yscript)-1): eyscript.append(eyscript[0]) # Make xscript and exscript match in shape with yscript and eyscript if len(xscript) < len(yscript): for n in range(len(yscript)-1): xscript.append(xscript[0]) exscript.append(exscript[0]) # check for the reverse possibility if len(yscript) < len(xscript): for n in range(len(xscript)-1): yscript.append(yscript[0]) eyscript.append(eyscript[0]) # now check for None's (counting scripts) for n in range(len(xscript)): if xscript[n] is None and yscript[n] is None: print("Two None scripts? But why?") return if xscript[n] is None: if type(yscript[n])==str: xscript[n] = 'range(len('+yscript[n]+'))' else: xscript[n] = 'range(len(c('+str(yscript[n])+')))' if yscript[n] is None: if type(xscript[n])==str: yscript[n] = 'range(len('+xscript[n]+'))' else: yscript[n] = 'range(len(c('+str(xscript[n])+')))' xdatas = [] ydatas = [] exdatas = [] eydatas = [] labels = [] # Loop over all the data boxes for i in range(len(ds)): # Reset the default globals all_globals = dict(n=i,m=len(ds)-1-i) # Update them with the user-specified globals if not g==None: all_globals.update(g) # For ease of coding d = ds[i] # Take the transpose if necessary if transpose: d = d.transpose() # Generate the x-data; returns a list of outputs, one for each xscript xdata = d(xscript, all_globals) # Loop over each xdata, appending to the master list, and generating a label for n in range(len(xdata)): xdatas.append(xdata[n]) if len(xdata)>1: labels.append(str(n)+": "+_os.path.split(d.path)[-1]) else: labels.append(_os.path.split(d.path)[-1]) # Append the other data sets to their master lists for y in d( yscript, all_globals): ydatas.append(y) for x in d(exscript, all_globals): exdatas.append(x) for y in d(eyscript, all_globals): eydatas.append(y) if "label" in kwargs: labels = kwargs.pop("label") plotter(xdatas, ydatas, eydatas, exdatas, label=labels, **kwargs)
def function[databoxes, parameter[ds, xscript, yscript, eyscript, exscript, g, plotter, transpose]]: constant[ Plots the listed databox objects with the specified scripts. ds list of databoxes xscript script for x data yscript script for y data eyscript script for y error exscript script for x error plotter function used to do the plotting transpose applies databox.transpose() prior to plotting g optional dictionary of globals for the supplied scripts **kwargs are sent to plotter() ] if <ast.UnaryOp object at 0x7da2043468c0> begin[:] variable[ds] assign[=] list[[<ast.Name object at 0x7da204344250>]] if compare[constant[xlabel] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] call[name[kwargs]][constant[xlabel]] assign[=] call[name[str], parameter[name[xscript]]] if compare[constant[ylabel] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] call[name[kwargs]][constant[ylabel]] assign[=] call[name[str], parameter[name[yscript]]] if <ast.UnaryOp object at 0x7da2043467d0> begin[:] variable[xscript] assign[=] list[[<ast.Name object at 0x7da2043445b0>]] if <ast.UnaryOp object at 0x7da2043479a0> begin[:] variable[yscript] assign[=] list[[<ast.Name object at 0x7da204345930>]] if <ast.UnaryOp object at 0x7da204345690> begin[:] variable[exscript] assign[=] list[[<ast.Name object at 0x7da204347d60>]] if <ast.UnaryOp object at 0x7da204344fd0> begin[:] variable[eyscript] assign[=] list[[<ast.Name object at 0x7da204344b50>]] if compare[call[name[len], parameter[name[exscript]]] less[<] call[name[len], parameter[name[xscript]]]] begin[:] for taget[name[n]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[xscript]]] - constant[1]]]]] begin[:] call[name[exscript].append, parameter[call[name[exscript]][constant[0]]]] if compare[call[name[len], parameter[name[eyscript]]] less[<] call[name[len], parameter[name[yscript]]]] begin[:] for taget[name[n]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[yscript]]] - constant[1]]]]] begin[:] call[name[eyscript].append, parameter[call[name[eyscript]][constant[0]]]] if compare[call[name[len], parameter[name[xscript]]] less[<] call[name[len], parameter[name[yscript]]]] begin[:] for taget[name[n]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[yscript]]] - constant[1]]]]] begin[:] call[name[xscript].append, parameter[call[name[xscript]][constant[0]]]] call[name[exscript].append, parameter[call[name[exscript]][constant[0]]]] if compare[call[name[len], parameter[name[yscript]]] less[<] call[name[len], parameter[name[xscript]]]] begin[:] for taget[name[n]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[xscript]]] - constant[1]]]]] begin[:] call[name[yscript].append, parameter[call[name[yscript]][constant[0]]]] call[name[eyscript].append, parameter[call[name[eyscript]][constant[0]]]] for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[xscript]]]]]] begin[:] if <ast.BoolOp object at 0x7da20c6e5420> begin[:] call[name[print], parameter[constant[Two None scripts? But why?]]] return[None] if compare[call[name[xscript]][name[n]] is constant[None]] begin[:] if compare[call[name[type], parameter[call[name[yscript]][name[n]]]] equal[==] name[str]] begin[:] call[name[xscript]][name[n]] assign[=] binary_operation[binary_operation[constant[range(len(] + call[name[yscript]][name[n]]] + constant[))]] if compare[call[name[yscript]][name[n]] is constant[None]] begin[:] if compare[call[name[type], parameter[call[name[xscript]][name[n]]]] equal[==] name[str]] begin[:] call[name[yscript]][name[n]] assign[=] binary_operation[binary_operation[constant[range(len(] + call[name[xscript]][name[n]]] + constant[))]] variable[xdatas] assign[=] list[[]] variable[ydatas] assign[=] list[[]] variable[exdatas] assign[=] list[[]] variable[eydatas] assign[=] list[[]] variable[labels] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[ds]]]]]] begin[:] variable[all_globals] assign[=] call[name[dict], parameter[]] if <ast.UnaryOp object at 0x7da20c6e53f0> begin[:] call[name[all_globals].update, parameter[name[g]]] variable[d] assign[=] call[name[ds]][name[i]] if name[transpose] begin[:] variable[d] assign[=] call[name[d].transpose, parameter[]] variable[xdata] assign[=] call[name[d], parameter[name[xscript], name[all_globals]]] for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[xdata]]]]]] begin[:] call[name[xdatas].append, parameter[call[name[xdata]][name[n]]]] if compare[call[name[len], parameter[name[xdata]]] greater[>] constant[1]] begin[:] call[name[labels].append, parameter[binary_operation[binary_operation[call[name[str], parameter[name[n]]] + constant[: ]] + call[call[name[_os].path.split, parameter[name[d].path]]][<ast.UnaryOp object at 0x7da1b1a47100>]]]] for taget[name[y]] in starred[call[name[d], parameter[name[yscript], name[all_globals]]]] begin[:] call[name[ydatas].append, parameter[name[y]]] for taget[name[x]] in starred[call[name[d], parameter[name[exscript], name[all_globals]]]] begin[:] call[name[exdatas].append, parameter[name[x]]] for taget[name[y]] in starred[call[name[d], parameter[name[eyscript], name[all_globals]]]] begin[:] call[name[eydatas].append, parameter[name[y]]] if compare[constant[label] in name[kwargs]] begin[:] variable[labels] assign[=] call[name[kwargs].pop, parameter[constant[label]]] call[name[plotter], parameter[name[xdatas], name[ydatas], name[eydatas], name[exdatas]]]
keyword[def] identifier[databoxes] ( identifier[ds] , identifier[xscript] = literal[int] , identifier[yscript] = literal[int] , identifier[eyscript] = keyword[None] , identifier[exscript] = keyword[None] , identifier[g] = keyword[None] , identifier[plotter] = identifier[xy_data] , identifier[transpose] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[_fun] . identifier[is_iterable] ( identifier[ds] ): identifier[ds] =[ identifier[ds] ] keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[str] ( identifier[xscript] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[str] ( identifier[yscript] ) keyword[if] keyword[not] identifier[_fun] . identifier[is_iterable] ( identifier[xscript] ): identifier[xscript] =[ identifier[xscript] ] keyword[if] keyword[not] identifier[_fun] . identifier[is_iterable] ( identifier[yscript] ): identifier[yscript] =[ identifier[yscript] ] keyword[if] keyword[not] identifier[_fun] . identifier[is_iterable] ( identifier[exscript] ): identifier[exscript] =[ identifier[exscript] ] keyword[if] keyword[not] identifier[_fun] . identifier[is_iterable] ( identifier[eyscript] ): identifier[eyscript] =[ identifier[eyscript] ] keyword[if] identifier[len] ( identifier[exscript] )< identifier[len] ( identifier[xscript] ): keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[xscript] )- literal[int] ): identifier[exscript] . identifier[append] ( identifier[exscript] [ literal[int] ]) keyword[if] identifier[len] ( identifier[eyscript] )< identifier[len] ( identifier[yscript] ): keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[yscript] )- literal[int] ): identifier[eyscript] . identifier[append] ( identifier[eyscript] [ literal[int] ]) keyword[if] identifier[len] ( identifier[xscript] )< identifier[len] ( identifier[yscript] ): keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[yscript] )- literal[int] ): identifier[xscript] . identifier[append] ( identifier[xscript] [ literal[int] ]) identifier[exscript] . identifier[append] ( identifier[exscript] [ literal[int] ]) keyword[if] identifier[len] ( identifier[yscript] )< identifier[len] ( identifier[xscript] ): keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[xscript] )- literal[int] ): identifier[yscript] . identifier[append] ( identifier[yscript] [ literal[int] ]) identifier[eyscript] . identifier[append] ( identifier[eyscript] [ literal[int] ]) keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[xscript] )): keyword[if] identifier[xscript] [ identifier[n] ] keyword[is] keyword[None] keyword[and] identifier[yscript] [ identifier[n] ] keyword[is] keyword[None] : identifier[print] ( literal[string] ) keyword[return] keyword[if] identifier[xscript] [ identifier[n] ] keyword[is] keyword[None] : keyword[if] identifier[type] ( identifier[yscript] [ identifier[n] ])== identifier[str] : identifier[xscript] [ identifier[n] ]= literal[string] + identifier[yscript] [ identifier[n] ]+ literal[string] keyword[else] : identifier[xscript] [ identifier[n] ]= literal[string] + identifier[str] ( identifier[yscript] [ identifier[n] ])+ literal[string] keyword[if] identifier[yscript] [ identifier[n] ] keyword[is] keyword[None] : keyword[if] identifier[type] ( identifier[xscript] [ identifier[n] ])== identifier[str] : identifier[yscript] [ identifier[n] ]= literal[string] + identifier[xscript] [ identifier[n] ]+ literal[string] keyword[else] : identifier[yscript] [ identifier[n] ]= literal[string] + identifier[str] ( identifier[xscript] [ identifier[n] ])+ literal[string] identifier[xdatas] =[] identifier[ydatas] =[] identifier[exdatas] =[] identifier[eydatas] =[] identifier[labels] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ds] )): identifier[all_globals] = identifier[dict] ( identifier[n] = identifier[i] , identifier[m] = identifier[len] ( identifier[ds] )- literal[int] - identifier[i] ) keyword[if] keyword[not] identifier[g] == keyword[None] : identifier[all_globals] . identifier[update] ( identifier[g] ) identifier[d] = identifier[ds] [ identifier[i] ] keyword[if] identifier[transpose] : identifier[d] = identifier[d] . identifier[transpose] () identifier[xdata] = identifier[d] ( identifier[xscript] , identifier[all_globals] ) keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[xdata] )): identifier[xdatas] . identifier[append] ( identifier[xdata] [ identifier[n] ]) keyword[if] identifier[len] ( identifier[xdata] )> literal[int] : identifier[labels] . identifier[append] ( identifier[str] ( identifier[n] )+ literal[string] + identifier[_os] . identifier[path] . identifier[split] ( identifier[d] . identifier[path] )[- literal[int] ]) keyword[else] : identifier[labels] . identifier[append] ( identifier[_os] . identifier[path] . identifier[split] ( identifier[d] . identifier[path] )[- literal[int] ]) keyword[for] identifier[y] keyword[in] identifier[d] ( identifier[yscript] , identifier[all_globals] ): identifier[ydatas] . identifier[append] ( identifier[y] ) keyword[for] identifier[x] keyword[in] identifier[d] ( identifier[exscript] , identifier[all_globals] ): identifier[exdatas] . identifier[append] ( identifier[x] ) keyword[for] identifier[y] keyword[in] identifier[d] ( identifier[eyscript] , identifier[all_globals] ): identifier[eydatas] . identifier[append] ( identifier[y] ) keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[labels] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[plotter] ( identifier[xdatas] , identifier[ydatas] , identifier[eydatas] , identifier[exdatas] , identifier[label] = identifier[labels] ,** identifier[kwargs] )
def databoxes(ds, xscript=0, yscript=1, eyscript=None, exscript=None, g=None, plotter=xy_data, transpose=False, **kwargs): """ Plots the listed databox objects with the specified scripts. ds list of databoxes xscript script for x data yscript script for y data eyscript script for y error exscript script for x error plotter function used to do the plotting transpose applies databox.transpose() prior to plotting g optional dictionary of globals for the supplied scripts **kwargs are sent to plotter() """ if not _fun.is_iterable(ds): ds = [ds] # depends on [control=['if'], data=[]] if 'xlabel' not in kwargs: kwargs['xlabel'] = str(xscript) # depends on [control=['if'], data=['kwargs']] if 'ylabel' not in kwargs: kwargs['ylabel'] = str(yscript) # depends on [control=['if'], data=['kwargs']] # First make sure everything is a list of scripts (or None's) if not _fun.is_iterable(xscript): xscript = [xscript] # depends on [control=['if'], data=[]] if not _fun.is_iterable(yscript): yscript = [yscript] # depends on [control=['if'], data=[]] if not _fun.is_iterable(exscript): exscript = [exscript] # depends on [control=['if'], data=[]] if not _fun.is_iterable(eyscript): eyscript = [eyscript] # depends on [control=['if'], data=[]] # make sure exscript matches shape with xscript (and the same for y) if len(exscript) < len(xscript): for n in range(len(xscript) - 1): exscript.append(exscript[0]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if len(eyscript) < len(yscript): for n in range(len(yscript) - 1): eyscript.append(eyscript[0]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # Make xscript and exscript match in shape with yscript and eyscript if len(xscript) < len(yscript): for n in range(len(yscript) - 1): xscript.append(xscript[0]) exscript.append(exscript[0]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # check for the reverse possibility if len(yscript) < len(xscript): for n in range(len(xscript) - 1): yscript.append(yscript[0]) eyscript.append(eyscript[0]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # now check for None's (counting scripts) for n in range(len(xscript)): if xscript[n] is None and yscript[n] is None: print('Two None scripts? But why?') return # depends on [control=['if'], data=[]] if xscript[n] is None: if type(yscript[n]) == str: xscript[n] = 'range(len(' + yscript[n] + '))' # depends on [control=['if'], data=[]] else: xscript[n] = 'range(len(c(' + str(yscript[n]) + ')))' # depends on [control=['if'], data=[]] if yscript[n] is None: if type(xscript[n]) == str: yscript[n] = 'range(len(' + xscript[n] + '))' # depends on [control=['if'], data=[]] else: yscript[n] = 'range(len(c(' + str(xscript[n]) + ')))' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']] xdatas = [] ydatas = [] exdatas = [] eydatas = [] labels = [] # Loop over all the data boxes for i in range(len(ds)): # Reset the default globals all_globals = dict(n=i, m=len(ds) - 1 - i) # Update them with the user-specified globals if not g == None: all_globals.update(g) # depends on [control=['if'], data=[]] # For ease of coding d = ds[i] # Take the transpose if necessary if transpose: d = d.transpose() # depends on [control=['if'], data=[]] # Generate the x-data; returns a list of outputs, one for each xscript xdata = d(xscript, all_globals) # Loop over each xdata, appending to the master list, and generating a label for n in range(len(xdata)): xdatas.append(xdata[n]) if len(xdata) > 1: labels.append(str(n) + ': ' + _os.path.split(d.path)[-1]) # depends on [control=['if'], data=[]] else: labels.append(_os.path.split(d.path)[-1]) # depends on [control=['for'], data=['n']] # Append the other data sets to their master lists for y in d(yscript, all_globals): ydatas.append(y) # depends on [control=['for'], data=['y']] for x in d(exscript, all_globals): exdatas.append(x) # depends on [control=['for'], data=['x']] for y in d(eyscript, all_globals): eydatas.append(y) # depends on [control=['for'], data=['y']] # depends on [control=['for'], data=['i']] if 'label' in kwargs: labels = kwargs.pop('label') # depends on [control=['if'], data=['kwargs']] plotter(xdatas, ydatas, eydatas, exdatas, label=labels, **kwargs)
def _plot_gene_groups_brackets(gene_groups_ax, group_positions, group_labels, left_adjustment=-0.3, right_adjustment=0.3, rotation=None, orientation='top'): """ Draws brackets that represent groups of genes on the give axis. For best results, this axis is located on top of an image whose x axis contains gene names. The gene_groups_ax should share the x axis with the main ax. Eg: gene_groups_ax = fig.add_subplot(axs[0, 0], sharex=dot_ax) This function is used by dotplot, heatmap etc. Parameters ---------- gene_groups_ax : matplotlib axis In this axis the gene marks are drawn group_positions : list of `tuples` Each item in the list, should contain the start and end position that the bracket should cover. Eg. [(0, 4), (5, 8)] means that there are two brackets, one for the var_names (eg genes) in positions 0-4 and other for positions 5-8 group_labels : list List of group labels left_adjustment : `float` adjustment to plot the bracket start slightly before or after the first gene position. If the value is negative the start is moved before. right_adjustment : `float` adjustment to plot the bracket end slightly before or after the last gene position If the value is negative the start is moved before. rotation : `float` (default None) rotation degrees for the labels. If not given, small labels (<4 characters) are not rotated, otherwise, they are rotated 90 degrees orientation : `str` (default `top`) location of the brackets. Either `top` or `right` Returns ------- None """ import matplotlib.patches as patches from matplotlib.path import Path # get the 'brackets' coordinates as lists of start and end positions left = [x[0] + left_adjustment for x in group_positions] right = [x[1] + right_adjustment for x in group_positions] # verts and codes are used by PathPatch to make the brackets verts = [] codes = [] if orientation == 'top': # rotate labels if any of them is longer than 4 characters if rotation is None and group_labels is not None and len(group_labels) > 0: if max([len(x) for x in group_labels]) > 4: rotation = 90 else: rotation = 0 for idx in range(len(left)): verts.append((left[idx], 0)) # lower-left verts.append((left[idx], 0.6)) # upper-left verts.append((right[idx], 0.6)) # upper-right verts.append((right[idx], 0)) # lower-right codes.append(Path.MOVETO) codes.append(Path.LINETO) codes.append(Path.LINETO) codes.append(Path.LINETO) try: group_x_center = left[idx] + float(right[idx] - left[idx]) / 2 gene_groups_ax.text(group_x_center, 1.1, group_labels[idx], ha='center', va='bottom', rotation=rotation) except: pass else: top = left bottom = right for idx in range(len(top)): verts.append((0, top[idx])) # upper-left verts.append((0.15, top[idx])) # upper-right verts.append((0.15, bottom[idx])) # lower-right verts.append((0, bottom[idx])) # lower-left codes.append(Path.MOVETO) codes.append(Path.LINETO) codes.append(Path.LINETO) codes.append(Path.LINETO) try: diff = bottom[idx] - top[idx] group_y_center = top[idx] + float(diff) / 2 if diff * 2 < len(group_labels[idx]): # cut label to fit available space group_labels[idx] = group_labels[idx][:int(diff * 2)] + "." gene_groups_ax.text(0.6, group_y_center, group_labels[idx], ha='right', va='center', rotation=270, fontsize='small') except Exception as e: print('problems {}'.format(e)) pass path = Path(verts, codes) patch = patches.PathPatch(path, facecolor='none', lw=1.5) gene_groups_ax.add_patch(patch) gene_groups_ax.spines['right'].set_visible(False) gene_groups_ax.spines['top'].set_visible(False) gene_groups_ax.spines['left'].set_visible(False) gene_groups_ax.spines['bottom'].set_visible(False) gene_groups_ax.grid(False) # remove y ticks gene_groups_ax.tick_params(axis='y', left=False, labelleft=False) # remove x ticks and labels gene_groups_ax.tick_params(axis='x', bottom=False, labelbottom=False, labeltop=False)
def function[_plot_gene_groups_brackets, parameter[gene_groups_ax, group_positions, group_labels, left_adjustment, right_adjustment, rotation, orientation]]: constant[ Draws brackets that represent groups of genes on the give axis. For best results, this axis is located on top of an image whose x axis contains gene names. The gene_groups_ax should share the x axis with the main ax. Eg: gene_groups_ax = fig.add_subplot(axs[0, 0], sharex=dot_ax) This function is used by dotplot, heatmap etc. Parameters ---------- gene_groups_ax : matplotlib axis In this axis the gene marks are drawn group_positions : list of `tuples` Each item in the list, should contain the start and end position that the bracket should cover. Eg. [(0, 4), (5, 8)] means that there are two brackets, one for the var_names (eg genes) in positions 0-4 and other for positions 5-8 group_labels : list List of group labels left_adjustment : `float` adjustment to plot the bracket start slightly before or after the first gene position. If the value is negative the start is moved before. right_adjustment : `float` adjustment to plot the bracket end slightly before or after the last gene position If the value is negative the start is moved before. rotation : `float` (default None) rotation degrees for the labels. If not given, small labels (<4 characters) are not rotated, otherwise, they are rotated 90 degrees orientation : `str` (default `top`) location of the brackets. Either `top` or `right` Returns ------- None ] import module[matplotlib.patches] as alias[patches] from relative_module[matplotlib.path] import module[Path] variable[left] assign[=] <ast.ListComp object at 0x7da1b2346aa0> variable[right] assign[=] <ast.ListComp object at 0x7da1b23471f0> variable[verts] assign[=] list[[]] variable[codes] assign[=] list[[]] if compare[name[orientation] equal[==] constant[top]] begin[:] if <ast.BoolOp object at 0x7da1b2346da0> begin[:] if compare[call[name[max], parameter[<ast.ListComp object at 0x7da1b2346020>]] greater[>] constant[4]] begin[:] variable[rotation] assign[=] constant[90] for taget[name[idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[left]]]]]] begin[:] call[name[verts].append, parameter[tuple[[<ast.Subscript object at 0x7da1b2346890>, <ast.Constant object at 0x7da1b2345a80>]]]] call[name[verts].append, parameter[tuple[[<ast.Subscript object at 0x7da1b2345510>, <ast.Constant object at 0x7da1b2344760>]]]] call[name[verts].append, parameter[tuple[[<ast.Subscript object at 0x7da1b2344f10>, <ast.Constant object at 0x7da1b2347040>]]]] call[name[verts].append, parameter[tuple[[<ast.Subscript object at 0x7da1b23446a0>, <ast.Constant object at 0x7da1b2344850>]]]] call[name[codes].append, parameter[name[Path].MOVETO]] call[name[codes].append, parameter[name[Path].LINETO]] call[name[codes].append, parameter[name[Path].LINETO]] call[name[codes].append, parameter[name[Path].LINETO]] <ast.Try object at 0x7da1b2345180> variable[path] assign[=] call[name[Path], parameter[name[verts], name[codes]]] variable[patch] assign[=] call[name[patches].PathPatch, parameter[name[path]]] call[name[gene_groups_ax].add_patch, parameter[name[patch]]] call[call[name[gene_groups_ax].spines][constant[right]].set_visible, parameter[constant[False]]] call[call[name[gene_groups_ax].spines][constant[top]].set_visible, parameter[constant[False]]] call[call[name[gene_groups_ax].spines][constant[left]].set_visible, parameter[constant[False]]] call[call[name[gene_groups_ax].spines][constant[bottom]].set_visible, parameter[constant[False]]] call[name[gene_groups_ax].grid, parameter[constant[False]]] call[name[gene_groups_ax].tick_params, parameter[]] call[name[gene_groups_ax].tick_params, parameter[]]
keyword[def] identifier[_plot_gene_groups_brackets] ( identifier[gene_groups_ax] , identifier[group_positions] , identifier[group_labels] , identifier[left_adjustment] =- literal[int] , identifier[right_adjustment] = literal[int] , identifier[rotation] = keyword[None] , identifier[orientation] = literal[string] ): literal[string] keyword[import] identifier[matplotlib] . identifier[patches] keyword[as] identifier[patches] keyword[from] identifier[matplotlib] . identifier[path] keyword[import] identifier[Path] identifier[left] =[ identifier[x] [ literal[int] ]+ identifier[left_adjustment] keyword[for] identifier[x] keyword[in] identifier[group_positions] ] identifier[right] =[ identifier[x] [ literal[int] ]+ identifier[right_adjustment] keyword[for] identifier[x] keyword[in] identifier[group_positions] ] identifier[verts] =[] identifier[codes] =[] keyword[if] identifier[orientation] == literal[string] : keyword[if] identifier[rotation] keyword[is] keyword[None] keyword[and] identifier[group_labels] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[group_labels] )> literal[int] : keyword[if] identifier[max] ([ identifier[len] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[group_labels] ])> literal[int] : identifier[rotation] = literal[int] keyword[else] : identifier[rotation] = literal[int] keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[left] )): identifier[verts] . identifier[append] (( identifier[left] [ identifier[idx] ], literal[int] )) identifier[verts] . identifier[append] (( identifier[left] [ identifier[idx] ], literal[int] )) identifier[verts] . identifier[append] (( identifier[right] [ identifier[idx] ], literal[int] )) identifier[verts] . identifier[append] (( identifier[right] [ identifier[idx] ], literal[int] )) identifier[codes] . identifier[append] ( identifier[Path] . identifier[MOVETO] ) identifier[codes] . identifier[append] ( identifier[Path] . identifier[LINETO] ) identifier[codes] . identifier[append] ( identifier[Path] . identifier[LINETO] ) identifier[codes] . identifier[append] ( identifier[Path] . identifier[LINETO] ) keyword[try] : identifier[group_x_center] = identifier[left] [ identifier[idx] ]+ identifier[float] ( identifier[right] [ identifier[idx] ]- identifier[left] [ identifier[idx] ])/ literal[int] identifier[gene_groups_ax] . identifier[text] ( identifier[group_x_center] , literal[int] , identifier[group_labels] [ identifier[idx] ], identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[rotation] = identifier[rotation] ) keyword[except] : keyword[pass] keyword[else] : identifier[top] = identifier[left] identifier[bottom] = identifier[right] keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[top] )): identifier[verts] . identifier[append] (( literal[int] , identifier[top] [ identifier[idx] ])) identifier[verts] . identifier[append] (( literal[int] , identifier[top] [ identifier[idx] ])) identifier[verts] . identifier[append] (( literal[int] , identifier[bottom] [ identifier[idx] ])) identifier[verts] . identifier[append] (( literal[int] , identifier[bottom] [ identifier[idx] ])) identifier[codes] . identifier[append] ( identifier[Path] . identifier[MOVETO] ) identifier[codes] . identifier[append] ( identifier[Path] . identifier[LINETO] ) identifier[codes] . identifier[append] ( identifier[Path] . identifier[LINETO] ) identifier[codes] . identifier[append] ( identifier[Path] . identifier[LINETO] ) keyword[try] : identifier[diff] = identifier[bottom] [ identifier[idx] ]- identifier[top] [ identifier[idx] ] identifier[group_y_center] = identifier[top] [ identifier[idx] ]+ identifier[float] ( identifier[diff] )/ literal[int] keyword[if] identifier[diff] * literal[int] < identifier[len] ( identifier[group_labels] [ identifier[idx] ]): identifier[group_labels] [ identifier[idx] ]= identifier[group_labels] [ identifier[idx] ][: identifier[int] ( identifier[diff] * literal[int] )]+ literal[string] identifier[gene_groups_ax] . identifier[text] ( literal[int] , identifier[group_y_center] , identifier[group_labels] [ identifier[idx] ], identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[rotation] = literal[int] , identifier[fontsize] = literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( literal[string] . identifier[format] ( identifier[e] )) keyword[pass] identifier[path] = identifier[Path] ( identifier[verts] , identifier[codes] ) identifier[patch] = identifier[patches] . identifier[PathPatch] ( identifier[path] , identifier[facecolor] = literal[string] , identifier[lw] = literal[int] ) identifier[gene_groups_ax] . identifier[add_patch] ( identifier[patch] ) identifier[gene_groups_ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] ) identifier[gene_groups_ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] ) identifier[gene_groups_ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] ) identifier[gene_groups_ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] ) identifier[gene_groups_ax] . identifier[grid] ( keyword[False] ) identifier[gene_groups_ax] . identifier[tick_params] ( identifier[axis] = literal[string] , identifier[left] = keyword[False] , identifier[labelleft] = keyword[False] ) identifier[gene_groups_ax] . identifier[tick_params] ( identifier[axis] = literal[string] , identifier[bottom] = keyword[False] , identifier[labelbottom] = keyword[False] , identifier[labeltop] = keyword[False] )
def _plot_gene_groups_brackets(gene_groups_ax, group_positions, group_labels, left_adjustment=-0.3, right_adjustment=0.3, rotation=None, orientation='top'): """ Draws brackets that represent groups of genes on the give axis. For best results, this axis is located on top of an image whose x axis contains gene names. The gene_groups_ax should share the x axis with the main ax. Eg: gene_groups_ax = fig.add_subplot(axs[0, 0], sharex=dot_ax) This function is used by dotplot, heatmap etc. Parameters ---------- gene_groups_ax : matplotlib axis In this axis the gene marks are drawn group_positions : list of `tuples` Each item in the list, should contain the start and end position that the bracket should cover. Eg. [(0, 4), (5, 8)] means that there are two brackets, one for the var_names (eg genes) in positions 0-4 and other for positions 5-8 group_labels : list List of group labels left_adjustment : `float` adjustment to plot the bracket start slightly before or after the first gene position. If the value is negative the start is moved before. right_adjustment : `float` adjustment to plot the bracket end slightly before or after the last gene position If the value is negative the start is moved before. rotation : `float` (default None) rotation degrees for the labels. If not given, small labels (<4 characters) are not rotated, otherwise, they are rotated 90 degrees orientation : `str` (default `top`) location of the brackets. Either `top` or `right` Returns ------- None """ import matplotlib.patches as patches from matplotlib.path import Path # get the 'brackets' coordinates as lists of start and end positions left = [x[0] + left_adjustment for x in group_positions] right = [x[1] + right_adjustment for x in group_positions] # verts and codes are used by PathPatch to make the brackets verts = [] codes = [] if orientation == 'top': # rotate labels if any of them is longer than 4 characters if rotation is None and group_labels is not None and (len(group_labels) > 0): if max([len(x) for x in group_labels]) > 4: rotation = 90 # depends on [control=['if'], data=[]] else: rotation = 0 # depends on [control=['if'], data=[]] for idx in range(len(left)): verts.append((left[idx], 0)) # lower-left verts.append((left[idx], 0.6)) # upper-left verts.append((right[idx], 0.6)) # upper-right verts.append((right[idx], 0)) # lower-right codes.append(Path.MOVETO) codes.append(Path.LINETO) codes.append(Path.LINETO) codes.append(Path.LINETO) try: group_x_center = left[idx] + float(right[idx] - left[idx]) / 2 gene_groups_ax.text(group_x_center, 1.1, group_labels[idx], ha='center', va='bottom', rotation=rotation) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['idx']] # depends on [control=['if'], data=[]] else: top = left bottom = right for idx in range(len(top)): verts.append((0, top[idx])) # upper-left verts.append((0.15, top[idx])) # upper-right verts.append((0.15, bottom[idx])) # lower-right verts.append((0, bottom[idx])) # lower-left codes.append(Path.MOVETO) codes.append(Path.LINETO) codes.append(Path.LINETO) codes.append(Path.LINETO) try: diff = bottom[idx] - top[idx] group_y_center = top[idx] + float(diff) / 2 if diff * 2 < len(group_labels[idx]): # cut label to fit available space group_labels[idx] = group_labels[idx][:int(diff * 2)] + '.' # depends on [control=['if'], data=[]] gene_groups_ax.text(0.6, group_y_center, group_labels[idx], ha='right', va='center', rotation=270, fontsize='small') # depends on [control=['try'], data=[]] except Exception as e: print('problems {}'.format(e)) pass # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['idx']] path = Path(verts, codes) patch = patches.PathPatch(path, facecolor='none', lw=1.5) gene_groups_ax.add_patch(patch) gene_groups_ax.spines['right'].set_visible(False) gene_groups_ax.spines['top'].set_visible(False) gene_groups_ax.spines['left'].set_visible(False) gene_groups_ax.spines['bottom'].set_visible(False) gene_groups_ax.grid(False) # remove y ticks gene_groups_ax.tick_params(axis='y', left=False, labelleft=False) # remove x ticks and labels gene_groups_ax.tick_params(axis='x', bottom=False, labelbottom=False, labeltop=False)
def _handle_type_decl(self, node, scope, ctxt, stream): """TODO: Docstring for _handle_type_decl. :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog("handling type decl") decl = self._handle_node(node.type, scope, ctxt, stream) return decl
def function[_handle_type_decl, parameter[self, node, scope, ctxt, stream]]: constant[TODO: Docstring for _handle_type_decl. :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO ] call[name[self]._dlog, parameter[constant[handling type decl]]] variable[decl] assign[=] call[name[self]._handle_node, parameter[name[node].type, name[scope], name[ctxt], name[stream]]] return[name[decl]]
keyword[def] identifier[_handle_type_decl] ( identifier[self] , identifier[node] , identifier[scope] , identifier[ctxt] , identifier[stream] ): literal[string] identifier[self] . identifier[_dlog] ( literal[string] ) identifier[decl] = identifier[self] . identifier[_handle_node] ( identifier[node] . identifier[type] , identifier[scope] , identifier[ctxt] , identifier[stream] ) keyword[return] identifier[decl]
def _handle_type_decl(self, node, scope, ctxt, stream): """TODO: Docstring for _handle_type_decl. :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog('handling type decl') decl = self._handle_node(node.type, scope, ctxt, stream) return decl
def calculate_gradient(self, batch_info, device, model, rollout): """ Calculate loss of the supplied rollout """ evaluator = model.evaluate(rollout) dones_tensor = evaluator.get('rollout:dones') rewards_tensor = evaluator.get('rollout:rewards') assert dones_tensor.dtype == torch.float32 with torch.no_grad(): target_evaluator = self.target_model.evaluate(rollout) if self.double_dqn: # DOUBLE DQN target_q = target_evaluator.get('model:q_next') model_q = evaluator.get('model:q_next') # Select largest 'target' value based on action that 'model' selects values = target_q.gather(1, model_q.argmax(dim=1, keepdim=True)).squeeze(1) else: # REGULAR DQN # [0] is because in pytorch .max(...) returns tuple (max values, argmax) values = target_evaluator.get('model:q_next').max(dim=1)[0] forward_steps = rollout.extra_data.get('forward_steps', 1) estimated_return = rewards_tensor + (self.discount_factor ** forward_steps) * values * (1 - dones_tensor) q_selected = evaluator.get('model:action:q') if evaluator.is_provided('rollout:weights'): weights = evaluator.get('rollout:weights') else: weights = torch.ones_like(rewards_tensor) original_losses = F.smooth_l1_loss(q_selected, estimated_return, reduction='none') loss_value = torch.mean(weights * original_losses) loss_value.backward() return { 'loss': loss_value.item(), # We need it to update priorities in the replay buffer: 'errors': original_losses.detach().cpu().numpy(), 'average_q_selected': torch.mean(q_selected).item(), 'average_q_target': torch.mean(estimated_return).item() }
def function[calculate_gradient, parameter[self, batch_info, device, model, rollout]]: constant[ Calculate loss of the supplied rollout ] variable[evaluator] assign[=] call[name[model].evaluate, parameter[name[rollout]]] variable[dones_tensor] assign[=] call[name[evaluator].get, parameter[constant[rollout:dones]]] variable[rewards_tensor] assign[=] call[name[evaluator].get, parameter[constant[rollout:rewards]]] assert[compare[name[dones_tensor].dtype equal[==] name[torch].float32]] with call[name[torch].no_grad, parameter[]] begin[:] variable[target_evaluator] assign[=] call[name[self].target_model.evaluate, parameter[name[rollout]]] if name[self].double_dqn begin[:] variable[target_q] assign[=] call[name[target_evaluator].get, parameter[constant[model:q_next]]] variable[model_q] assign[=] call[name[evaluator].get, parameter[constant[model:q_next]]] variable[values] assign[=] call[call[name[target_q].gather, parameter[constant[1], call[name[model_q].argmax, parameter[]]]].squeeze, parameter[constant[1]]] variable[forward_steps] assign[=] call[name[rollout].extra_data.get, parameter[constant[forward_steps], constant[1]]] variable[estimated_return] assign[=] binary_operation[name[rewards_tensor] + binary_operation[binary_operation[binary_operation[name[self].discount_factor ** name[forward_steps]] * name[values]] * binary_operation[constant[1] - name[dones_tensor]]]] variable[q_selected] assign[=] call[name[evaluator].get, parameter[constant[model:action:q]]] if call[name[evaluator].is_provided, parameter[constant[rollout:weights]]] begin[:] variable[weights] assign[=] call[name[evaluator].get, parameter[constant[rollout:weights]]] variable[original_losses] assign[=] call[name[F].smooth_l1_loss, parameter[name[q_selected], name[estimated_return]]] variable[loss_value] assign[=] call[name[torch].mean, parameter[binary_operation[name[weights] * name[original_losses]]]] call[name[loss_value].backward, parameter[]] return[dictionary[[<ast.Constant object at 0x7da1b15f0a00>, <ast.Constant object at 0x7da1b15f0ac0>, <ast.Constant object at 0x7da1b15f1090>, <ast.Constant object at 0x7da1b15f14b0>], [<ast.Call object at 0x7da1b15f2c50>, <ast.Call object at 0x7da1b15f0730>, <ast.Call object at 0x7da1b15f08e0>, <ast.Call object at 0x7da1b15f2770>]]]
keyword[def] identifier[calculate_gradient] ( identifier[self] , identifier[batch_info] , identifier[device] , identifier[model] , identifier[rollout] ): literal[string] identifier[evaluator] = identifier[model] . identifier[evaluate] ( identifier[rollout] ) identifier[dones_tensor] = identifier[evaluator] . identifier[get] ( literal[string] ) identifier[rewards_tensor] = identifier[evaluator] . identifier[get] ( literal[string] ) keyword[assert] identifier[dones_tensor] . identifier[dtype] == identifier[torch] . identifier[float32] keyword[with] identifier[torch] . identifier[no_grad] (): identifier[target_evaluator] = identifier[self] . identifier[target_model] . identifier[evaluate] ( identifier[rollout] ) keyword[if] identifier[self] . identifier[double_dqn] : identifier[target_q] = identifier[target_evaluator] . identifier[get] ( literal[string] ) identifier[model_q] = identifier[evaluator] . identifier[get] ( literal[string] ) identifier[values] = identifier[target_q] . identifier[gather] ( literal[int] , identifier[model_q] . identifier[argmax] ( identifier[dim] = literal[int] , identifier[keepdim] = keyword[True] )). identifier[squeeze] ( literal[int] ) keyword[else] : identifier[values] = identifier[target_evaluator] . identifier[get] ( literal[string] ). identifier[max] ( identifier[dim] = literal[int] )[ literal[int] ] identifier[forward_steps] = identifier[rollout] . identifier[extra_data] . identifier[get] ( literal[string] , literal[int] ) identifier[estimated_return] = identifier[rewards_tensor] +( identifier[self] . identifier[discount_factor] ** identifier[forward_steps] )* identifier[values] *( literal[int] - identifier[dones_tensor] ) identifier[q_selected] = identifier[evaluator] . identifier[get] ( literal[string] ) keyword[if] identifier[evaluator] . identifier[is_provided] ( literal[string] ): identifier[weights] = identifier[evaluator] . identifier[get] ( literal[string] ) keyword[else] : identifier[weights] = identifier[torch] . identifier[ones_like] ( identifier[rewards_tensor] ) identifier[original_losses] = identifier[F] . identifier[smooth_l1_loss] ( identifier[q_selected] , identifier[estimated_return] , identifier[reduction] = literal[string] ) identifier[loss_value] = identifier[torch] . identifier[mean] ( identifier[weights] * identifier[original_losses] ) identifier[loss_value] . identifier[backward] () keyword[return] { literal[string] : identifier[loss_value] . identifier[item] (), literal[string] : identifier[original_losses] . identifier[detach] (). identifier[cpu] (). identifier[numpy] (), literal[string] : identifier[torch] . identifier[mean] ( identifier[q_selected] ). identifier[item] (), literal[string] : identifier[torch] . identifier[mean] ( identifier[estimated_return] ). identifier[item] () }
def calculate_gradient(self, batch_info, device, model, rollout): """ Calculate loss of the supplied rollout """ evaluator = model.evaluate(rollout) dones_tensor = evaluator.get('rollout:dones') rewards_tensor = evaluator.get('rollout:rewards') assert dones_tensor.dtype == torch.float32 with torch.no_grad(): target_evaluator = self.target_model.evaluate(rollout) if self.double_dqn: # DOUBLE DQN target_q = target_evaluator.get('model:q_next') model_q = evaluator.get('model:q_next') # Select largest 'target' value based on action that 'model' selects values = target_q.gather(1, model_q.argmax(dim=1, keepdim=True)).squeeze(1) # depends on [control=['if'], data=[]] else: # REGULAR DQN # [0] is because in pytorch .max(...) returns tuple (max values, argmax) values = target_evaluator.get('model:q_next').max(dim=1)[0] forward_steps = rollout.extra_data.get('forward_steps', 1) estimated_return = rewards_tensor + self.discount_factor ** forward_steps * values * (1 - dones_tensor) # depends on [control=['with'], data=[]] q_selected = evaluator.get('model:action:q') if evaluator.is_provided('rollout:weights'): weights = evaluator.get('rollout:weights') # depends on [control=['if'], data=[]] else: weights = torch.ones_like(rewards_tensor) original_losses = F.smooth_l1_loss(q_selected, estimated_return, reduction='none') loss_value = torch.mean(weights * original_losses) loss_value.backward() # We need it to update priorities in the replay buffer: return {'loss': loss_value.item(), 'errors': original_losses.detach().cpu().numpy(), 'average_q_selected': torch.mean(q_selected).item(), 'average_q_target': torch.mean(estimated_return).item()}
def notify_height_changed(self): """ Called by a row when its height changes, triggering the graphic frame to recalculate its total height (as the sum of the row heights). """ new_table_height = sum([row.height for row in self.rows]) self._graphic_frame.height = new_table_height
def function[notify_height_changed, parameter[self]]: constant[ Called by a row when its height changes, triggering the graphic frame to recalculate its total height (as the sum of the row heights). ] variable[new_table_height] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da204961c60>]] name[self]._graphic_frame.height assign[=] name[new_table_height]
keyword[def] identifier[notify_height_changed] ( identifier[self] ): literal[string] identifier[new_table_height] = identifier[sum] ([ identifier[row] . identifier[height] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[rows] ]) identifier[self] . identifier[_graphic_frame] . identifier[height] = identifier[new_table_height]
def notify_height_changed(self): """ Called by a row when its height changes, triggering the graphic frame to recalculate its total height (as the sum of the row heights). """ new_table_height = sum([row.height for row in self.rows]) self._graphic_frame.height = new_table_height
def flatten(nested, containers=(list, tuple)): """ Flatten a nested list in-place and return it. """ flat = list(nested) # handle iterators / generators i = 0 while i < len(flat): while isinstance(flat[i], containers): if not flat[i]: # kill empty list flat.pop(i) # inspect new 'i'th element in outer loop i -= 1 break else: flat[i:i + 1] = (flat[i]) # 'i'th element is scalar, proceed i += 1 return flat
def function[flatten, parameter[nested, containers]]: constant[ Flatten a nested list in-place and return it. ] variable[flat] assign[=] call[name[list], parameter[name[nested]]] variable[i] assign[=] constant[0] while compare[name[i] less[<] call[name[len], parameter[name[flat]]]] begin[:] while call[name[isinstance], parameter[call[name[flat]][name[i]], name[containers]]] begin[:] if <ast.UnaryOp object at 0x7da1b13a4c70> begin[:] call[name[flat].pop, parameter[name[i]]] <ast.AugAssign object at 0x7da1b13a4a90> break <ast.AugAssign object at 0x7da1b13a5600> return[name[flat]]
keyword[def] identifier[flatten] ( identifier[nested] , identifier[containers] =( identifier[list] , identifier[tuple] )): literal[string] identifier[flat] = identifier[list] ( identifier[nested] ) identifier[i] = literal[int] keyword[while] identifier[i] < identifier[len] ( identifier[flat] ): keyword[while] identifier[isinstance] ( identifier[flat] [ identifier[i] ], identifier[containers] ): keyword[if] keyword[not] identifier[flat] [ identifier[i] ]: identifier[flat] . identifier[pop] ( identifier[i] ) identifier[i] -= literal[int] keyword[break] keyword[else] : identifier[flat] [ identifier[i] : identifier[i] + literal[int] ]=( identifier[flat] [ identifier[i] ]) identifier[i] += literal[int] keyword[return] identifier[flat]
def flatten(nested, containers=(list, tuple)): """ Flatten a nested list in-place and return it. """ flat = list(nested) # handle iterators / generators i = 0 while i < len(flat): while isinstance(flat[i], containers): if not flat[i]: # kill empty list flat.pop(i) # inspect new 'i'th element in outer loop i -= 1 break # depends on [control=['if'], data=[]] else: flat[i:i + 1] = flat[i] # depends on [control=['while'], data=[]] # 'i'th element is scalar, proceed i += 1 # depends on [control=['while'], data=['i']] return flat
def fill_holes(mesh): """ Fill single- triangle holes on triangular meshes by adding new triangles to fill the holes. New triangles will have proper winding and normals, and if face colors exist the color of the last face will be assigned to the new triangles. Parameters --------- mesh : trimesh.Trimesh Mesh will be repaired in- place """ def hole_to_faces(hole): """ Given a loop of vertex indices representing a hole, turn it into triangular faces. If unable to do so, return None Parameters --------- hole: ordered loop of vertex indices Returns --------- (n, 3) new faces (m, 3) new vertices """ hole = np.asanyarray(hole) # the case where the hole is just a single missing triangle if len(hole) == 3: return [hole], [] # the hole is a quad, which we fill with two triangles if len(hole) == 4: face_A = hole[[0, 1, 2]] face_B = hole[[2, 3, 0]] return [face_A, face_B], [] return [], [] if len(mesh.faces) < 3: return False if mesh.is_watertight: return True # we know that in a watertight mesh every edge will be included twice # thus every edge which appears only once is part of a hole boundary boundary_groups = group_rows(mesh.edges_sorted, require_count=1) if len(boundary_groups) < 3: watertight = len(boundary_groups) == 0 return watertight boundary_edges = mesh.edges[boundary_groups] index_as_dict = [{'index': i} for i in boundary_groups] # we create a graph of the boundary edges, and find cycles. g = nx.from_edgelist(np.column_stack((boundary_edges, index_as_dict))) cycles = np.array(nx.cycle_basis(g)) new_faces = [] new_vertex = [] for hole in cycles: # convert the hole, which is a polygon of vertex indices # to triangles and new vertices faces, vertex = hole_to_faces(hole=hole) if len(faces) == 0: continue # remeshing returns new vertices as negative indices, so change those # to absolute indices which won't be screwed up by the later appends faces = np.array(faces) faces[faces < 0] += len(new_vertex) + len(mesh.vertices) + len(vertex) new_vertex.extend(vertex) new_faces.extend(faces) new_faces = np.array(new_faces) new_vertex = np.array(new_vertex) if len(new_faces) == 0: # no new faces have been added, so nothing further to do # the mesh is NOT watertight, as boundary groups exist # but we didn't add any new faces to fill them in return False for face_index, face in enumerate(new_faces): # we compare the edge from the new face with # the boundary edge from the source mesh edge_test = face[0:2] edge_boundary = mesh.edges[g.get_edge_data(*edge_test)['index']] # in a well construced mesh, the winding is such that adjacent triangles # have reversed edges to each other. Here we check to make sure the # edges are reversed, and if they aren't we simply reverse the face reversed = edge_test[0] == edge_boundary[1] if not reversed: new_faces[face_index] = face[::-1] # stack vertices into clean (n, 3) float if len(new_vertex) != 0: new_vertices = np.vstack((mesh.vertices, new_vertex)) else: new_vertices = mesh.vertices # try to save face normals if we can if 'face_normals' in mesh._cache.cache: cached_normals = mesh._cache.cache['face_normals'] else: cached_normals = None # also we can remove any zero are triangles by masking here new_normals, valid = triangles.normals(new_vertices[new_faces]) # all the added faces were broken if not valid.any(): return False # apply the new faces and vertices mesh.faces = np.vstack((mesh._data['faces'], new_faces[valid])) mesh.vertices = new_vertices # dump the cache and set id to the new hash mesh._cache.verify() # save us a normals recompute if we can if cached_normals is not None: mesh.face_normals = np.vstack((cached_normals, new_normals)) # this is usually the case where two vertices of a triangle are just # over tol.merge apart, but the normal calculation is screwed up # these could be fixed by merging the vertices in question here: # if not valid.all(): if mesh.visual.defined and mesh.visual.kind == 'face': # if face colors exist, assign the last face color to the new faces # note that this is a little cheesey, but it is very inexpensive and # is the right thing to do if the mesh is a single color. stored = mesh.visual._data['face_colors'] color_shape = np.shape(stored) if len(color_shape) == 2: new_colors = np.tile(stored[-1], (np.sum(valid), 1)) new_colors = np.vstack((stored, new_colors)) mesh.visual.face_colors = new_colors log.debug('Filled in mesh with %i triangles', np.sum(valid)) return mesh.is_watertight
def function[fill_holes, parameter[mesh]]: constant[ Fill single- triangle holes on triangular meshes by adding new triangles to fill the holes. New triangles will have proper winding and normals, and if face colors exist the color of the last face will be assigned to the new triangles. Parameters --------- mesh : trimesh.Trimesh Mesh will be repaired in- place ] def function[hole_to_faces, parameter[hole]]: constant[ Given a loop of vertex indices representing a hole, turn it into triangular faces. If unable to do so, return None Parameters --------- hole: ordered loop of vertex indices Returns --------- (n, 3) new faces (m, 3) new vertices ] variable[hole] assign[=] call[name[np].asanyarray, parameter[name[hole]]] if compare[call[name[len], parameter[name[hole]]] equal[==] constant[3]] begin[:] return[tuple[[<ast.List object at 0x7da18bc73d60>, <ast.List object at 0x7da18bc738b0>]]] if compare[call[name[len], parameter[name[hole]]] equal[==] constant[4]] begin[:] variable[face_A] assign[=] call[name[hole]][list[[<ast.Constant object at 0x7da18bc708e0>, <ast.Constant object at 0x7da18bc711b0>, <ast.Constant object at 0x7da18bc700d0>]]] variable[face_B] assign[=] call[name[hole]][list[[<ast.Constant object at 0x7da18bc727a0>, <ast.Constant object at 0x7da18bc70b20>, <ast.Constant object at 0x7da18bc70250>]]] return[tuple[[<ast.List object at 0x7da18bc73f40>, <ast.List object at 0x7da18bc72200>]]] return[tuple[[<ast.List object at 0x7da18bc73be0>, <ast.List object at 0x7da18bc713c0>]]] if compare[call[name[len], parameter[name[mesh].faces]] less[<] constant[3]] begin[:] return[constant[False]] if name[mesh].is_watertight begin[:] return[constant[True]] variable[boundary_groups] assign[=] call[name[group_rows], parameter[name[mesh].edges_sorted]] if compare[call[name[len], parameter[name[boundary_groups]]] less[<] constant[3]] begin[:] variable[watertight] assign[=] compare[call[name[len], parameter[name[boundary_groups]]] equal[==] constant[0]] return[name[watertight]] variable[boundary_edges] assign[=] call[name[mesh].edges][name[boundary_groups]] variable[index_as_dict] assign[=] <ast.ListComp object at 0x7da18bc73220> variable[g] assign[=] call[name[nx].from_edgelist, parameter[call[name[np].column_stack, parameter[tuple[[<ast.Name object at 0x7da18bc716c0>, <ast.Name object at 0x7da18bc72080>]]]]]] variable[cycles] assign[=] call[name[np].array, parameter[call[name[nx].cycle_basis, parameter[name[g]]]]] variable[new_faces] assign[=] list[[]] variable[new_vertex] assign[=] list[[]] for taget[name[hole]] in starred[name[cycles]] begin[:] <ast.Tuple object at 0x7da20e9b30a0> assign[=] call[name[hole_to_faces], parameter[]] if compare[call[name[len], parameter[name[faces]]] equal[==] constant[0]] begin[:] continue variable[faces] assign[=] call[name[np].array, parameter[name[faces]]] <ast.AugAssign object at 0x7da20e9b2110> call[name[new_vertex].extend, parameter[name[vertex]]] call[name[new_faces].extend, parameter[name[faces]]] variable[new_faces] assign[=] call[name[np].array, parameter[name[new_faces]]] variable[new_vertex] assign[=] call[name[np].array, parameter[name[new_vertex]]] if compare[call[name[len], parameter[name[new_faces]]] equal[==] constant[0]] begin[:] return[constant[False]] for taget[tuple[[<ast.Name object at 0x7da20e9b1ab0>, <ast.Name object at 0x7da20e9b1780>]]] in starred[call[name[enumerate], parameter[name[new_faces]]]] begin[:] variable[edge_test] assign[=] call[name[face]][<ast.Slice object at 0x7da20e9b1c60>] variable[edge_boundary] assign[=] call[name[mesh].edges][call[call[name[g].get_edge_data, parameter[<ast.Starred object at 0x7da20e9b3ee0>]]][constant[index]]] variable[reversed] assign[=] compare[call[name[edge_test]][constant[0]] equal[==] call[name[edge_boundary]][constant[1]]] if <ast.UnaryOp object at 0x7da20e9b19f0> begin[:] call[name[new_faces]][name[face_index]] assign[=] call[name[face]][<ast.Slice object at 0x7da20e9b0f10>] if compare[call[name[len], parameter[name[new_vertex]]] not_equal[!=] constant[0]] begin[:] variable[new_vertices] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Attribute object at 0x7da20e9b3910>, <ast.Name object at 0x7da20e9b29b0>]]]] if compare[constant[face_normals] in name[mesh]._cache.cache] begin[:] variable[cached_normals] assign[=] call[name[mesh]._cache.cache][constant[face_normals]] <ast.Tuple object at 0x7da20c9903a0> assign[=] call[name[triangles].normals, parameter[call[name[new_vertices]][name[new_faces]]]] if <ast.UnaryOp object at 0x7da20c9924a0> begin[:] return[constant[False]] name[mesh].faces assign[=] call[name[np].vstack, parameter[tuple[[<ast.Subscript object at 0x7da20c993a30>, <ast.Subscript object at 0x7da20c991330>]]]] name[mesh].vertices assign[=] name[new_vertices] call[name[mesh]._cache.verify, parameter[]] if compare[name[cached_normals] is_not constant[None]] begin[:] name[mesh].face_normals assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da20c9917b0>, <ast.Name object at 0x7da20c990610>]]]] if <ast.BoolOp object at 0x7da20c993df0> begin[:] variable[stored] assign[=] call[name[mesh].visual._data][constant[face_colors]] variable[color_shape] assign[=] call[name[np].shape, parameter[name[stored]]] if compare[call[name[len], parameter[name[color_shape]]] equal[==] constant[2]] begin[:] variable[new_colors] assign[=] call[name[np].tile, parameter[call[name[stored]][<ast.UnaryOp object at 0x7da20c990c10>], tuple[[<ast.Call object at 0x7da20c993be0>, <ast.Constant object at 0x7da20c990400>]]]] variable[new_colors] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da20c993f10>, <ast.Name object at 0x7da20c9905b0>]]]] name[mesh].visual.face_colors assign[=] name[new_colors] call[name[log].debug, parameter[constant[Filled in mesh with %i triangles], call[name[np].sum, parameter[name[valid]]]]] return[name[mesh].is_watertight]
keyword[def] identifier[fill_holes] ( identifier[mesh] ): literal[string] keyword[def] identifier[hole_to_faces] ( identifier[hole] ): literal[string] identifier[hole] = identifier[np] . identifier[asanyarray] ( identifier[hole] ) keyword[if] identifier[len] ( identifier[hole] )== literal[int] : keyword[return] [ identifier[hole] ],[] keyword[if] identifier[len] ( identifier[hole] )== literal[int] : identifier[face_A] = identifier[hole] [[ literal[int] , literal[int] , literal[int] ]] identifier[face_B] = identifier[hole] [[ literal[int] , literal[int] , literal[int] ]] keyword[return] [ identifier[face_A] , identifier[face_B] ],[] keyword[return] [],[] keyword[if] identifier[len] ( identifier[mesh] . identifier[faces] )< literal[int] : keyword[return] keyword[False] keyword[if] identifier[mesh] . identifier[is_watertight] : keyword[return] keyword[True] identifier[boundary_groups] = identifier[group_rows] ( identifier[mesh] . identifier[edges_sorted] , identifier[require_count] = literal[int] ) keyword[if] identifier[len] ( identifier[boundary_groups] )< literal[int] : identifier[watertight] = identifier[len] ( identifier[boundary_groups] )== literal[int] keyword[return] identifier[watertight] identifier[boundary_edges] = identifier[mesh] . identifier[edges] [ identifier[boundary_groups] ] identifier[index_as_dict] =[{ literal[string] : identifier[i] } keyword[for] identifier[i] keyword[in] identifier[boundary_groups] ] identifier[g] = identifier[nx] . identifier[from_edgelist] ( identifier[np] . identifier[column_stack] (( identifier[boundary_edges] , identifier[index_as_dict] ))) identifier[cycles] = identifier[np] . identifier[array] ( identifier[nx] . identifier[cycle_basis] ( identifier[g] )) identifier[new_faces] =[] identifier[new_vertex] =[] keyword[for] identifier[hole] keyword[in] identifier[cycles] : identifier[faces] , identifier[vertex] = identifier[hole_to_faces] ( identifier[hole] = identifier[hole] ) keyword[if] identifier[len] ( identifier[faces] )== literal[int] : keyword[continue] identifier[faces] = identifier[np] . identifier[array] ( identifier[faces] ) identifier[faces] [ identifier[faces] < literal[int] ]+= identifier[len] ( identifier[new_vertex] )+ identifier[len] ( identifier[mesh] . identifier[vertices] )+ identifier[len] ( identifier[vertex] ) identifier[new_vertex] . identifier[extend] ( identifier[vertex] ) identifier[new_faces] . identifier[extend] ( identifier[faces] ) identifier[new_faces] = identifier[np] . identifier[array] ( identifier[new_faces] ) identifier[new_vertex] = identifier[np] . identifier[array] ( identifier[new_vertex] ) keyword[if] identifier[len] ( identifier[new_faces] )== literal[int] : keyword[return] keyword[False] keyword[for] identifier[face_index] , identifier[face] keyword[in] identifier[enumerate] ( identifier[new_faces] ): identifier[edge_test] = identifier[face] [ literal[int] : literal[int] ] identifier[edge_boundary] = identifier[mesh] . identifier[edges] [ identifier[g] . identifier[get_edge_data] (* identifier[edge_test] )[ literal[string] ]] identifier[reversed] = identifier[edge_test] [ literal[int] ]== identifier[edge_boundary] [ literal[int] ] keyword[if] keyword[not] identifier[reversed] : identifier[new_faces] [ identifier[face_index] ]= identifier[face] [::- literal[int] ] keyword[if] identifier[len] ( identifier[new_vertex] )!= literal[int] : identifier[new_vertices] = identifier[np] . identifier[vstack] (( identifier[mesh] . identifier[vertices] , identifier[new_vertex] )) keyword[else] : identifier[new_vertices] = identifier[mesh] . identifier[vertices] keyword[if] literal[string] keyword[in] identifier[mesh] . identifier[_cache] . identifier[cache] : identifier[cached_normals] = identifier[mesh] . identifier[_cache] . identifier[cache] [ literal[string] ] keyword[else] : identifier[cached_normals] = keyword[None] identifier[new_normals] , identifier[valid] = identifier[triangles] . identifier[normals] ( identifier[new_vertices] [ identifier[new_faces] ]) keyword[if] keyword[not] identifier[valid] . identifier[any] (): keyword[return] keyword[False] identifier[mesh] . identifier[faces] = identifier[np] . identifier[vstack] (( identifier[mesh] . identifier[_data] [ literal[string] ], identifier[new_faces] [ identifier[valid] ])) identifier[mesh] . identifier[vertices] = identifier[new_vertices] identifier[mesh] . identifier[_cache] . identifier[verify] () keyword[if] identifier[cached_normals] keyword[is] keyword[not] keyword[None] : identifier[mesh] . identifier[face_normals] = identifier[np] . identifier[vstack] (( identifier[cached_normals] , identifier[new_normals] )) keyword[if] identifier[mesh] . identifier[visual] . identifier[defined] keyword[and] identifier[mesh] . identifier[visual] . identifier[kind] == literal[string] : identifier[stored] = identifier[mesh] . identifier[visual] . identifier[_data] [ literal[string] ] identifier[color_shape] = identifier[np] . identifier[shape] ( identifier[stored] ) keyword[if] identifier[len] ( identifier[color_shape] )== literal[int] : identifier[new_colors] = identifier[np] . identifier[tile] ( identifier[stored] [- literal[int] ],( identifier[np] . identifier[sum] ( identifier[valid] ), literal[int] )) identifier[new_colors] = identifier[np] . identifier[vstack] (( identifier[stored] , identifier[new_colors] )) identifier[mesh] . identifier[visual] . identifier[face_colors] = identifier[new_colors] identifier[log] . identifier[debug] ( literal[string] , identifier[np] . identifier[sum] ( identifier[valid] )) keyword[return] identifier[mesh] . identifier[is_watertight]
def fill_holes(mesh): """ Fill single- triangle holes on triangular meshes by adding new triangles to fill the holes. New triangles will have proper winding and normals, and if face colors exist the color of the last face will be assigned to the new triangles. Parameters --------- mesh : trimesh.Trimesh Mesh will be repaired in- place """ def hole_to_faces(hole): """ Given a loop of vertex indices representing a hole, turn it into triangular faces. If unable to do so, return None Parameters --------- hole: ordered loop of vertex indices Returns --------- (n, 3) new faces (m, 3) new vertices """ hole = np.asanyarray(hole) # the case where the hole is just a single missing triangle if len(hole) == 3: return ([hole], []) # depends on [control=['if'], data=[]] # the hole is a quad, which we fill with two triangles if len(hole) == 4: face_A = hole[[0, 1, 2]] face_B = hole[[2, 3, 0]] return ([face_A, face_B], []) # depends on [control=['if'], data=[]] return ([], []) if len(mesh.faces) < 3: return False # depends on [control=['if'], data=[]] if mesh.is_watertight: return True # depends on [control=['if'], data=[]] # we know that in a watertight mesh every edge will be included twice # thus every edge which appears only once is part of a hole boundary boundary_groups = group_rows(mesh.edges_sorted, require_count=1) if len(boundary_groups) < 3: watertight = len(boundary_groups) == 0 return watertight # depends on [control=['if'], data=[]] boundary_edges = mesh.edges[boundary_groups] index_as_dict = [{'index': i} for i in boundary_groups] # we create a graph of the boundary edges, and find cycles. g = nx.from_edgelist(np.column_stack((boundary_edges, index_as_dict))) cycles = np.array(nx.cycle_basis(g)) new_faces = [] new_vertex = [] for hole in cycles: # convert the hole, which is a polygon of vertex indices # to triangles and new vertices (faces, vertex) = hole_to_faces(hole=hole) if len(faces) == 0: continue # depends on [control=['if'], data=[]] # remeshing returns new vertices as negative indices, so change those # to absolute indices which won't be screwed up by the later appends faces = np.array(faces) faces[faces < 0] += len(new_vertex) + len(mesh.vertices) + len(vertex) new_vertex.extend(vertex) new_faces.extend(faces) # depends on [control=['for'], data=['hole']] new_faces = np.array(new_faces) new_vertex = np.array(new_vertex) if len(new_faces) == 0: # no new faces have been added, so nothing further to do # the mesh is NOT watertight, as boundary groups exist # but we didn't add any new faces to fill them in return False # depends on [control=['if'], data=[]] for (face_index, face) in enumerate(new_faces): # we compare the edge from the new face with # the boundary edge from the source mesh edge_test = face[0:2] edge_boundary = mesh.edges[g.get_edge_data(*edge_test)['index']] # in a well construced mesh, the winding is such that adjacent triangles # have reversed edges to each other. Here we check to make sure the # edges are reversed, and if they aren't we simply reverse the face reversed = edge_test[0] == edge_boundary[1] if not reversed: new_faces[face_index] = face[::-1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # stack vertices into clean (n, 3) float if len(new_vertex) != 0: new_vertices = np.vstack((mesh.vertices, new_vertex)) # depends on [control=['if'], data=[]] else: new_vertices = mesh.vertices # try to save face normals if we can if 'face_normals' in mesh._cache.cache: cached_normals = mesh._cache.cache['face_normals'] # depends on [control=['if'], data=[]] else: cached_normals = None # also we can remove any zero are triangles by masking here (new_normals, valid) = triangles.normals(new_vertices[new_faces]) # all the added faces were broken if not valid.any(): return False # depends on [control=['if'], data=[]] # apply the new faces and vertices mesh.faces = np.vstack((mesh._data['faces'], new_faces[valid])) mesh.vertices = new_vertices # dump the cache and set id to the new hash mesh._cache.verify() # save us a normals recompute if we can if cached_normals is not None: mesh.face_normals = np.vstack((cached_normals, new_normals)) # depends on [control=['if'], data=['cached_normals']] # this is usually the case where two vertices of a triangle are just # over tol.merge apart, but the normal calculation is screwed up # these could be fixed by merging the vertices in question here: # if not valid.all(): if mesh.visual.defined and mesh.visual.kind == 'face': # if face colors exist, assign the last face color to the new faces # note that this is a little cheesey, but it is very inexpensive and # is the right thing to do if the mesh is a single color. stored = mesh.visual._data['face_colors'] color_shape = np.shape(stored) if len(color_shape) == 2: new_colors = np.tile(stored[-1], (np.sum(valid), 1)) new_colors = np.vstack((stored, new_colors)) mesh.visual.face_colors = new_colors # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] log.debug('Filled in mesh with %i triangles', np.sum(valid)) return mesh.is_watertight
def copy_attributes(source, destination, ignore_patterns=[]): """ Copy the attributes from a source object to a destination object. """ for attr in _wildcard_filter(dir(source), *ignore_patterns): setattr(destination, attr, getattr(source, attr))
def function[copy_attributes, parameter[source, destination, ignore_patterns]]: constant[ Copy the attributes from a source object to a destination object. ] for taget[name[attr]] in starred[call[name[_wildcard_filter], parameter[call[name[dir], parameter[name[source]]], <ast.Starred object at 0x7da1afe0dde0>]]] begin[:] call[name[setattr], parameter[name[destination], name[attr], call[name[getattr], parameter[name[source], name[attr]]]]]
keyword[def] identifier[copy_attributes] ( identifier[source] , identifier[destination] , identifier[ignore_patterns] =[]): literal[string] keyword[for] identifier[attr] keyword[in] identifier[_wildcard_filter] ( identifier[dir] ( identifier[source] ),* identifier[ignore_patterns] ): identifier[setattr] ( identifier[destination] , identifier[attr] , identifier[getattr] ( identifier[source] , identifier[attr] ))
def copy_attributes(source, destination, ignore_patterns=[]): """ Copy the attributes from a source object to a destination object. """ for attr in _wildcard_filter(dir(source), *ignore_patterns): setattr(destination, attr, getattr(source, attr)) # depends on [control=['for'], data=['attr']]
def initialise(): """ Detects, prompts and initialises the project. Stores project and tool configuration in the `changes` module. """ global settings, project_settings # Global changes settings settings = Changes.load() # Project specific settings project_settings = Project.load(GitHubRepository(auth_token=settings.auth_token))
def function[initialise, parameter[]]: constant[ Detects, prompts and initialises the project. Stores project and tool configuration in the `changes` module. ] <ast.Global object at 0x7da1b05f0df0> variable[settings] assign[=] call[name[Changes].load, parameter[]] variable[project_settings] assign[=] call[name[Project].load, parameter[call[name[GitHubRepository], parameter[]]]]
keyword[def] identifier[initialise] (): literal[string] keyword[global] identifier[settings] , identifier[project_settings] identifier[settings] = identifier[Changes] . identifier[load] () identifier[project_settings] = identifier[Project] . identifier[load] ( identifier[GitHubRepository] ( identifier[auth_token] = identifier[settings] . identifier[auth_token] ))
def initialise(): """ Detects, prompts and initialises the project. Stores project and tool configuration in the `changes` module. """ global settings, project_settings # Global changes settings settings = Changes.load() # Project specific settings project_settings = Project.load(GitHubRepository(auth_token=settings.auth_token))
def deserialize(self, content_type, strdata): """Deserialize string of given content type. `self` unused in this implementation. >>> s = teststore() >>> s.deserialize('application/json', '{"id": "1", "name": "Toto"}') {u'id': u'1', u'name': u'Toto'} >>> s.deserialize('text/plain', 'id: 1, name: Toto') Traceback (most recent call last): ... ValueError: Unsupported content type "text/plain" """ if content_type != 'application/json': raise ValueError('Unsupported content type "' + content_type + '"') return json.loads(strdata)
def function[deserialize, parameter[self, content_type, strdata]]: constant[Deserialize string of given content type. `self` unused in this implementation. >>> s = teststore() >>> s.deserialize('application/json', '{"id": "1", "name": "Toto"}') {u'id': u'1', u'name': u'Toto'} >>> s.deserialize('text/plain', 'id: 1, name: Toto') Traceback (most recent call last): ... ValueError: Unsupported content type "text/plain" ] if compare[name[content_type] not_equal[!=] constant[application/json]] begin[:] <ast.Raise object at 0x7da18f810670> return[call[name[json].loads, parameter[name[strdata]]]]
keyword[def] identifier[deserialize] ( identifier[self] , identifier[content_type] , identifier[strdata] ): literal[string] keyword[if] identifier[content_type] != literal[string] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[content_type] + literal[string] ) keyword[return] identifier[json] . identifier[loads] ( identifier[strdata] )
def deserialize(self, content_type, strdata): """Deserialize string of given content type. `self` unused in this implementation. >>> s = teststore() >>> s.deserialize('application/json', '{"id": "1", "name": "Toto"}') {u'id': u'1', u'name': u'Toto'} >>> s.deserialize('text/plain', 'id: 1, name: Toto') Traceback (most recent call last): ... ValueError: Unsupported content type "text/plain" """ if content_type != 'application/json': raise ValueError('Unsupported content type "' + content_type + '"') # depends on [control=['if'], data=['content_type']] return json.loads(strdata)
def _extract(archive, compression, cmd, format, verbosity, outdir): """Extract an LZMA or XZ archive with the lzma Python module.""" targetname = util.get_single_outfile(outdir, archive) try: with lzma.LZMAFile(archive, **_get_lzma_options(format)) as lzmafile: with open(targetname, 'wb') as targetfile: data = lzmafile.read(READ_SIZE_BYTES) while data: targetfile.write(data) data = lzmafile.read(READ_SIZE_BYTES) except Exception as err: msg = "error extracting %s to %s: %s" % (archive, targetname, err) raise util.PatoolError(msg) return None
def function[_extract, parameter[archive, compression, cmd, format, verbosity, outdir]]: constant[Extract an LZMA or XZ archive with the lzma Python module.] variable[targetname] assign[=] call[name[util].get_single_outfile, parameter[name[outdir], name[archive]]] <ast.Try object at 0x7da1b0604370> return[constant[None]]
keyword[def] identifier[_extract] ( identifier[archive] , identifier[compression] , identifier[cmd] , identifier[format] , identifier[verbosity] , identifier[outdir] ): literal[string] identifier[targetname] = identifier[util] . identifier[get_single_outfile] ( identifier[outdir] , identifier[archive] ) keyword[try] : keyword[with] identifier[lzma] . identifier[LZMAFile] ( identifier[archive] ,** identifier[_get_lzma_options] ( identifier[format] )) keyword[as] identifier[lzmafile] : keyword[with] identifier[open] ( identifier[targetname] , literal[string] ) keyword[as] identifier[targetfile] : identifier[data] = identifier[lzmafile] . identifier[read] ( identifier[READ_SIZE_BYTES] ) keyword[while] identifier[data] : identifier[targetfile] . identifier[write] ( identifier[data] ) identifier[data] = identifier[lzmafile] . identifier[read] ( identifier[READ_SIZE_BYTES] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : identifier[msg] = literal[string] %( identifier[archive] , identifier[targetname] , identifier[err] ) keyword[raise] identifier[util] . identifier[PatoolError] ( identifier[msg] ) keyword[return] keyword[None]
def _extract(archive, compression, cmd, format, verbosity, outdir): """Extract an LZMA or XZ archive with the lzma Python module.""" targetname = util.get_single_outfile(outdir, archive) try: with lzma.LZMAFile(archive, **_get_lzma_options(format)) as lzmafile: with open(targetname, 'wb') as targetfile: data = lzmafile.read(READ_SIZE_BYTES) while data: targetfile.write(data) data = lzmafile.read(READ_SIZE_BYTES) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['targetfile']] # depends on [control=['with'], data=['lzmafile']] # depends on [control=['try'], data=[]] except Exception as err: msg = 'error extracting %s to %s: %s' % (archive, targetname, err) raise util.PatoolError(msg) # depends on [control=['except'], data=['err']] return None
def __parse_tag(self, tag, count): """Raises IOError and APEBadItemError""" fileobj = cBytesIO(tag) for i in xrange(count): tag_data = fileobj.read(8) # someone writes wrong item counts if not tag_data: break if len(tag_data) != 8: raise error size = cdata.uint32_le(tag_data[:4]) flags = cdata.uint32_le(tag_data[4:8]) # Bits 1 and 2 bits are flags, 0-3 # Bit 0 is read/write flag, ignored kind = (flags & 6) >> 1 if kind == 3: raise APEBadItemError("value type must be 0, 1, or 2") key = value = fileobj.read(1) if not key: raise APEBadItemError while key[-1:] != b'\x00' and value: value = fileobj.read(1) if not value: raise APEBadItemError key += value if key[-1:] == b"\x00": key = key[:-1] if PY3: try: key = key.decode("ascii") except UnicodeError as err: reraise(APEBadItemError, err, sys.exc_info()[2]) value = fileobj.read(size) if len(value) != size: raise APEBadItemError value = _get_value_type(kind)._new(value) self[key] = value
def function[__parse_tag, parameter[self, tag, count]]: constant[Raises IOError and APEBadItemError] variable[fileobj] assign[=] call[name[cBytesIO], parameter[name[tag]]] for taget[name[i]] in starred[call[name[xrange], parameter[name[count]]]] begin[:] variable[tag_data] assign[=] call[name[fileobj].read, parameter[constant[8]]] if <ast.UnaryOp object at 0x7da1b2015270> begin[:] break if compare[call[name[len], parameter[name[tag_data]]] not_equal[!=] constant[8]] begin[:] <ast.Raise object at 0x7da1b20148b0> variable[size] assign[=] call[name[cdata].uint32_le, parameter[call[name[tag_data]][<ast.Slice object at 0x7da1b2017670>]]] variable[flags] assign[=] call[name[cdata].uint32_le, parameter[call[name[tag_data]][<ast.Slice object at 0x7da1b2016830>]]] variable[kind] assign[=] binary_operation[binary_operation[name[flags] <ast.BitAnd object at 0x7da2590d6b60> constant[6]] <ast.RShift object at 0x7da2590d6a40> constant[1]] if compare[name[kind] equal[==] constant[3]] begin[:] <ast.Raise object at 0x7da1b2015a80> variable[key] assign[=] call[name[fileobj].read, parameter[constant[1]]] if <ast.UnaryOp object at 0x7da1b2015d80> begin[:] <ast.Raise object at 0x7da1b2017160> while <ast.BoolOp object at 0x7da1b2097c40> begin[:] variable[value] assign[=] call[name[fileobj].read, parameter[constant[1]]] if <ast.UnaryOp object at 0x7da1b2095570> begin[:] <ast.Raise object at 0x7da1b2096020> <ast.AugAssign object at 0x7da1b2095540> if compare[call[name[key]][<ast.Slice object at 0x7da1b2094310>] equal[==] constant[b'\x00']] begin[:] variable[key] assign[=] call[name[key]][<ast.Slice object at 0x7da1b2094e20>] if name[PY3] begin[:] <ast.Try object at 0x7da1b2096d70> variable[value] assign[=] call[name[fileobj].read, parameter[name[size]]] if compare[call[name[len], parameter[name[value]]] not_equal[!=] name[size]] begin[:] <ast.Raise object at 0x7da1b1e46530> variable[value] assign[=] call[call[name[_get_value_type], parameter[name[kind]]]._new, parameter[name[value]]] call[name[self]][name[key]] assign[=] name[value]
keyword[def] identifier[__parse_tag] ( identifier[self] , identifier[tag] , identifier[count] ): literal[string] identifier[fileobj] = identifier[cBytesIO] ( identifier[tag] ) keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[count] ): identifier[tag_data] = identifier[fileobj] . identifier[read] ( literal[int] ) keyword[if] keyword[not] identifier[tag_data] : keyword[break] keyword[if] identifier[len] ( identifier[tag_data] )!= literal[int] : keyword[raise] identifier[error] identifier[size] = identifier[cdata] . identifier[uint32_le] ( identifier[tag_data] [: literal[int] ]) identifier[flags] = identifier[cdata] . identifier[uint32_le] ( identifier[tag_data] [ literal[int] : literal[int] ]) identifier[kind] =( identifier[flags] & literal[int] )>> literal[int] keyword[if] identifier[kind] == literal[int] : keyword[raise] identifier[APEBadItemError] ( literal[string] ) identifier[key] = identifier[value] = identifier[fileobj] . identifier[read] ( literal[int] ) keyword[if] keyword[not] identifier[key] : keyword[raise] identifier[APEBadItemError] keyword[while] identifier[key] [- literal[int] :]!= literal[string] keyword[and] identifier[value] : identifier[value] = identifier[fileobj] . identifier[read] ( literal[int] ) keyword[if] keyword[not] identifier[value] : keyword[raise] identifier[APEBadItemError] identifier[key] += identifier[value] keyword[if] identifier[key] [- literal[int] :]== literal[string] : identifier[key] = identifier[key] [:- literal[int] ] keyword[if] identifier[PY3] : keyword[try] : identifier[key] = identifier[key] . identifier[decode] ( literal[string] ) keyword[except] identifier[UnicodeError] keyword[as] identifier[err] : identifier[reraise] ( identifier[APEBadItemError] , identifier[err] , identifier[sys] . identifier[exc_info] ()[ literal[int] ]) identifier[value] = identifier[fileobj] . identifier[read] ( identifier[size] ) keyword[if] identifier[len] ( identifier[value] )!= identifier[size] : keyword[raise] identifier[APEBadItemError] identifier[value] = identifier[_get_value_type] ( identifier[kind] ). identifier[_new] ( identifier[value] ) identifier[self] [ identifier[key] ]= identifier[value]
def __parse_tag(self, tag, count): """Raises IOError and APEBadItemError""" fileobj = cBytesIO(tag) for i in xrange(count): tag_data = fileobj.read(8) # someone writes wrong item counts if not tag_data: break # depends on [control=['if'], data=[]] if len(tag_data) != 8: raise error # depends on [control=['if'], data=[]] size = cdata.uint32_le(tag_data[:4]) flags = cdata.uint32_le(tag_data[4:8]) # Bits 1 and 2 bits are flags, 0-3 # Bit 0 is read/write flag, ignored kind = (flags & 6) >> 1 if kind == 3: raise APEBadItemError('value type must be 0, 1, or 2') # depends on [control=['if'], data=[]] key = value = fileobj.read(1) if not key: raise APEBadItemError # depends on [control=['if'], data=[]] while key[-1:] != b'\x00' and value: value = fileobj.read(1) if not value: raise APEBadItemError # depends on [control=['if'], data=[]] key += value # depends on [control=['while'], data=[]] if key[-1:] == b'\x00': key = key[:-1] # depends on [control=['if'], data=[]] if PY3: try: key = key.decode('ascii') # depends on [control=['try'], data=[]] except UnicodeError as err: reraise(APEBadItemError, err, sys.exc_info()[2]) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]] value = fileobj.read(size) if len(value) != size: raise APEBadItemError # depends on [control=['if'], data=[]] value = _get_value_type(kind)._new(value) self[key] = value # depends on [control=['for'], data=[]]
def exec_output(cls, command, shell=True, encoding='utf-8'): """ Return execution output :param encoding: charset used to decode the stdout :type encoding: str :return: the return of the command :rtype: unicode string """ proc = Popen(command, shell=shell, stdout=PIPE) stdout, _stderr = proc.communicate() if proc.returncode == 0: return stdout.decode(encoding) return ''
def function[exec_output, parameter[cls, command, shell, encoding]]: constant[ Return execution output :param encoding: charset used to decode the stdout :type encoding: str :return: the return of the command :rtype: unicode string ] variable[proc] assign[=] call[name[Popen], parameter[name[command]]] <ast.Tuple object at 0x7da18c4cd060> assign[=] call[name[proc].communicate, parameter[]] if compare[name[proc].returncode equal[==] constant[0]] begin[:] return[call[name[stdout].decode, parameter[name[encoding]]]] return[constant[]]
keyword[def] identifier[exec_output] ( identifier[cls] , identifier[command] , identifier[shell] = keyword[True] , identifier[encoding] = literal[string] ): literal[string] identifier[proc] = identifier[Popen] ( identifier[command] , identifier[shell] = identifier[shell] , identifier[stdout] = identifier[PIPE] ) identifier[stdout] , identifier[_stderr] = identifier[proc] . identifier[communicate] () keyword[if] identifier[proc] . identifier[returncode] == literal[int] : keyword[return] identifier[stdout] . identifier[decode] ( identifier[encoding] ) keyword[return] literal[string]
def exec_output(cls, command, shell=True, encoding='utf-8'): """ Return execution output :param encoding: charset used to decode the stdout :type encoding: str :return: the return of the command :rtype: unicode string """ proc = Popen(command, shell=shell, stdout=PIPE) (stdout, _stderr) = proc.communicate() if proc.returncode == 0: return stdout.decode(encoding) # depends on [control=['if'], data=[]] return ''
def p_property_list(self, p): """property_list : property_assignment | property_list COMMA property_assignment """ if len(p) == 2: p[0] = [p[1]] else: p[1].append(p[3]) p[0] = p[1]
def function[p_property_list, parameter[self, p]]: constant[property_list : property_assignment | property_list COMMA property_assignment ] if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:] call[name[p]][constant[0]] assign[=] list[[<ast.Subscript object at 0x7da18f58c1f0>]]
keyword[def] identifier[p_property_list] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ]] keyword[else] : identifier[p] [ literal[int] ]. identifier[append] ( identifier[p] [ literal[int] ]) identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]
def p_property_list(self, p): """property_list : property_assignment | property_list COMMA property_assignment """ if len(p) == 2: p[0] = [p[1]] # depends on [control=['if'], data=[]] else: p[1].append(p[3]) p[0] = p[1]
def main(**kwargs): """Entry point to run databench.""" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--version', action='version', version='%(prog)s {}'.format(DATABENCH_VERSION)) parser.add_argument('--log', dest='loglevel', default="INFO", type=str.upper, help=('log level (info, warning, error, critical or ' 'debug, default info)')) parser.add_argument('--no-watch', dest='watch', default=True, action='store_false', help='do not watch and restart when files change') parser.add_argument('--host', dest='host', default=os.environ.get('HOST', '127.0.0.1'), help='host address for webserver (default 127.0.0.1)') parser.add_argument('--port', dest='port', type=int, default=int(os.environ.get('PORT', 5000)), help='port for webserver') if not kwargs: parser.add_argument('--analyses', default=None, help='import path for analyses') parser.add_argument('--build', default=False, action='store_true', help='run the build command and exit') parser.add_argument('--coverage', default=False, help=argparse.SUPPRESS) ssl_args = parser.add_argument_group('SSL') ssl_args.add_argument('--ssl-certfile', dest='ssl_certfile', default=os.environ.get('SSLCERTFILE'), help='SSL certificate file') ssl_args.add_argument('--ssl-keyfile', dest='ssl_keyfile', default=os.environ.get('SSLKEYFILE'), help='SSL key file') ssl_args.add_argument('--ssl-port', dest='ssl_port', type=int, default=int(os.environ.get('SSLPORT', 0)), help='SSL port for webserver') args, analyses_args = parser.parse_known_args() # coverage cov = None if args.coverage: import coverage cov = coverage.Coverage(data_file=args.coverage, data_suffix=True) cov.start() # this is included here so that is included in coverage from .app import App, SingleApp # log logging.basicConfig(level=getattr(logging, args.loglevel)) if args.loglevel != 'INFO': logging.info('Set loglevel to {}.'.format(args.loglevel)) # show versions and setup logging.info('Databench {}'.format(DATABENCH_VERSION)) if args.host in ('localhost', '127.0.0.1'): logging.info('Open http://{}:{} in a web browser.' ''.format(args.host, args.port)) logging.debug('host={}, port={}'.format(args.host, args.port)) logging.debug('Python {}'.format(sys.version)) if analyses_args: logging.debug('Arguments passed to analyses: {}'.format(analyses_args)) if not kwargs: app = App(args.analyses, cli_args=analyses_args, debug=args.watch) else: app = SingleApp(cli_args=analyses_args, debug=args.watch, **kwargs) # check whether this is just a quick build if args.build: logging.info('Build mode: only run build command and exit.') app.build() if cov: cov.stop() cov.save() return # HTTP server tornado_app = app.tornado_app() tornado_app.listen(args.port, args.host) # HTTPS server if args.ssl_port: if args.ssl_certfile and args.ssl_keyfile: ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx.load_cert_chain(args.ssl_certfile, args.ssl_keyfile) else: # use Tornado's self signed certificates module_dir = os.path.dirname(tornado.__file__) ssl_ctx = { 'certfile': os.path.join(module_dir, 'test', 'test.crt'), 'keyfile': os.path.join(module_dir, 'test', 'test.key'), } logging.info('Open https://{}:{} in a web browser.' ''.format(args.host, args.ssl_port)) tornado_app.listen(args.ssl_port, ssl_options=ssl_ctx) try: tornado.ioloop.IOLoop.current().start() except KeyboardInterrupt: tornado.ioloop.IOLoop.current().stop() if cov: cov.stop() cov.save()
def function[main, parameter[]]: constant[Entry point to run databench.] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[--version]]] call[name[parser].add_argument, parameter[constant[--log]]] call[name[parser].add_argument, parameter[constant[--no-watch]]] call[name[parser].add_argument, parameter[constant[--host]]] call[name[parser].add_argument, parameter[constant[--port]]] if <ast.UnaryOp object at 0x7da20c76c910> begin[:] call[name[parser].add_argument, parameter[constant[--analyses]]] call[name[parser].add_argument, parameter[constant[--build]]] call[name[parser].add_argument, parameter[constant[--coverage]]] variable[ssl_args] assign[=] call[name[parser].add_argument_group, parameter[constant[SSL]]] call[name[ssl_args].add_argument, parameter[constant[--ssl-certfile]]] call[name[ssl_args].add_argument, parameter[constant[--ssl-keyfile]]] call[name[ssl_args].add_argument, parameter[constant[--ssl-port]]] <ast.Tuple object at 0x7da20cabf430> assign[=] call[name[parser].parse_known_args, parameter[]] variable[cov] assign[=] constant[None] if name[args].coverage begin[:] import module[coverage] variable[cov] assign[=] call[name[coverage].Coverage, parameter[]] call[name[cov].start, parameter[]] from relative_module[app] import module[App], module[SingleApp] call[name[logging].basicConfig, parameter[]] if compare[name[args].loglevel not_equal[!=] constant[INFO]] begin[:] call[name[logging].info, parameter[call[constant[Set loglevel to {}.].format, parameter[name[args].loglevel]]]] call[name[logging].info, parameter[call[constant[Databench {}].format, parameter[name[DATABENCH_VERSION]]]]] if compare[name[args].host in tuple[[<ast.Constant object at 0x7da20c76e980>, <ast.Constant object at 0x7da20c76c520>]]] begin[:] call[name[logging].info, parameter[call[constant[Open http://{}:{} in a web browser.].format, parameter[name[args].host, name[args].port]]]] call[name[logging].debug, parameter[call[constant[host={}, port={}].format, parameter[name[args].host, name[args].port]]]] call[name[logging].debug, parameter[call[constant[Python {}].format, parameter[name[sys].version]]]] if name[analyses_args] begin[:] call[name[logging].debug, parameter[call[constant[Arguments passed to analyses: {}].format, parameter[name[analyses_args]]]]] if <ast.UnaryOp object at 0x7da20c76eb90> begin[:] variable[app] assign[=] call[name[App], parameter[name[args].analyses]] if name[args].build begin[:] call[name[logging].info, parameter[constant[Build mode: only run build command and exit.]]] call[name[app].build, parameter[]] if name[cov] begin[:] call[name[cov].stop, parameter[]] call[name[cov].save, parameter[]] return[None] variable[tornado_app] assign[=] call[name[app].tornado_app, parameter[]] call[name[tornado_app].listen, parameter[name[args].port, name[args].host]] if name[args].ssl_port begin[:] if <ast.BoolOp object at 0x7da20c76cc40> begin[:] variable[ssl_ctx] assign[=] call[name[ssl].create_default_context, parameter[name[ssl].Purpose.CLIENT_AUTH]] call[name[ssl_ctx].load_cert_chain, parameter[name[args].ssl_certfile, name[args].ssl_keyfile]] call[name[logging].info, parameter[call[constant[Open https://{}:{} in a web browser.].format, parameter[name[args].host, name[args].ssl_port]]]] call[name[tornado_app].listen, parameter[name[args].ssl_port]] <ast.Try object at 0x7da2045643a0>
keyword[def] identifier[main] (** identifier[kwargs] ): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[__doc__] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[version] = literal[string] . identifier[format] ( identifier[DATABENCH_VERSION] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = literal[string] , identifier[type] = identifier[str] . identifier[upper] , identifier[help] =( literal[string] literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ), identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[type] = identifier[int] , identifier[default] = identifier[int] ( identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[int] )), identifier[help] = literal[string] ) keyword[if] keyword[not] identifier[kwargs] : identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[None] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[False] , identifier[help] = identifier[argparse] . identifier[SUPPRESS] ) identifier[ssl_args] = identifier[parser] . identifier[add_argument_group] ( literal[string] ) identifier[ssl_args] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] ), identifier[help] = literal[string] ) identifier[ssl_args] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] ), identifier[help] = literal[string] ) identifier[ssl_args] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] , identifier[type] = identifier[int] , identifier[default] = identifier[int] ( identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[int] )), identifier[help] = literal[string] ) identifier[args] , identifier[analyses_args] = identifier[parser] . identifier[parse_known_args] () identifier[cov] = keyword[None] keyword[if] identifier[args] . identifier[coverage] : keyword[import] identifier[coverage] identifier[cov] = identifier[coverage] . identifier[Coverage] ( identifier[data_file] = identifier[args] . identifier[coverage] , identifier[data_suffix] = keyword[True] ) identifier[cov] . identifier[start] () keyword[from] . identifier[app] keyword[import] identifier[App] , identifier[SingleApp] identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[getattr] ( identifier[logging] , identifier[args] . identifier[loglevel] )) keyword[if] identifier[args] . identifier[loglevel] != literal[string] : identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[args] . identifier[loglevel] )) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[DATABENCH_VERSION] )) keyword[if] identifier[args] . identifier[host] keyword[in] ( literal[string] , literal[string] ): identifier[logging] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[args] . identifier[host] , identifier[args] . identifier[port] )) identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[args] . identifier[host] , identifier[args] . identifier[port] )) identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sys] . identifier[version] )) keyword[if] identifier[analyses_args] : identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[analyses_args] )) keyword[if] keyword[not] identifier[kwargs] : identifier[app] = identifier[App] ( identifier[args] . identifier[analyses] , identifier[cli_args] = identifier[analyses_args] , identifier[debug] = identifier[args] . identifier[watch] ) keyword[else] : identifier[app] = identifier[SingleApp] ( identifier[cli_args] = identifier[analyses_args] , identifier[debug] = identifier[args] . identifier[watch] ,** identifier[kwargs] ) keyword[if] identifier[args] . identifier[build] : identifier[logging] . identifier[info] ( literal[string] ) identifier[app] . identifier[build] () keyword[if] identifier[cov] : identifier[cov] . identifier[stop] () identifier[cov] . identifier[save] () keyword[return] identifier[tornado_app] = identifier[app] . identifier[tornado_app] () identifier[tornado_app] . identifier[listen] ( identifier[args] . identifier[port] , identifier[args] . identifier[host] ) keyword[if] identifier[args] . identifier[ssl_port] : keyword[if] identifier[args] . identifier[ssl_certfile] keyword[and] identifier[args] . identifier[ssl_keyfile] : identifier[ssl_ctx] = identifier[ssl] . identifier[create_default_context] ( identifier[ssl] . identifier[Purpose] . identifier[CLIENT_AUTH] ) identifier[ssl_ctx] . identifier[load_cert_chain] ( identifier[args] . identifier[ssl_certfile] , identifier[args] . identifier[ssl_keyfile] ) keyword[else] : identifier[module_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[tornado] . identifier[__file__] ) identifier[ssl_ctx] ={ literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[module_dir] , literal[string] , literal[string] ), literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[module_dir] , literal[string] , literal[string] ), } identifier[logging] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[args] . identifier[host] , identifier[args] . identifier[ssl_port] )) identifier[tornado_app] . identifier[listen] ( identifier[args] . identifier[ssl_port] , identifier[ssl_options] = identifier[ssl_ctx] ) keyword[try] : identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[current] (). identifier[start] () keyword[except] identifier[KeyboardInterrupt] : identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[current] (). identifier[stop] () keyword[if] identifier[cov] : identifier[cov] . identifier[stop] () identifier[cov] . identifier[save] ()
def main(**kwargs): """Entry point to run databench.""" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--version', action='version', version='%(prog)s {}'.format(DATABENCH_VERSION)) parser.add_argument('--log', dest='loglevel', default='INFO', type=str.upper, help='log level (info, warning, error, critical or debug, default info)') parser.add_argument('--no-watch', dest='watch', default=True, action='store_false', help='do not watch and restart when files change') parser.add_argument('--host', dest='host', default=os.environ.get('HOST', '127.0.0.1'), help='host address for webserver (default 127.0.0.1)') parser.add_argument('--port', dest='port', type=int, default=int(os.environ.get('PORT', 5000)), help='port for webserver') if not kwargs: parser.add_argument('--analyses', default=None, help='import path for analyses') # depends on [control=['if'], data=[]] parser.add_argument('--build', default=False, action='store_true', help='run the build command and exit') parser.add_argument('--coverage', default=False, help=argparse.SUPPRESS) ssl_args = parser.add_argument_group('SSL') ssl_args.add_argument('--ssl-certfile', dest='ssl_certfile', default=os.environ.get('SSLCERTFILE'), help='SSL certificate file') ssl_args.add_argument('--ssl-keyfile', dest='ssl_keyfile', default=os.environ.get('SSLKEYFILE'), help='SSL key file') ssl_args.add_argument('--ssl-port', dest='ssl_port', type=int, default=int(os.environ.get('SSLPORT', 0)), help='SSL port for webserver') (args, analyses_args) = parser.parse_known_args() # coverage cov = None if args.coverage: import coverage cov = coverage.Coverage(data_file=args.coverage, data_suffix=True) cov.start() # depends on [control=['if'], data=[]] # this is included here so that is included in coverage from .app import App, SingleApp # log logging.basicConfig(level=getattr(logging, args.loglevel)) if args.loglevel != 'INFO': logging.info('Set loglevel to {}.'.format(args.loglevel)) # depends on [control=['if'], data=[]] # show versions and setup logging.info('Databench {}'.format(DATABENCH_VERSION)) if args.host in ('localhost', '127.0.0.1'): logging.info('Open http://{}:{} in a web browser.'.format(args.host, args.port)) # depends on [control=['if'], data=[]] logging.debug('host={}, port={}'.format(args.host, args.port)) logging.debug('Python {}'.format(sys.version)) if analyses_args: logging.debug('Arguments passed to analyses: {}'.format(analyses_args)) # depends on [control=['if'], data=[]] if not kwargs: app = App(args.analyses, cli_args=analyses_args, debug=args.watch) # depends on [control=['if'], data=[]] else: app = SingleApp(cli_args=analyses_args, debug=args.watch, **kwargs) # check whether this is just a quick build if args.build: logging.info('Build mode: only run build command and exit.') app.build() if cov: cov.stop() cov.save() # depends on [control=['if'], data=[]] return # depends on [control=['if'], data=[]] # HTTP server tornado_app = app.tornado_app() tornado_app.listen(args.port, args.host) # HTTPS server if args.ssl_port: if args.ssl_certfile and args.ssl_keyfile: ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx.load_cert_chain(args.ssl_certfile, args.ssl_keyfile) # depends on [control=['if'], data=[]] else: # use Tornado's self signed certificates module_dir = os.path.dirname(tornado.__file__) ssl_ctx = {'certfile': os.path.join(module_dir, 'test', 'test.crt'), 'keyfile': os.path.join(module_dir, 'test', 'test.key')} logging.info('Open https://{}:{} in a web browser.'.format(args.host, args.ssl_port)) tornado_app.listen(args.ssl_port, ssl_options=ssl_ctx) # depends on [control=['if'], data=[]] try: tornado.ioloop.IOLoop.current().start() # depends on [control=['try'], data=[]] except KeyboardInterrupt: tornado.ioloop.IOLoop.current().stop() if cov: cov.stop() cov.save() # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
def to_match(self): """Return a unicode object with the MATCH representation of this expression.""" self.validate() mark_name, field_name = self.location.get_location_name() validate_safe_string(mark_name) if field_name is not None: raise AssertionError(u'Vertex location has non-None field_name: ' u'{} {}'.format(field_name, self.location)) return mark_name
def function[to_match, parameter[self]]: constant[Return a unicode object with the MATCH representation of this expression.] call[name[self].validate, parameter[]] <ast.Tuple object at 0x7da1b1726800> assign[=] call[name[self].location.get_location_name, parameter[]] call[name[validate_safe_string], parameter[name[mark_name]]] if compare[name[field_name] is_not constant[None]] begin[:] <ast.Raise object at 0x7da1b1727910> return[name[mark_name]]
keyword[def] identifier[to_match] ( identifier[self] ): literal[string] identifier[self] . identifier[validate] () identifier[mark_name] , identifier[field_name] = identifier[self] . identifier[location] . identifier[get_location_name] () identifier[validate_safe_string] ( identifier[mark_name] ) keyword[if] identifier[field_name] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[field_name] , identifier[self] . identifier[location] )) keyword[return] identifier[mark_name]
def to_match(self): """Return a unicode object with the MATCH representation of this expression.""" self.validate() (mark_name, field_name) = self.location.get_location_name() validate_safe_string(mark_name) if field_name is not None: raise AssertionError(u'Vertex location has non-None field_name: {} {}'.format(field_name, self.location)) # depends on [control=['if'], data=['field_name']] return mark_name
def get_dict(self): """ Returns a dict containing the host's attributes. The following keys are contained: - hostname - address - protocol - port :rtype: dict :return: The resulting dictionary. """ return {'hostname': self.get_name(), 'address': self.get_address(), 'protocol': self.get_protocol(), 'port': self.get_tcp_port()}
def function[get_dict, parameter[self]]: constant[ Returns a dict containing the host's attributes. The following keys are contained: - hostname - address - protocol - port :rtype: dict :return: The resulting dictionary. ] return[dictionary[[<ast.Constant object at 0x7da1b0652140>, <ast.Constant object at 0x7da1b06520e0>, <ast.Constant object at 0x7da1b0651f30>, <ast.Constant object at 0x7da1b0651d20>], [<ast.Call object at 0x7da1b0651c90>, <ast.Call object at 0x7da1b0650880>, <ast.Call object at 0x7da1b0652b30>, <ast.Call object at 0x7da1b0651030>]]]
keyword[def] identifier[get_dict] ( identifier[self] ): literal[string] keyword[return] { literal[string] : identifier[self] . identifier[get_name] (), literal[string] : identifier[self] . identifier[get_address] (), literal[string] : identifier[self] . identifier[get_protocol] (), literal[string] : identifier[self] . identifier[get_tcp_port] ()}
def get_dict(self): """ Returns a dict containing the host's attributes. The following keys are contained: - hostname - address - protocol - port :rtype: dict :return: The resulting dictionary. """ return {'hostname': self.get_name(), 'address': self.get_address(), 'protocol': self.get_protocol(), 'port': self.get_tcp_port()}