code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def index_collection(self, filenames): "Index a whole collection of files." for filename in filenames: self.index_document(open(filename).read(), filename)
def function[index_collection, parameter[self, filenames]]: constant[Index a whole collection of files.] for taget[name[filename]] in starred[name[filenames]] begin[:] call[name[self].index_document, parameter[call[call[name[open], parameter[name[filename]]].read, parameter[]], name[filename]]]
keyword[def] identifier[index_collection] ( identifier[self] , identifier[filenames] ): literal[string] keyword[for] identifier[filename] keyword[in] identifier[filenames] : identifier[self] . identifier[index_document] ( identifier[open] ( identifier[filename] ). identifier[read] (), identifier[filename] )
def index_collection(self, filenames): """Index a whole collection of files.""" for filename in filenames: self.index_document(open(filename).read(), filename) # depends on [control=['for'], data=['filename']]
def active(self): """ Returns the count of non-skipped actors. :return: the count :rtype: int """ result = 0 for actor in self.actors: if not actor.skip: result += 1 return result
def function[active, parameter[self]]: constant[ Returns the count of non-skipped actors. :return: the count :rtype: int ] variable[result] assign[=] constant[0] for taget[name[actor]] in starred[name[self].actors] begin[:] if <ast.UnaryOp object at 0x7da1b06be050> begin[:] <ast.AugAssign object at 0x7da1b06bf130> return[name[result]]
keyword[def] identifier[active] ( identifier[self] ): literal[string] identifier[result] = literal[int] keyword[for] identifier[actor] keyword[in] identifier[self] . identifier[actors] : keyword[if] keyword[not] identifier[actor] . identifier[skip] : identifier[result] += literal[int] keyword[return] identifier[result]
def active(self): """ Returns the count of non-skipped actors. :return: the count :rtype: int """ result = 0 for actor in self.actors: if not actor.skip: result += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['actor']] return result
def random_expr(depth, vlist, ops): """Generate a random expression tree. Args: depth: At least one leaf will be this many levels down from the top. vlist: A list of chars. These chars are randomly selected as leaf values. ops: A list of ExprOp instances. Returns: An ExprNode instance which is the root of the generated expression tree. """ if not depth: return str(vlist[random.randrange(len(vlist))]) max_depth_side = random.randrange(2) other_side_depth = random.randrange(depth) left = random_expr(depth - 1 if max_depth_side else other_side_depth, vlist, ops) right = random_expr(depth - 1 if not max_depth_side else other_side_depth, vlist, ops) op = ops[random.randrange(len(ops))] return ExprNode(left, right, op)
def function[random_expr, parameter[depth, vlist, ops]]: constant[Generate a random expression tree. Args: depth: At least one leaf will be this many levels down from the top. vlist: A list of chars. These chars are randomly selected as leaf values. ops: A list of ExprOp instances. Returns: An ExprNode instance which is the root of the generated expression tree. ] if <ast.UnaryOp object at 0x7da1b209b1c0> begin[:] return[call[name[str], parameter[call[name[vlist]][call[name[random].randrange, parameter[call[name[len], parameter[name[vlist]]]]]]]]] variable[max_depth_side] assign[=] call[name[random].randrange, parameter[constant[2]]] variable[other_side_depth] assign[=] call[name[random].randrange, parameter[name[depth]]] variable[left] assign[=] call[name[random_expr], parameter[<ast.IfExp object at 0x7da1b2098eb0>, name[vlist], name[ops]]] variable[right] assign[=] call[name[random_expr], parameter[<ast.IfExp object at 0x7da1b203f850>, name[vlist], name[ops]]] variable[op] assign[=] call[name[ops]][call[name[random].randrange, parameter[call[name[len], parameter[name[ops]]]]]] return[call[name[ExprNode], parameter[name[left], name[right], name[op]]]]
keyword[def] identifier[random_expr] ( identifier[depth] , identifier[vlist] , identifier[ops] ): literal[string] keyword[if] keyword[not] identifier[depth] : keyword[return] identifier[str] ( identifier[vlist] [ identifier[random] . identifier[randrange] ( identifier[len] ( identifier[vlist] ))]) identifier[max_depth_side] = identifier[random] . identifier[randrange] ( literal[int] ) identifier[other_side_depth] = identifier[random] . identifier[randrange] ( identifier[depth] ) identifier[left] = identifier[random_expr] ( identifier[depth] - literal[int] keyword[if] identifier[max_depth_side] keyword[else] identifier[other_side_depth] , identifier[vlist] , identifier[ops] ) identifier[right] = identifier[random_expr] ( identifier[depth] - literal[int] keyword[if] keyword[not] identifier[max_depth_side] keyword[else] identifier[other_side_depth] , identifier[vlist] , identifier[ops] ) identifier[op] = identifier[ops] [ identifier[random] . identifier[randrange] ( identifier[len] ( identifier[ops] ))] keyword[return] identifier[ExprNode] ( identifier[left] , identifier[right] , identifier[op] )
def random_expr(depth, vlist, ops): """Generate a random expression tree. Args: depth: At least one leaf will be this many levels down from the top. vlist: A list of chars. These chars are randomly selected as leaf values. ops: A list of ExprOp instances. Returns: An ExprNode instance which is the root of the generated expression tree. """ if not depth: return str(vlist[random.randrange(len(vlist))]) # depends on [control=['if'], data=[]] max_depth_side = random.randrange(2) other_side_depth = random.randrange(depth) left = random_expr(depth - 1 if max_depth_side else other_side_depth, vlist, ops) right = random_expr(depth - 1 if not max_depth_side else other_side_depth, vlist, ops) op = ops[random.randrange(len(ops))] return ExprNode(left, right, op)
def disableTemperature(self): """ Specifies the device should NOT write temperature values to the FIFO, is not applied until enableFIFO is called. :return: """ logger.debug("Disabling temperature sensor") self.fifoSensorMask &= ~self.enableTemperatureMask self._setSampleSizeBytes()
def function[disableTemperature, parameter[self]]: constant[ Specifies the device should NOT write temperature values to the FIFO, is not applied until enableFIFO is called. :return: ] call[name[logger].debug, parameter[constant[Disabling temperature sensor]]] <ast.AugAssign object at 0x7da1b0f507c0> call[name[self]._setSampleSizeBytes, parameter[]]
keyword[def] identifier[disableTemperature] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[fifoSensorMask] &=~ identifier[self] . identifier[enableTemperatureMask] identifier[self] . identifier[_setSampleSizeBytes] ()
def disableTemperature(self): """ Specifies the device should NOT write temperature values to the FIFO, is not applied until enableFIFO is called. :return: """ logger.debug('Disabling temperature sensor') self.fifoSensorMask &= ~self.enableTemperatureMask self._setSampleSizeBytes()
def save_data(X, y, path): """Save data as a CSV, LibSVM or HDF5 file based on the file extension. Args: X (numpy or scipy sparse matrix): Data matrix y (numpy array): Target vector. If None, all zero vector will be saved. path (str): Path to the CSV, LibSVM or HDF5 file to save data. """ catalog = {'.csv': save_csv, '.sps': save_libsvm, '.h5': save_hdf5} ext = os.path.splitext(path)[1] func = catalog[ext] if y is None: y = np.zeros((X.shape[0], )) func(X, y, path)
def function[save_data, parameter[X, y, path]]: constant[Save data as a CSV, LibSVM or HDF5 file based on the file extension. Args: X (numpy or scipy sparse matrix): Data matrix y (numpy array): Target vector. If None, all zero vector will be saved. path (str): Path to the CSV, LibSVM or HDF5 file to save data. ] variable[catalog] assign[=] dictionary[[<ast.Constant object at 0x7da2054a6590>, <ast.Constant object at 0x7da2046211b0>, <ast.Constant object at 0x7da204623ee0>], [<ast.Name object at 0x7da204620670>, <ast.Name object at 0x7da204620160>, <ast.Name object at 0x7da204621540>]] variable[ext] assign[=] call[call[name[os].path.splitext, parameter[name[path]]]][constant[1]] variable[func] assign[=] call[name[catalog]][name[ext]] if compare[name[y] is constant[None]] begin[:] variable[y] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da204621cc0>]]]] call[name[func], parameter[name[X], name[y], name[path]]]
keyword[def] identifier[save_data] ( identifier[X] , identifier[y] , identifier[path] ): literal[string] identifier[catalog] ={ literal[string] : identifier[save_csv] , literal[string] : identifier[save_libsvm] , literal[string] : identifier[save_hdf5] } identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[path] )[ literal[int] ] identifier[func] = identifier[catalog] [ identifier[ext] ] keyword[if] identifier[y] keyword[is] keyword[None] : identifier[y] = identifier[np] . identifier[zeros] (( identifier[X] . identifier[shape] [ literal[int] ],)) identifier[func] ( identifier[X] , identifier[y] , identifier[path] )
def save_data(X, y, path): """Save data as a CSV, LibSVM or HDF5 file based on the file extension. Args: X (numpy or scipy sparse matrix): Data matrix y (numpy array): Target vector. If None, all zero vector will be saved. path (str): Path to the CSV, LibSVM or HDF5 file to save data. """ catalog = {'.csv': save_csv, '.sps': save_libsvm, '.h5': save_hdf5} ext = os.path.splitext(path)[1] func = catalog[ext] if y is None: y = np.zeros((X.shape[0],)) # depends on [control=['if'], data=['y']] func(X, y, path)
def hashes_match(self, dep_tree): """ Compares the app deptree file hashes with the hashes stored in the cache. """ hashes = self.get_hashes() for module, info in dep_tree.items(): md5 = self.get_hash(info['path']) if md5 != hashes[info['path']]: return False return True
def function[hashes_match, parameter[self, dep_tree]]: constant[ Compares the app deptree file hashes with the hashes stored in the cache. ] variable[hashes] assign[=] call[name[self].get_hashes, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b025f070>, <ast.Name object at 0x7da1b025d6c0>]]] in starred[call[name[dep_tree].items, parameter[]]] begin[:] variable[md5] assign[=] call[name[self].get_hash, parameter[call[name[info]][constant[path]]]] if compare[name[md5] not_equal[!=] call[name[hashes]][call[name[info]][constant[path]]]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[hashes_match] ( identifier[self] , identifier[dep_tree] ): literal[string] identifier[hashes] = identifier[self] . identifier[get_hashes] () keyword[for] identifier[module] , identifier[info] keyword[in] identifier[dep_tree] . identifier[items] (): identifier[md5] = identifier[self] . identifier[get_hash] ( identifier[info] [ literal[string] ]) keyword[if] identifier[md5] != identifier[hashes] [ identifier[info] [ literal[string] ]]: keyword[return] keyword[False] keyword[return] keyword[True]
def hashes_match(self, dep_tree): """ Compares the app deptree file hashes with the hashes stored in the cache. """ hashes = self.get_hashes() for (module, info) in dep_tree.items(): md5 = self.get_hash(info['path']) if md5 != hashes[info['path']]: return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return True
def regex(pattern, prompt=None, empty=False, flags=0): """Prompt a string that matches a regular expression. Parameters ---------- pattern : str A regular expression that must be matched. prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. flags : int, optional Flags that will be passed to ``re.match``. Returns ------- Match or None A match object if the user entered a matching string. None if the user pressed only Enter and ``empty`` was True. See Also -------- re.match """ s = _prompt_input(prompt) if empty and not s: return None else: m = re.match(pattern, s, flags=flags) if m: return m else: return regex(pattern, prompt=prompt, empty=empty, flags=flags)
def function[regex, parameter[pattern, prompt, empty, flags]]: constant[Prompt a string that matches a regular expression. Parameters ---------- pattern : str A regular expression that must be matched. prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. flags : int, optional Flags that will be passed to ``re.match``. Returns ------- Match or None A match object if the user entered a matching string. None if the user pressed only Enter and ``empty`` was True. See Also -------- re.match ] variable[s] assign[=] call[name[_prompt_input], parameter[name[prompt]]] if <ast.BoolOp object at 0x7da20c76f850> begin[:] return[constant[None]]
keyword[def] identifier[regex] ( identifier[pattern] , identifier[prompt] = keyword[None] , identifier[empty] = keyword[False] , identifier[flags] = literal[int] ): literal[string] identifier[s] = identifier[_prompt_input] ( identifier[prompt] ) keyword[if] identifier[empty] keyword[and] keyword[not] identifier[s] : keyword[return] keyword[None] keyword[else] : identifier[m] = identifier[re] . identifier[match] ( identifier[pattern] , identifier[s] , identifier[flags] = identifier[flags] ) keyword[if] identifier[m] : keyword[return] identifier[m] keyword[else] : keyword[return] identifier[regex] ( identifier[pattern] , identifier[prompt] = identifier[prompt] , identifier[empty] = identifier[empty] , identifier[flags] = identifier[flags] )
def regex(pattern, prompt=None, empty=False, flags=0): """Prompt a string that matches a regular expression. Parameters ---------- pattern : str A regular expression that must be matched. prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. flags : int, optional Flags that will be passed to ``re.match``. Returns ------- Match or None A match object if the user entered a matching string. None if the user pressed only Enter and ``empty`` was True. See Also -------- re.match """ s = _prompt_input(prompt) if empty and (not s): return None # depends on [control=['if'], data=[]] else: m = re.match(pattern, s, flags=flags) if m: return m # depends on [control=['if'], data=[]] else: return regex(pattern, prompt=prompt, empty=empty, flags=flags)
def call(self, request=None, *args, **kwargs): """ Calls multiple time - with retry. :param request: :return: response """ if request is not None: self.request = request retry = self.request.configuration.retry if not isinstance(retry, SimpleRetry): raise Error('Currently only the fast retry strategy is supported') last_exception = None for i in range(0, retry.max_retry): try: if i > 0: retry.sleep_jitter() self.call_once() return self.response except Exception as ex: last_exception = RequestFailed(message='Request failed', cause=ex) logger.debug("Request %d failed, exception: %s" % (i, ex)) # Last exception - throw it here to have a stack if i+1 == retry.max_retry: raise last_exception raise last_exception
def function[call, parameter[self, request]]: constant[ Calls multiple time - with retry. :param request: :return: response ] if compare[name[request] is_not constant[None]] begin[:] name[self].request assign[=] name[request] variable[retry] assign[=] name[self].request.configuration.retry if <ast.UnaryOp object at 0x7da1b14d0190> begin[:] <ast.Raise object at 0x7da1b14d13c0> variable[last_exception] assign[=] constant[None] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[retry].max_retry]]] begin[:] <ast.Try object at 0x7da1b14d2ec0> <ast.Raise object at 0x7da1b14d2470>
keyword[def] identifier[call] ( identifier[self] , identifier[request] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[request] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[request] = identifier[request] identifier[retry] = identifier[self] . identifier[request] . identifier[configuration] . identifier[retry] keyword[if] keyword[not] identifier[isinstance] ( identifier[retry] , identifier[SimpleRetry] ): keyword[raise] identifier[Error] ( literal[string] ) identifier[last_exception] = keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[retry] . identifier[max_retry] ): keyword[try] : keyword[if] identifier[i] > literal[int] : identifier[retry] . identifier[sleep_jitter] () identifier[self] . identifier[call_once] () keyword[return] identifier[self] . identifier[response] keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[last_exception] = identifier[RequestFailed] ( identifier[message] = literal[string] , identifier[cause] = identifier[ex] ) identifier[logger] . identifier[debug] ( literal[string] %( identifier[i] , identifier[ex] )) keyword[if] identifier[i] + literal[int] == identifier[retry] . identifier[max_retry] : keyword[raise] identifier[last_exception] keyword[raise] identifier[last_exception]
def call(self, request=None, *args, **kwargs): """ Calls multiple time - with retry. :param request: :return: response """ if request is not None: self.request = request # depends on [control=['if'], data=['request']] retry = self.request.configuration.retry if not isinstance(retry, SimpleRetry): raise Error('Currently only the fast retry strategy is supported') # depends on [control=['if'], data=[]] last_exception = None for i in range(0, retry.max_retry): try: if i > 0: retry.sleep_jitter() # depends on [control=['if'], data=[]] self.call_once() return self.response # depends on [control=['try'], data=[]] except Exception as ex: last_exception = RequestFailed(message='Request failed', cause=ex) logger.debug('Request %d failed, exception: %s' % (i, ex)) # Last exception - throw it here to have a stack if i + 1 == retry.max_retry: raise last_exception # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['ex']] # depends on [control=['for'], data=['i']] raise last_exception
def unwrap(self): """ Unwrapping the inspector based on the type """ if self.args_type == "MODULE_FUNCTION": setattr(self.obj, self.prop, self.orig_func) elif self.args_type == "MODULE": delattr(self.obj, "__SINONLOCK__") elif self.args_type == "FUNCTION": setattr(CPSCOPE, self.obj.__name__, self.orig_func) elif self.args_type == "PURE": setattr(self.pure, "func", self.orig_func)
def function[unwrap, parameter[self]]: constant[ Unwrapping the inspector based on the type ] if compare[name[self].args_type equal[==] constant[MODULE_FUNCTION]] begin[:] call[name[setattr], parameter[name[self].obj, name[self].prop, name[self].orig_func]]
keyword[def] identifier[unwrap] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[args_type] == literal[string] : identifier[setattr] ( identifier[self] . identifier[obj] , identifier[self] . identifier[prop] , identifier[self] . identifier[orig_func] ) keyword[elif] identifier[self] . identifier[args_type] == literal[string] : identifier[delattr] ( identifier[self] . identifier[obj] , literal[string] ) keyword[elif] identifier[self] . identifier[args_type] == literal[string] : identifier[setattr] ( identifier[CPSCOPE] , identifier[self] . identifier[obj] . identifier[__name__] , identifier[self] . identifier[orig_func] ) keyword[elif] identifier[self] . identifier[args_type] == literal[string] : identifier[setattr] ( identifier[self] . identifier[pure] , literal[string] , identifier[self] . identifier[orig_func] )
def unwrap(self): """ Unwrapping the inspector based on the type """ if self.args_type == 'MODULE_FUNCTION': setattr(self.obj, self.prop, self.orig_func) # depends on [control=['if'], data=[]] elif self.args_type == 'MODULE': delattr(self.obj, '__SINONLOCK__') # depends on [control=['if'], data=[]] elif self.args_type == 'FUNCTION': setattr(CPSCOPE, self.obj.__name__, self.orig_func) # depends on [control=['if'], data=[]] elif self.args_type == 'PURE': setattr(self.pure, 'func', self.orig_func) # depends on [control=['if'], data=[]]
def get_roots(self): """ Returns a list of roots of the graph. Examples -------- >>> from pgmpy.base import DAG >>> graph = DAG([('A', 'B'), ('B', 'C'), ('B', 'D'), ('E', 'B')]) >>> graph.get_roots() ['A', 'E'] """ return [node for node, in_degree in dict(self.in_degree()).items() if in_degree == 0]
def function[get_roots, parameter[self]]: constant[ Returns a list of roots of the graph. Examples -------- >>> from pgmpy.base import DAG >>> graph = DAG([('A', 'B'), ('B', 'C'), ('B', 'D'), ('E', 'B')]) >>> graph.get_roots() ['A', 'E'] ] return[<ast.ListComp object at 0x7da18f810f10>]
keyword[def] identifier[get_roots] ( identifier[self] ): literal[string] keyword[return] [ identifier[node] keyword[for] identifier[node] , identifier[in_degree] keyword[in] identifier[dict] ( identifier[self] . identifier[in_degree] ()). identifier[items] () keyword[if] identifier[in_degree] == literal[int] ]
def get_roots(self): """ Returns a list of roots of the graph. Examples -------- >>> from pgmpy.base import DAG >>> graph = DAG([('A', 'B'), ('B', 'C'), ('B', 'D'), ('E', 'B')]) >>> graph.get_roots() ['A', 'E'] """ return [node for (node, in_degree) in dict(self.in_degree()).items() if in_degree == 0]
def from_pb(cls, app_profile_pb, instance): """Creates an instance app_profile from a protobuf. :type app_profile_pb: :class:`instance_pb2.app_profile_pb` :param app_profile_pb: An instance protobuf object. :type instance: :class:`google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the cluster. :rtype: :class:`AppProfile` :returns: The AppProfile parsed from the protobuf response. :raises: :class:`ValueError <exceptions.ValueError>` if the AppProfile name does not match ``projects/{project}/instances/{instance_id}/appProfiles/{app_profile_id}`` or if the parsed instance ID does not match the istance ID on the client. or if the parsed project ID does not match the project ID on the client. """ match_app_profile_name = _APP_PROFILE_NAME_RE.match(app_profile_pb.name) if match_app_profile_name is None: raise ValueError( "AppProfile protobuf name was not in the " "expected format.", app_profile_pb.name, ) if match_app_profile_name.group("instance") != instance.instance_id: raise ValueError( "Instance ID on app_profile does not match the " "instance ID on the client" ) if match_app_profile_name.group("project") != instance._client.project: raise ValueError( "Project ID on app_profile does not match the " "project ID on the client" ) app_profile_id = match_app_profile_name.group("app_profile_id") result = cls(app_profile_id, instance) result._update_from_pb(app_profile_pb) return result
def function[from_pb, parameter[cls, app_profile_pb, instance]]: constant[Creates an instance app_profile from a protobuf. :type app_profile_pb: :class:`instance_pb2.app_profile_pb` :param app_profile_pb: An instance protobuf object. :type instance: :class:`google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the cluster. :rtype: :class:`AppProfile` :returns: The AppProfile parsed from the protobuf response. :raises: :class:`ValueError <exceptions.ValueError>` if the AppProfile name does not match ``projects/{project}/instances/{instance_id}/appProfiles/{app_profile_id}`` or if the parsed instance ID does not match the istance ID on the client. or if the parsed project ID does not match the project ID on the client. ] variable[match_app_profile_name] assign[=] call[name[_APP_PROFILE_NAME_RE].match, parameter[name[app_profile_pb].name]] if compare[name[match_app_profile_name] is constant[None]] begin[:] <ast.Raise object at 0x7da2047e8e20> if compare[call[name[match_app_profile_name].group, parameter[constant[instance]]] not_equal[!=] name[instance].instance_id] begin[:] <ast.Raise object at 0x7da2047ea140> if compare[call[name[match_app_profile_name].group, parameter[constant[project]]] not_equal[!=] name[instance]._client.project] begin[:] <ast.Raise object at 0x7da2047eb2e0> variable[app_profile_id] assign[=] call[name[match_app_profile_name].group, parameter[constant[app_profile_id]]] variable[result] assign[=] call[name[cls], parameter[name[app_profile_id], name[instance]]] call[name[result]._update_from_pb, parameter[name[app_profile_pb]]] return[name[result]]
keyword[def] identifier[from_pb] ( identifier[cls] , identifier[app_profile_pb] , identifier[instance] ): literal[string] identifier[match_app_profile_name] = identifier[_APP_PROFILE_NAME_RE] . identifier[match] ( identifier[app_profile_pb] . identifier[name] ) keyword[if] identifier[match_app_profile_name] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] , identifier[app_profile_pb] . identifier[name] , ) keyword[if] identifier[match_app_profile_name] . identifier[group] ( literal[string] )!= identifier[instance] . identifier[instance_id] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] identifier[match_app_profile_name] . identifier[group] ( literal[string] )!= identifier[instance] . identifier[_client] . identifier[project] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[app_profile_id] = identifier[match_app_profile_name] . identifier[group] ( literal[string] ) identifier[result] = identifier[cls] ( identifier[app_profile_id] , identifier[instance] ) identifier[result] . identifier[_update_from_pb] ( identifier[app_profile_pb] ) keyword[return] identifier[result]
def from_pb(cls, app_profile_pb, instance): """Creates an instance app_profile from a protobuf. :type app_profile_pb: :class:`instance_pb2.app_profile_pb` :param app_profile_pb: An instance protobuf object. :type instance: :class:`google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the cluster. :rtype: :class:`AppProfile` :returns: The AppProfile parsed from the protobuf response. :raises: :class:`ValueError <exceptions.ValueError>` if the AppProfile name does not match ``projects/{project}/instances/{instance_id}/appProfiles/{app_profile_id}`` or if the parsed instance ID does not match the istance ID on the client. or if the parsed project ID does not match the project ID on the client. """ match_app_profile_name = _APP_PROFILE_NAME_RE.match(app_profile_pb.name) if match_app_profile_name is None: raise ValueError('AppProfile protobuf name was not in the expected format.', app_profile_pb.name) # depends on [control=['if'], data=[]] if match_app_profile_name.group('instance') != instance.instance_id: raise ValueError('Instance ID on app_profile does not match the instance ID on the client') # depends on [control=['if'], data=[]] if match_app_profile_name.group('project') != instance._client.project: raise ValueError('Project ID on app_profile does not match the project ID on the client') # depends on [control=['if'], data=[]] app_profile_id = match_app_profile_name.group('app_profile_id') result = cls(app_profile_id, instance) result._update_from_pb(app_profile_pb) return result
def add_to(self, parent, name=None, index=None): """Add element to a parent.""" parent.add_child(self, name=name, index=index) return self
def function[add_to, parameter[self, parent, name, index]]: constant[Add element to a parent.] call[name[parent].add_child, parameter[name[self]]] return[name[self]]
keyword[def] identifier[add_to] ( identifier[self] , identifier[parent] , identifier[name] = keyword[None] , identifier[index] = keyword[None] ): literal[string] identifier[parent] . identifier[add_child] ( identifier[self] , identifier[name] = identifier[name] , identifier[index] = identifier[index] ) keyword[return] identifier[self]
def add_to(self, parent, name=None, index=None): """Add element to a parent.""" parent.add_child(self, name=name, index=index) return self
def poll(self, verbose_model_scoring_history = False): """ Wait until the job finishes. This method will continuously query the server about the status of the job, until the job reaches a completion. During this time we will display (in stdout) a progress bar with % completion status. """ try: hidden = not H2OJob.__PROGRESS_BAR__ pb = ProgressBar(title=self._job_type + " progress", hidden=hidden) if verbose_model_scoring_history: pb.execute(self._refresh_job_status, print_verbose_info=lambda x: self._print_verbose_info() if int(x * 10) % 5 == 0 else " ") else: pb.execute(self._refresh_job_status) except StopIteration as e: if str(e) == "cancelled": h2o.api("POST /3/Jobs/%s/cancel" % self.job_key) self.status = "CANCELLED" # Potentially we may want to re-raise the exception here assert self.status in {"DONE", "CANCELLED", "FAILED"} or self._poll_count <= 0, \ "Polling finished while the job has status %s" % self.status if self.warnings: for w in self.warnings: warnings.warn(w) # check if failed... and politely print relevant message if self.status == "CANCELLED": raise H2OJobCancelled("Job<%s> was cancelled by the user." % self.job_key) if self.status == "FAILED": if (isinstance(self.job, dict)) and ("stacktrace" in list(self.job)): raise EnvironmentError("Job with key {} failed with an exception: {}\nstacktrace: " "\n{}".format(self.job_key, self.exception, self.job["stacktrace"])) else: raise EnvironmentError("Job with key %s failed with an exception: %s" % (self.job_key, self.exception)) return self
def function[poll, parameter[self, verbose_model_scoring_history]]: constant[ Wait until the job finishes. This method will continuously query the server about the status of the job, until the job reaches a completion. During this time we will display (in stdout) a progress bar with % completion status. ] <ast.Try object at 0x7da18c4cfc70> assert[<ast.BoolOp object at 0x7da18c4cdc90>] if name[self].warnings begin[:] for taget[name[w]] in starred[name[self].warnings] begin[:] call[name[warnings].warn, parameter[name[w]]] if compare[name[self].status equal[==] constant[CANCELLED]] begin[:] <ast.Raise object at 0x7da18c4ce440> if compare[name[self].status equal[==] constant[FAILED]] begin[:] if <ast.BoolOp object at 0x7da18c4cc670> begin[:] <ast.Raise object at 0x7da18c4cfd90> return[name[self]]
keyword[def] identifier[poll] ( identifier[self] , identifier[verbose_model_scoring_history] = keyword[False] ): literal[string] keyword[try] : identifier[hidden] = keyword[not] identifier[H2OJob] . identifier[__PROGRESS_BAR__] identifier[pb] = identifier[ProgressBar] ( identifier[title] = identifier[self] . identifier[_job_type] + literal[string] , identifier[hidden] = identifier[hidden] ) keyword[if] identifier[verbose_model_scoring_history] : identifier[pb] . identifier[execute] ( identifier[self] . identifier[_refresh_job_status] , identifier[print_verbose_info] = keyword[lambda] identifier[x] : identifier[self] . identifier[_print_verbose_info] () keyword[if] identifier[int] ( identifier[x] * literal[int] )% literal[int] == literal[int] keyword[else] literal[string] ) keyword[else] : identifier[pb] . identifier[execute] ( identifier[self] . identifier[_refresh_job_status] ) keyword[except] identifier[StopIteration] keyword[as] identifier[e] : keyword[if] identifier[str] ( identifier[e] )== literal[string] : identifier[h2o] . identifier[api] ( literal[string] % identifier[self] . identifier[job_key] ) identifier[self] . identifier[status] = literal[string] keyword[assert] identifier[self] . identifier[status] keyword[in] { literal[string] , literal[string] , literal[string] } keyword[or] identifier[self] . identifier[_poll_count] <= literal[int] , literal[string] % identifier[self] . identifier[status] keyword[if] identifier[self] . identifier[warnings] : keyword[for] identifier[w] keyword[in] identifier[self] . identifier[warnings] : identifier[warnings] . identifier[warn] ( identifier[w] ) keyword[if] identifier[self] . identifier[status] == literal[string] : keyword[raise] identifier[H2OJobCancelled] ( literal[string] % identifier[self] . identifier[job_key] ) keyword[if] identifier[self] . identifier[status] == literal[string] : keyword[if] ( identifier[isinstance] ( identifier[self] . identifier[job] , identifier[dict] )) keyword[and] ( literal[string] keyword[in] identifier[list] ( identifier[self] . identifier[job] )): keyword[raise] identifier[EnvironmentError] ( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[job_key] , identifier[self] . identifier[exception] , identifier[self] . identifier[job] [ literal[string] ])) keyword[else] : keyword[raise] identifier[EnvironmentError] ( literal[string] %( identifier[self] . identifier[job_key] , identifier[self] . identifier[exception] )) keyword[return] identifier[self]
def poll(self, verbose_model_scoring_history=False): """ Wait until the job finishes. This method will continuously query the server about the status of the job, until the job reaches a completion. During this time we will display (in stdout) a progress bar with % completion status. """ try: hidden = not H2OJob.__PROGRESS_BAR__ pb = ProgressBar(title=self._job_type + ' progress', hidden=hidden) if verbose_model_scoring_history: pb.execute(self._refresh_job_status, print_verbose_info=lambda x: self._print_verbose_info() if int(x * 10) % 5 == 0 else ' ') # depends on [control=['if'], data=[]] else: pb.execute(self._refresh_job_status) # depends on [control=['try'], data=[]] except StopIteration as e: if str(e) == 'cancelled': h2o.api('POST /3/Jobs/%s/cancel' % self.job_key) self.status = 'CANCELLED' # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # Potentially we may want to re-raise the exception here assert self.status in {'DONE', 'CANCELLED', 'FAILED'} or self._poll_count <= 0, 'Polling finished while the job has status %s' % self.status if self.warnings: for w in self.warnings: warnings.warn(w) # depends on [control=['for'], data=['w']] # depends on [control=['if'], data=[]] # check if failed... and politely print relevant message if self.status == 'CANCELLED': raise H2OJobCancelled('Job<%s> was cancelled by the user.' % self.job_key) # depends on [control=['if'], data=[]] if self.status == 'FAILED': if isinstance(self.job, dict) and 'stacktrace' in list(self.job): raise EnvironmentError('Job with key {} failed with an exception: {}\nstacktrace: \n{}'.format(self.job_key, self.exception, self.job['stacktrace'])) # depends on [control=['if'], data=[]] else: raise EnvironmentError('Job with key %s failed with an exception: %s' % (self.job_key, self.exception)) # depends on [control=['if'], data=[]] return self
def _calculate_credit_charge(self, message): """ Calculates the credit charge for a request based on the command. If connection.supports_multi_credit is not True then the credit charge isn't valid so it returns 0. The credit charge is the number of credits that are required for sending/receiving data over 64 kilobytes, in the existing messages only the Read, Write, Query Directory or IOCTL commands will end in this scenario and each require their own calculation to get the proper value. The generic formula for calculating the credit charge is https://msdn.microsoft.com/en-us/library/dn529312.aspx (max(SendPayloadSize, Expected ResponsePayloadSize) - 1) / 65536 + 1 :param message: The message being sent :return: The credit charge to set on the header """ credit_size = 65536 if not self.supports_multi_credit: credit_charge = 0 elif message.COMMAND == Commands.SMB2_READ: max_size = message['length'].get_value() + \ message['read_channel_info_length'].get_value() - 1 credit_charge = math.ceil(max_size / credit_size) elif message.COMMAND == Commands.SMB2_WRITE: max_size = message['length'].get_value() + \ message['write_channel_info_length'].get_value() - 1 credit_charge = math.ceil(max_size / credit_size) elif message.COMMAND == Commands.SMB2_IOCTL: max_in_size = len(message['buffer']) max_out_size = message['max_output_response'].get_value() max_size = max(max_in_size, max_out_size) - 1 credit_charge = math.ceil(max_size / credit_size) elif message.COMMAND == Commands.SMB2_QUERY_DIRECTORY: max_in_size = len(message['buffer']) max_out_size = message['output_buffer_length'].get_value() max_size = max(max_in_size, max_out_size) - 1 credit_charge = math.ceil(max_size / credit_size) else: credit_charge = 1 # python 2 returns a float where we need an integer return int(credit_charge)
def function[_calculate_credit_charge, parameter[self, message]]: constant[ Calculates the credit charge for a request based on the command. If connection.supports_multi_credit is not True then the credit charge isn't valid so it returns 0. The credit charge is the number of credits that are required for sending/receiving data over 64 kilobytes, in the existing messages only the Read, Write, Query Directory or IOCTL commands will end in this scenario and each require their own calculation to get the proper value. The generic formula for calculating the credit charge is https://msdn.microsoft.com/en-us/library/dn529312.aspx (max(SendPayloadSize, Expected ResponsePayloadSize) - 1) / 65536 + 1 :param message: The message being sent :return: The credit charge to set on the header ] variable[credit_size] assign[=] constant[65536] if <ast.UnaryOp object at 0x7da207f01450> begin[:] variable[credit_charge] assign[=] constant[0] return[call[name[int], parameter[name[credit_charge]]]]
keyword[def] identifier[_calculate_credit_charge] ( identifier[self] , identifier[message] ): literal[string] identifier[credit_size] = literal[int] keyword[if] keyword[not] identifier[self] . identifier[supports_multi_credit] : identifier[credit_charge] = literal[int] keyword[elif] identifier[message] . identifier[COMMAND] == identifier[Commands] . identifier[SMB2_READ] : identifier[max_size] = identifier[message] [ literal[string] ]. identifier[get_value] ()+ identifier[message] [ literal[string] ]. identifier[get_value] ()- literal[int] identifier[credit_charge] = identifier[math] . identifier[ceil] ( identifier[max_size] / identifier[credit_size] ) keyword[elif] identifier[message] . identifier[COMMAND] == identifier[Commands] . identifier[SMB2_WRITE] : identifier[max_size] = identifier[message] [ literal[string] ]. identifier[get_value] ()+ identifier[message] [ literal[string] ]. identifier[get_value] ()- literal[int] identifier[credit_charge] = identifier[math] . identifier[ceil] ( identifier[max_size] / identifier[credit_size] ) keyword[elif] identifier[message] . identifier[COMMAND] == identifier[Commands] . identifier[SMB2_IOCTL] : identifier[max_in_size] = identifier[len] ( identifier[message] [ literal[string] ]) identifier[max_out_size] = identifier[message] [ literal[string] ]. identifier[get_value] () identifier[max_size] = identifier[max] ( identifier[max_in_size] , identifier[max_out_size] )- literal[int] identifier[credit_charge] = identifier[math] . identifier[ceil] ( identifier[max_size] / identifier[credit_size] ) keyword[elif] identifier[message] . identifier[COMMAND] == identifier[Commands] . identifier[SMB2_QUERY_DIRECTORY] : identifier[max_in_size] = identifier[len] ( identifier[message] [ literal[string] ]) identifier[max_out_size] = identifier[message] [ literal[string] ]. identifier[get_value] () identifier[max_size] = identifier[max] ( identifier[max_in_size] , identifier[max_out_size] )- literal[int] identifier[credit_charge] = identifier[math] . identifier[ceil] ( identifier[max_size] / identifier[credit_size] ) keyword[else] : identifier[credit_charge] = literal[int] keyword[return] identifier[int] ( identifier[credit_charge] )
def _calculate_credit_charge(self, message): """ Calculates the credit charge for a request based on the command. If connection.supports_multi_credit is not True then the credit charge isn't valid so it returns 0. The credit charge is the number of credits that are required for sending/receiving data over 64 kilobytes, in the existing messages only the Read, Write, Query Directory or IOCTL commands will end in this scenario and each require their own calculation to get the proper value. The generic formula for calculating the credit charge is https://msdn.microsoft.com/en-us/library/dn529312.aspx (max(SendPayloadSize, Expected ResponsePayloadSize) - 1) / 65536 + 1 :param message: The message being sent :return: The credit charge to set on the header """ credit_size = 65536 if not self.supports_multi_credit: credit_charge = 0 # depends on [control=['if'], data=[]] elif message.COMMAND == Commands.SMB2_READ: max_size = message['length'].get_value() + message['read_channel_info_length'].get_value() - 1 credit_charge = math.ceil(max_size / credit_size) # depends on [control=['if'], data=[]] elif message.COMMAND == Commands.SMB2_WRITE: max_size = message['length'].get_value() + message['write_channel_info_length'].get_value() - 1 credit_charge = math.ceil(max_size / credit_size) # depends on [control=['if'], data=[]] elif message.COMMAND == Commands.SMB2_IOCTL: max_in_size = len(message['buffer']) max_out_size = message['max_output_response'].get_value() max_size = max(max_in_size, max_out_size) - 1 credit_charge = math.ceil(max_size / credit_size) # depends on [control=['if'], data=[]] elif message.COMMAND == Commands.SMB2_QUERY_DIRECTORY: max_in_size = len(message['buffer']) max_out_size = message['output_buffer_length'].get_value() max_size = max(max_in_size, max_out_size) - 1 credit_charge = math.ceil(max_size / credit_size) # depends on [control=['if'], data=[]] else: credit_charge = 1 # python 2 returns a float where we need an integer return int(credit_charge)
def last(self, n=1): """ Get the last element of an array. Passing **n** will return the last N values in the array. The **guard** check allows it to work with `_.map`. """ res = self.obj[-n:] if len(res) is 1: res = res[0] return self._wrap(res)
def function[last, parameter[self, n]]: constant[ Get the last element of an array. Passing **n** will return the last N values in the array. The **guard** check allows it to work with `_.map`. ] variable[res] assign[=] call[name[self].obj][<ast.Slice object at 0x7da18f58ebc0>] if compare[call[name[len], parameter[name[res]]] is constant[1]] begin[:] variable[res] assign[=] call[name[res]][constant[0]] return[call[name[self]._wrap, parameter[name[res]]]]
keyword[def] identifier[last] ( identifier[self] , identifier[n] = literal[int] ): literal[string] identifier[res] = identifier[self] . identifier[obj] [- identifier[n] :] keyword[if] identifier[len] ( identifier[res] ) keyword[is] literal[int] : identifier[res] = identifier[res] [ literal[int] ] keyword[return] identifier[self] . identifier[_wrap] ( identifier[res] )
def last(self, n=1): """ Get the last element of an array. Passing **n** will return the last N values in the array. The **guard** check allows it to work with `_.map`. """ res = self.obj[-n:] if len(res) is 1: res = res[0] # depends on [control=['if'], data=[]] return self._wrap(res)
def exists(config): """ Check whether the .wily/ directory exists. :param config: The configuration :type config: :class:`wily.config.WilyConfig` :return: Whether the .wily directory exists :rtype: ``boolean`` """ exists = ( pathlib.Path(config.cache_path).exists() and pathlib.Path(config.cache_path).is_dir() ) if not exists: return False index_path = pathlib.Path(config.cache_path) / "index.json" if index_path.exists(): with open(index_path, "r") as out: index = json.load(out) if index["version"] != __version__: # TODO: Inspect the versions properly. logger.warning( "Wily cache is old, you may incur errors until you rebuild the cache." ) else: logger.warning( "Wily cache was not versioned, you may incur errors until you rebuild the cache." ) create_index(config) return True
def function[exists, parameter[config]]: constant[ Check whether the .wily/ directory exists. :param config: The configuration :type config: :class:`wily.config.WilyConfig` :return: Whether the .wily directory exists :rtype: ``boolean`` ] variable[exists] assign[=] <ast.BoolOp object at 0x7da18c4ce320> if <ast.UnaryOp object at 0x7da18c4cc6a0> begin[:] return[constant[False]] variable[index_path] assign[=] binary_operation[call[name[pathlib].Path, parameter[name[config].cache_path]] / constant[index.json]] if call[name[index_path].exists, parameter[]] begin[:] with call[name[open], parameter[name[index_path], constant[r]]] begin[:] variable[index] assign[=] call[name[json].load, parameter[name[out]]] if compare[call[name[index]][constant[version]] not_equal[!=] name[__version__]] begin[:] call[name[logger].warning, parameter[constant[Wily cache is old, you may incur errors until you rebuild the cache.]]] return[constant[True]]
keyword[def] identifier[exists] ( identifier[config] ): literal[string] identifier[exists] =( identifier[pathlib] . identifier[Path] ( identifier[config] . identifier[cache_path] ). identifier[exists] () keyword[and] identifier[pathlib] . identifier[Path] ( identifier[config] . identifier[cache_path] ). identifier[is_dir] () ) keyword[if] keyword[not] identifier[exists] : keyword[return] keyword[False] identifier[index_path] = identifier[pathlib] . identifier[Path] ( identifier[config] . identifier[cache_path] )/ literal[string] keyword[if] identifier[index_path] . identifier[exists] (): keyword[with] identifier[open] ( identifier[index_path] , literal[string] ) keyword[as] identifier[out] : identifier[index] = identifier[json] . identifier[load] ( identifier[out] ) keyword[if] identifier[index] [ literal[string] ]!= identifier[__version__] : identifier[logger] . identifier[warning] ( literal[string] ) keyword[else] : identifier[logger] . identifier[warning] ( literal[string] ) identifier[create_index] ( identifier[config] ) keyword[return] keyword[True]
def exists(config): """ Check whether the .wily/ directory exists. :param config: The configuration :type config: :class:`wily.config.WilyConfig` :return: Whether the .wily directory exists :rtype: ``boolean`` """ exists = pathlib.Path(config.cache_path).exists() and pathlib.Path(config.cache_path).is_dir() if not exists: return False # depends on [control=['if'], data=[]] index_path = pathlib.Path(config.cache_path) / 'index.json' if index_path.exists(): with open(index_path, 'r') as out: index = json.load(out) # depends on [control=['with'], data=['out']] if index['version'] != __version__: # TODO: Inspect the versions properly. logger.warning('Wily cache is old, you may incur errors until you rebuild the cache.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: logger.warning('Wily cache was not versioned, you may incur errors until you rebuild the cache.') create_index(config) return True
def replace(self, pattern, replacement): """ Replace all instances of a pattern with a replacement. Args: pattern (str): Pattern to replace replacement (str): Text to insert """ for i, line in enumerate(self): if pattern in line: self[i] = line.replace(pattern, replacement)
def function[replace, parameter[self, pattern, replacement]]: constant[ Replace all instances of a pattern with a replacement. Args: pattern (str): Pattern to replace replacement (str): Text to insert ] for taget[tuple[[<ast.Name object at 0x7da2041db5b0>, <ast.Name object at 0x7da2041d98d0>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:] if compare[name[pattern] in name[line]] begin[:] call[name[self]][name[i]] assign[=] call[name[line].replace, parameter[name[pattern], name[replacement]]]
keyword[def] identifier[replace] ( identifier[self] , identifier[pattern] , identifier[replacement] ): literal[string] keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[self] ): keyword[if] identifier[pattern] keyword[in] identifier[line] : identifier[self] [ identifier[i] ]= identifier[line] . identifier[replace] ( identifier[pattern] , identifier[replacement] )
def replace(self, pattern, replacement): """ Replace all instances of a pattern with a replacement. Args: pattern (str): Pattern to replace replacement (str): Text to insert """ for (i, line) in enumerate(self): if pattern in line: self[i] = line.replace(pattern, replacement) # depends on [control=['if'], data=['pattern', 'line']] # depends on [control=['for'], data=[]]
def to_sdp(self): """ Return a string representation suitable for SDP. """ sdp = '%s %d %s %d %s %d typ %s' % ( self.foundation, self.component, self.transport, self.priority, self.host, self.port, self.type) if self.related_address is not None: sdp += ' raddr %s' % self.related_address if self.related_port is not None: sdp += ' rport %s' % self.related_port if self.tcptype is not None: sdp += ' tcptype %s' % self.tcptype if self.generation is not None: sdp += ' generation %d' % self.generation return sdp
def function[to_sdp, parameter[self]]: constant[ Return a string representation suitable for SDP. ] variable[sdp] assign[=] binary_operation[constant[%s %d %s %d %s %d typ %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204346710>, <ast.Attribute object at 0x7da204345de0>, <ast.Attribute object at 0x7da204344340>, <ast.Attribute object at 0x7da204345750>, <ast.Attribute object at 0x7da204345780>, <ast.Attribute object at 0x7da2043470d0>, <ast.Attribute object at 0x7da204345ba0>]]] if compare[name[self].related_address is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da2043479a0> if compare[name[self].related_port is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da204345f90> if compare[name[self].tcptype is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da204344220> if compare[name[self].generation is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da2043467d0> return[name[sdp]]
keyword[def] identifier[to_sdp] ( identifier[self] ): literal[string] identifier[sdp] = literal[string] %( identifier[self] . identifier[foundation] , identifier[self] . identifier[component] , identifier[self] . identifier[transport] , identifier[self] . identifier[priority] , identifier[self] . identifier[host] , identifier[self] . identifier[port] , identifier[self] . identifier[type] ) keyword[if] identifier[self] . identifier[related_address] keyword[is] keyword[not] keyword[None] : identifier[sdp] += literal[string] % identifier[self] . identifier[related_address] keyword[if] identifier[self] . identifier[related_port] keyword[is] keyword[not] keyword[None] : identifier[sdp] += literal[string] % identifier[self] . identifier[related_port] keyword[if] identifier[self] . identifier[tcptype] keyword[is] keyword[not] keyword[None] : identifier[sdp] += literal[string] % identifier[self] . identifier[tcptype] keyword[if] identifier[self] . identifier[generation] keyword[is] keyword[not] keyword[None] : identifier[sdp] += literal[string] % identifier[self] . identifier[generation] keyword[return] identifier[sdp]
def to_sdp(self): """ Return a string representation suitable for SDP. """ sdp = '%s %d %s %d %s %d typ %s' % (self.foundation, self.component, self.transport, self.priority, self.host, self.port, self.type) if self.related_address is not None: sdp += ' raddr %s' % self.related_address # depends on [control=['if'], data=[]] if self.related_port is not None: sdp += ' rport %s' % self.related_port # depends on [control=['if'], data=[]] if self.tcptype is not None: sdp += ' tcptype %s' % self.tcptype # depends on [control=['if'], data=[]] if self.generation is not None: sdp += ' generation %d' % self.generation # depends on [control=['if'], data=[]] return sdp
def calc_z0_and_conv_factor_from_ratio_of_harmonics(z, z2, NA=0.999): """ Calculates the Conversion Factor and physical amplitude of motion in nms by comparison of the ratio of the heights of the z signal and second harmonic of z. Parameters ---------- z : ndarray array containing z signal in volts z2 : ndarray array containing second harmonic of z signal in volts NA : float NA of mirror used in experiment Returns ------- z0 : float Physical average amplitude of motion in nms ConvFactor : float Conversion Factor between volts and nms """ V1 = calc_mean_amp(z) V2 = calc_mean_amp(z2) ratio = V2/V1 beta = 4*ratio laserWavelength = 1550e-9 # in m k0 = (2*pi)/(laserWavelength) WaistSize = laserWavelength/(pi*NA) Zr = pi*WaistSize**2/laserWavelength z0 = beta/(k0 - 1/Zr) ConvFactor = V1/z0 T0 = 300 return z0, ConvFactor
def function[calc_z0_and_conv_factor_from_ratio_of_harmonics, parameter[z, z2, NA]]: constant[ Calculates the Conversion Factor and physical amplitude of motion in nms by comparison of the ratio of the heights of the z signal and second harmonic of z. Parameters ---------- z : ndarray array containing z signal in volts z2 : ndarray array containing second harmonic of z signal in volts NA : float NA of mirror used in experiment Returns ------- z0 : float Physical average amplitude of motion in nms ConvFactor : float Conversion Factor between volts and nms ] variable[V1] assign[=] call[name[calc_mean_amp], parameter[name[z]]] variable[V2] assign[=] call[name[calc_mean_amp], parameter[name[z2]]] variable[ratio] assign[=] binary_operation[name[V2] / name[V1]] variable[beta] assign[=] binary_operation[constant[4] * name[ratio]] variable[laserWavelength] assign[=] constant[1.55e-06] variable[k0] assign[=] binary_operation[binary_operation[constant[2] * name[pi]] / name[laserWavelength]] variable[WaistSize] assign[=] binary_operation[name[laserWavelength] / binary_operation[name[pi] * name[NA]]] variable[Zr] assign[=] binary_operation[binary_operation[name[pi] * binary_operation[name[WaistSize] ** constant[2]]] / name[laserWavelength]] variable[z0] assign[=] binary_operation[name[beta] / binary_operation[name[k0] - binary_operation[constant[1] / name[Zr]]]] variable[ConvFactor] assign[=] binary_operation[name[V1] / name[z0]] variable[T0] assign[=] constant[300] return[tuple[[<ast.Name object at 0x7da1b28f4d60>, <ast.Name object at 0x7da1b28f67a0>]]]
keyword[def] identifier[calc_z0_and_conv_factor_from_ratio_of_harmonics] ( identifier[z] , identifier[z2] , identifier[NA] = literal[int] ): literal[string] identifier[V1] = identifier[calc_mean_amp] ( identifier[z] ) identifier[V2] = identifier[calc_mean_amp] ( identifier[z2] ) identifier[ratio] = identifier[V2] / identifier[V1] identifier[beta] = literal[int] * identifier[ratio] identifier[laserWavelength] = literal[int] identifier[k0] =( literal[int] * identifier[pi] )/( identifier[laserWavelength] ) identifier[WaistSize] = identifier[laserWavelength] /( identifier[pi] * identifier[NA] ) identifier[Zr] = identifier[pi] * identifier[WaistSize] ** literal[int] / identifier[laserWavelength] identifier[z0] = identifier[beta] /( identifier[k0] - literal[int] / identifier[Zr] ) identifier[ConvFactor] = identifier[V1] / identifier[z0] identifier[T0] = literal[int] keyword[return] identifier[z0] , identifier[ConvFactor]
def calc_z0_and_conv_factor_from_ratio_of_harmonics(z, z2, NA=0.999): """ Calculates the Conversion Factor and physical amplitude of motion in nms by comparison of the ratio of the heights of the z signal and second harmonic of z. Parameters ---------- z : ndarray array containing z signal in volts z2 : ndarray array containing second harmonic of z signal in volts NA : float NA of mirror used in experiment Returns ------- z0 : float Physical average amplitude of motion in nms ConvFactor : float Conversion Factor between volts and nms """ V1 = calc_mean_amp(z) V2 = calc_mean_amp(z2) ratio = V2 / V1 beta = 4 * ratio laserWavelength = 1.55e-06 # in m k0 = 2 * pi / laserWavelength WaistSize = laserWavelength / (pi * NA) Zr = pi * WaistSize ** 2 / laserWavelength z0 = beta / (k0 - 1 / Zr) ConvFactor = V1 / z0 T0 = 300 return (z0, ConvFactor)
def add_dependency(self,my_dep): """ Adds a dependency to the dependency layer @type my_dep: L{Cdependency} @param my_dep: dependency object """ if self.dependency_layer is None: self.dependency_layer = Cdependencies() self.root.append(self.dependency_layer.get_node()) self.dependency_layer.add_dependency(my_dep)
def function[add_dependency, parameter[self, my_dep]]: constant[ Adds a dependency to the dependency layer @type my_dep: L{Cdependency} @param my_dep: dependency object ] if compare[name[self].dependency_layer is constant[None]] begin[:] name[self].dependency_layer assign[=] call[name[Cdependencies], parameter[]] call[name[self].root.append, parameter[call[name[self].dependency_layer.get_node, parameter[]]]] call[name[self].dependency_layer.add_dependency, parameter[name[my_dep]]]
keyword[def] identifier[add_dependency] ( identifier[self] , identifier[my_dep] ): literal[string] keyword[if] identifier[self] . identifier[dependency_layer] keyword[is] keyword[None] : identifier[self] . identifier[dependency_layer] = identifier[Cdependencies] () identifier[self] . identifier[root] . identifier[append] ( identifier[self] . identifier[dependency_layer] . identifier[get_node] ()) identifier[self] . identifier[dependency_layer] . identifier[add_dependency] ( identifier[my_dep] )
def add_dependency(self, my_dep): """ Adds a dependency to the dependency layer @type my_dep: L{Cdependency} @param my_dep: dependency object """ if self.dependency_layer is None: self.dependency_layer = Cdependencies() self.root.append(self.dependency_layer.get_node()) # depends on [control=['if'], data=[]] self.dependency_layer.add_dependency(my_dep)
def from_file(filename, **kwargs): """ Create a GeoRaster object from a file """ ndv, xsize, ysize, geot, projection, datatype = get_geo_info(filename, **kwargs) data = gdalnumeric.LoadFile(filename, **kwargs) data = np.ma.masked_array(data, mask=data == ndv, fill_value=ndv) return GeoRaster(data, geot, nodata_value=ndv, projection=projection, datatype=datatype)
def function[from_file, parameter[filename]]: constant[ Create a GeoRaster object from a file ] <ast.Tuple object at 0x7da2041d9ba0> assign[=] call[name[get_geo_info], parameter[name[filename]]] variable[data] assign[=] call[name[gdalnumeric].LoadFile, parameter[name[filename]]] variable[data] assign[=] call[name[np].ma.masked_array, parameter[name[data]]] return[call[name[GeoRaster], parameter[name[data], name[geot]]]]
keyword[def] identifier[from_file] ( identifier[filename] ,** identifier[kwargs] ): literal[string] identifier[ndv] , identifier[xsize] , identifier[ysize] , identifier[geot] , identifier[projection] , identifier[datatype] = identifier[get_geo_info] ( identifier[filename] ,** identifier[kwargs] ) identifier[data] = identifier[gdalnumeric] . identifier[LoadFile] ( identifier[filename] ,** identifier[kwargs] ) identifier[data] = identifier[np] . identifier[ma] . identifier[masked_array] ( identifier[data] , identifier[mask] = identifier[data] == identifier[ndv] , identifier[fill_value] = identifier[ndv] ) keyword[return] identifier[GeoRaster] ( identifier[data] , identifier[geot] , identifier[nodata_value] = identifier[ndv] , identifier[projection] = identifier[projection] , identifier[datatype] = identifier[datatype] )
def from_file(filename, **kwargs): """ Create a GeoRaster object from a file """ (ndv, xsize, ysize, geot, projection, datatype) = get_geo_info(filename, **kwargs) data = gdalnumeric.LoadFile(filename, **kwargs) data = np.ma.masked_array(data, mask=data == ndv, fill_value=ndv) return GeoRaster(data, geot, nodata_value=ndv, projection=projection, datatype=datatype)
def cli(obj, show_userinfo): """Display logged in user or full userinfo.""" client = obj['client'] userinfo = client.userinfo() if show_userinfo: for k, v in userinfo.items(): if isinstance(v, list): v = ', '.join(v) click.echo('{:20}: {}'.format(k, v)) else: click.echo(userinfo['preferred_username'])
def function[cli, parameter[obj, show_userinfo]]: constant[Display logged in user or full userinfo.] variable[client] assign[=] call[name[obj]][constant[client]] variable[userinfo] assign[=] call[name[client].userinfo, parameter[]] if name[show_userinfo] begin[:] for taget[tuple[[<ast.Name object at 0x7da18dc9b250>, <ast.Name object at 0x7da18dc9af20>]]] in starred[call[name[userinfo].items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[v], name[list]]] begin[:] variable[v] assign[=] call[constant[, ].join, parameter[name[v]]] call[name[click].echo, parameter[call[constant[{:20}: {}].format, parameter[name[k], name[v]]]]]
keyword[def] identifier[cli] ( identifier[obj] , identifier[show_userinfo] ): literal[string] identifier[client] = identifier[obj] [ literal[string] ] identifier[userinfo] = identifier[client] . identifier[userinfo] () keyword[if] identifier[show_userinfo] : keyword[for] identifier[k] , identifier[v] keyword[in] identifier[userinfo] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[v] , identifier[list] ): identifier[v] = literal[string] . identifier[join] ( identifier[v] ) identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[k] , identifier[v] )) keyword[else] : identifier[click] . identifier[echo] ( identifier[userinfo] [ literal[string] ])
def cli(obj, show_userinfo): """Display logged in user or full userinfo.""" client = obj['client'] userinfo = client.userinfo() if show_userinfo: for (k, v) in userinfo.items(): if isinstance(v, list): v = ', '.join(v) # depends on [control=['if'], data=[]] click.echo('{:20}: {}'.format(k, v)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: click.echo(userinfo['preferred_username'])
def _load_profile(self, profile_name): """Load a profile by name Called by load_user_options """ # find the profile default_profile = self._profile_list[0] for profile in self._profile_list: if profile.get('default', False): # explicit default, not the first default_profile = profile if profile['display_name'] == profile_name: break else: if profile_name: # name specified, but not found raise ValueError("No such profile: %s. Options include: %s" % ( profile_name, ', '.join(p['display_name'] for p in self._profile_list) )) else: # no name specified, use the default profile = default_profile self.log.debug("Applying KubeSpawner override for profile '%s'", profile['display_name']) kubespawner_override = profile.get('kubespawner_override', {}) for k, v in kubespawner_override.items(): if callable(v): v = v(self) self.log.debug(".. overriding KubeSpawner value %s=%s (callable result)", k, v) else: self.log.debug(".. overriding KubeSpawner value %s=%s", k, v) setattr(self, k, v)
def function[_load_profile, parameter[self, profile_name]]: constant[Load a profile by name Called by load_user_options ] variable[default_profile] assign[=] call[name[self]._profile_list][constant[0]] for taget[name[profile]] in starred[name[self]._profile_list] begin[:] if call[name[profile].get, parameter[constant[default], constant[False]]] begin[:] variable[default_profile] assign[=] name[profile] if compare[call[name[profile]][constant[display_name]] equal[==] name[profile_name]] begin[:] break call[name[self].log.debug, parameter[constant[Applying KubeSpawner override for profile '%s'], call[name[profile]][constant[display_name]]]] variable[kubespawner_override] assign[=] call[name[profile].get, parameter[constant[kubespawner_override], dictionary[[], []]]] for taget[tuple[[<ast.Name object at 0x7da18eb55a50>, <ast.Name object at 0x7da1b16440a0>]]] in starred[call[name[kubespawner_override].items, parameter[]]] begin[:] if call[name[callable], parameter[name[v]]] begin[:] variable[v] assign[=] call[name[v], parameter[name[self]]] call[name[self].log.debug, parameter[constant[.. overriding KubeSpawner value %s=%s (callable result)], name[k], name[v]]] call[name[setattr], parameter[name[self], name[k], name[v]]]
keyword[def] identifier[_load_profile] ( identifier[self] , identifier[profile_name] ): literal[string] identifier[default_profile] = identifier[self] . identifier[_profile_list] [ literal[int] ] keyword[for] identifier[profile] keyword[in] identifier[self] . identifier[_profile_list] : keyword[if] identifier[profile] . identifier[get] ( literal[string] , keyword[False] ): identifier[default_profile] = identifier[profile] keyword[if] identifier[profile] [ literal[string] ]== identifier[profile_name] : keyword[break] keyword[else] : keyword[if] identifier[profile_name] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[profile_name] , literal[string] . identifier[join] ( identifier[p] [ literal[string] ] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[_profile_list] ) )) keyword[else] : identifier[profile] = identifier[default_profile] identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[profile] [ literal[string] ]) identifier[kubespawner_override] = identifier[profile] . identifier[get] ( literal[string] ,{}) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kubespawner_override] . identifier[items] (): keyword[if] identifier[callable] ( identifier[v] ): identifier[v] = identifier[v] ( identifier[self] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[k] , identifier[v] ) keyword[else] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[k] , identifier[v] ) identifier[setattr] ( identifier[self] , identifier[k] , identifier[v] )
def _load_profile(self, profile_name): """Load a profile by name Called by load_user_options """ # find the profile default_profile = self._profile_list[0] for profile in self._profile_list: if profile.get('default', False): # explicit default, not the first default_profile = profile # depends on [control=['if'], data=[]] if profile['display_name'] == profile_name: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['profile']] else: if profile_name: # name specified, but not found raise ValueError('No such profile: %s. Options include: %s' % (profile_name, ', '.join((p['display_name'] for p in self._profile_list)))) # depends on [control=['if'], data=[]] else: # no name specified, use the default profile = default_profile self.log.debug("Applying KubeSpawner override for profile '%s'", profile['display_name']) kubespawner_override = profile.get('kubespawner_override', {}) for (k, v) in kubespawner_override.items(): if callable(v): v = v(self) self.log.debug('.. overriding KubeSpawner value %s=%s (callable result)', k, v) # depends on [control=['if'], data=[]] else: self.log.debug('.. overriding KubeSpawner value %s=%s', k, v) setattr(self, k, v) # depends on [control=['for'], data=[]]
def create(cls, counter_user_alias, share_detail, status, monetary_account_id=None, draft_share_invite_bank_id=None, share_type=None, start_date=None, end_date=None, custom_headers=None): """ Create a new share inquiry for a monetary account, specifying the permission the other bunq user will have on it. :type user_id: int :type monetary_account_id: int :param counter_user_alias: The pointer of the user to share with. :type counter_user_alias: object_.Pointer :param share_detail: The share details. Only one of these objects may be passed. :type share_detail: object_.ShareDetail :param status: The status of the share. Can be PENDING, REVOKED (the user deletes the share inquiry before it's accepted), ACCEPTED, CANCELLED (the user deletes an active share) or CANCELLATION_PENDING, CANCELLATION_ACCEPTED, CANCELLATION_REJECTED (for canceling mutual connects). :type status: str :param draft_share_invite_bank_id: The id of the draft share invite bank. :type draft_share_invite_bank_id: int :param share_type: The share type, either STANDARD or MUTUAL. :type share_type: str :param start_date: The start date of this share. :type start_date: str :param end_date: The expiration date of this share. :type end_date: str :type custom_headers: dict[str, str]|None :rtype: BunqResponseInt """ if custom_headers is None: custom_headers = {} request_map = { cls.FIELD_COUNTER_USER_ALIAS: counter_user_alias, cls.FIELD_DRAFT_SHARE_INVITE_BANK_ID: draft_share_invite_bank_id, cls.FIELD_SHARE_DETAIL: share_detail, cls.FIELD_STATUS: status, cls.FIELD_SHARE_TYPE: share_type, cls.FIELD_START_DATE: start_date, cls.FIELD_END_DATE: end_date } request_map_string = converter.class_to_json(request_map) request_map_string = cls._remove_field_for_request(request_map_string) api_client = client.ApiClient(cls._get_api_context()) request_bytes = request_map_string.encode() endpoint_url = cls._ENDPOINT_URL_CREATE.format(cls._determine_user_id(), cls._determine_monetary_account_id( monetary_account_id)) response_raw = api_client.post(endpoint_url, request_bytes, custom_headers) return BunqResponseInt.cast_from_bunq_response( cls._process_for_id(response_raw) )
def function[create, parameter[cls, counter_user_alias, share_detail, status, monetary_account_id, draft_share_invite_bank_id, share_type, start_date, end_date, custom_headers]]: constant[ Create a new share inquiry for a monetary account, specifying the permission the other bunq user will have on it. :type user_id: int :type monetary_account_id: int :param counter_user_alias: The pointer of the user to share with. :type counter_user_alias: object_.Pointer :param share_detail: The share details. Only one of these objects may be passed. :type share_detail: object_.ShareDetail :param status: The status of the share. Can be PENDING, REVOKED (the user deletes the share inquiry before it's accepted), ACCEPTED, CANCELLED (the user deletes an active share) or CANCELLATION_PENDING, CANCELLATION_ACCEPTED, CANCELLATION_REJECTED (for canceling mutual connects). :type status: str :param draft_share_invite_bank_id: The id of the draft share invite bank. :type draft_share_invite_bank_id: int :param share_type: The share type, either STANDARD or MUTUAL. :type share_type: str :param start_date: The start date of this share. :type start_date: str :param end_date: The expiration date of this share. :type end_date: str :type custom_headers: dict[str, str]|None :rtype: BunqResponseInt ] if compare[name[custom_headers] is constant[None]] begin[:] variable[custom_headers] assign[=] dictionary[[], []] variable[request_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b08448e0>, <ast.Attribute object at 0x7da1b0846fb0>, <ast.Attribute object at 0x7da1b08441f0>, <ast.Attribute object at 0x7da1b0845540>, <ast.Attribute object at 0x7da1b0845e70>, <ast.Attribute object at 0x7da1b0847a90>, <ast.Attribute object at 0x7da1b0844d90>], [<ast.Name object at 0x7da1b0845030>, <ast.Name object at 0x7da1b0844070>, <ast.Name object at 0x7da1b08477f0>, <ast.Name object at 0x7da1b0845960>, <ast.Name object at 0x7da1b0845330>, <ast.Name object at 0x7da1b08477c0>, <ast.Name object at 0x7da1b0846ce0>]] variable[request_map_string] assign[=] call[name[converter].class_to_json, parameter[name[request_map]]] variable[request_map_string] assign[=] call[name[cls]._remove_field_for_request, parameter[name[request_map_string]]] variable[api_client] assign[=] call[name[client].ApiClient, parameter[call[name[cls]._get_api_context, parameter[]]]] variable[request_bytes] assign[=] call[name[request_map_string].encode, parameter[]] variable[endpoint_url] assign[=] call[name[cls]._ENDPOINT_URL_CREATE.format, parameter[call[name[cls]._determine_user_id, parameter[]], call[name[cls]._determine_monetary_account_id, parameter[name[monetary_account_id]]]]] variable[response_raw] assign[=] call[name[api_client].post, parameter[name[endpoint_url], name[request_bytes], name[custom_headers]]] return[call[name[BunqResponseInt].cast_from_bunq_response, parameter[call[name[cls]._process_for_id, parameter[name[response_raw]]]]]]
keyword[def] identifier[create] ( identifier[cls] , identifier[counter_user_alias] , identifier[share_detail] , identifier[status] , identifier[monetary_account_id] = keyword[None] , identifier[draft_share_invite_bank_id] = keyword[None] , identifier[share_type] = keyword[None] , identifier[start_date] = keyword[None] , identifier[end_date] = keyword[None] , identifier[custom_headers] = keyword[None] ): literal[string] keyword[if] identifier[custom_headers] keyword[is] keyword[None] : identifier[custom_headers] ={} identifier[request_map] ={ identifier[cls] . identifier[FIELD_COUNTER_USER_ALIAS] : identifier[counter_user_alias] , identifier[cls] . identifier[FIELD_DRAFT_SHARE_INVITE_BANK_ID] : identifier[draft_share_invite_bank_id] , identifier[cls] . identifier[FIELD_SHARE_DETAIL] : identifier[share_detail] , identifier[cls] . identifier[FIELD_STATUS] : identifier[status] , identifier[cls] . identifier[FIELD_SHARE_TYPE] : identifier[share_type] , identifier[cls] . identifier[FIELD_START_DATE] : identifier[start_date] , identifier[cls] . identifier[FIELD_END_DATE] : identifier[end_date] } identifier[request_map_string] = identifier[converter] . identifier[class_to_json] ( identifier[request_map] ) identifier[request_map_string] = identifier[cls] . identifier[_remove_field_for_request] ( identifier[request_map_string] ) identifier[api_client] = identifier[client] . identifier[ApiClient] ( identifier[cls] . identifier[_get_api_context] ()) identifier[request_bytes] = identifier[request_map_string] . identifier[encode] () identifier[endpoint_url] = identifier[cls] . identifier[_ENDPOINT_URL_CREATE] . identifier[format] ( identifier[cls] . identifier[_determine_user_id] (), identifier[cls] . identifier[_determine_monetary_account_id] ( identifier[monetary_account_id] )) identifier[response_raw] = identifier[api_client] . identifier[post] ( identifier[endpoint_url] , identifier[request_bytes] , identifier[custom_headers] ) keyword[return] identifier[BunqResponseInt] . identifier[cast_from_bunq_response] ( identifier[cls] . identifier[_process_for_id] ( identifier[response_raw] ) )
def create(cls, counter_user_alias, share_detail, status, monetary_account_id=None, draft_share_invite_bank_id=None, share_type=None, start_date=None, end_date=None, custom_headers=None): """ Create a new share inquiry for a monetary account, specifying the permission the other bunq user will have on it. :type user_id: int :type monetary_account_id: int :param counter_user_alias: The pointer of the user to share with. :type counter_user_alias: object_.Pointer :param share_detail: The share details. Only one of these objects may be passed. :type share_detail: object_.ShareDetail :param status: The status of the share. Can be PENDING, REVOKED (the user deletes the share inquiry before it's accepted), ACCEPTED, CANCELLED (the user deletes an active share) or CANCELLATION_PENDING, CANCELLATION_ACCEPTED, CANCELLATION_REJECTED (for canceling mutual connects). :type status: str :param draft_share_invite_bank_id: The id of the draft share invite bank. :type draft_share_invite_bank_id: int :param share_type: The share type, either STANDARD or MUTUAL. :type share_type: str :param start_date: The start date of this share. :type start_date: str :param end_date: The expiration date of this share. :type end_date: str :type custom_headers: dict[str, str]|None :rtype: BunqResponseInt """ if custom_headers is None: custom_headers = {} # depends on [control=['if'], data=['custom_headers']] request_map = {cls.FIELD_COUNTER_USER_ALIAS: counter_user_alias, cls.FIELD_DRAFT_SHARE_INVITE_BANK_ID: draft_share_invite_bank_id, cls.FIELD_SHARE_DETAIL: share_detail, cls.FIELD_STATUS: status, cls.FIELD_SHARE_TYPE: share_type, cls.FIELD_START_DATE: start_date, cls.FIELD_END_DATE: end_date} request_map_string = converter.class_to_json(request_map) request_map_string = cls._remove_field_for_request(request_map_string) api_client = client.ApiClient(cls._get_api_context()) request_bytes = request_map_string.encode() endpoint_url = cls._ENDPOINT_URL_CREATE.format(cls._determine_user_id(), cls._determine_monetary_account_id(monetary_account_id)) response_raw = api_client.post(endpoint_url, request_bytes, custom_headers) return BunqResponseInt.cast_from_bunq_response(cls._process_for_id(response_raw))
def to_dict(self): """ Serializes a definition to a dictionary, ready for json. Children are serialised recursively. """ ddict = {'name': self.name, 'icon': self.icon, 'line': self.line, 'column': self.column, 'children': [], 'description': self.description, 'user_data': self.user_data, 'path': self.file_path} for child in self.children: ddict['children'].append(child.to_dict()) return ddict
def function[to_dict, parameter[self]]: constant[ Serializes a definition to a dictionary, ready for json. Children are serialised recursively. ] variable[ddict] assign[=] dictionary[[<ast.Constant object at 0x7da18f810280>, <ast.Constant object at 0x7da18f811750>, <ast.Constant object at 0x7da204567df0>, <ast.Constant object at 0x7da204567280>, <ast.Constant object at 0x7da204564520>, <ast.Constant object at 0x7da204565a50>, <ast.Constant object at 0x7da204565420>, <ast.Constant object at 0x7da2045651b0>], [<ast.Attribute object at 0x7da204564f40>, <ast.Attribute object at 0x7da204567310>, <ast.Attribute object at 0x7da204564a00>, <ast.Attribute object at 0x7da204567d00>, <ast.List object at 0x7da204567ac0>, <ast.Attribute object at 0x7da204564880>, <ast.Attribute object at 0x7da204567d30>, <ast.Attribute object at 0x7da2045641f0>]] for taget[name[child]] in starred[name[self].children] begin[:] call[call[name[ddict]][constant[children]].append, parameter[call[name[child].to_dict, parameter[]]]] return[name[ddict]]
keyword[def] identifier[to_dict] ( identifier[self] ): literal[string] identifier[ddict] ={ literal[string] : identifier[self] . identifier[name] , literal[string] : identifier[self] . identifier[icon] , literal[string] : identifier[self] . identifier[line] , literal[string] : identifier[self] . identifier[column] , literal[string] :[], literal[string] : identifier[self] . identifier[description] , literal[string] : identifier[self] . identifier[user_data] , literal[string] : identifier[self] . identifier[file_path] } keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] : identifier[ddict] [ literal[string] ]. identifier[append] ( identifier[child] . identifier[to_dict] ()) keyword[return] identifier[ddict]
def to_dict(self): """ Serializes a definition to a dictionary, ready for json. Children are serialised recursively. """ ddict = {'name': self.name, 'icon': self.icon, 'line': self.line, 'column': self.column, 'children': [], 'description': self.description, 'user_data': self.user_data, 'path': self.file_path} for child in self.children: ddict['children'].append(child.to_dict()) # depends on [control=['for'], data=['child']] return ddict
def ptconcat(output_file, input_files, overwrite=False): """Concatenate HDF5 Files""" filt = tb.Filters( complevel=5, shuffle=True, fletcher32=True, complib='zlib' ) out_tabs = {} dt_file = input_files[0] log.info("Reading data struct '%s'..." % dt_file) h5struc = tb.open_file(dt_file, 'r') log.info("Opening output file '%s'..." % output_file) if overwrite: outmode = 'w' else: outmode = 'a' h5out = tb.open_file(output_file, outmode) for node in h5struc.walk_nodes('/', classname='Table'): path = node._v_pathname log.debug(path) dtype = node.dtype p, n = os.path.split(path) out_tabs[path] = h5out.create_table( p, n, description=dtype, filters=filt, createparents=True ) h5struc.close() for fname in input_files: log.info('Reading %s...' % fname) h5 = tb.open_file(fname) for path, out in out_tabs.items(): tab = h5.get_node(path) out.append(tab[:]) h5.close() h5out.close()
def function[ptconcat, parameter[output_file, input_files, overwrite]]: constant[Concatenate HDF5 Files] variable[filt] assign[=] call[name[tb].Filters, parameter[]] variable[out_tabs] assign[=] dictionary[[], []] variable[dt_file] assign[=] call[name[input_files]][constant[0]] call[name[log].info, parameter[binary_operation[constant[Reading data struct '%s'...] <ast.Mod object at 0x7da2590d6920> name[dt_file]]]] variable[h5struc] assign[=] call[name[tb].open_file, parameter[name[dt_file], constant[r]]] call[name[log].info, parameter[binary_operation[constant[Opening output file '%s'...] <ast.Mod object at 0x7da2590d6920> name[output_file]]]] if name[overwrite] begin[:] variable[outmode] assign[=] constant[w] variable[h5out] assign[=] call[name[tb].open_file, parameter[name[output_file], name[outmode]]] for taget[name[node]] in starred[call[name[h5struc].walk_nodes, parameter[constant[/]]]] begin[:] variable[path] assign[=] name[node]._v_pathname call[name[log].debug, parameter[name[path]]] variable[dtype] assign[=] name[node].dtype <ast.Tuple object at 0x7da20e963cd0> assign[=] call[name[os].path.split, parameter[name[path]]] call[name[out_tabs]][name[path]] assign[=] call[name[h5out].create_table, parameter[name[p], name[n]]] call[name[h5struc].close, parameter[]] for taget[name[fname]] in starred[name[input_files]] begin[:] call[name[log].info, parameter[binary_operation[constant[Reading %s...] <ast.Mod object at 0x7da2590d6920> name[fname]]]] variable[h5] assign[=] call[name[tb].open_file, parameter[name[fname]]] for taget[tuple[[<ast.Name object at 0x7da1b26ac700>, <ast.Name object at 0x7da1b26ac9d0>]]] in starred[call[name[out_tabs].items, parameter[]]] begin[:] variable[tab] assign[=] call[name[h5].get_node, parameter[name[path]]] call[name[out].append, parameter[call[name[tab]][<ast.Slice object at 0x7da1b26af1f0>]]] call[name[h5].close, parameter[]] call[name[h5out].close, parameter[]]
keyword[def] identifier[ptconcat] ( identifier[output_file] , identifier[input_files] , identifier[overwrite] = keyword[False] ): literal[string] identifier[filt] = identifier[tb] . identifier[Filters] ( identifier[complevel] = literal[int] , identifier[shuffle] = keyword[True] , identifier[fletcher32] = keyword[True] , identifier[complib] = literal[string] ) identifier[out_tabs] ={} identifier[dt_file] = identifier[input_files] [ literal[int] ] identifier[log] . identifier[info] ( literal[string] % identifier[dt_file] ) identifier[h5struc] = identifier[tb] . identifier[open_file] ( identifier[dt_file] , literal[string] ) identifier[log] . identifier[info] ( literal[string] % identifier[output_file] ) keyword[if] identifier[overwrite] : identifier[outmode] = literal[string] keyword[else] : identifier[outmode] = literal[string] identifier[h5out] = identifier[tb] . identifier[open_file] ( identifier[output_file] , identifier[outmode] ) keyword[for] identifier[node] keyword[in] identifier[h5struc] . identifier[walk_nodes] ( literal[string] , identifier[classname] = literal[string] ): identifier[path] = identifier[node] . identifier[_v_pathname] identifier[log] . identifier[debug] ( identifier[path] ) identifier[dtype] = identifier[node] . identifier[dtype] identifier[p] , identifier[n] = identifier[os] . identifier[path] . identifier[split] ( identifier[path] ) identifier[out_tabs] [ identifier[path] ]= identifier[h5out] . identifier[create_table] ( identifier[p] , identifier[n] , identifier[description] = identifier[dtype] , identifier[filters] = identifier[filt] , identifier[createparents] = keyword[True] ) identifier[h5struc] . identifier[close] () keyword[for] identifier[fname] keyword[in] identifier[input_files] : identifier[log] . identifier[info] ( literal[string] % identifier[fname] ) identifier[h5] = identifier[tb] . identifier[open_file] ( identifier[fname] ) keyword[for] identifier[path] , identifier[out] keyword[in] identifier[out_tabs] . identifier[items] (): identifier[tab] = identifier[h5] . identifier[get_node] ( identifier[path] ) identifier[out] . identifier[append] ( identifier[tab] [:]) identifier[h5] . identifier[close] () identifier[h5out] . identifier[close] ()
def ptconcat(output_file, input_files, overwrite=False): """Concatenate HDF5 Files""" filt = tb.Filters(complevel=5, shuffle=True, fletcher32=True, complib='zlib') out_tabs = {} dt_file = input_files[0] log.info("Reading data struct '%s'..." % dt_file) h5struc = tb.open_file(dt_file, 'r') log.info("Opening output file '%s'..." % output_file) if overwrite: outmode = 'w' # depends on [control=['if'], data=[]] else: outmode = 'a' h5out = tb.open_file(output_file, outmode) for node in h5struc.walk_nodes('/', classname='Table'): path = node._v_pathname log.debug(path) dtype = node.dtype (p, n) = os.path.split(path) out_tabs[path] = h5out.create_table(p, n, description=dtype, filters=filt, createparents=True) # depends on [control=['for'], data=['node']] h5struc.close() for fname in input_files: log.info('Reading %s...' % fname) h5 = tb.open_file(fname) for (path, out) in out_tabs.items(): tab = h5.get_node(path) out.append(tab[:]) # depends on [control=['for'], data=[]] h5.close() # depends on [control=['for'], data=['fname']] h5out.close()
def mangle(self, name, x): """ Mangle the name by hashing the I{name} and appending I{x}. @return: the mangled name. """ h = abs(hash(name)) return '%s-%s' % (h, x)
def function[mangle, parameter[self, name, x]]: constant[ Mangle the name by hashing the I{name} and appending I{x}. @return: the mangled name. ] variable[h] assign[=] call[name[abs], parameter[call[name[hash], parameter[name[name]]]]] return[binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc06890>, <ast.Name object at 0x7da18dc06cb0>]]]]
keyword[def] identifier[mangle] ( identifier[self] , identifier[name] , identifier[x] ): literal[string] identifier[h] = identifier[abs] ( identifier[hash] ( identifier[name] )) keyword[return] literal[string] %( identifier[h] , identifier[x] )
def mangle(self, name, x): """ Mangle the name by hashing the I{name} and appending I{x}. @return: the mangled name. """ h = abs(hash(name)) return '%s-%s' % (h, x)
def _GetStatus(self): """Retrieves status information. Returns: dict[str, object]: status attributes, indexed by name. """ if self._analysis_mediator: number_of_produced_event_tags = ( self._analysis_mediator.number_of_produced_event_tags) number_of_produced_reports = ( self._analysis_mediator.number_of_produced_analysis_reports) else: number_of_produced_event_tags = None number_of_produced_reports = None if self._process_information: used_memory = self._process_information.GetUsedMemory() or 0 else: used_memory = 0 if self._memory_profiler: self._memory_profiler.Sample('main', used_memory) status = { 'display_name': '', 'identifier': self._name, 'number_of_consumed_event_tags': None, 'number_of_consumed_events': self._number_of_consumed_events, 'number_of_consumed_reports': None, 'number_of_consumed_sources': None, 'number_of_consumed_warnings': None, 'number_of_produced_event_tags': number_of_produced_event_tags, 'number_of_produced_events': None, 'number_of_produced_reports': number_of_produced_reports, 'number_of_produced_sources': None, 'number_of_produced_warnings': None, 'processing_status': self._status, 'task_identifier': None, 'used_memory': used_memory} if self._status in ( definitions.STATUS_INDICATOR_ABORTED, definitions.STATUS_INDICATOR_COMPLETED): self._foreman_status_wait_event.set() return status
def function[_GetStatus, parameter[self]]: constant[Retrieves status information. Returns: dict[str, object]: status attributes, indexed by name. ] if name[self]._analysis_mediator begin[:] variable[number_of_produced_event_tags] assign[=] name[self]._analysis_mediator.number_of_produced_event_tags variable[number_of_produced_reports] assign[=] name[self]._analysis_mediator.number_of_produced_analysis_reports if name[self]._process_information begin[:] variable[used_memory] assign[=] <ast.BoolOp object at 0x7da18bc72980> if name[self]._memory_profiler begin[:] call[name[self]._memory_profiler.Sample, parameter[constant[main], name[used_memory]]] variable[status] assign[=] dictionary[[<ast.Constant object at 0x7da18bc725f0>, <ast.Constant object at 0x7da18bc72830>, <ast.Constant object at 0x7da18bc71360>, <ast.Constant object at 0x7da18bc71f30>, <ast.Constant object at 0x7da18bc73b80>, <ast.Constant object at 0x7da18bc724a0>, <ast.Constant object at 0x7da18bc714b0>, <ast.Constant object at 0x7da18bc70f40>, <ast.Constant object at 0x7da18bc71000>, <ast.Constant object at 0x7da18bc72320>, <ast.Constant object at 0x7da18bc70610>, <ast.Constant object at 0x7da18bc73dc0>, <ast.Constant object at 0x7da18bc73af0>, <ast.Constant object at 0x7da18bc72650>, <ast.Constant object at 0x7da18bc703d0>], [<ast.Constant object at 0x7da18bc72f20>, <ast.Attribute object at 0x7da18bc73430>, <ast.Constant object at 0x7da18bc70a90>, <ast.Attribute object at 0x7da18bc710c0>, <ast.Constant object at 0x7da18bc72fe0>, <ast.Constant object at 0x7da18bc71840>, <ast.Constant object at 0x7da18bc72470>, <ast.Name object at 0x7da18bc73940>, <ast.Constant object at 0x7da18bc732b0>, <ast.Name object at 0x7da18bc73610>, <ast.Constant object at 0x7da18bc73f70>, <ast.Constant object at 0x7da18bc72d10>, <ast.Attribute object at 0x7da18bc71150>, <ast.Constant object at 0x7da18bc73d30>, <ast.Name object at 0x7da18bc70280>]] if compare[name[self]._status in tuple[[<ast.Attribute object at 0x7da18bc708e0>, <ast.Attribute object at 0x7da18bc70b20>]]] begin[:] call[name[self]._foreman_status_wait_event.set, parameter[]] return[name[status]]
keyword[def] identifier[_GetStatus] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_analysis_mediator] : identifier[number_of_produced_event_tags] =( identifier[self] . identifier[_analysis_mediator] . identifier[number_of_produced_event_tags] ) identifier[number_of_produced_reports] =( identifier[self] . identifier[_analysis_mediator] . identifier[number_of_produced_analysis_reports] ) keyword[else] : identifier[number_of_produced_event_tags] = keyword[None] identifier[number_of_produced_reports] = keyword[None] keyword[if] identifier[self] . identifier[_process_information] : identifier[used_memory] = identifier[self] . identifier[_process_information] . identifier[GetUsedMemory] () keyword[or] literal[int] keyword[else] : identifier[used_memory] = literal[int] keyword[if] identifier[self] . identifier[_memory_profiler] : identifier[self] . identifier[_memory_profiler] . identifier[Sample] ( literal[string] , identifier[used_memory] ) identifier[status] ={ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[_name] , literal[string] : keyword[None] , literal[string] : identifier[self] . identifier[_number_of_consumed_events] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : identifier[number_of_produced_event_tags] , literal[string] : keyword[None] , literal[string] : identifier[number_of_produced_reports] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : identifier[self] . identifier[_status] , literal[string] : keyword[None] , literal[string] : identifier[used_memory] } keyword[if] identifier[self] . identifier[_status] keyword[in] ( identifier[definitions] . identifier[STATUS_INDICATOR_ABORTED] , identifier[definitions] . identifier[STATUS_INDICATOR_COMPLETED] ): identifier[self] . identifier[_foreman_status_wait_event] . identifier[set] () keyword[return] identifier[status]
def _GetStatus(self): """Retrieves status information. Returns: dict[str, object]: status attributes, indexed by name. """ if self._analysis_mediator: number_of_produced_event_tags = self._analysis_mediator.number_of_produced_event_tags number_of_produced_reports = self._analysis_mediator.number_of_produced_analysis_reports # depends on [control=['if'], data=[]] else: number_of_produced_event_tags = None number_of_produced_reports = None if self._process_information: used_memory = self._process_information.GetUsedMemory() or 0 # depends on [control=['if'], data=[]] else: used_memory = 0 if self._memory_profiler: self._memory_profiler.Sample('main', used_memory) # depends on [control=['if'], data=[]] status = {'display_name': '', 'identifier': self._name, 'number_of_consumed_event_tags': None, 'number_of_consumed_events': self._number_of_consumed_events, 'number_of_consumed_reports': None, 'number_of_consumed_sources': None, 'number_of_consumed_warnings': None, 'number_of_produced_event_tags': number_of_produced_event_tags, 'number_of_produced_events': None, 'number_of_produced_reports': number_of_produced_reports, 'number_of_produced_sources': None, 'number_of_produced_warnings': None, 'processing_status': self._status, 'task_identifier': None, 'used_memory': used_memory} if self._status in (definitions.STATUS_INDICATOR_ABORTED, definitions.STATUS_INDICATOR_COMPLETED): self._foreman_status_wait_event.set() # depends on [control=['if'], data=[]] return status
def _resolve_user_group_names(opts): ''' Resolve user and group names in related opts ''' name_id_opts = {'uid': 'user.info', 'gid': 'group.info'} for ind, opt in enumerate(opts): if opt.split('=')[0] in name_id_opts: _givenid = opt.split('=')[1] _param = opt.split('=')[0] _id = _givenid if not re.match('[0-9]+$', _givenid): _info = __salt__[name_id_opts[_param]](_givenid) if _info and _param in _info: _id = _info[_param] opts[ind] = _param + '=' + six.text_type(_id) opts[ind] = opts[ind].replace('\\040', '\\ ') return opts
def function[_resolve_user_group_names, parameter[opts]]: constant[ Resolve user and group names in related opts ] variable[name_id_opts] assign[=] dictionary[[<ast.Constant object at 0x7da1b2022740>, <ast.Constant object at 0x7da1b2023400>], [<ast.Constant object at 0x7da1b2023370>, <ast.Constant object at 0x7da1b2020400>]] for taget[tuple[[<ast.Name object at 0x7da1b2021f90>, <ast.Name object at 0x7da1b2022050>]]] in starred[call[name[enumerate], parameter[name[opts]]]] begin[:] if compare[call[call[name[opt].split, parameter[constant[=]]]][constant[0]] in name[name_id_opts]] begin[:] variable[_givenid] assign[=] call[call[name[opt].split, parameter[constant[=]]]][constant[1]] variable[_param] assign[=] call[call[name[opt].split, parameter[constant[=]]]][constant[0]] variable[_id] assign[=] name[_givenid] if <ast.UnaryOp object at 0x7da1b2020580> begin[:] variable[_info] assign[=] call[call[name[__salt__]][call[name[name_id_opts]][name[_param]]], parameter[name[_givenid]]] if <ast.BoolOp object at 0x7da1b20203d0> begin[:] variable[_id] assign[=] call[name[_info]][name[_param]] call[name[opts]][name[ind]] assign[=] binary_operation[binary_operation[name[_param] + constant[=]] + call[name[six].text_type, parameter[name[_id]]]] call[name[opts]][name[ind]] assign[=] call[call[name[opts]][name[ind]].replace, parameter[constant[\040], constant[\ ]]] return[name[opts]]
keyword[def] identifier[_resolve_user_group_names] ( identifier[opts] ): literal[string] identifier[name_id_opts] ={ literal[string] : literal[string] , literal[string] : literal[string] } keyword[for] identifier[ind] , identifier[opt] keyword[in] identifier[enumerate] ( identifier[opts] ): keyword[if] identifier[opt] . identifier[split] ( literal[string] )[ literal[int] ] keyword[in] identifier[name_id_opts] : identifier[_givenid] = identifier[opt] . identifier[split] ( literal[string] )[ literal[int] ] identifier[_param] = identifier[opt] . identifier[split] ( literal[string] )[ literal[int] ] identifier[_id] = identifier[_givenid] keyword[if] keyword[not] identifier[re] . identifier[match] ( literal[string] , identifier[_givenid] ): identifier[_info] = identifier[__salt__] [ identifier[name_id_opts] [ identifier[_param] ]]( identifier[_givenid] ) keyword[if] identifier[_info] keyword[and] identifier[_param] keyword[in] identifier[_info] : identifier[_id] = identifier[_info] [ identifier[_param] ] identifier[opts] [ identifier[ind] ]= identifier[_param] + literal[string] + identifier[six] . identifier[text_type] ( identifier[_id] ) identifier[opts] [ identifier[ind] ]= identifier[opts] [ identifier[ind] ]. identifier[replace] ( literal[string] , literal[string] ) keyword[return] identifier[opts]
def _resolve_user_group_names(opts): """ Resolve user and group names in related opts """ name_id_opts = {'uid': 'user.info', 'gid': 'group.info'} for (ind, opt) in enumerate(opts): if opt.split('=')[0] in name_id_opts: _givenid = opt.split('=')[1] _param = opt.split('=')[0] _id = _givenid if not re.match('[0-9]+$', _givenid): _info = __salt__[name_id_opts[_param]](_givenid) if _info and _param in _info: _id = _info[_param] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] opts[ind] = _param + '=' + six.text_type(_id) # depends on [control=['if'], data=['name_id_opts']] opts[ind] = opts[ind].replace('\\040', '\\ ') # depends on [control=['for'], data=[]] return opts
def vanderwaals(target, pressure='pore.pressure', temperature='pore.temperature', critical_pressure='pore.critical_pressure', critical_temperature='pore.critical_temperature'): r""" Uses Van der Waals equation of state to calculate the density of a real gas Parameters ---------- target : OpenPNM Object The object for which these values are being calculated. This controls the length of the calculated array, and also provides access to other necessary thermofluid properties. pressure : string The dictionary key containing the pressure values in Pascals (Pa) temperature : string The dictionary key containing the temperature values in Kelvin (K) critical_pressure : string The dictionary key containing the critical pressure values in Pascals (Pa) critical_temperature : string The dictionary key containing the critical temperature values in Kelvin (K) Returns ------- rho, the density in [mol/m3] """ P = target[pressure]/100000 T = target[temperature] Pc = target[critical_pressure]/100000 # convert to bars Tc = target[critical_temperature] R = 83.1447 a = 27*(R**2)*(Tc**2)/(64*Pc) b = R*Tc/(8*Pc) a1 = -1/b a2 = (R*T+b*P)/(a*b) a3 = -P/(a*b) a0 = sp.ones(sp.shape(a1)) coeffs = sp.vstack((a0, a1, a2, a3)).T density = sp.array([sp.roots(C) for C in coeffs]) value = sp.real(density[:, 2])*1e6 # Convert it to mol/m3 return value
def function[vanderwaals, parameter[target, pressure, temperature, critical_pressure, critical_temperature]]: constant[ Uses Van der Waals equation of state to calculate the density of a real gas Parameters ---------- target : OpenPNM Object The object for which these values are being calculated. This controls the length of the calculated array, and also provides access to other necessary thermofluid properties. pressure : string The dictionary key containing the pressure values in Pascals (Pa) temperature : string The dictionary key containing the temperature values in Kelvin (K) critical_pressure : string The dictionary key containing the critical pressure values in Pascals (Pa) critical_temperature : string The dictionary key containing the critical temperature values in Kelvin (K) Returns ------- rho, the density in [mol/m3] ] variable[P] assign[=] binary_operation[call[name[target]][name[pressure]] / constant[100000]] variable[T] assign[=] call[name[target]][name[temperature]] variable[Pc] assign[=] binary_operation[call[name[target]][name[critical_pressure]] / constant[100000]] variable[Tc] assign[=] call[name[target]][name[critical_temperature]] variable[R] assign[=] constant[83.1447] variable[a] assign[=] binary_operation[binary_operation[binary_operation[constant[27] * binary_operation[name[R] ** constant[2]]] * binary_operation[name[Tc] ** constant[2]]] / binary_operation[constant[64] * name[Pc]]] variable[b] assign[=] binary_operation[binary_operation[name[R] * name[Tc]] / binary_operation[constant[8] * name[Pc]]] variable[a1] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b26ae4d0> / name[b]] variable[a2] assign[=] binary_operation[binary_operation[binary_operation[name[R] * name[T]] + binary_operation[name[b] * name[P]]] / binary_operation[name[a] * name[b]]] variable[a3] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b26af910> / binary_operation[name[a] * name[b]]] variable[a0] assign[=] call[name[sp].ones, parameter[call[name[sp].shape, parameter[name[a1]]]]] variable[coeffs] assign[=] call[name[sp].vstack, parameter[tuple[[<ast.Name object at 0x7da1b26afb50>, <ast.Name object at 0x7da1b26ae4a0>, <ast.Name object at 0x7da1b26ae410>, <ast.Name object at 0x7da1b26ae8c0>]]]].T variable[density] assign[=] call[name[sp].array, parameter[<ast.ListComp object at 0x7da1b26ad570>]] variable[value] assign[=] binary_operation[call[name[sp].real, parameter[call[name[density]][tuple[[<ast.Slice object at 0x7da1b26adc30>, <ast.Constant object at 0x7da1b26ad780>]]]]] * constant[1000000.0]] return[name[value]]
keyword[def] identifier[vanderwaals] ( identifier[target] , identifier[pressure] = literal[string] , identifier[temperature] = literal[string] , identifier[critical_pressure] = literal[string] , identifier[critical_temperature] = literal[string] ): literal[string] identifier[P] = identifier[target] [ identifier[pressure] ]/ literal[int] identifier[T] = identifier[target] [ identifier[temperature] ] identifier[Pc] = identifier[target] [ identifier[critical_pressure] ]/ literal[int] identifier[Tc] = identifier[target] [ identifier[critical_temperature] ] identifier[R] = literal[int] identifier[a] = literal[int] *( identifier[R] ** literal[int] )*( identifier[Tc] ** literal[int] )/( literal[int] * identifier[Pc] ) identifier[b] = identifier[R] * identifier[Tc] /( literal[int] * identifier[Pc] ) identifier[a1] =- literal[int] / identifier[b] identifier[a2] =( identifier[R] * identifier[T] + identifier[b] * identifier[P] )/( identifier[a] * identifier[b] ) identifier[a3] =- identifier[P] /( identifier[a] * identifier[b] ) identifier[a0] = identifier[sp] . identifier[ones] ( identifier[sp] . identifier[shape] ( identifier[a1] )) identifier[coeffs] = identifier[sp] . identifier[vstack] (( identifier[a0] , identifier[a1] , identifier[a2] , identifier[a3] )). identifier[T] identifier[density] = identifier[sp] . identifier[array] ([ identifier[sp] . identifier[roots] ( identifier[C] ) keyword[for] identifier[C] keyword[in] identifier[coeffs] ]) identifier[value] = identifier[sp] . identifier[real] ( identifier[density] [:, literal[int] ])* literal[int] keyword[return] identifier[value]
def vanderwaals(target, pressure='pore.pressure', temperature='pore.temperature', critical_pressure='pore.critical_pressure', critical_temperature='pore.critical_temperature'): """ Uses Van der Waals equation of state to calculate the density of a real gas Parameters ---------- target : OpenPNM Object The object for which these values are being calculated. This controls the length of the calculated array, and also provides access to other necessary thermofluid properties. pressure : string The dictionary key containing the pressure values in Pascals (Pa) temperature : string The dictionary key containing the temperature values in Kelvin (K) critical_pressure : string The dictionary key containing the critical pressure values in Pascals (Pa) critical_temperature : string The dictionary key containing the critical temperature values in Kelvin (K) Returns ------- rho, the density in [mol/m3] """ P = target[pressure] / 100000 T = target[temperature] Pc = target[critical_pressure] / 100000 # convert to bars Tc = target[critical_temperature] R = 83.1447 a = 27 * R ** 2 * Tc ** 2 / (64 * Pc) b = R * Tc / (8 * Pc) a1 = -1 / b a2 = (R * T + b * P) / (a * b) a3 = -P / (a * b) a0 = sp.ones(sp.shape(a1)) coeffs = sp.vstack((a0, a1, a2, a3)).T density = sp.array([sp.roots(C) for C in coeffs]) value = sp.real(density[:, 2]) * 1000000.0 # Convert it to mol/m3 return value
def apply(self, name, foci): """ Apply a named transformation to a set of foci. If the named transformation doesn't exist, return foci untransformed. """ if name in self.transformations: return transform(foci, self.transformations[name]) else: logger.info( "No transformation named '%s' found; coordinates left " "untransformed." % name) return foci
def function[apply, parameter[self, name, foci]]: constant[ Apply a named transformation to a set of foci. If the named transformation doesn't exist, return foci untransformed. ] if compare[name[name] in name[self].transformations] begin[:] return[call[name[transform], parameter[name[foci], call[name[self].transformations][name[name]]]]]
keyword[def] identifier[apply] ( identifier[self] , identifier[name] , identifier[foci] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[self] . identifier[transformations] : keyword[return] identifier[transform] ( identifier[foci] , identifier[self] . identifier[transformations] [ identifier[name] ]) keyword[else] : identifier[logger] . identifier[info] ( literal[string] literal[string] % identifier[name] ) keyword[return] identifier[foci]
def apply(self, name, foci): """ Apply a named transformation to a set of foci. If the named transformation doesn't exist, return foci untransformed. """ if name in self.transformations: return transform(foci, self.transformations[name]) # depends on [control=['if'], data=['name']] else: logger.info("No transformation named '%s' found; coordinates left untransformed." % name) return foci
def keypress_callback(self, obj, ev): """ VTK callback for keypress events. Keypress events: * ``e``: exit the application * ``p``: pick object (hover the mouse and then press to pick) * ``f``: fly to point (click somewhere in the window and press to fly) * ``r``: reset the camera * ``s`` and ``w``: switch between solid and wireframe modes * ``b``: change background color * ``m``: change color of the picked object * ``d``: print debug information (of picked object, point, etc.) * ``h``: change object visibility * ``n``: reset object visibility * ``arrow keys``: pan the model Please refer to `vtkInteractorStyle <https://vtk.org/doc/nightly/html/classvtkInteractorStyle.html>`_ class reference for more details. :param obj: render window interactor :type obj: vtkRenderWindowInteractor :param ev: event name :type ev: str """ key = obj.GetKeySym() # pressed key (as str) render_window = obj.GetRenderWindow() # vtkRenderWindow renderer = render_window.GetRenderers().GetFirstRenderer() # vtkRenderer picker = obj.GetPicker() # vtkPropPicker actor = picker.GetActor() # vtkActor # Custom keypress events if key == 'Up': camera = renderer.GetActiveCamera() # vtkCamera camera.Pitch(2.5) if key == 'Down': camera = renderer.GetActiveCamera() # vtkCamera camera.Pitch(-2.5) if key == 'Left': camera = renderer.GetActiveCamera() # vtkCamera camera.Yaw(-2.5) if key == 'Right': camera = renderer.GetActiveCamera() # vtkCamera camera.Yaw(2.5) if key == 'b': if self._bg_id >= len(self._bg): self._bg_id = 0 renderer.SetBackground(*self._bg[self._bg_id]) self._bg_id += 1 if key == 'm': if actor is not None: actor.GetProperty().SetColor(random(), random(), random()) if key == 'd': if actor is not None: print("Name:", actor.GetMapper().GetArrayName()) print("Index:", actor.GetMapper().GetArrayId()) print("Selected point:", picker.GetSelectionPoint()[0:2]) print("# of visible actors:", renderer.VisibleActorCount()) if key == 'h': if actor is not None: actor.SetVisibility(not actor.GetVisibility()) if key == 'n': actors = renderer.GetActors() # vtkActorCollection for actor in actors: actor.VisibilityOn() # Update render window render_window.Render()
def function[keypress_callback, parameter[self, obj, ev]]: constant[ VTK callback for keypress events. Keypress events: * ``e``: exit the application * ``p``: pick object (hover the mouse and then press to pick) * ``f``: fly to point (click somewhere in the window and press to fly) * ``r``: reset the camera * ``s`` and ``w``: switch between solid and wireframe modes * ``b``: change background color * ``m``: change color of the picked object * ``d``: print debug information (of picked object, point, etc.) * ``h``: change object visibility * ``n``: reset object visibility * ``arrow keys``: pan the model Please refer to `vtkInteractorStyle <https://vtk.org/doc/nightly/html/classvtkInteractorStyle.html>`_ class reference for more details. :param obj: render window interactor :type obj: vtkRenderWindowInteractor :param ev: event name :type ev: str ] variable[key] assign[=] call[name[obj].GetKeySym, parameter[]] variable[render_window] assign[=] call[name[obj].GetRenderWindow, parameter[]] variable[renderer] assign[=] call[call[name[render_window].GetRenderers, parameter[]].GetFirstRenderer, parameter[]] variable[picker] assign[=] call[name[obj].GetPicker, parameter[]] variable[actor] assign[=] call[name[picker].GetActor, parameter[]] if compare[name[key] equal[==] constant[Up]] begin[:] variable[camera] assign[=] call[name[renderer].GetActiveCamera, parameter[]] call[name[camera].Pitch, parameter[constant[2.5]]] if compare[name[key] equal[==] constant[Down]] begin[:] variable[camera] assign[=] call[name[renderer].GetActiveCamera, parameter[]] call[name[camera].Pitch, parameter[<ast.UnaryOp object at 0x7da1b16a89a0>]] if compare[name[key] equal[==] constant[Left]] begin[:] variable[camera] assign[=] call[name[renderer].GetActiveCamera, parameter[]] call[name[camera].Yaw, parameter[<ast.UnaryOp object at 0x7da1b16a97e0>]] if compare[name[key] equal[==] constant[Right]] begin[:] variable[camera] assign[=] call[name[renderer].GetActiveCamera, parameter[]] call[name[camera].Yaw, parameter[constant[2.5]]] if compare[name[key] equal[==] constant[b]] begin[:] if compare[name[self]._bg_id greater_or_equal[>=] call[name[len], parameter[name[self]._bg]]] begin[:] name[self]._bg_id assign[=] constant[0] call[name[renderer].SetBackground, parameter[<ast.Starred object at 0x7da1b16a8610>]] <ast.AugAssign object at 0x7da1b16a86d0> if compare[name[key] equal[==] constant[m]] begin[:] if compare[name[actor] is_not constant[None]] begin[:] call[call[name[actor].GetProperty, parameter[]].SetColor, parameter[call[name[random], parameter[]], call[name[random], parameter[]], call[name[random], parameter[]]]] if compare[name[key] equal[==] constant[d]] begin[:] if compare[name[actor] is_not constant[None]] begin[:] call[name[print], parameter[constant[Name:], call[call[name[actor].GetMapper, parameter[]].GetArrayName, parameter[]]]] call[name[print], parameter[constant[Index:], call[call[name[actor].GetMapper, parameter[]].GetArrayId, parameter[]]]] call[name[print], parameter[constant[Selected point:], call[call[name[picker].GetSelectionPoint, parameter[]]][<ast.Slice object at 0x7da1b16ab130>]]] call[name[print], parameter[constant[# of visible actors:], call[name[renderer].VisibleActorCount, parameter[]]]] if compare[name[key] equal[==] constant[h]] begin[:] if compare[name[actor] is_not constant[None]] begin[:] call[name[actor].SetVisibility, parameter[<ast.UnaryOp object at 0x7da1b16ab730>]] if compare[name[key] equal[==] constant[n]] begin[:] variable[actors] assign[=] call[name[renderer].GetActors, parameter[]] for taget[name[actor]] in starred[name[actors]] begin[:] call[name[actor].VisibilityOn, parameter[]] call[name[render_window].Render, parameter[]]
keyword[def] identifier[keypress_callback] ( identifier[self] , identifier[obj] , identifier[ev] ): literal[string] identifier[key] = identifier[obj] . identifier[GetKeySym] () identifier[render_window] = identifier[obj] . identifier[GetRenderWindow] () identifier[renderer] = identifier[render_window] . identifier[GetRenderers] (). identifier[GetFirstRenderer] () identifier[picker] = identifier[obj] . identifier[GetPicker] () identifier[actor] = identifier[picker] . identifier[GetActor] () keyword[if] identifier[key] == literal[string] : identifier[camera] = identifier[renderer] . identifier[GetActiveCamera] () identifier[camera] . identifier[Pitch] ( literal[int] ) keyword[if] identifier[key] == literal[string] : identifier[camera] = identifier[renderer] . identifier[GetActiveCamera] () identifier[camera] . identifier[Pitch] (- literal[int] ) keyword[if] identifier[key] == literal[string] : identifier[camera] = identifier[renderer] . identifier[GetActiveCamera] () identifier[camera] . identifier[Yaw] (- literal[int] ) keyword[if] identifier[key] == literal[string] : identifier[camera] = identifier[renderer] . identifier[GetActiveCamera] () identifier[camera] . identifier[Yaw] ( literal[int] ) keyword[if] identifier[key] == literal[string] : keyword[if] identifier[self] . identifier[_bg_id] >= identifier[len] ( identifier[self] . identifier[_bg] ): identifier[self] . identifier[_bg_id] = literal[int] identifier[renderer] . identifier[SetBackground] (* identifier[self] . identifier[_bg] [ identifier[self] . identifier[_bg_id] ]) identifier[self] . identifier[_bg_id] += literal[int] keyword[if] identifier[key] == literal[string] : keyword[if] identifier[actor] keyword[is] keyword[not] keyword[None] : identifier[actor] . identifier[GetProperty] (). identifier[SetColor] ( identifier[random] (), identifier[random] (), identifier[random] ()) keyword[if] identifier[key] == literal[string] : keyword[if] identifier[actor] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] , identifier[actor] . identifier[GetMapper] (). identifier[GetArrayName] ()) identifier[print] ( literal[string] , identifier[actor] . identifier[GetMapper] (). identifier[GetArrayId] ()) identifier[print] ( literal[string] , identifier[picker] . identifier[GetSelectionPoint] ()[ literal[int] : literal[int] ]) identifier[print] ( literal[string] , identifier[renderer] . identifier[VisibleActorCount] ()) keyword[if] identifier[key] == literal[string] : keyword[if] identifier[actor] keyword[is] keyword[not] keyword[None] : identifier[actor] . identifier[SetVisibility] ( keyword[not] identifier[actor] . identifier[GetVisibility] ()) keyword[if] identifier[key] == literal[string] : identifier[actors] = identifier[renderer] . identifier[GetActors] () keyword[for] identifier[actor] keyword[in] identifier[actors] : identifier[actor] . identifier[VisibilityOn] () identifier[render_window] . identifier[Render] ()
def keypress_callback(self, obj, ev): """ VTK callback for keypress events. Keypress events: * ``e``: exit the application * ``p``: pick object (hover the mouse and then press to pick) * ``f``: fly to point (click somewhere in the window and press to fly) * ``r``: reset the camera * ``s`` and ``w``: switch between solid and wireframe modes * ``b``: change background color * ``m``: change color of the picked object * ``d``: print debug information (of picked object, point, etc.) * ``h``: change object visibility * ``n``: reset object visibility * ``arrow keys``: pan the model Please refer to `vtkInteractorStyle <https://vtk.org/doc/nightly/html/classvtkInteractorStyle.html>`_ class reference for more details. :param obj: render window interactor :type obj: vtkRenderWindowInteractor :param ev: event name :type ev: str """ key = obj.GetKeySym() # pressed key (as str) render_window = obj.GetRenderWindow() # vtkRenderWindow renderer = render_window.GetRenderers().GetFirstRenderer() # vtkRenderer picker = obj.GetPicker() # vtkPropPicker actor = picker.GetActor() # vtkActor # Custom keypress events if key == 'Up': camera = renderer.GetActiveCamera() # vtkCamera camera.Pitch(2.5) # depends on [control=['if'], data=[]] if key == 'Down': camera = renderer.GetActiveCamera() # vtkCamera camera.Pitch(-2.5) # depends on [control=['if'], data=[]] if key == 'Left': camera = renderer.GetActiveCamera() # vtkCamera camera.Yaw(-2.5) # depends on [control=['if'], data=[]] if key == 'Right': camera = renderer.GetActiveCamera() # vtkCamera camera.Yaw(2.5) # depends on [control=['if'], data=[]] if key == 'b': if self._bg_id >= len(self._bg): self._bg_id = 0 # depends on [control=['if'], data=[]] renderer.SetBackground(*self._bg[self._bg_id]) self._bg_id += 1 # depends on [control=['if'], data=[]] if key == 'm': if actor is not None: actor.GetProperty().SetColor(random(), random(), random()) # depends on [control=['if'], data=['actor']] # depends on [control=['if'], data=[]] if key == 'd': if actor is not None: print('Name:', actor.GetMapper().GetArrayName()) print('Index:', actor.GetMapper().GetArrayId()) # depends on [control=['if'], data=['actor']] print('Selected point:', picker.GetSelectionPoint()[0:2]) print('# of visible actors:', renderer.VisibleActorCount()) # depends on [control=['if'], data=[]] if key == 'h': if actor is not None: actor.SetVisibility(not actor.GetVisibility()) # depends on [control=['if'], data=['actor']] # depends on [control=['if'], data=[]] if key == 'n': actors = renderer.GetActors() # vtkActorCollection for actor in actors: actor.VisibilityOn() # depends on [control=['for'], data=['actor']] # depends on [control=['if'], data=[]] # Update render window render_window.Render()
def save(self, data, xparent=None): """ Parses the element from XML to Python. :param data | <variant> xparent | <xml.etree.ElementTree.Element> || None :return <xml.etree.ElementTree.Element> """ if xparent is not None: elem = ElementTree.SubElement(xparent, 'dict') else: elem = ElementTree.Element('dict') for key, value in sorted(data.items()): xitem = ElementTree.SubElement(elem, 'item') xitem.set('key', nstr(key)) XmlDataIO.toXml(value, xitem) return elem
def function[save, parameter[self, data, xparent]]: constant[ Parses the element from XML to Python. :param data | <variant> xparent | <xml.etree.ElementTree.Element> || None :return <xml.etree.ElementTree.Element> ] if compare[name[xparent] is_not constant[None]] begin[:] variable[elem] assign[=] call[name[ElementTree].SubElement, parameter[name[xparent], constant[dict]]] for taget[tuple[[<ast.Name object at 0x7da1b2717250>, <ast.Name object at 0x7da1b2716890>]]] in starred[call[name[sorted], parameter[call[name[data].items, parameter[]]]]] begin[:] variable[xitem] assign[=] call[name[ElementTree].SubElement, parameter[name[elem], constant[item]]] call[name[xitem].set, parameter[constant[key], call[name[nstr], parameter[name[key]]]]] call[name[XmlDataIO].toXml, parameter[name[value], name[xitem]]] return[name[elem]]
keyword[def] identifier[save] ( identifier[self] , identifier[data] , identifier[xparent] = keyword[None] ): literal[string] keyword[if] identifier[xparent] keyword[is] keyword[not] keyword[None] : identifier[elem] = identifier[ElementTree] . identifier[SubElement] ( identifier[xparent] , literal[string] ) keyword[else] : identifier[elem] = identifier[ElementTree] . identifier[Element] ( literal[string] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[sorted] ( identifier[data] . identifier[items] ()): identifier[xitem] = identifier[ElementTree] . identifier[SubElement] ( identifier[elem] , literal[string] ) identifier[xitem] . identifier[set] ( literal[string] , identifier[nstr] ( identifier[key] )) identifier[XmlDataIO] . identifier[toXml] ( identifier[value] , identifier[xitem] ) keyword[return] identifier[elem]
def save(self, data, xparent=None): """ Parses the element from XML to Python. :param data | <variant> xparent | <xml.etree.ElementTree.Element> || None :return <xml.etree.ElementTree.Element> """ if xparent is not None: elem = ElementTree.SubElement(xparent, 'dict') # depends on [control=['if'], data=['xparent']] else: elem = ElementTree.Element('dict') for (key, value) in sorted(data.items()): xitem = ElementTree.SubElement(elem, 'item') xitem.set('key', nstr(key)) XmlDataIO.toXml(value, xitem) # depends on [control=['for'], data=[]] return elem
def AddComment(self, comment): """Adds a comment to the event tag. Args: comment (str): comment. """ if not comment: return if not self.comment: self.comment = comment else: self.comment = ''.join([self.comment, comment])
def function[AddComment, parameter[self, comment]]: constant[Adds a comment to the event tag. Args: comment (str): comment. ] if <ast.UnaryOp object at 0x7da2046233d0> begin[:] return[None] if <ast.UnaryOp object at 0x7da204621690> begin[:] name[self].comment assign[=] name[comment]
keyword[def] identifier[AddComment] ( identifier[self] , identifier[comment] ): literal[string] keyword[if] keyword[not] identifier[comment] : keyword[return] keyword[if] keyword[not] identifier[self] . identifier[comment] : identifier[self] . identifier[comment] = identifier[comment] keyword[else] : identifier[self] . identifier[comment] = literal[string] . identifier[join] ([ identifier[self] . identifier[comment] , identifier[comment] ])
def AddComment(self, comment): """Adds a comment to the event tag. Args: comment (str): comment. """ if not comment: return # depends on [control=['if'], data=[]] if not self.comment: self.comment = comment # depends on [control=['if'], data=[]] else: self.comment = ''.join([self.comment, comment])
def allow_role(role): """Allow a role identified by an email address.""" def processor(action, argument): db.session.add( ActionRoles.allow(action, argument=argument, role_id=role.id) ) return processor
def function[allow_role, parameter[role]]: constant[Allow a role identified by an email address.] def function[processor, parameter[action, argument]]: call[name[db].session.add, parameter[call[name[ActionRoles].allow, parameter[name[action]]]]] return[name[processor]]
keyword[def] identifier[allow_role] ( identifier[role] ): literal[string] keyword[def] identifier[processor] ( identifier[action] , identifier[argument] ): identifier[db] . identifier[session] . identifier[add] ( identifier[ActionRoles] . identifier[allow] ( identifier[action] , identifier[argument] = identifier[argument] , identifier[role_id] = identifier[role] . identifier[id] ) ) keyword[return] identifier[processor]
def allow_role(role): """Allow a role identified by an email address.""" def processor(action, argument): db.session.add(ActionRoles.allow(action, argument=argument, role_id=role.id)) return processor
def parse_option(self, option, block_name, *values): """ Parse app path values for option. """ # treat arguments as part of the program name (support spaces in name) values = [x.replace(' ', '\\ ') if not x.startswith(os.sep) else x for x in [str(v) for v in values]] if option == 'close': option = 'start_' + option key = option.split('_', 1)[0] self.paths[key] = set(common.extract_app_paths(values))
def function[parse_option, parameter[self, option, block_name]]: constant[ Parse app path values for option. ] variable[values] assign[=] <ast.ListComp object at 0x7da18f723dc0> if compare[name[option] equal[==] constant[close]] begin[:] variable[option] assign[=] binary_operation[constant[start_] + name[option]] variable[key] assign[=] call[call[name[option].split, parameter[constant[_], constant[1]]]][constant[0]] call[name[self].paths][name[key]] assign[=] call[name[set], parameter[call[name[common].extract_app_paths, parameter[name[values]]]]]
keyword[def] identifier[parse_option] ( identifier[self] , identifier[option] , identifier[block_name] ,* identifier[values] ): literal[string] identifier[values] =[ identifier[x] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] keyword[not] identifier[x] . identifier[startswith] ( identifier[os] . identifier[sep] ) keyword[else] identifier[x] keyword[for] identifier[x] keyword[in] [ identifier[str] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[values] ]] keyword[if] identifier[option] == literal[string] : identifier[option] = literal[string] + identifier[option] identifier[key] = identifier[option] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] identifier[self] . identifier[paths] [ identifier[key] ]= identifier[set] ( identifier[common] . identifier[extract_app_paths] ( identifier[values] ))
def parse_option(self, option, block_name, *values): """ Parse app path values for option. """ # treat arguments as part of the program name (support spaces in name) values = [x.replace(' ', '\\ ') if not x.startswith(os.sep) else x for x in [str(v) for v in values]] if option == 'close': option = 'start_' + option # depends on [control=['if'], data=['option']] key = option.split('_', 1)[0] self.paths[key] = set(common.extract_app_paths(values))
def os_details(): """ Returns a dictionary containing details about the operating system """ # Compute architecture and linkage bits, linkage = platform.architecture() results = { # Machine details "platform.arch.bits": bits, "platform.arch.linkage": linkage, "platform.machine": platform.machine(), "platform.process": platform.processor(), "sys.byteorder": sys.byteorder, # OS details "os.name": os.name, "host.name": socket.gethostname(), "sys.platform": sys.platform, "platform.system": platform.system(), "platform.release": platform.release(), "platform.version": platform.version(), "encoding.filesystem": sys.getfilesystemencoding(), } # Paths and line separators for name in "sep", "altsep", "pathsep", "linesep": results["os.{0}".format(name)] = getattr(os, name, None) try: # Available since Python 3.4 results["os.cpu_count"] = os.cpu_count() except AttributeError: results["os.cpu_count"] = None try: # Only for Unix # pylint: disable=E1101 results["sys.dlopenflags"] = sys.getdlopenflags() except AttributeError: results["sys.dlopenflags"] = None return results
def function[os_details, parameter[]]: constant[ Returns a dictionary containing details about the operating system ] <ast.Tuple object at 0x7da1b04f7040> assign[=] call[name[platform].architecture, parameter[]] variable[results] assign[=] dictionary[[<ast.Constant object at 0x7da1b04f5c60>, <ast.Constant object at 0x7da1b04f5f30>, <ast.Constant object at 0x7da1b04f6b60>, <ast.Constant object at 0x7da1b04f5f60>, <ast.Constant object at 0x7da1b04f6230>, <ast.Constant object at 0x7da1b04f6410>, <ast.Constant object at 0x7da1b04f4dc0>, <ast.Constant object at 0x7da1b04f4100>, <ast.Constant object at 0x7da1b04f4910>, <ast.Constant object at 0x7da1b04f5060>, <ast.Constant object at 0x7da1b04f51e0>, <ast.Constant object at 0x7da1b04f5d20>], [<ast.Name object at 0x7da1b04f6590>, <ast.Name object at 0x7da1b04f5030>, <ast.Call object at 0x7da1b04f4b80>, <ast.Call object at 0x7da1b04f51b0>, <ast.Attribute object at 0x7da1b04f6f20>, <ast.Attribute object at 0x7da1b04f64d0>, <ast.Call object at 0x7da1b04f59c0>, <ast.Attribute object at 0x7da1b04f4d60>, <ast.Call object at 0x7da1b04f5810>, <ast.Call object at 0x7da1b04f6830>, <ast.Call object at 0x7da1b04f6110>, <ast.Call object at 0x7da1b04f72b0>]] for taget[name[name]] in starred[tuple[[<ast.Constant object at 0x7da1b04f7430>, <ast.Constant object at 0x7da1b04f5630>, <ast.Constant object at 0x7da1b04f6e30>, <ast.Constant object at 0x7da1b04f4250>]]] begin[:] call[name[results]][call[constant[os.{0}].format, parameter[name[name]]]] assign[=] call[name[getattr], parameter[name[os], name[name], constant[None]]] <ast.Try object at 0x7da1b034a110> <ast.Try object at 0x7da1b0349120> return[name[results]]
keyword[def] identifier[os_details] (): literal[string] identifier[bits] , identifier[linkage] = identifier[platform] . identifier[architecture] () identifier[results] ={ literal[string] : identifier[bits] , literal[string] : identifier[linkage] , literal[string] : identifier[platform] . identifier[machine] (), literal[string] : identifier[platform] . identifier[processor] (), literal[string] : identifier[sys] . identifier[byteorder] , literal[string] : identifier[os] . identifier[name] , literal[string] : identifier[socket] . identifier[gethostname] (), literal[string] : identifier[sys] . identifier[platform] , literal[string] : identifier[platform] . identifier[system] (), literal[string] : identifier[platform] . identifier[release] (), literal[string] : identifier[platform] . identifier[version] (), literal[string] : identifier[sys] . identifier[getfilesystemencoding] (), } keyword[for] identifier[name] keyword[in] literal[string] , literal[string] , literal[string] , literal[string] : identifier[results] [ literal[string] . identifier[format] ( identifier[name] )]= identifier[getattr] ( identifier[os] , identifier[name] , keyword[None] ) keyword[try] : identifier[results] [ literal[string] ]= identifier[os] . identifier[cpu_count] () keyword[except] identifier[AttributeError] : identifier[results] [ literal[string] ]= keyword[None] keyword[try] : identifier[results] [ literal[string] ]= identifier[sys] . identifier[getdlopenflags] () keyword[except] identifier[AttributeError] : identifier[results] [ literal[string] ]= keyword[None] keyword[return] identifier[results]
def os_details(): """ Returns a dictionary containing details about the operating system """ # Compute architecture and linkage (bits, linkage) = platform.architecture() # Machine details # OS details results = {'platform.arch.bits': bits, 'platform.arch.linkage': linkage, 'platform.machine': platform.machine(), 'platform.process': platform.processor(), 'sys.byteorder': sys.byteorder, 'os.name': os.name, 'host.name': socket.gethostname(), 'sys.platform': sys.platform, 'platform.system': platform.system(), 'platform.release': platform.release(), 'platform.version': platform.version(), 'encoding.filesystem': sys.getfilesystemencoding()} # Paths and line separators for name in ('sep', 'altsep', 'pathsep', 'linesep'): results['os.{0}'.format(name)] = getattr(os, name, None) # depends on [control=['for'], data=['name']] try: # Available since Python 3.4 results['os.cpu_count'] = os.cpu_count() # depends on [control=['try'], data=[]] except AttributeError: results['os.cpu_count'] = None # depends on [control=['except'], data=[]] try: # Only for Unix # pylint: disable=E1101 results['sys.dlopenflags'] = sys.getdlopenflags() # depends on [control=['try'], data=[]] except AttributeError: results['sys.dlopenflags'] = None # depends on [control=['except'], data=[]] return results
def get_median_mag(self, area, rake): """ Return magnitude (Mw) given the area and rake. Setting the rake to ``None`` causes their "All" rupture-types to be applied. :param area: Area in square km. :param rake: Rake angle (the rupture propagation direction) in degrees, from -180 to 180. """ assert rake is None or -180 <= rake <= 180 if rake is None: # their "All" case return 4.07 + 0.98 * log10(area) elif (-45 <= rake <= 45) or (rake > 135) or (rake < -135): # strike slip return 3.98 + 1.02 * log10(area) elif rake > 0: # thrust/reverse return 4.33 + 0.90 * log10(area) else: # normal return 3.93 + 1.02 * log10(area)
def function[get_median_mag, parameter[self, area, rake]]: constant[ Return magnitude (Mw) given the area and rake. Setting the rake to ``None`` causes their "All" rupture-types to be applied. :param area: Area in square km. :param rake: Rake angle (the rupture propagation direction) in degrees, from -180 to 180. ] assert[<ast.BoolOp object at 0x7da18ede4a30>] if compare[name[rake] is constant[None]] begin[:] return[binary_operation[constant[4.07] + binary_operation[constant[0.98] * call[name[log10], parameter[name[area]]]]]]
keyword[def] identifier[get_median_mag] ( identifier[self] , identifier[area] , identifier[rake] ): literal[string] keyword[assert] identifier[rake] keyword[is] keyword[None] keyword[or] - literal[int] <= identifier[rake] <= literal[int] keyword[if] identifier[rake] keyword[is] keyword[None] : keyword[return] literal[int] + literal[int] * identifier[log10] ( identifier[area] ) keyword[elif] (- literal[int] <= identifier[rake] <= literal[int] ) keyword[or] ( identifier[rake] > literal[int] ) keyword[or] ( identifier[rake] <- literal[int] ): keyword[return] literal[int] + literal[int] * identifier[log10] ( identifier[area] ) keyword[elif] identifier[rake] > literal[int] : keyword[return] literal[int] + literal[int] * identifier[log10] ( identifier[area] ) keyword[else] : keyword[return] literal[int] + literal[int] * identifier[log10] ( identifier[area] )
def get_median_mag(self, area, rake): """ Return magnitude (Mw) given the area and rake. Setting the rake to ``None`` causes their "All" rupture-types to be applied. :param area: Area in square km. :param rake: Rake angle (the rupture propagation direction) in degrees, from -180 to 180. """ assert rake is None or -180 <= rake <= 180 if rake is None: # their "All" case return 4.07 + 0.98 * log10(area) # depends on [control=['if'], data=[]] elif -45 <= rake <= 45 or rake > 135 or rake < -135: # strike slip return 3.98 + 1.02 * log10(area) # depends on [control=['if'], data=[]] elif rake > 0: # thrust/reverse return 4.33 + 0.9 * log10(area) # depends on [control=['if'], data=[]] else: # normal return 3.93 + 1.02 * log10(area)
def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE): """Return a POSIX.1-2001 extended or global header sequence that contains a list of keyword, value pairs. The values must be unicode objects. """ records = [] for keyword, value in pax_headers.iteritems(): try: keyword = keyword.encode("utf8") except Exception: pass try: value = value.encode("utf8") except Exception: pass l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' n = p = 0 while True: n = l + len(str(p)) if n == p: break p = n records.append("%d %s=%s\n" % (p, keyword, value)) records = "".join(records) # We use a hardcoded "././@PaxHeader" name like star does # instead of the one that POSIX recommends. info = {} info["name"] = "././@PaxHeader" info["type"] = type info["size"] = len(records) info["magic"] = tarfile.POSIX_MAGIC # Create pax header + record blocks. return cls._create_header(info, tarfile.USTAR_FORMAT) + \ cls._create_payload(records)
def function[_create_pax_generic_header, parameter[cls, pax_headers, type]]: constant[Return a POSIX.1-2001 extended or global header sequence that contains a list of keyword, value pairs. The values must be unicode objects. ] variable[records] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b12bd2a0>, <ast.Name object at 0x7da1b12bf1f0>]]] in starred[call[name[pax_headers].iteritems, parameter[]]] begin[:] <ast.Try object at 0x7da1b12bca00> <ast.Try object at 0x7da1b12bd870> variable[l] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[keyword]]] + call[name[len], parameter[name[value]]]] + constant[3]] variable[n] assign[=] constant[0] while constant[True] begin[:] variable[n] assign[=] binary_operation[name[l] + call[name[len], parameter[call[name[str], parameter[name[p]]]]]] if compare[name[n] equal[==] name[p]] begin[:] break variable[p] assign[=] name[n] call[name[records].append, parameter[binary_operation[constant[%d %s=%s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b12bd5d0>, <ast.Name object at 0x7da1b12bd7e0>, <ast.Name object at 0x7da1b12bc850>]]]]] variable[records] assign[=] call[constant[].join, parameter[name[records]]] variable[info] assign[=] dictionary[[], []] call[name[info]][constant[name]] assign[=] constant[././@PaxHeader] call[name[info]][constant[type]] assign[=] name[type] call[name[info]][constant[size]] assign[=] call[name[len], parameter[name[records]]] call[name[info]][constant[magic]] assign[=] name[tarfile].POSIX_MAGIC return[binary_operation[call[name[cls]._create_header, parameter[name[info], name[tarfile].USTAR_FORMAT]] + call[name[cls]._create_payload, parameter[name[records]]]]]
keyword[def] identifier[_create_pax_generic_header] ( identifier[cls] , identifier[pax_headers] , identifier[type] = identifier[tarfile] . identifier[XHDTYPE] ): literal[string] identifier[records] =[] keyword[for] identifier[keyword] , identifier[value] keyword[in] identifier[pax_headers] . identifier[iteritems] (): keyword[try] : identifier[keyword] = identifier[keyword] . identifier[encode] ( literal[string] ) keyword[except] identifier[Exception] : keyword[pass] keyword[try] : identifier[value] = identifier[value] . identifier[encode] ( literal[string] ) keyword[except] identifier[Exception] : keyword[pass] identifier[l] = identifier[len] ( identifier[keyword] )+ identifier[len] ( identifier[value] )+ literal[int] identifier[n] = identifier[p] = literal[int] keyword[while] keyword[True] : identifier[n] = identifier[l] + identifier[len] ( identifier[str] ( identifier[p] )) keyword[if] identifier[n] == identifier[p] : keyword[break] identifier[p] = identifier[n] identifier[records] . identifier[append] ( literal[string] %( identifier[p] , identifier[keyword] , identifier[value] )) identifier[records] = literal[string] . identifier[join] ( identifier[records] ) identifier[info] ={} identifier[info] [ literal[string] ]= literal[string] identifier[info] [ literal[string] ]= identifier[type] identifier[info] [ literal[string] ]= identifier[len] ( identifier[records] ) identifier[info] [ literal[string] ]= identifier[tarfile] . identifier[POSIX_MAGIC] keyword[return] identifier[cls] . identifier[_create_header] ( identifier[info] , identifier[tarfile] . identifier[USTAR_FORMAT] )+ identifier[cls] . identifier[_create_payload] ( identifier[records] )
def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE): """Return a POSIX.1-2001 extended or global header sequence that contains a list of keyword, value pairs. The values must be unicode objects. """ records = [] for (keyword, value) in pax_headers.iteritems(): try: keyword = keyword.encode('utf8') # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] try: value = value.encode('utf8') # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' n = p = 0 while True: n = l + len(str(p)) if n == p: break # depends on [control=['if'], data=[]] p = n # depends on [control=['while'], data=[]] records.append('%d %s=%s\n' % (p, keyword, value)) # depends on [control=['for'], data=[]] records = ''.join(records) # We use a hardcoded "././@PaxHeader" name like star does # instead of the one that POSIX recommends. info = {} info['name'] = '././@PaxHeader' info['type'] = type info['size'] = len(records) info['magic'] = tarfile.POSIX_MAGIC # Create pax header + record blocks. return cls._create_header(info, tarfile.USTAR_FORMAT) + cls._create_payload(records)
def run_repair_pdb(self, silent=False, force_rerun=False): """Run FoldX RepairPDB on this PDB file. Original command:: foldx --command=RepairPDB --pdb=4bxi.pdb Args: silent (bool): If FoldX output should be silenced from printing to the shell. force_rerun (bool): If FoldX RepairPDB should be rerun even if a repaired file exists. """ # Create RepairPDB command foldx_repair_pdb = 'foldx --command=RepairPDB --pdb={}'.format(self.pdb_file) # Repaired PDB output file name foldx_repair_outfile = '{}_Repair.pdb'.format(op.splitext(self.pdb_file)[0]) # Run RepairPDB ssbio.utils.command_runner(shell_command=foldx_repair_pdb, force_rerun_flag=force_rerun, silent=silent, outfile_checker=foldx_repair_outfile, cwd=self.foldx_dir) # TODO: write stdout/stderr to log file somewhere! self.repaired_pdb_outfile = foldx_repair_outfile
def function[run_repair_pdb, parameter[self, silent, force_rerun]]: constant[Run FoldX RepairPDB on this PDB file. Original command:: foldx --command=RepairPDB --pdb=4bxi.pdb Args: silent (bool): If FoldX output should be silenced from printing to the shell. force_rerun (bool): If FoldX RepairPDB should be rerun even if a repaired file exists. ] variable[foldx_repair_pdb] assign[=] call[constant[foldx --command=RepairPDB --pdb={}].format, parameter[name[self].pdb_file]] variable[foldx_repair_outfile] assign[=] call[constant[{}_Repair.pdb].format, parameter[call[call[name[op].splitext, parameter[name[self].pdb_file]]][constant[0]]]] call[name[ssbio].utils.command_runner, parameter[]] name[self].repaired_pdb_outfile assign[=] name[foldx_repair_outfile]
keyword[def] identifier[run_repair_pdb] ( identifier[self] , identifier[silent] = keyword[False] , identifier[force_rerun] = keyword[False] ): literal[string] identifier[foldx_repair_pdb] = literal[string] . identifier[format] ( identifier[self] . identifier[pdb_file] ) identifier[foldx_repair_outfile] = literal[string] . identifier[format] ( identifier[op] . identifier[splitext] ( identifier[self] . identifier[pdb_file] )[ literal[int] ]) identifier[ssbio] . identifier[utils] . identifier[command_runner] ( identifier[shell_command] = identifier[foldx_repair_pdb] , identifier[force_rerun_flag] = identifier[force_rerun] , identifier[silent] = identifier[silent] , identifier[outfile_checker] = identifier[foldx_repair_outfile] , identifier[cwd] = identifier[self] . identifier[foldx_dir] ) identifier[self] . identifier[repaired_pdb_outfile] = identifier[foldx_repair_outfile]
def run_repair_pdb(self, silent=False, force_rerun=False): """Run FoldX RepairPDB on this PDB file. Original command:: foldx --command=RepairPDB --pdb=4bxi.pdb Args: silent (bool): If FoldX output should be silenced from printing to the shell. force_rerun (bool): If FoldX RepairPDB should be rerun even if a repaired file exists. """ # Create RepairPDB command foldx_repair_pdb = 'foldx --command=RepairPDB --pdb={}'.format(self.pdb_file) # Repaired PDB output file name foldx_repair_outfile = '{}_Repair.pdb'.format(op.splitext(self.pdb_file)[0]) # Run RepairPDB ssbio.utils.command_runner(shell_command=foldx_repair_pdb, force_rerun_flag=force_rerun, silent=silent, outfile_checker=foldx_repair_outfile, cwd=self.foldx_dir) # TODO: write stdout/stderr to log file somewhere! self.repaired_pdb_outfile = foldx_repair_outfile
def _check_item_type(item, field_name, allowed_types, expect_list=False, required_channels='all'): """ Check the item's type against a set of allowed types. Vary the print message regarding whether the item can be None. Helper to `BaseRecord.check_field`. Parameters ---------- item : any The item to check. field_name : str The field name. allowed_types : iterable Iterable of types the item is allowed to be. expect_list : bool, optional Whether the item is expected to be a list. required_channels : list, optional List of integers specifying which channels of the item must be present. May be set to 'all' to indicate all channels. Only used if `expect_list` is True, ie. item is a list, and its subelements are to be checked. Notes ----- This is called by `check_field`, which determines whether the item should be a list or not. This function should generally not be called by the user directly. """ if expect_list: if not isinstance(item, list): raise TypeError('Field `%s` must be a list.' % field_name) # All channels of the field must be present. if required_channels == 'all': required_channels = list(range(len(item))) for ch in range(len(item)): # Check whether the field may be None if ch in required_channels: allowed_types_ch = allowed_types else: allowed_types_ch = allowed_types + (type(None),) if not isinstance(item[ch], allowed_types_ch): raise TypeError('Channel %d of field `%s` must be one of the following types:' % (ch, field_name), allowed_types_ch) else: if not isinstance(item, allowed_types): raise TypeError('Field `%s` must be one of the following types:', allowed_types)
def function[_check_item_type, parameter[item, field_name, allowed_types, expect_list, required_channels]]: constant[ Check the item's type against a set of allowed types. Vary the print message regarding whether the item can be None. Helper to `BaseRecord.check_field`. Parameters ---------- item : any The item to check. field_name : str The field name. allowed_types : iterable Iterable of types the item is allowed to be. expect_list : bool, optional Whether the item is expected to be a list. required_channels : list, optional List of integers specifying which channels of the item must be present. May be set to 'all' to indicate all channels. Only used if `expect_list` is True, ie. item is a list, and its subelements are to be checked. Notes ----- This is called by `check_field`, which determines whether the item should be a list or not. This function should generally not be called by the user directly. ] if name[expect_list] begin[:] if <ast.UnaryOp object at 0x7da1b19ef610> begin[:] <ast.Raise object at 0x7da1b19eef50> if compare[name[required_channels] equal[==] constant[all]] begin[:] variable[required_channels] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[item]]]]]]] for taget[name[ch]] in starred[call[name[range], parameter[call[name[len], parameter[name[item]]]]]] begin[:] if compare[name[ch] in name[required_channels]] begin[:] variable[allowed_types_ch] assign[=] name[allowed_types] if <ast.UnaryOp object at 0x7da1b19ed810> begin[:] <ast.Raise object at 0x7da1b19eceb0>
keyword[def] identifier[_check_item_type] ( identifier[item] , identifier[field_name] , identifier[allowed_types] , identifier[expect_list] = keyword[False] , identifier[required_channels] = literal[string] ): literal[string] keyword[if] identifier[expect_list] : keyword[if] keyword[not] identifier[isinstance] ( identifier[item] , identifier[list] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[field_name] ) keyword[if] identifier[required_channels] == literal[string] : identifier[required_channels] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[item] ))) keyword[for] identifier[ch] keyword[in] identifier[range] ( identifier[len] ( identifier[item] )): keyword[if] identifier[ch] keyword[in] identifier[required_channels] : identifier[allowed_types_ch] = identifier[allowed_types] keyword[else] : identifier[allowed_types_ch] = identifier[allowed_types] +( identifier[type] ( keyword[None] ),) keyword[if] keyword[not] identifier[isinstance] ( identifier[item] [ identifier[ch] ], identifier[allowed_types_ch] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[ch] , identifier[field_name] ), identifier[allowed_types_ch] ) keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[item] , identifier[allowed_types] ): keyword[raise] identifier[TypeError] ( literal[string] , identifier[allowed_types] )
def _check_item_type(item, field_name, allowed_types, expect_list=False, required_channels='all'): """ Check the item's type against a set of allowed types. Vary the print message regarding whether the item can be None. Helper to `BaseRecord.check_field`. Parameters ---------- item : any The item to check. field_name : str The field name. allowed_types : iterable Iterable of types the item is allowed to be. expect_list : bool, optional Whether the item is expected to be a list. required_channels : list, optional List of integers specifying which channels of the item must be present. May be set to 'all' to indicate all channels. Only used if `expect_list` is True, ie. item is a list, and its subelements are to be checked. Notes ----- This is called by `check_field`, which determines whether the item should be a list or not. This function should generally not be called by the user directly. """ if expect_list: if not isinstance(item, list): raise TypeError('Field `%s` must be a list.' % field_name) # depends on [control=['if'], data=[]] # All channels of the field must be present. if required_channels == 'all': required_channels = list(range(len(item))) # depends on [control=['if'], data=['required_channels']] for ch in range(len(item)): # Check whether the field may be None if ch in required_channels: allowed_types_ch = allowed_types # depends on [control=['if'], data=[]] else: allowed_types_ch = allowed_types + (type(None),) if not isinstance(item[ch], allowed_types_ch): raise TypeError('Channel %d of field `%s` must be one of the following types:' % (ch, field_name), allowed_types_ch) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ch']] # depends on [control=['if'], data=[]] elif not isinstance(item, allowed_types): raise TypeError('Field `%s` must be one of the following types:', allowed_types) # depends on [control=['if'], data=[]]
def zinger(rest): "ZING!" name = 'you' if rest: name = rest.strip() karma.Karma.store.change(name, -1) return "OH MAN!!! %s TOTALLY GOT ZING'D!" % (name.upper())
def function[zinger, parameter[rest]]: constant[ZING!] variable[name] assign[=] constant[you] if name[rest] begin[:] variable[name] assign[=] call[name[rest].strip, parameter[]] call[name[karma].Karma.store.change, parameter[name[name], <ast.UnaryOp object at 0x7da1b039a890>]] return[binary_operation[constant[OH MAN!!! %s TOTALLY GOT ZING'D!] <ast.Mod object at 0x7da2590d6920> call[name[name].upper, parameter[]]]]
keyword[def] identifier[zinger] ( identifier[rest] ): literal[string] identifier[name] = literal[string] keyword[if] identifier[rest] : identifier[name] = identifier[rest] . identifier[strip] () identifier[karma] . identifier[Karma] . identifier[store] . identifier[change] ( identifier[name] ,- literal[int] ) keyword[return] literal[string] %( identifier[name] . identifier[upper] ())
def zinger(rest): """ZING!""" name = 'you' if rest: name = rest.strip() karma.Karma.store.change(name, -1) # depends on [control=['if'], data=[]] return "OH MAN!!! %s TOTALLY GOT ZING'D!" % name.upper()
def dump(self, C_out, scale_out=None, stream=None, fmt='lha', skip_redundant=True): """Return a string representation of the parameters and Wilson coefficients `C_out` in DSixTools output format. If `stream` is specified, export it to a file. `fmt` defaults to `lha` (the SLHA-like DSixTools format), but can also be `json` or `yaml` (see the pylha documentation).""" C = OrderedDict() if scale_out is not None: C['SCALES'] = {'values': [[1, self.scale_high], [2, scale_out]]} else: C['SCALES'] = {'values': [[1, self.scale_high]]} sm = io.sm_dict2lha(C_out)['BLOCK'] C.update(sm) wc = io.wc_dict2lha(C_out, skip_redundant=skip_redundant)['BLOCK'] C.update(wc) return pylha.dump({'BLOCK': C}, fmt=fmt, stream=stream)
def function[dump, parameter[self, C_out, scale_out, stream, fmt, skip_redundant]]: constant[Return a string representation of the parameters and Wilson coefficients `C_out` in DSixTools output format. If `stream` is specified, export it to a file. `fmt` defaults to `lha` (the SLHA-like DSixTools format), but can also be `json` or `yaml` (see the pylha documentation).] variable[C] assign[=] call[name[OrderedDict], parameter[]] if compare[name[scale_out] is_not constant[None]] begin[:] call[name[C]][constant[SCALES]] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ac4f0>], [<ast.List object at 0x7da1b26afd60>]] variable[sm] assign[=] call[call[name[io].sm_dict2lha, parameter[name[C_out]]]][constant[BLOCK]] call[name[C].update, parameter[name[sm]]] variable[wc] assign[=] call[call[name[io].wc_dict2lha, parameter[name[C_out]]]][constant[BLOCK]] call[name[C].update, parameter[name[wc]]] return[call[name[pylha].dump, parameter[dictionary[[<ast.Constant object at 0x7da1b26adea0>], [<ast.Name object at 0x7da1b26af040>]]]]]
keyword[def] identifier[dump] ( identifier[self] , identifier[C_out] , identifier[scale_out] = keyword[None] , identifier[stream] = keyword[None] , identifier[fmt] = literal[string] , identifier[skip_redundant] = keyword[True] ): literal[string] identifier[C] = identifier[OrderedDict] () keyword[if] identifier[scale_out] keyword[is] keyword[not] keyword[None] : identifier[C] [ literal[string] ]={ literal[string] :[[ literal[int] , identifier[self] . identifier[scale_high] ],[ literal[int] , identifier[scale_out] ]]} keyword[else] : identifier[C] [ literal[string] ]={ literal[string] :[[ literal[int] , identifier[self] . identifier[scale_high] ]]} identifier[sm] = identifier[io] . identifier[sm_dict2lha] ( identifier[C_out] )[ literal[string] ] identifier[C] . identifier[update] ( identifier[sm] ) identifier[wc] = identifier[io] . identifier[wc_dict2lha] ( identifier[C_out] , identifier[skip_redundant] = identifier[skip_redundant] )[ literal[string] ] identifier[C] . identifier[update] ( identifier[wc] ) keyword[return] identifier[pylha] . identifier[dump] ({ literal[string] : identifier[C] }, identifier[fmt] = identifier[fmt] , identifier[stream] = identifier[stream] )
def dump(self, C_out, scale_out=None, stream=None, fmt='lha', skip_redundant=True): """Return a string representation of the parameters and Wilson coefficients `C_out` in DSixTools output format. If `stream` is specified, export it to a file. `fmt` defaults to `lha` (the SLHA-like DSixTools format), but can also be `json` or `yaml` (see the pylha documentation).""" C = OrderedDict() if scale_out is not None: C['SCALES'] = {'values': [[1, self.scale_high], [2, scale_out]]} # depends on [control=['if'], data=['scale_out']] else: C['SCALES'] = {'values': [[1, self.scale_high]]} sm = io.sm_dict2lha(C_out)['BLOCK'] C.update(sm) wc = io.wc_dict2lha(C_out, skip_redundant=skip_redundant)['BLOCK'] C.update(wc) return pylha.dump({'BLOCK': C}, fmt=fmt, stream=stream)
def to_array(self): """ Serializes this InlineQueryResultPhoto to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQueryResultPhoto, self).to_array() array['type'] = u(self.type) # py2: type unicode, py3: type str array['id'] = u(self.id) # py2: type unicode, py3: type str array['photo_url'] = u(self.photo_url) # py2: type unicode, py3: type str array['thumb_url'] = u(self.thumb_url) # py2: type unicode, py3: type str if self.photo_width is not None: array['photo_width'] = int(self.photo_width) # type int if self.photo_height is not None: array['photo_height'] = int(self.photo_height) # type int if self.title is not None: array['title'] = u(self.title) # py2: type unicode, py3: type str if self.description is not None: array['description'] = u(self.description) # py2: type unicode, py3: type str if self.caption is not None: array['caption'] = u(self.caption) # py2: type unicode, py3: type str if self.parse_mode is not None: array['parse_mode'] = u(self.parse_mode) # py2: type unicode, py3: type str if self.reply_markup is not None: array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup if self.input_message_content is not None: array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent return array
def function[to_array, parameter[self]]: constant[ Serializes this InlineQueryResultPhoto to a dictionary. :return: dictionary representation of this object. :rtype: dict ] variable[array] assign[=] call[call[name[super], parameter[name[InlineQueryResultPhoto], name[self]]].to_array, parameter[]] call[name[array]][constant[type]] assign[=] call[name[u], parameter[name[self].type]] call[name[array]][constant[id]] assign[=] call[name[u], parameter[name[self].id]] call[name[array]][constant[photo_url]] assign[=] call[name[u], parameter[name[self].photo_url]] call[name[array]][constant[thumb_url]] assign[=] call[name[u], parameter[name[self].thumb_url]] if compare[name[self].photo_width is_not constant[None]] begin[:] call[name[array]][constant[photo_width]] assign[=] call[name[int], parameter[name[self].photo_width]] if compare[name[self].photo_height is_not constant[None]] begin[:] call[name[array]][constant[photo_height]] assign[=] call[name[int], parameter[name[self].photo_height]] if compare[name[self].title is_not constant[None]] begin[:] call[name[array]][constant[title]] assign[=] call[name[u], parameter[name[self].title]] if compare[name[self].description is_not constant[None]] begin[:] call[name[array]][constant[description]] assign[=] call[name[u], parameter[name[self].description]] if compare[name[self].caption is_not constant[None]] begin[:] call[name[array]][constant[caption]] assign[=] call[name[u], parameter[name[self].caption]] if compare[name[self].parse_mode is_not constant[None]] begin[:] call[name[array]][constant[parse_mode]] assign[=] call[name[u], parameter[name[self].parse_mode]] if compare[name[self].reply_markup is_not constant[None]] begin[:] call[name[array]][constant[reply_markup]] assign[=] call[name[self].reply_markup.to_array, parameter[]] if compare[name[self].input_message_content is_not constant[None]] begin[:] call[name[array]][constant[input_message_content]] assign[=] call[name[self].input_message_content.to_array, parameter[]] return[name[array]]
keyword[def] identifier[to_array] ( identifier[self] ): literal[string] identifier[array] = identifier[super] ( identifier[InlineQueryResultPhoto] , identifier[self] ). identifier[to_array] () identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[type] ) identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[id] ) identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[photo_url] ) identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[thumb_url] ) keyword[if] identifier[self] . identifier[photo_width] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[photo_width] ) keyword[if] identifier[self] . identifier[photo_height] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[photo_height] ) keyword[if] identifier[self] . identifier[title] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[title] ) keyword[if] identifier[self] . identifier[description] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[description] ) keyword[if] identifier[self] . identifier[caption] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[caption] ) keyword[if] identifier[self] . identifier[parse_mode] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[parse_mode] ) keyword[if] identifier[self] . identifier[reply_markup] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] () keyword[if] identifier[self] . identifier[input_message_content] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[self] . identifier[input_message_content] . identifier[to_array] () keyword[return] identifier[array]
def to_array(self): """ Serializes this InlineQueryResultPhoto to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQueryResultPhoto, self).to_array() array['type'] = u(self.type) # py2: type unicode, py3: type str array['id'] = u(self.id) # py2: type unicode, py3: type str array['photo_url'] = u(self.photo_url) # py2: type unicode, py3: type str array['thumb_url'] = u(self.thumb_url) # py2: type unicode, py3: type str if self.photo_width is not None: array['photo_width'] = int(self.photo_width) # type int # depends on [control=['if'], data=[]] if self.photo_height is not None: array['photo_height'] = int(self.photo_height) # type int # depends on [control=['if'], data=[]] if self.title is not None: array['title'] = u(self.title) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.description is not None: array['description'] = u(self.description) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.caption is not None: array['caption'] = u(self.caption) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.parse_mode is not None: array['parse_mode'] = u(self.parse_mode) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.reply_markup is not None: array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup # depends on [control=['if'], data=[]] if self.input_message_content is not None: array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent # depends on [control=['if'], data=[]] return array
def _reshape_irregular_array(self, array, section_height, image_width): '''Reshapes arrays of ranks not in {1, 2, 4} ''' section_area = section_height * image_width flattened_array = np.ravel(array) if not self.config['show_all']: flattened_array = flattened_array[:int(section_area/MIN_SQUARE_SIZE)] cell_count = np.prod(flattened_array.shape) cell_area = section_area / cell_count cell_side_length = max(1, floor(sqrt(cell_area))) row_count = max(1, int(section_height / cell_side_length)) col_count = int(cell_count / row_count) # Reshape the truncated array so that it has the same aspect ratio as # the section. # Truncate whatever remaining values there are that don't fit. Hopefully # it doesn't matter that the last few (< section count) aren't there. section = np.reshape(flattened_array[:row_count * col_count], (row_count, col_count)) return section
def function[_reshape_irregular_array, parameter[self, array, section_height, image_width]]: constant[Reshapes arrays of ranks not in {1, 2, 4} ] variable[section_area] assign[=] binary_operation[name[section_height] * name[image_width]] variable[flattened_array] assign[=] call[name[np].ravel, parameter[name[array]]] if <ast.UnaryOp object at 0x7da1b1f98eb0> begin[:] variable[flattened_array] assign[=] call[name[flattened_array]][<ast.Slice object at 0x7da1b1f9b310>] variable[cell_count] assign[=] call[name[np].prod, parameter[name[flattened_array].shape]] variable[cell_area] assign[=] binary_operation[name[section_area] / name[cell_count]] variable[cell_side_length] assign[=] call[name[max], parameter[constant[1], call[name[floor], parameter[call[name[sqrt], parameter[name[cell_area]]]]]]] variable[row_count] assign[=] call[name[max], parameter[constant[1], call[name[int], parameter[binary_operation[name[section_height] / name[cell_side_length]]]]]] variable[col_count] assign[=] call[name[int], parameter[binary_operation[name[cell_count] / name[row_count]]]] variable[section] assign[=] call[name[np].reshape, parameter[call[name[flattened_array]][<ast.Slice object at 0x7da1b21eafe0>], tuple[[<ast.Name object at 0x7da1b21e9630>, <ast.Name object at 0x7da1b21e92a0>]]]] return[name[section]]
keyword[def] identifier[_reshape_irregular_array] ( identifier[self] , identifier[array] , identifier[section_height] , identifier[image_width] ): literal[string] identifier[section_area] = identifier[section_height] * identifier[image_width] identifier[flattened_array] = identifier[np] . identifier[ravel] ( identifier[array] ) keyword[if] keyword[not] identifier[self] . identifier[config] [ literal[string] ]: identifier[flattened_array] = identifier[flattened_array] [: identifier[int] ( identifier[section_area] / identifier[MIN_SQUARE_SIZE] )] identifier[cell_count] = identifier[np] . identifier[prod] ( identifier[flattened_array] . identifier[shape] ) identifier[cell_area] = identifier[section_area] / identifier[cell_count] identifier[cell_side_length] = identifier[max] ( literal[int] , identifier[floor] ( identifier[sqrt] ( identifier[cell_area] ))) identifier[row_count] = identifier[max] ( literal[int] , identifier[int] ( identifier[section_height] / identifier[cell_side_length] )) identifier[col_count] = identifier[int] ( identifier[cell_count] / identifier[row_count] ) identifier[section] = identifier[np] . identifier[reshape] ( identifier[flattened_array] [: identifier[row_count] * identifier[col_count] ], ( identifier[row_count] , identifier[col_count] )) keyword[return] identifier[section]
def _reshape_irregular_array(self, array, section_height, image_width): """Reshapes arrays of ranks not in {1, 2, 4} """ section_area = section_height * image_width flattened_array = np.ravel(array) if not self.config['show_all']: flattened_array = flattened_array[:int(section_area / MIN_SQUARE_SIZE)] # depends on [control=['if'], data=[]] cell_count = np.prod(flattened_array.shape) cell_area = section_area / cell_count cell_side_length = max(1, floor(sqrt(cell_area))) row_count = max(1, int(section_height / cell_side_length)) col_count = int(cell_count / row_count) # Reshape the truncated array so that it has the same aspect ratio as # the section. # Truncate whatever remaining values there are that don't fit. Hopefully # it doesn't matter that the last few (< section count) aren't there. section = np.reshape(flattened_array[:row_count * col_count], (row_count, col_count)) return section
def nla_get_u64(nla): """Return value of 64 bit integer attribute as an int(). https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L649 Positional arguments: nla -- 64 bit integer attribute (nlattr class instance). Returns: Payload as an int(). """ tmp = c_uint64(0) if nla and nla_len(nla) >= sizeof(tmp): tmp = c_uint64.from_buffer(nla_data(nla)[:SIZEOF_U64]) return int(tmp.value)
def function[nla_get_u64, parameter[nla]]: constant[Return value of 64 bit integer attribute as an int(). https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L649 Positional arguments: nla -- 64 bit integer attribute (nlattr class instance). Returns: Payload as an int(). ] variable[tmp] assign[=] call[name[c_uint64], parameter[constant[0]]] if <ast.BoolOp object at 0x7da1b28f2290> begin[:] variable[tmp] assign[=] call[name[c_uint64].from_buffer, parameter[call[call[name[nla_data], parameter[name[nla]]]][<ast.Slice object at 0x7da1b28f0cd0>]]] return[call[name[int], parameter[name[tmp].value]]]
keyword[def] identifier[nla_get_u64] ( identifier[nla] ): literal[string] identifier[tmp] = identifier[c_uint64] ( literal[int] ) keyword[if] identifier[nla] keyword[and] identifier[nla_len] ( identifier[nla] )>= identifier[sizeof] ( identifier[tmp] ): identifier[tmp] = identifier[c_uint64] . identifier[from_buffer] ( identifier[nla_data] ( identifier[nla] )[: identifier[SIZEOF_U64] ]) keyword[return] identifier[int] ( identifier[tmp] . identifier[value] )
def nla_get_u64(nla): """Return value of 64 bit integer attribute as an int(). https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L649 Positional arguments: nla -- 64 bit integer attribute (nlattr class instance). Returns: Payload as an int(). """ tmp = c_uint64(0) if nla and nla_len(nla) >= sizeof(tmp): tmp = c_uint64.from_buffer(nla_data(nla)[:SIZEOF_U64]) # depends on [control=['if'], data=[]] return int(tmp.value)
def zadd(self, *args, **kwargs): """ For each score/value given as paramter, do a "zadd" call with score/self.instance as parameter call for each value. Values must be primary keys of the related model. """ if 'values_callback' not in kwargs: kwargs['values_callback'] = self._to_fields pieces = fields.SortedSetField.coerce_zadd_args(*args, **kwargs) for (score, related_field) in zip(*[iter(pieces)] * 2): related_method = getattr(related_field, 'zadd') related_method(score, self.instance._pk, values_callback=None)
def function[zadd, parameter[self]]: constant[ For each score/value given as paramter, do a "zadd" call with score/self.instance as parameter call for each value. Values must be primary keys of the related model. ] if compare[constant[values_callback] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] call[name[kwargs]][constant[values_callback]] assign[=] name[self]._to_fields variable[pieces] assign[=] call[name[fields].SortedSetField.coerce_zadd_args, parameter[<ast.Starred object at 0x7da1b1309c60>]] for taget[tuple[[<ast.Name object at 0x7da1b1309300>, <ast.Name object at 0x7da1b130a350>]]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da1b1308040>]]] begin[:] variable[related_method] assign[=] call[name[getattr], parameter[name[related_field], constant[zadd]]] call[name[related_method], parameter[name[score], name[self].instance._pk]]
keyword[def] identifier[zadd] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[_to_fields] identifier[pieces] = identifier[fields] . identifier[SortedSetField] . identifier[coerce_zadd_args] (* identifier[args] ,** identifier[kwargs] ) keyword[for] ( identifier[score] , identifier[related_field] ) keyword[in] identifier[zip] (*[ identifier[iter] ( identifier[pieces] )]* literal[int] ): identifier[related_method] = identifier[getattr] ( identifier[related_field] , literal[string] ) identifier[related_method] ( identifier[score] , identifier[self] . identifier[instance] . identifier[_pk] , identifier[values_callback] = keyword[None] )
def zadd(self, *args, **kwargs): """ For each score/value given as paramter, do a "zadd" call with score/self.instance as parameter call for each value. Values must be primary keys of the related model. """ if 'values_callback' not in kwargs: kwargs['values_callback'] = self._to_fields # depends on [control=['if'], data=['kwargs']] pieces = fields.SortedSetField.coerce_zadd_args(*args, **kwargs) for (score, related_field) in zip(*[iter(pieces)] * 2): related_method = getattr(related_field, 'zadd') related_method(score, self.instance._pk, values_callback=None) # depends on [control=['for'], data=[]]
def mask(array, predicates, new_value, ty): """ Returns a new array, with each element in the original array satisfying the passed-in predicate set to `new_value` Args: array (WeldObject / Numpy.ndarray): Input array predicates (WeldObject / Numpy.ndarray<bool>): Predicate set new_value (WeldObject / Numpy.ndarray / str): mask value ty (WeldType): Type of each element in the input array Returns: A WeldObject representing this computation """ weld_obj = WeldObject(encoder_, decoder_) array_var = weld_obj.update(array) if isinstance(array, WeldObject): array_var = array.obj_id weld_obj.dependencies[array_var] = array predicates_var = weld_obj.update(predicates) if isinstance(predicates, WeldObject): predicates_var = predicates.obj_id weld_obj.dependencies[predicates_var] = predicates if str(ty).startswith("vec"): new_value_var = weld_obj.update(new_value) if isinstance(new_value, WeldObject): new_value_var = new_value.obj_id weld_obj.dependencies[new_value_var] = new_value else: new_value_var = "%s(%s)" % (ty, str(new_value)) weld_template = """ map( zip(%(array)s, %(predicates)s), |p: {%(ty)s, bool}| if (p.$1, %(new_value)s, p.$0) ) """ weld_obj.weld_code = weld_template % { "array": array_var, "predicates": predicates_var, "new_value": new_value_var, "ty": ty} return weld_obj
def function[mask, parameter[array, predicates, new_value, ty]]: constant[ Returns a new array, with each element in the original array satisfying the passed-in predicate set to `new_value` Args: array (WeldObject / Numpy.ndarray): Input array predicates (WeldObject / Numpy.ndarray<bool>): Predicate set new_value (WeldObject / Numpy.ndarray / str): mask value ty (WeldType): Type of each element in the input array Returns: A WeldObject representing this computation ] variable[weld_obj] assign[=] call[name[WeldObject], parameter[name[encoder_], name[decoder_]]] variable[array_var] assign[=] call[name[weld_obj].update, parameter[name[array]]] if call[name[isinstance], parameter[name[array], name[WeldObject]]] begin[:] variable[array_var] assign[=] name[array].obj_id call[name[weld_obj].dependencies][name[array_var]] assign[=] name[array] variable[predicates_var] assign[=] call[name[weld_obj].update, parameter[name[predicates]]] if call[name[isinstance], parameter[name[predicates], name[WeldObject]]] begin[:] variable[predicates_var] assign[=] name[predicates].obj_id call[name[weld_obj].dependencies][name[predicates_var]] assign[=] name[predicates] if call[call[name[str], parameter[name[ty]]].startswith, parameter[constant[vec]]] begin[:] variable[new_value_var] assign[=] call[name[weld_obj].update, parameter[name[new_value]]] if call[name[isinstance], parameter[name[new_value], name[WeldObject]]] begin[:] variable[new_value_var] assign[=] name[new_value].obj_id call[name[weld_obj].dependencies][name[new_value_var]] assign[=] name[new_value] variable[weld_template] assign[=] constant[ map( zip(%(array)s, %(predicates)s), |p: {%(ty)s, bool}| if (p.$1, %(new_value)s, p.$0) ) ] name[weld_obj].weld_code assign[=] binary_operation[name[weld_template] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b1bedba0>, <ast.Constant object at 0x7da1b1bee0e0>, <ast.Constant object at 0x7da1b1bec100>, <ast.Constant object at 0x7da1b1bec9a0>], [<ast.Name object at 0x7da1b1becbe0>, <ast.Name object at 0x7da1b1bedc30>, <ast.Name object at 0x7da1b1bee860>, <ast.Name object at 0x7da1b1becd30>]]] return[name[weld_obj]]
keyword[def] identifier[mask] ( identifier[array] , identifier[predicates] , identifier[new_value] , identifier[ty] ): literal[string] identifier[weld_obj] = identifier[WeldObject] ( identifier[encoder_] , identifier[decoder_] ) identifier[array_var] = identifier[weld_obj] . identifier[update] ( identifier[array] ) keyword[if] identifier[isinstance] ( identifier[array] , identifier[WeldObject] ): identifier[array_var] = identifier[array] . identifier[obj_id] identifier[weld_obj] . identifier[dependencies] [ identifier[array_var] ]= identifier[array] identifier[predicates_var] = identifier[weld_obj] . identifier[update] ( identifier[predicates] ) keyword[if] identifier[isinstance] ( identifier[predicates] , identifier[WeldObject] ): identifier[predicates_var] = identifier[predicates] . identifier[obj_id] identifier[weld_obj] . identifier[dependencies] [ identifier[predicates_var] ]= identifier[predicates] keyword[if] identifier[str] ( identifier[ty] ). identifier[startswith] ( literal[string] ): identifier[new_value_var] = identifier[weld_obj] . identifier[update] ( identifier[new_value] ) keyword[if] identifier[isinstance] ( identifier[new_value] , identifier[WeldObject] ): identifier[new_value_var] = identifier[new_value] . identifier[obj_id] identifier[weld_obj] . identifier[dependencies] [ identifier[new_value_var] ]= identifier[new_value] keyword[else] : identifier[new_value_var] = literal[string] %( identifier[ty] , identifier[str] ( identifier[new_value] )) identifier[weld_template] = literal[string] identifier[weld_obj] . identifier[weld_code] = identifier[weld_template] %{ literal[string] : identifier[array_var] , literal[string] : identifier[predicates_var] , literal[string] : identifier[new_value_var] , literal[string] : identifier[ty] } keyword[return] identifier[weld_obj]
def mask(array, predicates, new_value, ty): """ Returns a new array, with each element in the original array satisfying the passed-in predicate set to `new_value` Args: array (WeldObject / Numpy.ndarray): Input array predicates (WeldObject / Numpy.ndarray<bool>): Predicate set new_value (WeldObject / Numpy.ndarray / str): mask value ty (WeldType): Type of each element in the input array Returns: A WeldObject representing this computation """ weld_obj = WeldObject(encoder_, decoder_) array_var = weld_obj.update(array) if isinstance(array, WeldObject): array_var = array.obj_id weld_obj.dependencies[array_var] = array # depends on [control=['if'], data=[]] predicates_var = weld_obj.update(predicates) if isinstance(predicates, WeldObject): predicates_var = predicates.obj_id weld_obj.dependencies[predicates_var] = predicates # depends on [control=['if'], data=[]] if str(ty).startswith('vec'): new_value_var = weld_obj.update(new_value) if isinstance(new_value, WeldObject): new_value_var = new_value.obj_id weld_obj.dependencies[new_value_var] = new_value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: new_value_var = '%s(%s)' % (ty, str(new_value)) weld_template = '\n map(\n zip(%(array)s, %(predicates)s),\n |p: {%(ty)s, bool}| if (p.$1, %(new_value)s, p.$0)\n )\n ' weld_obj.weld_code = weld_template % {'array': array_var, 'predicates': predicates_var, 'new_value': new_value_var, 'ty': ty} return weld_obj
def get_project(self, project_name): """Return the project with a given name. :param project_name: The name to search for. :type project_name: str :return: The project that has the name ``project_name`` or ``None`` if no project is found. :rtype: :class:`pytodoist.todoist.Project` >>> from pytodoist import todoist >>> user = todoist.login('john.doe@gmail.com', 'password') >>> project = user.get_project('Inbox') >>> print(project.name) Inbox """ for project in self.get_projects(): if project.name == project_name: return project
def function[get_project, parameter[self, project_name]]: constant[Return the project with a given name. :param project_name: The name to search for. :type project_name: str :return: The project that has the name ``project_name`` or ``None`` if no project is found. :rtype: :class:`pytodoist.todoist.Project` >>> from pytodoist import todoist >>> user = todoist.login('john.doe@gmail.com', 'password') >>> project = user.get_project('Inbox') >>> print(project.name) Inbox ] for taget[name[project]] in starred[call[name[self].get_projects, parameter[]]] begin[:] if compare[name[project].name equal[==] name[project_name]] begin[:] return[name[project]]
keyword[def] identifier[get_project] ( identifier[self] , identifier[project_name] ): literal[string] keyword[for] identifier[project] keyword[in] identifier[self] . identifier[get_projects] (): keyword[if] identifier[project] . identifier[name] == identifier[project_name] : keyword[return] identifier[project]
def get_project(self, project_name): """Return the project with a given name. :param project_name: The name to search for. :type project_name: str :return: The project that has the name ``project_name`` or ``None`` if no project is found. :rtype: :class:`pytodoist.todoist.Project` >>> from pytodoist import todoist >>> user = todoist.login('john.doe@gmail.com', 'password') >>> project = user.get_project('Inbox') >>> print(project.name) Inbox """ for project in self.get_projects(): if project.name == project_name: return project # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['project']]
def Install(self, apk_path, destination_dir='', replace_existing=True, grant_permissions=False, timeout_ms=None, transfer_progress_callback=None): """Install an apk to the device. Doesn't support verifier file, instead allows destination directory to be overridden. Args: apk_path: Local path to apk to install. destination_dir: Optional destination directory. Use /system/app/ for persistent applications. replace_existing: whether to replace existing application grant_permissions: If True, grant all permissions to the app specified in its manifest timeout_ms: Expected timeout for pushing and installing. transfer_progress_callback: callback method that accepts filename, bytes_written and total_bytes of APK transfer Returns: The pm install output. """ if not destination_dir: destination_dir = '/data/local/tmp/' basename = os.path.basename(apk_path) destination_path = posixpath.join(destination_dir, basename) self.Push(apk_path, destination_path, timeout_ms=timeout_ms, progress_callback=transfer_progress_callback) cmd = ['pm install'] if grant_permissions: cmd.append('-g') if replace_existing: cmd.append('-r') cmd.append('"{}"'.format(destination_path)) ret = self.Shell(' '.join(cmd), timeout_ms=timeout_ms) # Remove the apk rm_cmd = ['rm', destination_path] rmret = self.Shell(' '.join(rm_cmd), timeout_ms=timeout_ms) return ret
def function[Install, parameter[self, apk_path, destination_dir, replace_existing, grant_permissions, timeout_ms, transfer_progress_callback]]: constant[Install an apk to the device. Doesn't support verifier file, instead allows destination directory to be overridden. Args: apk_path: Local path to apk to install. destination_dir: Optional destination directory. Use /system/app/ for persistent applications. replace_existing: whether to replace existing application grant_permissions: If True, grant all permissions to the app specified in its manifest timeout_ms: Expected timeout for pushing and installing. transfer_progress_callback: callback method that accepts filename, bytes_written and total_bytes of APK transfer Returns: The pm install output. ] if <ast.UnaryOp object at 0x7da1b19b9450> begin[:] variable[destination_dir] assign[=] constant[/data/local/tmp/] variable[basename] assign[=] call[name[os].path.basename, parameter[name[apk_path]]] variable[destination_path] assign[=] call[name[posixpath].join, parameter[name[destination_dir], name[basename]]] call[name[self].Push, parameter[name[apk_path], name[destination_path]]] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b19b84c0>]] if name[grant_permissions] begin[:] call[name[cmd].append, parameter[constant[-g]]] if name[replace_existing] begin[:] call[name[cmd].append, parameter[constant[-r]]] call[name[cmd].append, parameter[call[constant["{}"].format, parameter[name[destination_path]]]]] variable[ret] assign[=] call[name[self].Shell, parameter[call[constant[ ].join, parameter[name[cmd]]]]] variable[rm_cmd] assign[=] list[[<ast.Constant object at 0x7da1b170f910>, <ast.Name object at 0x7da1b170dd80>]] variable[rmret] assign[=] call[name[self].Shell, parameter[call[constant[ ].join, parameter[name[rm_cmd]]]]] return[name[ret]]
keyword[def] identifier[Install] ( identifier[self] , identifier[apk_path] , identifier[destination_dir] = literal[string] , identifier[replace_existing] = keyword[True] , identifier[grant_permissions] = keyword[False] , identifier[timeout_ms] = keyword[None] , identifier[transfer_progress_callback] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[destination_dir] : identifier[destination_dir] = literal[string] identifier[basename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[apk_path] ) identifier[destination_path] = identifier[posixpath] . identifier[join] ( identifier[destination_dir] , identifier[basename] ) identifier[self] . identifier[Push] ( identifier[apk_path] , identifier[destination_path] , identifier[timeout_ms] = identifier[timeout_ms] , identifier[progress_callback] = identifier[transfer_progress_callback] ) identifier[cmd] =[ literal[string] ] keyword[if] identifier[grant_permissions] : identifier[cmd] . identifier[append] ( literal[string] ) keyword[if] identifier[replace_existing] : identifier[cmd] . identifier[append] ( literal[string] ) identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[destination_path] )) identifier[ret] = identifier[self] . identifier[Shell] ( literal[string] . identifier[join] ( identifier[cmd] ), identifier[timeout_ms] = identifier[timeout_ms] ) identifier[rm_cmd] =[ literal[string] , identifier[destination_path] ] identifier[rmret] = identifier[self] . identifier[Shell] ( literal[string] . identifier[join] ( identifier[rm_cmd] ), identifier[timeout_ms] = identifier[timeout_ms] ) keyword[return] identifier[ret]
def Install(self, apk_path, destination_dir='', replace_existing=True, grant_permissions=False, timeout_ms=None, transfer_progress_callback=None): """Install an apk to the device. Doesn't support verifier file, instead allows destination directory to be overridden. Args: apk_path: Local path to apk to install. destination_dir: Optional destination directory. Use /system/app/ for persistent applications. replace_existing: whether to replace existing application grant_permissions: If True, grant all permissions to the app specified in its manifest timeout_ms: Expected timeout for pushing and installing. transfer_progress_callback: callback method that accepts filename, bytes_written and total_bytes of APK transfer Returns: The pm install output. """ if not destination_dir: destination_dir = '/data/local/tmp/' # depends on [control=['if'], data=[]] basename = os.path.basename(apk_path) destination_path = posixpath.join(destination_dir, basename) self.Push(apk_path, destination_path, timeout_ms=timeout_ms, progress_callback=transfer_progress_callback) cmd = ['pm install'] if grant_permissions: cmd.append('-g') # depends on [control=['if'], data=[]] if replace_existing: cmd.append('-r') # depends on [control=['if'], data=[]] cmd.append('"{}"'.format(destination_path)) ret = self.Shell(' '.join(cmd), timeout_ms=timeout_ms) # Remove the apk rm_cmd = ['rm', destination_path] rmret = self.Shell(' '.join(rm_cmd), timeout_ms=timeout_ms) return ret
def item_huisnummer_adapter(obj, request): """ Adapter for rendering an object of :class:`crabpy.gateway.crab.Huisnummer` to json. """ return { 'id': obj.id, 'huisnummer': obj.huisnummer, 'postadres': obj.postadres, 'status': { 'id': obj.status.id, 'naam': obj.status.naam, 'definitie': obj.status.definitie }, 'metadata': { 'begin_tijd': obj.metadata.begin_tijd, 'begin_datum': obj.metadata.begin_datum, 'begin_bewerking': { 'id': obj.metadata.begin_bewerking.id, 'naam': obj.metadata.begin_bewerking.naam, 'definitie': obj.metadata.begin_bewerking.definitie }, 'begin_organisatie': { 'id': obj.metadata.begin_organisatie.id, 'naam': obj.metadata.begin_organisatie.naam, 'definitie': obj.metadata.begin_organisatie.definitie } }, 'bounding_box': obj.bounding_box }
def function[item_huisnummer_adapter, parameter[obj, request]]: constant[ Adapter for rendering an object of :class:`crabpy.gateway.crab.Huisnummer` to json. ] return[dictionary[[<ast.Constant object at 0x7da18ede7f70>, <ast.Constant object at 0x7da18ede5870>, <ast.Constant object at 0x7da18ede5ed0>, <ast.Constant object at 0x7da18ede5840>, <ast.Constant object at 0x7da18ede6b60>, <ast.Constant object at 0x7da18ede5180>], [<ast.Attribute object at 0x7da18ede74f0>, <ast.Attribute object at 0x7da18ede6800>, <ast.Attribute object at 0x7da18ede4a30>, <ast.Dict object at 0x7da18ede5db0>, <ast.Dict object at 0x7da18ede72e0>, <ast.Attribute object at 0x7da1b0a9f940>]]]
keyword[def] identifier[item_huisnummer_adapter] ( identifier[obj] , identifier[request] ): literal[string] keyword[return] { literal[string] : identifier[obj] . identifier[id] , literal[string] : identifier[obj] . identifier[huisnummer] , literal[string] : identifier[obj] . identifier[postadres] , literal[string] :{ literal[string] : identifier[obj] . identifier[status] . identifier[id] , literal[string] : identifier[obj] . identifier[status] . identifier[naam] , literal[string] : identifier[obj] . identifier[status] . identifier[definitie] }, literal[string] :{ literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_tijd] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_datum] , literal[string] :{ literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_bewerking] . identifier[id] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_bewerking] . identifier[naam] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_bewerking] . identifier[definitie] }, literal[string] :{ literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_organisatie] . identifier[id] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_organisatie] . identifier[naam] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_organisatie] . identifier[definitie] } }, literal[string] : identifier[obj] . identifier[bounding_box] }
def item_huisnummer_adapter(obj, request): """ Adapter for rendering an object of :class:`crabpy.gateway.crab.Huisnummer` to json. """ return {'id': obj.id, 'huisnummer': obj.huisnummer, 'postadres': obj.postadres, 'status': {'id': obj.status.id, 'naam': obj.status.naam, 'definitie': obj.status.definitie}, 'metadata': {'begin_tijd': obj.metadata.begin_tijd, 'begin_datum': obj.metadata.begin_datum, 'begin_bewerking': {'id': obj.metadata.begin_bewerking.id, 'naam': obj.metadata.begin_bewerking.naam, 'definitie': obj.metadata.begin_bewerking.definitie}, 'begin_organisatie': {'id': obj.metadata.begin_organisatie.id, 'naam': obj.metadata.begin_organisatie.naam, 'definitie': obj.metadata.begin_organisatie.definitie}}, 'bounding_box': obj.bounding_box}
def _Rzderiv(self,R,z,phi=0.,t=0.): """ NAME: _Rzderiv PURPOSE: evaluate the mixed R,z derivative for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t - time OUTPUT: d2phi/dR/dz HISTORY: 2015-02-07 - Written - Bovy (IAS) """ return self._mn3[0].Rzderiv(R,z,phi=phi,t=t)\ +self._mn3[1].Rzderiv(R,z,phi=phi,t=t)\ +self._mn3[2].Rzderiv(R,z,phi=phi,t=t)
def function[_Rzderiv, parameter[self, R, z, phi, t]]: constant[ NAME: _Rzderiv PURPOSE: evaluate the mixed R,z derivative for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t - time OUTPUT: d2phi/dR/dz HISTORY: 2015-02-07 - Written - Bovy (IAS) ] return[binary_operation[binary_operation[call[call[name[self]._mn3][constant[0]].Rzderiv, parameter[name[R], name[z]]] + call[call[name[self]._mn3][constant[1]].Rzderiv, parameter[name[R], name[z]]]] + call[call[name[self]._mn3][constant[2]].Rzderiv, parameter[name[R], name[z]]]]]
keyword[def] identifier[_Rzderiv] ( identifier[self] , identifier[R] , identifier[z] , identifier[phi] = literal[int] , identifier[t] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[_mn3] [ literal[int] ]. identifier[Rzderiv] ( identifier[R] , identifier[z] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] )+ identifier[self] . identifier[_mn3] [ literal[int] ]. identifier[Rzderiv] ( identifier[R] , identifier[z] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] )+ identifier[self] . identifier[_mn3] [ literal[int] ]. identifier[Rzderiv] ( identifier[R] , identifier[z] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] )
def _Rzderiv(self, R, z, phi=0.0, t=0.0): """ NAME: _Rzderiv PURPOSE: evaluate the mixed R,z derivative for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t - time OUTPUT: d2phi/dR/dz HISTORY: 2015-02-07 - Written - Bovy (IAS) """ return self._mn3[0].Rzderiv(R, z, phi=phi, t=t) + self._mn3[1].Rzderiv(R, z, phi=phi, t=t) + self._mn3[2].Rzderiv(R, z, phi=phi, t=t)
def create(self, properties): """ Create a new (user-defined) User Role in this HMC. Authorization requirements: * Task permission to the "Manage User Roles" task. Parameters: properties (dict): Initial property values. Allowable properties are defined in section 'Request body contents' in section 'Create User Role' in the :term:`HMC API` book. Returns: UserRole: The resource object for the new User Role. The object will have its 'object-uri' property set as returned by the HMC, and will also have the input properties set. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` """ result = self.session.post(self.console.uri + '/user-roles', body=properties) # There should not be overlaps, but just in case there are, the # returned props should overwrite the input props: props = copy.deepcopy(properties) props.update(result) name = props.get(self._name_prop, None) uri = props[self._uri_prop] user_role = UserRole(self, uri, name, props) self._name_uri_cache.update(name, uri) return user_role
def function[create, parameter[self, properties]]: constant[ Create a new (user-defined) User Role in this HMC. Authorization requirements: * Task permission to the "Manage User Roles" task. Parameters: properties (dict): Initial property values. Allowable properties are defined in section 'Request body contents' in section 'Create User Role' in the :term:`HMC API` book. Returns: UserRole: The resource object for the new User Role. The object will have its 'object-uri' property set as returned by the HMC, and will also have the input properties set. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` ] variable[result] assign[=] call[name[self].session.post, parameter[binary_operation[name[self].console.uri + constant[/user-roles]]]] variable[props] assign[=] call[name[copy].deepcopy, parameter[name[properties]]] call[name[props].update, parameter[name[result]]] variable[name] assign[=] call[name[props].get, parameter[name[self]._name_prop, constant[None]]] variable[uri] assign[=] call[name[props]][name[self]._uri_prop] variable[user_role] assign[=] call[name[UserRole], parameter[name[self], name[uri], name[name], name[props]]] call[name[self]._name_uri_cache.update, parameter[name[name], name[uri]]] return[name[user_role]]
keyword[def] identifier[create] ( identifier[self] , identifier[properties] ): literal[string] identifier[result] = identifier[self] . identifier[session] . identifier[post] ( identifier[self] . identifier[console] . identifier[uri] + literal[string] , identifier[body] = identifier[properties] ) identifier[props] = identifier[copy] . identifier[deepcopy] ( identifier[properties] ) identifier[props] . identifier[update] ( identifier[result] ) identifier[name] = identifier[props] . identifier[get] ( identifier[self] . identifier[_name_prop] , keyword[None] ) identifier[uri] = identifier[props] [ identifier[self] . identifier[_uri_prop] ] identifier[user_role] = identifier[UserRole] ( identifier[self] , identifier[uri] , identifier[name] , identifier[props] ) identifier[self] . identifier[_name_uri_cache] . identifier[update] ( identifier[name] , identifier[uri] ) keyword[return] identifier[user_role]
def create(self, properties): """ Create a new (user-defined) User Role in this HMC. Authorization requirements: * Task permission to the "Manage User Roles" task. Parameters: properties (dict): Initial property values. Allowable properties are defined in section 'Request body contents' in section 'Create User Role' in the :term:`HMC API` book. Returns: UserRole: The resource object for the new User Role. The object will have its 'object-uri' property set as returned by the HMC, and will also have the input properties set. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` """ result = self.session.post(self.console.uri + '/user-roles', body=properties) # There should not be overlaps, but just in case there are, the # returned props should overwrite the input props: props = copy.deepcopy(properties) props.update(result) name = props.get(self._name_prop, None) uri = props[self._uri_prop] user_role = UserRole(self, uri, name, props) self._name_uri_cache.update(name, uri) return user_role
def to_cmd_args(mapping): # type: (dict) -> list """Transform a dictionary in a list of cmd arguments. Example: >>>args = mapping.to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25}) >>> >>>print(args) ['--model_dir', '/opt/ml/model', '--batch_size', 25] Args: mapping (dict[str, object]): A Python mapping. Returns: (list): List of cmd arguments """ sorted_keys = sorted(mapping.keys()) def arg_name(obj): string = _decode(obj) if string: return u'--%s' % string if len(string) > 1 else u'-%s' % string else: return u'' arg_names = [arg_name(argument) for argument in sorted_keys] def arg_value(value): if hasattr(value, 'items'): map_items = ['%s=%s' % (k, v) for k, v in sorted(value.items())] return ','.join(map_items) return _decode(value) arg_values = [arg_value(mapping[key]) for key in sorted_keys] items = zip(arg_names, arg_values) return [item for item in itertools.chain.from_iterable(items)]
def function[to_cmd_args, parameter[mapping]]: constant[Transform a dictionary in a list of cmd arguments. Example: >>>args = mapping.to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25}) >>> >>>print(args) ['--model_dir', '/opt/ml/model', '--batch_size', 25] Args: mapping (dict[str, object]): A Python mapping. Returns: (list): List of cmd arguments ] variable[sorted_keys] assign[=] call[name[sorted], parameter[call[name[mapping].keys, parameter[]]]] def function[arg_name, parameter[obj]]: variable[string] assign[=] call[name[_decode], parameter[name[obj]]] if name[string] begin[:] return[<ast.IfExp object at 0x7da1b1647dc0>] variable[arg_names] assign[=] <ast.ListComp object at 0x7da1b1646fb0> def function[arg_value, parameter[value]]: if call[name[hasattr], parameter[name[value], constant[items]]] begin[:] variable[map_items] assign[=] <ast.ListComp object at 0x7da1b16442e0> return[call[constant[,].join, parameter[name[map_items]]]] return[call[name[_decode], parameter[name[value]]]] variable[arg_values] assign[=] <ast.ListComp object at 0x7da1b1645ae0> variable[items] assign[=] call[name[zip], parameter[name[arg_names], name[arg_values]]] return[<ast.ListComp object at 0x7da1b1645db0>]
keyword[def] identifier[to_cmd_args] ( identifier[mapping] ): literal[string] identifier[sorted_keys] = identifier[sorted] ( identifier[mapping] . identifier[keys] ()) keyword[def] identifier[arg_name] ( identifier[obj] ): identifier[string] = identifier[_decode] ( identifier[obj] ) keyword[if] identifier[string] : keyword[return] literal[string] % identifier[string] keyword[if] identifier[len] ( identifier[string] )> literal[int] keyword[else] literal[string] % identifier[string] keyword[else] : keyword[return] literal[string] identifier[arg_names] =[ identifier[arg_name] ( identifier[argument] ) keyword[for] identifier[argument] keyword[in] identifier[sorted_keys] ] keyword[def] identifier[arg_value] ( identifier[value] ): keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ): identifier[map_items] =[ literal[string] %( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sorted] ( identifier[value] . identifier[items] ())] keyword[return] literal[string] . identifier[join] ( identifier[map_items] ) keyword[return] identifier[_decode] ( identifier[value] ) identifier[arg_values] =[ identifier[arg_value] ( identifier[mapping] [ identifier[key] ]) keyword[for] identifier[key] keyword[in] identifier[sorted_keys] ] identifier[items] = identifier[zip] ( identifier[arg_names] , identifier[arg_values] ) keyword[return] [ identifier[item] keyword[for] identifier[item] keyword[in] identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[items] )]
def to_cmd_args(mapping): # type: (dict) -> list "Transform a dictionary in a list of cmd arguments.\n Example:\n >>>args = mapping.to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25})\n >>>\n >>>print(args)\n ['--model_dir', '/opt/ml/model', '--batch_size', 25]\n Args:\n mapping (dict[str, object]): A Python mapping.\n Returns:\n (list): List of cmd arguments\n " sorted_keys = sorted(mapping.keys()) def arg_name(obj): string = _decode(obj) if string: return u'--%s' % string if len(string) > 1 else u'-%s' % string # depends on [control=['if'], data=[]] else: return u'' arg_names = [arg_name(argument) for argument in sorted_keys] def arg_value(value): if hasattr(value, 'items'): map_items = ['%s=%s' % (k, v) for (k, v) in sorted(value.items())] return ','.join(map_items) # depends on [control=['if'], data=[]] return _decode(value) arg_values = [arg_value(mapping[key]) for key in sorted_keys] items = zip(arg_names, arg_values) return [item for item in itertools.chain.from_iterable(items)]
def handle_m2m(self, sender, instance, **kwargs): """ Handle many to many relationships """ self.handle_save(instance.__class__, instance)
def function[handle_m2m, parameter[self, sender, instance]]: constant[ Handle many to many relationships ] call[name[self].handle_save, parameter[name[instance].__class__, name[instance]]]
keyword[def] identifier[handle_m2m] ( identifier[self] , identifier[sender] , identifier[instance] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[handle_save] ( identifier[instance] . identifier[__class__] , identifier[instance] )
def handle_m2m(self, sender, instance, **kwargs): """ Handle many to many relationships """ self.handle_save(instance.__class__, instance)
def _iso_num_weeks(iso_year): "Get the number of ISO-weeks in this year" year_start = _iso_year_start(iso_year) next_year_start = _iso_year_start(iso_year+1) year_num_weeks = ((next_year_start - year_start).days) // 7 return year_num_weeks
def function[_iso_num_weeks, parameter[iso_year]]: constant[Get the number of ISO-weeks in this year] variable[year_start] assign[=] call[name[_iso_year_start], parameter[name[iso_year]]] variable[next_year_start] assign[=] call[name[_iso_year_start], parameter[binary_operation[name[iso_year] + constant[1]]]] variable[year_num_weeks] assign[=] binary_operation[binary_operation[name[next_year_start] - name[year_start]].days <ast.FloorDiv object at 0x7da2590d6bc0> constant[7]] return[name[year_num_weeks]]
keyword[def] identifier[_iso_num_weeks] ( identifier[iso_year] ): literal[string] identifier[year_start] = identifier[_iso_year_start] ( identifier[iso_year] ) identifier[next_year_start] = identifier[_iso_year_start] ( identifier[iso_year] + literal[int] ) identifier[year_num_weeks] =(( identifier[next_year_start] - identifier[year_start] ). identifier[days] )// literal[int] keyword[return] identifier[year_num_weeks]
def _iso_num_weeks(iso_year): """Get the number of ISO-weeks in this year""" year_start = _iso_year_start(iso_year) next_year_start = _iso_year_start(iso_year + 1) year_num_weeks = (next_year_start - year_start).days // 7 return year_num_weeks
def union(self, *dstreams): """ Create a unified DStream from multiple DStreams of the same type and same slide duration. """ if not dstreams: raise ValueError("should have at least one DStream to union") if len(dstreams) == 1: return dstreams[0] if len(set(s._jrdd_deserializer for s in dstreams)) > 1: raise ValueError("All DStreams should have same serializer") if len(set(s._slideDuration for s in dstreams)) > 1: raise ValueError("All DStreams should have same slide duration") cls = SparkContext._jvm.org.apache.spark.streaming.api.java.JavaDStream jdstreams = SparkContext._gateway.new_array(cls, len(dstreams)) for i in range(0, len(dstreams)): jdstreams[i] = dstreams[i]._jdstream return DStream(self._jssc.union(jdstreams), self, dstreams[0]._jrdd_deserializer)
def function[union, parameter[self]]: constant[ Create a unified DStream from multiple DStreams of the same type and same slide duration. ] if <ast.UnaryOp object at 0x7da1b20b4f10> begin[:] <ast.Raise object at 0x7da1b20b5180> if compare[call[name[len], parameter[name[dstreams]]] equal[==] constant[1]] begin[:] return[call[name[dstreams]][constant[0]]] if compare[call[name[len], parameter[call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b20b6200>]]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b20b6590> if compare[call[name[len], parameter[call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c993b50>]]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da20e956c80> variable[cls] assign[=] name[SparkContext]._jvm.org.apache.spark.streaming.api.java.JavaDStream variable[jdstreams] assign[=] call[name[SparkContext]._gateway.new_array, parameter[name[cls], call[name[len], parameter[name[dstreams]]]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[dstreams]]]]]] begin[:] call[name[jdstreams]][name[i]] assign[=] call[name[dstreams]][name[i]]._jdstream return[call[name[DStream], parameter[call[name[self]._jssc.union, parameter[name[jdstreams]]], name[self], call[name[dstreams]][constant[0]]._jrdd_deserializer]]]
keyword[def] identifier[union] ( identifier[self] ,* identifier[dstreams] ): literal[string] keyword[if] keyword[not] identifier[dstreams] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[dstreams] )== literal[int] : keyword[return] identifier[dstreams] [ literal[int] ] keyword[if] identifier[len] ( identifier[set] ( identifier[s] . identifier[_jrdd_deserializer] keyword[for] identifier[s] keyword[in] identifier[dstreams] ))> literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[set] ( identifier[s] . identifier[_slideDuration] keyword[for] identifier[s] keyword[in] identifier[dstreams] ))> literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[cls] = identifier[SparkContext] . identifier[_jvm] . identifier[org] . identifier[apache] . identifier[spark] . identifier[streaming] . identifier[api] . identifier[java] . identifier[JavaDStream] identifier[jdstreams] = identifier[SparkContext] . identifier[_gateway] . identifier[new_array] ( identifier[cls] , identifier[len] ( identifier[dstreams] )) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[dstreams] )): identifier[jdstreams] [ identifier[i] ]= identifier[dstreams] [ identifier[i] ]. identifier[_jdstream] keyword[return] identifier[DStream] ( identifier[self] . identifier[_jssc] . identifier[union] ( identifier[jdstreams] ), identifier[self] , identifier[dstreams] [ literal[int] ]. identifier[_jrdd_deserializer] )
def union(self, *dstreams): """ Create a unified DStream from multiple DStreams of the same type and same slide duration. """ if not dstreams: raise ValueError('should have at least one DStream to union') # depends on [control=['if'], data=[]] if len(dstreams) == 1: return dstreams[0] # depends on [control=['if'], data=[]] if len(set((s._jrdd_deserializer for s in dstreams))) > 1: raise ValueError('All DStreams should have same serializer') # depends on [control=['if'], data=[]] if len(set((s._slideDuration for s in dstreams))) > 1: raise ValueError('All DStreams should have same slide duration') # depends on [control=['if'], data=[]] cls = SparkContext._jvm.org.apache.spark.streaming.api.java.JavaDStream jdstreams = SparkContext._gateway.new_array(cls, len(dstreams)) for i in range(0, len(dstreams)): jdstreams[i] = dstreams[i]._jdstream # depends on [control=['for'], data=['i']] return DStream(self._jssc.union(jdstreams), self, dstreams[0]._jrdd_deserializer)
def fit_overlays(self, text, start=None, end=None, **kw): """ Get an overlay thet fits the range [start, end). """ for ovl in text.overlays: if ovl.match(props=self.props_match, rng=(start, end)): yield ovl
def function[fit_overlays, parameter[self, text, start, end]]: constant[ Get an overlay thet fits the range [start, end). ] for taget[name[ovl]] in starred[name[text].overlays] begin[:] if call[name[ovl].match, parameter[]] begin[:] <ast.Yield object at 0x7da2043462c0>
keyword[def] identifier[fit_overlays] ( identifier[self] , identifier[text] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ,** identifier[kw] ): literal[string] keyword[for] identifier[ovl] keyword[in] identifier[text] . identifier[overlays] : keyword[if] identifier[ovl] . identifier[match] ( identifier[props] = identifier[self] . identifier[props_match] , identifier[rng] =( identifier[start] , identifier[end] )): keyword[yield] identifier[ovl]
def fit_overlays(self, text, start=None, end=None, **kw): """ Get an overlay thet fits the range [start, end). """ for ovl in text.overlays: if ovl.match(props=self.props_match, rng=(start, end)): yield ovl # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ovl']]
def project(original_image, perturbed_images, alphas, shape, constraint): """ Projection onto given l2 / linf balls in a batch. """ alphas_shape = [len(alphas)] + [1] * len(shape) alphas = alphas.reshape(alphas_shape) if constraint == 'l2': projected = (1-alphas) * original_image + alphas * perturbed_images elif constraint == 'linf': projected = clip_image( perturbed_images, original_image - alphas, original_image + alphas ) return projected
def function[project, parameter[original_image, perturbed_images, alphas, shape, constraint]]: constant[ Projection onto given l2 / linf balls in a batch. ] variable[alphas_shape] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b1ef00a0>]] + binary_operation[list[[<ast.Constant object at 0x7da1b1ef02e0>]] * call[name[len], parameter[name[shape]]]]] variable[alphas] assign[=] call[name[alphas].reshape, parameter[name[alphas_shape]]] if compare[name[constraint] equal[==] constant[l2]] begin[:] variable[projected] assign[=] binary_operation[binary_operation[binary_operation[constant[1] - name[alphas]] * name[original_image]] + binary_operation[name[alphas] * name[perturbed_images]]] return[name[projected]]
keyword[def] identifier[project] ( identifier[original_image] , identifier[perturbed_images] , identifier[alphas] , identifier[shape] , identifier[constraint] ): literal[string] identifier[alphas_shape] =[ identifier[len] ( identifier[alphas] )]+[ literal[int] ]* identifier[len] ( identifier[shape] ) identifier[alphas] = identifier[alphas] . identifier[reshape] ( identifier[alphas_shape] ) keyword[if] identifier[constraint] == literal[string] : identifier[projected] =( literal[int] - identifier[alphas] )* identifier[original_image] + identifier[alphas] * identifier[perturbed_images] keyword[elif] identifier[constraint] == literal[string] : identifier[projected] = identifier[clip_image] ( identifier[perturbed_images] , identifier[original_image] - identifier[alphas] , identifier[original_image] + identifier[alphas] ) keyword[return] identifier[projected]
def project(original_image, perturbed_images, alphas, shape, constraint): """ Projection onto given l2 / linf balls in a batch. """ alphas_shape = [len(alphas)] + [1] * len(shape) alphas = alphas.reshape(alphas_shape) if constraint == 'l2': projected = (1 - alphas) * original_image + alphas * perturbed_images # depends on [control=['if'], data=[]] elif constraint == 'linf': projected = clip_image(perturbed_images, original_image - alphas, original_image + alphas) # depends on [control=['if'], data=[]] return projected
def resolve_to_callable(callable_name): """ Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'. """ from . import registry clean_callable_name = callable_name.replace( '{{', '').replace('}}', '').strip() try: return registry.get(clean_callable_name) except KeyError: try: from zope.dottedname.resolve import resolve return resolve(clean_callable_name) except ImportError: raise ImportError( 'Failed to load callable `{}`'.format(clean_callable_name))
def function[resolve_to_callable, parameter[callable_name]]: constant[ Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'. ] from relative_module[None] import module[registry] variable[clean_callable_name] assign[=] call[call[call[name[callable_name].replace, parameter[constant[{{], constant[]]].replace, parameter[constant[}}], constant[]]].strip, parameter[]] <ast.Try object at 0x7da18f721750>
keyword[def] identifier[resolve_to_callable] ( identifier[callable_name] ): literal[string] keyword[from] . keyword[import] identifier[registry] identifier[clean_callable_name] = identifier[callable_name] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[strip] () keyword[try] : keyword[return] identifier[registry] . identifier[get] ( identifier[clean_callable_name] ) keyword[except] identifier[KeyError] : keyword[try] : keyword[from] identifier[zope] . identifier[dottedname] . identifier[resolve] keyword[import] identifier[resolve] keyword[return] identifier[resolve] ( identifier[clean_callable_name] ) keyword[except] identifier[ImportError] : keyword[raise] identifier[ImportError] ( literal[string] . identifier[format] ( identifier[clean_callable_name] ))
def resolve_to_callable(callable_name): """ Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'. """ from . import registry clean_callable_name = callable_name.replace('{{', '').replace('}}', '').strip() try: return registry.get(clean_callable_name) # depends on [control=['try'], data=[]] except KeyError: try: from zope.dottedname.resolve import resolve return resolve(clean_callable_name) # depends on [control=['try'], data=[]] except ImportError: raise ImportError('Failed to load callable `{}`'.format(clean_callable_name)) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
def get_allowed_methods(self): """Returns a coma-separated list of method names that are allowed on this instance. Useful to set the ``Allowed`` response header. """ return ", ".join([method for method in dir(self) if method.upper() == method and callable(getattr(self, method))])
def function[get_allowed_methods, parameter[self]]: constant[Returns a coma-separated list of method names that are allowed on this instance. Useful to set the ``Allowed`` response header. ] return[call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b2298820>]]]
keyword[def] identifier[get_allowed_methods] ( identifier[self] ): literal[string] keyword[return] literal[string] . identifier[join] ([ identifier[method] keyword[for] identifier[method] keyword[in] identifier[dir] ( identifier[self] ) keyword[if] identifier[method] . identifier[upper] ()== identifier[method] keyword[and] identifier[callable] ( identifier[getattr] ( identifier[self] , identifier[method] ))])
def get_allowed_methods(self): """Returns a coma-separated list of method names that are allowed on this instance. Useful to set the ``Allowed`` response header. """ return ', '.join([method for method in dir(self) if method.upper() == method and callable(getattr(self, method))])
def pressAndHold(*args): ''' press and hold. Do NOT release. accepts as many arguments as you want. e.g. pressAndHold('left_arrow', 'a','b'). ''' for i in args: win32api.keybd_event(VK_CODE[i], 0,0,0) time.sleep(.05)
def function[pressAndHold, parameter[]]: constant[ press and hold. Do NOT release. accepts as many arguments as you want. e.g. pressAndHold('left_arrow', 'a','b'). ] for taget[name[i]] in starred[name[args]] begin[:] call[name[win32api].keybd_event, parameter[call[name[VK_CODE]][name[i]], constant[0], constant[0], constant[0]]] call[name[time].sleep, parameter[constant[0.05]]]
keyword[def] identifier[pressAndHold] (* identifier[args] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[args] : identifier[win32api] . identifier[keybd_event] ( identifier[VK_CODE] [ identifier[i] ], literal[int] , literal[int] , literal[int] ) identifier[time] . identifier[sleep] ( literal[int] )
def pressAndHold(*args): """ press and hold. Do NOT release. accepts as many arguments as you want. e.g. pressAndHold('left_arrow', 'a','b'). """ for i in args: win32api.keybd_event(VK_CODE[i], 0, 0, 0) time.sleep(0.05) # depends on [control=['for'], data=['i']]
def addParts(parentPart, childPath, count, index): """ BUILD A hierarchy BY REPEATEDLY CALLING self METHOD WITH VARIOUS childPaths count IS THE NUMBER FOUND FOR self PATH """ if index == None: index = 0 if index == len(childPath): return c = childPath[index] parentPart.count = coalesce(parentPart.count, 0) + count if parentPart.partitions == None: parentPart.partitions = FlatList() for i, part in enumerate(parentPart.partitions): if part.name == c.name: addParts(part, childPath, count, index + 1) return parentPart.partitions.append(c) addParts(c, childPath, count, index + 1)
def function[addParts, parameter[parentPart, childPath, count, index]]: constant[ BUILD A hierarchy BY REPEATEDLY CALLING self METHOD WITH VARIOUS childPaths count IS THE NUMBER FOUND FOR self PATH ] if compare[name[index] equal[==] constant[None]] begin[:] variable[index] assign[=] constant[0] if compare[name[index] equal[==] call[name[len], parameter[name[childPath]]]] begin[:] return[None] variable[c] assign[=] call[name[childPath]][name[index]] name[parentPart].count assign[=] binary_operation[call[name[coalesce], parameter[name[parentPart].count, constant[0]]] + name[count]] if compare[name[parentPart].partitions equal[==] constant[None]] begin[:] name[parentPart].partitions assign[=] call[name[FlatList], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0af6ef0>, <ast.Name object at 0x7da1b0af6dd0>]]] in starred[call[name[enumerate], parameter[name[parentPart].partitions]]] begin[:] if compare[name[part].name equal[==] name[c].name] begin[:] call[name[addParts], parameter[name[part], name[childPath], name[count], binary_operation[name[index] + constant[1]]]] return[None] call[name[parentPart].partitions.append, parameter[name[c]]] call[name[addParts], parameter[name[c], name[childPath], name[count], binary_operation[name[index] + constant[1]]]]
keyword[def] identifier[addParts] ( identifier[parentPart] , identifier[childPath] , identifier[count] , identifier[index] ): literal[string] keyword[if] identifier[index] == keyword[None] : identifier[index] = literal[int] keyword[if] identifier[index] == identifier[len] ( identifier[childPath] ): keyword[return] identifier[c] = identifier[childPath] [ identifier[index] ] identifier[parentPart] . identifier[count] = identifier[coalesce] ( identifier[parentPart] . identifier[count] , literal[int] )+ identifier[count] keyword[if] identifier[parentPart] . identifier[partitions] == keyword[None] : identifier[parentPart] . identifier[partitions] = identifier[FlatList] () keyword[for] identifier[i] , identifier[part] keyword[in] identifier[enumerate] ( identifier[parentPart] . identifier[partitions] ): keyword[if] identifier[part] . identifier[name] == identifier[c] . identifier[name] : identifier[addParts] ( identifier[part] , identifier[childPath] , identifier[count] , identifier[index] + literal[int] ) keyword[return] identifier[parentPart] . identifier[partitions] . identifier[append] ( identifier[c] ) identifier[addParts] ( identifier[c] , identifier[childPath] , identifier[count] , identifier[index] + literal[int] )
def addParts(parentPart, childPath, count, index): """ BUILD A hierarchy BY REPEATEDLY CALLING self METHOD WITH VARIOUS childPaths count IS THE NUMBER FOUND FOR self PATH """ if index == None: index = 0 # depends on [control=['if'], data=['index']] if index == len(childPath): return # depends on [control=['if'], data=[]] c = childPath[index] parentPart.count = coalesce(parentPart.count, 0) + count if parentPart.partitions == None: parentPart.partitions = FlatList() # depends on [control=['if'], data=[]] for (i, part) in enumerate(parentPart.partitions): if part.name == c.name: addParts(part, childPath, count, index + 1) return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] parentPart.partitions.append(c) addParts(c, childPath, count, index + 1)
def getvalue(x): """Return the single value of x or raise TypError if more than one value.""" if isrepeating(x): raise TypeError( "Ambiguous call to getvalue for %r which has more than one value." % x) for value in getvalues(x): return value
def function[getvalue, parameter[x]]: constant[Return the single value of x or raise TypError if more than one value.] if call[name[isrepeating], parameter[name[x]]] begin[:] <ast.Raise object at 0x7da1b0ffb9d0> for taget[name[value]] in starred[call[name[getvalues], parameter[name[x]]]] begin[:] return[name[value]]
keyword[def] identifier[getvalue] ( identifier[x] ): literal[string] keyword[if] identifier[isrepeating] ( identifier[x] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[x] ) keyword[for] identifier[value] keyword[in] identifier[getvalues] ( identifier[x] ): keyword[return] identifier[value]
def getvalue(x): """Return the single value of x or raise TypError if more than one value.""" if isrepeating(x): raise TypeError('Ambiguous call to getvalue for %r which has more than one value.' % x) # depends on [control=['if'], data=[]] for value in getvalues(x): return value # depends on [control=['for'], data=['value']]
def getInstanceMetrics( self, forAllExcept: int) -> Tuple[Optional[int], Optional[float]]: """ Calculate and return the average throughput of all the instances except the one specified as `forAllExcept`. """ m = [(reqs, tm) for i, (reqs, tm) in self.numOrderedRequests.items() if i != forAllExcept] if m: reqs, tm = zip(*m) return sum(reqs), sum(tm) else: return None, None
def function[getInstanceMetrics, parameter[self, forAllExcept]]: constant[ Calculate and return the average throughput of all the instances except the one specified as `forAllExcept`. ] variable[m] assign[=] <ast.ListComp object at 0x7da2054a5f30> if name[m] begin[:] <ast.Tuple object at 0x7da2054a4640> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da2054a66e0>]] return[tuple[[<ast.Call object at 0x7da2054a5bd0>, <ast.Call object at 0x7da2054a6470>]]]
keyword[def] identifier[getInstanceMetrics] ( identifier[self] , identifier[forAllExcept] : identifier[int] )-> identifier[Tuple] [ identifier[Optional] [ identifier[int] ], identifier[Optional] [ identifier[float] ]]: literal[string] identifier[m] =[( identifier[reqs] , identifier[tm] ) keyword[for] identifier[i] ,( identifier[reqs] , identifier[tm] ) keyword[in] identifier[self] . identifier[numOrderedRequests] . identifier[items] () keyword[if] identifier[i] != identifier[forAllExcept] ] keyword[if] identifier[m] : identifier[reqs] , identifier[tm] = identifier[zip] (* identifier[m] ) keyword[return] identifier[sum] ( identifier[reqs] ), identifier[sum] ( identifier[tm] ) keyword[else] : keyword[return] keyword[None] , keyword[None]
def getInstanceMetrics(self, forAllExcept: int) -> Tuple[Optional[int], Optional[float]]: """ Calculate and return the average throughput of all the instances except the one specified as `forAllExcept`. """ m = [(reqs, tm) for (i, (reqs, tm)) in self.numOrderedRequests.items() if i != forAllExcept] if m: (reqs, tm) = zip(*m) return (sum(reqs), sum(tm)) # depends on [control=['if'], data=[]] else: return (None, None)
def export_schema_to_dict(back_references): """Exports the supported import/export schema to a dictionary""" databases = [Database.export_schema(recursive=True, include_parent_ref=back_references)] clusters = [DruidCluster.export_schema(recursive=True, include_parent_ref=back_references)] data = dict() if databases: data[DATABASES_KEY] = databases if clusters: data[DRUID_CLUSTERS_KEY] = clusters return data
def function[export_schema_to_dict, parameter[back_references]]: constant[Exports the supported import/export schema to a dictionary] variable[databases] assign[=] list[[<ast.Call object at 0x7da1b2078280>]] variable[clusters] assign[=] list[[<ast.Call object at 0x7da1b2078490>]] variable[data] assign[=] call[name[dict], parameter[]] if name[databases] begin[:] call[name[data]][name[DATABASES_KEY]] assign[=] name[databases] if name[clusters] begin[:] call[name[data]][name[DRUID_CLUSTERS_KEY]] assign[=] name[clusters] return[name[data]]
keyword[def] identifier[export_schema_to_dict] ( identifier[back_references] ): literal[string] identifier[databases] =[ identifier[Database] . identifier[export_schema] ( identifier[recursive] = keyword[True] , identifier[include_parent_ref] = identifier[back_references] )] identifier[clusters] =[ identifier[DruidCluster] . identifier[export_schema] ( identifier[recursive] = keyword[True] , identifier[include_parent_ref] = identifier[back_references] )] identifier[data] = identifier[dict] () keyword[if] identifier[databases] : identifier[data] [ identifier[DATABASES_KEY] ]= identifier[databases] keyword[if] identifier[clusters] : identifier[data] [ identifier[DRUID_CLUSTERS_KEY] ]= identifier[clusters] keyword[return] identifier[data]
def export_schema_to_dict(back_references): """Exports the supported import/export schema to a dictionary""" databases = [Database.export_schema(recursive=True, include_parent_ref=back_references)] clusters = [DruidCluster.export_schema(recursive=True, include_parent_ref=back_references)] data = dict() if databases: data[DATABASES_KEY] = databases # depends on [control=['if'], data=[]] if clusters: data[DRUID_CLUSTERS_KEY] = clusters # depends on [control=['if'], data=[]] return data
def show(ctx): """ Show migrations list """ for app_name, app in ctx.obj['config']['apps'].items(): click.echo(click.style(app_name, fg='green', bold=True)) for migration in app['migrations']: applied = ctx.obj['db'].is_migration_applied(app_name, migration) click.echo(' {0} {1}'.format(migration, click.style('(applied)', bold=True) if applied else ''))
def function[show, parameter[ctx]]: constant[ Show migrations list ] for taget[tuple[[<ast.Name object at 0x7da1b092c7c0>, <ast.Name object at 0x7da1b092f010>]]] in starred[call[call[call[name[ctx].obj][constant[config]]][constant[apps]].items, parameter[]]] begin[:] call[name[click].echo, parameter[call[name[click].style, parameter[name[app_name]]]]] for taget[name[migration]] in starred[call[name[app]][constant[migrations]]] begin[:] variable[applied] assign[=] call[call[name[ctx].obj][constant[db]].is_migration_applied, parameter[name[app_name], name[migration]]] call[name[click].echo, parameter[call[constant[ {0} {1}].format, parameter[name[migration], <ast.IfExp object at 0x7da1b092c3d0>]]]]
keyword[def] identifier[show] ( identifier[ctx] ): literal[string] keyword[for] identifier[app_name] , identifier[app] keyword[in] identifier[ctx] . identifier[obj] [ literal[string] ][ literal[string] ]. identifier[items] (): identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( identifier[app_name] , identifier[fg] = literal[string] , identifier[bold] = keyword[True] )) keyword[for] identifier[migration] keyword[in] identifier[app] [ literal[string] ]: identifier[applied] = identifier[ctx] . identifier[obj] [ literal[string] ]. identifier[is_migration_applied] ( identifier[app_name] , identifier[migration] ) identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[migration] , identifier[click] . identifier[style] ( literal[string] , identifier[bold] = keyword[True] ) keyword[if] identifier[applied] keyword[else] literal[string] ))
def show(ctx): """ Show migrations list """ for (app_name, app) in ctx.obj['config']['apps'].items(): click.echo(click.style(app_name, fg='green', bold=True)) for migration in app['migrations']: applied = ctx.obj['db'].is_migration_applied(app_name, migration) click.echo(' {0} {1}'.format(migration, click.style('(applied)', bold=True) if applied else '')) # depends on [control=['for'], data=['migration']] # depends on [control=['for'], data=[]]
def _load_int(self): """Load internal data from file and return it.""" values = numpy.fromfile(self.filepath_int) if self.NDIM > 0: values = values.reshape(self.seriesshape) return values
def function[_load_int, parameter[self]]: constant[Load internal data from file and return it.] variable[values] assign[=] call[name[numpy].fromfile, parameter[name[self].filepath_int]] if compare[name[self].NDIM greater[>] constant[0]] begin[:] variable[values] assign[=] call[name[values].reshape, parameter[name[self].seriesshape]] return[name[values]]
keyword[def] identifier[_load_int] ( identifier[self] ): literal[string] identifier[values] = identifier[numpy] . identifier[fromfile] ( identifier[self] . identifier[filepath_int] ) keyword[if] identifier[self] . identifier[NDIM] > literal[int] : identifier[values] = identifier[values] . identifier[reshape] ( identifier[self] . identifier[seriesshape] ) keyword[return] identifier[values]
def _load_int(self): """Load internal data from file and return it.""" values = numpy.fromfile(self.filepath_int) if self.NDIM > 0: values = values.reshape(self.seriesshape) # depends on [control=['if'], data=[]] return values
def checkout(self, ref, cb=None): """Checkout a bundle from the remote. Returns a file-like object""" if self.is_api: return self._checkout_api(ref, cb=cb) else: return self._checkout_fs(ref, cb=cb)
def function[checkout, parameter[self, ref, cb]]: constant[Checkout a bundle from the remote. Returns a file-like object] if name[self].is_api begin[:] return[call[name[self]._checkout_api, parameter[name[ref]]]]
keyword[def] identifier[checkout] ( identifier[self] , identifier[ref] , identifier[cb] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[is_api] : keyword[return] identifier[self] . identifier[_checkout_api] ( identifier[ref] , identifier[cb] = identifier[cb] ) keyword[else] : keyword[return] identifier[self] . identifier[_checkout_fs] ( identifier[ref] , identifier[cb] = identifier[cb] )
def checkout(self, ref, cb=None): """Checkout a bundle from the remote. Returns a file-like object""" if self.is_api: return self._checkout_api(ref, cb=cb) # depends on [control=['if'], data=[]] else: return self._checkout_fs(ref, cb=cb)
def cols_to_dt(df, col_list,set_format = None,infer_format = True,dest = False): """ Coerces a list of columns to datetime Parameters: df - DataFrame DataFrame to operate on col_list - list of strings names of columns to coerce dest - bool, default False Whether to apply the result to the DataFrame or return it. True is apply, False is return. """ if not dest: return _pd.DataFrame({col_name:col_to_dt(df,col_name,set_format,infer_format) for col_name in col_list}) for col_name in col_list: col_to_dt(df,col_name,set_format,infer_format,dest)
def function[cols_to_dt, parameter[df, col_list, set_format, infer_format, dest]]: constant[ Coerces a list of columns to datetime Parameters: df - DataFrame DataFrame to operate on col_list - list of strings names of columns to coerce dest - bool, default False Whether to apply the result to the DataFrame or return it. True is apply, False is return. ] if <ast.UnaryOp object at 0x7da18f09d000> begin[:] return[call[name[_pd].DataFrame, parameter[<ast.DictComp object at 0x7da18f09f580>]]] for taget[name[col_name]] in starred[name[col_list]] begin[:] call[name[col_to_dt], parameter[name[df], name[col_name], name[set_format], name[infer_format], name[dest]]]
keyword[def] identifier[cols_to_dt] ( identifier[df] , identifier[col_list] , identifier[set_format] = keyword[None] , identifier[infer_format] = keyword[True] , identifier[dest] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[dest] : keyword[return] identifier[_pd] . identifier[DataFrame] ({ identifier[col_name] : identifier[col_to_dt] ( identifier[df] , identifier[col_name] , identifier[set_format] , identifier[infer_format] ) keyword[for] identifier[col_name] keyword[in] identifier[col_list] }) keyword[for] identifier[col_name] keyword[in] identifier[col_list] : identifier[col_to_dt] ( identifier[df] , identifier[col_name] , identifier[set_format] , identifier[infer_format] , identifier[dest] )
def cols_to_dt(df, col_list, set_format=None, infer_format=True, dest=False): """ Coerces a list of columns to datetime Parameters: df - DataFrame DataFrame to operate on col_list - list of strings names of columns to coerce dest - bool, default False Whether to apply the result to the DataFrame or return it. True is apply, False is return. """ if not dest: return _pd.DataFrame({col_name: col_to_dt(df, col_name, set_format, infer_format) for col_name in col_list}) # depends on [control=['if'], data=[]] for col_name in col_list: col_to_dt(df, col_name, set_format, infer_format, dest) # depends on [control=['for'], data=['col_name']]
def complete(self): """ When *local_workflow_require_branches* of the task was set to *True*, returns whether the :py:meth:`run` method has been called before. Otherwise, the call is forwarded to the super class. """ if self.task.local_workflow_require_branches: return self._has_run else: return super(LocalWorkflowProxy, self).complete()
def function[complete, parameter[self]]: constant[ When *local_workflow_require_branches* of the task was set to *True*, returns whether the :py:meth:`run` method has been called before. Otherwise, the call is forwarded to the super class. ] if name[self].task.local_workflow_require_branches begin[:] return[name[self]._has_run]
keyword[def] identifier[complete] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[task] . identifier[local_workflow_require_branches] : keyword[return] identifier[self] . identifier[_has_run] keyword[else] : keyword[return] identifier[super] ( identifier[LocalWorkflowProxy] , identifier[self] ). identifier[complete] ()
def complete(self): """ When *local_workflow_require_branches* of the task was set to *True*, returns whether the :py:meth:`run` method has been called before. Otherwise, the call is forwarded to the super class. """ if self.task.local_workflow_require_branches: return self._has_run # depends on [control=['if'], data=[]] else: return super(LocalWorkflowProxy, self).complete()
def add_padding(self, name, left = 0, right = 0, top = 0, bottom = 0, value = 0, input_name = 'data', output_name = 'out', padding_type = 'constant'): """ Add a padding layer to the model. Kindly refer to NeuralNetwork.proto for details. Parameters ---------- name: str The name of this layer. left: int Number of elements to be padded on the left side of the input blob. right: int Number of elements to be padded on the right side of the input blob. top: int Number of elements to be padded on the top of the input blob. bottom: int Number of elements to be padded on the bottom of the input blob. value: float Value of the elements padded. Used only when padding_type = 'constant' input_name: str The input blob name of this layer. output_name: str The output blob name of this layer. padding_type: str Type of the padding. Can be one of 'constant', 'reflection' or 'replication' See Also -------- add_crop, add_convolution, add_pooling """ # Currently only constant padding is supported. spec = self.spec nn_spec = self.nn_spec # Add a new layer spec_layer = nn_spec.layers.add() spec_layer.name = name spec_layer.input.append(input_name) spec_layer.output.append(output_name) spec_layer_params = spec_layer.padding # Set the parameters if padding_type == 'constant': spec_layer_params.constant.value = value elif padding_type == 'reflection': spec_layer_params.reflection.MergeFromString(b'') elif padding_type == 'replication': spec_layer_params.replication.MergeFromString(b'') else: raise ValueError("Unknown padding_type %s" %(padding_type)) height_border = spec_layer_params.paddingAmounts.borderAmounts.add() height_border.startEdgeSize = top height_border.endEdgeSize = bottom width_border = spec_layer_params.paddingAmounts.borderAmounts.add() width_border.startEdgeSize = left width_border.endEdgeSize = right
def function[add_padding, parameter[self, name, left, right, top, bottom, value, input_name, output_name, padding_type]]: constant[ Add a padding layer to the model. Kindly refer to NeuralNetwork.proto for details. Parameters ---------- name: str The name of this layer. left: int Number of elements to be padded on the left side of the input blob. right: int Number of elements to be padded on the right side of the input blob. top: int Number of elements to be padded on the top of the input blob. bottom: int Number of elements to be padded on the bottom of the input blob. value: float Value of the elements padded. Used only when padding_type = 'constant' input_name: str The input blob name of this layer. output_name: str The output blob name of this layer. padding_type: str Type of the padding. Can be one of 'constant', 'reflection' or 'replication' See Also -------- add_crop, add_convolution, add_pooling ] variable[spec] assign[=] name[self].spec variable[nn_spec] assign[=] name[self].nn_spec variable[spec_layer] assign[=] call[name[nn_spec].layers.add, parameter[]] name[spec_layer].name assign[=] name[name] call[name[spec_layer].input.append, parameter[name[input_name]]] call[name[spec_layer].output.append, parameter[name[output_name]]] variable[spec_layer_params] assign[=] name[spec_layer].padding if compare[name[padding_type] equal[==] constant[constant]] begin[:] name[spec_layer_params].constant.value assign[=] name[value] variable[height_border] assign[=] call[name[spec_layer_params].paddingAmounts.borderAmounts.add, parameter[]] name[height_border].startEdgeSize assign[=] name[top] name[height_border].endEdgeSize assign[=] name[bottom] variable[width_border] assign[=] call[name[spec_layer_params].paddingAmounts.borderAmounts.add, parameter[]] name[width_border].startEdgeSize assign[=] name[left] name[width_border].endEdgeSize assign[=] name[right]
keyword[def] identifier[add_padding] ( identifier[self] , identifier[name] , identifier[left] = literal[int] , identifier[right] = literal[int] , identifier[top] = literal[int] , identifier[bottom] = literal[int] , identifier[value] = literal[int] , identifier[input_name] = literal[string] , identifier[output_name] = literal[string] , identifier[padding_type] = literal[string] ): literal[string] identifier[spec] = identifier[self] . identifier[spec] identifier[nn_spec] = identifier[self] . identifier[nn_spec] identifier[spec_layer] = identifier[nn_spec] . identifier[layers] . identifier[add] () identifier[spec_layer] . identifier[name] = identifier[name] identifier[spec_layer] . identifier[input] . identifier[append] ( identifier[input_name] ) identifier[spec_layer] . identifier[output] . identifier[append] ( identifier[output_name] ) identifier[spec_layer_params] = identifier[spec_layer] . identifier[padding] keyword[if] identifier[padding_type] == literal[string] : identifier[spec_layer_params] . identifier[constant] . identifier[value] = identifier[value] keyword[elif] identifier[padding_type] == literal[string] : identifier[spec_layer_params] . identifier[reflection] . identifier[MergeFromString] ( literal[string] ) keyword[elif] identifier[padding_type] == literal[string] : identifier[spec_layer_params] . identifier[replication] . identifier[MergeFromString] ( literal[string] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[padding_type] )) identifier[height_border] = identifier[spec_layer_params] . identifier[paddingAmounts] . identifier[borderAmounts] . identifier[add] () identifier[height_border] . identifier[startEdgeSize] = identifier[top] identifier[height_border] . identifier[endEdgeSize] = identifier[bottom] identifier[width_border] = identifier[spec_layer_params] . identifier[paddingAmounts] . identifier[borderAmounts] . identifier[add] () identifier[width_border] . identifier[startEdgeSize] = identifier[left] identifier[width_border] . identifier[endEdgeSize] = identifier[right]
def add_padding(self, name, left=0, right=0, top=0, bottom=0, value=0, input_name='data', output_name='out', padding_type='constant'): """ Add a padding layer to the model. Kindly refer to NeuralNetwork.proto for details. Parameters ---------- name: str The name of this layer. left: int Number of elements to be padded on the left side of the input blob. right: int Number of elements to be padded on the right side of the input blob. top: int Number of elements to be padded on the top of the input blob. bottom: int Number of elements to be padded on the bottom of the input blob. value: float Value of the elements padded. Used only when padding_type = 'constant' input_name: str The input blob name of this layer. output_name: str The output blob name of this layer. padding_type: str Type of the padding. Can be one of 'constant', 'reflection' or 'replication' See Also -------- add_crop, add_convolution, add_pooling """ # Currently only constant padding is supported. spec = self.spec nn_spec = self.nn_spec # Add a new layer spec_layer = nn_spec.layers.add() spec_layer.name = name spec_layer.input.append(input_name) spec_layer.output.append(output_name) spec_layer_params = spec_layer.padding # Set the parameters if padding_type == 'constant': spec_layer_params.constant.value = value # depends on [control=['if'], data=[]] elif padding_type == 'reflection': spec_layer_params.reflection.MergeFromString(b'') # depends on [control=['if'], data=[]] elif padding_type == 'replication': spec_layer_params.replication.MergeFromString(b'') # depends on [control=['if'], data=[]] else: raise ValueError('Unknown padding_type %s' % padding_type) height_border = spec_layer_params.paddingAmounts.borderAmounts.add() height_border.startEdgeSize = top height_border.endEdgeSize = bottom width_border = spec_layer_params.paddingAmounts.borderAmounts.add() width_border.startEdgeSize = left width_border.endEdgeSize = right
def get_conv_params(mass1, mass2, spin1z, spin2z, metricParams, fUpper, lambda1=None, lambda2=None, quadparam1=None, quadparam2=None): """ Function to convert between masses and spins and locations in the mu parameter space. Mu = Cartesian metric, but not principal components. Parameters ----------- mass1 : float Mass of heavier body. mass2 : float Mass of lighter body. spin1z : float Spin of body 1. spin2z : float Spin of body 2. metricParams : metricParameters instance Structure holding all the options for construction of the metric and the eigenvalues, eigenvectors and covariance matrix needed to manipulate the space. fUpper : float The value of fUpper to use when getting the mu coordinates from the lambda coordinates. This must be a key in metricParams.evals and metricParams.evecs (ie. we must know how to do the transformation for the given value of fUpper) Returns -------- mus : list of floats or numpy.arrays Position of the system(s) in the mu coordinate system """ # Do this by masses -> lambdas lambdas = get_chirp_params(mass1, mass2, spin1z, spin2z, metricParams.f0, metricParams.pnOrder, lambda1=lambda1, lambda2=lambda2, quadparam1=quadparam1, quadparam2=quadparam2) # and lambdas -> mus mus = get_mu_params(lambdas, metricParams, fUpper) return mus
def function[get_conv_params, parameter[mass1, mass2, spin1z, spin2z, metricParams, fUpper, lambda1, lambda2, quadparam1, quadparam2]]: constant[ Function to convert between masses and spins and locations in the mu parameter space. Mu = Cartesian metric, but not principal components. Parameters ----------- mass1 : float Mass of heavier body. mass2 : float Mass of lighter body. spin1z : float Spin of body 1. spin2z : float Spin of body 2. metricParams : metricParameters instance Structure holding all the options for construction of the metric and the eigenvalues, eigenvectors and covariance matrix needed to manipulate the space. fUpper : float The value of fUpper to use when getting the mu coordinates from the lambda coordinates. This must be a key in metricParams.evals and metricParams.evecs (ie. we must know how to do the transformation for the given value of fUpper) Returns -------- mus : list of floats or numpy.arrays Position of the system(s) in the mu coordinate system ] variable[lambdas] assign[=] call[name[get_chirp_params], parameter[name[mass1], name[mass2], name[spin1z], name[spin2z], name[metricParams].f0, name[metricParams].pnOrder]] variable[mus] assign[=] call[name[get_mu_params], parameter[name[lambdas], name[metricParams], name[fUpper]]] return[name[mus]]
keyword[def] identifier[get_conv_params] ( identifier[mass1] , identifier[mass2] , identifier[spin1z] , identifier[spin2z] , identifier[metricParams] , identifier[fUpper] , identifier[lambda1] = keyword[None] , identifier[lambda2] = keyword[None] , identifier[quadparam1] = keyword[None] , identifier[quadparam2] = keyword[None] ): literal[string] identifier[lambdas] = identifier[get_chirp_params] ( identifier[mass1] , identifier[mass2] , identifier[spin1z] , identifier[spin2z] , identifier[metricParams] . identifier[f0] , identifier[metricParams] . identifier[pnOrder] , identifier[lambda1] = identifier[lambda1] , identifier[lambda2] = identifier[lambda2] , identifier[quadparam1] = identifier[quadparam1] , identifier[quadparam2] = identifier[quadparam2] ) identifier[mus] = identifier[get_mu_params] ( identifier[lambdas] , identifier[metricParams] , identifier[fUpper] ) keyword[return] identifier[mus]
def get_conv_params(mass1, mass2, spin1z, spin2z, metricParams, fUpper, lambda1=None, lambda2=None, quadparam1=None, quadparam2=None): """ Function to convert between masses and spins and locations in the mu parameter space. Mu = Cartesian metric, but not principal components. Parameters ----------- mass1 : float Mass of heavier body. mass2 : float Mass of lighter body. spin1z : float Spin of body 1. spin2z : float Spin of body 2. metricParams : metricParameters instance Structure holding all the options for construction of the metric and the eigenvalues, eigenvectors and covariance matrix needed to manipulate the space. fUpper : float The value of fUpper to use when getting the mu coordinates from the lambda coordinates. This must be a key in metricParams.evals and metricParams.evecs (ie. we must know how to do the transformation for the given value of fUpper) Returns -------- mus : list of floats or numpy.arrays Position of the system(s) in the mu coordinate system """ # Do this by masses -> lambdas lambdas = get_chirp_params(mass1, mass2, spin1z, spin2z, metricParams.f0, metricParams.pnOrder, lambda1=lambda1, lambda2=lambda2, quadparam1=quadparam1, quadparam2=quadparam2) # and lambdas -> mus mus = get_mu_params(lambdas, metricParams, fUpper) return mus
def _cutout(x, n_holes:uniform_int=1, length:uniform_int=40): "Cut out `n_holes` number of square holes of size `length` in image at random locations." h,w = x.shape[1:] for n in range(n_holes): h_y = np.random.randint(0, h) h_x = np.random.randint(0, w) y1 = int(np.clip(h_y - length / 2, 0, h)) y2 = int(np.clip(h_y + length / 2, 0, h)) x1 = int(np.clip(h_x - length / 2, 0, w)) x2 = int(np.clip(h_x + length / 2, 0, w)) x[:, y1:y2, x1:x2] = 0 return x
def function[_cutout, parameter[x, n_holes, length]]: constant[Cut out `n_holes` number of square holes of size `length` in image at random locations.] <ast.Tuple object at 0x7da1b1e11e70> assign[=] call[name[x].shape][<ast.Slice object at 0x7da1b1e11d50>] for taget[name[n]] in starred[call[name[range], parameter[name[n_holes]]]] begin[:] variable[h_y] assign[=] call[name[np].random.randint, parameter[constant[0], name[h]]] variable[h_x] assign[=] call[name[np].random.randint, parameter[constant[0], name[w]]] variable[y1] assign[=] call[name[int], parameter[call[name[np].clip, parameter[binary_operation[name[h_y] - binary_operation[name[length] / constant[2]]], constant[0], name[h]]]]] variable[y2] assign[=] call[name[int], parameter[call[name[np].clip, parameter[binary_operation[name[h_y] + binary_operation[name[length] / constant[2]]], constant[0], name[h]]]]] variable[x1] assign[=] call[name[int], parameter[call[name[np].clip, parameter[binary_operation[name[h_x] - binary_operation[name[length] / constant[2]]], constant[0], name[w]]]]] variable[x2] assign[=] call[name[int], parameter[call[name[np].clip, parameter[binary_operation[name[h_x] + binary_operation[name[length] / constant[2]]], constant[0], name[w]]]]] call[name[x]][tuple[[<ast.Slice object at 0x7da1b202b910>, <ast.Slice object at 0x7da1b202b370>, <ast.Slice object at 0x7da1b2028220>]]] assign[=] constant[0] return[name[x]]
keyword[def] identifier[_cutout] ( identifier[x] , identifier[n_holes] : identifier[uniform_int] = literal[int] , identifier[length] : identifier[uniform_int] = literal[int] ): literal[string] identifier[h] , identifier[w] = identifier[x] . identifier[shape] [ literal[int] :] keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[n_holes] ): identifier[h_y] = identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[h] ) identifier[h_x] = identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[w] ) identifier[y1] = identifier[int] ( identifier[np] . identifier[clip] ( identifier[h_y] - identifier[length] / literal[int] , literal[int] , identifier[h] )) identifier[y2] = identifier[int] ( identifier[np] . identifier[clip] ( identifier[h_y] + identifier[length] / literal[int] , literal[int] , identifier[h] )) identifier[x1] = identifier[int] ( identifier[np] . identifier[clip] ( identifier[h_x] - identifier[length] / literal[int] , literal[int] , identifier[w] )) identifier[x2] = identifier[int] ( identifier[np] . identifier[clip] ( identifier[h_x] + identifier[length] / literal[int] , literal[int] , identifier[w] )) identifier[x] [:, identifier[y1] : identifier[y2] , identifier[x1] : identifier[x2] ]= literal[int] keyword[return] identifier[x]
def _cutout(x, n_holes: uniform_int=1, length: uniform_int=40): """Cut out `n_holes` number of square holes of size `length` in image at random locations.""" (h, w) = x.shape[1:] for n in range(n_holes): h_y = np.random.randint(0, h) h_x = np.random.randint(0, w) y1 = int(np.clip(h_y - length / 2, 0, h)) y2 = int(np.clip(h_y + length / 2, 0, h)) x1 = int(np.clip(h_x - length / 2, 0, w)) x2 = int(np.clip(h_x + length / 2, 0, w)) x[:, y1:y2, x1:x2] = 0 # depends on [control=['for'], data=[]] return x
def _get_xml_value(value): """Convert an individual value to an XML string. Calls itself recursively for dictionaries and lists. Uses some heuristics to convert the data to XML: - In dictionaries, the keys become the tag name. - In lists the tag name is 'child' with an order-attribute giving the list index. - All other values are included as is. All values are escaped to fit into the XML document. :param value: The value to convert to XML. :type value: Any valid Python value :rtype: string """ retval = [] if isinstance(value, dict): for key, value in value.iteritems(): retval.append('<' + xml_escape(str(key)) + '>') retval.append(_get_xml_value(value)) retval.append('</' + xml_escape(str(key)) + '>') elif isinstance(value, list): for key, value in enumerate(value): retval.append('<child order="' + xml_escape(str(key)) + '">') retval.append(_get_xml_value(value)) retval.append('</child>') elif isinstance(value, bool): retval.append(xml_escape(str(value).lower())) elif isinstance(value, unicode): retval.append(xml_escape(value.encode('utf-8'))) else: retval.append(xml_escape(str(value))) return "".join(retval)
def function[_get_xml_value, parameter[value]]: constant[Convert an individual value to an XML string. Calls itself recursively for dictionaries and lists. Uses some heuristics to convert the data to XML: - In dictionaries, the keys become the tag name. - In lists the tag name is 'child' with an order-attribute giving the list index. - All other values are included as is. All values are escaped to fit into the XML document. :param value: The value to convert to XML. :type value: Any valid Python value :rtype: string ] variable[retval] assign[=] list[[]] if call[name[isinstance], parameter[name[value], name[dict]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b2369cf0>, <ast.Name object at 0x7da1b236b1f0>]]] in starred[call[name[value].iteritems, parameter[]]] begin[:] call[name[retval].append, parameter[binary_operation[binary_operation[constant[<] + call[name[xml_escape], parameter[call[name[str], parameter[name[key]]]]]] + constant[>]]]] call[name[retval].append, parameter[call[name[_get_xml_value], parameter[name[value]]]]] call[name[retval].append, parameter[binary_operation[binary_operation[constant[</] + call[name[xml_escape], parameter[call[name[str], parameter[name[key]]]]]] + constant[>]]]] return[call[constant[].join, parameter[name[retval]]]]
keyword[def] identifier[_get_xml_value] ( identifier[value] ): literal[string] identifier[retval] =[] keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[value] . identifier[iteritems] (): identifier[retval] . identifier[append] ( literal[string] + identifier[xml_escape] ( identifier[str] ( identifier[key] ))+ literal[string] ) identifier[retval] . identifier[append] ( identifier[_get_xml_value] ( identifier[value] )) identifier[retval] . identifier[append] ( literal[string] + identifier[xml_escape] ( identifier[str] ( identifier[key] ))+ literal[string] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[enumerate] ( identifier[value] ): identifier[retval] . identifier[append] ( literal[string] + identifier[xml_escape] ( identifier[str] ( identifier[key] ))+ literal[string] ) identifier[retval] . identifier[append] ( identifier[_get_xml_value] ( identifier[value] )) identifier[retval] . identifier[append] ( literal[string] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[bool] ): identifier[retval] . identifier[append] ( identifier[xml_escape] ( identifier[str] ( identifier[value] ). identifier[lower] ())) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[unicode] ): identifier[retval] . identifier[append] ( identifier[xml_escape] ( identifier[value] . identifier[encode] ( literal[string] ))) keyword[else] : identifier[retval] . identifier[append] ( identifier[xml_escape] ( identifier[str] ( identifier[value] ))) keyword[return] literal[string] . identifier[join] ( identifier[retval] )
def _get_xml_value(value): """Convert an individual value to an XML string. Calls itself recursively for dictionaries and lists. Uses some heuristics to convert the data to XML: - In dictionaries, the keys become the tag name. - In lists the tag name is 'child' with an order-attribute giving the list index. - All other values are included as is. All values are escaped to fit into the XML document. :param value: The value to convert to XML. :type value: Any valid Python value :rtype: string """ retval = [] if isinstance(value, dict): for (key, value) in value.iteritems(): retval.append('<' + xml_escape(str(key)) + '>') retval.append(_get_xml_value(value)) retval.append('</' + xml_escape(str(key)) + '>') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(value, list): for (key, value) in enumerate(value): retval.append('<child order="' + xml_escape(str(key)) + '">') retval.append(_get_xml_value(value)) retval.append('</child>') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(value, bool): retval.append(xml_escape(str(value).lower())) # depends on [control=['if'], data=[]] elif isinstance(value, unicode): retval.append(xml_escape(value.encode('utf-8'))) # depends on [control=['if'], data=[]] else: retval.append(xml_escape(str(value))) return ''.join(retval)
def entry_from_raw(self, rval: RawEntry, jptr: JSONPointer = "") -> EntryValue: """Transform a raw (leaf-)list entry into the cooked form. Args: rval: raw entry (scalar or object) jptr: JSON pointer of the entry Raises: NonexistentSchemaNode: If a member inside `rval` is not defined in the schema. RawTypeError: If a scalar value inside `rval` is of incorrect type. """ return super().from_raw(rval, jptr)
def function[entry_from_raw, parameter[self, rval, jptr]]: constant[Transform a raw (leaf-)list entry into the cooked form. Args: rval: raw entry (scalar or object) jptr: JSON pointer of the entry Raises: NonexistentSchemaNode: If a member inside `rval` is not defined in the schema. RawTypeError: If a scalar value inside `rval` is of incorrect type. ] return[call[call[name[super], parameter[]].from_raw, parameter[name[rval], name[jptr]]]]
keyword[def] identifier[entry_from_raw] ( identifier[self] , identifier[rval] : identifier[RawEntry] , identifier[jptr] : identifier[JSONPointer] = literal[string] )-> identifier[EntryValue] : literal[string] keyword[return] identifier[super] (). identifier[from_raw] ( identifier[rval] , identifier[jptr] )
def entry_from_raw(self, rval: RawEntry, jptr: JSONPointer='') -> EntryValue: """Transform a raw (leaf-)list entry into the cooked form. Args: rval: raw entry (scalar or object) jptr: JSON pointer of the entry Raises: NonexistentSchemaNode: If a member inside `rval` is not defined in the schema. RawTypeError: If a scalar value inside `rval` is of incorrect type. """ return super().from_raw(rval, jptr)
def is_valid(self, qstr=None): """Return True if string is valid""" if qstr is None: qstr = self.currentText() return QUrl(qstr).isValid()
def function[is_valid, parameter[self, qstr]]: constant[Return True if string is valid] if compare[name[qstr] is constant[None]] begin[:] variable[qstr] assign[=] call[name[self].currentText, parameter[]] return[call[call[name[QUrl], parameter[name[qstr]]].isValid, parameter[]]]
keyword[def] identifier[is_valid] ( identifier[self] , identifier[qstr] = keyword[None] ): literal[string] keyword[if] identifier[qstr] keyword[is] keyword[None] : identifier[qstr] = identifier[self] . identifier[currentText] () keyword[return] identifier[QUrl] ( identifier[qstr] ). identifier[isValid] ()
def is_valid(self, qstr=None): """Return True if string is valid""" if qstr is None: qstr = self.currentText() # depends on [control=['if'], data=['qstr']] return QUrl(qstr).isValid()
def load_external_class(python_file, base_class): """ Returns a tuple: (subclass of base_class, module) """ loaded_module = load_external_module(python_file) # Find a class that extends base_class loaded_class = extract_class(loaded_module, base_class) return loaded_class, loaded_module
def function[load_external_class, parameter[python_file, base_class]]: constant[ Returns a tuple: (subclass of base_class, module) ] variable[loaded_module] assign[=] call[name[load_external_module], parameter[name[python_file]]] variable[loaded_class] assign[=] call[name[extract_class], parameter[name[loaded_module], name[base_class]]] return[tuple[[<ast.Name object at 0x7da204620700>, <ast.Name object at 0x7da204620460>]]]
keyword[def] identifier[load_external_class] ( identifier[python_file] , identifier[base_class] ): literal[string] identifier[loaded_module] = identifier[load_external_module] ( identifier[python_file] ) identifier[loaded_class] = identifier[extract_class] ( identifier[loaded_module] , identifier[base_class] ) keyword[return] identifier[loaded_class] , identifier[loaded_module]
def load_external_class(python_file, base_class): """ Returns a tuple: (subclass of base_class, module) """ loaded_module = load_external_module(python_file) # Find a class that extends base_class loaded_class = extract_class(loaded_module, base_class) return (loaded_class, loaded_module)
def plot(self, wavelengths=None, flux_unit=None, area=None, vegaspec=None, **kwargs): # pragma: no cover """Plot the spectrum. .. note:: Uses :mod:`matplotlib`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit for plotting. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict See :func:`BaseSpectrum.plot`. Raises ------ synphot.exceptions.SynphotError Invalid inputs. """ w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area, vegaspec=vegaspec) self._do_plot(w, y, **kwargs)
def function[plot, parameter[self, wavelengths, flux_unit, area, vegaspec]]: constant[Plot the spectrum. .. note:: Uses :mod:`matplotlib`. Parameters ---------- wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None` Wavelength values for integration. If not a Quantity, assumed to be in Angstrom. If `None`, ``self.waveset`` is used. flux_unit : str or `~astropy.units.core.Unit` or `None` Flux is converted to this unit for plotting. If not given, internal unit is used. area, vegaspec See :func:`~synphot.units.convert_flux`. kwargs : dict See :func:`BaseSpectrum.plot`. Raises ------ synphot.exceptions.SynphotError Invalid inputs. ] <ast.Tuple object at 0x7da18f811690> assign[=] call[name[self]._get_arrays, parameter[name[wavelengths]]] call[name[self]._do_plot, parameter[name[w], name[y]]]
keyword[def] identifier[plot] ( identifier[self] , identifier[wavelengths] = keyword[None] , identifier[flux_unit] = keyword[None] , identifier[area] = keyword[None] , identifier[vegaspec] = keyword[None] , ** identifier[kwargs] ): literal[string] identifier[w] , identifier[y] = identifier[self] . identifier[_get_arrays] ( identifier[wavelengths] , identifier[flux_unit] = identifier[flux_unit] , identifier[area] = identifier[area] , identifier[vegaspec] = identifier[vegaspec] ) identifier[self] . identifier[_do_plot] ( identifier[w] , identifier[y] ,** identifier[kwargs] )
def plot(self, wavelengths=None, flux_unit=None, area=None, vegaspec=None, **kwargs): # pragma: no cover 'Plot the spectrum.\n\n .. note:: Uses :mod:`matplotlib`.\n\n Parameters\n ----------\n wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`\n Wavelength values for integration.\n If not a Quantity, assumed to be in Angstrom.\n If `None`, ``self.waveset`` is used.\n\n flux_unit : str or `~astropy.units.core.Unit` or `None`\n Flux is converted to this unit for plotting.\n If not given, internal unit is used.\n\n area, vegaspec\n See :func:`~synphot.units.convert_flux`.\n\n kwargs : dict\n See :func:`BaseSpectrum.plot`.\n\n Raises\n ------\n synphot.exceptions.SynphotError\n Invalid inputs.\n\n ' (w, y) = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area, vegaspec=vegaspec) self._do_plot(w, y, **kwargs)
def Runtime_setCustomObjectFormatterEnabled(self, enabled): """ Function path: Runtime.setCustomObjectFormatterEnabled Domain: Runtime Method name: setCustomObjectFormatterEnabled WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'enabled' (type: boolean) -> No description No return value. """ assert isinstance(enabled, (bool,) ), "Argument 'enabled' must be of type '['bool']'. Received type: '%s'" % type( enabled) subdom_funcs = self.synchronous_command( 'Runtime.setCustomObjectFormatterEnabled', enabled=enabled) return subdom_funcs
def function[Runtime_setCustomObjectFormatterEnabled, parameter[self, enabled]]: constant[ Function path: Runtime.setCustomObjectFormatterEnabled Domain: Runtime Method name: setCustomObjectFormatterEnabled WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'enabled' (type: boolean) -> No description No return value. ] assert[call[name[isinstance], parameter[name[enabled], tuple[[<ast.Name object at 0x7da1b1106410>]]]]] variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Runtime.setCustomObjectFormatterEnabled]]] return[name[subdom_funcs]]
keyword[def] identifier[Runtime_setCustomObjectFormatterEnabled] ( identifier[self] , identifier[enabled] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[enabled] ,( identifier[bool] ,) ), literal[string] % identifier[type] ( identifier[enabled] ) identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] , identifier[enabled] = identifier[enabled] ) keyword[return] identifier[subdom_funcs]
def Runtime_setCustomObjectFormatterEnabled(self, enabled): """ Function path: Runtime.setCustomObjectFormatterEnabled Domain: Runtime Method name: setCustomObjectFormatterEnabled WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'enabled' (type: boolean) -> No description No return value. """ assert isinstance(enabled, (bool,)), "Argument 'enabled' must be of type '['bool']'. Received type: '%s'" % type(enabled) subdom_funcs = self.synchronous_command('Runtime.setCustomObjectFormatterEnabled', enabled=enabled) return subdom_funcs
def expand_indicators(indicator): """Process indicators expanding file hashes/custom indicators into multiple entries. Args: indicator (string): " : " delimited string Returns: (list): a list of indicators split on " : ". """ if indicator.count(' : ') > 0: # handle all multi-valued indicators types (file hashes and custom indicators) indicator_list = [] # group 1 - lazy capture everything to first <space>:<space> or end of line iregx_pattern = r'^(.*?(?=\s\:\s|$))?' iregx_pattern += r'(?:\s\:\s)?' # remove <space>:<space> # group 2 - look behind for <space>:<space>, lazy capture everything # to look ahead (optional <space>):<space> or end of line iregx_pattern += r'((?<=\s\:\s).*?(?=(?:\s)?\:\s|$))?' iregx_pattern += r'(?:(?:\s)?\:\s)?' # remove (optional <space>):<space> # group 3 - look behind for <space>:<space>, lazy capture everything # to look ahead end of line iregx_pattern += r'((?<=\s\:\s).*?(?=$))?$' iregx = re.compile(iregx_pattern) indicators = iregx.search(indicator) if indicators is not None: indicator_list = list(indicators.groups()) else: # handle all single valued indicator types (address, host, etc) indicator_list = [indicator] return indicator_list
def function[expand_indicators, parameter[indicator]]: constant[Process indicators expanding file hashes/custom indicators into multiple entries. Args: indicator (string): " : " delimited string Returns: (list): a list of indicators split on " : ". ] if compare[call[name[indicator].count, parameter[constant[ : ]]] greater[>] constant[0]] begin[:] variable[indicator_list] assign[=] list[[]] variable[iregx_pattern] assign[=] constant[^(.*?(?=\s\:\s|$))?] <ast.AugAssign object at 0x7da207f01120> <ast.AugAssign object at 0x7da207f018a0> <ast.AugAssign object at 0x7da207f00f10> <ast.AugAssign object at 0x7da207f00a30> variable[iregx] assign[=] call[name[re].compile, parameter[name[iregx_pattern]]] variable[indicators] assign[=] call[name[iregx].search, parameter[name[indicator]]] if compare[name[indicators] is_not constant[None]] begin[:] variable[indicator_list] assign[=] call[name[list], parameter[call[name[indicators].groups, parameter[]]]] return[name[indicator_list]]
keyword[def] identifier[expand_indicators] ( identifier[indicator] ): literal[string] keyword[if] identifier[indicator] . identifier[count] ( literal[string] )> literal[int] : identifier[indicator_list] =[] identifier[iregx_pattern] = literal[string] identifier[iregx_pattern] += literal[string] identifier[iregx_pattern] += literal[string] identifier[iregx_pattern] += literal[string] identifier[iregx_pattern] += literal[string] identifier[iregx] = identifier[re] . identifier[compile] ( identifier[iregx_pattern] ) identifier[indicators] = identifier[iregx] . identifier[search] ( identifier[indicator] ) keyword[if] identifier[indicators] keyword[is] keyword[not] keyword[None] : identifier[indicator_list] = identifier[list] ( identifier[indicators] . identifier[groups] ()) keyword[else] : identifier[indicator_list] =[ identifier[indicator] ] keyword[return] identifier[indicator_list]
def expand_indicators(indicator): """Process indicators expanding file hashes/custom indicators into multiple entries. Args: indicator (string): " : " delimited string Returns: (list): a list of indicators split on " : ". """ if indicator.count(' : ') > 0: # handle all multi-valued indicators types (file hashes and custom indicators) indicator_list = [] # group 1 - lazy capture everything to first <space>:<space> or end of line iregx_pattern = '^(.*?(?=\\s\\:\\s|$))?' iregx_pattern += '(?:\\s\\:\\s)?' # remove <space>:<space> # group 2 - look behind for <space>:<space>, lazy capture everything # to look ahead (optional <space>):<space> or end of line iregx_pattern += '((?<=\\s\\:\\s).*?(?=(?:\\s)?\\:\\s|$))?' iregx_pattern += '(?:(?:\\s)?\\:\\s)?' # remove (optional <space>):<space> # group 3 - look behind for <space>:<space>, lazy capture everything # to look ahead end of line iregx_pattern += '((?<=\\s\\:\\s).*?(?=$))?$' iregx = re.compile(iregx_pattern) indicators = iregx.search(indicator) if indicators is not None: indicator_list = list(indicators.groups()) # depends on [control=['if'], data=['indicators']] # depends on [control=['if'], data=[]] else: # handle all single valued indicator types (address, host, etc) indicator_list = [indicator] return indicator_list
def zero_crossing_before(self, n): """Find nearest zero crossing in waveform before frame ``n``""" n_in_samples = int(n * self.samplerate) search_start = n_in_samples - self.samplerate if search_start < 0: search_start = 0 frame = zero_crossing_last( self.range_as_mono(search_start, n_in_samples)) + search_start return frame / float(self.samplerate)
def function[zero_crossing_before, parameter[self, n]]: constant[Find nearest zero crossing in waveform before frame ``n``] variable[n_in_samples] assign[=] call[name[int], parameter[binary_operation[name[n] * name[self].samplerate]]] variable[search_start] assign[=] binary_operation[name[n_in_samples] - name[self].samplerate] if compare[name[search_start] less[<] constant[0]] begin[:] variable[search_start] assign[=] constant[0] variable[frame] assign[=] binary_operation[call[name[zero_crossing_last], parameter[call[name[self].range_as_mono, parameter[name[search_start], name[n_in_samples]]]]] + name[search_start]] return[binary_operation[name[frame] / call[name[float], parameter[name[self].samplerate]]]]
keyword[def] identifier[zero_crossing_before] ( identifier[self] , identifier[n] ): literal[string] identifier[n_in_samples] = identifier[int] ( identifier[n] * identifier[self] . identifier[samplerate] ) identifier[search_start] = identifier[n_in_samples] - identifier[self] . identifier[samplerate] keyword[if] identifier[search_start] < literal[int] : identifier[search_start] = literal[int] identifier[frame] = identifier[zero_crossing_last] ( identifier[self] . identifier[range_as_mono] ( identifier[search_start] , identifier[n_in_samples] ))+ identifier[search_start] keyword[return] identifier[frame] / identifier[float] ( identifier[self] . identifier[samplerate] )
def zero_crossing_before(self, n): """Find nearest zero crossing in waveform before frame ``n``""" n_in_samples = int(n * self.samplerate) search_start = n_in_samples - self.samplerate if search_start < 0: search_start = 0 # depends on [control=['if'], data=['search_start']] frame = zero_crossing_last(self.range_as_mono(search_start, n_in_samples)) + search_start return frame / float(self.samplerate)
def decode(cls, root_element): """ Decode the object to the object :param root_element: the parsed xml Element :type root_element: xml.etree.ElementTree.Element :return: the decoded Element as object :rtype: object """ new_object = cls() field_names_to_attributes = new_object._get_field_names_to_attributes() for child_element in root_element: new_object._set_field(new_object, field_names_to_attributes, child_element) return new_object
def function[decode, parameter[cls, root_element]]: constant[ Decode the object to the object :param root_element: the parsed xml Element :type root_element: xml.etree.ElementTree.Element :return: the decoded Element as object :rtype: object ] variable[new_object] assign[=] call[name[cls], parameter[]] variable[field_names_to_attributes] assign[=] call[name[new_object]._get_field_names_to_attributes, parameter[]] for taget[name[child_element]] in starred[name[root_element]] begin[:] call[name[new_object]._set_field, parameter[name[new_object], name[field_names_to_attributes], name[child_element]]] return[name[new_object]]
keyword[def] identifier[decode] ( identifier[cls] , identifier[root_element] ): literal[string] identifier[new_object] = identifier[cls] () identifier[field_names_to_attributes] = identifier[new_object] . identifier[_get_field_names_to_attributes] () keyword[for] identifier[child_element] keyword[in] identifier[root_element] : identifier[new_object] . identifier[_set_field] ( identifier[new_object] , identifier[field_names_to_attributes] , identifier[child_element] ) keyword[return] identifier[new_object]
def decode(cls, root_element): """ Decode the object to the object :param root_element: the parsed xml Element :type root_element: xml.etree.ElementTree.Element :return: the decoded Element as object :rtype: object """ new_object = cls() field_names_to_attributes = new_object._get_field_names_to_attributes() for child_element in root_element: new_object._set_field(new_object, field_names_to_attributes, child_element) # depends on [control=['for'], data=['child_element']] return new_object
def intersectionlist_to_matrix(ilist, xterms, yterms): """ WILL BE DEPRECATED Replace with method to return pandas dataframe """ z = [ [0] * len(xterms) for i1 in range(len(yterms)) ] xmap = {} xi = 0 for x in xterms: xmap[x] = xi xi = xi+1 ymap = {} yi = 0 for y in yterms: ymap[y] = yi yi = yi+1 for i in ilist: z[ymap[i['y']]][xmap[i['x']]] = i['j'] logging.debug("Z={}".format(z)) return (z,xterms,yterms)
def function[intersectionlist_to_matrix, parameter[ilist, xterms, yterms]]: constant[ WILL BE DEPRECATED Replace with method to return pandas dataframe ] variable[z] assign[=] <ast.ListComp object at 0x7da20e954670> variable[xmap] assign[=] dictionary[[], []] variable[xi] assign[=] constant[0] for taget[name[x]] in starred[name[xterms]] begin[:] call[name[xmap]][name[x]] assign[=] name[xi] variable[xi] assign[=] binary_operation[name[xi] + constant[1]] variable[ymap] assign[=] dictionary[[], []] variable[yi] assign[=] constant[0] for taget[name[y]] in starred[name[yterms]] begin[:] call[name[ymap]][name[y]] assign[=] name[yi] variable[yi] assign[=] binary_operation[name[yi] + constant[1]] for taget[name[i]] in starred[name[ilist]] begin[:] call[call[name[z]][call[name[ymap]][call[name[i]][constant[y]]]]][call[name[xmap]][call[name[i]][constant[x]]]] assign[=] call[name[i]][constant[j]] call[name[logging].debug, parameter[call[constant[Z={}].format, parameter[name[z]]]]] return[tuple[[<ast.Name object at 0x7da1b08bb940>, <ast.Name object at 0x7da1b08b9f00>, <ast.Name object at 0x7da1b08bbca0>]]]
keyword[def] identifier[intersectionlist_to_matrix] ( identifier[ilist] , identifier[xterms] , identifier[yterms] ): literal[string] identifier[z] =[[ literal[int] ]* identifier[len] ( identifier[xterms] ) keyword[for] identifier[i1] keyword[in] identifier[range] ( identifier[len] ( identifier[yterms] ))] identifier[xmap] ={} identifier[xi] = literal[int] keyword[for] identifier[x] keyword[in] identifier[xterms] : identifier[xmap] [ identifier[x] ]= identifier[xi] identifier[xi] = identifier[xi] + literal[int] identifier[ymap] ={} identifier[yi] = literal[int] keyword[for] identifier[y] keyword[in] identifier[yterms] : identifier[ymap] [ identifier[y] ]= identifier[yi] identifier[yi] = identifier[yi] + literal[int] keyword[for] identifier[i] keyword[in] identifier[ilist] : identifier[z] [ identifier[ymap] [ identifier[i] [ literal[string] ]]][ identifier[xmap] [ identifier[i] [ literal[string] ]]]= identifier[i] [ literal[string] ] identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[z] )) keyword[return] ( identifier[z] , identifier[xterms] , identifier[yterms] )
def intersectionlist_to_matrix(ilist, xterms, yterms): """ WILL BE DEPRECATED Replace with method to return pandas dataframe """ z = [[0] * len(xterms) for i1 in range(len(yterms))] xmap = {} xi = 0 for x in xterms: xmap[x] = xi xi = xi + 1 # depends on [control=['for'], data=['x']] ymap = {} yi = 0 for y in yterms: ymap[y] = yi yi = yi + 1 # depends on [control=['for'], data=['y']] for i in ilist: z[ymap[i['y']]][xmap[i['x']]] = i['j'] # depends on [control=['for'], data=['i']] logging.debug('Z={}'.format(z)) return (z, xterms, yterms)
def _get_queue_lock(self, queue, log): """Get queue lock for max worker queues. For max worker queues it returns a Lock if acquired and whether it failed to acquire the lock. """ max_workers = self.max_workers_per_queue # Check if this is single worker queue for part in dotted_parts(queue): if part in self.single_worker_queues: log.debug('single worker queue') max_workers = 1 break # Max worker queues require us to get a queue lock before # moving tasks if max_workers: queue_lock = Semaphore(self.connection, self._key(LOCK_REDIS_KEY, queue), self.id, max_locks=max_workers, timeout=self.config['ACTIVE_TASK_UPDATE_TIMEOUT']) acquired, locks = queue_lock.acquire() if not acquired: return None, True log.debug('acquired queue lock', locks=locks) else: queue_lock = None return queue_lock, False
def function[_get_queue_lock, parameter[self, queue, log]]: constant[Get queue lock for max worker queues. For max worker queues it returns a Lock if acquired and whether it failed to acquire the lock. ] variable[max_workers] assign[=] name[self].max_workers_per_queue for taget[name[part]] in starred[call[name[dotted_parts], parameter[name[queue]]]] begin[:] if compare[name[part] in name[self].single_worker_queues] begin[:] call[name[log].debug, parameter[constant[single worker queue]]] variable[max_workers] assign[=] constant[1] break if name[max_workers] begin[:] variable[queue_lock] assign[=] call[name[Semaphore], parameter[name[self].connection, call[name[self]._key, parameter[name[LOCK_REDIS_KEY], name[queue]]], name[self].id]] <ast.Tuple object at 0x7da1b155f550> assign[=] call[name[queue_lock].acquire, parameter[]] if <ast.UnaryOp object at 0x7da1b155f190> begin[:] return[tuple[[<ast.Constant object at 0x7da1b155ea10>, <ast.Constant object at 0x7da1b155ee90>]]] call[name[log].debug, parameter[constant[acquired queue lock]]] return[tuple[[<ast.Name object at 0x7da1b155ead0>, <ast.Constant object at 0x7da1b155e920>]]]
keyword[def] identifier[_get_queue_lock] ( identifier[self] , identifier[queue] , identifier[log] ): literal[string] identifier[max_workers] = identifier[self] . identifier[max_workers_per_queue] keyword[for] identifier[part] keyword[in] identifier[dotted_parts] ( identifier[queue] ): keyword[if] identifier[part] keyword[in] identifier[self] . identifier[single_worker_queues] : identifier[log] . identifier[debug] ( literal[string] ) identifier[max_workers] = literal[int] keyword[break] keyword[if] identifier[max_workers] : identifier[queue_lock] = identifier[Semaphore] ( identifier[self] . identifier[connection] , identifier[self] . identifier[_key] ( identifier[LOCK_REDIS_KEY] , identifier[queue] ), identifier[self] . identifier[id] , identifier[max_locks] = identifier[max_workers] , identifier[timeout] = identifier[self] . identifier[config] [ literal[string] ]) identifier[acquired] , identifier[locks] = identifier[queue_lock] . identifier[acquire] () keyword[if] keyword[not] identifier[acquired] : keyword[return] keyword[None] , keyword[True] identifier[log] . identifier[debug] ( literal[string] , identifier[locks] = identifier[locks] ) keyword[else] : identifier[queue_lock] = keyword[None] keyword[return] identifier[queue_lock] , keyword[False]
def _get_queue_lock(self, queue, log): """Get queue lock for max worker queues. For max worker queues it returns a Lock if acquired and whether it failed to acquire the lock. """ max_workers = self.max_workers_per_queue # Check if this is single worker queue for part in dotted_parts(queue): if part in self.single_worker_queues: log.debug('single worker queue') max_workers = 1 break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['part']] # Max worker queues require us to get a queue lock before # moving tasks if max_workers: queue_lock = Semaphore(self.connection, self._key(LOCK_REDIS_KEY, queue), self.id, max_locks=max_workers, timeout=self.config['ACTIVE_TASK_UPDATE_TIMEOUT']) (acquired, locks) = queue_lock.acquire() if not acquired: return (None, True) # depends on [control=['if'], data=[]] log.debug('acquired queue lock', locks=locks) # depends on [control=['if'], data=[]] else: queue_lock = None return (queue_lock, False)
def EXPGauss(w_F, compute_uncertainty=True, is_timeseries=False): """Estimate free energy difference using gaussian approximation to one-sided (unidirectional) exponential averaging. Parameters ---------- w_F : np.ndarray, float w_F[t] is the forward work value from snapshot t. t = 0...(T-1) Length T is deduced from vector. compute_uncertainty : bool, optional, default=True if False, will disable computation of the statistical uncertainty (default: True) is_timeseries : bool, default=False if True, correlation in data is corrected for by estimation of statisitcal inefficiency (default: False) Use this option if you are providing correlated timeseries data and have not subsampled the data to produce uncorrelated samples. Returns ------- result_vals : dictionary Possible keys in the result_vals dictionary 'Delta_f' : float Free energy difference between the two states 'dDelta_f': float Estimated standard deviation of free energy difference between the two states. Notes ----- If you are prodividing correlated timeseries data, be sure to set the 'timeseries' flag to True Examples -------- Compute the free energy difference given a sample of forward work values. >>> from pymbar import testsystems >>> [w_F, w_R] = testsystems.gaussian_work_example(mu_F=None, DeltaF=1.0, seed=0) >>> results = EXPGauss(w_F) >>> print('Forward Gaussian approximated free energy difference is %.3f +- %.3f kT' % (results['Delta_f'], results['dDelta_f'])) Forward Gaussian approximated free energy difference is 1.049 +- 0.089 kT >>> results = EXPGauss(w_R) >>> print('Reverse Gaussian approximated free energy difference is %.3f +- %.3f kT' % (results['Delta_f'], results['dDelta_f'])) Reverse Gaussian approximated free energy difference is -1.073 +- 0.080 kT """ # Get number of work measurements. T = float(np.size(w_F)) # number of work measurements var = np.var(w_F) # Estimate free energy difference by Gaussian approximation, dG = <U> - 0.5*var(U) DeltaF = np.average(w_F) - 0.5 * var result_vals = dict() if compute_uncertainty: # Compute effective number of uncorrelated samples. g = 1.0 # statistical inefficiency T_eff = T if is_timeseries: # Estimate statistical inefficiency of x timeseries. import timeseries g = timeseries.statisticalInefficiency(w_F, w_F) T_eff = T / g # Estimate standard error of E[x]. dx2 = var / T_eff + 0.5 * var * var / (T_eff - 1) dDeltaF = np.sqrt(dx2) # Return estimate of free energy difference and uncertainty. result_vals['Delta_f'] = DeltaF result_vals['dDelta_f'] = dDeltaF else: result_vals['Delta_f'] = DeltaF return result_vals
def function[EXPGauss, parameter[w_F, compute_uncertainty, is_timeseries]]: constant[Estimate free energy difference using gaussian approximation to one-sided (unidirectional) exponential averaging. Parameters ---------- w_F : np.ndarray, float w_F[t] is the forward work value from snapshot t. t = 0...(T-1) Length T is deduced from vector. compute_uncertainty : bool, optional, default=True if False, will disable computation of the statistical uncertainty (default: True) is_timeseries : bool, default=False if True, correlation in data is corrected for by estimation of statisitcal inefficiency (default: False) Use this option if you are providing correlated timeseries data and have not subsampled the data to produce uncorrelated samples. Returns ------- result_vals : dictionary Possible keys in the result_vals dictionary 'Delta_f' : float Free energy difference between the two states 'dDelta_f': float Estimated standard deviation of free energy difference between the two states. Notes ----- If you are prodividing correlated timeseries data, be sure to set the 'timeseries' flag to True Examples -------- Compute the free energy difference given a sample of forward work values. >>> from pymbar import testsystems >>> [w_F, w_R] = testsystems.gaussian_work_example(mu_F=None, DeltaF=1.0, seed=0) >>> results = EXPGauss(w_F) >>> print('Forward Gaussian approximated free energy difference is %.3f +- %.3f kT' % (results['Delta_f'], results['dDelta_f'])) Forward Gaussian approximated free energy difference is 1.049 +- 0.089 kT >>> results = EXPGauss(w_R) >>> print('Reverse Gaussian approximated free energy difference is %.3f +- %.3f kT' % (results['Delta_f'], results['dDelta_f'])) Reverse Gaussian approximated free energy difference is -1.073 +- 0.080 kT ] variable[T] assign[=] call[name[float], parameter[call[name[np].size, parameter[name[w_F]]]]] variable[var] assign[=] call[name[np].var, parameter[name[w_F]]] variable[DeltaF] assign[=] binary_operation[call[name[np].average, parameter[name[w_F]]] - binary_operation[constant[0.5] * name[var]]] variable[result_vals] assign[=] call[name[dict], parameter[]] if name[compute_uncertainty] begin[:] variable[g] assign[=] constant[1.0] variable[T_eff] assign[=] name[T] if name[is_timeseries] begin[:] import module[timeseries] variable[g] assign[=] call[name[timeseries].statisticalInefficiency, parameter[name[w_F], name[w_F]]] variable[T_eff] assign[=] binary_operation[name[T] / name[g]] variable[dx2] assign[=] binary_operation[binary_operation[name[var] / name[T_eff]] + binary_operation[binary_operation[binary_operation[constant[0.5] * name[var]] * name[var]] / binary_operation[name[T_eff] - constant[1]]]] variable[dDeltaF] assign[=] call[name[np].sqrt, parameter[name[dx2]]] call[name[result_vals]][constant[Delta_f]] assign[=] name[DeltaF] call[name[result_vals]][constant[dDelta_f]] assign[=] name[dDeltaF] return[name[result_vals]]
keyword[def] identifier[EXPGauss] ( identifier[w_F] , identifier[compute_uncertainty] = keyword[True] , identifier[is_timeseries] = keyword[False] ): literal[string] identifier[T] = identifier[float] ( identifier[np] . identifier[size] ( identifier[w_F] )) identifier[var] = identifier[np] . identifier[var] ( identifier[w_F] ) identifier[DeltaF] = identifier[np] . identifier[average] ( identifier[w_F] )- literal[int] * identifier[var] identifier[result_vals] = identifier[dict] () keyword[if] identifier[compute_uncertainty] : identifier[g] = literal[int] identifier[T_eff] = identifier[T] keyword[if] identifier[is_timeseries] : keyword[import] identifier[timeseries] identifier[g] = identifier[timeseries] . identifier[statisticalInefficiency] ( identifier[w_F] , identifier[w_F] ) identifier[T_eff] = identifier[T] / identifier[g] identifier[dx2] = identifier[var] / identifier[T_eff] + literal[int] * identifier[var] * identifier[var] /( identifier[T_eff] - literal[int] ) identifier[dDeltaF] = identifier[np] . identifier[sqrt] ( identifier[dx2] ) identifier[result_vals] [ literal[string] ]= identifier[DeltaF] identifier[result_vals] [ literal[string] ]= identifier[dDeltaF] keyword[else] : identifier[result_vals] [ literal[string] ]= identifier[DeltaF] keyword[return] identifier[result_vals]
def EXPGauss(w_F, compute_uncertainty=True, is_timeseries=False): """Estimate free energy difference using gaussian approximation to one-sided (unidirectional) exponential averaging. Parameters ---------- w_F : np.ndarray, float w_F[t] is the forward work value from snapshot t. t = 0...(T-1) Length T is deduced from vector. compute_uncertainty : bool, optional, default=True if False, will disable computation of the statistical uncertainty (default: True) is_timeseries : bool, default=False if True, correlation in data is corrected for by estimation of statisitcal inefficiency (default: False) Use this option if you are providing correlated timeseries data and have not subsampled the data to produce uncorrelated samples. Returns ------- result_vals : dictionary Possible keys in the result_vals dictionary 'Delta_f' : float Free energy difference between the two states 'dDelta_f': float Estimated standard deviation of free energy difference between the two states. Notes ----- If you are prodividing correlated timeseries data, be sure to set the 'timeseries' flag to True Examples -------- Compute the free energy difference given a sample of forward work values. >>> from pymbar import testsystems >>> [w_F, w_R] = testsystems.gaussian_work_example(mu_F=None, DeltaF=1.0, seed=0) >>> results = EXPGauss(w_F) >>> print('Forward Gaussian approximated free energy difference is %.3f +- %.3f kT' % (results['Delta_f'], results['dDelta_f'])) Forward Gaussian approximated free energy difference is 1.049 +- 0.089 kT >>> results = EXPGauss(w_R) >>> print('Reverse Gaussian approximated free energy difference is %.3f +- %.3f kT' % (results['Delta_f'], results['dDelta_f'])) Reverse Gaussian approximated free energy difference is -1.073 +- 0.080 kT """ # Get number of work measurements. T = float(np.size(w_F)) # number of work measurements var = np.var(w_F) # Estimate free energy difference by Gaussian approximation, dG = <U> - 0.5*var(U) DeltaF = np.average(w_F) - 0.5 * var result_vals = dict() if compute_uncertainty: # Compute effective number of uncorrelated samples. g = 1.0 # statistical inefficiency T_eff = T if is_timeseries: # Estimate statistical inefficiency of x timeseries. import timeseries g = timeseries.statisticalInefficiency(w_F, w_F) T_eff = T / g # depends on [control=['if'], data=[]] # Estimate standard error of E[x]. dx2 = var / T_eff + 0.5 * var * var / (T_eff - 1) dDeltaF = np.sqrt(dx2) # Return estimate of free energy difference and uncertainty. result_vals['Delta_f'] = DeltaF result_vals['dDelta_f'] = dDeltaF # depends on [control=['if'], data=[]] else: result_vals['Delta_f'] = DeltaF return result_vals
def listcoins(self): ''' Use this function to list all coins with their data which are available on cryptocoincharts. Usage: http://api.cryptocoincharts.info/listCoins ''' url = self.API_PATH + 'listCoins' json_data = json.loads(self._getdata(url)) coins = [] for entry in json_data: coin = Coin() coin.id = entry['id'] coin.name = entry['name'] coin.website = entry['website'] coin.price_btc = entry['price_btc'] coin.volume_btc = entry['volume_btc'] coins.append(coin) return coins
def function[listcoins, parameter[self]]: constant[ Use this function to list all coins with their data which are available on cryptocoincharts. Usage: http://api.cryptocoincharts.info/listCoins ] variable[url] assign[=] binary_operation[name[self].API_PATH + constant[listCoins]] variable[json_data] assign[=] call[name[json].loads, parameter[call[name[self]._getdata, parameter[name[url]]]]] variable[coins] assign[=] list[[]] for taget[name[entry]] in starred[name[json_data]] begin[:] variable[coin] assign[=] call[name[Coin], parameter[]] name[coin].id assign[=] call[name[entry]][constant[id]] name[coin].name assign[=] call[name[entry]][constant[name]] name[coin].website assign[=] call[name[entry]][constant[website]] name[coin].price_btc assign[=] call[name[entry]][constant[price_btc]] name[coin].volume_btc assign[=] call[name[entry]][constant[volume_btc]] call[name[coins].append, parameter[name[coin]]] return[name[coins]]
keyword[def] identifier[listcoins] ( identifier[self] ): literal[string] identifier[url] = identifier[self] . identifier[API_PATH] + literal[string] identifier[json_data] = identifier[json] . identifier[loads] ( identifier[self] . identifier[_getdata] ( identifier[url] )) identifier[coins] =[] keyword[for] identifier[entry] keyword[in] identifier[json_data] : identifier[coin] = identifier[Coin] () identifier[coin] . identifier[id] = identifier[entry] [ literal[string] ] identifier[coin] . identifier[name] = identifier[entry] [ literal[string] ] identifier[coin] . identifier[website] = identifier[entry] [ literal[string] ] identifier[coin] . identifier[price_btc] = identifier[entry] [ literal[string] ] identifier[coin] . identifier[volume_btc] = identifier[entry] [ literal[string] ] identifier[coins] . identifier[append] ( identifier[coin] ) keyword[return] identifier[coins]
def listcoins(self): """ Use this function to list all coins with their data which are available on cryptocoincharts. Usage: http://api.cryptocoincharts.info/listCoins """ url = self.API_PATH + 'listCoins' json_data = json.loads(self._getdata(url)) coins = [] for entry in json_data: coin = Coin() coin.id = entry['id'] coin.name = entry['name'] coin.website = entry['website'] coin.price_btc = entry['price_btc'] coin.volume_btc = entry['volume_btc'] coins.append(coin) # depends on [control=['for'], data=['entry']] return coins
def compute_allocated_size(size, is_encrypted): # type: (int, bool) -> int """Compute allocated size on disk :param int size: size (content length) :param bool is_ecrypted: if entity is encrypted :rtype: int :return: required size on disk """ # compute size if size > 0: if is_encrypted: # cipher_len_without_iv = (clear_len / aes_bs + 1) * aes_bs allocatesize = ( size // blobxfer.models.download.Descriptor._AES_BLOCKSIZE - 1 ) * blobxfer.models.download.Descriptor._AES_BLOCKSIZE if allocatesize < 0: raise RuntimeError('allocatesize is negative') else: allocatesize = size else: allocatesize = 0 return allocatesize
def function[compute_allocated_size, parameter[size, is_encrypted]]: constant[Compute allocated size on disk :param int size: size (content length) :param bool is_ecrypted: if entity is encrypted :rtype: int :return: required size on disk ] if compare[name[size] greater[>] constant[0]] begin[:] if name[is_encrypted] begin[:] variable[allocatesize] assign[=] binary_operation[binary_operation[binary_operation[name[size] <ast.FloorDiv object at 0x7da2590d6bc0> name[blobxfer].models.download.Descriptor._AES_BLOCKSIZE] - constant[1]] * name[blobxfer].models.download.Descriptor._AES_BLOCKSIZE] if compare[name[allocatesize] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da20c990340> return[name[allocatesize]]
keyword[def] identifier[compute_allocated_size] ( identifier[size] , identifier[is_encrypted] ): literal[string] keyword[if] identifier[size] > literal[int] : keyword[if] identifier[is_encrypted] : identifier[allocatesize] =( identifier[size] // identifier[blobxfer] . identifier[models] . identifier[download] . identifier[Descriptor] . identifier[_AES_BLOCKSIZE] - literal[int] )* identifier[blobxfer] . identifier[models] . identifier[download] . identifier[Descriptor] . identifier[_AES_BLOCKSIZE] keyword[if] identifier[allocatesize] < literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[else] : identifier[allocatesize] = identifier[size] keyword[else] : identifier[allocatesize] = literal[int] keyword[return] identifier[allocatesize]
def compute_allocated_size(size, is_encrypted): # type: (int, bool) -> int 'Compute allocated size on disk\n :param int size: size (content length)\n :param bool is_ecrypted: if entity is encrypted\n :rtype: int\n :return: required size on disk\n ' # compute size if size > 0: if is_encrypted: # cipher_len_without_iv = (clear_len / aes_bs + 1) * aes_bs allocatesize = (size // blobxfer.models.download.Descriptor._AES_BLOCKSIZE - 1) * blobxfer.models.download.Descriptor._AES_BLOCKSIZE if allocatesize < 0: raise RuntimeError('allocatesize is negative') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: allocatesize = size # depends on [control=['if'], data=['size']] else: allocatesize = 0 return allocatesize
def IsPayable(self): """ Flag indicating if the contract accepts payments. Returns: bool: True if supported. False otherwise. """ from neo.Core.State.ContractState import ContractPropertyState return self.ContractProperties & ContractPropertyState.Payable > 0
def function[IsPayable, parameter[self]]: constant[ Flag indicating if the contract accepts payments. Returns: bool: True if supported. False otherwise. ] from relative_module[neo.Core.State.ContractState] import module[ContractPropertyState] return[compare[binary_operation[name[self].ContractProperties <ast.BitAnd object at 0x7da2590d6b60> name[ContractPropertyState].Payable] greater[>] constant[0]]]
keyword[def] identifier[IsPayable] ( identifier[self] ): literal[string] keyword[from] identifier[neo] . identifier[Core] . identifier[State] . identifier[ContractState] keyword[import] identifier[ContractPropertyState] keyword[return] identifier[self] . identifier[ContractProperties] & identifier[ContractPropertyState] . identifier[Payable] > literal[int]
def IsPayable(self): """ Flag indicating if the contract accepts payments. Returns: bool: True if supported. False otherwise. """ from neo.Core.State.ContractState import ContractPropertyState return self.ContractProperties & ContractPropertyState.Payable > 0
def getPlatformsByName(platformNames=['all'], mode=None, tags=[], excludePlatformNames=[]): """Method that recovers the names of the <Platforms> in a given list. :param platformNames: List of strings containing the possible platforms. :param mode: The mode of the search. The following can be chosen: ["phonefy", "usufy", "searchfy"]. :param tags: Just in case the method to select the candidates is a series of tags. :param excludePlatformNames: List of strings to be excluded from the search. :return: Array of <Platforms> classes. """ allPlatformsList = getAllPlatformObjects(mode) platformList = [] # Tags has priority over platform if "all" in platformNames and len(tags) == 0: # Last condition: checking if "all" has been provided for plat in allPlatformsList: if str(plat.platformName).lower() not in excludePlatformNames: platformList.append(plat) return platformList else: # going through the regexpList for name in platformNames: if name not in excludePlatformNames: for plat in allPlatformsList: # Verifying if the parameter was provided if name == str(plat.platformName).lower(): platformList.append(plat) break # We need to perform additional checks to verify the Wikipedia platforms, which are called with a single parameter try: if name == str(plat.parameterName).lower(): platformList.append(plat) break except: pass # Verifying if any of the platform tags match the original tag for t in plat.tags: if t in tags: platformList.append(plat) break # If the platformList is empty, we will return all if platformList == []: return allPlatformsList else: return platformList
def function[getPlatformsByName, parameter[platformNames, mode, tags, excludePlatformNames]]: constant[Method that recovers the names of the <Platforms> in a given list. :param platformNames: List of strings containing the possible platforms. :param mode: The mode of the search. The following can be chosen: ["phonefy", "usufy", "searchfy"]. :param tags: Just in case the method to select the candidates is a series of tags. :param excludePlatformNames: List of strings to be excluded from the search. :return: Array of <Platforms> classes. ] variable[allPlatformsList] assign[=] call[name[getAllPlatformObjects], parameter[name[mode]]] variable[platformList] assign[=] list[[]] if <ast.BoolOp object at 0x7da1b13aa980> begin[:] for taget[name[plat]] in starred[name[allPlatformsList]] begin[:] if compare[call[call[name[str], parameter[name[plat].platformName]].lower, parameter[]] <ast.NotIn object at 0x7da2590d7190> name[excludePlatformNames]] begin[:] call[name[platformList].append, parameter[name[plat]]] return[name[platformList]] if compare[name[platformList] equal[==] list[[]]] begin[:] return[name[allPlatformsList]]
keyword[def] identifier[getPlatformsByName] ( identifier[platformNames] =[ literal[string] ], identifier[mode] = keyword[None] , identifier[tags] =[], identifier[excludePlatformNames] =[]): literal[string] identifier[allPlatformsList] = identifier[getAllPlatformObjects] ( identifier[mode] ) identifier[platformList] =[] keyword[if] literal[string] keyword[in] identifier[platformNames] keyword[and] identifier[len] ( identifier[tags] )== literal[int] : keyword[for] identifier[plat] keyword[in] identifier[allPlatformsList] : keyword[if] identifier[str] ( identifier[plat] . identifier[platformName] ). identifier[lower] () keyword[not] keyword[in] identifier[excludePlatformNames] : identifier[platformList] . identifier[append] ( identifier[plat] ) keyword[return] identifier[platformList] keyword[else] : keyword[for] identifier[name] keyword[in] identifier[platformNames] : keyword[if] identifier[name] keyword[not] keyword[in] identifier[excludePlatformNames] : keyword[for] identifier[plat] keyword[in] identifier[allPlatformsList] : keyword[if] identifier[name] == identifier[str] ( identifier[plat] . identifier[platformName] ). identifier[lower] (): identifier[platformList] . identifier[append] ( identifier[plat] ) keyword[break] keyword[try] : keyword[if] identifier[name] == identifier[str] ( identifier[plat] . identifier[parameterName] ). identifier[lower] (): identifier[platformList] . identifier[append] ( identifier[plat] ) keyword[break] keyword[except] : keyword[pass] keyword[for] identifier[t] keyword[in] identifier[plat] . identifier[tags] : keyword[if] identifier[t] keyword[in] identifier[tags] : identifier[platformList] . identifier[append] ( identifier[plat] ) keyword[break] keyword[if] identifier[platformList] ==[]: keyword[return] identifier[allPlatformsList] keyword[else] : keyword[return] identifier[platformList]
def getPlatformsByName(platformNames=['all'], mode=None, tags=[], excludePlatformNames=[]): """Method that recovers the names of the <Platforms> in a given list. :param platformNames: List of strings containing the possible platforms. :param mode: The mode of the search. The following can be chosen: ["phonefy", "usufy", "searchfy"]. :param tags: Just in case the method to select the candidates is a series of tags. :param excludePlatformNames: List of strings to be excluded from the search. :return: Array of <Platforms> classes. """ allPlatformsList = getAllPlatformObjects(mode) platformList = [] # Tags has priority over platform if 'all' in platformNames and len(tags) == 0: # Last condition: checking if "all" has been provided for plat in allPlatformsList: if str(plat.platformName).lower() not in excludePlatformNames: platformList.append(plat) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plat']] return platformList # depends on [control=['if'], data=[]] else: # going through the regexpList for name in platformNames: if name not in excludePlatformNames: for plat in allPlatformsList: # Verifying if the parameter was provided if name == str(plat.platformName).lower(): platformList.append(plat) break # depends on [control=['if'], data=[]] # We need to perform additional checks to verify the Wikipedia platforms, which are called with a single parameter try: if name == str(plat.parameterName).lower(): platformList.append(plat) break # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # Verifying if any of the platform tags match the original tag for t in plat.tags: if t in tags: platformList.append(plat) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']] # depends on [control=['for'], data=['plat']] # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=['name']] # If the platformList is empty, we will return all if platformList == []: return allPlatformsList # depends on [control=['if'], data=[]] else: return platformList
def mito(args): """ %prog mito chrM.fa input.bam Identify mitochondrial deletions. """ p = OptionParser(mito.__doc__) p.set_aws_opts(store="hli-mv-data-science/htang/mito-deletions") p.add_option("--realignonly", default=False, action="store_true", help="Realign only") p.add_option("--svonly", default=False, action="store_true", help="Run Realign => SV calls only") p.add_option("--support", default=1, type="int", help="Minimum number of supporting reads") p.set_home("speedseq", default="/mnt/software/speedseq/bin") p.set_cpus() opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) chrMfa, bamfile = args store = opts.output_path cleanup = not opts.nocleanup if not op.exists(chrMfa): logging.debug("File `{}` missing. Exiting.".format(chrMfa)) return chrMfai = chrMfa + ".fai" if not op.exists(chrMfai): cmd = "samtools index {}".format(chrMfa) sh(cmd) if not bamfile.endswith(".bam"): bamfiles = [x.strip() for x in open(bamfile)] else: bamfiles = [bamfile] if store: computed = ls_s3(store) computed = [op.basename(x).split('.')[0] for x in computed if x.endswith(".depth")] remaining_samples = [x for x in bamfiles if op.basename(x).split(".")[0] not in computed] logging.debug("Already computed on `{}`: {}". format(store, len(bamfiles) - len(remaining_samples))) bamfiles = remaining_samples logging.debug("Total samples: {}".format(len(bamfiles))) for bamfile in bamfiles: run_mito(chrMfa, bamfile, opts, realignonly=opts.realignonly, svonly=opts.svonly, store=store, cleanup=cleanup)
def function[mito, parameter[args]]: constant[ %prog mito chrM.fa input.bam Identify mitochondrial deletions. ] variable[p] assign[=] call[name[OptionParser], parameter[name[mito].__doc__]] call[name[p].set_aws_opts, parameter[]] call[name[p].add_option, parameter[constant[--realignonly]]] call[name[p].add_option, parameter[constant[--svonly]]] call[name[p].add_option, parameter[constant[--support]]] call[name[p].set_home, parameter[constant[speedseq]]] call[name[p].set_cpus, parameter[]] <ast.Tuple object at 0x7da1b0796f20> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b07968f0>]] <ast.Tuple object at 0x7da1b0813fd0> assign[=] name[args] variable[store] assign[=] name[opts].output_path variable[cleanup] assign[=] <ast.UnaryOp object at 0x7da1b0811e10> if <ast.UnaryOp object at 0x7da1b0812fe0> begin[:] call[name[logging].debug, parameter[call[constant[File `{}` missing. Exiting.].format, parameter[name[chrMfa]]]]] return[None] variable[chrMfai] assign[=] binary_operation[name[chrMfa] + constant[.fai]] if <ast.UnaryOp object at 0x7da1b0811780> begin[:] variable[cmd] assign[=] call[constant[samtools index {}].format, parameter[name[chrMfa]]] call[name[sh], parameter[name[cmd]]] if <ast.UnaryOp object at 0x7da1b0812650> begin[:] variable[bamfiles] assign[=] <ast.ListComp object at 0x7da1b0810ac0> if name[store] begin[:] variable[computed] assign[=] call[name[ls_s3], parameter[name[store]]] variable[computed] assign[=] <ast.ListComp object at 0x7da1b0811390> variable[remaining_samples] assign[=] <ast.ListComp object at 0x7da1b08139d0> call[name[logging].debug, parameter[call[constant[Already computed on `{}`: {}].format, parameter[name[store], binary_operation[call[name[len], parameter[name[bamfiles]]] - call[name[len], parameter[name[remaining_samples]]]]]]]] variable[bamfiles] assign[=] name[remaining_samples] call[name[logging].debug, parameter[call[constant[Total samples: {}].format, parameter[call[name[len], parameter[name[bamfiles]]]]]]] for taget[name[bamfile]] in starred[name[bamfiles]] begin[:] call[name[run_mito], parameter[name[chrMfa], name[bamfile], name[opts]]]
keyword[def] identifier[mito] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[mito] . identifier[__doc__] ) identifier[p] . identifier[set_aws_opts] ( identifier[store] = literal[string] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[int] , identifier[type] = literal[string] , identifier[help] = literal[string] ) identifier[p] . identifier[set_home] ( literal[string] , identifier[default] = literal[string] ) identifier[p] . identifier[set_cpus] () identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[chrMfa] , identifier[bamfile] = identifier[args] identifier[store] = identifier[opts] . identifier[output_path] identifier[cleanup] = keyword[not] identifier[opts] . identifier[nocleanup] keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[chrMfa] ): identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[chrMfa] )) keyword[return] identifier[chrMfai] = identifier[chrMfa] + literal[string] keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[chrMfai] ): identifier[cmd] = literal[string] . identifier[format] ( identifier[chrMfa] ) identifier[sh] ( identifier[cmd] ) keyword[if] keyword[not] identifier[bamfile] . identifier[endswith] ( literal[string] ): identifier[bamfiles] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[open] ( identifier[bamfile] )] keyword[else] : identifier[bamfiles] =[ identifier[bamfile] ] keyword[if] identifier[store] : identifier[computed] = identifier[ls_s3] ( identifier[store] ) identifier[computed] =[ identifier[op] . identifier[basename] ( identifier[x] ). identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[computed] keyword[if] identifier[x] . identifier[endswith] ( literal[string] )] identifier[remaining_samples] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[bamfiles] keyword[if] identifier[op] . identifier[basename] ( identifier[x] ). identifier[split] ( literal[string] )[ literal[int] ] keyword[not] keyword[in] identifier[computed] ] identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[store] , identifier[len] ( identifier[bamfiles] )- identifier[len] ( identifier[remaining_samples] ))) identifier[bamfiles] = identifier[remaining_samples] identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[len] ( identifier[bamfiles] ))) keyword[for] identifier[bamfile] keyword[in] identifier[bamfiles] : identifier[run_mito] ( identifier[chrMfa] , identifier[bamfile] , identifier[opts] , identifier[realignonly] = identifier[opts] . identifier[realignonly] , identifier[svonly] = identifier[opts] . identifier[svonly] , identifier[store] = identifier[store] , identifier[cleanup] = identifier[cleanup] )
def mito(args): """ %prog mito chrM.fa input.bam Identify mitochondrial deletions. """ p = OptionParser(mito.__doc__) p.set_aws_opts(store='hli-mv-data-science/htang/mito-deletions') p.add_option('--realignonly', default=False, action='store_true', help='Realign only') p.add_option('--svonly', default=False, action='store_true', help='Run Realign => SV calls only') p.add_option('--support', default=1, type='int', help='Minimum number of supporting reads') p.set_home('speedseq', default='/mnt/software/speedseq/bin') p.set_cpus() (opts, args) = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (chrMfa, bamfile) = args store = opts.output_path cleanup = not opts.nocleanup if not op.exists(chrMfa): logging.debug('File `{}` missing. Exiting.'.format(chrMfa)) return # depends on [control=['if'], data=[]] chrMfai = chrMfa + '.fai' if not op.exists(chrMfai): cmd = 'samtools index {}'.format(chrMfa) sh(cmd) # depends on [control=['if'], data=[]] if not bamfile.endswith('.bam'): bamfiles = [x.strip() for x in open(bamfile)] # depends on [control=['if'], data=[]] else: bamfiles = [bamfile] if store: computed = ls_s3(store) computed = [op.basename(x).split('.')[0] for x in computed if x.endswith('.depth')] remaining_samples = [x for x in bamfiles if op.basename(x).split('.')[0] not in computed] logging.debug('Already computed on `{}`: {}'.format(store, len(bamfiles) - len(remaining_samples))) bamfiles = remaining_samples # depends on [control=['if'], data=[]] logging.debug('Total samples: {}'.format(len(bamfiles))) for bamfile in bamfiles: run_mito(chrMfa, bamfile, opts, realignonly=opts.realignonly, svonly=opts.svonly, store=store, cleanup=cleanup) # depends on [control=['for'], data=['bamfile']]
def switch_db(name): """ Hack to switch Flask-Pymongo db :param name: db name """ with app.app_context(): app.extensions['pymongo'][mongo.config_prefix] = mongo.cx, mongo.cx[name]
def function[switch_db, parameter[name]]: constant[ Hack to switch Flask-Pymongo db :param name: db name ] with call[name[app].app_context, parameter[]] begin[:] call[call[name[app].extensions][constant[pymongo]]][name[mongo].config_prefix] assign[=] tuple[[<ast.Attribute object at 0x7da18c4cf010>, <ast.Subscript object at 0x7da18c4cc1f0>]]
keyword[def] identifier[switch_db] ( identifier[name] ): literal[string] keyword[with] identifier[app] . identifier[app_context] (): identifier[app] . identifier[extensions] [ literal[string] ][ identifier[mongo] . identifier[config_prefix] ]= identifier[mongo] . identifier[cx] , identifier[mongo] . identifier[cx] [ identifier[name] ]
def switch_db(name): """ Hack to switch Flask-Pymongo db :param name: db name """ with app.app_context(): app.extensions['pymongo'][mongo.config_prefix] = (mongo.cx, mongo.cx[name]) # depends on [control=['with'], data=[]]
def _load(self): """Load values for all ConfigProperty attributes""" for attr_name, config_prop in self._iter_config_props(): found = False for loader in self._loaders: if loader.exists(config_prop.property_key): raw_value = loader.get(config_prop.property_key) converted_value = config_prop.load(raw_value) self._set_instance_prop(attr_name, config_prop, converted_value) found = True break if not found: if not config_prop.required or config_prop.default is not None: self._set_instance_prop(attr_name, config_prop, config_prop.default) else: raise ValueError('Missing required ConfigProperty {}'.format(attr_name))
def function[_load, parameter[self]]: constant[Load values for all ConfigProperty attributes] for taget[tuple[[<ast.Name object at 0x7da1b257dff0>, <ast.Name object at 0x7da1b257d390>]]] in starred[call[name[self]._iter_config_props, parameter[]]] begin[:] variable[found] assign[=] constant[False] for taget[name[loader]] in starred[name[self]._loaders] begin[:] if call[name[loader].exists, parameter[name[config_prop].property_key]] begin[:] variable[raw_value] assign[=] call[name[loader].get, parameter[name[config_prop].property_key]] variable[converted_value] assign[=] call[name[config_prop].load, parameter[name[raw_value]]] call[name[self]._set_instance_prop, parameter[name[attr_name], name[config_prop], name[converted_value]]] variable[found] assign[=] constant[True] break if <ast.UnaryOp object at 0x7da1b257cfa0> begin[:] if <ast.BoolOp object at 0x7da1b257c5e0> begin[:] call[name[self]._set_instance_prop, parameter[name[attr_name], name[config_prop], name[config_prop].default]]
keyword[def] identifier[_load] ( identifier[self] ): literal[string] keyword[for] identifier[attr_name] , identifier[config_prop] keyword[in] identifier[self] . identifier[_iter_config_props] (): identifier[found] = keyword[False] keyword[for] identifier[loader] keyword[in] identifier[self] . identifier[_loaders] : keyword[if] identifier[loader] . identifier[exists] ( identifier[config_prop] . identifier[property_key] ): identifier[raw_value] = identifier[loader] . identifier[get] ( identifier[config_prop] . identifier[property_key] ) identifier[converted_value] = identifier[config_prop] . identifier[load] ( identifier[raw_value] ) identifier[self] . identifier[_set_instance_prop] ( identifier[attr_name] , identifier[config_prop] , identifier[converted_value] ) identifier[found] = keyword[True] keyword[break] keyword[if] keyword[not] identifier[found] : keyword[if] keyword[not] identifier[config_prop] . identifier[required] keyword[or] identifier[config_prop] . identifier[default] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_set_instance_prop] ( identifier[attr_name] , identifier[config_prop] , identifier[config_prop] . identifier[default] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[attr_name] ))
def _load(self): """Load values for all ConfigProperty attributes""" for (attr_name, config_prop) in self._iter_config_props(): found = False for loader in self._loaders: if loader.exists(config_prop.property_key): raw_value = loader.get(config_prop.property_key) converted_value = config_prop.load(raw_value) self._set_instance_prop(attr_name, config_prop, converted_value) found = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['loader']] if not found: if not config_prop.required or config_prop.default is not None: self._set_instance_prop(attr_name, config_prop, config_prop.default) # depends on [control=['if'], data=[]] else: raise ValueError('Missing required ConfigProperty {}'.format(attr_name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def from_query(query, engine=None, limit=None): """ Execute an ORM style query, and return the result in :class:`prettytable.PrettyTable`. :param query: an ``sqlalchemy.orm.Query`` object. :param engine: an ``sqlalchemy.engine.base.Engine`` object. :param limit: int, limit rows to return. :return: a ``prettytable.PrettyTable`` object **中文文档** 将通过ORM的查询结果中的数据放入prettytable中. """ if limit is not None: query = query.limit(limit) result_proxy = execute_query_return_result_proxy(query) return from_db_cursor(result_proxy.cursor)
def function[from_query, parameter[query, engine, limit]]: constant[ Execute an ORM style query, and return the result in :class:`prettytable.PrettyTable`. :param query: an ``sqlalchemy.orm.Query`` object. :param engine: an ``sqlalchemy.engine.base.Engine`` object. :param limit: int, limit rows to return. :return: a ``prettytable.PrettyTable`` object **中文文档** 将通过ORM的查询结果中的数据放入prettytable中. ] if compare[name[limit] is_not constant[None]] begin[:] variable[query] assign[=] call[name[query].limit, parameter[name[limit]]] variable[result_proxy] assign[=] call[name[execute_query_return_result_proxy], parameter[name[query]]] return[call[name[from_db_cursor], parameter[name[result_proxy].cursor]]]
keyword[def] identifier[from_query] ( identifier[query] , identifier[engine] = keyword[None] , identifier[limit] = keyword[None] ): literal[string] keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] : identifier[query] = identifier[query] . identifier[limit] ( identifier[limit] ) identifier[result_proxy] = identifier[execute_query_return_result_proxy] ( identifier[query] ) keyword[return] identifier[from_db_cursor] ( identifier[result_proxy] . identifier[cursor] )
def from_query(query, engine=None, limit=None): """ Execute an ORM style query, and return the result in :class:`prettytable.PrettyTable`. :param query: an ``sqlalchemy.orm.Query`` object. :param engine: an ``sqlalchemy.engine.base.Engine`` object. :param limit: int, limit rows to return. :return: a ``prettytable.PrettyTable`` object **中文文档** 将通过ORM的查询结果中的数据放入prettytable中. """ if limit is not None: query = query.limit(limit) # depends on [control=['if'], data=['limit']] result_proxy = execute_query_return_result_proxy(query) return from_db_cursor(result_proxy.cursor)
def _autozoom(self): """Calculate zoom and location.""" bounds = self._autobounds() attrs = {} midpoint = lambda a, b: (a + b)/2 attrs['location'] = ( midpoint(bounds['min_lat'], bounds['max_lat']), midpoint(bounds['min_lon'], bounds['max_lon']) ) # self._folium_map.fit_bounds( # [bounds['min_long'], bounds['min_lat']], # [bounds['max_long'], bounds['max_lat']] # ) # remove the following with new Folium release # rough approximation, assuming max_zoom is 18 import math try: lat_diff = bounds['max_lat'] - bounds['min_lat'] lon_diff = bounds['max_lon'] - bounds['min_lon'] area, max_area = lat_diff*lon_diff, 180*360 if area: factor = 1 + max(0, 1 - self._width/1000)/2 + max(0, 1-area**0.5)/2 zoom = math.log(area/max_area)/-factor else: zoom = self._default_zoom zoom = max(1, min(18, round(zoom))) attrs['zoom_start'] = zoom except ValueError as e: raise Exception('Check that your locations are lat-lon pairs', e) return attrs
def function[_autozoom, parameter[self]]: constant[Calculate zoom and location.] variable[bounds] assign[=] call[name[self]._autobounds, parameter[]] variable[attrs] assign[=] dictionary[[], []] variable[midpoint] assign[=] <ast.Lambda object at 0x7da1b0791300> call[name[attrs]][constant[location]] assign[=] tuple[[<ast.Call object at 0x7da1b0793f70>, <ast.Call object at 0x7da1b07918a0>]] import module[math] <ast.Try object at 0x7da1b0790ee0> return[name[attrs]]
keyword[def] identifier[_autozoom] ( identifier[self] ): literal[string] identifier[bounds] = identifier[self] . identifier[_autobounds] () identifier[attrs] ={} identifier[midpoint] = keyword[lambda] identifier[a] , identifier[b] :( identifier[a] + identifier[b] )/ literal[int] identifier[attrs] [ literal[string] ]=( identifier[midpoint] ( identifier[bounds] [ literal[string] ], identifier[bounds] [ literal[string] ]), identifier[midpoint] ( identifier[bounds] [ literal[string] ], identifier[bounds] [ literal[string] ]) ) keyword[import] identifier[math] keyword[try] : identifier[lat_diff] = identifier[bounds] [ literal[string] ]- identifier[bounds] [ literal[string] ] identifier[lon_diff] = identifier[bounds] [ literal[string] ]- identifier[bounds] [ literal[string] ] identifier[area] , identifier[max_area] = identifier[lat_diff] * identifier[lon_diff] , literal[int] * literal[int] keyword[if] identifier[area] : identifier[factor] = literal[int] + identifier[max] ( literal[int] , literal[int] - identifier[self] . identifier[_width] / literal[int] )/ literal[int] + identifier[max] ( literal[int] , literal[int] - identifier[area] ** literal[int] )/ literal[int] identifier[zoom] = identifier[math] . identifier[log] ( identifier[area] / identifier[max_area] )/- identifier[factor] keyword[else] : identifier[zoom] = identifier[self] . identifier[_default_zoom] identifier[zoom] = identifier[max] ( literal[int] , identifier[min] ( literal[int] , identifier[round] ( identifier[zoom] ))) identifier[attrs] [ literal[string] ]= identifier[zoom] keyword[except] identifier[ValueError] keyword[as] identifier[e] : keyword[raise] identifier[Exception] ( literal[string] , identifier[e] ) keyword[return] identifier[attrs]
def _autozoom(self): """Calculate zoom and location.""" bounds = self._autobounds() attrs = {} midpoint = lambda a, b: (a + b) / 2 attrs['location'] = (midpoint(bounds['min_lat'], bounds['max_lat']), midpoint(bounds['min_lon'], bounds['max_lon'])) # self._folium_map.fit_bounds( # [bounds['min_long'], bounds['min_lat']], # [bounds['max_long'], bounds['max_lat']] # ) # remove the following with new Folium release # rough approximation, assuming max_zoom is 18 import math try: lat_diff = bounds['max_lat'] - bounds['min_lat'] lon_diff = bounds['max_lon'] - bounds['min_lon'] (area, max_area) = (lat_diff * lon_diff, 180 * 360) if area: factor = 1 + max(0, 1 - self._width / 1000) / 2 + max(0, 1 - area ** 0.5) / 2 zoom = math.log(area / max_area) / -factor # depends on [control=['if'], data=[]] else: zoom = self._default_zoom zoom = max(1, min(18, round(zoom))) attrs['zoom_start'] = zoom # depends on [control=['try'], data=[]] except ValueError as e: raise Exception('Check that your locations are lat-lon pairs', e) # depends on [control=['except'], data=['e']] return attrs
def add_object(cls, attr, title='', display=''): """Adds a ``list_display`` attribute showing an object. Supports double underscore attribute name dereferencing. :param attr: Name of the attribute to dereference from the corresponding object, i.e. what will be lined to. This name supports double underscore object link referencing for ``models.ForeignKey`` members. :param title: Title for the column of the django admin table. If not given it defaults to a capitalized version of ``attr`` :param display: What to display as the text for the link being shown. If not given it defaults to the string representation of the object for the row: ``str(obj)``. This parameter supports django templating, the context for which contains a dictionary key named "obj" with the value being the object for the row. """ global klass_count klass_count += 1 fn_name = 'dyn_fn_%d' % klass_count cls.list_display.append(fn_name) if not title: title = attr.capitalize() # python scoping is a bit weird with default values, if it isn't # referenced the inner function won't see it, so assign it for use _display = display def _ref(self, obj): field_obj = admin_obj_attr(obj, attr) if not field_obj: return '' return _obj_display(field_obj, _display) _ref.short_description = title _ref.allow_tags = True _ref.admin_order_field = attr setattr(cls, fn_name, _ref)
def function[add_object, parameter[cls, attr, title, display]]: constant[Adds a ``list_display`` attribute showing an object. Supports double underscore attribute name dereferencing. :param attr: Name of the attribute to dereference from the corresponding object, i.e. what will be lined to. This name supports double underscore object link referencing for ``models.ForeignKey`` members. :param title: Title for the column of the django admin table. If not given it defaults to a capitalized version of ``attr`` :param display: What to display as the text for the link being shown. If not given it defaults to the string representation of the object for the row: ``str(obj)``. This parameter supports django templating, the context for which contains a dictionary key named "obj" with the value being the object for the row. ] <ast.Global object at 0x7da20e956d70> <ast.AugAssign object at 0x7da20e956770> variable[fn_name] assign[=] binary_operation[constant[dyn_fn_%d] <ast.Mod object at 0x7da2590d6920> name[klass_count]] call[name[cls].list_display.append, parameter[name[fn_name]]] if <ast.UnaryOp object at 0x7da20e956aa0> begin[:] variable[title] assign[=] call[name[attr].capitalize, parameter[]] variable[_display] assign[=] name[display] def function[_ref, parameter[self, obj]]: variable[field_obj] assign[=] call[name[admin_obj_attr], parameter[name[obj], name[attr]]] if <ast.UnaryOp object at 0x7da20e9555d0> begin[:] return[constant[]] return[call[name[_obj_display], parameter[name[field_obj], name[_display]]]] name[_ref].short_description assign[=] name[title] name[_ref].allow_tags assign[=] constant[True] name[_ref].admin_order_field assign[=] name[attr] call[name[setattr], parameter[name[cls], name[fn_name], name[_ref]]]
keyword[def] identifier[add_object] ( identifier[cls] , identifier[attr] , identifier[title] = literal[string] , identifier[display] = literal[string] ): literal[string] keyword[global] identifier[klass_count] identifier[klass_count] += literal[int] identifier[fn_name] = literal[string] % identifier[klass_count] identifier[cls] . identifier[list_display] . identifier[append] ( identifier[fn_name] ) keyword[if] keyword[not] identifier[title] : identifier[title] = identifier[attr] . identifier[capitalize] () identifier[_display] = identifier[display] keyword[def] identifier[_ref] ( identifier[self] , identifier[obj] ): identifier[field_obj] = identifier[admin_obj_attr] ( identifier[obj] , identifier[attr] ) keyword[if] keyword[not] identifier[field_obj] : keyword[return] literal[string] keyword[return] identifier[_obj_display] ( identifier[field_obj] , identifier[_display] ) identifier[_ref] . identifier[short_description] = identifier[title] identifier[_ref] . identifier[allow_tags] = keyword[True] identifier[_ref] . identifier[admin_order_field] = identifier[attr] identifier[setattr] ( identifier[cls] , identifier[fn_name] , identifier[_ref] )
def add_object(cls, attr, title='', display=''): """Adds a ``list_display`` attribute showing an object. Supports double underscore attribute name dereferencing. :param attr: Name of the attribute to dereference from the corresponding object, i.e. what will be lined to. This name supports double underscore object link referencing for ``models.ForeignKey`` members. :param title: Title for the column of the django admin table. If not given it defaults to a capitalized version of ``attr`` :param display: What to display as the text for the link being shown. If not given it defaults to the string representation of the object for the row: ``str(obj)``. This parameter supports django templating, the context for which contains a dictionary key named "obj" with the value being the object for the row. """ global klass_count klass_count += 1 fn_name = 'dyn_fn_%d' % klass_count cls.list_display.append(fn_name) if not title: title = attr.capitalize() # depends on [control=['if'], data=[]] # python scoping is a bit weird with default values, if it isn't # referenced the inner function won't see it, so assign it for use _display = display def _ref(self, obj): field_obj = admin_obj_attr(obj, attr) if not field_obj: return '' # depends on [control=['if'], data=[]] return _obj_display(field_obj, _display) _ref.short_description = title _ref.allow_tags = True _ref.admin_order_field = attr setattr(cls, fn_name, _ref)
def autobuild_trub_script(file_name, slot_assignments=None, os_info=None, sensor_graph=None, app_info=None, use_safeupdate=False): """Build a trub script that loads given firmware into the given slots. slot_assignments should be a list of tuples in the following form: ("slot X" or "controller", firmware_image_name) The output of this autobuild action will be a trub script in build/output/<file_name> that assigns the given firmware to each slot in the order specified in the slot_assignments list. Args: file_name (str): The name of the output file that we should create. This file name should end in .trub slot_assignments (list of (str, str)): A list of tuples containing the slot name and the firmware image that we should use to build our update script. Optional os_info (tuple(int, str)): A tuple of OS version tag and X.Y version number that will be set as part of the OTA script if included. Optional. sensor_graph (str): Name of sgf file. Optional. app_info (tuple(int, str)): A tuple of App version tag and X.Y version number that will be set as part of the OTA script if included. Optional. use_safeupdate (bool): If True, Enables safemode before the firmware update records, then disables them after the firmware update records. """ build_update_script(file_name, slot_assignments, os_info, sensor_graph, app_info, use_safeupdate)
def function[autobuild_trub_script, parameter[file_name, slot_assignments, os_info, sensor_graph, app_info, use_safeupdate]]: constant[Build a trub script that loads given firmware into the given slots. slot_assignments should be a list of tuples in the following form: ("slot X" or "controller", firmware_image_name) The output of this autobuild action will be a trub script in build/output/<file_name> that assigns the given firmware to each slot in the order specified in the slot_assignments list. Args: file_name (str): The name of the output file that we should create. This file name should end in .trub slot_assignments (list of (str, str)): A list of tuples containing the slot name and the firmware image that we should use to build our update script. Optional os_info (tuple(int, str)): A tuple of OS version tag and X.Y version number that will be set as part of the OTA script if included. Optional. sensor_graph (str): Name of sgf file. Optional. app_info (tuple(int, str)): A tuple of App version tag and X.Y version number that will be set as part of the OTA script if included. Optional. use_safeupdate (bool): If True, Enables safemode before the firmware update records, then disables them after the firmware update records. ] call[name[build_update_script], parameter[name[file_name], name[slot_assignments], name[os_info], name[sensor_graph], name[app_info], name[use_safeupdate]]]
keyword[def] identifier[autobuild_trub_script] ( identifier[file_name] , identifier[slot_assignments] = keyword[None] , identifier[os_info] = keyword[None] , identifier[sensor_graph] = keyword[None] , identifier[app_info] = keyword[None] , identifier[use_safeupdate] = keyword[False] ): literal[string] identifier[build_update_script] ( identifier[file_name] , identifier[slot_assignments] , identifier[os_info] , identifier[sensor_graph] , identifier[app_info] , identifier[use_safeupdate] )
def autobuild_trub_script(file_name, slot_assignments=None, os_info=None, sensor_graph=None, app_info=None, use_safeupdate=False): """Build a trub script that loads given firmware into the given slots. slot_assignments should be a list of tuples in the following form: ("slot X" or "controller", firmware_image_name) The output of this autobuild action will be a trub script in build/output/<file_name> that assigns the given firmware to each slot in the order specified in the slot_assignments list. Args: file_name (str): The name of the output file that we should create. This file name should end in .trub slot_assignments (list of (str, str)): A list of tuples containing the slot name and the firmware image that we should use to build our update script. Optional os_info (tuple(int, str)): A tuple of OS version tag and X.Y version number that will be set as part of the OTA script if included. Optional. sensor_graph (str): Name of sgf file. Optional. app_info (tuple(int, str)): A tuple of App version tag and X.Y version number that will be set as part of the OTA script if included. Optional. use_safeupdate (bool): If True, Enables safemode before the firmware update records, then disables them after the firmware update records. """ build_update_script(file_name, slot_assignments, os_info, sensor_graph, app_info, use_safeupdate)
def unblock_pin(ctx, puk, new_pin): """ Unblock the PIN. Reset the PIN using the PUK code. """ controller = ctx.obj['controller'] if not puk: puk = click.prompt( 'Enter PUK', default='', show_default=False, hide_input=True, err=True) if not new_pin: new_pin = click.prompt( 'Enter a new PIN', default='', show_default=False, hide_input=True, err=True) controller.unblock_pin(puk, new_pin)
def function[unblock_pin, parameter[ctx, puk, new_pin]]: constant[ Unblock the PIN. Reset the PIN using the PUK code. ] variable[controller] assign[=] call[name[ctx].obj][constant[controller]] if <ast.UnaryOp object at 0x7da207f9bcd0> begin[:] variable[puk] assign[=] call[name[click].prompt, parameter[constant[Enter PUK]]] if <ast.UnaryOp object at 0x7da207f9a560> begin[:] variable[new_pin] assign[=] call[name[click].prompt, parameter[constant[Enter a new PIN]]] call[name[controller].unblock_pin, parameter[name[puk], name[new_pin]]]
keyword[def] identifier[unblock_pin] ( identifier[ctx] , identifier[puk] , identifier[new_pin] ): literal[string] identifier[controller] = identifier[ctx] . identifier[obj] [ literal[string] ] keyword[if] keyword[not] identifier[puk] : identifier[puk] = identifier[click] . identifier[prompt] ( literal[string] , identifier[default] = literal[string] , identifier[show_default] = keyword[False] , identifier[hide_input] = keyword[True] , identifier[err] = keyword[True] ) keyword[if] keyword[not] identifier[new_pin] : identifier[new_pin] = identifier[click] . identifier[prompt] ( literal[string] , identifier[default] = literal[string] , identifier[show_default] = keyword[False] , identifier[hide_input] = keyword[True] , identifier[err] = keyword[True] ) identifier[controller] . identifier[unblock_pin] ( identifier[puk] , identifier[new_pin] )
def unblock_pin(ctx, puk, new_pin): """ Unblock the PIN. Reset the PIN using the PUK code. """ controller = ctx.obj['controller'] if not puk: puk = click.prompt('Enter PUK', default='', show_default=False, hide_input=True, err=True) # depends on [control=['if'], data=[]] if not new_pin: new_pin = click.prompt('Enter a new PIN', default='', show_default=False, hide_input=True, err=True) # depends on [control=['if'], data=[]] controller.unblock_pin(puk, new_pin)
def quadraticEval(a, b, c, x): """given all params return the result of quadratic equation a*x^2 + b*x + c""" return a*(x**2) + b*x + c
def function[quadraticEval, parameter[a, b, c, x]]: constant[given all params return the result of quadratic equation a*x^2 + b*x + c] return[binary_operation[binary_operation[binary_operation[name[a] * binary_operation[name[x] ** constant[2]]] + binary_operation[name[b] * name[x]]] + name[c]]]
keyword[def] identifier[quadraticEval] ( identifier[a] , identifier[b] , identifier[c] , identifier[x] ): literal[string] keyword[return] identifier[a] *( identifier[x] ** literal[int] )+ identifier[b] * identifier[x] + identifier[c]
def quadraticEval(a, b, c, x): """given all params return the result of quadratic equation a*x^2 + b*x + c""" return a * x ** 2 + b * x + c
def close_umanager(self, force=False): """Used to close an uManager session. :param force: try to close a session regardless of a connection object internal state """ if not (force or self.umanager_opened): return # make sure we've got a fresh prompt self.ser.write(self.cr) if self.read_loop(lambda x: x.endswith(self.umanager_prompt),self.timeout): self.ser.write(''.join((self.cmd_umanager_termination,self.cr))) if self.read_loop(lambda x: x.endswith(self.buf_on_exit),self.timeout): log.debug("uManager closed") else: raise Dam1021Error(2,"Failed to close uManager") else: log.debug("uManager already closed") self.umanager_opened = False
def function[close_umanager, parameter[self, force]]: constant[Used to close an uManager session. :param force: try to close a session regardless of a connection object internal state ] if <ast.UnaryOp object at 0x7da1b0cfffa0> begin[:] return[None] call[name[self].ser.write, parameter[name[self].cr]] if call[name[self].read_loop, parameter[<ast.Lambda object at 0x7da1b0cfe440>, name[self].timeout]] begin[:] call[name[self].ser.write, parameter[call[constant[].join, parameter[tuple[[<ast.Attribute object at 0x7da1b0cfdd20>, <ast.Attribute object at 0x7da1b0cff4f0>]]]]]] if call[name[self].read_loop, parameter[<ast.Lambda object at 0x7da1b0cfc4f0>, name[self].timeout]] begin[:] call[name[log].debug, parameter[constant[uManager closed]]] name[self].umanager_opened assign[=] constant[False]
keyword[def] identifier[close_umanager] ( identifier[self] , identifier[force] = keyword[False] ): literal[string] keyword[if] keyword[not] ( identifier[force] keyword[or] identifier[self] . identifier[umanager_opened] ): keyword[return] identifier[self] . identifier[ser] . identifier[write] ( identifier[self] . identifier[cr] ) keyword[if] identifier[self] . identifier[read_loop] ( keyword[lambda] identifier[x] : identifier[x] . identifier[endswith] ( identifier[self] . identifier[umanager_prompt] ), identifier[self] . identifier[timeout] ): identifier[self] . identifier[ser] . identifier[write] ( literal[string] . identifier[join] (( identifier[self] . identifier[cmd_umanager_termination] , identifier[self] . identifier[cr] ))) keyword[if] identifier[self] . identifier[read_loop] ( keyword[lambda] identifier[x] : identifier[x] . identifier[endswith] ( identifier[self] . identifier[buf_on_exit] ), identifier[self] . identifier[timeout] ): identifier[log] . identifier[debug] ( literal[string] ) keyword[else] : keyword[raise] identifier[Dam1021Error] ( literal[int] , literal[string] ) keyword[else] : identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[umanager_opened] = keyword[False]
def close_umanager(self, force=False): """Used to close an uManager session. :param force: try to close a session regardless of a connection object internal state """ if not (force or self.umanager_opened): return # depends on [control=['if'], data=[]] # make sure we've got a fresh prompt self.ser.write(self.cr) if self.read_loop(lambda x: x.endswith(self.umanager_prompt), self.timeout): self.ser.write(''.join((self.cmd_umanager_termination, self.cr))) if self.read_loop(lambda x: x.endswith(self.buf_on_exit), self.timeout): log.debug('uManager closed') # depends on [control=['if'], data=[]] else: raise Dam1021Error(2, 'Failed to close uManager') # depends on [control=['if'], data=[]] else: log.debug('uManager already closed') self.umanager_opened = False