code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def _properties_model_to_dict(properties): """ Convert properties model to dict. Args: properties: Properties model. Returns: dict: Converted model. """ result = {} for attr in properties.__dict__: value = getattr(properties, attr) if hasattr(value, '__module__') and 'models' in value.__module__: value = _properties_model_to_dict(value) if not (value is None or (isinstance(value, dict) and not value)): result[attr] = value return result
def function[_properties_model_to_dict, parameter[properties]]: constant[ Convert properties model to dict. Args: properties: Properties model. Returns: dict: Converted model. ] variable[result] assign[=] dictionary[[], []] for taget[name[attr]] in starred[name[properties].__dict__] begin[:] variable[value] assign[=] call[name[getattr], parameter[name[properties], name[attr]]] if <ast.BoolOp object at 0x7da1b19d81f0> begin[:] variable[value] assign[=] call[name[_properties_model_to_dict], parameter[name[value]]] if <ast.UnaryOp object at 0x7da1b1b0f7c0> begin[:] call[name[result]][name[attr]] assign[=] name[value] return[name[result]]
keyword[def] identifier[_properties_model_to_dict] ( identifier[properties] ): literal[string] identifier[result] ={} keyword[for] identifier[attr] keyword[in] identifier[properties] . identifier[__dict__] : identifier[value] = identifier[getattr] ( identifier[properties] , identifier[attr] ) keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ) keyword[and] literal[string] keyword[in] identifier[value] . identifier[__module__] : identifier[value] = identifier[_properties_model_to_dict] ( identifier[value] ) keyword[if] keyword[not] ( identifier[value] keyword[is] keyword[None] keyword[or] ( identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[and] keyword[not] identifier[value] )): identifier[result] [ identifier[attr] ]= identifier[value] keyword[return] identifier[result]
def _properties_model_to_dict(properties): """ Convert properties model to dict. Args: properties: Properties model. Returns: dict: Converted model. """ result = {} for attr in properties.__dict__: value = getattr(properties, attr) if hasattr(value, '__module__') and 'models' in value.__module__: value = _properties_model_to_dict(value) # depends on [control=['if'], data=[]] if not (value is None or (isinstance(value, dict) and (not value))): result[attr] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']] return result
def unit_clause_assign(clause, model): """Return a single variable/value pair that makes clause true in the model, if possible. >>> unit_clause_assign(A|B|C, {A:True}) (None, None) >>> unit_clause_assign(B|~C, {A:True}) (None, None) >>> unit_clause_assign(~A|~B, {A:True}) (B, False) """ P, value = None, None for literal in disjuncts(clause): sym, positive = inspect_literal(literal) if sym in model: if model[sym] == positive: return None, None # clause already True elif P: return None, None # more than 1 unbound variable else: P, value = sym, positive return P, value
def function[unit_clause_assign, parameter[clause, model]]: constant[Return a single variable/value pair that makes clause true in the model, if possible. >>> unit_clause_assign(A|B|C, {A:True}) (None, None) >>> unit_clause_assign(B|~C, {A:True}) (None, None) >>> unit_clause_assign(~A|~B, {A:True}) (B, False) ] <ast.Tuple object at 0x7da2054a5120> assign[=] tuple[[<ast.Constant object at 0x7da2054a68c0>, <ast.Constant object at 0x7da2054a7eb0>]] for taget[name[literal]] in starred[call[name[disjuncts], parameter[name[clause]]]] begin[:] <ast.Tuple object at 0x7da2054a64a0> assign[=] call[name[inspect_literal], parameter[name[literal]]] if compare[name[sym] in name[model]] begin[:] if compare[call[name[model]][name[sym]] equal[==] name[positive]] begin[:] return[tuple[[<ast.Constant object at 0x7da2054a6e30>, <ast.Constant object at 0x7da2054a43d0>]]] return[tuple[[<ast.Name object at 0x7da2054a6560>, <ast.Name object at 0x7da2054a4ac0>]]]
keyword[def] identifier[unit_clause_assign] ( identifier[clause] , identifier[model] ): literal[string] identifier[P] , identifier[value] = keyword[None] , keyword[None] keyword[for] identifier[literal] keyword[in] identifier[disjuncts] ( identifier[clause] ): identifier[sym] , identifier[positive] = identifier[inspect_literal] ( identifier[literal] ) keyword[if] identifier[sym] keyword[in] identifier[model] : keyword[if] identifier[model] [ identifier[sym] ]== identifier[positive] : keyword[return] keyword[None] , keyword[None] keyword[elif] identifier[P] : keyword[return] keyword[None] , keyword[None] keyword[else] : identifier[P] , identifier[value] = identifier[sym] , identifier[positive] keyword[return] identifier[P] , identifier[value]
def unit_clause_assign(clause, model): """Return a single variable/value pair that makes clause true in the model, if possible. >>> unit_clause_assign(A|B|C, {A:True}) (None, None) >>> unit_clause_assign(B|~C, {A:True}) (None, None) >>> unit_clause_assign(~A|~B, {A:True}) (B, False) """ (P, value) = (None, None) for literal in disjuncts(clause): (sym, positive) = inspect_literal(literal) if sym in model: if model[sym] == positive: return (None, None) # clause already True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['sym', 'model']] elif P: return (None, None) # more than 1 unbound variable # depends on [control=['if'], data=[]] else: (P, value) = (sym, positive) # depends on [control=['for'], data=['literal']] return (P, value)
def _find_prefix_path(self, basedir, prefix): """Similar to _find_prefix_paths() but only returns the first match""" ret = "" for ret in self._find_prefix_paths(basedir, prefix): break if not ret: raise IOError("Could not find prefix {} in path {}".format(prefix, basedir)) return ret
def function[_find_prefix_path, parameter[self, basedir, prefix]]: constant[Similar to _find_prefix_paths() but only returns the first match] variable[ret] assign[=] constant[] for taget[name[ret]] in starred[call[name[self]._find_prefix_paths, parameter[name[basedir], name[prefix]]]] begin[:] break if <ast.UnaryOp object at 0x7da2044c3ca0> begin[:] <ast.Raise object at 0x7da2044c2fe0> return[name[ret]]
keyword[def] identifier[_find_prefix_path] ( identifier[self] , identifier[basedir] , identifier[prefix] ): literal[string] identifier[ret] = literal[string] keyword[for] identifier[ret] keyword[in] identifier[self] . identifier[_find_prefix_paths] ( identifier[basedir] , identifier[prefix] ): keyword[break] keyword[if] keyword[not] identifier[ret] : keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[prefix] , identifier[basedir] )) keyword[return] identifier[ret]
def _find_prefix_path(self, basedir, prefix): """Similar to _find_prefix_paths() but only returns the first match""" ret = '' for ret in self._find_prefix_paths(basedir, prefix): break # depends on [control=['for'], data=[]] if not ret: raise IOError('Could not find prefix {} in path {}'.format(prefix, basedir)) # depends on [control=['if'], data=[]] return ret
def slots(self, inherited=False): """Iterate over the Slots of the class.""" data = clips.data.DataObject(self._env) lib.EnvClassSlots(self._env, self._cls, data.byref, int(inherited)) return (ClassSlot(self._env, self._cls, n.encode()) for n in data.value)
def function[slots, parameter[self, inherited]]: constant[Iterate over the Slots of the class.] variable[data] assign[=] call[name[clips].data.DataObject, parameter[name[self]._env]] call[name[lib].EnvClassSlots, parameter[name[self]._env, name[self]._cls, name[data].byref, call[name[int], parameter[name[inherited]]]]] return[<ast.GeneratorExp object at 0x7da1b056cd60>]
keyword[def] identifier[slots] ( identifier[self] , identifier[inherited] = keyword[False] ): literal[string] identifier[data] = identifier[clips] . identifier[data] . identifier[DataObject] ( identifier[self] . identifier[_env] ) identifier[lib] . identifier[EnvClassSlots] ( identifier[self] . identifier[_env] , identifier[self] . identifier[_cls] , identifier[data] . identifier[byref] , identifier[int] ( identifier[inherited] )) keyword[return] ( identifier[ClassSlot] ( identifier[self] . identifier[_env] , identifier[self] . identifier[_cls] , identifier[n] . identifier[encode] ()) keyword[for] identifier[n] keyword[in] identifier[data] . identifier[value] )
def slots(self, inherited=False): """Iterate over the Slots of the class.""" data = clips.data.DataObject(self._env) lib.EnvClassSlots(self._env, self._cls, data.byref, int(inherited)) return (ClassSlot(self._env, self._cls, n.encode()) for n in data.value)
def _handle_calls(self, service_obj, calls): """ Performs method calls on service object """ for call in calls: method = call.get('method') args = call.get('args', []) kwargs = call.get('kwargs', {}) _check_type('args', args, list) _check_type('kwargs', kwargs, dict) if method is None: raise InvalidServiceConfiguration( 'Service call must define a method.' ) new_args = self._replace_scalars_in_args(args) new_kwargs = self._replace_scalars_in_kwargs(kwargs) getattr(service_obj, method)(*new_args, **new_kwargs)
def function[_handle_calls, parameter[self, service_obj, calls]]: constant[ Performs method calls on service object ] for taget[name[call]] in starred[name[calls]] begin[:] variable[method] assign[=] call[name[call].get, parameter[constant[method]]] variable[args] assign[=] call[name[call].get, parameter[constant[args], list[[]]]] variable[kwargs] assign[=] call[name[call].get, parameter[constant[kwargs], dictionary[[], []]]] call[name[_check_type], parameter[constant[args], name[args], name[list]]] call[name[_check_type], parameter[constant[kwargs], name[kwargs], name[dict]]] if compare[name[method] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6a8a60> variable[new_args] assign[=] call[name[self]._replace_scalars_in_args, parameter[name[args]]] variable[new_kwargs] assign[=] call[name[self]._replace_scalars_in_kwargs, parameter[name[kwargs]]] call[call[name[getattr], parameter[name[service_obj], name[method]]], parameter[<ast.Starred object at 0x7da20c6aa740>]]
keyword[def] identifier[_handle_calls] ( identifier[self] , identifier[service_obj] , identifier[calls] ): literal[string] keyword[for] identifier[call] keyword[in] identifier[calls] : identifier[method] = identifier[call] . identifier[get] ( literal[string] ) identifier[args] = identifier[call] . identifier[get] ( literal[string] ,[]) identifier[kwargs] = identifier[call] . identifier[get] ( literal[string] ,{}) identifier[_check_type] ( literal[string] , identifier[args] , identifier[list] ) identifier[_check_type] ( literal[string] , identifier[kwargs] , identifier[dict] ) keyword[if] identifier[method] keyword[is] keyword[None] : keyword[raise] identifier[InvalidServiceConfiguration] ( literal[string] ) identifier[new_args] = identifier[self] . identifier[_replace_scalars_in_args] ( identifier[args] ) identifier[new_kwargs] = identifier[self] . identifier[_replace_scalars_in_kwargs] ( identifier[kwargs] ) identifier[getattr] ( identifier[service_obj] , identifier[method] )(* identifier[new_args] ,** identifier[new_kwargs] )
def _handle_calls(self, service_obj, calls): """ Performs method calls on service object """ for call in calls: method = call.get('method') args = call.get('args', []) kwargs = call.get('kwargs', {}) _check_type('args', args, list) _check_type('kwargs', kwargs, dict) if method is None: raise InvalidServiceConfiguration('Service call must define a method.') # depends on [control=['if'], data=[]] new_args = self._replace_scalars_in_args(args) new_kwargs = self._replace_scalars_in_kwargs(kwargs) getattr(service_obj, method)(*new_args, **new_kwargs) # depends on [control=['for'], data=['call']]
def get_instance(self, payload): """ Build an instance of RecordingInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.call.recording.RecordingInstance :rtype: twilio.rest.api.v2010.account.call.recording.RecordingInstance """ return RecordingInstance( self._version, payload, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'], )
def function[get_instance, parameter[self, payload]]: constant[ Build an instance of RecordingInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.call.recording.RecordingInstance :rtype: twilio.rest.api.v2010.account.call.recording.RecordingInstance ] return[call[name[RecordingInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ): literal[string] keyword[return] identifier[RecordingInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[call_sid] = identifier[self] . identifier[_solution] [ literal[string] ], )
def get_instance(self, payload): """ Build an instance of RecordingInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.call.recording.RecordingInstance :rtype: twilio.rest.api.v2010.account.call.recording.RecordingInstance """ return RecordingInstance(self._version, payload, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
def load(prefix, epoch, load_optimizer_states=False, **kwargs): """Creates a model from previously saved checkpoint. Parameters ---------- prefix : str path prefix of saved model files. You should have "prefix-symbol.json", "prefix-xxxx.params", and optionally "prefix-xxxx.states", where xxxx is the epoch number. epoch : int epoch to load. load_optimizer_states : bool whether to load optimizer states. Checkpoint needs to have been made with save_optimizer_states=True. data_names : list of str Default is `('data')` for a typical model used in image classification. label_names : list of str Default is `('softmax_label')` for a typical model used in image classification. logger : Logger Default is `logging`. context : Context or list of Context Default is ``cpu()``. work_load_list : list of number Default ``None``, indicating uniform workload. fixed_param_names: list of str Default ``None``, indicating no network parameters are fixed. """ sym, args, auxs = load_checkpoint(prefix, epoch) mod = Module(symbol=sym, **kwargs) mod._arg_params = args mod._aux_params = auxs mod.params_initialized = True if load_optimizer_states: mod._preload_opt_states = '%s-%04d.states'%(prefix, epoch) return mod
def function[load, parameter[prefix, epoch, load_optimizer_states]]: constant[Creates a model from previously saved checkpoint. Parameters ---------- prefix : str path prefix of saved model files. You should have "prefix-symbol.json", "prefix-xxxx.params", and optionally "prefix-xxxx.states", where xxxx is the epoch number. epoch : int epoch to load. load_optimizer_states : bool whether to load optimizer states. Checkpoint needs to have been made with save_optimizer_states=True. data_names : list of str Default is `('data')` for a typical model used in image classification. label_names : list of str Default is `('softmax_label')` for a typical model used in image classification. logger : Logger Default is `logging`. context : Context or list of Context Default is ``cpu()``. work_load_list : list of number Default ``None``, indicating uniform workload. fixed_param_names: list of str Default ``None``, indicating no network parameters are fixed. ] <ast.Tuple object at 0x7da1b1f23550> assign[=] call[name[load_checkpoint], parameter[name[prefix], name[epoch]]] variable[mod] assign[=] call[name[Module], parameter[]] name[mod]._arg_params assign[=] name[args] name[mod]._aux_params assign[=] name[auxs] name[mod].params_initialized assign[=] constant[True] if name[load_optimizer_states] begin[:] name[mod]._preload_opt_states assign[=] binary_operation[constant[%s-%04d.states] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1f227d0>, <ast.Name object at 0x7da1b1f22860>]]] return[name[mod]]
keyword[def] identifier[load] ( identifier[prefix] , identifier[epoch] , identifier[load_optimizer_states] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[sym] , identifier[args] , identifier[auxs] = identifier[load_checkpoint] ( identifier[prefix] , identifier[epoch] ) identifier[mod] = identifier[Module] ( identifier[symbol] = identifier[sym] ,** identifier[kwargs] ) identifier[mod] . identifier[_arg_params] = identifier[args] identifier[mod] . identifier[_aux_params] = identifier[auxs] identifier[mod] . identifier[params_initialized] = keyword[True] keyword[if] identifier[load_optimizer_states] : identifier[mod] . identifier[_preload_opt_states] = literal[string] %( identifier[prefix] , identifier[epoch] ) keyword[return] identifier[mod]
def load(prefix, epoch, load_optimizer_states=False, **kwargs): """Creates a model from previously saved checkpoint. Parameters ---------- prefix : str path prefix of saved model files. You should have "prefix-symbol.json", "prefix-xxxx.params", and optionally "prefix-xxxx.states", where xxxx is the epoch number. epoch : int epoch to load. load_optimizer_states : bool whether to load optimizer states. Checkpoint needs to have been made with save_optimizer_states=True. data_names : list of str Default is `('data')` for a typical model used in image classification. label_names : list of str Default is `('softmax_label')` for a typical model used in image classification. logger : Logger Default is `logging`. context : Context or list of Context Default is ``cpu()``. work_load_list : list of number Default ``None``, indicating uniform workload. fixed_param_names: list of str Default ``None``, indicating no network parameters are fixed. """ (sym, args, auxs) = load_checkpoint(prefix, epoch) mod = Module(symbol=sym, **kwargs) mod._arg_params = args mod._aux_params = auxs mod.params_initialized = True if load_optimizer_states: mod._preload_opt_states = '%s-%04d.states' % (prefix, epoch) # depends on [control=['if'], data=[]] return mod
def copy_on_s3(self, src_file_name, dst_file_name, bucket_name): """ Copies src file to destination within a bucket. """ try: self.s3_client.head_bucket(Bucket=bucket_name) except botocore.exceptions.ClientError as e: # pragma: no cover # If a client error is thrown, then check that it was a 404 error. # If it was a 404 error, then the bucket does not exist. error_code = int(e.response['Error']['Code']) if error_code == 404: return False copy_src = { "Bucket": bucket_name, "Key": src_file_name } try: self.s3_client.copy( CopySource=copy_src, Bucket=bucket_name, Key=dst_file_name ) return True except botocore.exceptions.ClientError: # pragma: no cover return False
def function[copy_on_s3, parameter[self, src_file_name, dst_file_name, bucket_name]]: constant[ Copies src file to destination within a bucket. ] <ast.Try object at 0x7da1b21c6f20> variable[copy_src] assign[=] dictionary[[<ast.Constant object at 0x7da1b21c5270>, <ast.Constant object at 0x7da1b21c4cd0>], [<ast.Name object at 0x7da1b21c5090>, <ast.Name object at 0x7da1b21c50f0>]] <ast.Try object at 0x7da1b21c7190>
keyword[def] identifier[copy_on_s3] ( identifier[self] , identifier[src_file_name] , identifier[dst_file_name] , identifier[bucket_name] ): literal[string] keyword[try] : identifier[self] . identifier[s3_client] . identifier[head_bucket] ( identifier[Bucket] = identifier[bucket_name] ) keyword[except] identifier[botocore] . identifier[exceptions] . identifier[ClientError] keyword[as] identifier[e] : identifier[error_code] = identifier[int] ( identifier[e] . identifier[response] [ literal[string] ][ literal[string] ]) keyword[if] identifier[error_code] == literal[int] : keyword[return] keyword[False] identifier[copy_src] ={ literal[string] : identifier[bucket_name] , literal[string] : identifier[src_file_name] } keyword[try] : identifier[self] . identifier[s3_client] . identifier[copy] ( identifier[CopySource] = identifier[copy_src] , identifier[Bucket] = identifier[bucket_name] , identifier[Key] = identifier[dst_file_name] ) keyword[return] keyword[True] keyword[except] identifier[botocore] . identifier[exceptions] . identifier[ClientError] : keyword[return] keyword[False]
def copy_on_s3(self, src_file_name, dst_file_name, bucket_name): """ Copies src file to destination within a bucket. """ try: self.s3_client.head_bucket(Bucket=bucket_name) # depends on [control=['try'], data=[]] except botocore.exceptions.ClientError as e: # pragma: no cover # If a client error is thrown, then check that it was a 404 error. # If it was a 404 error, then the bucket does not exist. error_code = int(e.response['Error']['Code']) if error_code == 404: return False # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] copy_src = {'Bucket': bucket_name, 'Key': src_file_name} try: self.s3_client.copy(CopySource=copy_src, Bucket=bucket_name, Key=dst_file_name) return True # depends on [control=['try'], data=[]] except botocore.exceptions.ClientError: # pragma: no cover return False # depends on [control=['except'], data=[]]
def get_template_path(self, content=None): """ Find template. :return string: remplate path """ if isinstance(content, Paginator): return op.join('api', 'paginator.%s' % self.format) if isinstance(content, UpdatedList): return op.join('api', 'updated.%s' % self.format) app = '' name = self.resource._meta.name if not content: content = self.resource._meta.model if isinstance(content, (Model, ModelBase)): app = content._meta.app_label name = content._meta.module_name basedir = 'api' if getattr(self.resource, 'api', None): basedir = self.resource.api.prefix return op.join( basedir, str(self.resource.api or ''), app, "%s.%s" % (name, self.format) )
def function[get_template_path, parameter[self, content]]: constant[ Find template. :return string: remplate path ] if call[name[isinstance], parameter[name[content], name[Paginator]]] begin[:] return[call[name[op].join, parameter[constant[api], binary_operation[constant[paginator.%s] <ast.Mod object at 0x7da2590d6920> name[self].format]]]] if call[name[isinstance], parameter[name[content], name[UpdatedList]]] begin[:] return[call[name[op].join, parameter[constant[api], binary_operation[constant[updated.%s] <ast.Mod object at 0x7da2590d6920> name[self].format]]]] variable[app] assign[=] constant[] variable[name] assign[=] name[self].resource._meta.name if <ast.UnaryOp object at 0x7da18dc05ea0> begin[:] variable[content] assign[=] name[self].resource._meta.model if call[name[isinstance], parameter[name[content], tuple[[<ast.Name object at 0x7da18dc07970>, <ast.Name object at 0x7da18dc04dc0>]]]] begin[:] variable[app] assign[=] name[content]._meta.app_label variable[name] assign[=] name[content]._meta.module_name variable[basedir] assign[=] constant[api] if call[name[getattr], parameter[name[self].resource, constant[api], constant[None]]] begin[:] variable[basedir] assign[=] name[self].resource.api.prefix return[call[name[op].join, parameter[name[basedir], call[name[str], parameter[<ast.BoolOp object at 0x7da18dc04820>]], name[app], binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc042e0>, <ast.Attribute object at 0x7da18dc047f0>]]]]]]
keyword[def] identifier[get_template_path] ( identifier[self] , identifier[content] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[content] , identifier[Paginator] ): keyword[return] identifier[op] . identifier[join] ( literal[string] , literal[string] % identifier[self] . identifier[format] ) keyword[if] identifier[isinstance] ( identifier[content] , identifier[UpdatedList] ): keyword[return] identifier[op] . identifier[join] ( literal[string] , literal[string] % identifier[self] . identifier[format] ) identifier[app] = literal[string] identifier[name] = identifier[self] . identifier[resource] . identifier[_meta] . identifier[name] keyword[if] keyword[not] identifier[content] : identifier[content] = identifier[self] . identifier[resource] . identifier[_meta] . identifier[model] keyword[if] identifier[isinstance] ( identifier[content] ,( identifier[Model] , identifier[ModelBase] )): identifier[app] = identifier[content] . identifier[_meta] . identifier[app_label] identifier[name] = identifier[content] . identifier[_meta] . identifier[module_name] identifier[basedir] = literal[string] keyword[if] identifier[getattr] ( identifier[self] . identifier[resource] , literal[string] , keyword[None] ): identifier[basedir] = identifier[self] . identifier[resource] . identifier[api] . identifier[prefix] keyword[return] identifier[op] . identifier[join] ( identifier[basedir] , identifier[str] ( identifier[self] . identifier[resource] . identifier[api] keyword[or] literal[string] ), identifier[app] , literal[string] %( identifier[name] , identifier[self] . identifier[format] ) )
def get_template_path(self, content=None): """ Find template. :return string: remplate path """ if isinstance(content, Paginator): return op.join('api', 'paginator.%s' % self.format) # depends on [control=['if'], data=[]] if isinstance(content, UpdatedList): return op.join('api', 'updated.%s' % self.format) # depends on [control=['if'], data=[]] app = '' name = self.resource._meta.name if not content: content = self.resource._meta.model # depends on [control=['if'], data=[]] if isinstance(content, (Model, ModelBase)): app = content._meta.app_label name = content._meta.module_name # depends on [control=['if'], data=[]] basedir = 'api' if getattr(self.resource, 'api', None): basedir = self.resource.api.prefix # depends on [control=['if'], data=[]] return op.join(basedir, str(self.resource.api or ''), app, '%s.%s' % (name, self.format))
def _generic_callable(group_idx, a, size, fill_value, dtype=None, func=lambda g: g, **kwargs): """groups a by inds, and then applies foo to each group in turn, placing the results in an array.""" groups = _array(group_idx, a, size, ()) ret = np.full(size, fill_value, dtype=dtype or np.float64) for i, grp in enumerate(groups): if np.ndim(grp) == 1 and len(grp) > 0: ret[i] = func(grp) return ret
def function[_generic_callable, parameter[group_idx, a, size, fill_value, dtype, func]]: constant[groups a by inds, and then applies foo to each group in turn, placing the results in an array.] variable[groups] assign[=] call[name[_array], parameter[name[group_idx], name[a], name[size], tuple[[]]]] variable[ret] assign[=] call[name[np].full, parameter[name[size], name[fill_value]]] for taget[tuple[[<ast.Name object at 0x7da2046225f0>, <ast.Name object at 0x7da204623760>]]] in starred[call[name[enumerate], parameter[name[groups]]]] begin[:] if <ast.BoolOp object at 0x7da204623c40> begin[:] call[name[ret]][name[i]] assign[=] call[name[func], parameter[name[grp]]] return[name[ret]]
keyword[def] identifier[_generic_callable] ( identifier[group_idx] , identifier[a] , identifier[size] , identifier[fill_value] , identifier[dtype] = keyword[None] , identifier[func] = keyword[lambda] identifier[g] : identifier[g] ,** identifier[kwargs] ): literal[string] identifier[groups] = identifier[_array] ( identifier[group_idx] , identifier[a] , identifier[size] ,()) identifier[ret] = identifier[np] . identifier[full] ( identifier[size] , identifier[fill_value] , identifier[dtype] = identifier[dtype] keyword[or] identifier[np] . identifier[float64] ) keyword[for] identifier[i] , identifier[grp] keyword[in] identifier[enumerate] ( identifier[groups] ): keyword[if] identifier[np] . identifier[ndim] ( identifier[grp] )== literal[int] keyword[and] identifier[len] ( identifier[grp] )> literal[int] : identifier[ret] [ identifier[i] ]= identifier[func] ( identifier[grp] ) keyword[return] identifier[ret]
def _generic_callable(group_idx, a, size, fill_value, dtype=None, func=lambda g: g, **kwargs): """groups a by inds, and then applies foo to each group in turn, placing the results in an array.""" groups = _array(group_idx, a, size, ()) ret = np.full(size, fill_value, dtype=dtype or np.float64) for (i, grp) in enumerate(groups): if np.ndim(grp) == 1 and len(grp) > 0: ret[i] = func(grp) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return ret
def analyzeAll(self): """analyze every unanalyzed ABF in the folder.""" searchableData=str(self.files2) self.log.debug("considering analysis for %d ABFs",len(self.IDs)) for ID in self.IDs: if not ID+"_" in searchableData: self.log.debug("%s needs analysis",ID) try: self.analyzeABF(ID) except: print("EXCEPTION! "*100) else: self.log.debug("%s has existing analysis, not overwriting",ID) self.log.debug("verified analysis of %d ABFs",len(self.IDs))
def function[analyzeAll, parameter[self]]: constant[analyze every unanalyzed ABF in the folder.] variable[searchableData] assign[=] call[name[str], parameter[name[self].files2]] call[name[self].log.debug, parameter[constant[considering analysis for %d ABFs], call[name[len], parameter[name[self].IDs]]]] for taget[name[ID]] in starred[name[self].IDs] begin[:] if <ast.UnaryOp object at 0x7da18bcc8880> begin[:] call[name[self].log.debug, parameter[constant[%s needs analysis], name[ID]]] <ast.Try object at 0x7da18bccb160> call[name[self].log.debug, parameter[constant[verified analysis of %d ABFs], call[name[len], parameter[name[self].IDs]]]]
keyword[def] identifier[analyzeAll] ( identifier[self] ): literal[string] identifier[searchableData] = identifier[str] ( identifier[self] . identifier[files2] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[len] ( identifier[self] . identifier[IDs] )) keyword[for] identifier[ID] keyword[in] identifier[self] . identifier[IDs] : keyword[if] keyword[not] identifier[ID] + literal[string] keyword[in] identifier[searchableData] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[ID] ) keyword[try] : identifier[self] . identifier[analyzeABF] ( identifier[ID] ) keyword[except] : identifier[print] ( literal[string] * literal[int] ) keyword[else] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[ID] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[len] ( identifier[self] . identifier[IDs] ))
def analyzeAll(self): """analyze every unanalyzed ABF in the folder.""" searchableData = str(self.files2) self.log.debug('considering analysis for %d ABFs', len(self.IDs)) for ID in self.IDs: if not ID + '_' in searchableData: self.log.debug('%s needs analysis', ID) try: self.analyzeABF(ID) # depends on [control=['try'], data=[]] except: print('EXCEPTION! ' * 100) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: self.log.debug('%s has existing analysis, not overwriting', ID) # depends on [control=['for'], data=['ID']] self.log.debug('verified analysis of %d ABFs', len(self.IDs))
def is_installed(self): """Return True if the bundle is installed.""" r = self.library.resolve(self.identity.vid) return r is not None
def function[is_installed, parameter[self]]: constant[Return True if the bundle is installed.] variable[r] assign[=] call[name[self].library.resolve, parameter[name[self].identity.vid]] return[compare[name[r] is_not constant[None]]]
keyword[def] identifier[is_installed] ( identifier[self] ): literal[string] identifier[r] = identifier[self] . identifier[library] . identifier[resolve] ( identifier[self] . identifier[identity] . identifier[vid] ) keyword[return] identifier[r] keyword[is] keyword[not] keyword[None]
def is_installed(self): """Return True if the bundle is installed.""" r = self.library.resolve(self.identity.vid) return r is not None
def start(self): """ Start collecting trace information. """ origin = inspect.stack()[1][0] self.reset() # Install the tracer on this thread. self._start_tracer(origin)
def function[start, parameter[self]]: constant[ Start collecting trace information. ] variable[origin] assign[=] call[call[call[name[inspect].stack, parameter[]]][constant[1]]][constant[0]] call[name[self].reset, parameter[]] call[name[self]._start_tracer, parameter[name[origin]]]
keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[origin] = identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ] identifier[self] . identifier[reset] () identifier[self] . identifier[_start_tracer] ( identifier[origin] )
def start(self): """ Start collecting trace information. """ origin = inspect.stack()[1][0] self.reset() # Install the tracer on this thread. self._start_tracer(origin)
def _style_name(self, el): """Return the style name of an element.""" if el.attributes is None: return None style_field = ('urn:oasis:names:tc:opendocument:xmlns:text:1.0', 'style-name') name = el.attributes.get(style_field, None) if not name: return None return self._get_style_name(name)
def function[_style_name, parameter[self, el]]: constant[Return the style name of an element.] if compare[name[el].attributes is constant[None]] begin[:] return[constant[None]] variable[style_field] assign[=] tuple[[<ast.Constant object at 0x7da1b0524df0>, <ast.Constant object at 0x7da1b0526e60>]] variable[name] assign[=] call[name[el].attributes.get, parameter[name[style_field], constant[None]]] if <ast.UnaryOp object at 0x7da1b05270d0> begin[:] return[constant[None]] return[call[name[self]._get_style_name, parameter[name[name]]]]
keyword[def] identifier[_style_name] ( identifier[self] , identifier[el] ): literal[string] keyword[if] identifier[el] . identifier[attributes] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[style_field] =( literal[string] , literal[string] ) identifier[name] = identifier[el] . identifier[attributes] . identifier[get] ( identifier[style_field] , keyword[None] ) keyword[if] keyword[not] identifier[name] : keyword[return] keyword[None] keyword[return] identifier[self] . identifier[_get_style_name] ( identifier[name] )
def _style_name(self, el): """Return the style name of an element.""" if el.attributes is None: return None # depends on [control=['if'], data=[]] style_field = ('urn:oasis:names:tc:opendocument:xmlns:text:1.0', 'style-name') name = el.attributes.get(style_field, None) if not name: return None # depends on [control=['if'], data=[]] return self._get_style_name(name)
def _cumprod(l): """Cumulative product of a list. Args: l: a list of integers Returns: a list with one more element (starting with 1) """ ret = [1] for item in l: ret.append(ret[-1] * item) return ret
def function[_cumprod, parameter[l]]: constant[Cumulative product of a list. Args: l: a list of integers Returns: a list with one more element (starting with 1) ] variable[ret] assign[=] list[[<ast.Constant object at 0x7da2044c26e0>]] for taget[name[item]] in starred[name[l]] begin[:] call[name[ret].append, parameter[binary_operation[call[name[ret]][<ast.UnaryOp object at 0x7da20c6c6710>] * name[item]]]] return[name[ret]]
keyword[def] identifier[_cumprod] ( identifier[l] ): literal[string] identifier[ret] =[ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[l] : identifier[ret] . identifier[append] ( identifier[ret] [- literal[int] ]* identifier[item] ) keyword[return] identifier[ret]
def _cumprod(l): """Cumulative product of a list. Args: l: a list of integers Returns: a list with one more element (starting with 1) """ ret = [1] for item in l: ret.append(ret[-1] * item) # depends on [control=['for'], data=['item']] return ret
def getMaximinScores(profile): """ Returns a dictionary that associates integer representations of each candidate with their Copeland score. :ivar Profile profile: A Profile object that represents an election profile. """ # Currently, we expect the profile to contain complete ordering over candidates. Ties are # allowed however. elecType = profile.getElecType() if elecType != "soc" and elecType != "toc": print("ERROR: unsupported election type") exit() wmgMap = profile.getWmg() # Initialize each Copeland score as infinity. maximinscores = {} for cand in wmgMap.keys(): maximinscores[cand] = float("inf") # For each pair of candidates, calculate the number of votes in which one beat the other. # For each pair of candidates, calculate the number of times each beats the other. for cand1, cand2 in itertools.combinations(wmgMap.keys(), 2): if cand2 in wmgMap[cand1].keys(): maximinscores[cand1] = min(maximinscores[cand1], wmgMap[cand1][cand2]) maximinscores[cand2] = min(maximinscores[cand2], wmgMap[cand2][cand1]) return maximinscores
def function[getMaximinScores, parameter[profile]]: constant[ Returns a dictionary that associates integer representations of each candidate with their Copeland score. :ivar Profile profile: A Profile object that represents an election profile. ] variable[elecType] assign[=] call[name[profile].getElecType, parameter[]] if <ast.BoolOp object at 0x7da1b2339d80> begin[:] call[name[print], parameter[constant[ERROR: unsupported election type]]] call[name[exit], parameter[]] variable[wmgMap] assign[=] call[name[profile].getWmg, parameter[]] variable[maximinscores] assign[=] dictionary[[], []] for taget[name[cand]] in starred[call[name[wmgMap].keys, parameter[]]] begin[:] call[name[maximinscores]][name[cand]] assign[=] call[name[float], parameter[constant[inf]]] for taget[tuple[[<ast.Name object at 0x7da1b2338040>, <ast.Name object at 0x7da1b2338070>]]] in starred[call[name[itertools].combinations, parameter[call[name[wmgMap].keys, parameter[]], constant[2]]]] begin[:] if compare[name[cand2] in call[call[name[wmgMap]][name[cand1]].keys, parameter[]]] begin[:] call[name[maximinscores]][name[cand1]] assign[=] call[name[min], parameter[call[name[maximinscores]][name[cand1]], call[call[name[wmgMap]][name[cand1]]][name[cand2]]]] call[name[maximinscores]][name[cand2]] assign[=] call[name[min], parameter[call[name[maximinscores]][name[cand2]], call[call[name[wmgMap]][name[cand2]]][name[cand1]]]] return[name[maximinscores]]
keyword[def] identifier[getMaximinScores] ( identifier[profile] ): literal[string] identifier[elecType] = identifier[profile] . identifier[getElecType] () keyword[if] identifier[elecType] != literal[string] keyword[and] identifier[elecType] != literal[string] : identifier[print] ( literal[string] ) identifier[exit] () identifier[wmgMap] = identifier[profile] . identifier[getWmg] () identifier[maximinscores] ={} keyword[for] identifier[cand] keyword[in] identifier[wmgMap] . identifier[keys] (): identifier[maximinscores] [ identifier[cand] ]= identifier[float] ( literal[string] ) keyword[for] identifier[cand1] , identifier[cand2] keyword[in] identifier[itertools] . identifier[combinations] ( identifier[wmgMap] . identifier[keys] (), literal[int] ): keyword[if] identifier[cand2] keyword[in] identifier[wmgMap] [ identifier[cand1] ]. identifier[keys] (): identifier[maximinscores] [ identifier[cand1] ]= identifier[min] ( identifier[maximinscores] [ identifier[cand1] ], identifier[wmgMap] [ identifier[cand1] ][ identifier[cand2] ]) identifier[maximinscores] [ identifier[cand2] ]= identifier[min] ( identifier[maximinscores] [ identifier[cand2] ], identifier[wmgMap] [ identifier[cand2] ][ identifier[cand1] ]) keyword[return] identifier[maximinscores]
def getMaximinScores(profile): """ Returns a dictionary that associates integer representations of each candidate with their Copeland score. :ivar Profile profile: A Profile object that represents an election profile. """ # Currently, we expect the profile to contain complete ordering over candidates. Ties are # allowed however. elecType = profile.getElecType() if elecType != 'soc' and elecType != 'toc': print('ERROR: unsupported election type') exit() # depends on [control=['if'], data=[]] wmgMap = profile.getWmg() # Initialize each Copeland score as infinity. maximinscores = {} for cand in wmgMap.keys(): maximinscores[cand] = float('inf') # depends on [control=['for'], data=['cand']] # For each pair of candidates, calculate the number of votes in which one beat the other. # For each pair of candidates, calculate the number of times each beats the other. for (cand1, cand2) in itertools.combinations(wmgMap.keys(), 2): if cand2 in wmgMap[cand1].keys(): maximinscores[cand1] = min(maximinscores[cand1], wmgMap[cand1][cand2]) maximinscores[cand2] = min(maximinscores[cand2], wmgMap[cand2][cand1]) # depends on [control=['if'], data=['cand2']] # depends on [control=['for'], data=[]] return maximinscores
def add_router_references(self, snet, address, dnets): """Add/update references to routers.""" if _debug: NetworkServiceAccessPoint._debug("add_router_references %r %r %r", snet, address, dnets) # see if we have an adapter for the snet if snet not in self.adapters: raise RuntimeError("no adapter for network: %d" % (snet,)) # pass this along to the cache self.router_info_cache.update_router_info(snet, address, dnets)
def function[add_router_references, parameter[self, snet, address, dnets]]: constant[Add/update references to routers.] if name[_debug] begin[:] call[name[NetworkServiceAccessPoint]._debug, parameter[constant[add_router_references %r %r %r], name[snet], name[address], name[dnets]]] if compare[name[snet] <ast.NotIn object at 0x7da2590d7190> name[self].adapters] begin[:] <ast.Raise object at 0x7da2041dafb0> call[name[self].router_info_cache.update_router_info, parameter[name[snet], name[address], name[dnets]]]
keyword[def] identifier[add_router_references] ( identifier[self] , identifier[snet] , identifier[address] , identifier[dnets] ): literal[string] keyword[if] identifier[_debug] : identifier[NetworkServiceAccessPoint] . identifier[_debug] ( literal[string] , identifier[snet] , identifier[address] , identifier[dnets] ) keyword[if] identifier[snet] keyword[not] keyword[in] identifier[self] . identifier[adapters] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[snet] ,)) identifier[self] . identifier[router_info_cache] . identifier[update_router_info] ( identifier[snet] , identifier[address] , identifier[dnets] )
def add_router_references(self, snet, address, dnets): """Add/update references to routers.""" if _debug: NetworkServiceAccessPoint._debug('add_router_references %r %r %r', snet, address, dnets) # depends on [control=['if'], data=[]] # see if we have an adapter for the snet if snet not in self.adapters: raise RuntimeError('no adapter for network: %d' % (snet,)) # depends on [control=['if'], data=['snet']] # pass this along to the cache self.router_info_cache.update_router_info(snet, address, dnets)
def next(self): """Move to the next token in the token stream.""" self.current_token = next(self.token_stream, None) if self.current_token is None: self.token_span = self.token_span[1], self.token_span[1] raise self.error('Unexpected end of input') self.token_span = self.current_token.span return self
def function[next, parameter[self]]: constant[Move to the next token in the token stream.] name[self].current_token assign[=] call[name[next], parameter[name[self].token_stream, constant[None]]] if compare[name[self].current_token is constant[None]] begin[:] name[self].token_span assign[=] tuple[[<ast.Subscript object at 0x7da18c4ceb60>, <ast.Subscript object at 0x7da18c4cef50>]] <ast.Raise object at 0x7da18c4cde70> name[self].token_span assign[=] name[self].current_token.span return[name[self]]
keyword[def] identifier[next] ( identifier[self] ): literal[string] identifier[self] . identifier[current_token] = identifier[next] ( identifier[self] . identifier[token_stream] , keyword[None] ) keyword[if] identifier[self] . identifier[current_token] keyword[is] keyword[None] : identifier[self] . identifier[token_span] = identifier[self] . identifier[token_span] [ literal[int] ], identifier[self] . identifier[token_span] [ literal[int] ] keyword[raise] identifier[self] . identifier[error] ( literal[string] ) identifier[self] . identifier[token_span] = identifier[self] . identifier[current_token] . identifier[span] keyword[return] identifier[self]
def next(self): """Move to the next token in the token stream.""" self.current_token = next(self.token_stream, None) if self.current_token is None: self.token_span = (self.token_span[1], self.token_span[1]) raise self.error('Unexpected end of input') # depends on [control=['if'], data=[]] self.token_span = self.current_token.span return self
def filter_missing_rna(s2bins, bins2s, rna_cov): """ remove any bins that don't have 16S """ for bin, scaffolds in list(bins2s.items()): c = 0 for s in scaffolds: if s in rna_cov: c += 1 if c == 0: del bins2s[bin] for scaffold, bin in list(s2bins.items()): if bin not in bins2s: del s2bins[scaffold] return s2bins, bins2s
def function[filter_missing_rna, parameter[s2bins, bins2s, rna_cov]]: constant[ remove any bins that don't have 16S ] for taget[tuple[[<ast.Name object at 0x7da18dc9a9e0>, <ast.Name object at 0x7da18dc982b0>]]] in starred[call[name[list], parameter[call[name[bins2s].items, parameter[]]]]] begin[:] variable[c] assign[=] constant[0] for taget[name[s]] in starred[name[scaffolds]] begin[:] if compare[name[s] in name[rna_cov]] begin[:] <ast.AugAssign object at 0x7da18dc99510> if compare[name[c] equal[==] constant[0]] begin[:] <ast.Delete object at 0x7da18dc9bd60> for taget[tuple[[<ast.Name object at 0x7da18dc99930>, <ast.Name object at 0x7da18dc9b070>]]] in starred[call[name[list], parameter[call[name[s2bins].items, parameter[]]]]] begin[:] if compare[name[bin] <ast.NotIn object at 0x7da2590d7190> name[bins2s]] begin[:] <ast.Delete object at 0x7da18dc999f0> return[tuple[[<ast.Name object at 0x7da18dc98970>, <ast.Name object at 0x7da18dc9ae00>]]]
keyword[def] identifier[filter_missing_rna] ( identifier[s2bins] , identifier[bins2s] , identifier[rna_cov] ): literal[string] keyword[for] identifier[bin] , identifier[scaffolds] keyword[in] identifier[list] ( identifier[bins2s] . identifier[items] ()): identifier[c] = literal[int] keyword[for] identifier[s] keyword[in] identifier[scaffolds] : keyword[if] identifier[s] keyword[in] identifier[rna_cov] : identifier[c] += literal[int] keyword[if] identifier[c] == literal[int] : keyword[del] identifier[bins2s] [ identifier[bin] ] keyword[for] identifier[scaffold] , identifier[bin] keyword[in] identifier[list] ( identifier[s2bins] . identifier[items] ()): keyword[if] identifier[bin] keyword[not] keyword[in] identifier[bins2s] : keyword[del] identifier[s2bins] [ identifier[scaffold] ] keyword[return] identifier[s2bins] , identifier[bins2s]
def filter_missing_rna(s2bins, bins2s, rna_cov): """ remove any bins that don't have 16S """ for (bin, scaffolds) in list(bins2s.items()): c = 0 for s in scaffolds: if s in rna_cov: c += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] if c == 0: del bins2s[bin] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] for (scaffold, bin) in list(s2bins.items()): if bin not in bins2s: del s2bins[scaffold] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return (s2bins, bins2s)
def prefetch_urls(self, urls): """ 预取文件列表,文档 http://developer.qiniu.com/article/fusion/api/prefetch.html Args: urls: 待预取的文件外链列表 Returns: 一个dict变量和一个ResponseInfo对象 参考代码 examples/cdn_manager.py """ req = {} req.update({"urls": urls}) body = json.dumps(req) url = '{0}/v2/tune/prefetch'.format(self.server) return self.__post(url, body)
def function[prefetch_urls, parameter[self, urls]]: constant[ 预取文件列表,文档 http://developer.qiniu.com/article/fusion/api/prefetch.html Args: urls: 待预取的文件外链列表 Returns: 一个dict变量和一个ResponseInfo对象 参考代码 examples/cdn_manager.py ] variable[req] assign[=] dictionary[[], []] call[name[req].update, parameter[dictionary[[<ast.Constant object at 0x7da207f9ba60>], [<ast.Name object at 0x7da207f9ad70>]]]] variable[body] assign[=] call[name[json].dumps, parameter[name[req]]] variable[url] assign[=] call[constant[{0}/v2/tune/prefetch].format, parameter[name[self].server]] return[call[name[self].__post, parameter[name[url], name[body]]]]
keyword[def] identifier[prefetch_urls] ( identifier[self] , identifier[urls] ): literal[string] identifier[req] ={} identifier[req] . identifier[update] ({ literal[string] : identifier[urls] }) identifier[body] = identifier[json] . identifier[dumps] ( identifier[req] ) identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[server] ) keyword[return] identifier[self] . identifier[__post] ( identifier[url] , identifier[body] )
def prefetch_urls(self, urls): """ 预取文件列表,文档 http://developer.qiniu.com/article/fusion/api/prefetch.html Args: urls: 待预取的文件外链列表 Returns: 一个dict变量和一个ResponseInfo对象 参考代码 examples/cdn_manager.py """ req = {} req.update({'urls': urls}) body = json.dumps(req) url = '{0}/v2/tune/prefetch'.format(self.server) return self.__post(url, body)
def expand_node_predecessors(universe, graph, node: BaseEntity) -> None: """Expand around the predecessors of the given node in the result graph. :param pybel.BELGraph universe: The graph containing the stuff to add :param pybel.BELGraph graph: The graph to add stuff to :param node: A BEL node """ skip_successors = set() for successor in universe.successors(node): if successor in graph: skip_successors.add(successor) continue graph.add_node(successor, **universe.nodes[successor]) graph.add_edges_from( (source, successor, key, data) for source, successor, key, data in universe.out_edges(node, data=True, keys=True) if successor not in skip_successors ) update_node_helper(universe, graph) update_metadata(universe, graph)
def function[expand_node_predecessors, parameter[universe, graph, node]]: constant[Expand around the predecessors of the given node in the result graph. :param pybel.BELGraph universe: The graph containing the stuff to add :param pybel.BELGraph graph: The graph to add stuff to :param node: A BEL node ] variable[skip_successors] assign[=] call[name[set], parameter[]] for taget[name[successor]] in starred[call[name[universe].successors, parameter[name[node]]]] begin[:] if compare[name[successor] in name[graph]] begin[:] call[name[skip_successors].add, parameter[name[successor]]] continue call[name[graph].add_node, parameter[name[successor]]] call[name[graph].add_edges_from, parameter[<ast.GeneratorExp object at 0x7da1b0e44430>]] call[name[update_node_helper], parameter[name[universe], name[graph]]] call[name[update_metadata], parameter[name[universe], name[graph]]]
keyword[def] identifier[expand_node_predecessors] ( identifier[universe] , identifier[graph] , identifier[node] : identifier[BaseEntity] )-> keyword[None] : literal[string] identifier[skip_successors] = identifier[set] () keyword[for] identifier[successor] keyword[in] identifier[universe] . identifier[successors] ( identifier[node] ): keyword[if] identifier[successor] keyword[in] identifier[graph] : identifier[skip_successors] . identifier[add] ( identifier[successor] ) keyword[continue] identifier[graph] . identifier[add_node] ( identifier[successor] ,** identifier[universe] . identifier[nodes] [ identifier[successor] ]) identifier[graph] . identifier[add_edges_from] ( ( identifier[source] , identifier[successor] , identifier[key] , identifier[data] ) keyword[for] identifier[source] , identifier[successor] , identifier[key] , identifier[data] keyword[in] identifier[universe] . identifier[out_edges] ( identifier[node] , identifier[data] = keyword[True] , identifier[keys] = keyword[True] ) keyword[if] identifier[successor] keyword[not] keyword[in] identifier[skip_successors] ) identifier[update_node_helper] ( identifier[universe] , identifier[graph] ) identifier[update_metadata] ( identifier[universe] , identifier[graph] )
def expand_node_predecessors(universe, graph, node: BaseEntity) -> None: """Expand around the predecessors of the given node in the result graph. :param pybel.BELGraph universe: The graph containing the stuff to add :param pybel.BELGraph graph: The graph to add stuff to :param node: A BEL node """ skip_successors = set() for successor in universe.successors(node): if successor in graph: skip_successors.add(successor) continue # depends on [control=['if'], data=['successor']] graph.add_node(successor, **universe.nodes[successor]) # depends on [control=['for'], data=['successor']] graph.add_edges_from(((source, successor, key, data) for (source, successor, key, data) in universe.out_edges(node, data=True, keys=True) if successor not in skip_successors)) update_node_helper(universe, graph) update_metadata(universe, graph)
def select_address_family(host, port): """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on the host and port.""" # disabled due to problems with current ipv6 implementations # and various operating systems. Probably this code also is # not supposed to work, but I can't come up with any other # ways to implement this. # try: # info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, # socket.SOCK_STREAM, 0, # socket.AI_PASSIVE) # if info: # return info[0][0] # except socket.gaierror: # pass if host.startswith("unix://"): return socket.AF_UNIX elif ":" in host and hasattr(socket, "AF_INET6"): return socket.AF_INET6 return socket.AF_INET
def function[select_address_family, parameter[host, port]]: constant[Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on the host and port.] if call[name[host].startswith, parameter[constant[unix://]]] begin[:] return[name[socket].AF_UNIX] return[name[socket].AF_INET]
keyword[def] identifier[select_address_family] ( identifier[host] , identifier[port] ): literal[string] keyword[if] identifier[host] . identifier[startswith] ( literal[string] ): keyword[return] identifier[socket] . identifier[AF_UNIX] keyword[elif] literal[string] keyword[in] identifier[host] keyword[and] identifier[hasattr] ( identifier[socket] , literal[string] ): keyword[return] identifier[socket] . identifier[AF_INET6] keyword[return] identifier[socket] . identifier[AF_INET]
def select_address_family(host, port): """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on the host and port.""" # disabled due to problems with current ipv6 implementations # and various operating systems. Probably this code also is # not supposed to work, but I can't come up with any other # ways to implement this. # try: # info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, # socket.SOCK_STREAM, 0, # socket.AI_PASSIVE) # if info: # return info[0][0] # except socket.gaierror: # pass if host.startswith('unix://'): return socket.AF_UNIX # depends on [control=['if'], data=[]] elif ':' in host and hasattr(socket, 'AF_INET6'): return socket.AF_INET6 # depends on [control=['if'], data=[]] return socket.AF_INET
def store_inputs(self, line_num, source, source_raw=None): """Store source and raw input in history and create input cache variables _i*. Parameters ---------- line_num : int The prompt number of this input. source : str Python input. source_raw : str, optional If given, this is the raw input without any IPython transformations applied to it. If not given, ``source`` is used. """ if source_raw is None: source_raw = source source = source.rstrip('\n') source_raw = source_raw.rstrip('\n') # do not store exit/quit commands if self._exit_re.match(source_raw.strip()): return self.input_hist_parsed.append(source) self.input_hist_raw.append(source_raw) with self.db_input_cache_lock: self.db_input_cache.append((line_num, source, source_raw)) # Trigger to flush cache and write to DB. if len(self.db_input_cache) >= self.db_cache_size: self.save_flag.set() # update the auto _i variables self._iii = self._ii self._ii = self._i self._i = self._i00 self._i00 = source_raw # hackish access to user namespace to create _i1,_i2... dynamically new_i = '_i%s' % line_num to_main = {'_i': self._i, '_ii': self._ii, '_iii': self._iii, new_i : self._i00 } self.shell.push(to_main, interactive=False)
def function[store_inputs, parameter[self, line_num, source, source_raw]]: constant[Store source and raw input in history and create input cache variables _i*. Parameters ---------- line_num : int The prompt number of this input. source : str Python input. source_raw : str, optional If given, this is the raw input without any IPython transformations applied to it. If not given, ``source`` is used. ] if compare[name[source_raw] is constant[None]] begin[:] variable[source_raw] assign[=] name[source] variable[source] assign[=] call[name[source].rstrip, parameter[constant[ ]]] variable[source_raw] assign[=] call[name[source_raw].rstrip, parameter[constant[ ]]] if call[name[self]._exit_re.match, parameter[call[name[source_raw].strip, parameter[]]]] begin[:] return[None] call[name[self].input_hist_parsed.append, parameter[name[source]]] call[name[self].input_hist_raw.append, parameter[name[source_raw]]] with name[self].db_input_cache_lock begin[:] call[name[self].db_input_cache.append, parameter[tuple[[<ast.Name object at 0x7da2047eb880>, <ast.Name object at 0x7da2047e9ba0>, <ast.Name object at 0x7da2047e80a0>]]]] if compare[call[name[len], parameter[name[self].db_input_cache]] greater_or_equal[>=] name[self].db_cache_size] begin[:] call[name[self].save_flag.set, parameter[]] name[self]._iii assign[=] name[self]._ii name[self]._ii assign[=] name[self]._i name[self]._i assign[=] name[self]._i00 name[self]._i00 assign[=] name[source_raw] variable[new_i] assign[=] binary_operation[constant[_i%s] <ast.Mod object at 0x7da2590d6920> name[line_num]] variable[to_main] assign[=] dictionary[[<ast.Constant object at 0x7da20c9912d0>, <ast.Constant object at 0x7da20c9917e0>, <ast.Constant object at 0x7da20c990fa0>, <ast.Name object at 0x7da20c991930>], [<ast.Attribute object at 0x7da20c990cd0>, <ast.Attribute object at 0x7da20c993250>, <ast.Attribute object at 0x7da20c992ef0>, <ast.Attribute object at 0x7da20c9939d0>]] call[name[self].shell.push, parameter[name[to_main]]]
keyword[def] identifier[store_inputs] ( identifier[self] , identifier[line_num] , identifier[source] , identifier[source_raw] = keyword[None] ): literal[string] keyword[if] identifier[source_raw] keyword[is] keyword[None] : identifier[source_raw] = identifier[source] identifier[source] = identifier[source] . identifier[rstrip] ( literal[string] ) identifier[source_raw] = identifier[source_raw] . identifier[rstrip] ( literal[string] ) keyword[if] identifier[self] . identifier[_exit_re] . identifier[match] ( identifier[source_raw] . identifier[strip] ()): keyword[return] identifier[self] . identifier[input_hist_parsed] . identifier[append] ( identifier[source] ) identifier[self] . identifier[input_hist_raw] . identifier[append] ( identifier[source_raw] ) keyword[with] identifier[self] . identifier[db_input_cache_lock] : identifier[self] . identifier[db_input_cache] . identifier[append] (( identifier[line_num] , identifier[source] , identifier[source_raw] )) keyword[if] identifier[len] ( identifier[self] . identifier[db_input_cache] )>= identifier[self] . identifier[db_cache_size] : identifier[self] . identifier[save_flag] . identifier[set] () identifier[self] . identifier[_iii] = identifier[self] . identifier[_ii] identifier[self] . identifier[_ii] = identifier[self] . identifier[_i] identifier[self] . identifier[_i] = identifier[self] . identifier[_i00] identifier[self] . identifier[_i00] = identifier[source_raw] identifier[new_i] = literal[string] % identifier[line_num] identifier[to_main] ={ literal[string] : identifier[self] . identifier[_i] , literal[string] : identifier[self] . identifier[_ii] , literal[string] : identifier[self] . identifier[_iii] , identifier[new_i] : identifier[self] . identifier[_i00] } identifier[self] . identifier[shell] . identifier[push] ( identifier[to_main] , identifier[interactive] = keyword[False] )
def store_inputs(self, line_num, source, source_raw=None): """Store source and raw input in history and create input cache variables _i*. Parameters ---------- line_num : int The prompt number of this input. source : str Python input. source_raw : str, optional If given, this is the raw input without any IPython transformations applied to it. If not given, ``source`` is used. """ if source_raw is None: source_raw = source # depends on [control=['if'], data=['source_raw']] source = source.rstrip('\n') source_raw = source_raw.rstrip('\n') # do not store exit/quit commands if self._exit_re.match(source_raw.strip()): return # depends on [control=['if'], data=[]] self.input_hist_parsed.append(source) self.input_hist_raw.append(source_raw) with self.db_input_cache_lock: self.db_input_cache.append((line_num, source, source_raw)) # Trigger to flush cache and write to DB. if len(self.db_input_cache) >= self.db_cache_size: self.save_flag.set() # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] # update the auto _i variables self._iii = self._ii self._ii = self._i self._i = self._i00 self._i00 = source_raw # hackish access to user namespace to create _i1,_i2... dynamically new_i = '_i%s' % line_num to_main = {'_i': self._i, '_ii': self._ii, '_iii': self._iii, new_i: self._i00} self.shell.push(to_main, interactive=False)
def difference(self, other): """difference(x, y) = x(t) - y(t).""" return self.operation(other, lambda x, y: x - y)
def function[difference, parameter[self, other]]: constant[difference(x, y) = x(t) - y(t).] return[call[name[self].operation, parameter[name[other], <ast.Lambda object at 0x7da1b08fb490>]]]
keyword[def] identifier[difference] ( identifier[self] , identifier[other] ): literal[string] keyword[return] identifier[self] . identifier[operation] ( identifier[other] , keyword[lambda] identifier[x] , identifier[y] : identifier[x] - identifier[y] )
def difference(self, other): """difference(x, y) = x(t) - y(t).""" return self.operation(other, lambda x, y: x - y)
def chunk_math(text): """ Parameters ---------- text : string A mathematical context Returns ------- list : A list of single LaTeX entities Examples -------- >>> chunk_math('\sum_i^n i^2') ['\\\\sum', '_', 'i', '^', 'n', ' ', 'i', '^', '2'] >>> chunk_math('\sum_{i}^n i^2') ['\\\\sum', '_', '{', 'i', '}', '^', 'n', ' ', 'i', '^', '2'] >>> chunk_math((r'\\Delta F_0 &= \\sqrt{\\sum_{i=1}^n\\left(' ... r'\\frac{\delta F_0}{\delta x_i}' ... r'\Delta x_i\\right)^2}\\[0.2cm]' ... r'\Delta F_0 &= \sqrt{6.044 \cdot 10^{-6}\\text{m}^2}')) ['\\\\Delta', ' ', 'F', '_', '0', ' ', '&', '=', ' ', '\\\\sqrt', '{', '\\\\sum', '_', '{', 'i', '=', '1', '}', '^', 'n', '\\\\left', '(', '\\\\frac', '{', '\\\\delta', ' ', 'F', '_', '0', '}', '{', '\\\\delta', ' ', 'x', '_', 'i', '}', '\\\\Delta', ' ', 'x', '_', 'i', '\\\\right', ')', '^', '2', '}', '\\\\', '[', '0', '.', '2', 'c', 'm', ']', '\\\\Delta', ' ', 'F', '_', '0', ' ', '&', '=', ' ', '\\\\sqrt', '{', '6', '.', '0', '4', '4', ' ', '\\\\cdot', ' ', '1', '0', '^', '{', '-', '6', '}', '\\\\text', '{', 'm', '}', '^', '2', '}'] >>> chunk_math(r'\\left\\{a\\right\\}') ['\\\\left', '\\\\{', 'a', '\\\\right', '\\\\}'] >>> chunk_math(r'\\sqrt{b^2-4ac}') ['\\\\sqrt', '{', 'b', '^', '2', '-', '4', 'a', 'c', '}'] >>> chunk_math('y^{2}') ['y', '^', '{', '2', '}'] >>> chunk_math(r'2+3\\\\6 5 4') ['2', '+', '3', '\\\\\\\\', '6', ' ', '5', ' ', '4'] """ # Fail when '{' and '}' don't match - be aware of escaped symbols! opened_braces = 0 last_char = '' for char in text: if char == '{' and last_char != '\\': opened_braces += 1 if char == '}' and last_char != '\\': opened_braces -= 1 if opened_braces < 0: raise ValueError("Braces don't match: %s" % text) last_char = char if opened_braces != 0: raise ValueError("%i braces are still open" % opened_braces) # Parse single_symbol = ['_', '^', '&', '{', '}'] breaking_chars = ['\\', ' '] + single_symbol chunks = [] current_chunk = '' for char in text: if current_chunk == '': current_chunk = char continue if char == '\\': if current_chunk == '\\': current_chunk += char chunks.append(current_chunk) current_chunk = '' else: chunks.append(current_chunk) current_chunk = char elif current_chunk == '\\' and char in breaking_chars: # escaped current_chunk += char chunks.append(current_chunk) current_chunk = '' elif char in breaking_chars: chunks.append(current_chunk) current_chunk = char elif char in string.letters+string.digits and current_chunk[0] == '\\': current_chunk += char else: chunks.append(current_chunk) current_chunk = char # Add the last chunk if current_chunk != '': chunks.append(current_chunk) filtered = [] for chunk in chunks: if len(filtered) > 0 and filtered[-1] == ' ' and chunk == ' ': continue filtered.append(chunk) return filtered
def function[chunk_math, parameter[text]]: constant[ Parameters ---------- text : string A mathematical context Returns ------- list : A list of single LaTeX entities Examples -------- >>> chunk_math('\sum_i^n i^2') ['\\sum', '_', 'i', '^', 'n', ' ', 'i', '^', '2'] >>> chunk_math('\sum_{i}^n i^2') ['\\sum', '_', '{', 'i', '}', '^', 'n', ' ', 'i', '^', '2'] >>> chunk_math((r'\Delta F_0 &= \sqrt{\sum_{i=1}^n\left(' ... r'\frac{\delta F_0}{\delta x_i}' ... r'\Delta x_i\right)^2}\[0.2cm]' ... r'\Delta F_0 &= \sqrt{6.044 \cdot 10^{-6}\text{m}^2}')) ['\\Delta', ' ', 'F', '_', '0', ' ', '&', '=', ' ', '\\sqrt', '{', '\\sum', '_', '{', 'i', '=', '1', '}', '^', 'n', '\\left', '(', '\\frac', '{', '\\delta', ' ', 'F', '_', '0', '}', '{', '\\delta', ' ', 'x', '_', 'i', '}', '\\Delta', ' ', 'x', '_', 'i', '\\right', ')', '^', '2', '}', '\\', '[', '0', '.', '2', 'c', 'm', ']', '\\Delta', ' ', 'F', '_', '0', ' ', '&', '=', ' ', '\\sqrt', '{', '6', '.', '0', '4', '4', ' ', '\\cdot', ' ', '1', '0', '^', '{', '-', '6', '}', '\\text', '{', 'm', '}', '^', '2', '}'] >>> chunk_math(r'\left\{a\right\}') ['\\left', '\\{', 'a', '\\right', '\\}'] >>> chunk_math(r'\sqrt{b^2-4ac}') ['\\sqrt', '{', 'b', '^', '2', '-', '4', 'a', 'c', '}'] >>> chunk_math('y^{2}') ['y', '^', '{', '2', '}'] >>> chunk_math(r'2+3\\6 5 4') ['2', '+', '3', '\\\\', '6', ' ', '5', ' ', '4'] ] variable[opened_braces] assign[=] constant[0] variable[last_char] assign[=] constant[] for taget[name[char]] in starred[name[text]] begin[:] if <ast.BoolOp object at 0x7da1b28f0f40> begin[:] <ast.AugAssign object at 0x7da1b28f1a50> if <ast.BoolOp object at 0x7da1b28f2800> begin[:] <ast.AugAssign object at 0x7da1b28f33d0> if compare[name[opened_braces] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b28f1fc0> variable[last_char] assign[=] name[char] if compare[name[opened_braces] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b28f0d00> variable[single_symbol] assign[=] list[[<ast.Constant object at 0x7da1b28f1600>, <ast.Constant object at 0x7da1b28f1390>, <ast.Constant object at 0x7da1b28f3040>, <ast.Constant object at 0x7da1b28f1360>, <ast.Constant object at 0x7da1b28f2740>]] variable[breaking_chars] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b28f3460>, <ast.Constant object at 0x7da1b28f1720>]] + name[single_symbol]] variable[chunks] assign[=] list[[]] variable[current_chunk] assign[=] constant[] for taget[name[char]] in starred[name[text]] begin[:] if compare[name[current_chunk] equal[==] constant[]] begin[:] variable[current_chunk] assign[=] name[char] continue if compare[name[char] equal[==] constant[\]] begin[:] if compare[name[current_chunk] equal[==] constant[\]] begin[:] <ast.AugAssign object at 0x7da1b28ba140> call[name[chunks].append, parameter[name[current_chunk]]] variable[current_chunk] assign[=] constant[] if compare[name[current_chunk] not_equal[!=] constant[]] begin[:] call[name[chunks].append, parameter[name[current_chunk]]] variable[filtered] assign[=] list[[]] for taget[name[chunk]] in starred[name[chunks]] begin[:] if <ast.BoolOp object at 0x7da1b28b8880> begin[:] continue call[name[filtered].append, parameter[name[chunk]]] return[name[filtered]]
keyword[def] identifier[chunk_math] ( identifier[text] ): literal[string] identifier[opened_braces] = literal[int] identifier[last_char] = literal[string] keyword[for] identifier[char] keyword[in] identifier[text] : keyword[if] identifier[char] == literal[string] keyword[and] identifier[last_char] != literal[string] : identifier[opened_braces] += literal[int] keyword[if] identifier[char] == literal[string] keyword[and] identifier[last_char] != literal[string] : identifier[opened_braces] -= literal[int] keyword[if] identifier[opened_braces] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[text] ) identifier[last_char] = identifier[char] keyword[if] identifier[opened_braces] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[opened_braces] ) identifier[single_symbol] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[breaking_chars] =[ literal[string] , literal[string] ]+ identifier[single_symbol] identifier[chunks] =[] identifier[current_chunk] = literal[string] keyword[for] identifier[char] keyword[in] identifier[text] : keyword[if] identifier[current_chunk] == literal[string] : identifier[current_chunk] = identifier[char] keyword[continue] keyword[if] identifier[char] == literal[string] : keyword[if] identifier[current_chunk] == literal[string] : identifier[current_chunk] += identifier[char] identifier[chunks] . identifier[append] ( identifier[current_chunk] ) identifier[current_chunk] = literal[string] keyword[else] : identifier[chunks] . identifier[append] ( identifier[current_chunk] ) identifier[current_chunk] = identifier[char] keyword[elif] identifier[current_chunk] == literal[string] keyword[and] identifier[char] keyword[in] identifier[breaking_chars] : identifier[current_chunk] += identifier[char] identifier[chunks] . identifier[append] ( identifier[current_chunk] ) identifier[current_chunk] = literal[string] keyword[elif] identifier[char] keyword[in] identifier[breaking_chars] : identifier[chunks] . identifier[append] ( identifier[current_chunk] ) identifier[current_chunk] = identifier[char] keyword[elif] identifier[char] keyword[in] identifier[string] . identifier[letters] + identifier[string] . identifier[digits] keyword[and] identifier[current_chunk] [ literal[int] ]== literal[string] : identifier[current_chunk] += identifier[char] keyword[else] : identifier[chunks] . identifier[append] ( identifier[current_chunk] ) identifier[current_chunk] = identifier[char] keyword[if] identifier[current_chunk] != literal[string] : identifier[chunks] . identifier[append] ( identifier[current_chunk] ) identifier[filtered] =[] keyword[for] identifier[chunk] keyword[in] identifier[chunks] : keyword[if] identifier[len] ( identifier[filtered] )> literal[int] keyword[and] identifier[filtered] [- literal[int] ]== literal[string] keyword[and] identifier[chunk] == literal[string] : keyword[continue] identifier[filtered] . identifier[append] ( identifier[chunk] ) keyword[return] identifier[filtered]
def chunk_math(text): """ Parameters ---------- text : string A mathematical context Returns ------- list : A list of single LaTeX entities Examples -------- >>> chunk_math('\\sum_i^n i^2') ['\\\\sum', '_', 'i', '^', 'n', ' ', 'i', '^', '2'] >>> chunk_math('\\sum_{i}^n i^2') ['\\\\sum', '_', '{', 'i', '}', '^', 'n', ' ', 'i', '^', '2'] >>> chunk_math((r'\\Delta F_0 &= \\sqrt{\\sum_{i=1}^n\\left(' ... r'\\frac{\\delta F_0}{\\delta x_i}' ... r'\\Delta x_i\\right)^2}\\[0.2cm]' ... r'\\Delta F_0 &= \\sqrt{6.044 \\cdot 10^{-6}\\text{m}^2}')) ['\\\\Delta', ' ', 'F', '_', '0', ' ', '&', '=', ' ', '\\\\sqrt', '{', '\\\\sum', '_', '{', 'i', '=', '1', '}', '^', 'n', '\\\\left', '(', '\\\\frac', '{', '\\\\delta', ' ', 'F', '_', '0', '}', '{', '\\\\delta', ' ', 'x', '_', 'i', '}', '\\\\Delta', ' ', 'x', '_', 'i', '\\\\right', ')', '^', '2', '}', '\\\\', '[', '0', '.', '2', 'c', 'm', ']', '\\\\Delta', ' ', 'F', '_', '0', ' ', '&', '=', ' ', '\\\\sqrt', '{', '6', '.', '0', '4', '4', ' ', '\\\\cdot', ' ', '1', '0', '^', '{', '-', '6', '}', '\\\\text', '{', 'm', '}', '^', '2', '}'] >>> chunk_math(r'\\left\\{a\\right\\}') ['\\\\left', '\\\\{', 'a', '\\\\right', '\\\\}'] >>> chunk_math(r'\\sqrt{b^2-4ac}') ['\\\\sqrt', '{', 'b', '^', '2', '-', '4', 'a', 'c', '}'] >>> chunk_math('y^{2}') ['y', '^', '{', '2', '}'] >>> chunk_math(r'2+3\\\\6 5 4') ['2', '+', '3', '\\\\\\\\', '6', ' ', '5', ' ', '4'] """ # Fail when '{' and '}' don't match - be aware of escaped symbols! opened_braces = 0 last_char = '' for char in text: if char == '{' and last_char != '\\': opened_braces += 1 # depends on [control=['if'], data=[]] if char == '}' and last_char != '\\': opened_braces -= 1 if opened_braces < 0: raise ValueError("Braces don't match: %s" % text) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] last_char = char # depends on [control=['for'], data=['char']] if opened_braces != 0: raise ValueError('%i braces are still open' % opened_braces) # depends on [control=['if'], data=['opened_braces']] # Parse single_symbol = ['_', '^', '&', '{', '}'] breaking_chars = ['\\', ' '] + single_symbol chunks = [] current_chunk = '' for char in text: if current_chunk == '': current_chunk = char continue # depends on [control=['if'], data=['current_chunk']] if char == '\\': if current_chunk == '\\': current_chunk += char chunks.append(current_chunk) current_chunk = '' # depends on [control=['if'], data=['current_chunk']] else: chunks.append(current_chunk) current_chunk = char # depends on [control=['if'], data=['char']] elif current_chunk == '\\' and char in breaking_chars: # escaped current_chunk += char chunks.append(current_chunk) current_chunk = '' # depends on [control=['if'], data=[]] elif char in breaking_chars: chunks.append(current_chunk) current_chunk = char # depends on [control=['if'], data=['char']] elif char in string.letters + string.digits and current_chunk[0] == '\\': current_chunk += char # depends on [control=['if'], data=[]] else: chunks.append(current_chunk) current_chunk = char # depends on [control=['for'], data=['char']] # Add the last chunk if current_chunk != '': chunks.append(current_chunk) # depends on [control=['if'], data=['current_chunk']] filtered = [] for chunk in chunks: if len(filtered) > 0 and filtered[-1] == ' ' and (chunk == ' '): continue # depends on [control=['if'], data=[]] filtered.append(chunk) # depends on [control=['for'], data=['chunk']] return filtered
def Unzip(iterable): """Unzips specified iterable of pairs to pair of two iterables. This function is an inversion of the standard `zip` function and the following hold: * ∀ l, r. l, r == unzip(zip(l, r)) * ∀ p. p == zip(unzip(p)) Examples: >>> Unzip([("foo", 1), ("bar", 2), ("baz", 3)]) (["foo", "bar", "baz"], [1, 2, 3]) Args: iterable: An iterable of pairs to unzip. Returns: A pair of iterables after unzipping. """ lefts = [] rights = [] for left, right in iterable: lefts.append(left) rights.append(right) return lefts, rights
def function[Unzip, parameter[iterable]]: constant[Unzips specified iterable of pairs to pair of two iterables. This function is an inversion of the standard `zip` function and the following hold: * ∀ l, r. l, r == unzip(zip(l, r)) * ∀ p. p == zip(unzip(p)) Examples: >>> Unzip([("foo", 1), ("bar", 2), ("baz", 3)]) (["foo", "bar", "baz"], [1, 2, 3]) Args: iterable: An iterable of pairs to unzip. Returns: A pair of iterables after unzipping. ] variable[lefts] assign[=] list[[]] variable[rights] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18fe916c0>, <ast.Name object at 0x7da18fe90940>]]] in starred[name[iterable]] begin[:] call[name[lefts].append, parameter[name[left]]] call[name[rights].append, parameter[name[right]]] return[tuple[[<ast.Name object at 0x7da18fe92740>, <ast.Name object at 0x7da18fe92890>]]]
keyword[def] identifier[Unzip] ( identifier[iterable] ): literal[string] identifier[lefts] =[] identifier[rights] =[] keyword[for] identifier[left] , identifier[right] keyword[in] identifier[iterable] : identifier[lefts] . identifier[append] ( identifier[left] ) identifier[rights] . identifier[append] ( identifier[right] ) keyword[return] identifier[lefts] , identifier[rights]
def Unzip(iterable): """Unzips specified iterable of pairs to pair of two iterables. This function is an inversion of the standard `zip` function and the following hold: * ∀ l, r. l, r == unzip(zip(l, r)) * ∀ p. p == zip(unzip(p)) Examples: >>> Unzip([("foo", 1), ("bar", 2), ("baz", 3)]) (["foo", "bar", "baz"], [1, 2, 3]) Args: iterable: An iterable of pairs to unzip. Returns: A pair of iterables after unzipping. """ lefts = [] rights = [] for (left, right) in iterable: lefts.append(left) rights.append(right) # depends on [control=['for'], data=[]] return (lefts, rights)
def preview(file): """Render appropiate template with embed flag.""" file_info = validate_csv(file) return render_template( 'invenio_previewer/csv_bar.html', file=file, delimiter=file_info['delimiter'], encoding=file_info['encoding'], js_bundles=current_previewer.js_bundles + ['previewer_csv_js'], css_bundles=current_previewer.css_bundles, )
def function[preview, parameter[file]]: constant[Render appropiate template with embed flag.] variable[file_info] assign[=] call[name[validate_csv], parameter[name[file]]] return[call[name[render_template], parameter[constant[invenio_previewer/csv_bar.html]]]]
keyword[def] identifier[preview] ( identifier[file] ): literal[string] identifier[file_info] = identifier[validate_csv] ( identifier[file] ) keyword[return] identifier[render_template] ( literal[string] , identifier[file] = identifier[file] , identifier[delimiter] = identifier[file_info] [ literal[string] ], identifier[encoding] = identifier[file_info] [ literal[string] ], identifier[js_bundles] = identifier[current_previewer] . identifier[js_bundles] +[ literal[string] ], identifier[css_bundles] = identifier[current_previewer] . identifier[css_bundles] , )
def preview(file): """Render appropiate template with embed flag.""" file_info = validate_csv(file) return render_template('invenio_previewer/csv_bar.html', file=file, delimiter=file_info['delimiter'], encoding=file_info['encoding'], js_bundles=current_previewer.js_bundles + ['previewer_csv_js'], css_bundles=current_previewer.css_bundles)
def create_attr_filter(request, mapped_class): """Create an ``and_`` SQLAlchemy filter (a ClauseList object) based on the request params (``queryable``, ``eq``, ``ne``, ...). Arguments: request the request. mapped_class the SQLAlchemy mapped class. """ mapping = { 'eq': '__eq__', 'ne': '__ne__', 'lt': '__lt__', 'lte': '__le__', 'gt': '__gt__', 'gte': '__ge__', 'like': 'like', 'ilike': 'ilike' } filters = [] if 'queryable' in request.params: queryable = request.params['queryable'].split(',') for k in request.params: if len(request.params[k]) <= 0 or '__' not in k: continue col, op = k.split("__") if col not in queryable or op not in mapping: continue column = getattr(mapped_class, col) f = getattr(column, mapping[op])(request.params[k]) filters.append(f) return and_(*filters) if len(filters) > 0 else None
def function[create_attr_filter, parameter[request, mapped_class]]: constant[Create an ``and_`` SQLAlchemy filter (a ClauseList object) based on the request params (``queryable``, ``eq``, ``ne``, ...). Arguments: request the request. mapped_class the SQLAlchemy mapped class. ] variable[mapping] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b72920>, <ast.Constant object at 0x7da1b0b73550>, <ast.Constant object at 0x7da1b0b706d0>, <ast.Constant object at 0x7da1b0b73520>, <ast.Constant object at 0x7da1b0b72a10>, <ast.Constant object at 0x7da1b0b71b10>, <ast.Constant object at 0x7da1b0b712a0>, <ast.Constant object at 0x7da1b0b71870>], [<ast.Constant object at 0x7da1b0b723b0>, <ast.Constant object at 0x7da1b0b70df0>, <ast.Constant object at 0x7da1b0b71a80>, <ast.Constant object at 0x7da1b0b72c50>, <ast.Constant object at 0x7da1b0b72590>, <ast.Constant object at 0x7da1b0b700a0>, <ast.Constant object at 0x7da1b0b72110>, <ast.Constant object at 0x7da1b0b73640>]] variable[filters] assign[=] list[[]] if compare[constant[queryable] in name[request].params] begin[:] variable[queryable] assign[=] call[call[name[request].params][constant[queryable]].split, parameter[constant[,]]] for taget[name[k]] in starred[name[request].params] begin[:] if <ast.BoolOp object at 0x7da20c76d390> begin[:] continue <ast.Tuple object at 0x7da20c76d720> assign[=] call[name[k].split, parameter[constant[__]]] if <ast.BoolOp object at 0x7da20c76dd20> begin[:] continue variable[column] assign[=] call[name[getattr], parameter[name[mapped_class], name[col]]] variable[f] assign[=] call[call[name[getattr], parameter[name[column], call[name[mapping]][name[op]]]], parameter[call[name[request].params][name[k]]]] call[name[filters].append, parameter[name[f]]] return[<ast.IfExp object at 0x7da20c76cbb0>]
keyword[def] identifier[create_attr_filter] ( identifier[request] , identifier[mapped_class] ): literal[string] identifier[mapping] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[filters] =[] keyword[if] literal[string] keyword[in] identifier[request] . identifier[params] : identifier[queryable] = identifier[request] . identifier[params] [ literal[string] ]. identifier[split] ( literal[string] ) keyword[for] identifier[k] keyword[in] identifier[request] . identifier[params] : keyword[if] identifier[len] ( identifier[request] . identifier[params] [ identifier[k] ])<= literal[int] keyword[or] literal[string] keyword[not] keyword[in] identifier[k] : keyword[continue] identifier[col] , identifier[op] = identifier[k] . identifier[split] ( literal[string] ) keyword[if] identifier[col] keyword[not] keyword[in] identifier[queryable] keyword[or] identifier[op] keyword[not] keyword[in] identifier[mapping] : keyword[continue] identifier[column] = identifier[getattr] ( identifier[mapped_class] , identifier[col] ) identifier[f] = identifier[getattr] ( identifier[column] , identifier[mapping] [ identifier[op] ])( identifier[request] . identifier[params] [ identifier[k] ]) identifier[filters] . identifier[append] ( identifier[f] ) keyword[return] identifier[and_] (* identifier[filters] ) keyword[if] identifier[len] ( identifier[filters] )> literal[int] keyword[else] keyword[None]
def create_attr_filter(request, mapped_class): """Create an ``and_`` SQLAlchemy filter (a ClauseList object) based on the request params (``queryable``, ``eq``, ``ne``, ...). Arguments: request the request. mapped_class the SQLAlchemy mapped class. """ mapping = {'eq': '__eq__', 'ne': '__ne__', 'lt': '__lt__', 'lte': '__le__', 'gt': '__gt__', 'gte': '__ge__', 'like': 'like', 'ilike': 'ilike'} filters = [] if 'queryable' in request.params: queryable = request.params['queryable'].split(',') for k in request.params: if len(request.params[k]) <= 0 or '__' not in k: continue # depends on [control=['if'], data=[]] (col, op) = k.split('__') if col not in queryable or op not in mapping: continue # depends on [control=['if'], data=[]] column = getattr(mapped_class, col) f = getattr(column, mapping[op])(request.params[k]) filters.append(f) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] return and_(*filters) if len(filters) > 0 else None
def xml(self, xml): """ Defines a XML body value to match. Arguments: xml (str|regex): body XML to match. Returns: self: current Mock instance. """ self._request.xml = xml self.add_matcher(matcher('XMLMatcher', xml))
def function[xml, parameter[self, xml]]: constant[ Defines a XML body value to match. Arguments: xml (str|regex): body XML to match. Returns: self: current Mock instance. ] name[self]._request.xml assign[=] name[xml] call[name[self].add_matcher, parameter[call[name[matcher], parameter[constant[XMLMatcher], name[xml]]]]]
keyword[def] identifier[xml] ( identifier[self] , identifier[xml] ): literal[string] identifier[self] . identifier[_request] . identifier[xml] = identifier[xml] identifier[self] . identifier[add_matcher] ( identifier[matcher] ( literal[string] , identifier[xml] ))
def xml(self, xml): """ Defines a XML body value to match. Arguments: xml (str|regex): body XML to match. Returns: self: current Mock instance. """ self._request.xml = xml self.add_matcher(matcher('XMLMatcher', xml))
def check_path_matches_patterns(path, patterns): ''' Check if the path matches at least one of the provided patterns. ''' path = os.path.abspath(path) for patt in patterns: if isinstance(patt, six.string_types): if path == patt: return True elif patt.search(path): return True return False
def function[check_path_matches_patterns, parameter[path, patterns]]: constant[ Check if the path matches at least one of the provided patterns. ] variable[path] assign[=] call[name[os].path.abspath, parameter[name[path]]] for taget[name[patt]] in starred[name[patterns]] begin[:] if call[name[isinstance], parameter[name[patt], name[six].string_types]] begin[:] if compare[name[path] equal[==] name[patt]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[check_path_matches_patterns] ( identifier[path] , identifier[patterns] ): literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ) keyword[for] identifier[patt] keyword[in] identifier[patterns] : keyword[if] identifier[isinstance] ( identifier[patt] , identifier[six] . identifier[string_types] ): keyword[if] identifier[path] == identifier[patt] : keyword[return] keyword[True] keyword[elif] identifier[patt] . identifier[search] ( identifier[path] ): keyword[return] keyword[True] keyword[return] keyword[False]
def check_path_matches_patterns(path, patterns): """ Check if the path matches at least one of the provided patterns. """ path = os.path.abspath(path) for patt in patterns: if isinstance(patt, six.string_types): if path == patt: return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif patt.search(path): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['patt']] return False
def add_debug_layers_to_canvas(impact_function): """Helper method to add debug layers to QGIS from impact function. :param impact_function: The impact function used. :type impact_function: ImpactFunction """ name = 'DEBUG %s' % impact_function.name root = QgsProject.instance().layerTreeRoot() group_debug = root.insertGroup(0, name) group_debug.setItemVisibilityChecked(False) group_debug.setExpanded(False) hazard_keywords = impact_function.provenance['hazard_keywords'] exposure_keywords = impact_function.provenance['exposure_keywords'] # Let's style the hazard class in each layers. # noinspection PyBroadException try: classification = active_classification( hazard_keywords, exposure_keywords['exposure']) classification = definition(classification) classes = OrderedDict() for f in reversed(classification['classes']): classes[f['key']] = (f['color'], f['name']) hazard_class = hazard_class_field['key'] except BaseException: # We might not have a classification. But this is the debug group so # let's not raise a new exception. classification = None datastore = impact_function.datastore for layer in datastore.layers(): qgis_layer = datastore.layer(layer) if not isinstance(qgis_layer, QgsMapLayer): continue QgsProject.instance().addMapLayer( qgis_layer, False) layer_node = group_debug.insertLayer(0, qgis_layer) layer_node.setItemVisibilityChecked(False) layer_node.setExpanded(False) # Let's style layers which have a geometry and have # hazard_class if qgis_layer.type() == QgsMapLayer.VectorLayer: if qgis_layer.geometryType() not in [ QgsWkbTypes.NullGeometry, QgsWkbTypes.UnknownGeometry ] and classification: # noqa if qgis_layer.keywords['inasafe_fields'].get(hazard_class): hazard_class_style(qgis_layer, classes, True)
def function[add_debug_layers_to_canvas, parameter[impact_function]]: constant[Helper method to add debug layers to QGIS from impact function. :param impact_function: The impact function used. :type impact_function: ImpactFunction ] variable[name] assign[=] binary_operation[constant[DEBUG %s] <ast.Mod object at 0x7da2590d6920> name[impact_function].name] variable[root] assign[=] call[call[name[QgsProject].instance, parameter[]].layerTreeRoot, parameter[]] variable[group_debug] assign[=] call[name[root].insertGroup, parameter[constant[0], name[name]]] call[name[group_debug].setItemVisibilityChecked, parameter[constant[False]]] call[name[group_debug].setExpanded, parameter[constant[False]]] variable[hazard_keywords] assign[=] call[name[impact_function].provenance][constant[hazard_keywords]] variable[exposure_keywords] assign[=] call[name[impact_function].provenance][constant[exposure_keywords]] <ast.Try object at 0x7da1b0c3dc00> variable[datastore] assign[=] name[impact_function].datastore for taget[name[layer]] in starred[call[name[datastore].layers, parameter[]]] begin[:] variable[qgis_layer] assign[=] call[name[datastore].layer, parameter[name[layer]]] if <ast.UnaryOp object at 0x7da1b0cee620> begin[:] continue call[call[name[QgsProject].instance, parameter[]].addMapLayer, parameter[name[qgis_layer], constant[False]]] variable[layer_node] assign[=] call[name[group_debug].insertLayer, parameter[constant[0], name[qgis_layer]]] call[name[layer_node].setItemVisibilityChecked, parameter[constant[False]]] call[name[layer_node].setExpanded, parameter[constant[False]]] if compare[call[name[qgis_layer].type, parameter[]] equal[==] name[QgsMapLayer].VectorLayer] begin[:] if <ast.BoolOp object at 0x7da1b0cec4c0> begin[:] if call[call[name[qgis_layer].keywords][constant[inasafe_fields]].get, parameter[name[hazard_class]]] begin[:] call[name[hazard_class_style], parameter[name[qgis_layer], name[classes], constant[True]]]
keyword[def] identifier[add_debug_layers_to_canvas] ( identifier[impact_function] ): literal[string] identifier[name] = literal[string] % identifier[impact_function] . identifier[name] identifier[root] = identifier[QgsProject] . identifier[instance] (). identifier[layerTreeRoot] () identifier[group_debug] = identifier[root] . identifier[insertGroup] ( literal[int] , identifier[name] ) identifier[group_debug] . identifier[setItemVisibilityChecked] ( keyword[False] ) identifier[group_debug] . identifier[setExpanded] ( keyword[False] ) identifier[hazard_keywords] = identifier[impact_function] . identifier[provenance] [ literal[string] ] identifier[exposure_keywords] = identifier[impact_function] . identifier[provenance] [ literal[string] ] keyword[try] : identifier[classification] = identifier[active_classification] ( identifier[hazard_keywords] , identifier[exposure_keywords] [ literal[string] ]) identifier[classification] = identifier[definition] ( identifier[classification] ) identifier[classes] = identifier[OrderedDict] () keyword[for] identifier[f] keyword[in] identifier[reversed] ( identifier[classification] [ literal[string] ]): identifier[classes] [ identifier[f] [ literal[string] ]]=( identifier[f] [ literal[string] ], identifier[f] [ literal[string] ]) identifier[hazard_class] = identifier[hazard_class_field] [ literal[string] ] keyword[except] identifier[BaseException] : identifier[classification] = keyword[None] identifier[datastore] = identifier[impact_function] . identifier[datastore] keyword[for] identifier[layer] keyword[in] identifier[datastore] . identifier[layers] (): identifier[qgis_layer] = identifier[datastore] . identifier[layer] ( identifier[layer] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[qgis_layer] , identifier[QgsMapLayer] ): keyword[continue] identifier[QgsProject] . identifier[instance] (). identifier[addMapLayer] ( identifier[qgis_layer] , keyword[False] ) identifier[layer_node] = identifier[group_debug] . identifier[insertLayer] ( literal[int] , identifier[qgis_layer] ) identifier[layer_node] . identifier[setItemVisibilityChecked] ( keyword[False] ) identifier[layer_node] . identifier[setExpanded] ( keyword[False] ) keyword[if] identifier[qgis_layer] . identifier[type] ()== identifier[QgsMapLayer] . identifier[VectorLayer] : keyword[if] identifier[qgis_layer] . identifier[geometryType] () keyword[not] keyword[in] [ identifier[QgsWkbTypes] . identifier[NullGeometry] , identifier[QgsWkbTypes] . identifier[UnknownGeometry] ] keyword[and] identifier[classification] : keyword[if] identifier[qgis_layer] . identifier[keywords] [ literal[string] ]. identifier[get] ( identifier[hazard_class] ): identifier[hazard_class_style] ( identifier[qgis_layer] , identifier[classes] , keyword[True] )
def add_debug_layers_to_canvas(impact_function): """Helper method to add debug layers to QGIS from impact function. :param impact_function: The impact function used. :type impact_function: ImpactFunction """ name = 'DEBUG %s' % impact_function.name root = QgsProject.instance().layerTreeRoot() group_debug = root.insertGroup(0, name) group_debug.setItemVisibilityChecked(False) group_debug.setExpanded(False) hazard_keywords = impact_function.provenance['hazard_keywords'] exposure_keywords = impact_function.provenance['exposure_keywords'] # Let's style the hazard class in each layers. # noinspection PyBroadException try: classification = active_classification(hazard_keywords, exposure_keywords['exposure']) classification = definition(classification) classes = OrderedDict() for f in reversed(classification['classes']): classes[f['key']] = (f['color'], f['name']) # depends on [control=['for'], data=['f']] hazard_class = hazard_class_field['key'] # depends on [control=['try'], data=[]] except BaseException: # We might not have a classification. But this is the debug group so # let's not raise a new exception. classification = None # depends on [control=['except'], data=[]] datastore = impact_function.datastore for layer in datastore.layers(): qgis_layer = datastore.layer(layer) if not isinstance(qgis_layer, QgsMapLayer): continue # depends on [control=['if'], data=[]] QgsProject.instance().addMapLayer(qgis_layer, False) layer_node = group_debug.insertLayer(0, qgis_layer) layer_node.setItemVisibilityChecked(False) layer_node.setExpanded(False) # Let's style layers which have a geometry and have # hazard_class if qgis_layer.type() == QgsMapLayer.VectorLayer: if qgis_layer.geometryType() not in [QgsWkbTypes.NullGeometry, QgsWkbTypes.UnknownGeometry] and classification: # noqa if qgis_layer.keywords['inasafe_fields'].get(hazard_class): hazard_class_style(qgis_layer, classes, True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['layer']]
def _StackSummary_extract(frame_gen, limit=None, lookup_lines=True, capture_locals=False): """ Replacement for :func:`StackSummary.extract`. Create a StackSummary from a traceback or stack object. Very simplified copy of the original StackSummary.extract(). We want always to capture locals, that is why we overwrite it. Additionally, we also capture the frame. This is a bit hacky and also not like this is originally intended (to not keep refs). :param frame_gen: A generator that yields (frame, lineno) tuples to include in the stack. :param limit: None to include all frames or the number of frames to include. :param lookup_lines: If True, lookup lines for each frame immediately, otherwise lookup is deferred until the frame is rendered. :param capture_locals: If True, the local variables from each frame will be captured as object representations into the FrameSummary. """ result = StackSummary() for f, lineno in frame_gen: co = f.f_code filename = co.co_filename name = co.co_name result.append(ExtendedFrameSummary( frame=f, filename=filename, lineno=lineno, name=name, lookup_line=False)) return result
def function[_StackSummary_extract, parameter[frame_gen, limit, lookup_lines, capture_locals]]: constant[ Replacement for :func:`StackSummary.extract`. Create a StackSummary from a traceback or stack object. Very simplified copy of the original StackSummary.extract(). We want always to capture locals, that is why we overwrite it. Additionally, we also capture the frame. This is a bit hacky and also not like this is originally intended (to not keep refs). :param frame_gen: A generator that yields (frame, lineno) tuples to include in the stack. :param limit: None to include all frames or the number of frames to include. :param lookup_lines: If True, lookup lines for each frame immediately, otherwise lookup is deferred until the frame is rendered. :param capture_locals: If True, the local variables from each frame will be captured as object representations into the FrameSummary. ] variable[result] assign[=] call[name[StackSummary], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b233cbe0>, <ast.Name object at 0x7da1b233d7b0>]]] in starred[name[frame_gen]] begin[:] variable[co] assign[=] name[f].f_code variable[filename] assign[=] name[co].co_filename variable[name] assign[=] name[co].co_name call[name[result].append, parameter[call[name[ExtendedFrameSummary], parameter[]]]] return[name[result]]
keyword[def] identifier[_StackSummary_extract] ( identifier[frame_gen] , identifier[limit] = keyword[None] , identifier[lookup_lines] = keyword[True] , identifier[capture_locals] = keyword[False] ): literal[string] identifier[result] = identifier[StackSummary] () keyword[for] identifier[f] , identifier[lineno] keyword[in] identifier[frame_gen] : identifier[co] = identifier[f] . identifier[f_code] identifier[filename] = identifier[co] . identifier[co_filename] identifier[name] = identifier[co] . identifier[co_name] identifier[result] . identifier[append] ( identifier[ExtendedFrameSummary] ( identifier[frame] = identifier[f] , identifier[filename] = identifier[filename] , identifier[lineno] = identifier[lineno] , identifier[name] = identifier[name] , identifier[lookup_line] = keyword[False] )) keyword[return] identifier[result]
def _StackSummary_extract(frame_gen, limit=None, lookup_lines=True, capture_locals=False): """ Replacement for :func:`StackSummary.extract`. Create a StackSummary from a traceback or stack object. Very simplified copy of the original StackSummary.extract(). We want always to capture locals, that is why we overwrite it. Additionally, we also capture the frame. This is a bit hacky and also not like this is originally intended (to not keep refs). :param frame_gen: A generator that yields (frame, lineno) tuples to include in the stack. :param limit: None to include all frames or the number of frames to include. :param lookup_lines: If True, lookup lines for each frame immediately, otherwise lookup is deferred until the frame is rendered. :param capture_locals: If True, the local variables from each frame will be captured as object representations into the FrameSummary. """ result = StackSummary() for (f, lineno) in frame_gen: co = f.f_code filename = co.co_filename name = co.co_name result.append(ExtendedFrameSummary(frame=f, filename=filename, lineno=lineno, name=name, lookup_line=False)) # depends on [control=['for'], data=[]] return result
def findPrefix(self, uri, default=None): """ Find the first prefix that has been mapped to a namespace URI. The local mapping is searched, then it walks up the tree until it reaches the top or finds a match. @param uri: A namespace URI. @type uri: basestring @param default: A default prefix when not found. @type default: basestring @return: A mapped prefix. @rtype: basestring """ for item in self.nsprefixes.items(): if item[1] == uri: prefix = item[0] return prefix for item in self.specialprefixes.items(): if item[1] == uri: prefix = item[0] return prefix if self.parent is not None: return self.parent.findPrefix(uri, default) else: return default
def function[findPrefix, parameter[self, uri, default]]: constant[ Find the first prefix that has been mapped to a namespace URI. The local mapping is searched, then it walks up the tree until it reaches the top or finds a match. @param uri: A namespace URI. @type uri: basestring @param default: A default prefix when not found. @type default: basestring @return: A mapped prefix. @rtype: basestring ] for taget[name[item]] in starred[call[name[self].nsprefixes.items, parameter[]]] begin[:] if compare[call[name[item]][constant[1]] equal[==] name[uri]] begin[:] variable[prefix] assign[=] call[name[item]][constant[0]] return[name[prefix]] for taget[name[item]] in starred[call[name[self].specialprefixes.items, parameter[]]] begin[:] if compare[call[name[item]][constant[1]] equal[==] name[uri]] begin[:] variable[prefix] assign[=] call[name[item]][constant[0]] return[name[prefix]] if compare[name[self].parent is_not constant[None]] begin[:] return[call[name[self].parent.findPrefix, parameter[name[uri], name[default]]]]
keyword[def] identifier[findPrefix] ( identifier[self] , identifier[uri] , identifier[default] = keyword[None] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[self] . identifier[nsprefixes] . identifier[items] (): keyword[if] identifier[item] [ literal[int] ]== identifier[uri] : identifier[prefix] = identifier[item] [ literal[int] ] keyword[return] identifier[prefix] keyword[for] identifier[item] keyword[in] identifier[self] . identifier[specialprefixes] . identifier[items] (): keyword[if] identifier[item] [ literal[int] ]== identifier[uri] : identifier[prefix] = identifier[item] [ literal[int] ] keyword[return] identifier[prefix] keyword[if] identifier[self] . identifier[parent] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[parent] . identifier[findPrefix] ( identifier[uri] , identifier[default] ) keyword[else] : keyword[return] identifier[default]
def findPrefix(self, uri, default=None): """ Find the first prefix that has been mapped to a namespace URI. The local mapping is searched, then it walks up the tree until it reaches the top or finds a match. @param uri: A namespace URI. @type uri: basestring @param default: A default prefix when not found. @type default: basestring @return: A mapped prefix. @rtype: basestring """ for item in self.nsprefixes.items(): if item[1] == uri: prefix = item[0] return prefix # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] for item in self.specialprefixes.items(): if item[1] == uri: prefix = item[0] return prefix # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] if self.parent is not None: return self.parent.findPrefix(uri, default) # depends on [control=['if'], data=[]] else: return default
def config_read(): """Read config info from config file.""" config_file = (u"{0}config.ini".format(CONFIG_DIR)) if not os.path.isfile(config_file): config_make(config_file) config = configparser.ConfigParser(allow_no_value=True) try: config.read(config_file, encoding='utf-8') except IOError: print("Error reading config file: {}".format(config_file)) sys.exit() # De-duplicate provider-list providers = config_prov(config) # Read credentials for listed providers (cred, to_remove) = config_cred(config, providers) # remove unsupported and credential-less providers for item in to_remove: providers.remove(item) return cred, providers
def function[config_read, parameter[]]: constant[Read config info from config file.] variable[config_file] assign[=] call[constant[{0}config.ini].format, parameter[name[CONFIG_DIR]]] if <ast.UnaryOp object at 0x7da1b2582080> begin[:] call[name[config_make], parameter[name[config_file]]] variable[config] assign[=] call[name[configparser].ConfigParser, parameter[]] <ast.Try object at 0x7da1b2580820> variable[providers] assign[=] call[name[config_prov], parameter[name[config]]] <ast.Tuple object at 0x7da1b2580c70> assign[=] call[name[config_cred], parameter[name[config], name[providers]]] for taget[name[item]] in starred[name[to_remove]] begin[:] call[name[providers].remove, parameter[name[item]]] return[tuple[[<ast.Name object at 0x7da1b2581150>, <ast.Name object at 0x7da1b2581000>]]]
keyword[def] identifier[config_read] (): literal[string] identifier[config_file] =( literal[string] . identifier[format] ( identifier[CONFIG_DIR] )) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[config_file] ): identifier[config_make] ( identifier[config_file] ) identifier[config] = identifier[configparser] . identifier[ConfigParser] ( identifier[allow_no_value] = keyword[True] ) keyword[try] : identifier[config] . identifier[read] ( identifier[config_file] , identifier[encoding] = literal[string] ) keyword[except] identifier[IOError] : identifier[print] ( literal[string] . identifier[format] ( identifier[config_file] )) identifier[sys] . identifier[exit] () identifier[providers] = identifier[config_prov] ( identifier[config] ) ( identifier[cred] , identifier[to_remove] )= identifier[config_cred] ( identifier[config] , identifier[providers] ) keyword[for] identifier[item] keyword[in] identifier[to_remove] : identifier[providers] . identifier[remove] ( identifier[item] ) keyword[return] identifier[cred] , identifier[providers]
def config_read(): """Read config info from config file.""" config_file = u'{0}config.ini'.format(CONFIG_DIR) if not os.path.isfile(config_file): config_make(config_file) # depends on [control=['if'], data=[]] config = configparser.ConfigParser(allow_no_value=True) try: config.read(config_file, encoding='utf-8') # depends on [control=['try'], data=[]] except IOError: print('Error reading config file: {}'.format(config_file)) sys.exit() # depends on [control=['except'], data=[]] # De-duplicate provider-list providers = config_prov(config) # Read credentials for listed providers (cred, to_remove) = config_cred(config, providers) # remove unsupported and credential-less providers for item in to_remove: providers.remove(item) # depends on [control=['for'], data=['item']] return (cred, providers)
def get_import_for_type(t): """given a type, return a tuple of the (module-path, type_name) or (None, None) if it is a built in.""" t_as_string = to_str(t) if not is_identifier(t_as_string): # this class expanded into something other than a single identifier # we can ignore it. This is the case when we encounter something # like the configman.converter.str_to_classes_in_namespaces # InnerClassList. We can safely ignore these things here. return (None, None) if '.' in t_as_string: parts = t_as_string.split('.') return ('.'.join(parts[:-1]), parts[-1]) else: if t_as_string in known_mapping_str_to_type: return (None, None) return (None, t_as_string)
def function[get_import_for_type, parameter[t]]: constant[given a type, return a tuple of the (module-path, type_name) or (None, None) if it is a built in.] variable[t_as_string] assign[=] call[name[to_str], parameter[name[t]]] if <ast.UnaryOp object at 0x7da18eb571f0> begin[:] return[tuple[[<ast.Constant object at 0x7da18eb54f40>, <ast.Constant object at 0x7da18eb564d0>]]] if compare[constant[.] in name[t_as_string]] begin[:] variable[parts] assign[=] call[name[t_as_string].split, parameter[constant[.]]] return[tuple[[<ast.Call object at 0x7da18eb559c0>, <ast.Subscript object at 0x7da20c6a81f0>]]]
keyword[def] identifier[get_import_for_type] ( identifier[t] ): literal[string] identifier[t_as_string] = identifier[to_str] ( identifier[t] ) keyword[if] keyword[not] identifier[is_identifier] ( identifier[t_as_string] ): keyword[return] ( keyword[None] , keyword[None] ) keyword[if] literal[string] keyword[in] identifier[t_as_string] : identifier[parts] = identifier[t_as_string] . identifier[split] ( literal[string] ) keyword[return] ( literal[string] . identifier[join] ( identifier[parts] [:- literal[int] ]), identifier[parts] [- literal[int] ]) keyword[else] : keyword[if] identifier[t_as_string] keyword[in] identifier[known_mapping_str_to_type] : keyword[return] ( keyword[None] , keyword[None] ) keyword[return] ( keyword[None] , identifier[t_as_string] )
def get_import_for_type(t): """given a type, return a tuple of the (module-path, type_name) or (None, None) if it is a built in.""" t_as_string = to_str(t) if not is_identifier(t_as_string): # this class expanded into something other than a single identifier # we can ignore it. This is the case when we encounter something # like the configman.converter.str_to_classes_in_namespaces # InnerClassList. We can safely ignore these things here. return (None, None) # depends on [control=['if'], data=[]] if '.' in t_as_string: parts = t_as_string.split('.') return ('.'.join(parts[:-1]), parts[-1]) # depends on [control=['if'], data=['t_as_string']] else: if t_as_string in known_mapping_str_to_type: return (None, None) # depends on [control=['if'], data=[]] return (None, t_as_string)
def YuvToRgb(y, u, v): '''Convert the color from YUV coordinates to RGB. Parameters: :y: The Y component value [0...1] :u: The U component value [-0.436...0.436] :v: The V component value [-0.615...0.615] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] >>> '(%g, %g, %g)' % Color.YuvToRgb(0.5925, -0.2916, 0.3575) '(0.999989, 0.500015, -6.3276e-05)' ''' r = y + (v * 1.13983) g = y - (u * 0.39465) - (v * 0.58060) b = y + (u * 2.03211) return (r, g, b)
def function[YuvToRgb, parameter[y, u, v]]: constant[Convert the color from YUV coordinates to RGB. Parameters: :y: The Y component value [0...1] :u: The U component value [-0.436...0.436] :v: The V component value [-0.615...0.615] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] >>> '(%g, %g, %g)' % Color.YuvToRgb(0.5925, -0.2916, 0.3575) '(0.999989, 0.500015, -6.3276e-05)' ] variable[r] assign[=] binary_operation[name[y] + binary_operation[name[v] * constant[1.13983]]] variable[g] assign[=] binary_operation[binary_operation[name[y] - binary_operation[name[u] * constant[0.39465]]] - binary_operation[name[v] * constant[0.5806]]] variable[b] assign[=] binary_operation[name[y] + binary_operation[name[u] * constant[2.03211]]] return[tuple[[<ast.Name object at 0x7da20c7965f0>, <ast.Name object at 0x7da20c7948b0>, <ast.Name object at 0x7da20c795930>]]]
keyword[def] identifier[YuvToRgb] ( identifier[y] , identifier[u] , identifier[v] ): literal[string] identifier[r] = identifier[y] +( identifier[v] * literal[int] ) identifier[g] = identifier[y] -( identifier[u] * literal[int] )-( identifier[v] * literal[int] ) identifier[b] = identifier[y] +( identifier[u] * literal[int] ) keyword[return] ( identifier[r] , identifier[g] , identifier[b] )
def YuvToRgb(y, u, v): """Convert the color from YUV coordinates to RGB. Parameters: :y: The Y component value [0...1] :u: The U component value [-0.436...0.436] :v: The V component value [-0.615...0.615] Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] >>> '(%g, %g, %g)' % Color.YuvToRgb(0.5925, -0.2916, 0.3575) '(0.999989, 0.500015, -6.3276e-05)' """ r = y + v * 1.13983 g = y - u * 0.39465 - v * 0.5806 b = y + u * 2.03211 return (r, g, b)
def scan_uow_candidates(self): """ method performs two actions: - enlist stale or invalid units of work into reprocessing queue - cancel UOWs that are older than 2 days and have been submitted more than 1 hour ago """ try: since = settings.settings['synergy_start_timeperiod'] uow_list = self.uow_dao.get_reprocessing_candidates(since) except LookupError as e: self.logger.info('flow: no UOW candidates found for reprocessing: {0}'.format(e)) return for uow in uow_list: try: if uow.process_name not in self.managed_handlers: self.logger.debug('process {0} is not known to the Synergy Scheduler. Skipping its UOW.' .format(uow.process_name)) continue thread_handler = self.managed_handlers[uow.process_name] assert isinstance(thread_handler, ManagedThreadHandler) if not thread_handler.process_entry.is_on: self.logger.debug('process {0} is inactive. Skipping its UOW.'.format(uow.process_name)) continue entry = PriorityEntry(uow) if entry in self.reprocess_uows[uow.process_name]: # given UOW is already registered in the reprocessing queue continue # ASSUMPTION: UOW is re-created by a state machine during reprocessing # thus - any UOW older 2 days could be marked as STATE_CANCELED if datetime.utcnow() - uow.created_at > timedelta(hours=settings.settings['gc_life_support_hours']): self._cancel_uow(uow) continue # if the UOW has been idle for more than 1 hour - resubmit it if datetime.utcnow() - uow.submitted_at > timedelta(hours=settings.settings['gc_resubmit_after_hours'])\ or uow.is_invalid: # enlist the UOW into the reprocessing queue self.reprocess_uows[uow.process_name].put(entry) except Exception as e: self.logger.error('flow exception: {0}'.format(e), exc_info=True)
def function[scan_uow_candidates, parameter[self]]: constant[ method performs two actions: - enlist stale or invalid units of work into reprocessing queue - cancel UOWs that are older than 2 days and have been submitted more than 1 hour ago ] <ast.Try object at 0x7da207f9b910> for taget[name[uow]] in starred[name[uow_list]] begin[:] <ast.Try object at 0x7da18dc064d0>
keyword[def] identifier[scan_uow_candidates] ( identifier[self] ): literal[string] keyword[try] : identifier[since] = identifier[settings] . identifier[settings] [ literal[string] ] identifier[uow_list] = identifier[self] . identifier[uow_dao] . identifier[get_reprocessing_candidates] ( identifier[since] ) keyword[except] identifier[LookupError] keyword[as] identifier[e] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[e] )) keyword[return] keyword[for] identifier[uow] keyword[in] identifier[uow_list] : keyword[try] : keyword[if] identifier[uow] . identifier[process_name] keyword[not] keyword[in] identifier[self] . identifier[managed_handlers] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[uow] . identifier[process_name] )) keyword[continue] identifier[thread_handler] = identifier[self] . identifier[managed_handlers] [ identifier[uow] . identifier[process_name] ] keyword[assert] identifier[isinstance] ( identifier[thread_handler] , identifier[ManagedThreadHandler] ) keyword[if] keyword[not] identifier[thread_handler] . identifier[process_entry] . identifier[is_on] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[uow] . identifier[process_name] )) keyword[continue] identifier[entry] = identifier[PriorityEntry] ( identifier[uow] ) keyword[if] identifier[entry] keyword[in] identifier[self] . identifier[reprocess_uows] [ identifier[uow] . identifier[process_name] ]: keyword[continue] keyword[if] identifier[datetime] . identifier[utcnow] ()- identifier[uow] . identifier[created_at] > identifier[timedelta] ( identifier[hours] = identifier[settings] . identifier[settings] [ literal[string] ]): identifier[self] . identifier[_cancel_uow] ( identifier[uow] ) keyword[continue] keyword[if] identifier[datetime] . identifier[utcnow] ()- identifier[uow] . identifier[submitted_at] > identifier[timedelta] ( identifier[hours] = identifier[settings] . identifier[settings] [ literal[string] ]) keyword[or] identifier[uow] . identifier[is_invalid] : identifier[self] . identifier[reprocess_uows] [ identifier[uow] . identifier[process_name] ]. identifier[put] ( identifier[entry] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] ), identifier[exc_info] = keyword[True] )
def scan_uow_candidates(self): """ method performs two actions: - enlist stale or invalid units of work into reprocessing queue - cancel UOWs that are older than 2 days and have been submitted more than 1 hour ago """ try: since = settings.settings['synergy_start_timeperiod'] uow_list = self.uow_dao.get_reprocessing_candidates(since) # depends on [control=['try'], data=[]] except LookupError as e: self.logger.info('flow: no UOW candidates found for reprocessing: {0}'.format(e)) return # depends on [control=['except'], data=['e']] for uow in uow_list: try: if uow.process_name not in self.managed_handlers: self.logger.debug('process {0} is not known to the Synergy Scheduler. Skipping its UOW.'.format(uow.process_name)) continue # depends on [control=['if'], data=[]] thread_handler = self.managed_handlers[uow.process_name] assert isinstance(thread_handler, ManagedThreadHandler) if not thread_handler.process_entry.is_on: self.logger.debug('process {0} is inactive. Skipping its UOW.'.format(uow.process_name)) continue # depends on [control=['if'], data=[]] entry = PriorityEntry(uow) if entry in self.reprocess_uows[uow.process_name]: # given UOW is already registered in the reprocessing queue continue # depends on [control=['if'], data=[]] # ASSUMPTION: UOW is re-created by a state machine during reprocessing # thus - any UOW older 2 days could be marked as STATE_CANCELED if datetime.utcnow() - uow.created_at > timedelta(hours=settings.settings['gc_life_support_hours']): self._cancel_uow(uow) continue # depends on [control=['if'], data=[]] # if the UOW has been idle for more than 1 hour - resubmit it if datetime.utcnow() - uow.submitted_at > timedelta(hours=settings.settings['gc_resubmit_after_hours']) or uow.is_invalid: # enlist the UOW into the reprocessing queue self.reprocess_uows[uow.process_name].put(entry) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: self.logger.error('flow exception: {0}'.format(e), exc_info=True) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['uow']]
def unregister_switch_address(addr): """ Unregister the given switch address. Unregisters the given switch address to let ryu.controller.controller.OpenFlowController stop trying to initiate connection to switch. :param addr: A tuple of (host, port) pair of switch. """ ofp_handler = app_manager.lookup_service_brick(ofp_event.NAME) # Do nothing if ofp_handler is not started yet if ofp_handler.controller is None: return ofp_handler.controller.stop_client_loop(addr)
def function[unregister_switch_address, parameter[addr]]: constant[ Unregister the given switch address. Unregisters the given switch address to let ryu.controller.controller.OpenFlowController stop trying to initiate connection to switch. :param addr: A tuple of (host, port) pair of switch. ] variable[ofp_handler] assign[=] call[name[app_manager].lookup_service_brick, parameter[name[ofp_event].NAME]] if compare[name[ofp_handler].controller is constant[None]] begin[:] return[None] call[name[ofp_handler].controller.stop_client_loop, parameter[name[addr]]]
keyword[def] identifier[unregister_switch_address] ( identifier[addr] ): literal[string] identifier[ofp_handler] = identifier[app_manager] . identifier[lookup_service_brick] ( identifier[ofp_event] . identifier[NAME] ) keyword[if] identifier[ofp_handler] . identifier[controller] keyword[is] keyword[None] : keyword[return] identifier[ofp_handler] . identifier[controller] . identifier[stop_client_loop] ( identifier[addr] )
def unregister_switch_address(addr): """ Unregister the given switch address. Unregisters the given switch address to let ryu.controller.controller.OpenFlowController stop trying to initiate connection to switch. :param addr: A tuple of (host, port) pair of switch. """ ofp_handler = app_manager.lookup_service_brick(ofp_event.NAME) # Do nothing if ofp_handler is not started yet if ofp_handler.controller is None: return # depends on [control=['if'], data=[]] ofp_handler.controller.stop_client_loop(addr)
def address(self): """The full proxied address to this page""" path = urlsplit(self.target).path suffix = '/' if not path or path.endswith('/') else '' return '%s%s/%s%s' % (self._ui_address[:-1], self._proxy_prefix, self.route, suffix)
def function[address, parameter[self]]: constant[The full proxied address to this page] variable[path] assign[=] call[name[urlsplit], parameter[name[self].target]].path variable[suffix] assign[=] <ast.IfExp object at 0x7da1b0717940> return[binary_operation[constant[%s%s/%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b07622c0>, <ast.Attribute object at 0x7da1b07621d0>, <ast.Attribute object at 0x7da1b0760c40>, <ast.Name object at 0x7da1b0761210>]]]]
keyword[def] identifier[address] ( identifier[self] ): literal[string] identifier[path] = identifier[urlsplit] ( identifier[self] . identifier[target] ). identifier[path] identifier[suffix] = literal[string] keyword[if] keyword[not] identifier[path] keyword[or] identifier[path] . identifier[endswith] ( literal[string] ) keyword[else] literal[string] keyword[return] literal[string] %( identifier[self] . identifier[_ui_address] [:- literal[int] ], identifier[self] . identifier[_proxy_prefix] , identifier[self] . identifier[route] , identifier[suffix] )
def address(self): """The full proxied address to this page""" path = urlsplit(self.target).path suffix = '/' if not path or path.endswith('/') else '' return '%s%s/%s%s' % (self._ui_address[:-1], self._proxy_prefix, self.route, suffix)
def todmsstr(origin): """ Convert [+/-]DDD.DDDDD to [+/-]DDD°MMM'DDD.DDDDD" """ degrees, minutes, seconds = todms(origin) return u'%d°%d\'%f"' % (degrees, minutes, seconds)
def function[todmsstr, parameter[origin]]: constant[ Convert [+/-]DDD.DDDDD to [+/-]DDD°MMM'DDD.DDDDD" ] <ast.Tuple object at 0x7da1b0edaaa0> assign[=] call[name[todms], parameter[name[origin]]] return[binary_operation[constant[%d°%d'%f"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0ef7760>, <ast.Name object at 0x7da1b0ef78e0>, <ast.Name object at 0x7da1b0ef5b10>]]]]
keyword[def] identifier[todmsstr] ( identifier[origin] ): literal[string] identifier[degrees] , identifier[minutes] , identifier[seconds] = identifier[todms] ( identifier[origin] ) keyword[return] literal[string] %( identifier[degrees] , identifier[minutes] , identifier[seconds] )
def todmsstr(origin): """ Convert [+/-]DDD.DDDDD to [+/-]DDD°MMM'DDD.DDDDD" """ (degrees, minutes, seconds) = todms(origin) return u'%d°%d\'%f"' % (degrees, minutes, seconds)
def ensure_string_list(self, option): r"""Ensure that 'option' is a list of strings. If 'option' is currently a string, we split it either on /,\s*/ or /\s+/, so "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become ["foo", "bar", "baz"]. """ val = getattr(self, option) if val is None: return elif isinstance(val, string_types): setattr(self, option, re.split(r',\s*|\s+', val)) else: if isinstance(val, list): ok = all(isinstance(v, string_types) for v in val) else: ok = False if not ok: raise DistutilsOptionError( "'%s' must be a list of strings (got %r)" % (option, val))
def function[ensure_string_list, parameter[self, option]]: constant[Ensure that 'option' is a list of strings. If 'option' is currently a string, we split it either on /,\s*/ or /\s+/, so "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become ["foo", "bar", "baz"]. ] variable[val] assign[=] call[name[getattr], parameter[name[self], name[option]]] if compare[name[val] is constant[None]] begin[:] return[None]
keyword[def] identifier[ensure_string_list] ( identifier[self] , identifier[option] ): literal[string] identifier[val] = identifier[getattr] ( identifier[self] , identifier[option] ) keyword[if] identifier[val] keyword[is] keyword[None] : keyword[return] keyword[elif] identifier[isinstance] ( identifier[val] , identifier[string_types] ): identifier[setattr] ( identifier[self] , identifier[option] , identifier[re] . identifier[split] ( literal[string] , identifier[val] )) keyword[else] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[list] ): identifier[ok] = identifier[all] ( identifier[isinstance] ( identifier[v] , identifier[string_types] ) keyword[for] identifier[v] keyword[in] identifier[val] ) keyword[else] : identifier[ok] = keyword[False] keyword[if] keyword[not] identifier[ok] : keyword[raise] identifier[DistutilsOptionError] ( literal[string] %( identifier[option] , identifier[val] ))
def ensure_string_list(self, option): """Ensure that 'option' is a list of strings. If 'option' is currently a string, we split it either on /,\\s*/ or /\\s+/, so "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become ["foo", "bar", "baz"]. """ val = getattr(self, option) if val is None: return # depends on [control=['if'], data=[]] elif isinstance(val, string_types): setattr(self, option, re.split(',\\s*|\\s+', val)) # depends on [control=['if'], data=[]] else: if isinstance(val, list): ok = all((isinstance(v, string_types) for v in val)) # depends on [control=['if'], data=[]] else: ok = False if not ok: raise DistutilsOptionError("'%s' must be a list of strings (got %r)" % (option, val)) # depends on [control=['if'], data=[]]
def exponential(data): """ Creates a segment cost function for a time series with a exponential distribution with changing mean Args: data (:obj:`list` of float): 1D time series data Returns: function: Function with signature (int, int) -> float where the first arg is the starting index, and the second is the last arg. Returns the cost of that segment """ data = np.hstack(([0.0], np.array(data))) cumm = np.cumsum(data) def cost(s, t): """ Cost function for exponential distribution with changing mean Args: start (int): start index end (int): end index Returns: float: Cost, from start to end """ return -1*(t-s) * (np.log(t-s) - np.log(cumm[t] - cumm[s])) return cost
def function[exponential, parameter[data]]: constant[ Creates a segment cost function for a time series with a exponential distribution with changing mean Args: data (:obj:`list` of float): 1D time series data Returns: function: Function with signature (int, int) -> float where the first arg is the starting index, and the second is the last arg. Returns the cost of that segment ] variable[data] assign[=] call[name[np].hstack, parameter[tuple[[<ast.List object at 0x7da18f00cfa0>, <ast.Call object at 0x7da18f00cb20>]]]] variable[cumm] assign[=] call[name[np].cumsum, parameter[name[data]]] def function[cost, parameter[s, t]]: constant[ Cost function for exponential distribution with changing mean Args: start (int): start index end (int): end index Returns: float: Cost, from start to end ] return[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f00cac0> * binary_operation[name[t] - name[s]]] * binary_operation[call[name[np].log, parameter[binary_operation[name[t] - name[s]]]] - call[name[np].log, parameter[binary_operation[call[name[cumm]][name[t]] - call[name[cumm]][name[s]]]]]]]] return[name[cost]]
keyword[def] identifier[exponential] ( identifier[data] ): literal[string] identifier[data] = identifier[np] . identifier[hstack] (([ literal[int] ], identifier[np] . identifier[array] ( identifier[data] ))) identifier[cumm] = identifier[np] . identifier[cumsum] ( identifier[data] ) keyword[def] identifier[cost] ( identifier[s] , identifier[t] ): literal[string] keyword[return] - literal[int] *( identifier[t] - identifier[s] )*( identifier[np] . identifier[log] ( identifier[t] - identifier[s] )- identifier[np] . identifier[log] ( identifier[cumm] [ identifier[t] ]- identifier[cumm] [ identifier[s] ])) keyword[return] identifier[cost]
def exponential(data): """ Creates a segment cost function for a time series with a exponential distribution with changing mean Args: data (:obj:`list` of float): 1D time series data Returns: function: Function with signature (int, int) -> float where the first arg is the starting index, and the second is the last arg. Returns the cost of that segment """ data = np.hstack(([0.0], np.array(data))) cumm = np.cumsum(data) def cost(s, t): """ Cost function for exponential distribution with changing mean Args: start (int): start index end (int): end index Returns: float: Cost, from start to end """ return -1 * (t - s) * (np.log(t - s) - np.log(cumm[t] - cumm[s])) return cost
def _Stichlmair_flood_f_and_jac(inputs, Vl, rhog, rhol, mug, voidage, specific_area, C1, C2, C3, H): '''Internal function which calculates the errors of the two Stichlmair objective functions, and their jacobian. Derived using SymPy on the main flooding function. ''' Vg, dP_irr = float(inputs[0]), float(inputs[1]) x0 = 1.0/H x1 = Vg*Vg x2 = voidage**(-4.65) x3 = specific_area*x2 x4 = Vl*Vl*x3/g x5 = x4**0.333333333333333 x6 = dP_irr*dP_irr x7 = H*H x8 = 1.0/x7 x9 = g*g x10 = 1.0/x9 x11 = rhol*rhol x12 = 1.0/x11 x13 = x5*(20.0*x10*x12*x6*x8 + 1.0) x14 = 0.555*x13 x15 = (voidage/(voidage - x14))**4.65 x16 = 1.0/Vg x17 = 1.0/rhog x18 = voidage - 1.0 x19 = 1.0/x18 x20 = C1*mug*specific_area*x16*x17*x19 x21 = 2.44948974278318*C2 x22 = Vg*rhog/(mug*specific_area) x23 = x21*(-x18*x22)**-0.5 x24 = 6.0*C3 - x20 + x23 x25 = 1.0 - voidage x26 = x14 + x25 x27 = -x19*x26 x28 = 2.0*C1*mug*specific_area*x16*x17/x25 + x21*(x22*x25)**-0.5 x29 = 1.0/x24 x30 = x28*x29 x31 = x27**(-0.166666666666667*x30 + 0.666666666666667) x32 = x11*x7*x9 x33 = 200.0*voidage x34 = 111.0*x13 x35 = x33 - x34 x36 = 1.0/x35 x37 = -x33 + x34 + 200.0 x38 = 1.0/x37 x39 = 2.0*x20 x40 = -4.0*x20 + x23 + x29*(-x23 + x39)*(x23 - x39) x41 = dP_irr*rhog*specific_area*x0*x1*x10*x12*x15*x2*x24*x31 x42 = dP_irr*x10*x12*x4**0.666666666666667*x8 F1, F2, dF1_dVg, dF2_dVg, dF1_dP_irr, dF2_dP_irr = ( -dP_irr*x0 + 0.0208333333333333*rhog*specific_area*x1*x15*x2*x24*x31, x32/x6 - 20646.0*x36*x5 - x38*x5*(2960.0 - 740.0*x28*x29), 0.00173611111111111*Vg*rhog*x15*x3*x31*(144.0*C3 - 12.0*x20 + 18.0*x23 + x40*log(x27)), x0*(430.125*x36*x41*x5 - 15.4166666666667*x38*x41*x5*(x30 - 4.0) - 1.0), -1.85*x16*x29*x40*x5/x26, 3285600.0*x42*(-x30 + 4.0)*x38*x38- 91668240.0*x42*x36*x36 - 2.0*x32/(dP_irr*x6)) return [F1, F2], [[dF1_dVg, dF2_dVg], [dF1_dP_irr, dF2_dP_irr]]
def function[_Stichlmair_flood_f_and_jac, parameter[inputs, Vl, rhog, rhol, mug, voidage, specific_area, C1, C2, C3, H]]: constant[Internal function which calculates the errors of the two Stichlmair objective functions, and their jacobian. Derived using SymPy on the main flooding function. ] <ast.Tuple object at 0x7da20c6a9060> assign[=] tuple[[<ast.Call object at 0x7da20c6a92a0>, <ast.Call object at 0x7da20c6a9750>]] variable[x0] assign[=] binary_operation[constant[1.0] / name[H]] variable[x1] assign[=] binary_operation[name[Vg] * name[Vg]] variable[x2] assign[=] binary_operation[name[voidage] ** <ast.UnaryOp object at 0x7da1b117b880>] variable[x3] assign[=] binary_operation[name[specific_area] * name[x2]] variable[x4] assign[=] binary_operation[binary_operation[binary_operation[name[Vl] * name[Vl]] * name[x3]] / name[g]] variable[x5] assign[=] binary_operation[name[x4] ** constant[0.333333333333333]] variable[x6] assign[=] binary_operation[name[dP_irr] * name[dP_irr]] variable[x7] assign[=] binary_operation[name[H] * name[H]] variable[x8] assign[=] binary_operation[constant[1.0] / name[x7]] variable[x9] assign[=] binary_operation[name[g] * name[g]] variable[x10] assign[=] binary_operation[constant[1.0] / name[x9]] variable[x11] assign[=] binary_operation[name[rhol] * name[rhol]] variable[x12] assign[=] binary_operation[constant[1.0] / name[x11]] variable[x13] assign[=] binary_operation[name[x5] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[20.0] * name[x10]] * name[x12]] * name[x6]] * name[x8]] + constant[1.0]]] variable[x14] assign[=] binary_operation[constant[0.555] * name[x13]] variable[x15] assign[=] binary_operation[binary_operation[name[voidage] / binary_operation[name[voidage] - name[x14]]] ** constant[4.65]] variable[x16] assign[=] binary_operation[constant[1.0] / name[Vg]] variable[x17] assign[=] binary_operation[constant[1.0] / name[rhog]] variable[x18] assign[=] binary_operation[name[voidage] - constant[1.0]] variable[x19] assign[=] binary_operation[constant[1.0] / name[x18]] variable[x20] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[C1] * name[mug]] * name[specific_area]] * name[x16]] * name[x17]] * name[x19]] variable[x21] assign[=] binary_operation[constant[2.44948974278318] * name[C2]] variable[x22] assign[=] binary_operation[binary_operation[name[Vg] * name[rhog]] / binary_operation[name[mug] * name[specific_area]]] variable[x23] assign[=] binary_operation[name[x21] * binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c6ab340> * name[x22]] ** <ast.UnaryOp object at 0x7da20c6a9ea0>]] variable[x24] assign[=] binary_operation[binary_operation[binary_operation[constant[6.0] * name[C3]] - name[x20]] + name[x23]] variable[x25] assign[=] binary_operation[constant[1.0] - name[voidage]] variable[x26] assign[=] binary_operation[name[x14] + name[x25]] variable[x27] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c6a9900> * name[x26]] variable[x28] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[2.0] * name[C1]] * name[mug]] * name[specific_area]] * name[x16]] * name[x17]] / name[x25]] + binary_operation[name[x21] * binary_operation[binary_operation[name[x22] * name[x25]] ** <ast.UnaryOp object at 0x7da20c6abbe0>]]] variable[x29] assign[=] binary_operation[constant[1.0] / name[x24]] variable[x30] assign[=] binary_operation[name[x28] * name[x29]] variable[x31] assign[=] binary_operation[name[x27] ** binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c6ab370> * name[x30]] + constant[0.666666666666667]]] variable[x32] assign[=] binary_operation[binary_operation[name[x11] * name[x7]] * name[x9]] variable[x33] assign[=] binary_operation[constant[200.0] * name[voidage]] variable[x34] assign[=] binary_operation[constant[111.0] * name[x13]] variable[x35] assign[=] binary_operation[name[x33] - name[x34]] variable[x36] assign[=] binary_operation[constant[1.0] / name[x35]] variable[x37] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c6a8fd0> + name[x34]] + constant[200.0]] variable[x38] assign[=] binary_operation[constant[1.0] / name[x37]] variable[x39] assign[=] binary_operation[constant[2.0] * name[x20]] variable[x40] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c6a9e10> * name[x20]] + name[x23]] + binary_operation[binary_operation[name[x29] * binary_operation[<ast.UnaryOp object at 0x7da20c6a8a00> + name[x39]]] * binary_operation[name[x23] - name[x39]]]] variable[x41] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[dP_irr] * name[rhog]] * name[specific_area]] * name[x0]] * name[x1]] * name[x10]] * name[x12]] * name[x15]] * name[x2]] * name[x24]] * name[x31]] variable[x42] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[dP_irr] * name[x10]] * name[x12]] * binary_operation[name[x4] ** constant[0.666666666666667]]] * name[x8]] <ast.Tuple object at 0x7da20c6a85e0> assign[=] tuple[[<ast.BinOp object at 0x7da20c6aafe0>, <ast.BinOp object at 0x7da2044c2890>, <ast.BinOp object at 0x7da2044c2440>, <ast.BinOp object at 0x7da2044c3610>, <ast.BinOp object at 0x7da2044c1210>, <ast.BinOp object at 0x7da2044c3a00>]] return[tuple[[<ast.List object at 0x7da2044c21a0>, <ast.List object at 0x7da2044c2b30>]]]
keyword[def] identifier[_Stichlmair_flood_f_and_jac] ( identifier[inputs] , identifier[Vl] , identifier[rhog] , identifier[rhol] , identifier[mug] , identifier[voidage] , identifier[specific_area] , identifier[C1] , identifier[C2] , identifier[C3] , identifier[H] ): literal[string] identifier[Vg] , identifier[dP_irr] = identifier[float] ( identifier[inputs] [ literal[int] ]), identifier[float] ( identifier[inputs] [ literal[int] ]) identifier[x0] = literal[int] / identifier[H] identifier[x1] = identifier[Vg] * identifier[Vg] identifier[x2] = identifier[voidage] **(- literal[int] ) identifier[x3] = identifier[specific_area] * identifier[x2] identifier[x4] = identifier[Vl] * identifier[Vl] * identifier[x3] / identifier[g] identifier[x5] = identifier[x4] ** literal[int] identifier[x6] = identifier[dP_irr] * identifier[dP_irr] identifier[x7] = identifier[H] * identifier[H] identifier[x8] = literal[int] / identifier[x7] identifier[x9] = identifier[g] * identifier[g] identifier[x10] = literal[int] / identifier[x9] identifier[x11] = identifier[rhol] * identifier[rhol] identifier[x12] = literal[int] / identifier[x11] identifier[x13] = identifier[x5] *( literal[int] * identifier[x10] * identifier[x12] * identifier[x6] * identifier[x8] + literal[int] ) identifier[x14] = literal[int] * identifier[x13] identifier[x15] =( identifier[voidage] /( identifier[voidage] - identifier[x14] ))** literal[int] identifier[x16] = literal[int] / identifier[Vg] identifier[x17] = literal[int] / identifier[rhog] identifier[x18] = identifier[voidage] - literal[int] identifier[x19] = literal[int] / identifier[x18] identifier[x20] = identifier[C1] * identifier[mug] * identifier[specific_area] * identifier[x16] * identifier[x17] * identifier[x19] identifier[x21] = literal[int] * identifier[C2] identifier[x22] = identifier[Vg] * identifier[rhog] /( identifier[mug] * identifier[specific_area] ) identifier[x23] = identifier[x21] *(- identifier[x18] * identifier[x22] )**- literal[int] identifier[x24] = literal[int] * identifier[C3] - identifier[x20] + identifier[x23] identifier[x25] = literal[int] - identifier[voidage] identifier[x26] = identifier[x14] + identifier[x25] identifier[x27] =- identifier[x19] * identifier[x26] identifier[x28] = literal[int] * identifier[C1] * identifier[mug] * identifier[specific_area] * identifier[x16] * identifier[x17] / identifier[x25] + identifier[x21] *( identifier[x22] * identifier[x25] )**- literal[int] identifier[x29] = literal[int] / identifier[x24] identifier[x30] = identifier[x28] * identifier[x29] identifier[x31] = identifier[x27] **(- literal[int] * identifier[x30] + literal[int] ) identifier[x32] = identifier[x11] * identifier[x7] * identifier[x9] identifier[x33] = literal[int] * identifier[voidage] identifier[x34] = literal[int] * identifier[x13] identifier[x35] = identifier[x33] - identifier[x34] identifier[x36] = literal[int] / identifier[x35] identifier[x37] =- identifier[x33] + identifier[x34] + literal[int] identifier[x38] = literal[int] / identifier[x37] identifier[x39] = literal[int] * identifier[x20] identifier[x40] =- literal[int] * identifier[x20] + identifier[x23] + identifier[x29] *(- identifier[x23] + identifier[x39] )*( identifier[x23] - identifier[x39] ) identifier[x41] = identifier[dP_irr] * identifier[rhog] * identifier[specific_area] * identifier[x0] * identifier[x1] * identifier[x10] * identifier[x12] * identifier[x15] * identifier[x2] * identifier[x24] * identifier[x31] identifier[x42] = identifier[dP_irr] * identifier[x10] * identifier[x12] * identifier[x4] ** literal[int] * identifier[x8] identifier[F1] , identifier[F2] , identifier[dF1_dVg] , identifier[dF2_dVg] , identifier[dF1_dP_irr] , identifier[dF2_dP_irr] =( - identifier[dP_irr] * identifier[x0] + literal[int] * identifier[rhog] * identifier[specific_area] * identifier[x1] * identifier[x15] * identifier[x2] * identifier[x24] * identifier[x31] , identifier[x32] / identifier[x6] - literal[int] * identifier[x36] * identifier[x5] - identifier[x38] * identifier[x5] *( literal[int] - literal[int] * identifier[x28] * identifier[x29] ), literal[int] * identifier[Vg] * identifier[rhog] * identifier[x15] * identifier[x3] * identifier[x31] *( literal[int] * identifier[C3] - literal[int] * identifier[x20] + literal[int] * identifier[x23] + identifier[x40] * identifier[log] ( identifier[x27] )), identifier[x0] *( literal[int] * identifier[x36] * identifier[x41] * identifier[x5] - literal[int] * identifier[x38] * identifier[x41] * identifier[x5] *( identifier[x30] - literal[int] )- literal[int] ), - literal[int] * identifier[x16] * identifier[x29] * identifier[x40] * identifier[x5] / identifier[x26] , literal[int] * identifier[x42] *(- identifier[x30] + literal[int] )* identifier[x38] * identifier[x38] - literal[int] * identifier[x42] * identifier[x36] * identifier[x36] - literal[int] * identifier[x32] /( identifier[dP_irr] * identifier[x6] )) keyword[return] [ identifier[F1] , identifier[F2] ],[[ identifier[dF1_dVg] , identifier[dF2_dVg] ],[ identifier[dF1_dP_irr] , identifier[dF2_dP_irr] ]]
def _Stichlmair_flood_f_and_jac(inputs, Vl, rhog, rhol, mug, voidage, specific_area, C1, C2, C3, H): """Internal function which calculates the errors of the two Stichlmair objective functions, and their jacobian. Derived using SymPy on the main flooding function. """ (Vg, dP_irr) = (float(inputs[0]), float(inputs[1])) x0 = 1.0 / H x1 = Vg * Vg x2 = voidage ** (-4.65) x3 = specific_area * x2 x4 = Vl * Vl * x3 / g x5 = x4 ** 0.333333333333333 x6 = dP_irr * dP_irr x7 = H * H x8 = 1.0 / x7 x9 = g * g x10 = 1.0 / x9 x11 = rhol * rhol x12 = 1.0 / x11 x13 = x5 * (20.0 * x10 * x12 * x6 * x8 + 1.0) x14 = 0.555 * x13 x15 = (voidage / (voidage - x14)) ** 4.65 x16 = 1.0 / Vg x17 = 1.0 / rhog x18 = voidage - 1.0 x19 = 1.0 / x18 x20 = C1 * mug * specific_area * x16 * x17 * x19 x21 = 2.44948974278318 * C2 x22 = Vg * rhog / (mug * specific_area) x23 = x21 * (-x18 * x22) ** (-0.5) x24 = 6.0 * C3 - x20 + x23 x25 = 1.0 - voidage x26 = x14 + x25 x27 = -x19 * x26 x28 = 2.0 * C1 * mug * specific_area * x16 * x17 / x25 + x21 * (x22 * x25) ** (-0.5) x29 = 1.0 / x24 x30 = x28 * x29 x31 = x27 ** (-0.166666666666667 * x30 + 0.666666666666667) x32 = x11 * x7 * x9 x33 = 200.0 * voidage x34 = 111.0 * x13 x35 = x33 - x34 x36 = 1.0 / x35 x37 = -x33 + x34 + 200.0 x38 = 1.0 / x37 x39 = 2.0 * x20 x40 = -4.0 * x20 + x23 + x29 * (-x23 + x39) * (x23 - x39) x41 = dP_irr * rhog * specific_area * x0 * x1 * x10 * x12 * x15 * x2 * x24 * x31 x42 = dP_irr * x10 * x12 * x4 ** 0.666666666666667 * x8 (F1, F2, dF1_dVg, dF2_dVg, dF1_dP_irr, dF2_dP_irr) = (-dP_irr * x0 + 0.0208333333333333 * rhog * specific_area * x1 * x15 * x2 * x24 * x31, x32 / x6 - 20646.0 * x36 * x5 - x38 * x5 * (2960.0 - 740.0 * x28 * x29), 0.00173611111111111 * Vg * rhog * x15 * x3 * x31 * (144.0 * C3 - 12.0 * x20 + 18.0 * x23 + x40 * log(x27)), x0 * (430.125 * x36 * x41 * x5 - 15.4166666666667 * x38 * x41 * x5 * (x30 - 4.0) - 1.0), -1.85 * x16 * x29 * x40 * x5 / x26, 3285600.0 * x42 * (-x30 + 4.0) * x38 * x38 - 91668240.0 * x42 * x36 * x36 - 2.0 * x32 / (dP_irr * x6)) return ([F1, F2], [[dF1_dVg, dF2_dVg], [dF1_dP_irr, dF2_dP_irr]])
def add_files(session, imagedir, verbose): """Add files to the ATVS Keystroke database.""" def add_file(session, basename, userid, shotid, sessionid): """Parse a single filename and add it to the list.""" session.add(File(userid, basename, sessionid, shotid)) filenames = os.listdir(imagedir) for filename in filenames: basename, extension = os.path.splitext(filename) if extension == db_file_extension: if verbose>1: print(" Adding file '%s'..." % (basename)) parts = string.split(basename, "_") ctype = parts[0] shotid = int(parts[2]) userid = ctype + '_%d' % int(parts[1]) if parts[0] == "Impostor": sessionid = 3 elif parts[0] == "Genuine" and shotid <= 6: sessionid = 1 elif parts[0] == "Genuine" and shotid > 6: sessionid = 2 shotid = shotid - 6 add_file(session, basename, userid, shotid, sessionid)
def function[add_files, parameter[session, imagedir, verbose]]: constant[Add files to the ATVS Keystroke database.] def function[add_file, parameter[session, basename, userid, shotid, sessionid]]: constant[Parse a single filename and add it to the list.] call[name[session].add, parameter[call[name[File], parameter[name[userid], name[basename], name[sessionid], name[shotid]]]]] variable[filenames] assign[=] call[name[os].listdir, parameter[name[imagedir]]] for taget[name[filename]] in starred[name[filenames]] begin[:] <ast.Tuple object at 0x7da1b1416cb0> assign[=] call[name[os].path.splitext, parameter[name[filename]]] if compare[name[extension] equal[==] name[db_file_extension]] begin[:] if compare[name[verbose] greater[>] constant[1]] begin[:] call[name[print], parameter[binary_operation[constant[ Adding file '%s'...] <ast.Mod object at 0x7da2590d6920> name[basename]]]] variable[parts] assign[=] call[name[string].split, parameter[name[basename], constant[_]]] variable[ctype] assign[=] call[name[parts]][constant[0]] variable[shotid] assign[=] call[name[int], parameter[call[name[parts]][constant[2]]]] variable[userid] assign[=] binary_operation[name[ctype] + binary_operation[constant[_%d] <ast.Mod object at 0x7da2590d6920> call[name[int], parameter[call[name[parts]][constant[1]]]]]] if compare[call[name[parts]][constant[0]] equal[==] constant[Impostor]] begin[:] variable[sessionid] assign[=] constant[3] call[name[add_file], parameter[name[session], name[basename], name[userid], name[shotid], name[sessionid]]]
keyword[def] identifier[add_files] ( identifier[session] , identifier[imagedir] , identifier[verbose] ): literal[string] keyword[def] identifier[add_file] ( identifier[session] , identifier[basename] , identifier[userid] , identifier[shotid] , identifier[sessionid] ): literal[string] identifier[session] . identifier[add] ( identifier[File] ( identifier[userid] , identifier[basename] , identifier[sessionid] , identifier[shotid] )) identifier[filenames] = identifier[os] . identifier[listdir] ( identifier[imagedir] ) keyword[for] identifier[filename] keyword[in] identifier[filenames] : identifier[basename] , identifier[extension] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] ) keyword[if] identifier[extension] == identifier[db_file_extension] : keyword[if] identifier[verbose] > literal[int] : identifier[print] ( literal[string] %( identifier[basename] )) identifier[parts] = identifier[string] . identifier[split] ( identifier[basename] , literal[string] ) identifier[ctype] = identifier[parts] [ literal[int] ] identifier[shotid] = identifier[int] ( identifier[parts] [ literal[int] ]) identifier[userid] = identifier[ctype] + literal[string] % identifier[int] ( identifier[parts] [ literal[int] ]) keyword[if] identifier[parts] [ literal[int] ]== literal[string] : identifier[sessionid] = literal[int] keyword[elif] identifier[parts] [ literal[int] ]== literal[string] keyword[and] identifier[shotid] <= literal[int] : identifier[sessionid] = literal[int] keyword[elif] identifier[parts] [ literal[int] ]== literal[string] keyword[and] identifier[shotid] > literal[int] : identifier[sessionid] = literal[int] identifier[shotid] = identifier[shotid] - literal[int] identifier[add_file] ( identifier[session] , identifier[basename] , identifier[userid] , identifier[shotid] , identifier[sessionid] )
def add_files(session, imagedir, verbose): """Add files to the ATVS Keystroke database.""" def add_file(session, basename, userid, shotid, sessionid): """Parse a single filename and add it to the list.""" session.add(File(userid, basename, sessionid, shotid)) filenames = os.listdir(imagedir) for filename in filenames: (basename, extension) = os.path.splitext(filename) if extension == db_file_extension: if verbose > 1: print(" Adding file '%s'..." % basename) # depends on [control=['if'], data=[]] parts = string.split(basename, '_') ctype = parts[0] shotid = int(parts[2]) userid = ctype + '_%d' % int(parts[1]) if parts[0] == 'Impostor': sessionid = 3 # depends on [control=['if'], data=[]] elif parts[0] == 'Genuine' and shotid <= 6: sessionid = 1 # depends on [control=['if'], data=[]] elif parts[0] == 'Genuine' and shotid > 6: sessionid = 2 shotid = shotid - 6 # depends on [control=['if'], data=[]] add_file(session, basename, userid, shotid, sessionid) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']]
def get_backup_filename(self, path): """Proposes a name for the backup file. Keyword arguments: path -- temporal filename """ head = datetime.utcnow().replace(tzinfo=pytz.UTC).strftime("%Y%m%d%H%M%S") self.backup = "stations/backup/%s.%s" % (head, path) return self.backup
def function[get_backup_filename, parameter[self, path]]: constant[Proposes a name for the backup file. Keyword arguments: path -- temporal filename ] variable[head] assign[=] call[call[call[name[datetime].utcnow, parameter[]].replace, parameter[]].strftime, parameter[constant[%Y%m%d%H%M%S]]] name[self].backup assign[=] binary_operation[constant[stations/backup/%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0852770>, <ast.Name object at 0x7da1b0811b10>]]] return[name[self].backup]
keyword[def] identifier[get_backup_filename] ( identifier[self] , identifier[path] ): literal[string] identifier[head] = identifier[datetime] . identifier[utcnow] (). identifier[replace] ( identifier[tzinfo] = identifier[pytz] . identifier[UTC] ). identifier[strftime] ( literal[string] ) identifier[self] . identifier[backup] = literal[string] %( identifier[head] , identifier[path] ) keyword[return] identifier[self] . identifier[backup]
def get_backup_filename(self, path): """Proposes a name for the backup file. Keyword arguments: path -- temporal filename """ head = datetime.utcnow().replace(tzinfo=pytz.UTC).strftime('%Y%m%d%H%M%S') self.backup = 'stations/backup/%s.%s' % (head, path) return self.backup
def removeTags(dom): """ Remove all tags from `dom` and obtain plaintext representation. Args: dom (str, obj, array): str, HTMLElement instance or array of elements. Returns: str: Plain string without tags. """ # python 2 / 3 shill try: string_type = basestring except NameError: string_type = str # initialize stack with proper value (based on dom parameter) element_stack = None if type(dom) in [list, tuple]: element_stack = dom elif isinstance(dom, HTMLElement): element_stack = dom.childs if dom.isTag() else [dom] elif isinstance(dom, string_type): element_stack = parseString(dom).childs else: element_stack = dom # remove all tags output = "" while element_stack: el = element_stack.pop(0) if not (el.isTag() or el.isComment() or not el.getTagName()): output += el.__str__() if el.childs: element_stack = el.childs + element_stack return output
def function[removeTags, parameter[dom]]: constant[ Remove all tags from `dom` and obtain plaintext representation. Args: dom (str, obj, array): str, HTMLElement instance or array of elements. Returns: str: Plain string without tags. ] <ast.Try object at 0x7da1b179d030> variable[element_stack] assign[=] constant[None] if compare[call[name[type], parameter[name[dom]]] in list[[<ast.Name object at 0x7da1b179d810>, <ast.Name object at 0x7da1b179c3d0>]]] begin[:] variable[element_stack] assign[=] name[dom] variable[output] assign[=] constant[] while name[element_stack] begin[:] variable[el] assign[=] call[name[element_stack].pop, parameter[constant[0]]] if <ast.UnaryOp object at 0x7da1b179c1f0> begin[:] <ast.AugAssign object at 0x7da1b179ce20> if name[el].childs begin[:] variable[element_stack] assign[=] binary_operation[name[el].childs + name[element_stack]] return[name[output]]
keyword[def] identifier[removeTags] ( identifier[dom] ): literal[string] keyword[try] : identifier[string_type] = identifier[basestring] keyword[except] identifier[NameError] : identifier[string_type] = identifier[str] identifier[element_stack] = keyword[None] keyword[if] identifier[type] ( identifier[dom] ) keyword[in] [ identifier[list] , identifier[tuple] ]: identifier[element_stack] = identifier[dom] keyword[elif] identifier[isinstance] ( identifier[dom] , identifier[HTMLElement] ): identifier[element_stack] = identifier[dom] . identifier[childs] keyword[if] identifier[dom] . identifier[isTag] () keyword[else] [ identifier[dom] ] keyword[elif] identifier[isinstance] ( identifier[dom] , identifier[string_type] ): identifier[element_stack] = identifier[parseString] ( identifier[dom] ). identifier[childs] keyword[else] : identifier[element_stack] = identifier[dom] identifier[output] = literal[string] keyword[while] identifier[element_stack] : identifier[el] = identifier[element_stack] . identifier[pop] ( literal[int] ) keyword[if] keyword[not] ( identifier[el] . identifier[isTag] () keyword[or] identifier[el] . identifier[isComment] () keyword[or] keyword[not] identifier[el] . identifier[getTagName] ()): identifier[output] += identifier[el] . identifier[__str__] () keyword[if] identifier[el] . identifier[childs] : identifier[element_stack] = identifier[el] . identifier[childs] + identifier[element_stack] keyword[return] identifier[output]
def removeTags(dom): """ Remove all tags from `dom` and obtain plaintext representation. Args: dom (str, obj, array): str, HTMLElement instance or array of elements. Returns: str: Plain string without tags. """ # python 2 / 3 shill try: string_type = basestring # depends on [control=['try'], data=[]] except NameError: string_type = str # depends on [control=['except'], data=[]] # initialize stack with proper value (based on dom parameter) element_stack = None if type(dom) in [list, tuple]: element_stack = dom # depends on [control=['if'], data=[]] elif isinstance(dom, HTMLElement): element_stack = dom.childs if dom.isTag() else [dom] # depends on [control=['if'], data=[]] elif isinstance(dom, string_type): element_stack = parseString(dom).childs # depends on [control=['if'], data=[]] else: element_stack = dom # remove all tags output = '' while element_stack: el = element_stack.pop(0) if not (el.isTag() or el.isComment() or (not el.getTagName())): output += el.__str__() # depends on [control=['if'], data=[]] if el.childs: element_stack = el.childs + element_stack # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return output
def send(sms_to, sms_body, **kwargs): """ Site: https://www.twilio.com/ API: https://www.twilio.com/docs/api/rest/sending-messages """ headers = { "Content-type": "application/x-www-form-urlencoded", "User-Agent": "DBMail/%s" % get_version(), 'Authorization': 'Basic %s' % b64encode( "%s:%s" % ( settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN )).decode("ascii") } kwargs.update({ 'From': kwargs.pop('sms_from', settings.TWILIO_FROM), 'To': sms_to, 'Body': from_unicode(sms_body) }) http = HTTPSConnection(kwargs.pop("api_url", "api.twilio.com")) http.request( "POST", "/2010-04-01/Accounts/%s/Messages.json" % settings.TWILIO_ACCOUNT_SID, headers=headers, body=urlencode(kwargs)) response = http.getresponse() if response.status != 201: raise TwilioSmsError(response.reason) return loads(response.read()).get('sid')
def function[send, parameter[sms_to, sms_body]]: constant[ Site: https://www.twilio.com/ API: https://www.twilio.com/docs/api/rest/sending-messages ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da2054a5660>, <ast.Constant object at 0x7da2054a52d0>, <ast.Constant object at 0x7da2054a6c20>], [<ast.Constant object at 0x7da2054a4160>, <ast.BinOp object at 0x7da2054a7f70>, <ast.BinOp object at 0x7da2054a5960>]] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da2054a6890>, <ast.Constant object at 0x7da2054a59f0>, <ast.Constant object at 0x7da2054a5fc0>], [<ast.Call object at 0x7da2054a5b70>, <ast.Name object at 0x7da2054a6830>, <ast.Call object at 0x7da2054a7610>]]]] variable[http] assign[=] call[name[HTTPSConnection], parameter[call[name[kwargs].pop, parameter[constant[api_url], constant[api.twilio.com]]]]] call[name[http].request, parameter[constant[POST], binary_operation[constant[/2010-04-01/Accounts/%s/Messages.json] <ast.Mod object at 0x7da2590d6920> name[settings].TWILIO_ACCOUNT_SID]]] variable[response] assign[=] call[name[http].getresponse, parameter[]] if compare[name[response].status not_equal[!=] constant[201]] begin[:] <ast.Raise object at 0x7da2054a7e20> return[call[call[name[loads], parameter[call[name[response].read, parameter[]]]].get, parameter[constant[sid]]]]
keyword[def] identifier[send] ( identifier[sms_to] , identifier[sms_body] ,** identifier[kwargs] ): literal[string] identifier[headers] ={ literal[string] : literal[string] , literal[string] : literal[string] % identifier[get_version] (), literal[string] : literal[string] % identifier[b64encode] ( literal[string] %( identifier[settings] . identifier[TWILIO_ACCOUNT_SID] , identifier[settings] . identifier[TWILIO_AUTH_TOKEN] )). identifier[decode] ( literal[string] ) } identifier[kwargs] . identifier[update] ({ literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] , identifier[settings] . identifier[TWILIO_FROM] ), literal[string] : identifier[sms_to] , literal[string] : identifier[from_unicode] ( identifier[sms_body] ) }) identifier[http] = identifier[HTTPSConnection] ( identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )) identifier[http] . identifier[request] ( literal[string] , literal[string] % identifier[settings] . identifier[TWILIO_ACCOUNT_SID] , identifier[headers] = identifier[headers] , identifier[body] = identifier[urlencode] ( identifier[kwargs] )) identifier[response] = identifier[http] . identifier[getresponse] () keyword[if] identifier[response] . identifier[status] != literal[int] : keyword[raise] identifier[TwilioSmsError] ( identifier[response] . identifier[reason] ) keyword[return] identifier[loads] ( identifier[response] . identifier[read] ()). identifier[get] ( literal[string] )
def send(sms_to, sms_body, **kwargs): """ Site: https://www.twilio.com/ API: https://www.twilio.com/docs/api/rest/sending-messages """ headers = {'Content-type': 'application/x-www-form-urlencoded', 'User-Agent': 'DBMail/%s' % get_version(), 'Authorization': 'Basic %s' % b64encode('%s:%s' % (settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN)).decode('ascii')} kwargs.update({'From': kwargs.pop('sms_from', settings.TWILIO_FROM), 'To': sms_to, 'Body': from_unicode(sms_body)}) http = HTTPSConnection(kwargs.pop('api_url', 'api.twilio.com')) http.request('POST', '/2010-04-01/Accounts/%s/Messages.json' % settings.TWILIO_ACCOUNT_SID, headers=headers, body=urlencode(kwargs)) response = http.getresponse() if response.status != 201: raise TwilioSmsError(response.reason) # depends on [control=['if'], data=[]] return loads(response.read()).get('sid')
def makeAggShkDstn(self): ''' Creates the attributes TranShkAggDstn, PermShkAggDstn, and AggShkDstn. Draws on attributes TranShkAggStd, PermShkAddStd, TranShkAggCount, PermShkAggCount. This version accounts for the Markov macroeconomic state. Parameters ---------- None Returns ------- None ''' TranShkAggDstn = [] PermShkAggDstn = [] AggShkDstn = [] StateCount = self.MrkvArray.shape[0] for i in range(StateCount): TranShkAggDstn.append(approxMeanOneLognormal(sigma=self.TranShkAggStd[i],N=self.TranShkAggCount)) PermShkAggDstn.append(approxMeanOneLognormal(sigma=self.PermShkAggStd[i],N=self.PermShkAggCount)) AggShkDstn.append(combineIndepDstns(PermShkAggDstn[-1],TranShkAggDstn[-1])) self.TranShkAggDstn = TranShkAggDstn self.PermShkAggDstn = PermShkAggDstn self.AggShkDstn = AggShkDstn
def function[makeAggShkDstn, parameter[self]]: constant[ Creates the attributes TranShkAggDstn, PermShkAggDstn, and AggShkDstn. Draws on attributes TranShkAggStd, PermShkAddStd, TranShkAggCount, PermShkAggCount. This version accounts for the Markov macroeconomic state. Parameters ---------- None Returns ------- None ] variable[TranShkAggDstn] assign[=] list[[]] variable[PermShkAggDstn] assign[=] list[[]] variable[AggShkDstn] assign[=] list[[]] variable[StateCount] assign[=] call[name[self].MrkvArray.shape][constant[0]] for taget[name[i]] in starred[call[name[range], parameter[name[StateCount]]]] begin[:] call[name[TranShkAggDstn].append, parameter[call[name[approxMeanOneLognormal], parameter[]]]] call[name[PermShkAggDstn].append, parameter[call[name[approxMeanOneLognormal], parameter[]]]] call[name[AggShkDstn].append, parameter[call[name[combineIndepDstns], parameter[call[name[PermShkAggDstn]][<ast.UnaryOp object at 0x7da2054a53f0>], call[name[TranShkAggDstn]][<ast.UnaryOp object at 0x7da2054a60b0>]]]]] name[self].TranShkAggDstn assign[=] name[TranShkAggDstn] name[self].PermShkAggDstn assign[=] name[PermShkAggDstn] name[self].AggShkDstn assign[=] name[AggShkDstn]
keyword[def] identifier[makeAggShkDstn] ( identifier[self] ): literal[string] identifier[TranShkAggDstn] =[] identifier[PermShkAggDstn] =[] identifier[AggShkDstn] =[] identifier[StateCount] = identifier[self] . identifier[MrkvArray] . identifier[shape] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[StateCount] ): identifier[TranShkAggDstn] . identifier[append] ( identifier[approxMeanOneLognormal] ( identifier[sigma] = identifier[self] . identifier[TranShkAggStd] [ identifier[i] ], identifier[N] = identifier[self] . identifier[TranShkAggCount] )) identifier[PermShkAggDstn] . identifier[append] ( identifier[approxMeanOneLognormal] ( identifier[sigma] = identifier[self] . identifier[PermShkAggStd] [ identifier[i] ], identifier[N] = identifier[self] . identifier[PermShkAggCount] )) identifier[AggShkDstn] . identifier[append] ( identifier[combineIndepDstns] ( identifier[PermShkAggDstn] [- literal[int] ], identifier[TranShkAggDstn] [- literal[int] ])) identifier[self] . identifier[TranShkAggDstn] = identifier[TranShkAggDstn] identifier[self] . identifier[PermShkAggDstn] = identifier[PermShkAggDstn] identifier[self] . identifier[AggShkDstn] = identifier[AggShkDstn]
def makeAggShkDstn(self): """ Creates the attributes TranShkAggDstn, PermShkAggDstn, and AggShkDstn. Draws on attributes TranShkAggStd, PermShkAddStd, TranShkAggCount, PermShkAggCount. This version accounts for the Markov macroeconomic state. Parameters ---------- None Returns ------- None """ TranShkAggDstn = [] PermShkAggDstn = [] AggShkDstn = [] StateCount = self.MrkvArray.shape[0] for i in range(StateCount): TranShkAggDstn.append(approxMeanOneLognormal(sigma=self.TranShkAggStd[i], N=self.TranShkAggCount)) PermShkAggDstn.append(approxMeanOneLognormal(sigma=self.PermShkAggStd[i], N=self.PermShkAggCount)) AggShkDstn.append(combineIndepDstns(PermShkAggDstn[-1], TranShkAggDstn[-1])) # depends on [control=['for'], data=['i']] self.TranShkAggDstn = TranShkAggDstn self.PermShkAggDstn = PermShkAggDstn self.AggShkDstn = AggShkDstn
def parse(cls, url, default_scheme='http', encoding='utf-8'): '''Parse a URL and return a URLInfo.''' if url is None: return None url = url.strip() if frozenset(url) & C0_CONTROL_SET: raise ValueError('URL contains control codes: {}'.format(ascii(url))) scheme, sep, remaining = url.partition(':') if not scheme: raise ValueError('URL missing scheme: {}'.format(ascii(url))) scheme = scheme.lower() if not sep and default_scheme: # Likely something like example.com/mystuff remaining = url scheme = default_scheme elif not sep: raise ValueError('URI missing colon: {}'.format(ascii(url))) if default_scheme and '.' in scheme or scheme == 'localhost': # Maybe something like example.com:8080/mystuff or # maybe localhost:8080/mystuff remaining = '{}:{}'.format(scheme, remaining) scheme = default_scheme info = URLInfo() info.encoding = encoding if scheme not in RELATIVE_SCHEME_DEFAULT_PORTS: info.raw = url info.scheme = scheme info.path = remaining return info if remaining.startswith('//'): remaining = remaining[2:] path_index = remaining.find('/') query_index = remaining.find('?') fragment_index = remaining.find('#') try: index_tuple = (path_index, query_index, fragment_index) authority_index = min(num for num in index_tuple if num >= 0) except ValueError: authority_index = len(remaining) authority = remaining[:authority_index] resource = remaining[authority_index:] try: index_tuple = (query_index, fragment_index) path_index = min(num for num in index_tuple if num >= 0) except ValueError: path_index = len(remaining) path = remaining[authority_index + 1:path_index] or '/' if fragment_index >= 0: query_index = fragment_index else: query_index = len(remaining) query = remaining[path_index + 1:query_index] fragment = remaining[query_index + 1:] userinfo, host = cls.parse_authority(authority) hostname, port = cls.parse_host(host) username, password = cls.parse_userinfo(userinfo) if not hostname: raise ValueError('Hostname is empty: {}'.format(ascii(url))) info.raw = url info.scheme = scheme info.authority = authority info.path = normalize_path(path, encoding=encoding) info.query = normalize_query(query, encoding=encoding) info.fragment = normalize_fragment(fragment, encoding=encoding) info.userinfo = userinfo info.username = percent_decode(username, encoding=encoding) info.password = percent_decode(password, encoding=encoding) info.host = host info.hostname = hostname info.port = port or RELATIVE_SCHEME_DEFAULT_PORTS[scheme] info.resource = resource return info
def function[parse, parameter[cls, url, default_scheme, encoding]]: constant[Parse a URL and return a URLInfo.] if compare[name[url] is constant[None]] begin[:] return[constant[None]] variable[url] assign[=] call[name[url].strip, parameter[]] if binary_operation[call[name[frozenset], parameter[name[url]]] <ast.BitAnd object at 0x7da2590d6b60> name[C0_CONTROL_SET]] begin[:] <ast.Raise object at 0x7da18f722830> <ast.Tuple object at 0x7da18f7204f0> assign[=] call[name[url].partition, parameter[constant[:]]] if <ast.UnaryOp object at 0x7da18f723cd0> begin[:] <ast.Raise object at 0x7da18f722e60> variable[scheme] assign[=] call[name[scheme].lower, parameter[]] if <ast.BoolOp object at 0x7da18f720a30> begin[:] variable[remaining] assign[=] name[url] variable[scheme] assign[=] name[default_scheme] if <ast.BoolOp object at 0x7da18f7217e0> begin[:] variable[remaining] assign[=] call[constant[{}:{}].format, parameter[name[scheme], name[remaining]]] variable[scheme] assign[=] name[default_scheme] variable[info] assign[=] call[name[URLInfo], parameter[]] name[info].encoding assign[=] name[encoding] if compare[name[scheme] <ast.NotIn object at 0x7da2590d7190> name[RELATIVE_SCHEME_DEFAULT_PORTS]] begin[:] name[info].raw assign[=] name[url] name[info].scheme assign[=] name[scheme] name[info].path assign[=] name[remaining] return[name[info]] if call[name[remaining].startswith, parameter[constant[//]]] begin[:] variable[remaining] assign[=] call[name[remaining]][<ast.Slice object at 0x7da20c6a8850>] variable[path_index] assign[=] call[name[remaining].find, parameter[constant[/]]] variable[query_index] assign[=] call[name[remaining].find, parameter[constant[?]]] variable[fragment_index] assign[=] call[name[remaining].find, parameter[constant[#]]] <ast.Try object at 0x7da20e961c00> variable[authority] assign[=] call[name[remaining]][<ast.Slice object at 0x7da18dc04430>] variable[resource] assign[=] call[name[remaining]][<ast.Slice object at 0x7da18dc04400>] <ast.Try object at 0x7da18dc07820> variable[path] assign[=] <ast.BoolOp object at 0x7da2041da860> if compare[name[fragment_index] greater_or_equal[>=] constant[0]] begin[:] variable[query_index] assign[=] name[fragment_index] variable[query] assign[=] call[name[remaining]][<ast.Slice object at 0x7da2041d8490>] variable[fragment] assign[=] call[name[remaining]][<ast.Slice object at 0x7da2041d8040>] <ast.Tuple object at 0x7da2041dae90> assign[=] call[name[cls].parse_authority, parameter[name[authority]]] <ast.Tuple object at 0x7da2041d8be0> assign[=] call[name[cls].parse_host, parameter[name[host]]] <ast.Tuple object at 0x7da2041d91e0> assign[=] call[name[cls].parse_userinfo, parameter[name[userinfo]]] if <ast.UnaryOp object at 0x7da2041da0b0> begin[:] <ast.Raise object at 0x7da2041dabf0> name[info].raw assign[=] name[url] name[info].scheme assign[=] name[scheme] name[info].authority assign[=] name[authority] name[info].path assign[=] call[name[normalize_path], parameter[name[path]]] name[info].query assign[=] call[name[normalize_query], parameter[name[query]]] name[info].fragment assign[=] call[name[normalize_fragment], parameter[name[fragment]]] name[info].userinfo assign[=] name[userinfo] name[info].username assign[=] call[name[percent_decode], parameter[name[username]]] name[info].password assign[=] call[name[percent_decode], parameter[name[password]]] name[info].host assign[=] name[host] name[info].hostname assign[=] name[hostname] name[info].port assign[=] <ast.BoolOp object at 0x7da2041d85b0> name[info].resource assign[=] name[resource] return[name[info]]
keyword[def] identifier[parse] ( identifier[cls] , identifier[url] , identifier[default_scheme] = literal[string] , identifier[encoding] = literal[string] ): literal[string] keyword[if] identifier[url] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[url] = identifier[url] . identifier[strip] () keyword[if] identifier[frozenset] ( identifier[url] )& identifier[C0_CONTROL_SET] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[ascii] ( identifier[url] ))) identifier[scheme] , identifier[sep] , identifier[remaining] = identifier[url] . identifier[partition] ( literal[string] ) keyword[if] keyword[not] identifier[scheme] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[ascii] ( identifier[url] ))) identifier[scheme] = identifier[scheme] . identifier[lower] () keyword[if] keyword[not] identifier[sep] keyword[and] identifier[default_scheme] : identifier[remaining] = identifier[url] identifier[scheme] = identifier[default_scheme] keyword[elif] keyword[not] identifier[sep] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[ascii] ( identifier[url] ))) keyword[if] identifier[default_scheme] keyword[and] literal[string] keyword[in] identifier[scheme] keyword[or] identifier[scheme] == literal[string] : identifier[remaining] = literal[string] . identifier[format] ( identifier[scheme] , identifier[remaining] ) identifier[scheme] = identifier[default_scheme] identifier[info] = identifier[URLInfo] () identifier[info] . identifier[encoding] = identifier[encoding] keyword[if] identifier[scheme] keyword[not] keyword[in] identifier[RELATIVE_SCHEME_DEFAULT_PORTS] : identifier[info] . identifier[raw] = identifier[url] identifier[info] . identifier[scheme] = identifier[scheme] identifier[info] . identifier[path] = identifier[remaining] keyword[return] identifier[info] keyword[if] identifier[remaining] . identifier[startswith] ( literal[string] ): identifier[remaining] = identifier[remaining] [ literal[int] :] identifier[path_index] = identifier[remaining] . identifier[find] ( literal[string] ) identifier[query_index] = identifier[remaining] . identifier[find] ( literal[string] ) identifier[fragment_index] = identifier[remaining] . identifier[find] ( literal[string] ) keyword[try] : identifier[index_tuple] =( identifier[path_index] , identifier[query_index] , identifier[fragment_index] ) identifier[authority_index] = identifier[min] ( identifier[num] keyword[for] identifier[num] keyword[in] identifier[index_tuple] keyword[if] identifier[num] >= literal[int] ) keyword[except] identifier[ValueError] : identifier[authority_index] = identifier[len] ( identifier[remaining] ) identifier[authority] = identifier[remaining] [: identifier[authority_index] ] identifier[resource] = identifier[remaining] [ identifier[authority_index] :] keyword[try] : identifier[index_tuple] =( identifier[query_index] , identifier[fragment_index] ) identifier[path_index] = identifier[min] ( identifier[num] keyword[for] identifier[num] keyword[in] identifier[index_tuple] keyword[if] identifier[num] >= literal[int] ) keyword[except] identifier[ValueError] : identifier[path_index] = identifier[len] ( identifier[remaining] ) identifier[path] = identifier[remaining] [ identifier[authority_index] + literal[int] : identifier[path_index] ] keyword[or] literal[string] keyword[if] identifier[fragment_index] >= literal[int] : identifier[query_index] = identifier[fragment_index] keyword[else] : identifier[query_index] = identifier[len] ( identifier[remaining] ) identifier[query] = identifier[remaining] [ identifier[path_index] + literal[int] : identifier[query_index] ] identifier[fragment] = identifier[remaining] [ identifier[query_index] + literal[int] :] identifier[userinfo] , identifier[host] = identifier[cls] . identifier[parse_authority] ( identifier[authority] ) identifier[hostname] , identifier[port] = identifier[cls] . identifier[parse_host] ( identifier[host] ) identifier[username] , identifier[password] = identifier[cls] . identifier[parse_userinfo] ( identifier[userinfo] ) keyword[if] keyword[not] identifier[hostname] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[ascii] ( identifier[url] ))) identifier[info] . identifier[raw] = identifier[url] identifier[info] . identifier[scheme] = identifier[scheme] identifier[info] . identifier[authority] = identifier[authority] identifier[info] . identifier[path] = identifier[normalize_path] ( identifier[path] , identifier[encoding] = identifier[encoding] ) identifier[info] . identifier[query] = identifier[normalize_query] ( identifier[query] , identifier[encoding] = identifier[encoding] ) identifier[info] . identifier[fragment] = identifier[normalize_fragment] ( identifier[fragment] , identifier[encoding] = identifier[encoding] ) identifier[info] . identifier[userinfo] = identifier[userinfo] identifier[info] . identifier[username] = identifier[percent_decode] ( identifier[username] , identifier[encoding] = identifier[encoding] ) identifier[info] . identifier[password] = identifier[percent_decode] ( identifier[password] , identifier[encoding] = identifier[encoding] ) identifier[info] . identifier[host] = identifier[host] identifier[info] . identifier[hostname] = identifier[hostname] identifier[info] . identifier[port] = identifier[port] keyword[or] identifier[RELATIVE_SCHEME_DEFAULT_PORTS] [ identifier[scheme] ] identifier[info] . identifier[resource] = identifier[resource] keyword[return] identifier[info]
def parse(cls, url, default_scheme='http', encoding='utf-8'): """Parse a URL and return a URLInfo.""" if url is None: return None # depends on [control=['if'], data=[]] url = url.strip() if frozenset(url) & C0_CONTROL_SET: raise ValueError('URL contains control codes: {}'.format(ascii(url))) # depends on [control=['if'], data=[]] (scheme, sep, remaining) = url.partition(':') if not scheme: raise ValueError('URL missing scheme: {}'.format(ascii(url))) # depends on [control=['if'], data=[]] scheme = scheme.lower() if not sep and default_scheme: # Likely something like example.com/mystuff remaining = url scheme = default_scheme # depends on [control=['if'], data=[]] elif not sep: raise ValueError('URI missing colon: {}'.format(ascii(url))) # depends on [control=['if'], data=[]] if default_scheme and '.' in scheme or scheme == 'localhost': # Maybe something like example.com:8080/mystuff or # maybe localhost:8080/mystuff remaining = '{}:{}'.format(scheme, remaining) scheme = default_scheme # depends on [control=['if'], data=[]] info = URLInfo() info.encoding = encoding if scheme not in RELATIVE_SCHEME_DEFAULT_PORTS: info.raw = url info.scheme = scheme info.path = remaining return info # depends on [control=['if'], data=['scheme']] if remaining.startswith('//'): remaining = remaining[2:] # depends on [control=['if'], data=[]] path_index = remaining.find('/') query_index = remaining.find('?') fragment_index = remaining.find('#') try: index_tuple = (path_index, query_index, fragment_index) authority_index = min((num for num in index_tuple if num >= 0)) # depends on [control=['try'], data=[]] except ValueError: authority_index = len(remaining) # depends on [control=['except'], data=[]] authority = remaining[:authority_index] resource = remaining[authority_index:] try: index_tuple = (query_index, fragment_index) path_index = min((num for num in index_tuple if num >= 0)) # depends on [control=['try'], data=[]] except ValueError: path_index = len(remaining) # depends on [control=['except'], data=[]] path = remaining[authority_index + 1:path_index] or '/' if fragment_index >= 0: query_index = fragment_index # depends on [control=['if'], data=['fragment_index']] else: query_index = len(remaining) query = remaining[path_index + 1:query_index] fragment = remaining[query_index + 1:] (userinfo, host) = cls.parse_authority(authority) (hostname, port) = cls.parse_host(host) (username, password) = cls.parse_userinfo(userinfo) if not hostname: raise ValueError('Hostname is empty: {}'.format(ascii(url))) # depends on [control=['if'], data=[]] info.raw = url info.scheme = scheme info.authority = authority info.path = normalize_path(path, encoding=encoding) info.query = normalize_query(query, encoding=encoding) info.fragment = normalize_fragment(fragment, encoding=encoding) info.userinfo = userinfo info.username = percent_decode(username, encoding=encoding) info.password = percent_decode(password, encoding=encoding) info.host = host info.hostname = hostname info.port = port or RELATIVE_SCHEME_DEFAULT_PORTS[scheme] info.resource = resource return info
def genl_ctrl_resolve_grp(sk, family_name, grp_name): """Resolve Generic Netlink family group name. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L471 Looks up the family object and resolves the group name to the numeric group identifier. Positional arguments: sk -- Generic Netlink socket (nl_sock class instance). family_name -- name of Generic Netlink family (bytes). grp_name -- name of group to resolve (bytes). Returns: The numeric group identifier or a negative error code. """ family = genl_ctrl_probe_by_name(sk, family_name) if family is None: return -NLE_OBJ_NOTFOUND return genl_ctrl_grp_by_name(family, grp_name)
def function[genl_ctrl_resolve_grp, parameter[sk, family_name, grp_name]]: constant[Resolve Generic Netlink family group name. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L471 Looks up the family object and resolves the group name to the numeric group identifier. Positional arguments: sk -- Generic Netlink socket (nl_sock class instance). family_name -- name of Generic Netlink family (bytes). grp_name -- name of group to resolve (bytes). Returns: The numeric group identifier or a negative error code. ] variable[family] assign[=] call[name[genl_ctrl_probe_by_name], parameter[name[sk], name[family_name]]] if compare[name[family] is constant[None]] begin[:] return[<ast.UnaryOp object at 0x7da1b2637bb0>] return[call[name[genl_ctrl_grp_by_name], parameter[name[family], name[grp_name]]]]
keyword[def] identifier[genl_ctrl_resolve_grp] ( identifier[sk] , identifier[family_name] , identifier[grp_name] ): literal[string] identifier[family] = identifier[genl_ctrl_probe_by_name] ( identifier[sk] , identifier[family_name] ) keyword[if] identifier[family] keyword[is] keyword[None] : keyword[return] - identifier[NLE_OBJ_NOTFOUND] keyword[return] identifier[genl_ctrl_grp_by_name] ( identifier[family] , identifier[grp_name] )
def genl_ctrl_resolve_grp(sk, family_name, grp_name): """Resolve Generic Netlink family group name. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L471 Looks up the family object and resolves the group name to the numeric group identifier. Positional arguments: sk -- Generic Netlink socket (nl_sock class instance). family_name -- name of Generic Netlink family (bytes). grp_name -- name of group to resolve (bytes). Returns: The numeric group identifier or a negative error code. """ family = genl_ctrl_probe_by_name(sk, family_name) if family is None: return -NLE_OBJ_NOTFOUND # depends on [control=['if'], data=[]] return genl_ctrl_grp_by_name(family, grp_name)
def label_from_df(self, cols:IntsOrStrs=1, label_cls:Callable=None, **kwargs): "Label `self.items` from the values in `cols` in `self.inner_df`." labels = self.inner_df.iloc[:,df_names_to_idx(cols, self.inner_df)] assert labels.isna().sum().sum() == 0, f"You have NaN values in column(s) {cols} of your dataframe, please fix it." if is_listy(cols) and len(cols) > 1 and (label_cls is None or label_cls == MultiCategoryList): new_kwargs,label_cls = dict(one_hot=True, classes= cols),MultiCategoryList kwargs = {**new_kwargs, **kwargs} return self._label_from_list(_maybe_squeeze(labels), label_cls=label_cls, **kwargs)
def function[label_from_df, parameter[self, cols, label_cls]]: constant[Label `self.items` from the values in `cols` in `self.inner_df`.] variable[labels] assign[=] call[name[self].inner_df.iloc][tuple[[<ast.Slice object at 0x7da1b1e74880>, <ast.Call object at 0x7da1b1e75d80>]]] assert[compare[call[call[call[name[labels].isna, parameter[]].sum, parameter[]].sum, parameter[]] equal[==] constant[0]]] if <ast.BoolOp object at 0x7da1b1e746a0> begin[:] <ast.Tuple object at 0x7da1b1e744f0> assign[=] tuple[[<ast.Call object at 0x7da1b1e767a0>, <ast.Name object at 0x7da1b1e77d60>]] variable[kwargs] assign[=] dictionary[[None, None], [<ast.Name object at 0x7da1b1e74820>, <ast.Name object at 0x7da1b1e77e50>]] return[call[name[self]._label_from_list, parameter[call[name[_maybe_squeeze], parameter[name[labels]]]]]]
keyword[def] identifier[label_from_df] ( identifier[self] , identifier[cols] : identifier[IntsOrStrs] = literal[int] , identifier[label_cls] : identifier[Callable] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[labels] = identifier[self] . identifier[inner_df] . identifier[iloc] [:, identifier[df_names_to_idx] ( identifier[cols] , identifier[self] . identifier[inner_df] )] keyword[assert] identifier[labels] . identifier[isna] (). identifier[sum] (). identifier[sum] ()== literal[int] , literal[string] keyword[if] identifier[is_listy] ( identifier[cols] ) keyword[and] identifier[len] ( identifier[cols] )> literal[int] keyword[and] ( identifier[label_cls] keyword[is] keyword[None] keyword[or] identifier[label_cls] == identifier[MultiCategoryList] ): identifier[new_kwargs] , identifier[label_cls] = identifier[dict] ( identifier[one_hot] = keyword[True] , identifier[classes] = identifier[cols] ), identifier[MultiCategoryList] identifier[kwargs] ={** identifier[new_kwargs] ,** identifier[kwargs] } keyword[return] identifier[self] . identifier[_label_from_list] ( identifier[_maybe_squeeze] ( identifier[labels] ), identifier[label_cls] = identifier[label_cls] ,** identifier[kwargs] )
def label_from_df(self, cols: IntsOrStrs=1, label_cls: Callable=None, **kwargs): """Label `self.items` from the values in `cols` in `self.inner_df`.""" labels = self.inner_df.iloc[:, df_names_to_idx(cols, self.inner_df)] assert labels.isna().sum().sum() == 0, f'You have NaN values in column(s) {cols} of your dataframe, please fix it.' if is_listy(cols) and len(cols) > 1 and (label_cls is None or label_cls == MultiCategoryList): (new_kwargs, label_cls) = (dict(one_hot=True, classes=cols), MultiCategoryList) kwargs = {**new_kwargs, **kwargs} # depends on [control=['if'], data=[]] return self._label_from_list(_maybe_squeeze(labels), label_cls=label_cls, **kwargs)
def encode(self, *args): """encode(value1[, ...]) -> bytes Encodes the given values to a sequence of bytes according to this Array's underlying element type """ if len(args) != self.nelems: msg = 'ArrayType %s encode() requires %d values, but received %d.' raise ValueError(msg % (self.name, self.nelems, len(args))) return bytearray().join(self.type.encode(arg) for arg in args)
def function[encode, parameter[self]]: constant[encode(value1[, ...]) -> bytes Encodes the given values to a sequence of bytes according to this Array's underlying element type ] if compare[call[name[len], parameter[name[args]]] not_equal[!=] name[self].nelems] begin[:] variable[msg] assign[=] constant[ArrayType %s encode() requires %d values, but received %d.] <ast.Raise object at 0x7da20c7c9ae0> return[call[call[name[bytearray], parameter[]].join, parameter[<ast.GeneratorExp object at 0x7da20c7cb820>]]]
keyword[def] identifier[encode] ( identifier[self] ,* identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )!= identifier[self] . identifier[nelems] : identifier[msg] = literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] %( identifier[self] . identifier[name] , identifier[self] . identifier[nelems] , identifier[len] ( identifier[args] ))) keyword[return] identifier[bytearray] (). identifier[join] ( identifier[self] . identifier[type] . identifier[encode] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[args] )
def encode(self, *args): """encode(value1[, ...]) -> bytes Encodes the given values to a sequence of bytes according to this Array's underlying element type """ if len(args) != self.nelems: msg = 'ArrayType %s encode() requires %d values, but received %d.' raise ValueError(msg % (self.name, self.nelems, len(args))) # depends on [control=['if'], data=[]] return bytearray().join((self.type.encode(arg) for arg in args))
def comments(tag, limit=0, flags=0, **kwargs): """Get comments only.""" return [comment for comment in cm.CommentsMatch(tag).get_comments(limit)]
def function[comments, parameter[tag, limit, flags]]: constant[Get comments only.] return[<ast.ListComp object at 0x7da18f58f850>]
keyword[def] identifier[comments] ( identifier[tag] , identifier[limit] = literal[int] , identifier[flags] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[return] [ identifier[comment] keyword[for] identifier[comment] keyword[in] identifier[cm] . identifier[CommentsMatch] ( identifier[tag] ). identifier[get_comments] ( identifier[limit] )]
def comments(tag, limit=0, flags=0, **kwargs): """Get comments only.""" return [comment for comment in cm.CommentsMatch(tag).get_comments(limit)]
def bethe_fermi(energy, quasipart, shift, hopping, beta): """product of the bethe lattice dos, fermi distribution""" return fermi_dist(quasipart * energy - shift, beta) \ * bethe_lattice(energy, hopping)
def function[bethe_fermi, parameter[energy, quasipart, shift, hopping, beta]]: constant[product of the bethe lattice dos, fermi distribution] return[binary_operation[call[name[fermi_dist], parameter[binary_operation[binary_operation[name[quasipart] * name[energy]] - name[shift]], name[beta]]] * call[name[bethe_lattice], parameter[name[energy], name[hopping]]]]]
keyword[def] identifier[bethe_fermi] ( identifier[energy] , identifier[quasipart] , identifier[shift] , identifier[hopping] , identifier[beta] ): literal[string] keyword[return] identifier[fermi_dist] ( identifier[quasipart] * identifier[energy] - identifier[shift] , identifier[beta] )* identifier[bethe_lattice] ( identifier[energy] , identifier[hopping] )
def bethe_fermi(energy, quasipart, shift, hopping, beta): """product of the bethe lattice dos, fermi distribution""" return fermi_dist(quasipart * energy - shift, beta) * bethe_lattice(energy, hopping)
def p_speed_conversion(self, p): 'speed : speed IN speed_unit' logger.debug('speed = speed %s in speed unit %s', p[1], p[3]) information_unit, duration = p[3] p[0] = '{0: {1}}'.format(p[1], '{0}/{1}'.format(information_unit, duration))
def function[p_speed_conversion, parameter[self, p]]: constant[speed : speed IN speed_unit] call[name[logger].debug, parameter[constant[speed = speed %s in speed unit %s], call[name[p]][constant[1]], call[name[p]][constant[3]]]] <ast.Tuple object at 0x7da1b133c220> assign[=] call[name[p]][constant[3]] call[name[p]][constant[0]] assign[=] call[constant[{0: {1}}].format, parameter[call[name[p]][constant[1]], call[constant[{0}/{1}].format, parameter[name[information_unit], name[duration]]]]]
keyword[def] identifier[p_speed_conversion] ( identifier[self] , identifier[p] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]) identifier[information_unit] , identifier[duration] = identifier[p] [ literal[int] ] identifier[p] [ literal[int] ]= literal[string] . identifier[format] ( identifier[p] [ literal[int] ], literal[string] . identifier[format] ( identifier[information_unit] , identifier[duration] ))
def p_speed_conversion(self, p): """speed : speed IN speed_unit""" logger.debug('speed = speed %s in speed unit %s', p[1], p[3]) (information_unit, duration) = p[3] p[0] = '{0: {1}}'.format(p[1], '{0}/{1}'.format(information_unit, duration))
def save(self): """ Save the config file """ file_path = self.get_config_path() contents = self.get_contents() with open(file_path, mode='w') as cfg_file: cfg_file.write(contents)
def function[save, parameter[self]]: constant[ Save the config file ] variable[file_path] assign[=] call[name[self].get_config_path, parameter[]] variable[contents] assign[=] call[name[self].get_contents, parameter[]] with call[name[open], parameter[name[file_path]]] begin[:] call[name[cfg_file].write, parameter[name[contents]]]
keyword[def] identifier[save] ( identifier[self] ): literal[string] identifier[file_path] = identifier[self] . identifier[get_config_path] () identifier[contents] = identifier[self] . identifier[get_contents] () keyword[with] identifier[open] ( identifier[file_path] , identifier[mode] = literal[string] ) keyword[as] identifier[cfg_file] : identifier[cfg_file] . identifier[write] ( identifier[contents] )
def save(self): """ Save the config file """ file_path = self.get_config_path() contents = self.get_contents() with open(file_path, mode='w') as cfg_file: cfg_file.write(contents) # depends on [control=['with'], data=['cfg_file']]
def add_static_path(prefix: str, path: str, no_watch: bool = False) -> None: """Add directory to serve static files. First argument ``prefix`` is a URL prefix for the ``path``. ``path`` must be a directory. If ``no_watch`` is True, any change of the files in the path do not trigger restart if ``--autoreload`` is enabled. """ app = get_app() app.add_static_path(prefix, path) if not no_watch: watch_dir(path)
def function[add_static_path, parameter[prefix, path, no_watch]]: constant[Add directory to serve static files. First argument ``prefix`` is a URL prefix for the ``path``. ``path`` must be a directory. If ``no_watch`` is True, any change of the files in the path do not trigger restart if ``--autoreload`` is enabled. ] variable[app] assign[=] call[name[get_app], parameter[]] call[name[app].add_static_path, parameter[name[prefix], name[path]]] if <ast.UnaryOp object at 0x7da20c7ca8f0> begin[:] call[name[watch_dir], parameter[name[path]]]
keyword[def] identifier[add_static_path] ( identifier[prefix] : identifier[str] , identifier[path] : identifier[str] , identifier[no_watch] : identifier[bool] = keyword[False] )-> keyword[None] : literal[string] identifier[app] = identifier[get_app] () identifier[app] . identifier[add_static_path] ( identifier[prefix] , identifier[path] ) keyword[if] keyword[not] identifier[no_watch] : identifier[watch_dir] ( identifier[path] )
def add_static_path(prefix: str, path: str, no_watch: bool=False) -> None: """Add directory to serve static files. First argument ``prefix`` is a URL prefix for the ``path``. ``path`` must be a directory. If ``no_watch`` is True, any change of the files in the path do not trigger restart if ``--autoreload`` is enabled. """ app = get_app() app.add_static_path(prefix, path) if not no_watch: watch_dir(path) # depends on [control=['if'], data=[]]
def create_log_entry_model(self, model): """ Creates a log entry model that will be associated with the model provided. """ attrs = self.copy_fields(model) attrs.update(self.get_logging_fields(model)) attrs.update(Meta = type(str('Meta'), (), self.get_meta_options(model))) name = str('%sAuditLogEntry'%model._meta.object_name) return type(name, (models.Model,), attrs)
def function[create_log_entry_model, parameter[self, model]]: constant[ Creates a log entry model that will be associated with the model provided. ] variable[attrs] assign[=] call[name[self].copy_fields, parameter[name[model]]] call[name[attrs].update, parameter[call[name[self].get_logging_fields, parameter[name[model]]]]] call[name[attrs].update, parameter[]] variable[name] assign[=] call[name[str], parameter[binary_operation[constant[%sAuditLogEntry] <ast.Mod object at 0x7da2590d6920> name[model]._meta.object_name]]] return[call[name[type], parameter[name[name], tuple[[<ast.Attribute object at 0x7da18fe909a0>]], name[attrs]]]]
keyword[def] identifier[create_log_entry_model] ( identifier[self] , identifier[model] ): literal[string] identifier[attrs] = identifier[self] . identifier[copy_fields] ( identifier[model] ) identifier[attrs] . identifier[update] ( identifier[self] . identifier[get_logging_fields] ( identifier[model] )) identifier[attrs] . identifier[update] ( identifier[Meta] = identifier[type] ( identifier[str] ( literal[string] ),(), identifier[self] . identifier[get_meta_options] ( identifier[model] ))) identifier[name] = identifier[str] ( literal[string] % identifier[model] . identifier[_meta] . identifier[object_name] ) keyword[return] identifier[type] ( identifier[name] ,( identifier[models] . identifier[Model] ,), identifier[attrs] )
def create_log_entry_model(self, model): """ Creates a log entry model that will be associated with the model provided. """ attrs = self.copy_fields(model) attrs.update(self.get_logging_fields(model)) attrs.update(Meta=type(str('Meta'), (), self.get_meta_options(model))) name = str('%sAuditLogEntry' % model._meta.object_name) return type(name, (models.Model,), attrs)
def incr(self, stat, value=1, tags=None): """Increment a counter.""" self.client.incr(stat=stat, count=value)
def function[incr, parameter[self, stat, value, tags]]: constant[Increment a counter.] call[name[self].client.incr, parameter[]]
keyword[def] identifier[incr] ( identifier[self] , identifier[stat] , identifier[value] = literal[int] , identifier[tags] = keyword[None] ): literal[string] identifier[self] . identifier[client] . identifier[incr] ( identifier[stat] = identifier[stat] , identifier[count] = identifier[value] )
def incr(self, stat, value=1, tags=None): """Increment a counter.""" self.client.incr(stat=stat, count=value)
def OP_calc(ACC, TPR, TNR): """ Calculate OP (Optimized precision). :param ACC: accuracy :type ACC : float :param TNR: specificity or true negative rate :type TNR : float :param TPR: sensitivity, recall, hit rate, or true positive rate :type TPR : float :return: OP as float """ try: RI = abs(TNR - TPR) / (TPR + TNR) return ACC - RI except Exception: return "None"
def function[OP_calc, parameter[ACC, TPR, TNR]]: constant[ Calculate OP (Optimized precision). :param ACC: accuracy :type ACC : float :param TNR: specificity or true negative rate :type TNR : float :param TPR: sensitivity, recall, hit rate, or true positive rate :type TPR : float :return: OP as float ] <ast.Try object at 0x7da1b16be110>
keyword[def] identifier[OP_calc] ( identifier[ACC] , identifier[TPR] , identifier[TNR] ): literal[string] keyword[try] : identifier[RI] = identifier[abs] ( identifier[TNR] - identifier[TPR] )/( identifier[TPR] + identifier[TNR] ) keyword[return] identifier[ACC] - identifier[RI] keyword[except] identifier[Exception] : keyword[return] literal[string]
def OP_calc(ACC, TPR, TNR): """ Calculate OP (Optimized precision). :param ACC: accuracy :type ACC : float :param TNR: specificity or true negative rate :type TNR : float :param TPR: sensitivity, recall, hit rate, or true positive rate :type TPR : float :return: OP as float """ try: RI = abs(TNR - TPR) / (TPR + TNR) return ACC - RI # depends on [control=['try'], data=[]] except Exception: return 'None' # depends on [control=['except'], data=[]]
def createFileParserCtxt(filename): """Create a parser context for a file content. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. """ ret = libxml2mod.xmlCreateFileParserCtxt(filename) if ret is None:raise parserError('xmlCreateFileParserCtxt() failed') return parserCtxt(_obj=ret)
def function[createFileParserCtxt, parameter[filename]]: constant[Create a parser context for a file content. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. ] variable[ret] assign[=] call[name[libxml2mod].xmlCreateFileParserCtxt, parameter[name[filename]]] if compare[name[ret] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1fa6f50> return[call[name[parserCtxt], parameter[]]]
keyword[def] identifier[createFileParserCtxt] ( identifier[filename] ): literal[string] identifier[ret] = identifier[libxml2mod] . identifier[xmlCreateFileParserCtxt] ( identifier[filename] ) keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[raise] identifier[parserError] ( literal[string] ) keyword[return] identifier[parserCtxt] ( identifier[_obj] = identifier[ret] )
def createFileParserCtxt(filename): """Create a parser context for a file content. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. """ ret = libxml2mod.xmlCreateFileParserCtxt(filename) if ret is None: raise parserError('xmlCreateFileParserCtxt() failed') # depends on [control=['if'], data=[]] return parserCtxt(_obj=ret)
def report_fooof(self, x, y, suffix): """Create FOOOF (fitting oscillations and 1/f) report. Parameters ---------- x : ndarray vector with frequencies y : ndarray vector with amplitudes """ filename = splitext(self.filename)[0] + '_' + suffix + '_fooof.csv' freq = self.frequency freq_range = [freq['fo_min_freq'].get_value(), freq['fo_max_freq'].get_value()] pk_thresh = freq['fo_pk_thresh'].get_value() pk_width = [freq['fo_pk_width_min'].get_value(), freq['fo_pk_width_max'].get_value()] max_n_pk = freq['fo_max_n_pk'].get_value() min_pk_amp = freq['fo_min_pk_amp'].get_value() bg_mode = freq['fo_bg_mode'].get_value() if max_n_pk == 0: max_n_pk = inf if FOOOF is None: lg.warning('"fooof" package is required for this function, run "pip install fooof"') return fm = FOOOF(peak_width_limits=pk_width, max_n_peaks=max_n_pk, min_peak_amplitude=min_pk_amp, peak_threshold=pk_thresh, background_mode=bg_mode) fm.fit(x, y, freq_range) with open(filename, 'w', newline='') as f: lg.info('Writing to ' + str(filename)) csv_file = writer(f) csv_file.writerow(['Wonambi v{}'.format(__version__)]) csv_file.writerow(['FOOOF - POWER SPECTRUM MODEL']) csv_file.writerow('') csv_file.writerow(['The model was run on the frequency range ' '{} - {} Hz'.format(int(floor(fm.freq_range[0])), int(ceil(fm.freq_range[1])))]) csv_file.writerow(['Frequency Resolution is {:1.2f} Hz'.format( fm.freq_res)]) csv_file.writerow('') csv_file.writerow(['Background Parameters (offset, ' + \ ('knee, ' if fm.background_mode == 'knee' else '') + \ 'slope): ' + ', '.join(['{:2.4f}'] * \ len(fm.background_params_)).format( *fm.background_params_)]) csv_file.writerow('') csv_file.writerow(['{} peaks were found:'.format( len(fm.peak_params_))]) csv_file.writerow('') csv_file.writerow(['Index', 'CF', 'Amp', 'BW']) for i, op in enumerate(fm.peak_params_): csv_file.writerow([i, op[0], op[1], op[2]]) csv_file.writerow('') csv_file.writerow(['Goodness of fit metrics:']) csv_file.writerow(['R^2 of model fit is {:5.4f}'.format( fm.r_squared_)]) csv_file.writerow(['Root mean squared error is {:5.4f}'.format( fm.error_)]) csv_file.writerow('') csv_file.writerow(['Haller M, Donoghue T, Peterson E, Varma P, ' 'Sebastian P, Gao R, Noto T, Knight RT, ' 'Shestyuk A, Voytek B (2018) Parameterizing ' 'Neural Power Spectra. bioRxiv, 299859. doi: ' 'https://doi.org/10.1101/299859'])
def function[report_fooof, parameter[self, x, y, suffix]]: constant[Create FOOOF (fitting oscillations and 1/f) report. Parameters ---------- x : ndarray vector with frequencies y : ndarray vector with amplitudes ] variable[filename] assign[=] binary_operation[binary_operation[binary_operation[call[call[name[splitext], parameter[name[self].filename]]][constant[0]] + constant[_]] + name[suffix]] + constant[_fooof.csv]] variable[freq] assign[=] name[self].frequency variable[freq_range] assign[=] list[[<ast.Call object at 0x7da20e797eb0>, <ast.Call object at 0x7da237d34f40>]] variable[pk_thresh] assign[=] call[call[name[freq]][constant[fo_pk_thresh]].get_value, parameter[]] variable[pk_width] assign[=] list[[<ast.Call object at 0x7da18bc72bc0>, <ast.Call object at 0x7da18bc71150>]] variable[max_n_pk] assign[=] call[call[name[freq]][constant[fo_max_n_pk]].get_value, parameter[]] variable[min_pk_amp] assign[=] call[call[name[freq]][constant[fo_min_pk_amp]].get_value, parameter[]] variable[bg_mode] assign[=] call[call[name[freq]][constant[fo_bg_mode]].get_value, parameter[]] if compare[name[max_n_pk] equal[==] constant[0]] begin[:] variable[max_n_pk] assign[=] name[inf] if compare[name[FOOOF] is constant[None]] begin[:] call[name[lg].warning, parameter[constant["fooof" package is required for this function, run "pip install fooof"]]] return[None] variable[fm] assign[=] call[name[FOOOF], parameter[]] call[name[fm].fit, parameter[name[x], name[y], name[freq_range]]] with call[name[open], parameter[name[filename], constant[w]]] begin[:] call[name[lg].info, parameter[binary_operation[constant[Writing to ] + call[name[str], parameter[name[filename]]]]]] variable[csv_file] assign[=] call[name[writer], parameter[name[f]]] call[name[csv_file].writerow, parameter[list[[<ast.Call object at 0x7da18bc73dc0>]]]] call[name[csv_file].writerow, parameter[list[[<ast.Constant object at 0x7da18bc71e10>]]]] call[name[csv_file].writerow, parameter[constant[]]] call[name[csv_file].writerow, parameter[list[[<ast.Call object at 0x7da18bc70e80>]]]] call[name[csv_file].writerow, parameter[list[[<ast.Call object at 0x7da18bc71d80>]]]] call[name[csv_file].writerow, parameter[constant[]]] call[name[csv_file].writerow, parameter[list[[<ast.BinOp object at 0x7da18bc71540>]]]] call[name[csv_file].writerow, parameter[constant[]]] call[name[csv_file].writerow, parameter[list[[<ast.Call object at 0x7da18bc70310>]]]] call[name[csv_file].writerow, parameter[constant[]]] call[name[csv_file].writerow, parameter[list[[<ast.Constant object at 0x7da18bc71d20>, <ast.Constant object at 0x7da18bc70ca0>, <ast.Constant object at 0x7da18bc703a0>, <ast.Constant object at 0x7da18bc72500>]]]] for taget[tuple[[<ast.Name object at 0x7da18bc712a0>, <ast.Name object at 0x7da18bc71f30>]]] in starred[call[name[enumerate], parameter[name[fm].peak_params_]]] begin[:] call[name[csv_file].writerow, parameter[list[[<ast.Name object at 0x7da18bc73280>, <ast.Subscript object at 0x7da18bc73cd0>, <ast.Subscript object at 0x7da18bc71f00>, <ast.Subscript object at 0x7da18bc73070>]]]] call[name[csv_file].writerow, parameter[constant[]]] call[name[csv_file].writerow, parameter[list[[<ast.Constant object at 0x7da18bc73400>]]]] call[name[csv_file].writerow, parameter[list[[<ast.Call object at 0x7da18bc70ac0>]]]] call[name[csv_file].writerow, parameter[list[[<ast.Call object at 0x7da18bc70220>]]]] call[name[csv_file].writerow, parameter[constant[]]] call[name[csv_file].writerow, parameter[list[[<ast.Constant object at 0x7da18bc70850>]]]]
keyword[def] identifier[report_fooof] ( identifier[self] , identifier[x] , identifier[y] , identifier[suffix] ): literal[string] identifier[filename] = identifier[splitext] ( identifier[self] . identifier[filename] )[ literal[int] ]+ literal[string] + identifier[suffix] + literal[string] identifier[freq] = identifier[self] . identifier[frequency] identifier[freq_range] =[ identifier[freq] [ literal[string] ]. identifier[get_value] (), identifier[freq] [ literal[string] ]. identifier[get_value] ()] identifier[pk_thresh] = identifier[freq] [ literal[string] ]. identifier[get_value] () identifier[pk_width] =[ identifier[freq] [ literal[string] ]. identifier[get_value] (), identifier[freq] [ literal[string] ]. identifier[get_value] ()] identifier[max_n_pk] = identifier[freq] [ literal[string] ]. identifier[get_value] () identifier[min_pk_amp] = identifier[freq] [ literal[string] ]. identifier[get_value] () identifier[bg_mode] = identifier[freq] [ literal[string] ]. identifier[get_value] () keyword[if] identifier[max_n_pk] == literal[int] : identifier[max_n_pk] = identifier[inf] keyword[if] identifier[FOOOF] keyword[is] keyword[None] : identifier[lg] . identifier[warning] ( literal[string] ) keyword[return] identifier[fm] = identifier[FOOOF] ( identifier[peak_width_limits] = identifier[pk_width] , identifier[max_n_peaks] = identifier[max_n_pk] , identifier[min_peak_amplitude] = identifier[min_pk_amp] , identifier[peak_threshold] = identifier[pk_thresh] , identifier[background_mode] = identifier[bg_mode] ) identifier[fm] . identifier[fit] ( identifier[x] , identifier[y] , identifier[freq_range] ) keyword[with] identifier[open] ( identifier[filename] , literal[string] , identifier[newline] = literal[string] ) keyword[as] identifier[f] : identifier[lg] . identifier[info] ( literal[string] + identifier[str] ( identifier[filename] )) identifier[csv_file] = identifier[writer] ( identifier[f] ) identifier[csv_file] . identifier[writerow] ([ literal[string] . identifier[format] ( identifier[__version__] )]) identifier[csv_file] . identifier[writerow] ([ literal[string] ]) identifier[csv_file] . identifier[writerow] ( literal[string] ) identifier[csv_file] . identifier[writerow] ([ literal[string] literal[string] . identifier[format] ( identifier[int] ( identifier[floor] ( identifier[fm] . identifier[freq_range] [ literal[int] ])), identifier[int] ( identifier[ceil] ( identifier[fm] . identifier[freq_range] [ literal[int] ])))]) identifier[csv_file] . identifier[writerow] ([ literal[string] . identifier[format] ( identifier[fm] . identifier[freq_res] )]) identifier[csv_file] . identifier[writerow] ( literal[string] ) identifier[csv_file] . identifier[writerow] ([ literal[string] +( literal[string] keyword[if] identifier[fm] . identifier[background_mode] == literal[string] keyword[else] literal[string] )+ literal[string] + literal[string] . identifier[join] ([ literal[string] ]* identifier[len] ( identifier[fm] . identifier[background_params_] )). identifier[format] ( * identifier[fm] . identifier[background_params_] )]) identifier[csv_file] . identifier[writerow] ( literal[string] ) identifier[csv_file] . identifier[writerow] ([ literal[string] . identifier[format] ( identifier[len] ( identifier[fm] . identifier[peak_params_] ))]) identifier[csv_file] . identifier[writerow] ( literal[string] ) identifier[csv_file] . identifier[writerow] ([ literal[string] , literal[string] , literal[string] , literal[string] ]) keyword[for] identifier[i] , identifier[op] keyword[in] identifier[enumerate] ( identifier[fm] . identifier[peak_params_] ): identifier[csv_file] . identifier[writerow] ([ identifier[i] , identifier[op] [ literal[int] ], identifier[op] [ literal[int] ], identifier[op] [ literal[int] ]]) identifier[csv_file] . identifier[writerow] ( literal[string] ) identifier[csv_file] . identifier[writerow] ([ literal[string] ]) identifier[csv_file] . identifier[writerow] ([ literal[string] . identifier[format] ( identifier[fm] . identifier[r_squared_] )]) identifier[csv_file] . identifier[writerow] ([ literal[string] . identifier[format] ( identifier[fm] . identifier[error_] )]) identifier[csv_file] . identifier[writerow] ( literal[string] ) identifier[csv_file] . identifier[writerow] ([ literal[string] literal[string] literal[string] literal[string] literal[string] ])
def report_fooof(self, x, y, suffix): """Create FOOOF (fitting oscillations and 1/f) report. Parameters ---------- x : ndarray vector with frequencies y : ndarray vector with amplitudes """ filename = splitext(self.filename)[0] + '_' + suffix + '_fooof.csv' freq = self.frequency freq_range = [freq['fo_min_freq'].get_value(), freq['fo_max_freq'].get_value()] pk_thresh = freq['fo_pk_thresh'].get_value() pk_width = [freq['fo_pk_width_min'].get_value(), freq['fo_pk_width_max'].get_value()] max_n_pk = freq['fo_max_n_pk'].get_value() min_pk_amp = freq['fo_min_pk_amp'].get_value() bg_mode = freq['fo_bg_mode'].get_value() if max_n_pk == 0: max_n_pk = inf # depends on [control=['if'], data=['max_n_pk']] if FOOOF is None: lg.warning('"fooof" package is required for this function, run "pip install fooof"') return # depends on [control=['if'], data=[]] fm = FOOOF(peak_width_limits=pk_width, max_n_peaks=max_n_pk, min_peak_amplitude=min_pk_amp, peak_threshold=pk_thresh, background_mode=bg_mode) fm.fit(x, y, freq_range) with open(filename, 'w', newline='') as f: lg.info('Writing to ' + str(filename)) csv_file = writer(f) csv_file.writerow(['Wonambi v{}'.format(__version__)]) csv_file.writerow(['FOOOF - POWER SPECTRUM MODEL']) csv_file.writerow('') csv_file.writerow(['The model was run on the frequency range {} - {} Hz'.format(int(floor(fm.freq_range[0])), int(ceil(fm.freq_range[1])))]) csv_file.writerow(['Frequency Resolution is {:1.2f} Hz'.format(fm.freq_res)]) csv_file.writerow('') csv_file.writerow(['Background Parameters (offset, ' + ('knee, ' if fm.background_mode == 'knee' else '') + 'slope): ' + ', '.join(['{:2.4f}'] * len(fm.background_params_)).format(*fm.background_params_)]) csv_file.writerow('') csv_file.writerow(['{} peaks were found:'.format(len(fm.peak_params_))]) csv_file.writerow('') csv_file.writerow(['Index', 'CF', 'Amp', 'BW']) for (i, op) in enumerate(fm.peak_params_): csv_file.writerow([i, op[0], op[1], op[2]]) # depends on [control=['for'], data=[]] csv_file.writerow('') csv_file.writerow(['Goodness of fit metrics:']) csv_file.writerow(['R^2 of model fit is {:5.4f}'.format(fm.r_squared_)]) csv_file.writerow(['Root mean squared error is {:5.4f}'.format(fm.error_)]) csv_file.writerow('') csv_file.writerow(['Haller M, Donoghue T, Peterson E, Varma P, Sebastian P, Gao R, Noto T, Knight RT, Shestyuk A, Voytek B (2018) Parameterizing Neural Power Spectra. bioRxiv, 299859. doi: https://doi.org/10.1101/299859']) # depends on [control=['with'], data=['f']]
def _parse_coverage(header_str): """Attempts to retrieve the coverage value from the header string. It splits the header by "_" and then screens the list backwards in search of the first float value. This will be interpreted as the coverage value. If it cannot find a float value, it returns None. This search methodology is based on the strings of assemblers like spades and skesa that put the mean kmer coverage for each contig in its corresponding fasta header. Parameters ---------- header_str : str String Returns ------- float or None The coverage value for the contig. None if it cannot find the value in the provide string. """ cov = None for i in header_str.split("_")[::-1]: try: cov = float(i) break except ValueError: continue return cov
def function[_parse_coverage, parameter[header_str]]: constant[Attempts to retrieve the coverage value from the header string. It splits the header by "_" and then screens the list backwards in search of the first float value. This will be interpreted as the coverage value. If it cannot find a float value, it returns None. This search methodology is based on the strings of assemblers like spades and skesa that put the mean kmer coverage for each contig in its corresponding fasta header. Parameters ---------- header_str : str String Returns ------- float or None The coverage value for the contig. None if it cannot find the value in the provide string. ] variable[cov] assign[=] constant[None] for taget[name[i]] in starred[call[call[name[header_str].split, parameter[constant[_]]]][<ast.Slice object at 0x7da1b02c5930>]] begin[:] <ast.Try object at 0x7da1b02c4190> return[name[cov]]
keyword[def] identifier[_parse_coverage] ( identifier[header_str] ): literal[string] identifier[cov] = keyword[None] keyword[for] identifier[i] keyword[in] identifier[header_str] . identifier[split] ( literal[string] )[::- literal[int] ]: keyword[try] : identifier[cov] = identifier[float] ( identifier[i] ) keyword[break] keyword[except] identifier[ValueError] : keyword[continue] keyword[return] identifier[cov]
def _parse_coverage(header_str): """Attempts to retrieve the coverage value from the header string. It splits the header by "_" and then screens the list backwards in search of the first float value. This will be interpreted as the coverage value. If it cannot find a float value, it returns None. This search methodology is based on the strings of assemblers like spades and skesa that put the mean kmer coverage for each contig in its corresponding fasta header. Parameters ---------- header_str : str String Returns ------- float or None The coverage value for the contig. None if it cannot find the value in the provide string. """ cov = None for i in header_str.split('_')[::-1]: try: cov = float(i) break # depends on [control=['try'], data=[]] except ValueError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']] return cov
def get_caption_formatted(self, field_formats = app_settings.MEDIA_TREE_METADATA_FORMATS, escape=True): """Returns object metadata that has been selected to be displayed to users, compiled as a string including default formatting, for example bold titles. You can use this method in templates where you want to output image captions. """ if self.override_caption != '': return self.override_caption else: return mark_safe(self.get_metadata_display(field_formats, escape=escape))
def function[get_caption_formatted, parameter[self, field_formats, escape]]: constant[Returns object metadata that has been selected to be displayed to users, compiled as a string including default formatting, for example bold titles. You can use this method in templates where you want to output image captions. ] if compare[name[self].override_caption not_equal[!=] constant[]] begin[:] return[name[self].override_caption]
keyword[def] identifier[get_caption_formatted] ( identifier[self] , identifier[field_formats] = identifier[app_settings] . identifier[MEDIA_TREE_METADATA_FORMATS] , identifier[escape] = keyword[True] ): literal[string] keyword[if] identifier[self] . identifier[override_caption] != literal[string] : keyword[return] identifier[self] . identifier[override_caption] keyword[else] : keyword[return] identifier[mark_safe] ( identifier[self] . identifier[get_metadata_display] ( identifier[field_formats] , identifier[escape] = identifier[escape] ))
def get_caption_formatted(self, field_formats=app_settings.MEDIA_TREE_METADATA_FORMATS, escape=True): """Returns object metadata that has been selected to be displayed to users, compiled as a string including default formatting, for example bold titles. You can use this method in templates where you want to output image captions. """ if self.override_caption != '': return self.override_caption # depends on [control=['if'], data=[]] else: return mark_safe(self.get_metadata_display(field_formats, escape=escape))
def partial_derivative_scalar(self, U, V, y=0): """Compute partial derivative :math:`C(u|v)` of cumulative density of single values.""" self.check_fit() X = np.column_stack((U, V)) return self.partial_derivative(X, y)
def function[partial_derivative_scalar, parameter[self, U, V, y]]: constant[Compute partial derivative :math:`C(u|v)` of cumulative density of single values.] call[name[self].check_fit, parameter[]] variable[X] assign[=] call[name[np].column_stack, parameter[tuple[[<ast.Name object at 0x7da1b1e00100>, <ast.Name object at 0x7da1b1e03850>]]]] return[call[name[self].partial_derivative, parameter[name[X], name[y]]]]
keyword[def] identifier[partial_derivative_scalar] ( identifier[self] , identifier[U] , identifier[V] , identifier[y] = literal[int] ): literal[string] identifier[self] . identifier[check_fit] () identifier[X] = identifier[np] . identifier[column_stack] (( identifier[U] , identifier[V] )) keyword[return] identifier[self] . identifier[partial_derivative] ( identifier[X] , identifier[y] )
def partial_derivative_scalar(self, U, V, y=0): """Compute partial derivative :math:`C(u|v)` of cumulative density of single values.""" self.check_fit() X = np.column_stack((U, V)) return self.partial_derivative(X, y)
def breadth_first_search(graph, root=None, filter=null()): """ Breadth-first search. @type graph: graph, digraph @param graph: Graph. @type root: node @param root: Optional root node (will explore only root's connected component) @rtype: tuple @return: A tuple containing a dictionary and a list. 1. Generated spanning tree 2. Graph's level-based ordering """ def bfs(): """ Breadth-first search subfunction. """ while (queue != []): node = queue.pop(0) for other in graph[node]: if (other not in spanning_tree and filter(other, node)): queue.append(other) ordering.append(other) spanning_tree[other] = node queue = [] # Visiting queue spanning_tree = {} # Spanning tree ordering = [] filter.configure(graph, spanning_tree) # BFS from one node only if (root is not None): if filter(root, None): queue.append(root) ordering.append(root) spanning_tree[root] = None bfs() return spanning_tree, ordering # Algorithm for each in graph: if (each not in spanning_tree): if filter(each, None): queue.append(each) ordering.append(each) spanning_tree[each] = None bfs() return spanning_tree, ordering
def function[breadth_first_search, parameter[graph, root, filter]]: constant[ Breadth-first search. @type graph: graph, digraph @param graph: Graph. @type root: node @param root: Optional root node (will explore only root's connected component) @rtype: tuple @return: A tuple containing a dictionary and a list. 1. Generated spanning tree 2. Graph's level-based ordering ] def function[bfs, parameter[]]: constant[ Breadth-first search subfunction. ] while compare[name[queue] not_equal[!=] list[[]]] begin[:] variable[node] assign[=] call[name[queue].pop, parameter[constant[0]]] for taget[name[other]] in starred[call[name[graph]][name[node]]] begin[:] if <ast.BoolOp object at 0x7da1b17cc5e0> begin[:] call[name[queue].append, parameter[name[other]]] call[name[ordering].append, parameter[name[other]]] call[name[spanning_tree]][name[other]] assign[=] name[node] variable[queue] assign[=] list[[]] variable[spanning_tree] assign[=] dictionary[[], []] variable[ordering] assign[=] list[[]] call[name[filter].configure, parameter[name[graph], name[spanning_tree]]] if compare[name[root] is_not constant[None]] begin[:] if call[name[filter], parameter[name[root], constant[None]]] begin[:] call[name[queue].append, parameter[name[root]]] call[name[ordering].append, parameter[name[root]]] call[name[spanning_tree]][name[root]] assign[=] constant[None] call[name[bfs], parameter[]] return[tuple[[<ast.Name object at 0x7da1b17cf2e0>, <ast.Name object at 0x7da1b17cf1f0>]]] for taget[name[each]] in starred[name[graph]] begin[:] if compare[name[each] <ast.NotIn object at 0x7da2590d7190> name[spanning_tree]] begin[:] if call[name[filter], parameter[name[each], constant[None]]] begin[:] call[name[queue].append, parameter[name[each]]] call[name[ordering].append, parameter[name[each]]] call[name[spanning_tree]][name[each]] assign[=] constant[None] call[name[bfs], parameter[]] return[tuple[[<ast.Name object at 0x7da1b17cd210>, <ast.Name object at 0x7da1b17cd240>]]]
keyword[def] identifier[breadth_first_search] ( identifier[graph] , identifier[root] = keyword[None] , identifier[filter] = identifier[null] ()): literal[string] keyword[def] identifier[bfs] (): literal[string] keyword[while] ( identifier[queue] !=[]): identifier[node] = identifier[queue] . identifier[pop] ( literal[int] ) keyword[for] identifier[other] keyword[in] identifier[graph] [ identifier[node] ]: keyword[if] ( identifier[other] keyword[not] keyword[in] identifier[spanning_tree] keyword[and] identifier[filter] ( identifier[other] , identifier[node] )): identifier[queue] . identifier[append] ( identifier[other] ) identifier[ordering] . identifier[append] ( identifier[other] ) identifier[spanning_tree] [ identifier[other] ]= identifier[node] identifier[queue] =[] identifier[spanning_tree] ={} identifier[ordering] =[] identifier[filter] . identifier[configure] ( identifier[graph] , identifier[spanning_tree] ) keyword[if] ( identifier[root] keyword[is] keyword[not] keyword[None] ): keyword[if] identifier[filter] ( identifier[root] , keyword[None] ): identifier[queue] . identifier[append] ( identifier[root] ) identifier[ordering] . identifier[append] ( identifier[root] ) identifier[spanning_tree] [ identifier[root] ]= keyword[None] identifier[bfs] () keyword[return] identifier[spanning_tree] , identifier[ordering] keyword[for] identifier[each] keyword[in] identifier[graph] : keyword[if] ( identifier[each] keyword[not] keyword[in] identifier[spanning_tree] ): keyword[if] identifier[filter] ( identifier[each] , keyword[None] ): identifier[queue] . identifier[append] ( identifier[each] ) identifier[ordering] . identifier[append] ( identifier[each] ) identifier[spanning_tree] [ identifier[each] ]= keyword[None] identifier[bfs] () keyword[return] identifier[spanning_tree] , identifier[ordering]
def breadth_first_search(graph, root=None, filter=null()): """ Breadth-first search. @type graph: graph, digraph @param graph: Graph. @type root: node @param root: Optional root node (will explore only root's connected component) @rtype: tuple @return: A tuple containing a dictionary and a list. 1. Generated spanning tree 2. Graph's level-based ordering """ def bfs(): """ Breadth-first search subfunction. """ while queue != []: node = queue.pop(0) for other in graph[node]: if other not in spanning_tree and filter(other, node): queue.append(other) ordering.append(other) spanning_tree[other] = node # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['other']] # depends on [control=['while'], data=['queue']] queue = [] # Visiting queue spanning_tree = {} # Spanning tree ordering = [] filter.configure(graph, spanning_tree) # BFS from one node only if root is not None: if filter(root, None): queue.append(root) ordering.append(root) spanning_tree[root] = None bfs() # depends on [control=['if'], data=[]] return (spanning_tree, ordering) # depends on [control=['if'], data=['root']] # Algorithm for each in graph: if each not in spanning_tree: if filter(each, None): queue.append(each) ordering.append(each) spanning_tree[each] = None bfs() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['each', 'spanning_tree']] # depends on [control=['for'], data=['each']] return (spanning_tree, ordering)
def almost_eq(a, b, threshold=0.000001): """ 比较两个小数是否几乎相等 当两数差的绝对值小于threshold时认为其相等 :param a: :param b: :param threshold: :return: True or False """ if abs(a - b) < threshold: return True else: return False
def function[almost_eq, parameter[a, b, threshold]]: constant[ 比较两个小数是否几乎相等 当两数差的绝对值小于threshold时认为其相等 :param a: :param b: :param threshold: :return: True or False ] if compare[call[name[abs], parameter[binary_operation[name[a] - name[b]]]] less[<] name[threshold]] begin[:] return[constant[True]]
keyword[def] identifier[almost_eq] ( identifier[a] , identifier[b] , identifier[threshold] = literal[int] ): literal[string] keyword[if] identifier[abs] ( identifier[a] - identifier[b] )< identifier[threshold] : keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def almost_eq(a, b, threshold=1e-06): """ 比较两个小数是否几乎相等 当两数差的绝对值小于threshold时认为其相等 :param a: :param b: :param threshold: :return: True or False """ if abs(a - b) < threshold: return True # depends on [control=['if'], data=[]] else: return False
def secgroup_create(self, name, description): ''' Create a security group ''' nt_ks = self.compute_conn nt_ks.security_groups.create(name, description) ret = {'name': name, 'description': description} return ret
def function[secgroup_create, parameter[self, name, description]]: constant[ Create a security group ] variable[nt_ks] assign[=] name[self].compute_conn call[name[nt_ks].security_groups.create, parameter[name[name], name[description]]] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21e03a0>, <ast.Constant object at 0x7da1b21e3df0>], [<ast.Name object at 0x7da1b21e29b0>, <ast.Name object at 0x7da1b21e0f70>]] return[name[ret]]
keyword[def] identifier[secgroup_create] ( identifier[self] , identifier[name] , identifier[description] ): literal[string] identifier[nt_ks] = identifier[self] . identifier[compute_conn] identifier[nt_ks] . identifier[security_groups] . identifier[create] ( identifier[name] , identifier[description] ) identifier[ret] ={ literal[string] : identifier[name] , literal[string] : identifier[description] } keyword[return] identifier[ret]
def secgroup_create(self, name, description): """ Create a security group """ nt_ks = self.compute_conn nt_ks.security_groups.create(name, description) ret = {'name': name, 'description': description} return ret
def cls_from_str(name: str) -> type: """Returns a class object with the name given as a string.""" try: module_name, cls_name = name.split(':') except ValueError: raise ConfigError('Expected class description in a `module.submodules:ClassName` form, but got `{}`' .format(name)) return getattr(importlib.import_module(module_name), cls_name)
def function[cls_from_str, parameter[name]]: constant[Returns a class object with the name given as a string.] <ast.Try object at 0x7da2054a7850> return[call[name[getattr], parameter[call[name[importlib].import_module, parameter[name[module_name]]], name[cls_name]]]]
keyword[def] identifier[cls_from_str] ( identifier[name] : identifier[str] )-> identifier[type] : literal[string] keyword[try] : identifier[module_name] , identifier[cls_name] = identifier[name] . identifier[split] ( literal[string] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ConfigError] ( literal[string] . identifier[format] ( identifier[name] )) keyword[return] identifier[getattr] ( identifier[importlib] . identifier[import_module] ( identifier[module_name] ), identifier[cls_name] )
def cls_from_str(name: str) -> type: """Returns a class object with the name given as a string.""" try: (module_name, cls_name) = name.split(':') # depends on [control=['try'], data=[]] except ValueError: raise ConfigError('Expected class description in a `module.submodules:ClassName` form, but got `{}`'.format(name)) # depends on [control=['except'], data=[]] return getattr(importlib.import_module(module_name), cls_name)
def read_byte_data(self, addr, cmd): """Read a single byte from the specified cmd register of the device.""" assert self._device is not None, 'Bus must be opened before operations are made against it!' # Build ctypes values to marshall between ioctl and Python. reg = c_uint8(cmd) result = c_uint8() # Build ioctl request. request = make_i2c_rdwr_data([ (addr, 0, 1, pointer(reg)), # Write cmd register. (addr, I2C_M_RD, 1, pointer(result)) # Read 1 byte as result. ]) # Make ioctl call and return result data. ioctl(self._device.fileno(), I2C_RDWR, request) return result.value
def function[read_byte_data, parameter[self, addr, cmd]]: constant[Read a single byte from the specified cmd register of the device.] assert[compare[name[self]._device is_not constant[None]]] variable[reg] assign[=] call[name[c_uint8], parameter[name[cmd]]] variable[result] assign[=] call[name[c_uint8], parameter[]] variable[request] assign[=] call[name[make_i2c_rdwr_data], parameter[list[[<ast.Tuple object at 0x7da1b0287be0>, <ast.Tuple object at 0x7da1b02850c0>]]]] call[name[ioctl], parameter[call[name[self]._device.fileno, parameter[]], name[I2C_RDWR], name[request]]] return[name[result].value]
keyword[def] identifier[read_byte_data] ( identifier[self] , identifier[addr] , identifier[cmd] ): literal[string] keyword[assert] identifier[self] . identifier[_device] keyword[is] keyword[not] keyword[None] , literal[string] identifier[reg] = identifier[c_uint8] ( identifier[cmd] ) identifier[result] = identifier[c_uint8] () identifier[request] = identifier[make_i2c_rdwr_data] ([ ( identifier[addr] , literal[int] , literal[int] , identifier[pointer] ( identifier[reg] )), ( identifier[addr] , identifier[I2C_M_RD] , literal[int] , identifier[pointer] ( identifier[result] )) ]) identifier[ioctl] ( identifier[self] . identifier[_device] . identifier[fileno] (), identifier[I2C_RDWR] , identifier[request] ) keyword[return] identifier[result] . identifier[value]
def read_byte_data(self, addr, cmd): """Read a single byte from the specified cmd register of the device.""" assert self._device is not None, 'Bus must be opened before operations are made against it!' # Build ctypes values to marshall between ioctl and Python. reg = c_uint8(cmd) result = c_uint8() # Build ioctl request. # Write cmd register. # Read 1 byte as result. request = make_i2c_rdwr_data([(addr, 0, 1, pointer(reg)), (addr, I2C_M_RD, 1, pointer(result))]) # Make ioctl call and return result data. ioctl(self._device.fileno(), I2C_RDWR, request) return result.value
def is_mapped_luks_device(dev): """ Determine if dev is a mapped LUKS device :param: dev: A full path to a block device to be checked :returns: boolean: indicates whether a device is mapped """ _, dirs, _ = next(os.walk( '/sys/class/block/{}/holders/' .format(os.path.basename(os.path.realpath(dev)))) ) is_held = len(dirs) > 0 return is_held and is_luks_device(dev)
def function[is_mapped_luks_device, parameter[dev]]: constant[ Determine if dev is a mapped LUKS device :param: dev: A full path to a block device to be checked :returns: boolean: indicates whether a device is mapped ] <ast.Tuple object at 0x7da1b12184f0> assign[=] call[name[next], parameter[call[name[os].walk, parameter[call[constant[/sys/class/block/{}/holders/].format, parameter[call[name[os].path.basename, parameter[call[name[os].path.realpath, parameter[name[dev]]]]]]]]]]] variable[is_held] assign[=] compare[call[name[len], parameter[name[dirs]]] greater[>] constant[0]] return[<ast.BoolOp object at 0x7da1b121a290>]
keyword[def] identifier[is_mapped_luks_device] ( identifier[dev] ): literal[string] identifier[_] , identifier[dirs] , identifier[_] = identifier[next] ( identifier[os] . identifier[walk] ( literal[string] . identifier[format] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[dev] )))) ) identifier[is_held] = identifier[len] ( identifier[dirs] )> literal[int] keyword[return] identifier[is_held] keyword[and] identifier[is_luks_device] ( identifier[dev] )
def is_mapped_luks_device(dev): """ Determine if dev is a mapped LUKS device :param: dev: A full path to a block device to be checked :returns: boolean: indicates whether a device is mapped """ (_, dirs, _) = next(os.walk('/sys/class/block/{}/holders/'.format(os.path.basename(os.path.realpath(dev))))) is_held = len(dirs) > 0 return is_held and is_luks_device(dev)
def getRegion(self,size=3e4,min_nSNPs=1,chrom_i=None,pos_min=None,pos_max=None): """ Sample a region from the piece of genotype X, chrom, pos minSNPnum: minimum number of SNPs contained in the region Ichrom: restrict X to chromosome Ichrom before taking the region cis: bool vector that marks the sorted region region: vector that contains chrom and init and final position of the region """ if (self.chrom is None) or (self.pos is None): bim = plink_reader.readBIM(self.bfile,usecols=(0,1,2,3)) chrom = SP.array(bim[:,0],dtype=int) pos = SP.array(bim[:,3],dtype=int) else: chrom = self.chrom pos = self.pos if chrom_i is None: n_chroms = chrom.max() chrom_i = int(SP.ceil(SP.rand()*n_chroms)) pos = pos[chrom==chrom_i] chrom = chrom[chrom==chrom_i] ipos = SP.ones(len(pos),dtype=bool) if pos_min is not None: ipos = SP.logical_and(ipos,pos_min<pos) if pos_max is not None: ipos = SP.logical_and(ipos,pos<pos_max) pos = pos[ipos] chrom = chrom[ipos] if size==1: # select single SNP idx = int(SP.ceil(pos.shape[0]*SP.rand())) cis = SP.arange(pos.shape[0])==idx region = SP.array([chrom_i,pos[idx],pos[idx]]) else: while 1: idx = int(SP.floor(pos.shape[0]*SP.rand())) posT1 = pos[idx] posT2 = pos[idx]+size if posT2<=pos.max(): cis = chrom==chrom_i cis*= (pos>posT1)*(pos<posT2) if cis.sum()>min_nSNPs: break region = SP.array([chrom_i,posT1,posT2]) start = SP.nonzero(cis)[0].min() nSNPs = cis.sum() if self.X is None: rv = plink_reader.readBED(self.bfile,useMAFencoding=True,start = start, nSNPs = nSNPs,bim=bim) Xr = rv['snps'] else: Xr = self.X[:,start:start+nSnps] return Xr, region
def function[getRegion, parameter[self, size, min_nSNPs, chrom_i, pos_min, pos_max]]: constant[ Sample a region from the piece of genotype X, chrom, pos minSNPnum: minimum number of SNPs contained in the region Ichrom: restrict X to chromosome Ichrom before taking the region cis: bool vector that marks the sorted region region: vector that contains chrom and init and final position of the region ] if <ast.BoolOp object at 0x7da20c6c6f80> begin[:] variable[bim] assign[=] call[name[plink_reader].readBIM, parameter[name[self].bfile]] variable[chrom] assign[=] call[name[SP].array, parameter[call[name[bim]][tuple[[<ast.Slice object at 0x7da20c6c5a50>, <ast.Constant object at 0x7da20c6c42b0>]]]]] variable[pos] assign[=] call[name[SP].array, parameter[call[name[bim]][tuple[[<ast.Slice object at 0x7da20c6c5840>, <ast.Constant object at 0x7da20c6c7850>]]]]] if compare[name[chrom_i] is constant[None]] begin[:] variable[n_chroms] assign[=] call[name[chrom].max, parameter[]] variable[chrom_i] assign[=] call[name[int], parameter[call[name[SP].ceil, parameter[binary_operation[call[name[SP].rand, parameter[]] * name[n_chroms]]]]]] variable[pos] assign[=] call[name[pos]][compare[name[chrom] equal[==] name[chrom_i]]] variable[chrom] assign[=] call[name[chrom]][compare[name[chrom] equal[==] name[chrom_i]]] variable[ipos] assign[=] call[name[SP].ones, parameter[call[name[len], parameter[name[pos]]]]] if compare[name[pos_min] is_not constant[None]] begin[:] variable[ipos] assign[=] call[name[SP].logical_and, parameter[name[ipos], compare[name[pos_min] less[<] name[pos]]]] if compare[name[pos_max] is_not constant[None]] begin[:] variable[ipos] assign[=] call[name[SP].logical_and, parameter[name[ipos], compare[name[pos] less[<] name[pos_max]]]] variable[pos] assign[=] call[name[pos]][name[ipos]] variable[chrom] assign[=] call[name[chrom]][name[ipos]] if compare[name[size] equal[==] constant[1]] begin[:] variable[idx] assign[=] call[name[int], parameter[call[name[SP].ceil, parameter[binary_operation[call[name[pos].shape][constant[0]] * call[name[SP].rand, parameter[]]]]]]] variable[cis] assign[=] compare[call[name[SP].arange, parameter[call[name[pos].shape][constant[0]]]] equal[==] name[idx]] variable[region] assign[=] call[name[SP].array, parameter[list[[<ast.Name object at 0x7da20c6c5f00>, <ast.Subscript object at 0x7da20c6c7e80>, <ast.Subscript object at 0x7da20c6c41f0>]]]] variable[start] assign[=] call[call[call[name[SP].nonzero, parameter[name[cis]]]][constant[0]].min, parameter[]] variable[nSNPs] assign[=] call[name[cis].sum, parameter[]] if compare[name[self].X is constant[None]] begin[:] variable[rv] assign[=] call[name[plink_reader].readBED, parameter[name[self].bfile]] variable[Xr] assign[=] call[name[rv]][constant[snps]] return[tuple[[<ast.Name object at 0x7da20c6e6770>, <ast.Name object at 0x7da20c6e5420>]]]
keyword[def] identifier[getRegion] ( identifier[self] , identifier[size] = literal[int] , identifier[min_nSNPs] = literal[int] , identifier[chrom_i] = keyword[None] , identifier[pos_min] = keyword[None] , identifier[pos_max] = keyword[None] ): literal[string] keyword[if] ( identifier[self] . identifier[chrom] keyword[is] keyword[None] ) keyword[or] ( identifier[self] . identifier[pos] keyword[is] keyword[None] ): identifier[bim] = identifier[plink_reader] . identifier[readBIM] ( identifier[self] . identifier[bfile] , identifier[usecols] =( literal[int] , literal[int] , literal[int] , literal[int] )) identifier[chrom] = identifier[SP] . identifier[array] ( identifier[bim] [:, literal[int] ], identifier[dtype] = identifier[int] ) identifier[pos] = identifier[SP] . identifier[array] ( identifier[bim] [:, literal[int] ], identifier[dtype] = identifier[int] ) keyword[else] : identifier[chrom] = identifier[self] . identifier[chrom] identifier[pos] = identifier[self] . identifier[pos] keyword[if] identifier[chrom_i] keyword[is] keyword[None] : identifier[n_chroms] = identifier[chrom] . identifier[max] () identifier[chrom_i] = identifier[int] ( identifier[SP] . identifier[ceil] ( identifier[SP] . identifier[rand] ()* identifier[n_chroms] )) identifier[pos] = identifier[pos] [ identifier[chrom] == identifier[chrom_i] ] identifier[chrom] = identifier[chrom] [ identifier[chrom] == identifier[chrom_i] ] identifier[ipos] = identifier[SP] . identifier[ones] ( identifier[len] ( identifier[pos] ), identifier[dtype] = identifier[bool] ) keyword[if] identifier[pos_min] keyword[is] keyword[not] keyword[None] : identifier[ipos] = identifier[SP] . identifier[logical_and] ( identifier[ipos] , identifier[pos_min] < identifier[pos] ) keyword[if] identifier[pos_max] keyword[is] keyword[not] keyword[None] : identifier[ipos] = identifier[SP] . identifier[logical_and] ( identifier[ipos] , identifier[pos] < identifier[pos_max] ) identifier[pos] = identifier[pos] [ identifier[ipos] ] identifier[chrom] = identifier[chrom] [ identifier[ipos] ] keyword[if] identifier[size] == literal[int] : identifier[idx] = identifier[int] ( identifier[SP] . identifier[ceil] ( identifier[pos] . identifier[shape] [ literal[int] ]* identifier[SP] . identifier[rand] ())) identifier[cis] = identifier[SP] . identifier[arange] ( identifier[pos] . identifier[shape] [ literal[int] ])== identifier[idx] identifier[region] = identifier[SP] . identifier[array] ([ identifier[chrom_i] , identifier[pos] [ identifier[idx] ], identifier[pos] [ identifier[idx] ]]) keyword[else] : keyword[while] literal[int] : identifier[idx] = identifier[int] ( identifier[SP] . identifier[floor] ( identifier[pos] . identifier[shape] [ literal[int] ]* identifier[SP] . identifier[rand] ())) identifier[posT1] = identifier[pos] [ identifier[idx] ] identifier[posT2] = identifier[pos] [ identifier[idx] ]+ identifier[size] keyword[if] identifier[posT2] <= identifier[pos] . identifier[max] (): identifier[cis] = identifier[chrom] == identifier[chrom_i] identifier[cis] *=( identifier[pos] > identifier[posT1] )*( identifier[pos] < identifier[posT2] ) keyword[if] identifier[cis] . identifier[sum] ()> identifier[min_nSNPs] : keyword[break] identifier[region] = identifier[SP] . identifier[array] ([ identifier[chrom_i] , identifier[posT1] , identifier[posT2] ]) identifier[start] = identifier[SP] . identifier[nonzero] ( identifier[cis] )[ literal[int] ]. identifier[min] () identifier[nSNPs] = identifier[cis] . identifier[sum] () keyword[if] identifier[self] . identifier[X] keyword[is] keyword[None] : identifier[rv] = identifier[plink_reader] . identifier[readBED] ( identifier[self] . identifier[bfile] , identifier[useMAFencoding] = keyword[True] , identifier[start] = identifier[start] , identifier[nSNPs] = identifier[nSNPs] , identifier[bim] = identifier[bim] ) identifier[Xr] = identifier[rv] [ literal[string] ] keyword[else] : identifier[Xr] = identifier[self] . identifier[X] [:, identifier[start] : identifier[start] + identifier[nSnps] ] keyword[return] identifier[Xr] , identifier[region]
def getRegion(self, size=30000.0, min_nSNPs=1, chrom_i=None, pos_min=None, pos_max=None): """ Sample a region from the piece of genotype X, chrom, pos minSNPnum: minimum number of SNPs contained in the region Ichrom: restrict X to chromosome Ichrom before taking the region cis: bool vector that marks the sorted region region: vector that contains chrom and init and final position of the region """ if self.chrom is None or self.pos is None: bim = plink_reader.readBIM(self.bfile, usecols=(0, 1, 2, 3)) chrom = SP.array(bim[:, 0], dtype=int) pos = SP.array(bim[:, 3], dtype=int) # depends on [control=['if'], data=[]] else: chrom = self.chrom pos = self.pos if chrom_i is None: n_chroms = chrom.max() chrom_i = int(SP.ceil(SP.rand() * n_chroms)) # depends on [control=['if'], data=['chrom_i']] pos = pos[chrom == chrom_i] chrom = chrom[chrom == chrom_i] ipos = SP.ones(len(pos), dtype=bool) if pos_min is not None: ipos = SP.logical_and(ipos, pos_min < pos) # depends on [control=['if'], data=['pos_min']] if pos_max is not None: ipos = SP.logical_and(ipos, pos < pos_max) # depends on [control=['if'], data=['pos_max']] pos = pos[ipos] chrom = chrom[ipos] if size == 1: # select single SNP idx = int(SP.ceil(pos.shape[0] * SP.rand())) cis = SP.arange(pos.shape[0]) == idx region = SP.array([chrom_i, pos[idx], pos[idx]]) # depends on [control=['if'], data=[]] else: while 1: idx = int(SP.floor(pos.shape[0] * SP.rand())) posT1 = pos[idx] posT2 = pos[idx] + size if posT2 <= pos.max(): cis = chrom == chrom_i cis *= (pos > posT1) * (pos < posT2) if cis.sum() > min_nSNPs: break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['posT2']] # depends on [control=['while'], data=[]] region = SP.array([chrom_i, posT1, posT2]) start = SP.nonzero(cis)[0].min() nSNPs = cis.sum() if self.X is None: rv = plink_reader.readBED(self.bfile, useMAFencoding=True, start=start, nSNPs=nSNPs, bim=bim) Xr = rv['snps'] # depends on [control=['if'], data=[]] else: Xr = self.X[:, start:start + nSnps] return (Xr, region)
def box_show(text, width=100, height=3, corner="+", horizontal="-", vertical="|"): """Print a formatted ascii text box. """ print(StrTemplate.box(text=text, width=width, height=height, corner=corner, horizontal=horizontal, vertical=vertical))
def function[box_show, parameter[text, width, height, corner, horizontal, vertical]]: constant[Print a formatted ascii text box. ] call[name[print], parameter[call[name[StrTemplate].box, parameter[]]]]
keyword[def] identifier[box_show] ( identifier[text] , identifier[width] = literal[int] , identifier[height] = literal[int] , identifier[corner] = literal[string] , identifier[horizontal] = literal[string] , identifier[vertical] = literal[string] ): literal[string] identifier[print] ( identifier[StrTemplate] . identifier[box] ( identifier[text] = identifier[text] , identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[corner] = identifier[corner] , identifier[horizontal] = identifier[horizontal] , identifier[vertical] = identifier[vertical] ))
def box_show(text, width=100, height=3, corner='+', horizontal='-', vertical='|'): """Print a formatted ascii text box. """ print(StrTemplate.box(text=text, width=width, height=height, corner=corner, horizontal=horizontal, vertical=vertical))
def query(self, query_type, query=None): """Run the given query on the connection (POST request to /query)""" return self.handleresult(self.r.post(urljoin(self.url + "query/", query_type), data=json.dumps(query))).json()
def function[query, parameter[self, query_type, query]]: constant[Run the given query on the connection (POST request to /query)] return[call[call[name[self].handleresult, parameter[call[name[self].r.post, parameter[call[name[urljoin], parameter[binary_operation[name[self].url + constant[query/]], name[query_type]]]]]]].json, parameter[]]]
keyword[def] identifier[query] ( identifier[self] , identifier[query_type] , identifier[query] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[handleresult] ( identifier[self] . identifier[r] . identifier[post] ( identifier[urljoin] ( identifier[self] . identifier[url] + literal[string] , identifier[query_type] ), identifier[data] = identifier[json] . identifier[dumps] ( identifier[query] ))). identifier[json] ()
def query(self, query_type, query=None): """Run the given query on the connection (POST request to /query)""" return self.handleresult(self.r.post(urljoin(self.url + 'query/', query_type), data=json.dumps(query))).json()
def _go_install(self, target, gopath, build_flags): """Create and execute a `go install` command.""" args = build_flags + [target.import_path] result, go_cmd = self.go_dist.execute_go_cmd( 'install', gopath=gopath, args=args, workunit_factory=self.context.new_workunit, workunit_name='install {}'.format(target.import_path), workunit_labels=[WorkUnitLabel.COMPILER]) if result != 0: raise TaskError('{} failed with exit code {}'.format(go_cmd, result))
def function[_go_install, parameter[self, target, gopath, build_flags]]: constant[Create and execute a `go install` command.] variable[args] assign[=] binary_operation[name[build_flags] + list[[<ast.Attribute object at 0x7da1b1d35330>]]] <ast.Tuple object at 0x7da1b1d34430> assign[=] call[name[self].go_dist.execute_go_cmd, parameter[constant[install]]] if compare[name[result] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b22a62c0>
keyword[def] identifier[_go_install] ( identifier[self] , identifier[target] , identifier[gopath] , identifier[build_flags] ): literal[string] identifier[args] = identifier[build_flags] +[ identifier[target] . identifier[import_path] ] identifier[result] , identifier[go_cmd] = identifier[self] . identifier[go_dist] . identifier[execute_go_cmd] ( literal[string] , identifier[gopath] = identifier[gopath] , identifier[args] = identifier[args] , identifier[workunit_factory] = identifier[self] . identifier[context] . identifier[new_workunit] , identifier[workunit_name] = literal[string] . identifier[format] ( identifier[target] . identifier[import_path] ), identifier[workunit_labels] =[ identifier[WorkUnitLabel] . identifier[COMPILER] ]) keyword[if] identifier[result] != literal[int] : keyword[raise] identifier[TaskError] ( literal[string] . identifier[format] ( identifier[go_cmd] , identifier[result] ))
def _go_install(self, target, gopath, build_flags): """Create and execute a `go install` command.""" args = build_flags + [target.import_path] (result, go_cmd) = self.go_dist.execute_go_cmd('install', gopath=gopath, args=args, workunit_factory=self.context.new_workunit, workunit_name='install {}'.format(target.import_path), workunit_labels=[WorkUnitLabel.COMPILER]) if result != 0: raise TaskError('{} failed with exit code {}'.format(go_cmd, result)) # depends on [control=['if'], data=['result']]
def _apply(self, ctx: ExtensionContext) -> AugmentedDict: """ Replaces any {{env::*}} directives with it's actual environment variable value or a default. Args: ctx: The processing context. Returns: Returns the altered node key and value. """ node_key, node_value = ctx.node def process(pattern: Pattern[str], _str: str) -> str: _match = pattern.match(_str) if _match is None: return _str # We got a match # Group 0: Whole match; Group 1: Our placeholder; Group 2: The environment variable placeholder, envvar = _match.group(1), _match.group(2) envvalue = os.environ.get(envvar, None) if envvalue is None and self.fail_on_unset: raise ExtensionError("Environment variable '{}' is unset.".format(envvar)) return _str.replace(placeholder, envvalue or self.default) _pattern = re.compile(self.__pattern__) node_key = process(_pattern, node_key) node_value = process(_pattern, node_value) return {node_key: node_value}
def function[_apply, parameter[self, ctx]]: constant[ Replaces any {{env::*}} directives with it's actual environment variable value or a default. Args: ctx: The processing context. Returns: Returns the altered node key and value. ] <ast.Tuple object at 0x7da207f99330> assign[=] name[ctx].node def function[process, parameter[pattern, _str]]: variable[_match] assign[=] call[name[pattern].match, parameter[name[_str]]] if compare[name[_match] is constant[None]] begin[:] return[name[_str]] <ast.Tuple object at 0x7da207f9b640> assign[=] tuple[[<ast.Call object at 0x7da207f981f0>, <ast.Call object at 0x7da207f999c0>]] variable[envvalue] assign[=] call[name[os].environ.get, parameter[name[envvar], constant[None]]] if <ast.BoolOp object at 0x7da207f9a2c0> begin[:] <ast.Raise object at 0x7da207f9b3a0> return[call[name[_str].replace, parameter[name[placeholder], <ast.BoolOp object at 0x7da207f9a560>]]] variable[_pattern] assign[=] call[name[re].compile, parameter[name[self].__pattern__]] variable[node_key] assign[=] call[name[process], parameter[name[_pattern], name[node_key]]] variable[node_value] assign[=] call[name[process], parameter[name[_pattern], name[node_value]]] return[dictionary[[<ast.Name object at 0x7da207f9ae60>], [<ast.Name object at 0x7da207f9ae90>]]]
keyword[def] identifier[_apply] ( identifier[self] , identifier[ctx] : identifier[ExtensionContext] )-> identifier[AugmentedDict] : literal[string] identifier[node_key] , identifier[node_value] = identifier[ctx] . identifier[node] keyword[def] identifier[process] ( identifier[pattern] : identifier[Pattern] [ identifier[str] ], identifier[_str] : identifier[str] )-> identifier[str] : identifier[_match] = identifier[pattern] . identifier[match] ( identifier[_str] ) keyword[if] identifier[_match] keyword[is] keyword[None] : keyword[return] identifier[_str] identifier[placeholder] , identifier[envvar] = identifier[_match] . identifier[group] ( literal[int] ), identifier[_match] . identifier[group] ( literal[int] ) identifier[envvalue] = identifier[os] . identifier[environ] . identifier[get] ( identifier[envvar] , keyword[None] ) keyword[if] identifier[envvalue] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[fail_on_unset] : keyword[raise] identifier[ExtensionError] ( literal[string] . identifier[format] ( identifier[envvar] )) keyword[return] identifier[_str] . identifier[replace] ( identifier[placeholder] , identifier[envvalue] keyword[or] identifier[self] . identifier[default] ) identifier[_pattern] = identifier[re] . identifier[compile] ( identifier[self] . identifier[__pattern__] ) identifier[node_key] = identifier[process] ( identifier[_pattern] , identifier[node_key] ) identifier[node_value] = identifier[process] ( identifier[_pattern] , identifier[node_value] ) keyword[return] { identifier[node_key] : identifier[node_value] }
def _apply(self, ctx: ExtensionContext) -> AugmentedDict: """ Replaces any {{env::*}} directives with it's actual environment variable value or a default. Args: ctx: The processing context. Returns: Returns the altered node key and value. """ (node_key, node_value) = ctx.node def process(pattern: Pattern[str], _str: str) -> str: _match = pattern.match(_str) if _match is None: return _str # depends on [control=['if'], data=[]] # We got a match # Group 0: Whole match; Group 1: Our placeholder; Group 2: The environment variable (placeholder, envvar) = (_match.group(1), _match.group(2)) envvalue = os.environ.get(envvar, None) if envvalue is None and self.fail_on_unset: raise ExtensionError("Environment variable '{}' is unset.".format(envvar)) # depends on [control=['if'], data=[]] return _str.replace(placeholder, envvalue or self.default) _pattern = re.compile(self.__pattern__) node_key = process(_pattern, node_key) node_value = process(_pattern, node_value) return {node_key: node_value}
def derivativeX(self,x,y): ''' Evaluates the partial derivative of interpolated function with respect to x (the first argument) at the given input. Parameters ---------- x : np.array or float Real values to be evaluated in the interpolated function. y : np.array or float Real values to be evaluated in the interpolated function; must be the same size as x. Returns ------- dfdx : np.array or float The derivative of the interpolated function with respect to x, eval- uated at x,y: dfdx = f_x(x,y), with the same shape as x and y. ''' xa = np.asarray(x) ya = np.asarray(y) return (self._derX(xa.flatten(),ya.flatten())).reshape(xa.shape)
def function[derivativeX, parameter[self, x, y]]: constant[ Evaluates the partial derivative of interpolated function with respect to x (the first argument) at the given input. Parameters ---------- x : np.array or float Real values to be evaluated in the interpolated function. y : np.array or float Real values to be evaluated in the interpolated function; must be the same size as x. Returns ------- dfdx : np.array or float The derivative of the interpolated function with respect to x, eval- uated at x,y: dfdx = f_x(x,y), with the same shape as x and y. ] variable[xa] assign[=] call[name[np].asarray, parameter[name[x]]] variable[ya] assign[=] call[name[np].asarray, parameter[name[y]]] return[call[call[name[self]._derX, parameter[call[name[xa].flatten, parameter[]], call[name[ya].flatten, parameter[]]]].reshape, parameter[name[xa].shape]]]
keyword[def] identifier[derivativeX] ( identifier[self] , identifier[x] , identifier[y] ): literal[string] identifier[xa] = identifier[np] . identifier[asarray] ( identifier[x] ) identifier[ya] = identifier[np] . identifier[asarray] ( identifier[y] ) keyword[return] ( identifier[self] . identifier[_derX] ( identifier[xa] . identifier[flatten] (), identifier[ya] . identifier[flatten] ())). identifier[reshape] ( identifier[xa] . identifier[shape] )
def derivativeX(self, x, y): """ Evaluates the partial derivative of interpolated function with respect to x (the first argument) at the given input. Parameters ---------- x : np.array or float Real values to be evaluated in the interpolated function. y : np.array or float Real values to be evaluated in the interpolated function; must be the same size as x. Returns ------- dfdx : np.array or float The derivative of the interpolated function with respect to x, eval- uated at x,y: dfdx = f_x(x,y), with the same shape as x and y. """ xa = np.asarray(x) ya = np.asarray(y) return self._derX(xa.flatten(), ya.flatten()).reshape(xa.shape)
def firmware_download_input_reboot_options_auto_activate_auto_activate(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") firmware_download = ET.Element("firmware_download") config = firmware_download input = ET.SubElement(firmware_download, "input") reboot_options = ET.SubElement(input, "reboot-options") auto_activate = ET.SubElement(reboot_options, "auto-activate") auto_activate = ET.SubElement(auto_activate, "auto-activate") callback = kwargs.pop('callback', self._callback) return callback(config)
def function[firmware_download_input_reboot_options_auto_activate_auto_activate, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[firmware_download] assign[=] call[name[ET].Element, parameter[constant[firmware_download]]] variable[config] assign[=] name[firmware_download] variable[input] assign[=] call[name[ET].SubElement, parameter[name[firmware_download], constant[input]]] variable[reboot_options] assign[=] call[name[ET].SubElement, parameter[name[input], constant[reboot-options]]] variable[auto_activate] assign[=] call[name[ET].SubElement, parameter[name[reboot_options], constant[auto-activate]]] variable[auto_activate] assign[=] call[name[ET].SubElement, parameter[name[auto_activate], constant[auto-activate]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[firmware_download_input_reboot_options_auto_activate_auto_activate] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[firmware_download] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[firmware_download] identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[firmware_download] , literal[string] ) identifier[reboot_options] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] ) identifier[auto_activate] = identifier[ET] . identifier[SubElement] ( identifier[reboot_options] , literal[string] ) identifier[auto_activate] = identifier[ET] . identifier[SubElement] ( identifier[auto_activate] , literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def firmware_download_input_reboot_options_auto_activate_auto_activate(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') firmware_download = ET.Element('firmware_download') config = firmware_download input = ET.SubElement(firmware_download, 'input') reboot_options = ET.SubElement(input, 'reboot-options') auto_activate = ET.SubElement(reboot_options, 'auto-activate') auto_activate = ET.SubElement(auto_activate, 'auto-activate') callback = kwargs.pop('callback', self._callback) return callback(config)
def write_api_docs(self, outdir): """Generate API reST files. Parameters ---------- outdir : string Directory name in which to store files We create automatic filenames for each module Returns ------- None Notes ----- Sets self.written_modules to list of written modules """ if not os.path.exists(outdir): os.mkdir(outdir) # compose list of modules modules = self.discover_modules() self.write_modules_api(modules,outdir)
def function[write_api_docs, parameter[self, outdir]]: constant[Generate API reST files. Parameters ---------- outdir : string Directory name in which to store files We create automatic filenames for each module Returns ------- None Notes ----- Sets self.written_modules to list of written modules ] if <ast.UnaryOp object at 0x7da20c6e6020> begin[:] call[name[os].mkdir, parameter[name[outdir]]] variable[modules] assign[=] call[name[self].discover_modules, parameter[]] call[name[self].write_modules_api, parameter[name[modules], name[outdir]]]
keyword[def] identifier[write_api_docs] ( identifier[self] , identifier[outdir] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[outdir] ): identifier[os] . identifier[mkdir] ( identifier[outdir] ) identifier[modules] = identifier[self] . identifier[discover_modules] () identifier[self] . identifier[write_modules_api] ( identifier[modules] , identifier[outdir] )
def write_api_docs(self, outdir): """Generate API reST files. Parameters ---------- outdir : string Directory name in which to store files We create automatic filenames for each module Returns ------- None Notes ----- Sets self.written_modules to list of written modules """ if not os.path.exists(outdir): os.mkdir(outdir) # depends on [control=['if'], data=[]] # compose list of modules modules = self.discover_modules() self.write_modules_api(modules, outdir)
def update_footer(self): """Update footer when the field list change.""" field_item = self.field_list.currentItem() if not field_item: self.footer_label.setText('') return field_name = field_item.data(Qt.UserRole) field = self.layer.fields().field(field_name) index = self.layer.fields().lookupField(field_name) unique_values = list(self.layer.uniqueValues(index)) pretty_unique_values = ', '.join([str(v) for v in unique_values[:10]]) footer_text = tr('Field type: {0}\n').format(field.typeName()) footer_text += tr('Unique values: {0}').format(pretty_unique_values) self.footer_label.setText(footer_text)
def function[update_footer, parameter[self]]: constant[Update footer when the field list change.] variable[field_item] assign[=] call[name[self].field_list.currentItem, parameter[]] if <ast.UnaryOp object at 0x7da1b0efa380> begin[:] call[name[self].footer_label.setText, parameter[constant[]]] return[None] variable[field_name] assign[=] call[name[field_item].data, parameter[name[Qt].UserRole]] variable[field] assign[=] call[call[name[self].layer.fields, parameter[]].field, parameter[name[field_name]]] variable[index] assign[=] call[call[name[self].layer.fields, parameter[]].lookupField, parameter[name[field_name]]] variable[unique_values] assign[=] call[name[list], parameter[call[name[self].layer.uniqueValues, parameter[name[index]]]]] variable[pretty_unique_values] assign[=] call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b0ef8c10>]] variable[footer_text] assign[=] call[call[name[tr], parameter[constant[Field type: {0} ]]].format, parameter[call[name[field].typeName, parameter[]]]] <ast.AugAssign object at 0x7da1b0ef8c40> call[name[self].footer_label.setText, parameter[name[footer_text]]]
keyword[def] identifier[update_footer] ( identifier[self] ): literal[string] identifier[field_item] = identifier[self] . identifier[field_list] . identifier[currentItem] () keyword[if] keyword[not] identifier[field_item] : identifier[self] . identifier[footer_label] . identifier[setText] ( literal[string] ) keyword[return] identifier[field_name] = identifier[field_item] . identifier[data] ( identifier[Qt] . identifier[UserRole] ) identifier[field] = identifier[self] . identifier[layer] . identifier[fields] (). identifier[field] ( identifier[field_name] ) identifier[index] = identifier[self] . identifier[layer] . identifier[fields] (). identifier[lookupField] ( identifier[field_name] ) identifier[unique_values] = identifier[list] ( identifier[self] . identifier[layer] . identifier[uniqueValues] ( identifier[index] )) identifier[pretty_unique_values] = literal[string] . identifier[join] ([ identifier[str] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[unique_values] [: literal[int] ]]) identifier[footer_text] = identifier[tr] ( literal[string] ). identifier[format] ( identifier[field] . identifier[typeName] ()) identifier[footer_text] += identifier[tr] ( literal[string] ). identifier[format] ( identifier[pretty_unique_values] ) identifier[self] . identifier[footer_label] . identifier[setText] ( identifier[footer_text] )
def update_footer(self): """Update footer when the field list change.""" field_item = self.field_list.currentItem() if not field_item: self.footer_label.setText('') return # depends on [control=['if'], data=[]] field_name = field_item.data(Qt.UserRole) field = self.layer.fields().field(field_name) index = self.layer.fields().lookupField(field_name) unique_values = list(self.layer.uniqueValues(index)) pretty_unique_values = ', '.join([str(v) for v in unique_values[:10]]) footer_text = tr('Field type: {0}\n').format(field.typeName()) footer_text += tr('Unique values: {0}').format(pretty_unique_values) self.footer_label.setText(footer_text)
def require_sender(f): """A decorator that protect emit view function is triggered by a trusted sender. Currently, Rio only support Basic Authorization. """ @wraps(f) def decorator(*args, **kwargs): if not request.authorization: return jsonify({'message': 'unauthorized'}), 401 username = request.authorization.username password = request.authorization.password if not validate_sender(username, password): return jsonify({'message': 'forbidden'}), 403 return f(*args, **kwargs) return decorator
def function[require_sender, parameter[f]]: constant[A decorator that protect emit view function is triggered by a trusted sender. Currently, Rio only support Basic Authorization. ] def function[decorator, parameter[]]: if <ast.UnaryOp object at 0x7da18bcc8220> begin[:] return[tuple[[<ast.Call object at 0x7da1b0a62aa0>, <ast.Constant object at 0x7da1b0a63730>]]] variable[username] assign[=] name[request].authorization.username variable[password] assign[=] name[request].authorization.password if <ast.UnaryOp object at 0x7da1b0a61210> begin[:] return[tuple[[<ast.Call object at 0x7da1b0a60c70>, <ast.Constant object at 0x7da1b0a61690>]]] return[call[name[f], parameter[<ast.Starred object at 0x7da1b0a63cd0>]]] return[name[decorator]]
keyword[def] identifier[require_sender] ( identifier[f] ): literal[string] @ identifier[wraps] ( identifier[f] ) keyword[def] identifier[decorator] (* identifier[args] ,** identifier[kwargs] ): keyword[if] keyword[not] identifier[request] . identifier[authorization] : keyword[return] identifier[jsonify] ({ literal[string] : literal[string] }), literal[int] identifier[username] = identifier[request] . identifier[authorization] . identifier[username] identifier[password] = identifier[request] . identifier[authorization] . identifier[password] keyword[if] keyword[not] identifier[validate_sender] ( identifier[username] , identifier[password] ): keyword[return] identifier[jsonify] ({ literal[string] : literal[string] }), literal[int] keyword[return] identifier[f] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[decorator]
def require_sender(f): """A decorator that protect emit view function is triggered by a trusted sender. Currently, Rio only support Basic Authorization. """ @wraps(f) def decorator(*args, **kwargs): if not request.authorization: return (jsonify({'message': 'unauthorized'}), 401) # depends on [control=['if'], data=[]] username = request.authorization.username password = request.authorization.password if not validate_sender(username, password): return (jsonify({'message': 'forbidden'}), 403) # depends on [control=['if'], data=[]] return f(*args, **kwargs) return decorator
def remove_attribute(self, attr): """Remove attribute from a workspace. Args: attr (str): attribute name """ update = [fapi._attr_rem(attr)] r = fapi.update_workspace_attributes(self.namespace, self.name, update, self.api_url) self.data["workspace"]["attributes"].pop(attr, None) fapi._check_response_code(r, 200)
def function[remove_attribute, parameter[self, attr]]: constant[Remove attribute from a workspace. Args: attr (str): attribute name ] variable[update] assign[=] list[[<ast.Call object at 0x7da1b1c602e0>]] variable[r] assign[=] call[name[fapi].update_workspace_attributes, parameter[name[self].namespace, name[self].name, name[update], name[self].api_url]] call[call[call[name[self].data][constant[workspace]]][constant[attributes]].pop, parameter[name[attr], constant[None]]] call[name[fapi]._check_response_code, parameter[name[r], constant[200]]]
keyword[def] identifier[remove_attribute] ( identifier[self] , identifier[attr] ): literal[string] identifier[update] =[ identifier[fapi] . identifier[_attr_rem] ( identifier[attr] )] identifier[r] = identifier[fapi] . identifier[update_workspace_attributes] ( identifier[self] . identifier[namespace] , identifier[self] . identifier[name] , identifier[update] , identifier[self] . identifier[api_url] ) identifier[self] . identifier[data] [ literal[string] ][ literal[string] ]. identifier[pop] ( identifier[attr] , keyword[None] ) identifier[fapi] . identifier[_check_response_code] ( identifier[r] , literal[int] )
def remove_attribute(self, attr): """Remove attribute from a workspace. Args: attr (str): attribute name """ update = [fapi._attr_rem(attr)] r = fapi.update_workspace_attributes(self.namespace, self.name, update, self.api_url) self.data['workspace']['attributes'].pop(attr, None) fapi._check_response_code(r, 200)
def create_swagger_json_handler(app, **kwargs): """ Create a handler that returns the swagger definition for an application. This method assumes the application is using the TransmuteUrlDispatcher as the router. """ spec = getattr(app, SWAGGER_ATTR_NAME, SwaggerSpec()) _add_blueprint_specs(app, spec) spec_dict = spec.swagger_definition(**kwargs) encoded_spec = json.dumps(spec_dict).encode("UTF-8") def swagger(): return Response( encoded_spec, # we allow CORS, so this can be requested at swagger.io headers={ "Access-Control-Allow-Origin": "*" }, content_type="application/json", ) return swagger
def function[create_swagger_json_handler, parameter[app]]: constant[ Create a handler that returns the swagger definition for an application. This method assumes the application is using the TransmuteUrlDispatcher as the router. ] variable[spec] assign[=] call[name[getattr], parameter[name[app], name[SWAGGER_ATTR_NAME], call[name[SwaggerSpec], parameter[]]]] call[name[_add_blueprint_specs], parameter[name[app], name[spec]]] variable[spec_dict] assign[=] call[name[spec].swagger_definition, parameter[]] variable[encoded_spec] assign[=] call[call[name[json].dumps, parameter[name[spec_dict]]].encode, parameter[constant[UTF-8]]] def function[swagger, parameter[]]: return[call[name[Response], parameter[name[encoded_spec]]]] return[name[swagger]]
keyword[def] identifier[create_swagger_json_handler] ( identifier[app] ,** identifier[kwargs] ): literal[string] identifier[spec] = identifier[getattr] ( identifier[app] , identifier[SWAGGER_ATTR_NAME] , identifier[SwaggerSpec] ()) identifier[_add_blueprint_specs] ( identifier[app] , identifier[spec] ) identifier[spec_dict] = identifier[spec] . identifier[swagger_definition] (** identifier[kwargs] ) identifier[encoded_spec] = identifier[json] . identifier[dumps] ( identifier[spec_dict] ). identifier[encode] ( literal[string] ) keyword[def] identifier[swagger] (): keyword[return] identifier[Response] ( identifier[encoded_spec] , identifier[headers] ={ literal[string] : literal[string] }, identifier[content_type] = literal[string] , ) keyword[return] identifier[swagger]
def create_swagger_json_handler(app, **kwargs): """ Create a handler that returns the swagger definition for an application. This method assumes the application is using the TransmuteUrlDispatcher as the router. """ spec = getattr(app, SWAGGER_ATTR_NAME, SwaggerSpec()) _add_blueprint_specs(app, spec) spec_dict = spec.swagger_definition(**kwargs) encoded_spec = json.dumps(spec_dict).encode('UTF-8') def swagger(): # we allow CORS, so this can be requested at swagger.io return Response(encoded_spec, headers={'Access-Control-Allow-Origin': '*'}, content_type='application/json') return swagger
def adjust_locations(ast_node, first_lineno, first_offset): """ Adjust the locations of the ast nodes, offsetting them to the new lineno and column offset """ line_delta = first_lineno - 1 def _fix(node): if 'lineno' in node._attributes: lineno = node.lineno col = node.col_offset # adjust the offset on the first line if lineno == 1: col += first_offset lineno += line_delta node.lineno = lineno node.col_offset = col for child in iter_child_nodes(node): _fix(child) _fix(ast_node)
def function[adjust_locations, parameter[ast_node, first_lineno, first_offset]]: constant[ Adjust the locations of the ast nodes, offsetting them to the new lineno and column offset ] variable[line_delta] assign[=] binary_operation[name[first_lineno] - constant[1]] def function[_fix, parameter[node]]: if compare[constant[lineno] in name[node]._attributes] begin[:] variable[lineno] assign[=] name[node].lineno variable[col] assign[=] name[node].col_offset if compare[name[lineno] equal[==] constant[1]] begin[:] <ast.AugAssign object at 0x7da20c794460> <ast.AugAssign object at 0x7da20c795660> name[node].lineno assign[=] name[lineno] name[node].col_offset assign[=] name[col] for taget[name[child]] in starred[call[name[iter_child_nodes], parameter[name[node]]]] begin[:] call[name[_fix], parameter[name[child]]] call[name[_fix], parameter[name[ast_node]]]
keyword[def] identifier[adjust_locations] ( identifier[ast_node] , identifier[first_lineno] , identifier[first_offset] ): literal[string] identifier[line_delta] = identifier[first_lineno] - literal[int] keyword[def] identifier[_fix] ( identifier[node] ): keyword[if] literal[string] keyword[in] identifier[node] . identifier[_attributes] : identifier[lineno] = identifier[node] . identifier[lineno] identifier[col] = identifier[node] . identifier[col_offset] keyword[if] identifier[lineno] == literal[int] : identifier[col] += identifier[first_offset] identifier[lineno] += identifier[line_delta] identifier[node] . identifier[lineno] = identifier[lineno] identifier[node] . identifier[col_offset] = identifier[col] keyword[for] identifier[child] keyword[in] identifier[iter_child_nodes] ( identifier[node] ): identifier[_fix] ( identifier[child] ) identifier[_fix] ( identifier[ast_node] )
def adjust_locations(ast_node, first_lineno, first_offset): """ Adjust the locations of the ast nodes, offsetting them to the new lineno and column offset """ line_delta = first_lineno - 1 def _fix(node): if 'lineno' in node._attributes: lineno = node.lineno col = node.col_offset # adjust the offset on the first line if lineno == 1: col += first_offset # depends on [control=['if'], data=[]] lineno += line_delta node.lineno = lineno node.col_offset = col # depends on [control=['if'], data=[]] for child in iter_child_nodes(node): _fix(child) # depends on [control=['for'], data=['child']] _fix(ast_node)
def cancel_firewall(self, firewall_id, dedicated=False): """Cancels the specified firewall. :param int firewall_id: Firewall ID to be cancelled. :param bool dedicated: If true, the firewall instance is dedicated, otherwise, the firewall instance is shared. """ fwl_billing = self._get_fwl_billing_item(firewall_id, dedicated) billing_item_service = self.client['Billing_Item'] return billing_item_service.cancelService(id=fwl_billing['id'])
def function[cancel_firewall, parameter[self, firewall_id, dedicated]]: constant[Cancels the specified firewall. :param int firewall_id: Firewall ID to be cancelled. :param bool dedicated: If true, the firewall instance is dedicated, otherwise, the firewall instance is shared. ] variable[fwl_billing] assign[=] call[name[self]._get_fwl_billing_item, parameter[name[firewall_id], name[dedicated]]] variable[billing_item_service] assign[=] call[name[self].client][constant[Billing_Item]] return[call[name[billing_item_service].cancelService, parameter[]]]
keyword[def] identifier[cancel_firewall] ( identifier[self] , identifier[firewall_id] , identifier[dedicated] = keyword[False] ): literal[string] identifier[fwl_billing] = identifier[self] . identifier[_get_fwl_billing_item] ( identifier[firewall_id] , identifier[dedicated] ) identifier[billing_item_service] = identifier[self] . identifier[client] [ literal[string] ] keyword[return] identifier[billing_item_service] . identifier[cancelService] ( identifier[id] = identifier[fwl_billing] [ literal[string] ])
def cancel_firewall(self, firewall_id, dedicated=False): """Cancels the specified firewall. :param int firewall_id: Firewall ID to be cancelled. :param bool dedicated: If true, the firewall instance is dedicated, otherwise, the firewall instance is shared. """ fwl_billing = self._get_fwl_billing_item(firewall_id, dedicated) billing_item_service = self.client['Billing_Item'] return billing_item_service.cancelService(id=fwl_billing['id'])
def get_cached_commit_times(root_folder, parent_dir, sorted_relpaths): """ Get the cached commit times for the combination of this parent_dir and relpaths Return the commit assigned to this combination and the actual times! """ result = get_all_cached_commit_times(root_folder) for item in result: if sorted(item.get("sorted_relpaths", [])) == sorted_relpaths and item.get("parent_dir") == parent_dir: return item.get("commit"), item.get("commit_times") return None, {}
def function[get_cached_commit_times, parameter[root_folder, parent_dir, sorted_relpaths]]: constant[ Get the cached commit times for the combination of this parent_dir and relpaths Return the commit assigned to this combination and the actual times! ] variable[result] assign[=] call[name[get_all_cached_commit_times], parameter[name[root_folder]]] for taget[name[item]] in starred[name[result]] begin[:] if <ast.BoolOp object at 0x7da20c7c91b0> begin[:] return[tuple[[<ast.Call object at 0x7da20cabee00>, <ast.Call object at 0x7da20cabec50>]]] return[tuple[[<ast.Constant object at 0x7da18c4cebf0>, <ast.Dict object at 0x7da18c4cfc40>]]]
keyword[def] identifier[get_cached_commit_times] ( identifier[root_folder] , identifier[parent_dir] , identifier[sorted_relpaths] ): literal[string] identifier[result] = identifier[get_all_cached_commit_times] ( identifier[root_folder] ) keyword[for] identifier[item] keyword[in] identifier[result] : keyword[if] identifier[sorted] ( identifier[item] . identifier[get] ( literal[string] ,[]))== identifier[sorted_relpaths] keyword[and] identifier[item] . identifier[get] ( literal[string] )== identifier[parent_dir] : keyword[return] identifier[item] . identifier[get] ( literal[string] ), identifier[item] . identifier[get] ( literal[string] ) keyword[return] keyword[None] ,{}
def get_cached_commit_times(root_folder, parent_dir, sorted_relpaths): """ Get the cached commit times for the combination of this parent_dir and relpaths Return the commit assigned to this combination and the actual times! """ result = get_all_cached_commit_times(root_folder) for item in result: if sorted(item.get('sorted_relpaths', [])) == sorted_relpaths and item.get('parent_dir') == parent_dir: return (item.get('commit'), item.get('commit_times')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] return (None, {})
def parse_method_results(self, results_file, met): """Parse the output of a AMYLPRED2 result file.""" result = str(open(results_file).read()) ind_s = str.find(result, 'HITS') ind_e = str.find(result, '**NOTE') tmp = result[ind_s + 10:ind_e].strip(" ") hits_resid = [] method = None if ":" in tmp: method = tmp.split(":")[0] hits = tmp.split(":")[1] if "-" in hits: for ele in hits.split(","): ele = ele.replace('\\r\\n\\r\\n', '') res_s = ele.split("-")[0] res_e = ele.split("-")[1] for i in range(int(res_s), int(res_e) + 1): hits_resid.append(i) if method: return method, hits_resid else: return met, hits_resid
def function[parse_method_results, parameter[self, results_file, met]]: constant[Parse the output of a AMYLPRED2 result file.] variable[result] assign[=] call[name[str], parameter[call[call[name[open], parameter[name[results_file]]].read, parameter[]]]] variable[ind_s] assign[=] call[name[str].find, parameter[name[result], constant[HITS]]] variable[ind_e] assign[=] call[name[str].find, parameter[name[result], constant[**NOTE]]] variable[tmp] assign[=] call[call[name[result]][<ast.Slice object at 0x7da1b0c41600>].strip, parameter[constant[ ]]] variable[hits_resid] assign[=] list[[]] variable[method] assign[=] constant[None] if compare[constant[:] in name[tmp]] begin[:] variable[method] assign[=] call[call[name[tmp].split, parameter[constant[:]]]][constant[0]] variable[hits] assign[=] call[call[name[tmp].split, parameter[constant[:]]]][constant[1]] if compare[constant[-] in name[hits]] begin[:] for taget[name[ele]] in starred[call[name[hits].split, parameter[constant[,]]]] begin[:] variable[ele] assign[=] call[name[ele].replace, parameter[constant[\r\n\r\n], constant[]]] variable[res_s] assign[=] call[call[name[ele].split, parameter[constant[-]]]][constant[0]] variable[res_e] assign[=] call[call[name[ele].split, parameter[constant[-]]]][constant[1]] for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[name[res_s]]], binary_operation[call[name[int], parameter[name[res_e]]] + constant[1]]]]] begin[:] call[name[hits_resid].append, parameter[name[i]]] if name[method] begin[:] return[tuple[[<ast.Name object at 0x7da1b0cb5840>, <ast.Name object at 0x7da1b0cb6a40>]]]
keyword[def] identifier[parse_method_results] ( identifier[self] , identifier[results_file] , identifier[met] ): literal[string] identifier[result] = identifier[str] ( identifier[open] ( identifier[results_file] ). identifier[read] ()) identifier[ind_s] = identifier[str] . identifier[find] ( identifier[result] , literal[string] ) identifier[ind_e] = identifier[str] . identifier[find] ( identifier[result] , literal[string] ) identifier[tmp] = identifier[result] [ identifier[ind_s] + literal[int] : identifier[ind_e] ]. identifier[strip] ( literal[string] ) identifier[hits_resid] =[] identifier[method] = keyword[None] keyword[if] literal[string] keyword[in] identifier[tmp] : identifier[method] = identifier[tmp] . identifier[split] ( literal[string] )[ literal[int] ] identifier[hits] = identifier[tmp] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] literal[string] keyword[in] identifier[hits] : keyword[for] identifier[ele] keyword[in] identifier[hits] . identifier[split] ( literal[string] ): identifier[ele] = identifier[ele] . identifier[replace] ( literal[string] , literal[string] ) identifier[res_s] = identifier[ele] . identifier[split] ( literal[string] )[ literal[int] ] identifier[res_e] = identifier[ele] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[res_s] ), identifier[int] ( identifier[res_e] )+ literal[int] ): identifier[hits_resid] . identifier[append] ( identifier[i] ) keyword[if] identifier[method] : keyword[return] identifier[method] , identifier[hits_resid] keyword[else] : keyword[return] identifier[met] , identifier[hits_resid]
def parse_method_results(self, results_file, met): """Parse the output of a AMYLPRED2 result file.""" result = str(open(results_file).read()) ind_s = str.find(result, 'HITS') ind_e = str.find(result, '**NOTE') tmp = result[ind_s + 10:ind_e].strip(' ') hits_resid = [] method = None if ':' in tmp: method = tmp.split(':')[0] hits = tmp.split(':')[1] if '-' in hits: for ele in hits.split(','): ele = ele.replace('\\r\\n\\r\\n', '') res_s = ele.split('-')[0] res_e = ele.split('-')[1] for i in range(int(res_s), int(res_e) + 1): hits_resid.append(i) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['ele']] # depends on [control=['if'], data=['hits']] # depends on [control=['if'], data=['tmp']] if method: return (method, hits_resid) # depends on [control=['if'], data=[]] else: return (met, hits_resid)
def get_host_addresses(use_ipv4, use_ipv6): """ Get local IP addresses. """ addresses = [] for interface in netifaces.interfaces(): ifaddresses = netifaces.ifaddresses(interface) for address in ifaddresses.get(socket.AF_INET, []): if use_ipv4 and address['addr'] != '127.0.0.1': addresses.append(address['addr']) for address in ifaddresses.get(socket.AF_INET6, []): if use_ipv6 and address['addr'] != '::1' and '%' not in address['addr']: addresses.append(address['addr']) return addresses
def function[get_host_addresses, parameter[use_ipv4, use_ipv6]]: constant[ Get local IP addresses. ] variable[addresses] assign[=] list[[]] for taget[name[interface]] in starred[call[name[netifaces].interfaces, parameter[]]] begin[:] variable[ifaddresses] assign[=] call[name[netifaces].ifaddresses, parameter[name[interface]]] for taget[name[address]] in starred[call[name[ifaddresses].get, parameter[name[socket].AF_INET, list[[]]]]] begin[:] if <ast.BoolOp object at 0x7da18f00e7a0> begin[:] call[name[addresses].append, parameter[call[name[address]][constant[addr]]]] for taget[name[address]] in starred[call[name[ifaddresses].get, parameter[name[socket].AF_INET6, list[[]]]]] begin[:] if <ast.BoolOp object at 0x7da18f00ca60> begin[:] call[name[addresses].append, parameter[call[name[address]][constant[addr]]]] return[name[addresses]]
keyword[def] identifier[get_host_addresses] ( identifier[use_ipv4] , identifier[use_ipv6] ): literal[string] identifier[addresses] =[] keyword[for] identifier[interface] keyword[in] identifier[netifaces] . identifier[interfaces] (): identifier[ifaddresses] = identifier[netifaces] . identifier[ifaddresses] ( identifier[interface] ) keyword[for] identifier[address] keyword[in] identifier[ifaddresses] . identifier[get] ( identifier[socket] . identifier[AF_INET] ,[]): keyword[if] identifier[use_ipv4] keyword[and] identifier[address] [ literal[string] ]!= literal[string] : identifier[addresses] . identifier[append] ( identifier[address] [ literal[string] ]) keyword[for] identifier[address] keyword[in] identifier[ifaddresses] . identifier[get] ( identifier[socket] . identifier[AF_INET6] ,[]): keyword[if] identifier[use_ipv6] keyword[and] identifier[address] [ literal[string] ]!= literal[string] keyword[and] literal[string] keyword[not] keyword[in] identifier[address] [ literal[string] ]: identifier[addresses] . identifier[append] ( identifier[address] [ literal[string] ]) keyword[return] identifier[addresses]
def get_host_addresses(use_ipv4, use_ipv6): """ Get local IP addresses. """ addresses = [] for interface in netifaces.interfaces(): ifaddresses = netifaces.ifaddresses(interface) for address in ifaddresses.get(socket.AF_INET, []): if use_ipv4 and address['addr'] != '127.0.0.1': addresses.append(address['addr']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['address']] for address in ifaddresses.get(socket.AF_INET6, []): if use_ipv6 and address['addr'] != '::1' and ('%' not in address['addr']): addresses.append(address['addr']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['address']] # depends on [control=['for'], data=['interface']] return addresses
def _splitHeaders(headers): """ Split an HTTP header whose components are separated with commas. Each component is then split on semicolons and the component arguments converted into a `dict`. @return: `list` of 2-`tuple` of `bytes`, `dict` @return: List of header arguments and mapping of component argument names to values. """ return [cgi.parse_header(value) for value in chain.from_iterable( s.split(',') for s in headers if s)]
def function[_splitHeaders, parameter[headers]]: constant[ Split an HTTP header whose components are separated with commas. Each component is then split on semicolons and the component arguments converted into a `dict`. @return: `list` of 2-`tuple` of `bytes`, `dict` @return: List of header arguments and mapping of component argument names to values. ] return[<ast.ListComp object at 0x7da2041d92d0>]
keyword[def] identifier[_splitHeaders] ( identifier[headers] ): literal[string] keyword[return] [ identifier[cgi] . identifier[parse_header] ( identifier[value] ) keyword[for] identifier[value] keyword[in] identifier[chain] . identifier[from_iterable] ( identifier[s] . identifier[split] ( literal[string] ) keyword[for] identifier[s] keyword[in] identifier[headers] keyword[if] identifier[s] )]
def _splitHeaders(headers): """ Split an HTTP header whose components are separated with commas. Each component is then split on semicolons and the component arguments converted into a `dict`. @return: `list` of 2-`tuple` of `bytes`, `dict` @return: List of header arguments and mapping of component argument names to values. """ return [cgi.parse_header(value) for value in chain.from_iterable((s.split(',') for s in headers if s))]
def get_ip_interface_output_interface_proxy_arp(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_ip_interface = ET.Element("get_ip_interface") config = get_ip_interface output = ET.SubElement(get_ip_interface, "output") interface = ET.SubElement(output, "interface") interface_type_key = ET.SubElement(interface, "interface-type") interface_type_key.text = kwargs.pop('interface_type') interface_name_key = ET.SubElement(interface, "interface-name") interface_name_key.text = kwargs.pop('interface_name') proxy_arp = ET.SubElement(interface, "proxy-arp") proxy_arp.text = kwargs.pop('proxy_arp') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[get_ip_interface_output_interface_proxy_arp, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[get_ip_interface] assign[=] call[name[ET].Element, parameter[constant[get_ip_interface]]] variable[config] assign[=] name[get_ip_interface] variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_ip_interface], constant[output]]] variable[interface] assign[=] call[name[ET].SubElement, parameter[name[output], constant[interface]]] variable[interface_type_key] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[interface-type]]] name[interface_type_key].text assign[=] call[name[kwargs].pop, parameter[constant[interface_type]]] variable[interface_name_key] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[interface-name]]] name[interface_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[interface_name]]] variable[proxy_arp] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[proxy-arp]]] name[proxy_arp].text assign[=] call[name[kwargs].pop, parameter[constant[proxy_arp]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[get_ip_interface_output_interface_proxy_arp] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[get_ip_interface] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[get_ip_interface] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_ip_interface] , literal[string] ) identifier[interface] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[interface_type_key] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] ) identifier[interface_type_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[interface_name_key] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] ) identifier[interface_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[proxy_arp] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] ) identifier[proxy_arp] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def get_ip_interface_output_interface_proxy_arp(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') get_ip_interface = ET.Element('get_ip_interface') config = get_ip_interface output = ET.SubElement(get_ip_interface, 'output') interface = ET.SubElement(output, 'interface') interface_type_key = ET.SubElement(interface, 'interface-type') interface_type_key.text = kwargs.pop('interface_type') interface_name_key = ET.SubElement(interface, 'interface-name') interface_name_key.text = kwargs.pop('interface_name') proxy_arp = ET.SubElement(interface, 'proxy-arp') proxy_arp.text = kwargs.pop('proxy_arp') callback = kwargs.pop('callback', self._callback) return callback(config)
def _flush_events(self): """! @brief Send all pending events to event sink.""" if self._sink is not None: for event in self._pending_events: self._sink.receive(event) self._pending_events = []
def function[_flush_events, parameter[self]]: constant[! @brief Send all pending events to event sink.] if compare[name[self]._sink is_not constant[None]] begin[:] for taget[name[event]] in starred[name[self]._pending_events] begin[:] call[name[self]._sink.receive, parameter[name[event]]] name[self]._pending_events assign[=] list[[]]
keyword[def] identifier[_flush_events] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_sink] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[event] keyword[in] identifier[self] . identifier[_pending_events] : identifier[self] . identifier[_sink] . identifier[receive] ( identifier[event] ) identifier[self] . identifier[_pending_events] =[]
def _flush_events(self): """! @brief Send all pending events to event sink.""" if self._sink is not None: for event in self._pending_events: self._sink.receive(event) # depends on [control=['for'], data=['event']] # depends on [control=['if'], data=[]] self._pending_events = []
def visit_lambda(self, node, parent): """visit a Lambda node by returning a fresh instance of it""" newnode = nodes.Lambda(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode)) return newnode
def function[visit_lambda, parameter[self, node, parent]]: constant[visit a Lambda node by returning a fresh instance of it] variable[newnode] assign[=] call[name[nodes].Lambda, parameter[name[node].lineno, name[node].col_offset, name[parent]]] call[name[newnode].postinit, parameter[call[name[self].visit, parameter[name[node].args, name[newnode]]], call[name[self].visit, parameter[name[node].body, name[newnode]]]]] return[name[newnode]]
keyword[def] identifier[visit_lambda] ( identifier[self] , identifier[node] , identifier[parent] ): literal[string] identifier[newnode] = identifier[nodes] . identifier[Lambda] ( identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] ) identifier[newnode] . identifier[postinit] ( identifier[self] . identifier[visit] ( identifier[node] . identifier[args] , identifier[newnode] ), identifier[self] . identifier[visit] ( identifier[node] . identifier[body] , identifier[newnode] )) keyword[return] identifier[newnode]
def visit_lambda(self, node, parent): """visit a Lambda node by returning a fresh instance of it""" newnode = nodes.Lambda(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode)) return newnode
def elevate(self): r"""Return a degree-elevated version of the current curve. Does this by converting the current nodes :math:`v_0, \ldots, v_n` to new nodes :math:`w_0, \ldots, w_{n + 1}` where .. math:: \begin{align*} w_0 &= v_0 \\ w_j &= \frac{j}{n + 1} v_{j - 1} + \frac{n + 1 - j}{n + 1} v_j \\ w_{n + 1} &= v_n \end{align*} .. image:: ../../images/curve_elevate.png :align: center .. testsetup:: curve-elevate import numpy as np import bezier .. doctest:: curve-elevate :options: +NORMALIZE_WHITESPACE >>> nodes = np.asfortranarray([ ... [0.0, 1.5, 3.0], ... [0.0, 1.5, 0.0], ... ]) >>> curve = bezier.Curve(nodes, degree=2) >>> elevated = curve.elevate() >>> elevated <Curve (degree=3, dimension=2)> >>> elevated.nodes array([[0., 1., 2., 3.], [0., 1., 1., 0.]]) .. testcleanup:: curve-elevate import make_images make_images.curve_elevate(curve, elevated) Returns: Curve: The degree-elevated curve. """ new_nodes = _curve_helpers.elevate_nodes(self._nodes) return Curve(new_nodes, self._degree + 1, _copy=False)
def function[elevate, parameter[self]]: constant[Return a degree-elevated version of the current curve. Does this by converting the current nodes :math:`v_0, \ldots, v_n` to new nodes :math:`w_0, \ldots, w_{n + 1}` where .. math:: \begin{align*} w_0 &= v_0 \\ w_j &= \frac{j}{n + 1} v_{j - 1} + \frac{n + 1 - j}{n + 1} v_j \\ w_{n + 1} &= v_n \end{align*} .. image:: ../../images/curve_elevate.png :align: center .. testsetup:: curve-elevate import numpy as np import bezier .. doctest:: curve-elevate :options: +NORMALIZE_WHITESPACE >>> nodes = np.asfortranarray([ ... [0.0, 1.5, 3.0], ... [0.0, 1.5, 0.0], ... ]) >>> curve = bezier.Curve(nodes, degree=2) >>> elevated = curve.elevate() >>> elevated <Curve (degree=3, dimension=2)> >>> elevated.nodes array([[0., 1., 2., 3.], [0., 1., 1., 0.]]) .. testcleanup:: curve-elevate import make_images make_images.curve_elevate(curve, elevated) Returns: Curve: The degree-elevated curve. ] variable[new_nodes] assign[=] call[name[_curve_helpers].elevate_nodes, parameter[name[self]._nodes]] return[call[name[Curve], parameter[name[new_nodes], binary_operation[name[self]._degree + constant[1]]]]]
keyword[def] identifier[elevate] ( identifier[self] ): literal[string] identifier[new_nodes] = identifier[_curve_helpers] . identifier[elevate_nodes] ( identifier[self] . identifier[_nodes] ) keyword[return] identifier[Curve] ( identifier[new_nodes] , identifier[self] . identifier[_degree] + literal[int] , identifier[_copy] = keyword[False] )
def elevate(self): """Return a degree-elevated version of the current curve. Does this by converting the current nodes :math:`v_0, \\ldots, v_n` to new nodes :math:`w_0, \\ldots, w_{n + 1}` where .. math:: \\begin{align*} w_0 &= v_0 \\\\ w_j &= \\frac{j}{n + 1} v_{j - 1} + \\frac{n + 1 - j}{n + 1} v_j \\\\ w_{n + 1} &= v_n \\end{align*} .. image:: ../../images/curve_elevate.png :align: center .. testsetup:: curve-elevate import numpy as np import bezier .. doctest:: curve-elevate :options: +NORMALIZE_WHITESPACE >>> nodes = np.asfortranarray([ ... [0.0, 1.5, 3.0], ... [0.0, 1.5, 0.0], ... ]) >>> curve = bezier.Curve(nodes, degree=2) >>> elevated = curve.elevate() >>> elevated <Curve (degree=3, dimension=2)> >>> elevated.nodes array([[0., 1., 2., 3.], [0., 1., 1., 0.]]) .. testcleanup:: curve-elevate import make_images make_images.curve_elevate(curve, elevated) Returns: Curve: The degree-elevated curve. """ new_nodes = _curve_helpers.elevate_nodes(self._nodes) return Curve(new_nodes, self._degree + 1, _copy=False)
async def create(source_id: str, name: str, schema_id: str, payment_handle: int): """ Creates a new CredentialDef object that is written to the ledger :param source_id: Institution's unique ID for the credential definition :param name: Name of credential definition :param schema_id: The schema ID given during the creation of the schema :param payment_handle: NYI - payment of ledger fee is taken from wallet automatically Example: source_id = 'foobar123' schema_name = 'Schema Name' payment_handle = 0 credential_def1 = await CredentialDef.create(source_id, name, schema_id, payment_handle) :return: credential_def object, written to ledger """ constructor_params = (source_id, name, schema_id) c_source_id = c_char_p(source_id.encode('utf-8')) c_schema_id = c_char_p(schema_id.encode('utf-8')) c_name = c_char_p(name.encode('utf-8')) # default institution_did in config is used as issuer_did c_issuer_did = None c_payment = c_uint32(payment_handle) # Todo: add params for tag and config c_tag = c_char_p('tag1'.encode('utf-8')) c_config = c_char_p('{"support_revocation":false}'.encode('utf-8')) c_params = (c_source_id, c_name, c_schema_id, c_issuer_did, c_tag, c_config, c_payment) return await CredentialDef._create("vcx_credentialdef_create", constructor_params, c_params)
<ast.AsyncFunctionDef object at 0x7da18c4cd750>
keyword[async] keyword[def] identifier[create] ( identifier[source_id] : identifier[str] , identifier[name] : identifier[str] , identifier[schema_id] : identifier[str] , identifier[payment_handle] : identifier[int] ): literal[string] identifier[constructor_params] =( identifier[source_id] , identifier[name] , identifier[schema_id] ) identifier[c_source_id] = identifier[c_char_p] ( identifier[source_id] . identifier[encode] ( literal[string] )) identifier[c_schema_id] = identifier[c_char_p] ( identifier[schema_id] . identifier[encode] ( literal[string] )) identifier[c_name] = identifier[c_char_p] ( identifier[name] . identifier[encode] ( literal[string] )) identifier[c_issuer_did] = keyword[None] identifier[c_payment] = identifier[c_uint32] ( identifier[payment_handle] ) identifier[c_tag] = identifier[c_char_p] ( literal[string] . identifier[encode] ( literal[string] )) identifier[c_config] = identifier[c_char_p] ( literal[string] . identifier[encode] ( literal[string] )) identifier[c_params] =( identifier[c_source_id] , identifier[c_name] , identifier[c_schema_id] , identifier[c_issuer_did] , identifier[c_tag] , identifier[c_config] , identifier[c_payment] ) keyword[return] keyword[await] identifier[CredentialDef] . identifier[_create] ( literal[string] , identifier[constructor_params] , identifier[c_params] )
async def create(source_id: str, name: str, schema_id: str, payment_handle: int): """ Creates a new CredentialDef object that is written to the ledger :param source_id: Institution's unique ID for the credential definition :param name: Name of credential definition :param schema_id: The schema ID given during the creation of the schema :param payment_handle: NYI - payment of ledger fee is taken from wallet automatically Example: source_id = 'foobar123' schema_name = 'Schema Name' payment_handle = 0 credential_def1 = await CredentialDef.create(source_id, name, schema_id, payment_handle) :return: credential_def object, written to ledger """ constructor_params = (source_id, name, schema_id) c_source_id = c_char_p(source_id.encode('utf-8')) c_schema_id = c_char_p(schema_id.encode('utf-8')) c_name = c_char_p(name.encode('utf-8')) # default institution_did in config is used as issuer_did c_issuer_did = None c_payment = c_uint32(payment_handle) # Todo: add params for tag and config c_tag = c_char_p('tag1'.encode('utf-8')) c_config = c_char_p('{"support_revocation":false}'.encode('utf-8')) c_params = (c_source_id, c_name, c_schema_id, c_issuer_did, c_tag, c_config, c_payment) return await CredentialDef._create('vcx_credentialdef_create', constructor_params, c_params)
def connect(node, mode=WORKER_MODE, log_to_driver=False, worker=global_worker, driver_id=None, load_code_from_local=False): """Connect this worker to the raylet, to Plasma, and to Redis. Args: node (ray.node.Node): The node to connect. mode: The mode of the worker. One of SCRIPT_MODE, WORKER_MODE, and LOCAL_MODE. log_to_driver (bool): If true, then output from all of the worker processes on all nodes will be directed to the driver. worker: The ray.Worker instance. driver_id: The ID of driver. If it's None, then we will generate one. """ # Do some basic checking to make sure we didn't call ray.init twice. error_message = "Perhaps you called ray.init twice by accident?" assert not worker.connected, error_message assert worker.cached_functions_to_run is not None, error_message # Enable nice stack traces on SIGSEGV etc. if not faulthandler.is_enabled(): faulthandler.enable(all_threads=False) worker.profiler = profiling.Profiler(worker, worker.threads_stopped) # Initialize some fields. if mode is WORKER_MODE: worker.worker_id = _random_string() if setproctitle: setproctitle.setproctitle("ray_worker") else: # This is the code path of driver mode. if driver_id is None: driver_id = DriverID(_random_string()) if not isinstance(driver_id, DriverID): raise TypeError("The type of given driver id must be DriverID.") worker.worker_id = driver_id.binary() # When tasks are executed on remote workers in the context of multiple # drivers, the task driver ID is used to keep track of which driver is # responsible for the task so that error messages will be propagated to # the correct driver. if mode != WORKER_MODE: worker.task_driver_id = DriverID(worker.worker_id) # All workers start out as non-actors. A worker can be turned into an actor # after it is created. worker.actor_id = ActorID.nil() worker.node = node worker.set_mode(mode) # If running Ray in LOCAL_MODE, there is no need to create call # create_worker or to start the worker service. if mode == LOCAL_MODE: return # Create a Redis client. # The Redis client can safely be shared between threads. However, that is # not true of Redis pubsub clients. See the documentation at # https://github.com/andymccurdy/redis-py#thread-safety. worker.redis_client = node.create_redis_client() # For driver's check that the version information matches the version # information that the Ray cluster was started with. try: ray.services.check_version_info(worker.redis_client) except Exception as e: if mode == SCRIPT_MODE: raise e elif mode == WORKER_MODE: traceback_str = traceback.format_exc() ray.utils.push_error_to_driver_through_redis( worker.redis_client, ray_constants.VERSION_MISMATCH_PUSH_ERROR, traceback_str, driver_id=None) worker.lock = threading.RLock() # Create an object for interfacing with the global state. global_state._initialize_global_state( node.redis_address, redis_password=node.redis_password) # Register the worker with Redis. if mode == SCRIPT_MODE: # The concept of a driver is the same as the concept of a "job". # Register the driver/job with Redis here. import __main__ as main driver_info = { "node_ip_address": node.node_ip_address, "driver_id": worker.worker_id, "start_time": time.time(), "plasma_store_socket": node.plasma_store_socket_name, "raylet_socket": node.raylet_socket_name, "name": (main.__file__ if hasattr(main, "__file__") else "INTERACTIVE MODE") } worker.redis_client.hmset(b"Drivers:" + worker.worker_id, driver_info) elif mode == WORKER_MODE: # Register the worker with Redis. worker_dict = { "node_ip_address": node.node_ip_address, "plasma_store_socket": node.plasma_store_socket_name, } # Check the RedirectOutput key in Redis and based on its value redirect # worker output and error to their own files. # This key is set in services.py when Redis is started. redirect_worker_output_val = worker.redis_client.get("RedirectOutput") if (redirect_worker_output_val is not None and int(redirect_worker_output_val) == 1): log_stdout_file, log_stderr_file = ( node.new_worker_redirected_log_file(worker.worker_id)) # Redirect stdout/stderr at the file descriptor level. If we simply # set sys.stdout and sys.stderr, then logging from C++ can fail to # be redirected. os.dup2(log_stdout_file.fileno(), sys.stdout.fileno()) os.dup2(log_stderr_file.fileno(), sys.stderr.fileno()) # We also manually set sys.stdout and sys.stderr because that seems # to have an affect on the output buffering. Without doing this, # stdout and stderr are heavily buffered resulting in seemingly # lost logging statements. sys.stdout = log_stdout_file sys.stderr = log_stderr_file # This should always be the first message to appear in the worker's # stdout and stderr log files. The string "Ray worker pid:" is # parsed in the log monitor process. print("Ray worker pid: {}".format(os.getpid())) print("Ray worker pid: {}".format(os.getpid()), file=sys.stderr) sys.stdout.flush() sys.stderr.flush() worker_dict["stdout_file"] = os.path.abspath(log_stdout_file.name) worker_dict["stderr_file"] = os.path.abspath(log_stderr_file.name) worker.redis_client.hmset(b"Workers:" + worker.worker_id, worker_dict) else: raise Exception("This code should be unreachable.") # Create an object store client. worker.plasma_client = thread_safe_client( plasma.connect(node.plasma_store_socket_name, None, 0, 300)) # If this is a driver, set the current task ID, the task driver ID, and set # the task index to 0. if mode == SCRIPT_MODE: # If the user provided an object_id_seed, then set the current task ID # deterministically based on that seed (without altering the state of # the user's random number generator). Otherwise, set the current task # ID randomly to avoid object ID collisions. numpy_state = np.random.get_state() if node.object_id_seed is not None: np.random.seed(node.object_id_seed) else: # Try to use true randomness. np.random.seed(None) # Reset the state of the numpy random number generator. np.random.set_state(numpy_state) # Create an entry for the driver task in the task table. This task is # added immediately with status RUNNING. This allows us to push errors # related to this driver task back to the driver. For example, if the # driver creates an object that is later evicted, we should notify the # user that we're unable to reconstruct the object, since we cannot # rerun the driver. nil_actor_counter = 0 function_descriptor = FunctionDescriptor.for_driver_task() driver_task = ray._raylet.Task( worker.task_driver_id, function_descriptor.get_function_descriptor_list(), [], # arguments. 0, # num_returns. TaskID(_random_string()), # parent_task_id. 0, # parent_counter. ActorID.nil(), # actor_creation_id. ObjectID.nil(), # actor_creation_dummy_object_id. 0, # max_actor_reconstructions. ActorID.nil(), # actor_id. ActorHandleID.nil(), # actor_handle_id. nil_actor_counter, # actor_counter. [], # new_actor_handles. [], # execution_dependencies. {}, # resource_map. {}, # placement_resource_map. ) # Add the driver task to the task table. global_state._execute_command(driver_task.task_id(), "RAY.TABLE_ADD", ray.gcs_utils.TablePrefix.RAYLET_TASK, ray.gcs_utils.TablePubsub.RAYLET_TASK, driver_task.task_id().binary(), driver_task._serialized_raylet_task()) # Set the driver's current task ID to the task ID assigned to the # driver task. worker.task_context.current_task_id = driver_task.task_id() worker.raylet_client = ray._raylet.RayletClient( node.raylet_socket_name, ClientID(worker.worker_id), (mode == WORKER_MODE), DriverID(worker.current_task_id.binary()), ) # Start the import thread worker.import_thread = import_thread.ImportThread(worker, mode, worker.threads_stopped) worker.import_thread.start() # If this is a driver running in SCRIPT_MODE, start a thread to print error # messages asynchronously in the background. Ideally the scheduler would # push messages to the driver's worker service, but we ran into bugs when # trying to properly shutdown the driver's worker service, so we are # temporarily using this implementation which constantly queries the # scheduler for new error messages. if mode == SCRIPT_MODE: q = queue.Queue() worker.listener_thread = threading.Thread( target=listen_error_messages_raylet, name="ray_listen_error_messages", args=(worker, q, worker.threads_stopped)) worker.printer_thread = threading.Thread( target=print_error_messages_raylet, name="ray_print_error_messages", args=(q, worker.threads_stopped)) worker.listener_thread.daemon = True worker.listener_thread.start() worker.printer_thread.daemon = True worker.printer_thread.start() if log_to_driver: worker.logger_thread = threading.Thread( target=print_logs, name="ray_print_logs", args=(worker.redis_client, worker.threads_stopped)) worker.logger_thread.daemon = True worker.logger_thread.start() # If we are using the raylet code path and we are not in local mode, start # a background thread to periodically flush profiling data to the GCS. if mode != LOCAL_MODE: worker.profiler.start_flush_thread() if mode == SCRIPT_MODE: # Add the directory containing the script that is running to the Python # paths of the workers. Also add the current directory. Note that this # assumes that the directory structures on the machines in the clusters # are the same. script_directory = os.path.abspath(os.path.dirname(sys.argv[0])) current_directory = os.path.abspath(os.path.curdir) worker.run_function_on_all_workers( lambda worker_info: sys.path.insert(1, script_directory)) worker.run_function_on_all_workers( lambda worker_info: sys.path.insert(1, current_directory)) # TODO(rkn): Here we first export functions to run, then remote # functions. The order matters. For example, one of the functions to # run may set the Python path, which is needed to import a module used # to define a remote function. We may want to change the order to # simply be the order in which the exports were defined on the driver. # In addition, we will need to retain the ability to decide what the # first few exports are (mostly to set the Python path). Additionally, # note that the first exports to be defined on the driver will be the # ones defined in separate modules that are imported by the driver. # Export cached functions_to_run. for function in worker.cached_functions_to_run: worker.run_function_on_all_workers(function) # Export cached remote functions and actors to the workers. worker.function_actor_manager.export_cached() worker.cached_functions_to_run = None
def function[connect, parameter[node, mode, log_to_driver, worker, driver_id, load_code_from_local]]: constant[Connect this worker to the raylet, to Plasma, and to Redis. Args: node (ray.node.Node): The node to connect. mode: The mode of the worker. One of SCRIPT_MODE, WORKER_MODE, and LOCAL_MODE. log_to_driver (bool): If true, then output from all of the worker processes on all nodes will be directed to the driver. worker: The ray.Worker instance. driver_id: The ID of driver. If it's None, then we will generate one. ] variable[error_message] assign[=] constant[Perhaps you called ray.init twice by accident?] assert[<ast.UnaryOp object at 0x7da20e9631c0>] assert[compare[name[worker].cached_functions_to_run is_not constant[None]]] if <ast.UnaryOp object at 0x7da20e961cf0> begin[:] call[name[faulthandler].enable, parameter[]] name[worker].profiler assign[=] call[name[profiling].Profiler, parameter[name[worker], name[worker].threads_stopped]] if compare[name[mode] is name[WORKER_MODE]] begin[:] name[worker].worker_id assign[=] call[name[_random_string], parameter[]] if name[setproctitle] begin[:] call[name[setproctitle].setproctitle, parameter[constant[ray_worker]]] if compare[name[mode] not_equal[!=] name[WORKER_MODE]] begin[:] name[worker].task_driver_id assign[=] call[name[DriverID], parameter[name[worker].worker_id]] name[worker].actor_id assign[=] call[name[ActorID].nil, parameter[]] name[worker].node assign[=] name[node] call[name[worker].set_mode, parameter[name[mode]]] if compare[name[mode] equal[==] name[LOCAL_MODE]] begin[:] return[None] name[worker].redis_client assign[=] call[name[node].create_redis_client, parameter[]] <ast.Try object at 0x7da20e9618a0> name[worker].lock assign[=] call[name[threading].RLock, parameter[]] call[name[global_state]._initialize_global_state, parameter[name[node].redis_address]] if compare[name[mode] equal[==] name[SCRIPT_MODE]] begin[:] import module[__main__] as alias[main] variable[driver_info] assign[=] dictionary[[<ast.Constant object at 0x7da1b2347580>, <ast.Constant object at 0x7da1b2346dd0>, <ast.Constant object at 0x7da1b2347700>, <ast.Constant object at 0x7da1b2347d30>, <ast.Constant object at 0x7da1b2346a10>, <ast.Constant object at 0x7da1b2345a50>], [<ast.Attribute object at 0x7da1b2345c30>, <ast.Attribute object at 0x7da1b2346da0>, <ast.Call object at 0x7da1b2346b90>, <ast.Attribute object at 0x7da1b2346950>, <ast.Attribute object at 0x7da1b23462c0>, <ast.IfExp object at 0x7da1b23466b0>]] call[name[worker].redis_client.hmset, parameter[binary_operation[constant[b'Drivers:'] + name[worker].worker_id], name[driver_info]]] name[worker].plasma_client assign[=] call[name[thread_safe_client], parameter[call[name[plasma].connect, parameter[name[node].plasma_store_socket_name, constant[None], constant[0], constant[300]]]]] if compare[name[mode] equal[==] name[SCRIPT_MODE]] begin[:] variable[numpy_state] assign[=] call[name[np].random.get_state, parameter[]] if compare[name[node].object_id_seed is_not constant[None]] begin[:] call[name[np].random.seed, parameter[name[node].object_id_seed]] call[name[np].random.set_state, parameter[name[numpy_state]]] variable[nil_actor_counter] assign[=] constant[0] variable[function_descriptor] assign[=] call[name[FunctionDescriptor].for_driver_task, parameter[]] variable[driver_task] assign[=] call[name[ray]._raylet.Task, parameter[name[worker].task_driver_id, call[name[function_descriptor].get_function_descriptor_list, parameter[]], list[[]], constant[0], call[name[TaskID], parameter[call[name[_random_string], parameter[]]]], constant[0], call[name[ActorID].nil, parameter[]], call[name[ObjectID].nil, parameter[]], constant[0], call[name[ActorID].nil, parameter[]], call[name[ActorHandleID].nil, parameter[]], name[nil_actor_counter], list[[]], list[[]], dictionary[[], []], dictionary[[], []]]] call[name[global_state]._execute_command, parameter[call[name[driver_task].task_id, parameter[]], constant[RAY.TABLE_ADD], name[ray].gcs_utils.TablePrefix.RAYLET_TASK, name[ray].gcs_utils.TablePubsub.RAYLET_TASK, call[call[name[driver_task].task_id, parameter[]].binary, parameter[]], call[name[driver_task]._serialized_raylet_task, parameter[]]]] name[worker].task_context.current_task_id assign[=] call[name[driver_task].task_id, parameter[]] name[worker].raylet_client assign[=] call[name[ray]._raylet.RayletClient, parameter[name[node].raylet_socket_name, call[name[ClientID], parameter[name[worker].worker_id]], compare[name[mode] equal[==] name[WORKER_MODE]], call[name[DriverID], parameter[call[name[worker].current_task_id.binary, parameter[]]]]]] name[worker].import_thread assign[=] call[name[import_thread].ImportThread, parameter[name[worker], name[mode], name[worker].threads_stopped]] call[name[worker].import_thread.start, parameter[]] if compare[name[mode] equal[==] name[SCRIPT_MODE]] begin[:] variable[q] assign[=] call[name[queue].Queue, parameter[]] name[worker].listener_thread assign[=] call[name[threading].Thread, parameter[]] name[worker].printer_thread assign[=] call[name[threading].Thread, parameter[]] name[worker].listener_thread.daemon assign[=] constant[True] call[name[worker].listener_thread.start, parameter[]] name[worker].printer_thread.daemon assign[=] constant[True] call[name[worker].printer_thread.start, parameter[]] if name[log_to_driver] begin[:] name[worker].logger_thread assign[=] call[name[threading].Thread, parameter[]] name[worker].logger_thread.daemon assign[=] constant[True] call[name[worker].logger_thread.start, parameter[]] if compare[name[mode] not_equal[!=] name[LOCAL_MODE]] begin[:] call[name[worker].profiler.start_flush_thread, parameter[]] if compare[name[mode] equal[==] name[SCRIPT_MODE]] begin[:] variable[script_directory] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.dirname, parameter[call[name[sys].argv][constant[0]]]]]] variable[current_directory] assign[=] call[name[os].path.abspath, parameter[name[os].path.curdir]] call[name[worker].run_function_on_all_workers, parameter[<ast.Lambda object at 0x7da18eb57dc0>]] call[name[worker].run_function_on_all_workers, parameter[<ast.Lambda object at 0x7da18eb54040>]] for taget[name[function]] in starred[name[worker].cached_functions_to_run] begin[:] call[name[worker].run_function_on_all_workers, parameter[name[function]]] call[name[worker].function_actor_manager.export_cached, parameter[]] name[worker].cached_functions_to_run assign[=] constant[None]
keyword[def] identifier[connect] ( identifier[node] , identifier[mode] = identifier[WORKER_MODE] , identifier[log_to_driver] = keyword[False] , identifier[worker] = identifier[global_worker] , identifier[driver_id] = keyword[None] , identifier[load_code_from_local] = keyword[False] ): literal[string] identifier[error_message] = literal[string] keyword[assert] keyword[not] identifier[worker] . identifier[connected] , identifier[error_message] keyword[assert] identifier[worker] . identifier[cached_functions_to_run] keyword[is] keyword[not] keyword[None] , identifier[error_message] keyword[if] keyword[not] identifier[faulthandler] . identifier[is_enabled] (): identifier[faulthandler] . identifier[enable] ( identifier[all_threads] = keyword[False] ) identifier[worker] . identifier[profiler] = identifier[profiling] . identifier[Profiler] ( identifier[worker] , identifier[worker] . identifier[threads_stopped] ) keyword[if] identifier[mode] keyword[is] identifier[WORKER_MODE] : identifier[worker] . identifier[worker_id] = identifier[_random_string] () keyword[if] identifier[setproctitle] : identifier[setproctitle] . identifier[setproctitle] ( literal[string] ) keyword[else] : keyword[if] identifier[driver_id] keyword[is] keyword[None] : identifier[driver_id] = identifier[DriverID] ( identifier[_random_string] ()) keyword[if] keyword[not] identifier[isinstance] ( identifier[driver_id] , identifier[DriverID] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[worker] . identifier[worker_id] = identifier[driver_id] . identifier[binary] () keyword[if] identifier[mode] != identifier[WORKER_MODE] : identifier[worker] . identifier[task_driver_id] = identifier[DriverID] ( identifier[worker] . identifier[worker_id] ) identifier[worker] . identifier[actor_id] = identifier[ActorID] . identifier[nil] () identifier[worker] . identifier[node] = identifier[node] identifier[worker] . identifier[set_mode] ( identifier[mode] ) keyword[if] identifier[mode] == identifier[LOCAL_MODE] : keyword[return] identifier[worker] . identifier[redis_client] = identifier[node] . identifier[create_redis_client] () keyword[try] : identifier[ray] . identifier[services] . identifier[check_version_info] ( identifier[worker] . identifier[redis_client] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] identifier[mode] == identifier[SCRIPT_MODE] : keyword[raise] identifier[e] keyword[elif] identifier[mode] == identifier[WORKER_MODE] : identifier[traceback_str] = identifier[traceback] . identifier[format_exc] () identifier[ray] . identifier[utils] . identifier[push_error_to_driver_through_redis] ( identifier[worker] . identifier[redis_client] , identifier[ray_constants] . identifier[VERSION_MISMATCH_PUSH_ERROR] , identifier[traceback_str] , identifier[driver_id] = keyword[None] ) identifier[worker] . identifier[lock] = identifier[threading] . identifier[RLock] () identifier[global_state] . identifier[_initialize_global_state] ( identifier[node] . identifier[redis_address] , identifier[redis_password] = identifier[node] . identifier[redis_password] ) keyword[if] identifier[mode] == identifier[SCRIPT_MODE] : keyword[import] identifier[__main__] keyword[as] identifier[main] identifier[driver_info] ={ literal[string] : identifier[node] . identifier[node_ip_address] , literal[string] : identifier[worker] . identifier[worker_id] , literal[string] : identifier[time] . identifier[time] (), literal[string] : identifier[node] . identifier[plasma_store_socket_name] , literal[string] : identifier[node] . identifier[raylet_socket_name] , literal[string] :( identifier[main] . identifier[__file__] keyword[if] identifier[hasattr] ( identifier[main] , literal[string] ) keyword[else] literal[string] ) } identifier[worker] . identifier[redis_client] . identifier[hmset] ( literal[string] + identifier[worker] . identifier[worker_id] , identifier[driver_info] ) keyword[elif] identifier[mode] == identifier[WORKER_MODE] : identifier[worker_dict] ={ literal[string] : identifier[node] . identifier[node_ip_address] , literal[string] : identifier[node] . identifier[plasma_store_socket_name] , } identifier[redirect_worker_output_val] = identifier[worker] . identifier[redis_client] . identifier[get] ( literal[string] ) keyword[if] ( identifier[redirect_worker_output_val] keyword[is] keyword[not] keyword[None] keyword[and] identifier[int] ( identifier[redirect_worker_output_val] )== literal[int] ): identifier[log_stdout_file] , identifier[log_stderr_file] =( identifier[node] . identifier[new_worker_redirected_log_file] ( identifier[worker] . identifier[worker_id] )) identifier[os] . identifier[dup2] ( identifier[log_stdout_file] . identifier[fileno] (), identifier[sys] . identifier[stdout] . identifier[fileno] ()) identifier[os] . identifier[dup2] ( identifier[log_stderr_file] . identifier[fileno] (), identifier[sys] . identifier[stderr] . identifier[fileno] ()) identifier[sys] . identifier[stdout] = identifier[log_stdout_file] identifier[sys] . identifier[stderr] = identifier[log_stderr_file] identifier[print] ( literal[string] . identifier[format] ( identifier[os] . identifier[getpid] ())) identifier[print] ( literal[string] . identifier[format] ( identifier[os] . identifier[getpid] ()), identifier[file] = identifier[sys] . identifier[stderr] ) identifier[sys] . identifier[stdout] . identifier[flush] () identifier[sys] . identifier[stderr] . identifier[flush] () identifier[worker_dict] [ literal[string] ]= identifier[os] . identifier[path] . identifier[abspath] ( identifier[log_stdout_file] . identifier[name] ) identifier[worker_dict] [ literal[string] ]= identifier[os] . identifier[path] . identifier[abspath] ( identifier[log_stderr_file] . identifier[name] ) identifier[worker] . identifier[redis_client] . identifier[hmset] ( literal[string] + identifier[worker] . identifier[worker_id] , identifier[worker_dict] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[worker] . identifier[plasma_client] = identifier[thread_safe_client] ( identifier[plasma] . identifier[connect] ( identifier[node] . identifier[plasma_store_socket_name] , keyword[None] , literal[int] , literal[int] )) keyword[if] identifier[mode] == identifier[SCRIPT_MODE] : identifier[numpy_state] = identifier[np] . identifier[random] . identifier[get_state] () keyword[if] identifier[node] . identifier[object_id_seed] keyword[is] keyword[not] keyword[None] : identifier[np] . identifier[random] . identifier[seed] ( identifier[node] . identifier[object_id_seed] ) keyword[else] : identifier[np] . identifier[random] . identifier[seed] ( keyword[None] ) identifier[np] . identifier[random] . identifier[set_state] ( identifier[numpy_state] ) identifier[nil_actor_counter] = literal[int] identifier[function_descriptor] = identifier[FunctionDescriptor] . identifier[for_driver_task] () identifier[driver_task] = identifier[ray] . identifier[_raylet] . identifier[Task] ( identifier[worker] . identifier[task_driver_id] , identifier[function_descriptor] . identifier[get_function_descriptor_list] (), [], literal[int] , identifier[TaskID] ( identifier[_random_string] ()), literal[int] , identifier[ActorID] . identifier[nil] (), identifier[ObjectID] . identifier[nil] (), literal[int] , identifier[ActorID] . identifier[nil] (), identifier[ActorHandleID] . identifier[nil] (), identifier[nil_actor_counter] , [], [], {}, {}, ) identifier[global_state] . identifier[_execute_command] ( identifier[driver_task] . identifier[task_id] (), literal[string] , identifier[ray] . identifier[gcs_utils] . identifier[TablePrefix] . identifier[RAYLET_TASK] , identifier[ray] . identifier[gcs_utils] . identifier[TablePubsub] . identifier[RAYLET_TASK] , identifier[driver_task] . identifier[task_id] (). identifier[binary] (), identifier[driver_task] . identifier[_serialized_raylet_task] ()) identifier[worker] . identifier[task_context] . identifier[current_task_id] = identifier[driver_task] . identifier[task_id] () identifier[worker] . identifier[raylet_client] = identifier[ray] . identifier[_raylet] . identifier[RayletClient] ( identifier[node] . identifier[raylet_socket_name] , identifier[ClientID] ( identifier[worker] . identifier[worker_id] ), ( identifier[mode] == identifier[WORKER_MODE] ), identifier[DriverID] ( identifier[worker] . identifier[current_task_id] . identifier[binary] ()), ) identifier[worker] . identifier[import_thread] = identifier[import_thread] . identifier[ImportThread] ( identifier[worker] , identifier[mode] , identifier[worker] . identifier[threads_stopped] ) identifier[worker] . identifier[import_thread] . identifier[start] () keyword[if] identifier[mode] == identifier[SCRIPT_MODE] : identifier[q] = identifier[queue] . identifier[Queue] () identifier[worker] . identifier[listener_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[listen_error_messages_raylet] , identifier[name] = literal[string] , identifier[args] =( identifier[worker] , identifier[q] , identifier[worker] . identifier[threads_stopped] )) identifier[worker] . identifier[printer_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[print_error_messages_raylet] , identifier[name] = literal[string] , identifier[args] =( identifier[q] , identifier[worker] . identifier[threads_stopped] )) identifier[worker] . identifier[listener_thread] . identifier[daemon] = keyword[True] identifier[worker] . identifier[listener_thread] . identifier[start] () identifier[worker] . identifier[printer_thread] . identifier[daemon] = keyword[True] identifier[worker] . identifier[printer_thread] . identifier[start] () keyword[if] identifier[log_to_driver] : identifier[worker] . identifier[logger_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[print_logs] , identifier[name] = literal[string] , identifier[args] =( identifier[worker] . identifier[redis_client] , identifier[worker] . identifier[threads_stopped] )) identifier[worker] . identifier[logger_thread] . identifier[daemon] = keyword[True] identifier[worker] . identifier[logger_thread] . identifier[start] () keyword[if] identifier[mode] != identifier[LOCAL_MODE] : identifier[worker] . identifier[profiler] . identifier[start_flush_thread] () keyword[if] identifier[mode] == identifier[SCRIPT_MODE] : identifier[script_directory] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[argv] [ literal[int] ])) identifier[current_directory] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[curdir] ) identifier[worker] . identifier[run_function_on_all_workers] ( keyword[lambda] identifier[worker_info] : identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , identifier[script_directory] )) identifier[worker] . identifier[run_function_on_all_workers] ( keyword[lambda] identifier[worker_info] : identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , identifier[current_directory] )) keyword[for] identifier[function] keyword[in] identifier[worker] . identifier[cached_functions_to_run] : identifier[worker] . identifier[run_function_on_all_workers] ( identifier[function] ) identifier[worker] . identifier[function_actor_manager] . identifier[export_cached] () identifier[worker] . identifier[cached_functions_to_run] = keyword[None]
def connect(node, mode=WORKER_MODE, log_to_driver=False, worker=global_worker, driver_id=None, load_code_from_local=False): """Connect this worker to the raylet, to Plasma, and to Redis. Args: node (ray.node.Node): The node to connect. mode: The mode of the worker. One of SCRIPT_MODE, WORKER_MODE, and LOCAL_MODE. log_to_driver (bool): If true, then output from all of the worker processes on all nodes will be directed to the driver. worker: The ray.Worker instance. driver_id: The ID of driver. If it's None, then we will generate one. """ # Do some basic checking to make sure we didn't call ray.init twice. error_message = 'Perhaps you called ray.init twice by accident?' assert not worker.connected, error_message assert worker.cached_functions_to_run is not None, error_message # Enable nice stack traces on SIGSEGV etc. if not faulthandler.is_enabled(): faulthandler.enable(all_threads=False) # depends on [control=['if'], data=[]] worker.profiler = profiling.Profiler(worker, worker.threads_stopped) # Initialize some fields. if mode is WORKER_MODE: worker.worker_id = _random_string() if setproctitle: setproctitle.setproctitle('ray_worker') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # This is the code path of driver mode. if driver_id is None: driver_id = DriverID(_random_string()) # depends on [control=['if'], data=['driver_id']] if not isinstance(driver_id, DriverID): raise TypeError('The type of given driver id must be DriverID.') # depends on [control=['if'], data=[]] worker.worker_id = driver_id.binary() # When tasks are executed on remote workers in the context of multiple # drivers, the task driver ID is used to keep track of which driver is # responsible for the task so that error messages will be propagated to # the correct driver. if mode != WORKER_MODE: worker.task_driver_id = DriverID(worker.worker_id) # depends on [control=['if'], data=[]] # All workers start out as non-actors. A worker can be turned into an actor # after it is created. worker.actor_id = ActorID.nil() worker.node = node worker.set_mode(mode) # If running Ray in LOCAL_MODE, there is no need to create call # create_worker or to start the worker service. if mode == LOCAL_MODE: return # depends on [control=['if'], data=[]] # Create a Redis client. # The Redis client can safely be shared between threads. However, that is # not true of Redis pubsub clients. See the documentation at # https://github.com/andymccurdy/redis-py#thread-safety. worker.redis_client = node.create_redis_client() # For driver's check that the version information matches the version # information that the Ray cluster was started with. try: ray.services.check_version_info(worker.redis_client) # depends on [control=['try'], data=[]] except Exception as e: if mode == SCRIPT_MODE: raise e # depends on [control=['if'], data=[]] elif mode == WORKER_MODE: traceback_str = traceback.format_exc() ray.utils.push_error_to_driver_through_redis(worker.redis_client, ray_constants.VERSION_MISMATCH_PUSH_ERROR, traceback_str, driver_id=None) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] worker.lock = threading.RLock() # Create an object for interfacing with the global state. global_state._initialize_global_state(node.redis_address, redis_password=node.redis_password) # Register the worker with Redis. if mode == SCRIPT_MODE: # The concept of a driver is the same as the concept of a "job". # Register the driver/job with Redis here. import __main__ as main driver_info = {'node_ip_address': node.node_ip_address, 'driver_id': worker.worker_id, 'start_time': time.time(), 'plasma_store_socket': node.plasma_store_socket_name, 'raylet_socket': node.raylet_socket_name, 'name': main.__file__ if hasattr(main, '__file__') else 'INTERACTIVE MODE'} worker.redis_client.hmset(b'Drivers:' + worker.worker_id, driver_info) # depends on [control=['if'], data=[]] elif mode == WORKER_MODE: # Register the worker with Redis. worker_dict = {'node_ip_address': node.node_ip_address, 'plasma_store_socket': node.plasma_store_socket_name} # Check the RedirectOutput key in Redis and based on its value redirect # worker output and error to their own files. # This key is set in services.py when Redis is started. redirect_worker_output_val = worker.redis_client.get('RedirectOutput') if redirect_worker_output_val is not None and int(redirect_worker_output_val) == 1: (log_stdout_file, log_stderr_file) = node.new_worker_redirected_log_file(worker.worker_id) # Redirect stdout/stderr at the file descriptor level. If we simply # set sys.stdout and sys.stderr, then logging from C++ can fail to # be redirected. os.dup2(log_stdout_file.fileno(), sys.stdout.fileno()) os.dup2(log_stderr_file.fileno(), sys.stderr.fileno()) # We also manually set sys.stdout and sys.stderr because that seems # to have an affect on the output buffering. Without doing this, # stdout and stderr are heavily buffered resulting in seemingly # lost logging statements. sys.stdout = log_stdout_file sys.stderr = log_stderr_file # This should always be the first message to appear in the worker's # stdout and stderr log files. The string "Ray worker pid:" is # parsed in the log monitor process. print('Ray worker pid: {}'.format(os.getpid())) print('Ray worker pid: {}'.format(os.getpid()), file=sys.stderr) sys.stdout.flush() sys.stderr.flush() worker_dict['stdout_file'] = os.path.abspath(log_stdout_file.name) worker_dict['stderr_file'] = os.path.abspath(log_stderr_file.name) # depends on [control=['if'], data=[]] worker.redis_client.hmset(b'Workers:' + worker.worker_id, worker_dict) # depends on [control=['if'], data=[]] else: raise Exception('This code should be unreachable.') # Create an object store client. worker.plasma_client = thread_safe_client(plasma.connect(node.plasma_store_socket_name, None, 0, 300)) # If this is a driver, set the current task ID, the task driver ID, and set # the task index to 0. if mode == SCRIPT_MODE: # If the user provided an object_id_seed, then set the current task ID # deterministically based on that seed (without altering the state of # the user's random number generator). Otherwise, set the current task # ID randomly to avoid object ID collisions. numpy_state = np.random.get_state() if node.object_id_seed is not None: np.random.seed(node.object_id_seed) # depends on [control=['if'], data=[]] else: # Try to use true randomness. np.random.seed(None) # Reset the state of the numpy random number generator. np.random.set_state(numpy_state) # Create an entry for the driver task in the task table. This task is # added immediately with status RUNNING. This allows us to push errors # related to this driver task back to the driver. For example, if the # driver creates an object that is later evicted, we should notify the # user that we're unable to reconstruct the object, since we cannot # rerun the driver. nil_actor_counter = 0 function_descriptor = FunctionDescriptor.for_driver_task() # arguments. # num_returns. # parent_task_id. # parent_counter. # actor_creation_id. # actor_creation_dummy_object_id. # max_actor_reconstructions. # actor_id. # actor_handle_id. # actor_counter. # new_actor_handles. # execution_dependencies. # resource_map. # placement_resource_map. driver_task = ray._raylet.Task(worker.task_driver_id, function_descriptor.get_function_descriptor_list(), [], 0, TaskID(_random_string()), 0, ActorID.nil(), ObjectID.nil(), 0, ActorID.nil(), ActorHandleID.nil(), nil_actor_counter, [], [], {}, {}) # Add the driver task to the task table. global_state._execute_command(driver_task.task_id(), 'RAY.TABLE_ADD', ray.gcs_utils.TablePrefix.RAYLET_TASK, ray.gcs_utils.TablePubsub.RAYLET_TASK, driver_task.task_id().binary(), driver_task._serialized_raylet_task()) # Set the driver's current task ID to the task ID assigned to the # driver task. worker.task_context.current_task_id = driver_task.task_id() # depends on [control=['if'], data=[]] worker.raylet_client = ray._raylet.RayletClient(node.raylet_socket_name, ClientID(worker.worker_id), mode == WORKER_MODE, DriverID(worker.current_task_id.binary())) # Start the import thread worker.import_thread = import_thread.ImportThread(worker, mode, worker.threads_stopped) worker.import_thread.start() # If this is a driver running in SCRIPT_MODE, start a thread to print error # messages asynchronously in the background. Ideally the scheduler would # push messages to the driver's worker service, but we ran into bugs when # trying to properly shutdown the driver's worker service, so we are # temporarily using this implementation which constantly queries the # scheduler for new error messages. if mode == SCRIPT_MODE: q = queue.Queue() worker.listener_thread = threading.Thread(target=listen_error_messages_raylet, name='ray_listen_error_messages', args=(worker, q, worker.threads_stopped)) worker.printer_thread = threading.Thread(target=print_error_messages_raylet, name='ray_print_error_messages', args=(q, worker.threads_stopped)) worker.listener_thread.daemon = True worker.listener_thread.start() worker.printer_thread.daemon = True worker.printer_thread.start() if log_to_driver: worker.logger_thread = threading.Thread(target=print_logs, name='ray_print_logs', args=(worker.redis_client, worker.threads_stopped)) worker.logger_thread.daemon = True worker.logger_thread.start() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # If we are using the raylet code path and we are not in local mode, start # a background thread to periodically flush profiling data to the GCS. if mode != LOCAL_MODE: worker.profiler.start_flush_thread() # depends on [control=['if'], data=[]] if mode == SCRIPT_MODE: # Add the directory containing the script that is running to the Python # paths of the workers. Also add the current directory. Note that this # assumes that the directory structures on the machines in the clusters # are the same. script_directory = os.path.abspath(os.path.dirname(sys.argv[0])) current_directory = os.path.abspath(os.path.curdir) worker.run_function_on_all_workers(lambda worker_info: sys.path.insert(1, script_directory)) worker.run_function_on_all_workers(lambda worker_info: sys.path.insert(1, current_directory)) # TODO(rkn): Here we first export functions to run, then remote # functions. The order matters. For example, one of the functions to # run may set the Python path, which is needed to import a module used # to define a remote function. We may want to change the order to # simply be the order in which the exports were defined on the driver. # In addition, we will need to retain the ability to decide what the # first few exports are (mostly to set the Python path). Additionally, # note that the first exports to be defined on the driver will be the # ones defined in separate modules that are imported by the driver. # Export cached functions_to_run. for function in worker.cached_functions_to_run: worker.run_function_on_all_workers(function) # depends on [control=['for'], data=['function']] # Export cached remote functions and actors to the workers. worker.function_actor_manager.export_cached() # depends on [control=['if'], data=[]] worker.cached_functions_to_run = None
def _get_name(self): """ There are three cases, because apipie definitions can have multiple signatures but python does not For example, the api endpoint: /api/myres/:myres_id/subres/:subres_id/subres2 for method *index* will be translated to the api method name: subres_index_subres2 So when you want to call it from v2 object, you'll have: myres.subres_index_subres2 """ if self.url.count(':') > 1: # /api/one/two/:three/four -> two_:three_four base_name = self.url.split('/', 3)[-1].replace('/', '_')[1:] # :one_two_three -> two_three if base_name.startswith(':'): base_name = base_name.split('_')[-1] # one_:two_three_:four_five -> one_three_five base_name = re.sub('_:[^/]+', '', base_name) # in case that the last term was a parameter if base_name.endswith('_'): base_name = base_name[:-1] # one_two_three -> one_two_method_three base_name = ( '_' + self._method['name'] ).join(base_name.rsplit('_', 1)) else: base_name = self._method['name'] if base_name == 'import': base_name = 'import_' if self._apipie_resource != self.resource: return '%s_%s' % (self._apipie_resource, base_name) else: return base_name
def function[_get_name, parameter[self]]: constant[ There are three cases, because apipie definitions can have multiple signatures but python does not For example, the api endpoint: /api/myres/:myres_id/subres/:subres_id/subres2 for method *index* will be translated to the api method name: subres_index_subres2 So when you want to call it from v2 object, you'll have: myres.subres_index_subres2 ] if compare[call[name[self].url.count, parameter[constant[:]]] greater[>] constant[1]] begin[:] variable[base_name] assign[=] call[call[call[call[name[self].url.split, parameter[constant[/], constant[3]]]][<ast.UnaryOp object at 0x7da20c796b00>].replace, parameter[constant[/], constant[_]]]][<ast.Slice object at 0x7da20c7958d0>] if call[name[base_name].startswith, parameter[constant[:]]] begin[:] variable[base_name] assign[=] call[call[name[base_name].split, parameter[constant[_]]]][<ast.UnaryOp object at 0x7da20c7955a0>] variable[base_name] assign[=] call[name[re].sub, parameter[constant[_:[^/]+], constant[], name[base_name]]] if call[name[base_name].endswith, parameter[constant[_]]] begin[:] variable[base_name] assign[=] call[name[base_name]][<ast.Slice object at 0x7da20c7962f0>] variable[base_name] assign[=] call[binary_operation[constant[_] + call[name[self]._method][constant[name]]].join, parameter[call[name[base_name].rsplit, parameter[constant[_], constant[1]]]]] if compare[name[base_name] equal[==] constant[import]] begin[:] variable[base_name] assign[=] constant[import_] if compare[name[self]._apipie_resource not_equal[!=] name[self].resource] begin[:] return[binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c7957b0>, <ast.Name object at 0x7da20c7957e0>]]]]
keyword[def] identifier[_get_name] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[url] . identifier[count] ( literal[string] )> literal[int] : identifier[base_name] = identifier[self] . identifier[url] . identifier[split] ( literal[string] , literal[int] )[- literal[int] ]. identifier[replace] ( literal[string] , literal[string] )[ literal[int] :] keyword[if] identifier[base_name] . identifier[startswith] ( literal[string] ): identifier[base_name] = identifier[base_name] . identifier[split] ( literal[string] )[- literal[int] ] identifier[base_name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[base_name] ) keyword[if] identifier[base_name] . identifier[endswith] ( literal[string] ): identifier[base_name] = identifier[base_name] [:- literal[int] ] identifier[base_name] =( literal[string] + identifier[self] . identifier[_method] [ literal[string] ] ). identifier[join] ( identifier[base_name] . identifier[rsplit] ( literal[string] , literal[int] )) keyword[else] : identifier[base_name] = identifier[self] . identifier[_method] [ literal[string] ] keyword[if] identifier[base_name] == literal[string] : identifier[base_name] = literal[string] keyword[if] identifier[self] . identifier[_apipie_resource] != identifier[self] . identifier[resource] : keyword[return] literal[string] %( identifier[self] . identifier[_apipie_resource] , identifier[base_name] ) keyword[else] : keyword[return] identifier[base_name]
def _get_name(self): """ There are three cases, because apipie definitions can have multiple signatures but python does not For example, the api endpoint: /api/myres/:myres_id/subres/:subres_id/subres2 for method *index* will be translated to the api method name: subres_index_subres2 So when you want to call it from v2 object, you'll have: myres.subres_index_subres2 """ if self.url.count(':') > 1: # /api/one/two/:three/four -> two_:three_four base_name = self.url.split('/', 3)[-1].replace('/', '_')[1:] # :one_two_three -> two_three if base_name.startswith(':'): base_name = base_name.split('_')[-1] # depends on [control=['if'], data=[]] # one_:two_three_:four_five -> one_three_five base_name = re.sub('_:[^/]+', '', base_name) # in case that the last term was a parameter if base_name.endswith('_'): base_name = base_name[:-1] # depends on [control=['if'], data=[]] # one_two_three -> one_two_method_three base_name = ('_' + self._method['name']).join(base_name.rsplit('_', 1)) # depends on [control=['if'], data=[]] else: base_name = self._method['name'] if base_name == 'import': base_name = 'import_' # depends on [control=['if'], data=['base_name']] if self._apipie_resource != self.resource: return '%s_%s' % (self._apipie_resource, base_name) # depends on [control=['if'], data=[]] else: return base_name
def qos_queue_scheduler_strict_priority_dwrr_traffic_class2(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") qos = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos") queue = ET.SubElement(qos, "queue") scheduler = ET.SubElement(queue, "scheduler") strict_priority = ET.SubElement(scheduler, "strict-priority") dwrr_traffic_class2 = ET.SubElement(strict_priority, "dwrr-traffic-class2") dwrr_traffic_class2.text = kwargs.pop('dwrr_traffic_class2') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[qos_queue_scheduler_strict_priority_dwrr_traffic_class2, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[qos] assign[=] call[name[ET].SubElement, parameter[name[config], constant[qos]]] variable[queue] assign[=] call[name[ET].SubElement, parameter[name[qos], constant[queue]]] variable[scheduler] assign[=] call[name[ET].SubElement, parameter[name[queue], constant[scheduler]]] variable[strict_priority] assign[=] call[name[ET].SubElement, parameter[name[scheduler], constant[strict-priority]]] variable[dwrr_traffic_class2] assign[=] call[name[ET].SubElement, parameter[name[strict_priority], constant[dwrr-traffic-class2]]] name[dwrr_traffic_class2].text assign[=] call[name[kwargs].pop, parameter[constant[dwrr_traffic_class2]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[qos_queue_scheduler_strict_priority_dwrr_traffic_class2] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[qos] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[queue] = identifier[ET] . identifier[SubElement] ( identifier[qos] , literal[string] ) identifier[scheduler] = identifier[ET] . identifier[SubElement] ( identifier[queue] , literal[string] ) identifier[strict_priority] = identifier[ET] . identifier[SubElement] ( identifier[scheduler] , literal[string] ) identifier[dwrr_traffic_class2] = identifier[ET] . identifier[SubElement] ( identifier[strict_priority] , literal[string] ) identifier[dwrr_traffic_class2] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def qos_queue_scheduler_strict_priority_dwrr_traffic_class2(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') qos = ET.SubElement(config, 'qos', xmlns='urn:brocade.com:mgmt:brocade-qos') queue = ET.SubElement(qos, 'queue') scheduler = ET.SubElement(queue, 'scheduler') strict_priority = ET.SubElement(scheduler, 'strict-priority') dwrr_traffic_class2 = ET.SubElement(strict_priority, 'dwrr-traffic-class2') dwrr_traffic_class2.text = kwargs.pop('dwrr_traffic_class2') callback = kwargs.pop('callback', self._callback) return callback(config)
def usage(repo): """Usage: slpkg [COMMANDS|OPTIONS] {repository|package...} Commands: [update, --only=[...]] [upgrade, --only=[...]] [repo-add [repository name] [URL]] [repo-remove [repository]] [repo-enable] [repo-list] [repo-info [repository]] [update [slpkg]] [health, --silent] [deps-status, --tree, --graph=[type]] [new-config] Optional arguments: [-h] [-v] [-a [script] [sources...]] [-b [package...] --add, --remove, [list]] [-q [package...] --add, --remove, [list, build, install, build-install]] [-g [print, edit, reset]] [-l [repository], --index, --installed, --name] [-c [repository], --upgrade, --rebuild, --skip=[...], --resolve-off, --checklist] [-s [repository] [package...], --rebuild, --reinstall, --resolve-off, --download-only, --directory-prefix=[dir], --case-ins, --patches] [-t [repository] [package], --check-deps, --graph=[type], --case-ins] [-p [repository] [package], --color=[]] [-n [SBo package], --checklist, --case-ins] [-F [package...], --case-ins] [-f [package...], --case-ins] [-i [options] [package...]] [-u [options] [package...]] [-r [options] [package...], --deps, --check-deps, --tag, --checklist] [-d [package...]] """ if repo and repo not in _meta_.repositories: error_repo = "" all_repos = RepoList().all_repos.keys() del RepoList().all_repos if repo in all_repos: error_repo = ("slpkg: Error: Repository '{0}' is not activated" "\n".format(repo)) else: error_repo = ("slpkg: Error: Repository '{0}' does not exist" "\n".format(repo)) print("\n" + error_repo) raise SystemExit(1) print(usage.__doc__) print("For more information try 'slpkg -h, --help' or view manpage\n")
def function[usage, parameter[repo]]: constant[Usage: slpkg [COMMANDS|OPTIONS] {repository|package...} Commands: [update, --only=[...]] [upgrade, --only=[...]] [repo-add [repository name] [URL]] [repo-remove [repository]] [repo-enable] [repo-list] [repo-info [repository]] [update [slpkg]] [health, --silent] [deps-status, --tree, --graph=[type]] [new-config] Optional arguments: [-h] [-v] [-a [script] [sources...]] [-b [package...] --add, --remove, [list]] [-q [package...] --add, --remove, [list, build, install, build-install]] [-g [print, edit, reset]] [-l [repository], --index, --installed, --name] [-c [repository], --upgrade, --rebuild, --skip=[...], --resolve-off, --checklist] [-s [repository] [package...], --rebuild, --reinstall, --resolve-off, --download-only, --directory-prefix=[dir], --case-ins, --patches] [-t [repository] [package], --check-deps, --graph=[type], --case-ins] [-p [repository] [package], --color=[]] [-n [SBo package], --checklist, --case-ins] [-F [package...], --case-ins] [-f [package...], --case-ins] [-i [options] [package...]] [-u [options] [package...]] [-r [options] [package...], --deps, --check-deps, --tag, --checklist] [-d [package...]] ] if <ast.BoolOp object at 0x7da204621540> begin[:] variable[error_repo] assign[=] constant[] variable[all_repos] assign[=] call[call[name[RepoList], parameter[]].all_repos.keys, parameter[]] <ast.Delete object at 0x7da204623280> if compare[name[repo] in name[all_repos]] begin[:] variable[error_repo] assign[=] call[constant[slpkg: Error: Repository '{0}' is not activated ].format, parameter[name[repo]]] call[name[print], parameter[binary_operation[constant[ ] + name[error_repo]]]] <ast.Raise object at 0x7da1b2828c40> call[name[print], parameter[name[usage].__doc__]] call[name[print], parameter[constant[For more information try 'slpkg -h, --help' or view manpage ]]]
keyword[def] identifier[usage] ( identifier[repo] ): literal[string] keyword[if] identifier[repo] keyword[and] identifier[repo] keyword[not] keyword[in] identifier[_meta_] . identifier[repositories] : identifier[error_repo] = literal[string] identifier[all_repos] = identifier[RepoList] (). identifier[all_repos] . identifier[keys] () keyword[del] identifier[RepoList] (). identifier[all_repos] keyword[if] identifier[repo] keyword[in] identifier[all_repos] : identifier[error_repo] =( literal[string] literal[string] . identifier[format] ( identifier[repo] )) keyword[else] : identifier[error_repo] =( literal[string] literal[string] . identifier[format] ( identifier[repo] )) identifier[print] ( literal[string] + identifier[error_repo] ) keyword[raise] identifier[SystemExit] ( literal[int] ) identifier[print] ( identifier[usage] . identifier[__doc__] ) identifier[print] ( literal[string] )
def usage(repo): """Usage: slpkg [COMMANDS|OPTIONS] {repository|package...} Commands: [update, --only=[...]] [upgrade, --only=[...]] [repo-add [repository name] [URL]] [repo-remove [repository]] [repo-enable] [repo-list] [repo-info [repository]] [update [slpkg]] [health, --silent] [deps-status, --tree, --graph=[type]] [new-config] Optional arguments: [-h] [-v] [-a [script] [sources...]] [-b [package...] --add, --remove, [list]] [-q [package...] --add, --remove, [list, build, install, build-install]] [-g [print, edit, reset]] [-l [repository], --index, --installed, --name] [-c [repository], --upgrade, --rebuild, --skip=[...], --resolve-off, --checklist] [-s [repository] [package...], --rebuild, --reinstall, --resolve-off, --download-only, --directory-prefix=[dir], --case-ins, --patches] [-t [repository] [package], --check-deps, --graph=[type], --case-ins] [-p [repository] [package], --color=[]] [-n [SBo package], --checklist, --case-ins] [-F [package...], --case-ins] [-f [package...], --case-ins] [-i [options] [package...]] [-u [options] [package...]] [-r [options] [package...], --deps, --check-deps, --tag, --checklist] [-d [package...]] """ if repo and repo not in _meta_.repositories: error_repo = '' all_repos = RepoList().all_repos.keys() del RepoList().all_repos if repo in all_repos: error_repo = "slpkg: Error: Repository '{0}' is not activated\n".format(repo) # depends on [control=['if'], data=['repo']] else: error_repo = "slpkg: Error: Repository '{0}' does not exist\n".format(repo) print('\n' + error_repo) raise SystemExit(1) # depends on [control=['if'], data=[]] print(usage.__doc__) print("For more information try 'slpkg -h, --help' or view manpage\n")
def add(self, req: Request): """ Add the specified request to this request store. """ key = req.key if key not in self: self[key] = ReqState(req) return self[key]
def function[add, parameter[self, req]]: constant[ Add the specified request to this request store. ] variable[key] assign[=] name[req].key if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]] begin[:] call[name[self]][name[key]] assign[=] call[name[ReqState], parameter[name[req]]] return[call[name[self]][name[key]]]
keyword[def] identifier[add] ( identifier[self] , identifier[req] : identifier[Request] ): literal[string] identifier[key] = identifier[req] . identifier[key] keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] : identifier[self] [ identifier[key] ]= identifier[ReqState] ( identifier[req] ) keyword[return] identifier[self] [ identifier[key] ]
def add(self, req: Request): """ Add the specified request to this request store. """ key = req.key if key not in self: self[key] = ReqState(req) # depends on [control=['if'], data=['key', 'self']] return self[key]
def get(self, endpoint: str, **kwargs) -> dict: """HTTP GET operation to API endpoint.""" return self._request('GET', endpoint, **kwargs)
def function[get, parameter[self, endpoint]]: constant[HTTP GET operation to API endpoint.] return[call[name[self]._request, parameter[constant[GET], name[endpoint]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[endpoint] : identifier[str] ,** identifier[kwargs] )-> identifier[dict] : literal[string] keyword[return] identifier[self] . identifier[_request] ( literal[string] , identifier[endpoint] ,** identifier[kwargs] )
def get(self, endpoint: str, **kwargs) -> dict: """HTTP GET operation to API endpoint.""" return self._request('GET', endpoint, **kwargs)