code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def load_config(conf_dir=DEFAULT_CONFIG_DIR, schema=CLIENT_SCHEMA): """ Load config files from the specified directory, using defaults for missing values. Directory should contain a file named config.<ext> where <ext> is a supported config file format. """ data = default_config(schema) config = read_config(conf_dir) if config: recursive_update(data, config) return data
def function[load_config, parameter[conf_dir, schema]]: constant[ Load config files from the specified directory, using defaults for missing values. Directory should contain a file named config.<ext> where <ext> is a supported config file format. ] variable[data] assign[=] call[name[default_config], parameter[name[schema]]] variable[config] assign[=] call[name[read_config], parameter[name[conf_dir]]] if name[config] begin[:] call[name[recursive_update], parameter[name[data], name[config]]] return[name[data]]
keyword[def] identifier[load_config] ( identifier[conf_dir] = identifier[DEFAULT_CONFIG_DIR] , identifier[schema] = identifier[CLIENT_SCHEMA] ): literal[string] identifier[data] = identifier[default_config] ( identifier[schema] ) identifier[config] = identifier[read_config] ( identifier[conf_dir] ) keyword[if] identifier[config] : identifier[recursive_update] ( identifier[data] , identifier[config] ) keyword[return] identifier[data]
def load_config(conf_dir=DEFAULT_CONFIG_DIR, schema=CLIENT_SCHEMA): """ Load config files from the specified directory, using defaults for missing values. Directory should contain a file named config.<ext> where <ext> is a supported config file format. """ data = default_config(schema) config = read_config(conf_dir) if config: recursive_update(data, config) # depends on [control=['if'], data=[]] return data
def postprocess_link(self, entry): """ Attempt to load full-text content from resource. """ if not self.follow_links: return if type(entry.link) is not list: entry.link = [entry.link] for link in list(entry.link): if not os.path.exists(link): continue mime_type = magic.from_file(link, mime=True) if mime_type == 'application/pdf': structuredfeature = extract_pdf(link) elif mime_type == 'text/plain': structuredfeature = extract_text(link) else: structuredfeature = None if not structuredfeature: continue fset_name = mime_type.split('/')[-1] + '_text' if not fset_name in self.full_text: self.full_text[fset_name] = {} if hasattr(self, 'index_by'): ident = getattr(entry, self.index_by) if type(ident) is list: ident = ident[0] else: # If `index_by` is not set, use `uri` by default. ident = entry.uri self.full_text[fset_name][ident] = structuredfeature
def function[postprocess_link, parameter[self, entry]]: constant[ Attempt to load full-text content from resource. ] if <ast.UnaryOp object at 0x7da1b12c40a0> begin[:] return[None] if compare[call[name[type], parameter[name[entry].link]] is_not name[list]] begin[:] name[entry].link assign[=] list[[<ast.Attribute object at 0x7da1b12c66b0>]] for taget[name[link]] in starred[call[name[list], parameter[name[entry].link]]] begin[:] if <ast.UnaryOp object at 0x7da1b12c41c0> begin[:] continue variable[mime_type] assign[=] call[name[magic].from_file, parameter[name[link]]] if compare[name[mime_type] equal[==] constant[application/pdf]] begin[:] variable[structuredfeature] assign[=] call[name[extract_pdf], parameter[name[link]]] if <ast.UnaryOp object at 0x7da1b12c7a60> begin[:] continue variable[fset_name] assign[=] binary_operation[call[call[name[mime_type].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b12dba00>] + constant[_text]] if <ast.UnaryOp object at 0x7da1b12d9030> begin[:] call[name[self].full_text][name[fset_name]] assign[=] dictionary[[], []] if call[name[hasattr], parameter[name[self], constant[index_by]]] begin[:] variable[ident] assign[=] call[name[getattr], parameter[name[entry], name[self].index_by]] if compare[call[name[type], parameter[name[ident]]] is name[list]] begin[:] variable[ident] assign[=] call[name[ident]][constant[0]] call[call[name[self].full_text][name[fset_name]]][name[ident]] assign[=] name[structuredfeature]
keyword[def] identifier[postprocess_link] ( identifier[self] , identifier[entry] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[follow_links] : keyword[return] keyword[if] identifier[type] ( identifier[entry] . identifier[link] ) keyword[is] keyword[not] identifier[list] : identifier[entry] . identifier[link] =[ identifier[entry] . identifier[link] ] keyword[for] identifier[link] keyword[in] identifier[list] ( identifier[entry] . identifier[link] ): keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[link] ): keyword[continue] identifier[mime_type] = identifier[magic] . identifier[from_file] ( identifier[link] , identifier[mime] = keyword[True] ) keyword[if] identifier[mime_type] == literal[string] : identifier[structuredfeature] = identifier[extract_pdf] ( identifier[link] ) keyword[elif] identifier[mime_type] == literal[string] : identifier[structuredfeature] = identifier[extract_text] ( identifier[link] ) keyword[else] : identifier[structuredfeature] = keyword[None] keyword[if] keyword[not] identifier[structuredfeature] : keyword[continue] identifier[fset_name] = identifier[mime_type] . identifier[split] ( literal[string] )[- literal[int] ]+ literal[string] keyword[if] keyword[not] identifier[fset_name] keyword[in] identifier[self] . identifier[full_text] : identifier[self] . identifier[full_text] [ identifier[fset_name] ]={} keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[ident] = identifier[getattr] ( identifier[entry] , identifier[self] . identifier[index_by] ) keyword[if] identifier[type] ( identifier[ident] ) keyword[is] identifier[list] : identifier[ident] = identifier[ident] [ literal[int] ] keyword[else] : identifier[ident] = identifier[entry] . identifier[uri] identifier[self] . identifier[full_text] [ identifier[fset_name] ][ identifier[ident] ]= identifier[structuredfeature]
def postprocess_link(self, entry): """ Attempt to load full-text content from resource. """ if not self.follow_links: return # depends on [control=['if'], data=[]] if type(entry.link) is not list: entry.link = [entry.link] # depends on [control=['if'], data=[]] for link in list(entry.link): if not os.path.exists(link): continue # depends on [control=['if'], data=[]] mime_type = magic.from_file(link, mime=True) if mime_type == 'application/pdf': structuredfeature = extract_pdf(link) # depends on [control=['if'], data=[]] elif mime_type == 'text/plain': structuredfeature = extract_text(link) # depends on [control=['if'], data=[]] else: structuredfeature = None if not structuredfeature: continue # depends on [control=['if'], data=[]] fset_name = mime_type.split('/')[-1] + '_text' if not fset_name in self.full_text: self.full_text[fset_name] = {} # depends on [control=['if'], data=[]] if hasattr(self, 'index_by'): ident = getattr(entry, self.index_by) if type(ident) is list: ident = ident[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # If `index_by` is not set, use `uri` by default. ident = entry.uri self.full_text[fset_name][ident] = structuredfeature # depends on [control=['for'], data=['link']]
def post(self, request, pzone_pk): """Add a new operation to the given pzone, return json of the new operation.""" # attempt to get given content list pzone = None try: pzone = PZone.objects.get(pk=pzone_pk) except PZone.DoesNotExist: raise Http404("Cannot find given pzone.") json_obj = [] http_status = 500 json_op = json.loads(request.body.decode("utf8")) if not isinstance(json_op, list): json_op = [json_op] for data in json_op: try: serializer = self.get_serializer_class_by_name(data["type_name"]) except ContentType.DoesNotExist as e: json_obj = {"errors": [str(e)]} http_status = 400 break serialized = serializer(data=data) if serialized.is_valid(): # object is valid, save it serialized.save() # set response data json_obj.append(serialized.data) http_status = 200 else: # object is not valid, return errors in a 400 response json_obj = serialized.errors http_status = 400 break if http_status == 200 and len(json_obj) == 1: json_obj = json_obj[0] # cache the time in seconds until the next operation occurs next_ops = PZoneOperation.objects.filter(when__lte=timezone.now()) if len(next_ops) > 0: # we have at least one operation, ordered soonest first next_op = next_ops[0] # cache with expiry number of seconds until op should exec cache.set('pzone-operation-expiry-' + pzone.name, next_op.when, 60 * 60 * 5) return Response( json_obj, status=http_status, content_type="application/json" )
def function[post, parameter[self, request, pzone_pk]]: constant[Add a new operation to the given pzone, return json of the new operation.] variable[pzone] assign[=] constant[None] <ast.Try object at 0x7da1b0bf1ed0> variable[json_obj] assign[=] list[[]] variable[http_status] assign[=] constant[500] variable[json_op] assign[=] call[name[json].loads, parameter[call[name[request].body.decode, parameter[constant[utf8]]]]] if <ast.UnaryOp object at 0x7da1b0b3a8c0> begin[:] variable[json_op] assign[=] list[[<ast.Name object at 0x7da1b0b3a290>]] for taget[name[data]] in starred[name[json_op]] begin[:] <ast.Try object at 0x7da1b0bf3fd0> variable[serialized] assign[=] call[name[serializer], parameter[]] if call[name[serialized].is_valid, parameter[]] begin[:] call[name[serialized].save, parameter[]] call[name[json_obj].append, parameter[name[serialized].data]] variable[http_status] assign[=] constant[200] if <ast.BoolOp object at 0x7da1b0bf2a10> begin[:] variable[json_obj] assign[=] call[name[json_obj]][constant[0]] variable[next_ops] assign[=] call[name[PZoneOperation].objects.filter, parameter[]] if compare[call[name[len], parameter[name[next_ops]]] greater[>] constant[0]] begin[:] variable[next_op] assign[=] call[name[next_ops]][constant[0]] call[name[cache].set, parameter[binary_operation[constant[pzone-operation-expiry-] + name[pzone].name], name[next_op].when, binary_operation[binary_operation[constant[60] * constant[60]] * constant[5]]]] return[call[name[Response], parameter[name[json_obj]]]]
keyword[def] identifier[post] ( identifier[self] , identifier[request] , identifier[pzone_pk] ): literal[string] identifier[pzone] = keyword[None] keyword[try] : identifier[pzone] = identifier[PZone] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[pzone_pk] ) keyword[except] identifier[PZone] . identifier[DoesNotExist] : keyword[raise] identifier[Http404] ( literal[string] ) identifier[json_obj] =[] identifier[http_status] = literal[int] identifier[json_op] = identifier[json] . identifier[loads] ( identifier[request] . identifier[body] . identifier[decode] ( literal[string] )) keyword[if] keyword[not] identifier[isinstance] ( identifier[json_op] , identifier[list] ): identifier[json_op] =[ identifier[json_op] ] keyword[for] identifier[data] keyword[in] identifier[json_op] : keyword[try] : identifier[serializer] = identifier[self] . identifier[get_serializer_class_by_name] ( identifier[data] [ literal[string] ]) keyword[except] identifier[ContentType] . identifier[DoesNotExist] keyword[as] identifier[e] : identifier[json_obj] ={ literal[string] :[ identifier[str] ( identifier[e] )]} identifier[http_status] = literal[int] keyword[break] identifier[serialized] = identifier[serializer] ( identifier[data] = identifier[data] ) keyword[if] identifier[serialized] . identifier[is_valid] (): identifier[serialized] . identifier[save] () identifier[json_obj] . identifier[append] ( identifier[serialized] . identifier[data] ) identifier[http_status] = literal[int] keyword[else] : identifier[json_obj] = identifier[serialized] . identifier[errors] identifier[http_status] = literal[int] keyword[break] keyword[if] identifier[http_status] == literal[int] keyword[and] identifier[len] ( identifier[json_obj] )== literal[int] : identifier[json_obj] = identifier[json_obj] [ literal[int] ] identifier[next_ops] = identifier[PZoneOperation] . identifier[objects] . identifier[filter] ( identifier[when__lte] = identifier[timezone] . 
identifier[now] ()) keyword[if] identifier[len] ( identifier[next_ops] )> literal[int] : identifier[next_op] = identifier[next_ops] [ literal[int] ] identifier[cache] . identifier[set] ( literal[string] + identifier[pzone] . identifier[name] , identifier[next_op] . identifier[when] , literal[int] * literal[int] * literal[int] ) keyword[return] identifier[Response] ( identifier[json_obj] , identifier[status] = identifier[http_status] , identifier[content_type] = literal[string] )
def post(self, request, pzone_pk): """Add a new operation to the given pzone, return json of the new operation.""" # attempt to get given content list pzone = None try: pzone = PZone.objects.get(pk=pzone_pk) # depends on [control=['try'], data=[]] except PZone.DoesNotExist: raise Http404('Cannot find given pzone.') # depends on [control=['except'], data=[]] json_obj = [] http_status = 500 json_op = json.loads(request.body.decode('utf8')) if not isinstance(json_op, list): json_op = [json_op] # depends on [control=['if'], data=[]] for data in json_op: try: serializer = self.get_serializer_class_by_name(data['type_name']) # depends on [control=['try'], data=[]] except ContentType.DoesNotExist as e: json_obj = {'errors': [str(e)]} http_status = 400 break # depends on [control=['except'], data=['e']] serialized = serializer(data=data) if serialized.is_valid(): # object is valid, save it serialized.save() # set response data json_obj.append(serialized.data) http_status = 200 # depends on [control=['if'], data=[]] else: # object is not valid, return errors in a 400 response json_obj = serialized.errors http_status = 400 break # depends on [control=['for'], data=['data']] if http_status == 200 and len(json_obj) == 1: json_obj = json_obj[0] # depends on [control=['if'], data=[]] # cache the time in seconds until the next operation occurs next_ops = PZoneOperation.objects.filter(when__lte=timezone.now()) if len(next_ops) > 0: # we have at least one operation, ordered soonest first next_op = next_ops[0] # cache with expiry number of seconds until op should exec cache.set('pzone-operation-expiry-' + pzone.name, next_op.when, 60 * 60 * 5) # depends on [control=['if'], data=[]] return Response(json_obj, status=http_status, content_type='application/json')
def _do_mstep(self, stats): """Performs the M-step of EM algorithm. Parameters ---------- stats : dict Sufficient statistics updated from all available samples. """ # The ``np.where`` calls guard against updating forbidden states # or transitions in e.g. a left-right HMM. if 's' in self.params: startprob_ = self.startprob_prior - 1.0 + stats['start'] self.startprob_ = np.where(self.startprob_ == 0.0, self.startprob_, startprob_) normalize(self.startprob_) if 't' in self.params: transmat_ = self.transmat_prior - 1.0 + stats['trans'] self.transmat_ = np.where(self.transmat_ == 0.0, self.transmat_, transmat_) normalize(self.transmat_, axis=1)
def function[_do_mstep, parameter[self, stats]]: constant[Performs the M-step of EM algorithm. Parameters ---------- stats : dict Sufficient statistics updated from all available samples. ] if compare[constant[s] in name[self].params] begin[:] variable[startprob_] assign[=] binary_operation[binary_operation[name[self].startprob_prior - constant[1.0]] + call[name[stats]][constant[start]]] name[self].startprob_ assign[=] call[name[np].where, parameter[compare[name[self].startprob_ equal[==] constant[0.0]], name[self].startprob_, name[startprob_]]] call[name[normalize], parameter[name[self].startprob_]] if compare[constant[t] in name[self].params] begin[:] variable[transmat_] assign[=] binary_operation[binary_operation[name[self].transmat_prior - constant[1.0]] + call[name[stats]][constant[trans]]] name[self].transmat_ assign[=] call[name[np].where, parameter[compare[name[self].transmat_ equal[==] constant[0.0]], name[self].transmat_, name[transmat_]]] call[name[normalize], parameter[name[self].transmat_]]
keyword[def] identifier[_do_mstep] ( identifier[self] , identifier[stats] ): literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[params] : identifier[startprob_] = identifier[self] . identifier[startprob_prior] - literal[int] + identifier[stats] [ literal[string] ] identifier[self] . identifier[startprob_] = identifier[np] . identifier[where] ( identifier[self] . identifier[startprob_] == literal[int] , identifier[self] . identifier[startprob_] , identifier[startprob_] ) identifier[normalize] ( identifier[self] . identifier[startprob_] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[params] : identifier[transmat_] = identifier[self] . identifier[transmat_prior] - literal[int] + identifier[stats] [ literal[string] ] identifier[self] . identifier[transmat_] = identifier[np] . identifier[where] ( identifier[self] . identifier[transmat_] == literal[int] , identifier[self] . identifier[transmat_] , identifier[transmat_] ) identifier[normalize] ( identifier[self] . identifier[transmat_] , identifier[axis] = literal[int] )
def _do_mstep(self, stats): """Performs the M-step of EM algorithm. Parameters ---------- stats : dict Sufficient statistics updated from all available samples. """ # The ``np.where`` calls guard against updating forbidden states # or transitions in e.g. a left-right HMM. if 's' in self.params: startprob_ = self.startprob_prior - 1.0 + stats['start'] self.startprob_ = np.where(self.startprob_ == 0.0, self.startprob_, startprob_) normalize(self.startprob_) # depends on [control=['if'], data=[]] if 't' in self.params: transmat_ = self.transmat_prior - 1.0 + stats['trans'] self.transmat_ = np.where(self.transmat_ == 0.0, self.transmat_, transmat_) normalize(self.transmat_, axis=1) # depends on [control=['if'], data=[]]
def create(self, name, group_id, avatar_url=None, callback_url=None, dm_notification=None, **kwargs): """Create a new bot in a particular group. :param str name: bot name :param str group_id: the group_id of a group :param str avatar_url: the URL of an image to use as an avatar :param str callback_url: a POST-back URL for each new message :param bool dm_notification: whether to POST-back for direct messages? :return: the new bot :rtype: :class:`~groupy.api.bots.Bot` """ payload = { 'bot': { 'name': name, 'group_id': group_id, 'avatar_url': avatar_url, 'callback_url': callback_url, 'dm_notification': dm_notification, }, } payload['bot'].update(kwargs) response = self.session.post(self.url, json=payload) bot = response.data['bot'] return Bot(self, **bot)
def function[create, parameter[self, name, group_id, avatar_url, callback_url, dm_notification]]: constant[Create a new bot in a particular group. :param str name: bot name :param str group_id: the group_id of a group :param str avatar_url: the URL of an image to use as an avatar :param str callback_url: a POST-back URL for each new message :param bool dm_notification: whether to POST-back for direct messages? :return: the new bot :rtype: :class:`~groupy.api.bots.Bot` ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b2345390>], [<ast.Dict object at 0x7da1b2346fb0>]] call[call[name[payload]][constant[bot]].update, parameter[name[kwargs]]] variable[response] assign[=] call[name[self].session.post, parameter[name[self].url]] variable[bot] assign[=] call[name[response].data][constant[bot]] return[call[name[Bot], parameter[name[self]]]]
keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[group_id] , identifier[avatar_url] = keyword[None] , identifier[callback_url] = keyword[None] , identifier[dm_notification] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[payload] ={ literal[string] :{ literal[string] : identifier[name] , literal[string] : identifier[group_id] , literal[string] : identifier[avatar_url] , literal[string] : identifier[callback_url] , literal[string] : identifier[dm_notification] , }, } identifier[payload] [ literal[string] ]. identifier[update] ( identifier[kwargs] ) identifier[response] = identifier[self] . identifier[session] . identifier[post] ( identifier[self] . identifier[url] , identifier[json] = identifier[payload] ) identifier[bot] = identifier[response] . identifier[data] [ literal[string] ] keyword[return] identifier[Bot] ( identifier[self] ,** identifier[bot] )
def create(self, name, group_id, avatar_url=None, callback_url=None, dm_notification=None, **kwargs): """Create a new bot in a particular group. :param str name: bot name :param str group_id: the group_id of a group :param str avatar_url: the URL of an image to use as an avatar :param str callback_url: a POST-back URL for each new message :param bool dm_notification: whether to POST-back for direct messages? :return: the new bot :rtype: :class:`~groupy.api.bots.Bot` """ payload = {'bot': {'name': name, 'group_id': group_id, 'avatar_url': avatar_url, 'callback_url': callback_url, 'dm_notification': dm_notification}} payload['bot'].update(kwargs) response = self.session.post(self.url, json=payload) bot = response.data['bot'] return Bot(self, **bot)
def make_eval_fn_with_agent( agent_type, eval_mode, planner_hparams, model_dir, log_every_steps=None, video_writers=(), random_starts_step_limit=None ): """Returns an out-of-graph eval_fn using the Agent API.""" def eval_fn(env, loop_hparams, policy_hparams, policy_dir, sampling_temp): """Eval function.""" base_env = env env = rl_utils.BatchStackWrapper(env, loop_hparams.frame_stack_size) agent = make_agent_from_hparams( agent_type, base_env, env, loop_hparams, policy_hparams, planner_hparams, model_dir, policy_dir, sampling_temp, video_writers ) if eval_mode == "agent_simulated": real_env = base_env.new_like(batch_size=1) stacked_env = rl_utils.BatchStackWrapper( real_env, loop_hparams.frame_stack_size ) collect_frames_for_random_starts( real_env, stacked_env, agent, loop_hparams.frame_stack_size, random_starts_step_limit, log_every_steps ) initial_frame_chooser = rl_utils.make_initial_frame_chooser( real_env, loop_hparams.frame_stack_size, simulation_random_starts=True, simulation_flip_first_random_for_beginning=False, split=None, ) env_fn = rl.make_simulated_env_fn_from_hparams( real_env, loop_hparams, batch_size=loop_hparams.eval_batch_size, initial_frame_chooser=initial_frame_chooser, model_dir=model_dir ) sim_env = env_fn(in_graph=False) env = rl_utils.BatchStackWrapper(sim_env, loop_hparams.frame_stack_size) kwargs = {} if not agent.records_own_videos: kwargs["video_writers"] = video_writers step_limit = base_env.rl_env_max_episode_steps if step_limit == -1: step_limit = None rl_utils.run_rollouts( env, agent, env.reset(), log_every_steps=log_every_steps, step_limit=step_limit, **kwargs ) if eval_mode == "agent_real": assert len(base_env.current_epoch_rollouts()) == env.batch_size return eval_fn
def function[make_eval_fn_with_agent, parameter[agent_type, eval_mode, planner_hparams, model_dir, log_every_steps, video_writers, random_starts_step_limit]]: constant[Returns an out-of-graph eval_fn using the Agent API.] def function[eval_fn, parameter[env, loop_hparams, policy_hparams, policy_dir, sampling_temp]]: constant[Eval function.] variable[base_env] assign[=] name[env] variable[env] assign[=] call[name[rl_utils].BatchStackWrapper, parameter[name[env], name[loop_hparams].frame_stack_size]] variable[agent] assign[=] call[name[make_agent_from_hparams], parameter[name[agent_type], name[base_env], name[env], name[loop_hparams], name[policy_hparams], name[planner_hparams], name[model_dir], name[policy_dir], name[sampling_temp], name[video_writers]]] if compare[name[eval_mode] equal[==] constant[agent_simulated]] begin[:] variable[real_env] assign[=] call[name[base_env].new_like, parameter[]] variable[stacked_env] assign[=] call[name[rl_utils].BatchStackWrapper, parameter[name[real_env], name[loop_hparams].frame_stack_size]] call[name[collect_frames_for_random_starts], parameter[name[real_env], name[stacked_env], name[agent], name[loop_hparams].frame_stack_size, name[random_starts_step_limit], name[log_every_steps]]] variable[initial_frame_chooser] assign[=] call[name[rl_utils].make_initial_frame_chooser, parameter[name[real_env], name[loop_hparams].frame_stack_size]] variable[env_fn] assign[=] call[name[rl].make_simulated_env_fn_from_hparams, parameter[name[real_env], name[loop_hparams]]] variable[sim_env] assign[=] call[name[env_fn], parameter[]] variable[env] assign[=] call[name[rl_utils].BatchStackWrapper, parameter[name[sim_env], name[loop_hparams].frame_stack_size]] variable[kwargs] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da20e9549a0> begin[:] call[name[kwargs]][constant[video_writers]] assign[=] name[video_writers] variable[step_limit] assign[=] name[base_env].rl_env_max_episode_steps if compare[name[step_limit] equal[==] <ast.UnaryOp 
object at 0x7da20e955390>] begin[:] variable[step_limit] assign[=] constant[None] call[name[rl_utils].run_rollouts, parameter[name[env], name[agent], call[name[env].reset, parameter[]]]] if compare[name[eval_mode] equal[==] constant[agent_real]] begin[:] assert[compare[call[name[len], parameter[call[name[base_env].current_epoch_rollouts, parameter[]]]] equal[==] name[env].batch_size]] return[name[eval_fn]]
keyword[def] identifier[make_eval_fn_with_agent] ( identifier[agent_type] , identifier[eval_mode] , identifier[planner_hparams] , identifier[model_dir] , identifier[log_every_steps] = keyword[None] , identifier[video_writers] =(), identifier[random_starts_step_limit] = keyword[None] ): literal[string] keyword[def] identifier[eval_fn] ( identifier[env] , identifier[loop_hparams] , identifier[policy_hparams] , identifier[policy_dir] , identifier[sampling_temp] ): literal[string] identifier[base_env] = identifier[env] identifier[env] = identifier[rl_utils] . identifier[BatchStackWrapper] ( identifier[env] , identifier[loop_hparams] . identifier[frame_stack_size] ) identifier[agent] = identifier[make_agent_from_hparams] ( identifier[agent_type] , identifier[base_env] , identifier[env] , identifier[loop_hparams] , identifier[policy_hparams] , identifier[planner_hparams] , identifier[model_dir] , identifier[policy_dir] , identifier[sampling_temp] , identifier[video_writers] ) keyword[if] identifier[eval_mode] == literal[string] : identifier[real_env] = identifier[base_env] . identifier[new_like] ( identifier[batch_size] = literal[int] ) identifier[stacked_env] = identifier[rl_utils] . identifier[BatchStackWrapper] ( identifier[real_env] , identifier[loop_hparams] . identifier[frame_stack_size] ) identifier[collect_frames_for_random_starts] ( identifier[real_env] , identifier[stacked_env] , identifier[agent] , identifier[loop_hparams] . identifier[frame_stack_size] , identifier[random_starts_step_limit] , identifier[log_every_steps] ) identifier[initial_frame_chooser] = identifier[rl_utils] . identifier[make_initial_frame_chooser] ( identifier[real_env] , identifier[loop_hparams] . identifier[frame_stack_size] , identifier[simulation_random_starts] = keyword[True] , identifier[simulation_flip_first_random_for_beginning] = keyword[False] , identifier[split] = keyword[None] , ) identifier[env_fn] = identifier[rl] . 
identifier[make_simulated_env_fn_from_hparams] ( identifier[real_env] , identifier[loop_hparams] , identifier[batch_size] = identifier[loop_hparams] . identifier[eval_batch_size] , identifier[initial_frame_chooser] = identifier[initial_frame_chooser] , identifier[model_dir] = identifier[model_dir] ) identifier[sim_env] = identifier[env_fn] ( identifier[in_graph] = keyword[False] ) identifier[env] = identifier[rl_utils] . identifier[BatchStackWrapper] ( identifier[sim_env] , identifier[loop_hparams] . identifier[frame_stack_size] ) identifier[kwargs] ={} keyword[if] keyword[not] identifier[agent] . identifier[records_own_videos] : identifier[kwargs] [ literal[string] ]= identifier[video_writers] identifier[step_limit] = identifier[base_env] . identifier[rl_env_max_episode_steps] keyword[if] identifier[step_limit] ==- literal[int] : identifier[step_limit] = keyword[None] identifier[rl_utils] . identifier[run_rollouts] ( identifier[env] , identifier[agent] , identifier[env] . identifier[reset] (), identifier[log_every_steps] = identifier[log_every_steps] , identifier[step_limit] = identifier[step_limit] ,** identifier[kwargs] ) keyword[if] identifier[eval_mode] == literal[string] : keyword[assert] identifier[len] ( identifier[base_env] . identifier[current_epoch_rollouts] ())== identifier[env] . identifier[batch_size] keyword[return] identifier[eval_fn]
def make_eval_fn_with_agent(agent_type, eval_mode, planner_hparams, model_dir, log_every_steps=None, video_writers=(), random_starts_step_limit=None): """Returns an out-of-graph eval_fn using the Agent API.""" def eval_fn(env, loop_hparams, policy_hparams, policy_dir, sampling_temp): """Eval function.""" base_env = env env = rl_utils.BatchStackWrapper(env, loop_hparams.frame_stack_size) agent = make_agent_from_hparams(agent_type, base_env, env, loop_hparams, policy_hparams, planner_hparams, model_dir, policy_dir, sampling_temp, video_writers) if eval_mode == 'agent_simulated': real_env = base_env.new_like(batch_size=1) stacked_env = rl_utils.BatchStackWrapper(real_env, loop_hparams.frame_stack_size) collect_frames_for_random_starts(real_env, stacked_env, agent, loop_hparams.frame_stack_size, random_starts_step_limit, log_every_steps) initial_frame_chooser = rl_utils.make_initial_frame_chooser(real_env, loop_hparams.frame_stack_size, simulation_random_starts=True, simulation_flip_first_random_for_beginning=False, split=None) env_fn = rl.make_simulated_env_fn_from_hparams(real_env, loop_hparams, batch_size=loop_hparams.eval_batch_size, initial_frame_chooser=initial_frame_chooser, model_dir=model_dir) sim_env = env_fn(in_graph=False) env = rl_utils.BatchStackWrapper(sim_env, loop_hparams.frame_stack_size) # depends on [control=['if'], data=[]] kwargs = {} if not agent.records_own_videos: kwargs['video_writers'] = video_writers # depends on [control=['if'], data=[]] step_limit = base_env.rl_env_max_episode_steps if step_limit == -1: step_limit = None # depends on [control=['if'], data=['step_limit']] rl_utils.run_rollouts(env, agent, env.reset(), log_every_steps=log_every_steps, step_limit=step_limit, **kwargs) if eval_mode == 'agent_real': assert len(base_env.current_epoch_rollouts()) == env.batch_size # depends on [control=['if'], data=[]] return eval_fn
def build_graph(self, data_paths, batch_size, graph_mod): """Builds generic graph for training or eval.""" tensors = GraphReferences() is_training = graph_mod == GraphMod.TRAIN if data_paths: _, tensors.examples = _util.read_examples( data_paths, batch_size, shuffle=is_training, num_epochs=None if is_training else 2) else: tensors.examples = tf.placeholder(tf.string, name='input', shape=(None,)) if graph_mod == GraphMod.PREDICT: inception_input, inception_embeddings = self.build_inception_graph() # Build the Inception graph. We later add final training layers # to this graph. This is currently used only for prediction. # For training, we use pre-processed data, so it is not needed. embeddings = inception_embeddings tensors.input_jpeg = inception_input else: # For training and evaluation we assume data is preprocessed, so the # inputs are tf-examples. # Generate placeholders for examples. with tf.name_scope('inputs'): feature_map = { 'image_uri': tf.FixedLenFeature( shape=[], dtype=tf.string, default_value=['']), # Some images may have no labels. For those, we assume a default # label. So the number of labels is label_count+1 for the default # label. 'label': tf.FixedLenFeature( shape=[1], dtype=tf.int64, default_value=[len(self.labels)]), 'embedding': tf.FixedLenFeature( shape=[BOTTLENECK_TENSOR_SIZE], dtype=tf.float32) } parsed = tf.parse_example(tensors.examples, features=feature_map) labels = tf.squeeze(parsed['label']) uris = tf.squeeze(parsed['image_uri']) embeddings = parsed['embedding'] # We assume a default label, so the total number of labels is equal to # label_count+1. all_labels_count = len(self.labels) + 1 with tf.name_scope('final_ops'): softmax, logits = self.add_final_training_ops( embeddings, all_labels_count, BOTTLENECK_TENSOR_SIZE, dropout_keep_prob=self.dropout if is_training else None) # Prediction is the index of the label with the highest score. We are # interested only in the top score. 
prediction = tf.argmax(softmax, 1) tensors.predictions = [prediction, softmax, embeddings] if graph_mod == GraphMod.PREDICT: return tensors with tf.name_scope('evaluate'): loss_value = loss(logits, labels) # Add to the Graph the Ops that calculate and apply gradients. if is_training: tensors.train, tensors.global_step = training(loss_value) else: tensors.global_step = tf.Variable(0, name='global_step', trainable=False) tensors.uris = uris # Add means across all batches. loss_updates, loss_op = _util.loss(loss_value) accuracy_updates, accuracy_op = _util.accuracy(logits, labels) if not is_training: tf.summary.scalar('accuracy', accuracy_op) tf.summary.scalar('loss', loss_op) tensors.metric_updates = loss_updates + accuracy_updates tensors.metric_values = [loss_op, accuracy_op] return tensors
def function[build_graph, parameter[self, data_paths, batch_size, graph_mod]]: constant[Builds generic graph for training or eval.] variable[tensors] assign[=] call[name[GraphReferences], parameter[]] variable[is_training] assign[=] compare[name[graph_mod] equal[==] name[GraphMod].TRAIN] if name[data_paths] begin[:] <ast.Tuple object at 0x7da20c6e5480> assign[=] call[name[_util].read_examples, parameter[name[data_paths], name[batch_size]]] if compare[name[graph_mod] equal[==] name[GraphMod].PREDICT] begin[:] <ast.Tuple object at 0x7da20c6e5870> assign[=] call[name[self].build_inception_graph, parameter[]] variable[embeddings] assign[=] name[inception_embeddings] name[tensors].input_jpeg assign[=] name[inception_input] variable[all_labels_count] assign[=] binary_operation[call[name[len], parameter[name[self].labels]] + constant[1]] with call[name[tf].name_scope, parameter[constant[final_ops]]] begin[:] <ast.Tuple object at 0x7da20c6e4ac0> assign[=] call[name[self].add_final_training_ops, parameter[name[embeddings], name[all_labels_count], name[BOTTLENECK_TENSOR_SIZE]]] variable[prediction] assign[=] call[name[tf].argmax, parameter[name[softmax], constant[1]]] name[tensors].predictions assign[=] list[[<ast.Name object at 0x7da20c6e5db0>, <ast.Name object at 0x7da20c6e70d0>, <ast.Name object at 0x7da20c6e59c0>]] if compare[name[graph_mod] equal[==] name[GraphMod].PREDICT] begin[:] return[name[tensors]] with call[name[tf].name_scope, parameter[constant[evaluate]]] begin[:] variable[loss_value] assign[=] call[name[loss], parameter[name[logits], name[labels]]] if name[is_training] begin[:] <ast.Tuple object at 0x7da18dc04460> assign[=] call[name[training], parameter[name[loss_value]]] <ast.Tuple object at 0x7da18dc05720> assign[=] call[name[_util].loss, parameter[name[loss_value]]] <ast.Tuple object at 0x7da18dc06020> assign[=] call[name[_util].accuracy, parameter[name[logits], name[labels]]] if <ast.UnaryOp object at 0x7da18dc07e20> begin[:] 
call[name[tf].summary.scalar, parameter[constant[accuracy], name[accuracy_op]]] call[name[tf].summary.scalar, parameter[constant[loss], name[loss_op]]] name[tensors].metric_updates assign[=] binary_operation[name[loss_updates] + name[accuracy_updates]] name[tensors].metric_values assign[=] list[[<ast.Name object at 0x7da18dc048e0>, <ast.Name object at 0x7da18dc06080>]] return[name[tensors]]
keyword[def] identifier[build_graph] ( identifier[self] , identifier[data_paths] , identifier[batch_size] , identifier[graph_mod] ): literal[string] identifier[tensors] = identifier[GraphReferences] () identifier[is_training] = identifier[graph_mod] == identifier[GraphMod] . identifier[TRAIN] keyword[if] identifier[data_paths] : identifier[_] , identifier[tensors] . identifier[examples] = identifier[_util] . identifier[read_examples] ( identifier[data_paths] , identifier[batch_size] , identifier[shuffle] = identifier[is_training] , identifier[num_epochs] = keyword[None] keyword[if] identifier[is_training] keyword[else] literal[int] ) keyword[else] : identifier[tensors] . identifier[examples] = identifier[tf] . identifier[placeholder] ( identifier[tf] . identifier[string] , identifier[name] = literal[string] , identifier[shape] =( keyword[None] ,)) keyword[if] identifier[graph_mod] == identifier[GraphMod] . identifier[PREDICT] : identifier[inception_input] , identifier[inception_embeddings] = identifier[self] . identifier[build_inception_graph] () identifier[embeddings] = identifier[inception_embeddings] identifier[tensors] . identifier[input_jpeg] = identifier[inception_input] keyword[else] : keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ): identifier[feature_map] ={ literal[string] : identifier[tf] . identifier[FixedLenFeature] ( identifier[shape] =[], identifier[dtype] = identifier[tf] . identifier[string] , identifier[default_value] =[ literal[string] ]), literal[string] : identifier[tf] . identifier[FixedLenFeature] ( identifier[shape] =[ literal[int] ], identifier[dtype] = identifier[tf] . identifier[int64] , identifier[default_value] =[ identifier[len] ( identifier[self] . identifier[labels] )]), literal[string] : identifier[tf] . identifier[FixedLenFeature] ( identifier[shape] =[ identifier[BOTTLENECK_TENSOR_SIZE] ], identifier[dtype] = identifier[tf] . identifier[float32] ) } identifier[parsed] = identifier[tf] . 
identifier[parse_example] ( identifier[tensors] . identifier[examples] , identifier[features] = identifier[feature_map] ) identifier[labels] = identifier[tf] . identifier[squeeze] ( identifier[parsed] [ literal[string] ]) identifier[uris] = identifier[tf] . identifier[squeeze] ( identifier[parsed] [ literal[string] ]) identifier[embeddings] = identifier[parsed] [ literal[string] ] identifier[all_labels_count] = identifier[len] ( identifier[self] . identifier[labels] )+ literal[int] keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ): identifier[softmax] , identifier[logits] = identifier[self] . identifier[add_final_training_ops] ( identifier[embeddings] , identifier[all_labels_count] , identifier[BOTTLENECK_TENSOR_SIZE] , identifier[dropout_keep_prob] = identifier[self] . identifier[dropout] keyword[if] identifier[is_training] keyword[else] keyword[None] ) identifier[prediction] = identifier[tf] . identifier[argmax] ( identifier[softmax] , literal[int] ) identifier[tensors] . identifier[predictions] =[ identifier[prediction] , identifier[softmax] , identifier[embeddings] ] keyword[if] identifier[graph_mod] == identifier[GraphMod] . identifier[PREDICT] : keyword[return] identifier[tensors] keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ): identifier[loss_value] = identifier[loss] ( identifier[logits] , identifier[labels] ) keyword[if] identifier[is_training] : identifier[tensors] . identifier[train] , identifier[tensors] . identifier[global_step] = identifier[training] ( identifier[loss_value] ) keyword[else] : identifier[tensors] . identifier[global_step] = identifier[tf] . identifier[Variable] ( literal[int] , identifier[name] = literal[string] , identifier[trainable] = keyword[False] ) identifier[tensors] . identifier[uris] = identifier[uris] identifier[loss_updates] , identifier[loss_op] = identifier[_util] . 
identifier[loss] ( identifier[loss_value] ) identifier[accuracy_updates] , identifier[accuracy_op] = identifier[_util] . identifier[accuracy] ( identifier[logits] , identifier[labels] ) keyword[if] keyword[not] identifier[is_training] : identifier[tf] . identifier[summary] . identifier[scalar] ( literal[string] , identifier[accuracy_op] ) identifier[tf] . identifier[summary] . identifier[scalar] ( literal[string] , identifier[loss_op] ) identifier[tensors] . identifier[metric_updates] = identifier[loss_updates] + identifier[accuracy_updates] identifier[tensors] . identifier[metric_values] =[ identifier[loss_op] , identifier[accuracy_op] ] keyword[return] identifier[tensors]
def build_graph(self, data_paths, batch_size, graph_mod): """Builds generic graph for training or eval.""" tensors = GraphReferences() is_training = graph_mod == GraphMod.TRAIN if data_paths: (_, tensors.examples) = _util.read_examples(data_paths, batch_size, shuffle=is_training, num_epochs=None if is_training else 2) # depends on [control=['if'], data=[]] else: tensors.examples = tf.placeholder(tf.string, name='input', shape=(None,)) if graph_mod == GraphMod.PREDICT: (inception_input, inception_embeddings) = self.build_inception_graph() # Build the Inception graph. We later add final training layers # to this graph. This is currently used only for prediction. # For training, we use pre-processed data, so it is not needed. embeddings = inception_embeddings tensors.input_jpeg = inception_input # depends on [control=['if'], data=[]] else: # For training and evaluation we assume data is preprocessed, so the # inputs are tf-examples. # Generate placeholders for examples. with tf.name_scope('inputs'): # Some images may have no labels. For those, we assume a default # label. So the number of labels is label_count+1 for the default # label. feature_map = {'image_uri': tf.FixedLenFeature(shape=[], dtype=tf.string, default_value=['']), 'label': tf.FixedLenFeature(shape=[1], dtype=tf.int64, default_value=[len(self.labels)]), 'embedding': tf.FixedLenFeature(shape=[BOTTLENECK_TENSOR_SIZE], dtype=tf.float32)} parsed = tf.parse_example(tensors.examples, features=feature_map) labels = tf.squeeze(parsed['label']) uris = tf.squeeze(parsed['image_uri']) embeddings = parsed['embedding'] # depends on [control=['with'], data=[]] # We assume a default label, so the total number of labels is equal to # label_count+1. 
all_labels_count = len(self.labels) + 1 with tf.name_scope('final_ops'): (softmax, logits) = self.add_final_training_ops(embeddings, all_labels_count, BOTTLENECK_TENSOR_SIZE, dropout_keep_prob=self.dropout if is_training else None) # depends on [control=['with'], data=[]] # Prediction is the index of the label with the highest score. We are # interested only in the top score. prediction = tf.argmax(softmax, 1) tensors.predictions = [prediction, softmax, embeddings] if graph_mod == GraphMod.PREDICT: return tensors # depends on [control=['if'], data=[]] with tf.name_scope('evaluate'): loss_value = loss(logits, labels) # depends on [control=['with'], data=[]] # Add to the Graph the Ops that calculate and apply gradients. if is_training: (tensors.train, tensors.global_step) = training(loss_value) # depends on [control=['if'], data=[]] else: tensors.global_step = tf.Variable(0, name='global_step', trainable=False) tensors.uris = uris # Add means across all batches. (loss_updates, loss_op) = _util.loss(loss_value) (accuracy_updates, accuracy_op) = _util.accuracy(logits, labels) if not is_training: tf.summary.scalar('accuracy', accuracy_op) tf.summary.scalar('loss', loss_op) # depends on [control=['if'], data=[]] tensors.metric_updates = loss_updates + accuracy_updates tensors.metric_values = [loss_op, accuracy_op] return tensors
def split_url(self, url): """Parse an IIIF API URL path into components. Will parse a URL or URL path that accords with either the parametrized or info API forms. Will raise an IIIFRequestError on failure. If self.identifier is set then url is assumed not to include the identifier. """ # clear data first identifier = self.identifier self.clear() # url must start with baseurl if set (including slash) if (self.baseurl is not None): (path, num) = re.subn('^' + self.baseurl, '', url, 1) if (num != 1): raise IIIFRequestError( text="Request URL does not start with base URL") url = path # Break up by path segments, count to decide format segs = url.split('/') if (identifier is not None): segs.insert(0, identifier) elif (self.allow_slashes_in_identifier): segs = self._allow_slashes_in_identifier_munger(segs) # Now have segments with identifier as first if (len(segs) > 5): raise IIIFRequestPathError( text="Request URL (%s) has too many path segments" % url) elif (len(segs) == 5): self.identifier = urlunquote(segs[0]) self.region = urlunquote(segs[1]) self.size = urlunquote(segs[2]) self.rotation = urlunquote(segs[3]) self.quality = self.strip_format(urlunquote(segs[4])) self.info = False elif (len(segs) == 2): self.identifier = urlunquote(segs[0]) info_name = self.strip_format(urlunquote(segs[1])) if (info_name != "info"): raise IIIFRequestError( text="Bad name for Image Information") if (self.api_version == '1.0'): if (self.format not in ['json', 'xml']): raise IIIFRequestError( text="Invalid format for Image Information (json and xml allowed)") elif (self.format != 'json'): raise IIIFRequestError( text="Invalid format for Image Information (only json allowed)") self.info = True elif (len(segs) == 1): self.identifier = urlunquote(segs[0]) raise IIIFRequestBaseURI() else: raise IIIFRequestPathError( text="Bad number of path segments in request") return(self)
def function[split_url, parameter[self, url]]: constant[Parse an IIIF API URL path into components. Will parse a URL or URL path that accords with either the parametrized or info API forms. Will raise an IIIFRequestError on failure. If self.identifier is set then url is assumed not to include the identifier. ] variable[identifier] assign[=] name[self].identifier call[name[self].clear, parameter[]] if compare[name[self].baseurl is_not constant[None]] begin[:] <ast.Tuple object at 0x7da20c6c58d0> assign[=] call[name[re].subn, parameter[binary_operation[constant[^] + name[self].baseurl], constant[], name[url], constant[1]]] if compare[name[num] not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da20c6c58a0> variable[url] assign[=] name[path] variable[segs] assign[=] call[name[url].split, parameter[constant[/]]] if compare[name[identifier] is_not constant[None]] begin[:] call[name[segs].insert, parameter[constant[0], name[identifier]]] if compare[call[name[len], parameter[name[segs]]] greater[>] constant[5]] begin[:] <ast.Raise object at 0x7da20c6c46d0> return[name[self]]
keyword[def] identifier[split_url] ( identifier[self] , identifier[url] ): literal[string] identifier[identifier] = identifier[self] . identifier[identifier] identifier[self] . identifier[clear] () keyword[if] ( identifier[self] . identifier[baseurl] keyword[is] keyword[not] keyword[None] ): ( identifier[path] , identifier[num] )= identifier[re] . identifier[subn] ( literal[string] + identifier[self] . identifier[baseurl] , literal[string] , identifier[url] , literal[int] ) keyword[if] ( identifier[num] != literal[int] ): keyword[raise] identifier[IIIFRequestError] ( identifier[text] = literal[string] ) identifier[url] = identifier[path] identifier[segs] = identifier[url] . identifier[split] ( literal[string] ) keyword[if] ( identifier[identifier] keyword[is] keyword[not] keyword[None] ): identifier[segs] . identifier[insert] ( literal[int] , identifier[identifier] ) keyword[elif] ( identifier[self] . identifier[allow_slashes_in_identifier] ): identifier[segs] = identifier[self] . identifier[_allow_slashes_in_identifier_munger] ( identifier[segs] ) keyword[if] ( identifier[len] ( identifier[segs] )> literal[int] ): keyword[raise] identifier[IIIFRequestPathError] ( identifier[text] = literal[string] % identifier[url] ) keyword[elif] ( identifier[len] ( identifier[segs] )== literal[int] ): identifier[self] . identifier[identifier] = identifier[urlunquote] ( identifier[segs] [ literal[int] ]) identifier[self] . identifier[region] = identifier[urlunquote] ( identifier[segs] [ literal[int] ]) identifier[self] . identifier[size] = identifier[urlunquote] ( identifier[segs] [ literal[int] ]) identifier[self] . identifier[rotation] = identifier[urlunquote] ( identifier[segs] [ literal[int] ]) identifier[self] . identifier[quality] = identifier[self] . identifier[strip_format] ( identifier[urlunquote] ( identifier[segs] [ literal[int] ])) identifier[self] . 
identifier[info] = keyword[False] keyword[elif] ( identifier[len] ( identifier[segs] )== literal[int] ): identifier[self] . identifier[identifier] = identifier[urlunquote] ( identifier[segs] [ literal[int] ]) identifier[info_name] = identifier[self] . identifier[strip_format] ( identifier[urlunquote] ( identifier[segs] [ literal[int] ])) keyword[if] ( identifier[info_name] != literal[string] ): keyword[raise] identifier[IIIFRequestError] ( identifier[text] = literal[string] ) keyword[if] ( identifier[self] . identifier[api_version] == literal[string] ): keyword[if] ( identifier[self] . identifier[format] keyword[not] keyword[in] [ literal[string] , literal[string] ]): keyword[raise] identifier[IIIFRequestError] ( identifier[text] = literal[string] ) keyword[elif] ( identifier[self] . identifier[format] != literal[string] ): keyword[raise] identifier[IIIFRequestError] ( identifier[text] = literal[string] ) identifier[self] . identifier[info] = keyword[True] keyword[elif] ( identifier[len] ( identifier[segs] )== literal[int] ): identifier[self] . identifier[identifier] = identifier[urlunquote] ( identifier[segs] [ literal[int] ]) keyword[raise] identifier[IIIFRequestBaseURI] () keyword[else] : keyword[raise] identifier[IIIFRequestPathError] ( identifier[text] = literal[string] ) keyword[return] ( identifier[self] )
def split_url(self, url): """Parse an IIIF API URL path into components. Will parse a URL or URL path that accords with either the parametrized or info API forms. Will raise an IIIFRequestError on failure. If self.identifier is set then url is assumed not to include the identifier. """ # clear data first identifier = self.identifier self.clear() # url must start with baseurl if set (including slash) if self.baseurl is not None: (path, num) = re.subn('^' + self.baseurl, '', url, 1) if num != 1: raise IIIFRequestError(text='Request URL does not start with base URL') # depends on [control=['if'], data=[]] url = path # depends on [control=['if'], data=[]] # Break up by path segments, count to decide format segs = url.split('/') if identifier is not None: segs.insert(0, identifier) # depends on [control=['if'], data=['identifier']] elif self.allow_slashes_in_identifier: segs = self._allow_slashes_in_identifier_munger(segs) # depends on [control=['if'], data=[]] # Now have segments with identifier as first if len(segs) > 5: raise IIIFRequestPathError(text='Request URL (%s) has too many path segments' % url) # depends on [control=['if'], data=[]] elif len(segs) == 5: self.identifier = urlunquote(segs[0]) self.region = urlunquote(segs[1]) self.size = urlunquote(segs[2]) self.rotation = urlunquote(segs[3]) self.quality = self.strip_format(urlunquote(segs[4])) self.info = False # depends on [control=['if'], data=[]] elif len(segs) == 2: self.identifier = urlunquote(segs[0]) info_name = self.strip_format(urlunquote(segs[1])) if info_name != 'info': raise IIIFRequestError(text='Bad name for Image Information') # depends on [control=['if'], data=[]] if self.api_version == '1.0': if self.format not in ['json', 'xml']: raise IIIFRequestError(text='Invalid format for Image Information (json and xml allowed)') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif self.format != 'json': raise IIIFRequestError(text='Invalid format for Image Information 
(only json allowed)') # depends on [control=['if'], data=[]] self.info = True # depends on [control=['if'], data=[]] elif len(segs) == 1: self.identifier = urlunquote(segs[0]) raise IIIFRequestBaseURI() # depends on [control=['if'], data=[]] else: raise IIIFRequestPathError(text='Bad number of path segments in request') return self
def remove_core_element(self, model): """Remove respective core element of handed transition model :param TransitionModel model: Transition model which core element should be removed :return: """ assert model.transition.parent is self.model.state or model.transition.parent is self.model.parent.state gui_helper_state_machine.delete_core_element_of_model(model)
def function[remove_core_element, parameter[self, model]]: constant[Remove respective core element of handed transition model :param TransitionModel model: Transition model which core element should be removed :return: ] assert[<ast.BoolOp object at 0x7da1b1c7d4b0>] call[name[gui_helper_state_machine].delete_core_element_of_model, parameter[name[model]]]
keyword[def] identifier[remove_core_element] ( identifier[self] , identifier[model] ): literal[string] keyword[assert] identifier[model] . identifier[transition] . identifier[parent] keyword[is] identifier[self] . identifier[model] . identifier[state] keyword[or] identifier[model] . identifier[transition] . identifier[parent] keyword[is] identifier[self] . identifier[model] . identifier[parent] . identifier[state] identifier[gui_helper_state_machine] . identifier[delete_core_element_of_model] ( identifier[model] )
def remove_core_element(self, model): """Remove respective core element of handed transition model :param TransitionModel model: Transition model which core element should be removed :return: """ assert model.transition.parent is self.model.state or model.transition.parent is self.model.parent.state gui_helper_state_machine.delete_core_element_of_model(model)
def get_hky_pij(t, frequencies, kappa): """ Calculates the probability matrix of substitutions i->j over time t, with HKY model [Hasegawa, Kishino and Yano 1985], given state frequencies and kappa. :param t: time :type t: float :param kappa: kappa parameter for HKY model :type kappa: float :param frequencies: array of state frequencies \pi_i :type frequencies: numpy.array :return: probability matrix :rtype: numpy.ndarray """ pi_a, pi_c, pi_g, pi_t = frequencies pi_ag = pi_a + pi_g pi_ct = pi_c + pi_t beta = .5 / (pi_ag * pi_ct + kappa * (pi_a * pi_g + pi_c * pi_t)) exp_min_beta_t = np.exp(-beta * t) exp_ct = np.exp(-beta * t * (1. + pi_ct * (kappa - 1.))) / pi_ct exp_ag = np.exp(-beta * t * (1. + pi_ag * (kappa - 1.))) / pi_ag ct_sum = (pi_ct + pi_ag * exp_min_beta_t) / pi_ct ag_sum = (pi_ag + pi_ct * exp_min_beta_t) / pi_ag p = np.ones((4, 4), dtype=np.float64) * (1 - exp_min_beta_t) p *= frequencies p[T, T] = pi_t * ct_sum + pi_c * exp_ct p[T, C] = pi_c * ct_sum - pi_c * exp_ct p[C, T] = pi_t * ct_sum - pi_t * exp_ct p[C, C] = pi_c * ct_sum + pi_t * exp_ct p[A, A] = pi_a * ag_sum + pi_g * exp_ag p[A, G] = pi_g * ag_sum - pi_g * exp_ag p[G, A] = pi_a * ag_sum - pi_a * exp_ag p[G, G] = pi_g * ag_sum + pi_a * exp_ag return p
def function[get_hky_pij, parameter[t, frequencies, kappa]]: constant[ Calculates the probability matrix of substitutions i->j over time t, with HKY model [Hasegawa, Kishino and Yano 1985], given state frequencies and kappa. :param t: time :type t: float :param kappa: kappa parameter for HKY model :type kappa: float :param frequencies: array of state frequencies \pi_i :type frequencies: numpy.array :return: probability matrix :rtype: numpy.ndarray ] <ast.Tuple object at 0x7da20c6e5420> assign[=] name[frequencies] variable[pi_ag] assign[=] binary_operation[name[pi_a] + name[pi_g]] variable[pi_ct] assign[=] binary_operation[name[pi_c] + name[pi_t]] variable[beta] assign[=] binary_operation[constant[0.5] / binary_operation[binary_operation[name[pi_ag] * name[pi_ct]] + binary_operation[name[kappa] * binary_operation[binary_operation[name[pi_a] * name[pi_g]] + binary_operation[name[pi_c] * name[pi_t]]]]]] variable[exp_min_beta_t] assign[=] call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da20c6e6ec0> * name[t]]]] variable[exp_ct] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c6e5b10> * name[t]] * binary_operation[constant[1.0] + binary_operation[name[pi_ct] * binary_operation[name[kappa] - constant[1.0]]]]]]] / name[pi_ct]] variable[exp_ag] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da2054a7520> * name[t]] * binary_operation[constant[1.0] + binary_operation[name[pi_ag] * binary_operation[name[kappa] - constant[1.0]]]]]]] / name[pi_ag]] variable[ct_sum] assign[=] binary_operation[binary_operation[name[pi_ct] + binary_operation[name[pi_ag] * name[exp_min_beta_t]]] / name[pi_ct]] variable[ag_sum] assign[=] binary_operation[binary_operation[name[pi_ag] + binary_operation[name[pi_ct] * name[exp_min_beta_t]]] / name[pi_ag]] variable[p] assign[=] binary_operation[call[name[np].ones, 
parameter[tuple[[<ast.Constant object at 0x7da20c7caf50>, <ast.Constant object at 0x7da20c7cb700>]]]] * binary_operation[constant[1] - name[exp_min_beta_t]]] <ast.AugAssign object at 0x7da20c7c8340> call[name[p]][tuple[[<ast.Name object at 0x7da20c7c82e0>, <ast.Name object at 0x7da20c7cab30>]]] assign[=] binary_operation[binary_operation[name[pi_t] * name[ct_sum]] + binary_operation[name[pi_c] * name[exp_ct]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7ca110>, <ast.Name object at 0x7da20c7ca380>]]] assign[=] binary_operation[binary_operation[name[pi_c] * name[ct_sum]] - binary_operation[name[pi_c] * name[exp_ct]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7c9bd0>, <ast.Name object at 0x7da20c7ca740>]]] assign[=] binary_operation[binary_operation[name[pi_t] * name[ct_sum]] - binary_operation[name[pi_t] * name[exp_ct]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7c9ba0>, <ast.Name object at 0x7da20c7cbe80>]]] assign[=] binary_operation[binary_operation[name[pi_c] * name[ct_sum]] + binary_operation[name[pi_t] * name[exp_ct]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7ca080>, <ast.Name object at 0x7da20c7c9f60>]]] assign[=] binary_operation[binary_operation[name[pi_a] * name[ag_sum]] + binary_operation[name[pi_g] * name[exp_ag]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7ca770>, <ast.Name object at 0x7da20c7c85b0>]]] assign[=] binary_operation[binary_operation[name[pi_g] * name[ag_sum]] - binary_operation[name[pi_g] * name[exp_ag]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7cbee0>, <ast.Name object at 0x7da20c7ca050>]]] assign[=] binary_operation[binary_operation[name[pi_a] * name[ag_sum]] - binary_operation[name[pi_a] * name[exp_ag]]] call[name[p]][tuple[[<ast.Name object at 0x7da20c7c9d80>, <ast.Name object at 0x7da20c7ca410>]]] assign[=] binary_operation[binary_operation[name[pi_g] * name[ag_sum]] + binary_operation[name[pi_a] * name[exp_ag]]] return[name[p]]
keyword[def] identifier[get_hky_pij] ( identifier[t] , identifier[frequencies] , identifier[kappa] ): literal[string] identifier[pi_a] , identifier[pi_c] , identifier[pi_g] , identifier[pi_t] = identifier[frequencies] identifier[pi_ag] = identifier[pi_a] + identifier[pi_g] identifier[pi_ct] = identifier[pi_c] + identifier[pi_t] identifier[beta] = literal[int] /( identifier[pi_ag] * identifier[pi_ct] + identifier[kappa] *( identifier[pi_a] * identifier[pi_g] + identifier[pi_c] * identifier[pi_t] )) identifier[exp_min_beta_t] = identifier[np] . identifier[exp] (- identifier[beta] * identifier[t] ) identifier[exp_ct] = identifier[np] . identifier[exp] (- identifier[beta] * identifier[t] *( literal[int] + identifier[pi_ct] *( identifier[kappa] - literal[int] )))/ identifier[pi_ct] identifier[exp_ag] = identifier[np] . identifier[exp] (- identifier[beta] * identifier[t] *( literal[int] + identifier[pi_ag] *( identifier[kappa] - literal[int] )))/ identifier[pi_ag] identifier[ct_sum] =( identifier[pi_ct] + identifier[pi_ag] * identifier[exp_min_beta_t] )/ identifier[pi_ct] identifier[ag_sum] =( identifier[pi_ag] + identifier[pi_ct] * identifier[exp_min_beta_t] )/ identifier[pi_ag] identifier[p] = identifier[np] . identifier[ones] (( literal[int] , literal[int] ), identifier[dtype] = identifier[np] . 
identifier[float64] )*( literal[int] - identifier[exp_min_beta_t] ) identifier[p] *= identifier[frequencies] identifier[p] [ identifier[T] , identifier[T] ]= identifier[pi_t] * identifier[ct_sum] + identifier[pi_c] * identifier[exp_ct] identifier[p] [ identifier[T] , identifier[C] ]= identifier[pi_c] * identifier[ct_sum] - identifier[pi_c] * identifier[exp_ct] identifier[p] [ identifier[C] , identifier[T] ]= identifier[pi_t] * identifier[ct_sum] - identifier[pi_t] * identifier[exp_ct] identifier[p] [ identifier[C] , identifier[C] ]= identifier[pi_c] * identifier[ct_sum] + identifier[pi_t] * identifier[exp_ct] identifier[p] [ identifier[A] , identifier[A] ]= identifier[pi_a] * identifier[ag_sum] + identifier[pi_g] * identifier[exp_ag] identifier[p] [ identifier[A] , identifier[G] ]= identifier[pi_g] * identifier[ag_sum] - identifier[pi_g] * identifier[exp_ag] identifier[p] [ identifier[G] , identifier[A] ]= identifier[pi_a] * identifier[ag_sum] - identifier[pi_a] * identifier[exp_ag] identifier[p] [ identifier[G] , identifier[G] ]= identifier[pi_g] * identifier[ag_sum] + identifier[pi_a] * identifier[exp_ag] keyword[return] identifier[p]
def get_hky_pij(t, frequencies, kappa): """ Calculates the probability matrix of substitutions i->j over time t, with HKY model [Hasegawa, Kishino and Yano 1985], given state frequencies and kappa. :param t: time :type t: float :param kappa: kappa parameter for HKY model :type kappa: float :param frequencies: array of state frequencies \\pi_i :type frequencies: numpy.array :return: probability matrix :rtype: numpy.ndarray """ (pi_a, pi_c, pi_g, pi_t) = frequencies pi_ag = pi_a + pi_g pi_ct = pi_c + pi_t beta = 0.5 / (pi_ag * pi_ct + kappa * (pi_a * pi_g + pi_c * pi_t)) exp_min_beta_t = np.exp(-beta * t) exp_ct = np.exp(-beta * t * (1.0 + pi_ct * (kappa - 1.0))) / pi_ct exp_ag = np.exp(-beta * t * (1.0 + pi_ag * (kappa - 1.0))) / pi_ag ct_sum = (pi_ct + pi_ag * exp_min_beta_t) / pi_ct ag_sum = (pi_ag + pi_ct * exp_min_beta_t) / pi_ag p = np.ones((4, 4), dtype=np.float64) * (1 - exp_min_beta_t) p *= frequencies p[T, T] = pi_t * ct_sum + pi_c * exp_ct p[T, C] = pi_c * ct_sum - pi_c * exp_ct p[C, T] = pi_t * ct_sum - pi_t * exp_ct p[C, C] = pi_c * ct_sum + pi_t * exp_ct p[A, A] = pi_a * ag_sum + pi_g * exp_ag p[A, G] = pi_g * ag_sum - pi_g * exp_ag p[G, A] = pi_a * ag_sum - pi_a * exp_ag p[G, G] = pi_g * ag_sum + pi_a * exp_ag return p
def _join(*args): """Join S3 bucket args together. Remove empty entries and strip left-leading ``/`` """ return delimiter.join(filter(lambda s: s != '', map(lambda s: s.lstrip(delimiter), args)))
def function[_join, parameter[]]: constant[Join S3 bucket args together. Remove empty entries and strip left-leading ``/`` ] return[call[name[delimiter].join, parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da20c7cbfa0>, call[name[map], parameter[<ast.Lambda object at 0x7da20c7cbc70>, name[args]]]]]]]]
keyword[def] identifier[_join] (* identifier[args] ): literal[string] keyword[return] identifier[delimiter] . identifier[join] ( identifier[filter] ( keyword[lambda] identifier[s] : identifier[s] != literal[string] , identifier[map] ( keyword[lambda] identifier[s] : identifier[s] . identifier[lstrip] ( identifier[delimiter] ), identifier[args] )))
def _join(*args): """Join S3 bucket args together. Remove empty entries and strip left-leading ``/`` """ return delimiter.join(filter(lambda s: s != '', map(lambda s: s.lstrip(delimiter), args)))
def _StructPackDecoder(wire_type, format):
    """Return a constructor for a decoder for a fixed-width field.

    Args:
      wire_type: The field's wire type.
      format: The format string to pass to struct.unpack().
    """

    width = struct.calcsize(format)
    unpack = struct.unpack

    # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
    # but not enough to make a significant difference.

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.

    def InnerDecode(buffer, pos):
        # Decode one fixed-width value starting at ``pos``.
        end = pos + width
        (value,) = unpack(format, buffer[pos:end])
        return (value, end)

    return _SimpleDecoder(wire_type, InnerDecode)
def function[_StructPackDecoder, parameter[wire_type, format]]: constant[Return a constructor for a decoder for a fixed-width field. Args: wire_type: The field's wire type. format: The format string to pass to struct.unpack(). ] variable[value_size] assign[=] call[name[struct].calcsize, parameter[name[format]]] variable[local_unpack] assign[=] name[struct].unpack def function[InnerDecode, parameter[buffer, pos]]: variable[new_pos] assign[=] binary_operation[name[pos] + name[value_size]] variable[result] assign[=] call[call[name[local_unpack], parameter[name[format], call[name[buffer]][<ast.Slice object at 0x7da1b2065f30>]]]][constant[0]] return[tuple[[<ast.Name object at 0x7da1b20ef490>, <ast.Name object at 0x7da1b20ef370>]]] return[call[name[_SimpleDecoder], parameter[name[wire_type], name[InnerDecode]]]]
keyword[def] identifier[_StructPackDecoder] ( identifier[wire_type] , identifier[format] ): literal[string] identifier[value_size] = identifier[struct] . identifier[calcsize] ( identifier[format] ) identifier[local_unpack] = identifier[struct] . identifier[unpack] keyword[def] identifier[InnerDecode] ( identifier[buffer] , identifier[pos] ): identifier[new_pos] = identifier[pos] + identifier[value_size] identifier[result] = identifier[local_unpack] ( identifier[format] , identifier[buffer] [ identifier[pos] : identifier[new_pos] ])[ literal[int] ] keyword[return] ( identifier[result] , identifier[new_pos] ) keyword[return] identifier[_SimpleDecoder] ( identifier[wire_type] , identifier[InnerDecode] )
def _StructPackDecoder(wire_type, format): """Return a constructor for a decoder for a fixed-width field. Args: wire_type: The field's wire type. format: The format string to pass to struct.unpack(). """ value_size = struct.calcsize(format) local_unpack = struct.unpack # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but # not enough to make a significant difference. # Note that we expect someone up-stack to catch struct.error and convert # it to _DecodeError -- this way we don't have to set up exception- # handling blocks every time we parse one value. def InnerDecode(buffer, pos): new_pos = pos + value_size result = local_unpack(format, buffer[pos:new_pos])[0] return (result, new_pos) return _SimpleDecoder(wire_type, InnerDecode)
def p_expr_shl_expr(p):
    """ expr : expr SHL expr
    """
    # NOTE: the docstring above is the ply grammar rule and must not change.
    if p[1] is None or p[3] is None:
        p[0] = None
        return

    # Bit-shifting is an integer operation: coerce a float/fixed left operand.
    if p[1].type_ in (TYPE.float_, TYPE.fixed):
        p[1] = make_typecast(TYPE.ulong, p[1], p.lineno(2))

    shift_amount = make_typecast(TYPE.ubyte, p[3], p.lineno(2))
    p[0] = make_binary(p.lineno(2), 'SHL', p[1], shift_amount,
                       lambda x, y: x << y)
def function[p_expr_shl_expr, parameter[p]]: constant[ expr : expr SHL expr ] if <ast.BoolOp object at 0x7da1b061a620> begin[:] call[name[p]][constant[0]] assign[=] constant[None] return[None] if compare[call[name[p]][constant[1]].type_ in tuple[[<ast.Attribute object at 0x7da1b0619480>, <ast.Attribute object at 0x7da1b061a860>]]] begin[:] call[name[p]][constant[1]] assign[=] call[name[make_typecast], parameter[name[TYPE].ulong, call[name[p]][constant[1]], call[name[p].lineno, parameter[constant[2]]]]] call[name[p]][constant[0]] assign[=] call[name[make_binary], parameter[call[name[p].lineno, parameter[constant[2]]], constant[SHL], call[name[p]][constant[1]], call[name[make_typecast], parameter[name[TYPE].ubyte, call[name[p]][constant[3]], call[name[p].lineno, parameter[constant[2]]]]], <ast.Lambda object at 0x7da18f58e2f0>]]
keyword[def] identifier[p_expr_shl_expr] ( identifier[p] ): literal[string] keyword[if] identifier[p] [ literal[int] ] keyword[is] keyword[None] keyword[or] identifier[p] [ literal[int] ] keyword[is] keyword[None] : identifier[p] [ literal[int] ]= keyword[None] keyword[return] keyword[if] identifier[p] [ literal[int] ]. identifier[type_] keyword[in] ( identifier[TYPE] . identifier[float_] , identifier[TYPE] . identifier[fixed] ): identifier[p] [ literal[int] ]= identifier[make_typecast] ( identifier[TYPE] . identifier[ulong] , identifier[p] [ literal[int] ], identifier[p] . identifier[lineno] ( literal[int] )) identifier[p] [ literal[int] ]= identifier[make_binary] ( identifier[p] . identifier[lineno] ( literal[int] ), literal[string] , identifier[p] [ literal[int] ], identifier[make_typecast] ( identifier[TYPE] . identifier[ubyte] , identifier[p] [ literal[int] ], identifier[p] . identifier[lineno] ( literal[int] )), keyword[lambda] identifier[x] , identifier[y] : identifier[x] << identifier[y] )
def p_expr_shl_expr(p): """ expr : expr SHL expr """ if p[1] is None or p[3] is None: p[0] = None return # depends on [control=['if'], data=[]] if p[1].type_ in (TYPE.float_, TYPE.fixed): p[1] = make_typecast(TYPE.ulong, p[1], p.lineno(2)) # depends on [control=['if'], data=[]] p[0] = make_binary(p.lineno(2), 'SHL', p[1], make_typecast(TYPE.ubyte, p[3], p.lineno(2)), lambda x, y: x << y)
def get_auth():
    """Return a ``(username, token)`` tuple for authenticating a user

    Credentials are taken from the environment when available, otherwise
    from the ``[user]`` section of ``~/.gitconfig``.

    If not successful raise ``GithubException``.
    """
    auth = get_auth_from_env()
    if auth[0] and auth[1]:
        return auth
    config = os.path.join(os.path.expanduser("~"), '.gitconfig')
    if not os.path.isfile(config):
        raise GithubException('No .gitconfig available')
    parser = configparser.ConfigParser()
    parser.read(config)
    # Guard clauses instead of nested if/else: each missing piece of
    # configuration raises its own explicit error.
    if 'user' not in parser:
        raise GithubException('No user section in %s' % config)
    user = parser['user']
    if 'username' not in user:
        raise GithubException('Specify username in %s user '
                              'section' % config)
    if 'token' not in user:
        raise GithubException('Specify token in %s user section' % config)
    return user['username'], user['token']
def function[get_auth, parameter[]]: constant[Return a tuple for authenticating a user If not successful raise ``AgileError``. ] variable[auth] assign[=] call[name[get_auth_from_env], parameter[]] if <ast.BoolOp object at 0x7da18f09d3f0> begin[:] return[name[auth]] variable[home] assign[=] call[name[os].path.expanduser, parameter[constant[~]]] variable[config] assign[=] call[name[os].path.join, parameter[name[home], constant[.gitconfig]]] if <ast.UnaryOp object at 0x7da18f09c130> begin[:] <ast.Raise object at 0x7da18f09f5b0> variable[parser] assign[=] call[name[configparser].ConfigParser, parameter[]] call[name[parser].read, parameter[name[config]]] if compare[constant[user] in name[parser]] begin[:] variable[user] assign[=] call[name[parser]][constant[user]] if compare[constant[username] <ast.NotIn object at 0x7da2590d7190> name[user]] begin[:] <ast.Raise object at 0x7da20c6ab400> if compare[constant[token] <ast.NotIn object at 0x7da2590d7190> name[user]] begin[:] <ast.Raise object at 0x7da20c6ab970> return[tuple[[<ast.Subscript object at 0x7da20c6aa650>, <ast.Subscript object at 0x7da20c6aa140>]]]
keyword[def] identifier[get_auth] (): literal[string] identifier[auth] = identifier[get_auth_from_env] () keyword[if] identifier[auth] [ literal[int] ] keyword[and] identifier[auth] [ literal[int] ]: keyword[return] identifier[auth] identifier[home] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ) identifier[config] = identifier[os] . identifier[path] . identifier[join] ( identifier[home] , literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[config] ): keyword[raise] identifier[GithubException] ( literal[string] ) identifier[parser] = identifier[configparser] . identifier[ConfigParser] () identifier[parser] . identifier[read] ( identifier[config] ) keyword[if] literal[string] keyword[in] identifier[parser] : identifier[user] = identifier[parser] [ literal[string] ] keyword[if] literal[string] keyword[not] keyword[in] identifier[user] : keyword[raise] identifier[GithubException] ( literal[string] literal[string] % identifier[config] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[user] : keyword[raise] identifier[GithubException] ( literal[string] % identifier[config] ) keyword[return] identifier[user] [ literal[string] ], identifier[user] [ literal[string] ] keyword[else] : keyword[raise] identifier[GithubException] ( literal[string] % identifier[config] )
def get_auth(): """Return a tuple for authenticating a user If not successful raise ``AgileError``. """ auth = get_auth_from_env() if auth[0] and auth[1]: return auth # depends on [control=['if'], data=[]] home = os.path.expanduser('~') config = os.path.join(home, '.gitconfig') if not os.path.isfile(config): raise GithubException('No .gitconfig available') # depends on [control=['if'], data=[]] parser = configparser.ConfigParser() parser.read(config) if 'user' in parser: user = parser['user'] if 'username' not in user: raise GithubException('Specify username in %s user section' % config) # depends on [control=['if'], data=[]] if 'token' not in user: raise GithubException('Specify token in %s user section' % config) # depends on [control=['if'], data=[]] return (user['username'], user['token']) # depends on [control=['if'], data=['parser']] else: raise GithubException('No user section in %s' % config)
def expressions(self):
    """ list(Expression): List of the expressions """
    if self._expressions is None:
        # Lazily cache the truthy expressions of all nodes.
        self._expressions = [node.expression
                             for node in self.nodes
                             if node.expression]
    return self._expressions
def function[expressions, parameter[self]]: constant[ list(Expression): List of the expressions ] if compare[name[self]._expressions is constant[None]] begin[:] variable[expressions] assign[=] <ast.ListComp object at 0x7da18f09f3a0> variable[expressions] assign[=] <ast.ListComp object at 0x7da18f09fc70> name[self]._expressions assign[=] name[expressions] return[name[self]._expressions]
keyword[def] identifier[expressions] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_expressions] keyword[is] keyword[None] : identifier[expressions] =[ identifier[n] . identifier[expression] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[nodes] ] identifier[expressions] =[ identifier[e] keyword[for] identifier[e] keyword[in] identifier[expressions] keyword[if] identifier[e] ] identifier[self] . identifier[_expressions] = identifier[expressions] keyword[return] identifier[self] . identifier[_expressions]
def expressions(self): """ list(Expression): List of the expressions """ if self._expressions is None: expressions = [n.expression for n in self.nodes] expressions = [e for e in expressions if e] self._expressions = expressions # depends on [control=['if'], data=[]] return self._expressions
def _set_query_data_slow(self, page): """ set more expensive action=query response data """ categories = page.get('categories') if categories: self.data['categories'] = [x['title'] for x in categories] if page.get('contributors'): contributors = page.get('contributors') or 0 anoncontributors = page.get('anoncontributors') or 0 if isinstance(contributors, list): contributors = len(contributors) self.data['contributors'] = contributors + anoncontributors files = page.get('images') # really, these are FILES if files: self.data['files'] = [x['title'] for x in files] languages = page.get('langlinks') if languages: self.data['languages'] = languages pageviews = page.get('pageviews') if pageviews: values = [x for x in pageviews.values() if x] if values: self.data['views'] = int(sum(values) / len(values)) else: self.data['views'] = 0
def function[_set_query_data_slow, parameter[self, page]]: constant[ set more expensive action=query response data ] variable[categories] assign[=] call[name[page].get, parameter[constant[categories]]] if name[categories] begin[:] call[name[self].data][constant[categories]] assign[=] <ast.ListComp object at 0x7da1b12b53f0> if call[name[page].get, parameter[constant[contributors]]] begin[:] variable[contributors] assign[=] <ast.BoolOp object at 0x7da1b12b6a70> variable[anoncontributors] assign[=] <ast.BoolOp object at 0x7da1b12b7310> if call[name[isinstance], parameter[name[contributors], name[list]]] begin[:] variable[contributors] assign[=] call[name[len], parameter[name[contributors]]] call[name[self].data][constant[contributors]] assign[=] binary_operation[name[contributors] + name[anoncontributors]] variable[files] assign[=] call[name[page].get, parameter[constant[images]]] if name[files] begin[:] call[name[self].data][constant[files]] assign[=] <ast.ListComp object at 0x7da1b138e080> variable[languages] assign[=] call[name[page].get, parameter[constant[langlinks]]] if name[languages] begin[:] call[name[self].data][constant[languages]] assign[=] name[languages] variable[pageviews] assign[=] call[name[page].get, parameter[constant[pageviews]]] if name[pageviews] begin[:] variable[values] assign[=] <ast.ListComp object at 0x7da1b138d6c0> if name[values] begin[:] call[name[self].data][constant[views]] assign[=] call[name[int], parameter[binary_operation[call[name[sum], parameter[name[values]]] / call[name[len], parameter[name[values]]]]]]
keyword[def] identifier[_set_query_data_slow] ( identifier[self] , identifier[page] ): literal[string] identifier[categories] = identifier[page] . identifier[get] ( literal[string] ) keyword[if] identifier[categories] : identifier[self] . identifier[data] [ literal[string] ]=[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[categories] ] keyword[if] identifier[page] . identifier[get] ( literal[string] ): identifier[contributors] = identifier[page] . identifier[get] ( literal[string] ) keyword[or] literal[int] identifier[anoncontributors] = identifier[page] . identifier[get] ( literal[string] ) keyword[or] literal[int] keyword[if] identifier[isinstance] ( identifier[contributors] , identifier[list] ): identifier[contributors] = identifier[len] ( identifier[contributors] ) identifier[self] . identifier[data] [ literal[string] ]= identifier[contributors] + identifier[anoncontributors] identifier[files] = identifier[page] . identifier[get] ( literal[string] ) keyword[if] identifier[files] : identifier[self] . identifier[data] [ literal[string] ]=[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[files] ] identifier[languages] = identifier[page] . identifier[get] ( literal[string] ) keyword[if] identifier[languages] : identifier[self] . identifier[data] [ literal[string] ]= identifier[languages] identifier[pageviews] = identifier[page] . identifier[get] ( literal[string] ) keyword[if] identifier[pageviews] : identifier[values] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[pageviews] . identifier[values] () keyword[if] identifier[x] ] keyword[if] identifier[values] : identifier[self] . identifier[data] [ literal[string] ]= identifier[int] ( identifier[sum] ( identifier[values] )/ identifier[len] ( identifier[values] )) keyword[else] : identifier[self] . identifier[data] [ literal[string] ]= literal[int]
def _set_query_data_slow(self, page): """ set more expensive action=query response data """ categories = page.get('categories') if categories: self.data['categories'] = [x['title'] for x in categories] # depends on [control=['if'], data=[]] if page.get('contributors'): contributors = page.get('contributors') or 0 anoncontributors = page.get('anoncontributors') or 0 if isinstance(contributors, list): contributors = len(contributors) # depends on [control=['if'], data=[]] self.data['contributors'] = contributors + anoncontributors # depends on [control=['if'], data=[]] files = page.get('images') # really, these are FILES if files: self.data['files'] = [x['title'] for x in files] # depends on [control=['if'], data=[]] languages = page.get('langlinks') if languages: self.data['languages'] = languages # depends on [control=['if'], data=[]] pageviews = page.get('pageviews') if pageviews: values = [x for x in pageviews.values() if x] if values: self.data['views'] = int(sum(values) / len(values)) # depends on [control=['if'], data=[]] else: self.data['views'] = 0 # depends on [control=['if'], data=[]]
def set_expected_length(self, expected_length):
    """stub"""
    # Validate against the metadata before touching the underlying map.
    metadata = self.get_expected_length_metadata()
    if not self.my_osid_object_form._is_valid_integer(expected_length,
                                                      metadata):
        raise InvalidArgument('expectedLength')
    self.my_osid_object_form._my_map['expectedLength'] = expected_length
def function[set_expected_length, parameter[self, expected_length]]: constant[stub] if <ast.UnaryOp object at 0x7da1b09724a0> begin[:] <ast.Raise object at 0x7da1b0973730> call[name[self].my_osid_object_form._my_map][constant[expectedLength]] assign[=] name[expected_length]
keyword[def] identifier[set_expected_length] ( identifier[self] , identifier[expected_length] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[my_osid_object_form] . identifier[_is_valid_integer] ( identifier[expected_length] , identifier[self] . identifier[get_expected_length_metadata] ()): keyword[raise] identifier[InvalidArgument] ( literal[string] ) identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[expected_length]
def set_expected_length(self, expected_length): """stub""" if not self.my_osid_object_form._is_valid_integer(expected_length, self.get_expected_length_metadata()): raise InvalidArgument('expectedLength') # depends on [control=['if'], data=[]] self.my_osid_object_form._my_map['expectedLength'] = expected_length
def update_target(self, name, current, total):
    """Updates progress bar for a specified target."""
    rendered = self._bar(name, current, total)
    self.refresh(rendered)
def function[update_target, parameter[self, name, current, total]]: constant[Updates progress bar for a specified target.] call[name[self].refresh, parameter[call[name[self]._bar, parameter[name[name], name[current], name[total]]]]]
keyword[def] identifier[update_target] ( identifier[self] , identifier[name] , identifier[current] , identifier[total] ): literal[string] identifier[self] . identifier[refresh] ( identifier[self] . identifier[_bar] ( identifier[name] , identifier[current] , identifier[total] ))
def update_target(self, name, current, total): """Updates progress bar for a specified target.""" self.refresh(self._bar(name, current, total))
def run(self):
    """
    Runs the scanner

    :return: self
    """
    # Normalize path
    self.path = os.path.expanduser(self.path)

    # Guard clause: only an existing directory can be scanned.
    if not os.path.isdir(self.path):
        raise ScannerException("Unknown directory: %s" % self.path)

    self.search_script_directory(self.path)
    return self
def function[run, parameter[self]]: constant[ Runs the scanner :return: self ] name[self].path assign[=] call[name[os].path.expanduser, parameter[name[self].path]] if call[name[os].path.isdir, parameter[name[self].path]] begin[:] call[name[self].search_script_directory, parameter[name[self].path]] return[name[self]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[self] . identifier[path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[self] . identifier[path] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[path] ): identifier[self] . identifier[search_script_directory] ( identifier[self] . identifier[path] ) keyword[return] identifier[self] keyword[else] : keyword[raise] identifier[ScannerException] ( literal[string] % identifier[self] . identifier[path] )
def run(self): """ Runs the scanner :return: self """ # Normalize path self.path = os.path.expanduser(self.path) # Start scanning if os.path.isdir(self.path): self.search_script_directory(self.path) return self # depends on [control=['if'], data=[]] else: raise ScannerException('Unknown directory: %s' % self.path)
def _parse_multi_byte(self, s):
    # type: (str) -> int
    """ _parse_multi_byte parses x as a multibyte representation to get the
    int value of this AbstractUVarIntField.

    @param str s: the multibyte string to parse.
    @return int: The parsed int value represented by this AbstractUVarIntField.  # noqa: E501
    @raise: AssertionError
    @raise: Scapy_Exception if the input value encodes an integer larger than 1<<64  # noqa: E501
    """
    assert(len(s) >= 2)
    tmp_len = len(s)

    value = 0
    # s[0] is the prefix byte; the multibyte continuation starts at s[1].
    i = 1
    byte = orb(s[i])
    # For CPU sake, stops at an arbitrary large number!
    max_value = 1 << 64
    # As long as the MSG is set, an another byte must be read
    while byte & 0x80:
        # Clear the continuation bit (0x80) and accumulate the 7 payload
        # bits; successive bytes carry less-significant-first 7-bit groups.
        value += (byte ^ 0x80) << (7 * (i - 1))
        if value > max_value:
            raise error.Scapy_Exception(
                'out-of-bound value: the string encodes a value that is too large (>2^{64}): {}'.format(value)  # noqa: E501
            )
        i += 1
        assert i < tmp_len, 'EINVAL: x: out-of-bound read: the string ends before the AbstractUVarIntField!'  # noqa: E501
        byte = orb(s[i])
    # Final byte (continuation bit unset) contributes its full 7-bit group.
    value += byte << (7 * (i - 1))
    # The prefix contributed self._max_value; the multibyte part extends it
    # (presumably HPACK-style integer encoding — see RFC 7541 5.1; confirm).
    value += self._max_value

    assert(value >= 0)
    return value
def function[_parse_multi_byte, parameter[self, s]]: constant[ _parse_multi_byte parses x as a multibyte representation to get the int value of this AbstractUVarIntField. @param str s: the multibyte string to parse. @return int: The parsed int value represented by this AbstractUVarIntField. # noqa: E501 @raise: AssertionError @raise: Scapy_Exception if the input value encodes an integer larger than 1<<64 # noqa: E501 ] assert[compare[call[name[len], parameter[name[s]]] greater_or_equal[>=] constant[2]]] variable[tmp_len] assign[=] call[name[len], parameter[name[s]]] variable[value] assign[=] constant[0] variable[i] assign[=] constant[1] variable[byte] assign[=] call[name[orb], parameter[call[name[s]][name[i]]]] variable[max_value] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> constant[64]] while binary_operation[name[byte] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] begin[:] <ast.AugAssign object at 0x7da1b1f96fe0> if compare[name[value] greater[>] name[max_value]] begin[:] <ast.Raise object at 0x7da1b1f966b0> <ast.AugAssign object at 0x7da1b1f96b00> assert[compare[name[i] less[<] name[tmp_len]]] variable[byte] assign[=] call[name[orb], parameter[call[name[s]][name[i]]]] <ast.AugAssign object at 0x7da1b1f96b30> <ast.AugAssign object at 0x7da1b1f94250> assert[compare[name[value] greater_or_equal[>=] constant[0]]] return[name[value]]
keyword[def] identifier[_parse_multi_byte] ( identifier[self] , identifier[s] ): literal[string] keyword[assert] ( identifier[len] ( identifier[s] )>= literal[int] ) identifier[tmp_len] = identifier[len] ( identifier[s] ) identifier[value] = literal[int] identifier[i] = literal[int] identifier[byte] = identifier[orb] ( identifier[s] [ identifier[i] ]) identifier[max_value] = literal[int] << literal[int] keyword[while] identifier[byte] & literal[int] : identifier[value] +=( identifier[byte] ^ literal[int] )<<( literal[int] *( identifier[i] - literal[int] )) keyword[if] identifier[value] > identifier[max_value] : keyword[raise] identifier[error] . identifier[Scapy_Exception] ( literal[string] . identifier[format] ( identifier[value] ) ) identifier[i] += literal[int] keyword[assert] identifier[i] < identifier[tmp_len] , literal[string] identifier[byte] = identifier[orb] ( identifier[s] [ identifier[i] ]) identifier[value] += identifier[byte] <<( literal[int] *( identifier[i] - literal[int] )) identifier[value] += identifier[self] . identifier[_max_value] keyword[assert] ( identifier[value] >= literal[int] ) keyword[return] identifier[value]
def _parse_multi_byte(self, s): # type: (str) -> int ' _parse_multi_byte parses x as a multibyte representation to get the\n int value of this AbstractUVarIntField.\n\n @param str s: the multibyte string to parse.\n @return int: The parsed int value represented by this AbstractUVarIntField. # noqa: E501\n @raise: AssertionError\n @raise: Scapy_Exception if the input value encodes an integer larger than 1<<64 # noqa: E501\n ' assert len(s) >= 2 tmp_len = len(s) value = 0 i = 1 byte = orb(s[i]) # For CPU sake, stops at an arbitrary large number! max_value = 1 << 64 # As long as the MSG is set, an another byte must be read while byte & 128: value += (byte ^ 128) << 7 * (i - 1) if value > max_value: # noqa: E501 raise error.Scapy_Exception('out-of-bound value: the string encodes a value that is too large (>2^{64}): {}'.format(value)) # depends on [control=['if'], data=['value']] i += 1 assert i < tmp_len, 'EINVAL: x: out-of-bound read: the string ends before the AbstractUVarIntField!' # noqa: E501 byte = orb(s[i]) # depends on [control=['while'], data=[]] value += byte << 7 * (i - 1) value += self._max_value assert value >= 0 return value
def _fix_generic_tn_names(paired): """Convert TUMOR/NORMAL names in output into sample IDs. """ def run(line): parts = line.rstrip("\n\r").split("\t") if "TUMOR" in parts: parts[parts.index("TUMOR")] = paired.tumor_name if "TUMOUR" in parts: parts[parts.index("TUMOUR")] = paired.tumor_name if "NORMAL" in parts: assert paired.normal_name parts[parts.index("NORMAL")] = paired.normal_name return "\t".join(parts) + "\n" return run
def function[_fix_generic_tn_names, parameter[paired]]: constant[Convert TUMOR/NORMAL names in output into sample IDs. ] def function[run, parameter[line]]: variable[parts] assign[=] call[call[name[line].rstrip, parameter[constant[ ]]].split, parameter[constant[ ]]] if compare[constant[TUMOR] in name[parts]] begin[:] call[name[parts]][call[name[parts].index, parameter[constant[TUMOR]]]] assign[=] name[paired].tumor_name if compare[constant[TUMOUR] in name[parts]] begin[:] call[name[parts]][call[name[parts].index, parameter[constant[TUMOUR]]]] assign[=] name[paired].tumor_name if compare[constant[NORMAL] in name[parts]] begin[:] assert[name[paired].normal_name] call[name[parts]][call[name[parts].index, parameter[constant[NORMAL]]]] assign[=] name[paired].normal_name return[binary_operation[call[constant[ ].join, parameter[name[parts]]] + constant[ ]]] return[name[run]]
keyword[def] identifier[_fix_generic_tn_names] ( identifier[paired] ): literal[string] keyword[def] identifier[run] ( identifier[line] ): identifier[parts] = identifier[line] . identifier[rstrip] ( literal[string] ). identifier[split] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parts] : identifier[parts] [ identifier[parts] . identifier[index] ( literal[string] )]= identifier[paired] . identifier[tumor_name] keyword[if] literal[string] keyword[in] identifier[parts] : identifier[parts] [ identifier[parts] . identifier[index] ( literal[string] )]= identifier[paired] . identifier[tumor_name] keyword[if] literal[string] keyword[in] identifier[parts] : keyword[assert] identifier[paired] . identifier[normal_name] identifier[parts] [ identifier[parts] . identifier[index] ( literal[string] )]= identifier[paired] . identifier[normal_name] keyword[return] literal[string] . identifier[join] ( identifier[parts] )+ literal[string] keyword[return] identifier[run]
def _fix_generic_tn_names(paired): """Convert TUMOR/NORMAL names in output into sample IDs. """ def run(line): parts = line.rstrip('\n\r').split('\t') if 'TUMOR' in parts: parts[parts.index('TUMOR')] = paired.tumor_name # depends on [control=['if'], data=['parts']] if 'TUMOUR' in parts: parts[parts.index('TUMOUR')] = paired.tumor_name # depends on [control=['if'], data=['parts']] if 'NORMAL' in parts: assert paired.normal_name parts[parts.index('NORMAL')] = paired.normal_name # depends on [control=['if'], data=['parts']] return '\t'.join(parts) + '\n' return run
def groupuninstall(group, options=None):
    """
    Remove an existing software group.

    Extra *options* may be passed to ``yum`` if necessary; they may be
    given as a single string or as a list of strings.
    """
    manager = MANAGER
    if options is None:
        options = []
    elif isinstance(options, str):
        options = [options]
    options = " ".join(options)
    # Interpolate explicitly rather than via the fragile `% locals()` idiom,
    # which silently breaks if a local variable is renamed.
    run_as_root('%s %s groupremove "%s"' % (manager, options, group))
def function[groupuninstall, parameter[group, options]]: constant[ Remove an existing software group. Extra *options* may be passed to ``yum`` if necessary. ] variable[manager] assign[=] name[MANAGER] if compare[name[options] is constant[None]] begin[:] variable[options] assign[=] list[[]] variable[options] assign[=] call[constant[ ].join, parameter[name[options]]] call[name[run_as_root], parameter[binary_operation[constant[%(manager)s %(options)s groupremove "%(group)s"] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]]]
keyword[def] identifier[groupuninstall] ( identifier[group] , identifier[options] = keyword[None] ): literal[string] identifier[manager] = identifier[MANAGER] keyword[if] identifier[options] keyword[is] keyword[None] : identifier[options] =[] keyword[elif] identifier[isinstance] ( identifier[options] , identifier[str] ): identifier[options] =[ identifier[options] ] identifier[options] = literal[string] . identifier[join] ( identifier[options] ) identifier[run_as_root] ( literal[string] % identifier[locals] ())
def groupuninstall(group, options=None): """ Remove an existing software group. Extra *options* may be passed to ``yum`` if necessary. """ manager = MANAGER if options is None: options = [] # depends on [control=['if'], data=['options']] elif isinstance(options, str): options = [options] # depends on [control=['if'], data=[]] options = ' '.join(options) run_as_root('%(manager)s %(options)s groupremove "%(group)s"' % locals())
def close(self) -> None:
    """ Closes the connection. """
    transport = self.transport
    # Only close a live transport that is not already shutting down.
    if transport is not None and not transport.is_closing():
        transport.close()
    if self._connect_lock.locked():
        self._connect_lock.release()
    self.protocol = None
    self.transport = None
def function[close, parameter[self]]: constant[ Closes the connection. ] if <ast.BoolOp object at 0x7da2044c0b80> begin[:] call[name[self].transport.close, parameter[]] if call[name[self]._connect_lock.locked, parameter[]] begin[:] call[name[self]._connect_lock.release, parameter[]] name[self].protocol assign[=] constant[None] name[self].transport assign[=] constant[None]
keyword[def] identifier[close] ( identifier[self] )-> keyword[None] : literal[string] keyword[if] identifier[self] . identifier[transport] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[transport] . identifier[is_closing] (): identifier[self] . identifier[transport] . identifier[close] () keyword[if] identifier[self] . identifier[_connect_lock] . identifier[locked] (): identifier[self] . identifier[_connect_lock] . identifier[release] () identifier[self] . identifier[protocol] = keyword[None] identifier[self] . identifier[transport] = keyword[None]
def close(self) -> None: """ Closes the connection. """ if self.transport is not None and (not self.transport.is_closing()): self.transport.close() # depends on [control=['if'], data=[]] if self._connect_lock.locked(): self._connect_lock.release() # depends on [control=['if'], data=[]] self.protocol = None self.transport = None
def geometry(self):
    """Return the feature geometry, building it lazily via arcpy.

    Returns None when arcpy is not available. Otherwise the geometry is
    parsed once from the feature's ESRI-JSON dictionary (either the
    nested 'feature' payload or a top-level 'geometry' key) and cached
    on the instance for subsequent calls.
    """
    if not arcpyFound:
        return None
    if self._geom is None:
        payload = self._dict
        if 'feature' in payload:
            self._geom = arcpy.AsShape(payload['feature']['geometry'],
                                       esri_json=True)
        elif 'geometry' in payload:
            self._geom = arcpy.AsShape(payload['geometry'],
                                       esri_json=True)
    return self._geom
def function[geometry, parameter[self]]: constant[returns the feature geometry] if name[arcpyFound] begin[:] if compare[name[self]._geom is constant[None]] begin[:] if compare[constant[feature] in name[self]._dict] begin[:] name[self]._geom assign[=] call[name[arcpy].AsShape, parameter[call[call[name[self]._dict][constant[feature]]][constant[geometry]]]] return[name[self]._geom] return[constant[None]]
keyword[def] identifier[geometry] ( identifier[self] ): literal[string] keyword[if] identifier[arcpyFound] : keyword[if] identifier[self] . identifier[_geom] keyword[is] keyword[None] : keyword[if] literal[string] keyword[in] identifier[self] . identifier[_dict] : identifier[self] . identifier[_geom] = identifier[arcpy] . identifier[AsShape] ( identifier[self] . identifier[_dict] [ literal[string] ][ literal[string] ], identifier[esri_json] = keyword[True] ) keyword[elif] literal[string] keyword[in] identifier[self] . identifier[_dict] : identifier[self] . identifier[_geom] = identifier[arcpy] . identifier[AsShape] ( identifier[self] . identifier[_dict] [ literal[string] ], identifier[esri_json] = keyword[True] ) keyword[return] identifier[self] . identifier[_geom] keyword[return] keyword[None]
def geometry(self): """returns the feature geometry""" if arcpyFound: if self._geom is None: if 'feature' in self._dict: self._geom = arcpy.AsShape(self._dict['feature']['geometry'], esri_json=True) # depends on [control=['if'], data=[]] elif 'geometry' in self._dict: self._geom = arcpy.AsShape(self._dict['geometry'], esri_json=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return self._geom # depends on [control=['if'], data=[]] return None
def get_selected_state(self):
    """Return the review state currently selected for this form.

    Looks up the "<form_id>_review_state" key on the request and falls
    back to the literal "default" when no value was submitted.
    """
    return self.request.get(
        "{}_review_state".format(self.form_id), "default")
def function[get_selected_state, parameter[self]]: constant[Returns the current selected state ] variable[form_key] assign[=] call[constant[{}_review_state].format, parameter[name[self].form_id]] return[call[name[self].request.get, parameter[name[form_key], constant[default]]]]
keyword[def] identifier[get_selected_state] ( identifier[self] ): literal[string] identifier[form_key] = literal[string] . identifier[format] ( identifier[self] . identifier[form_id] ) keyword[return] identifier[self] . identifier[request] . identifier[get] ( identifier[form_key] , literal[string] )
def get_selected_state(self): """Returns the current selected state """ form_key = '{}_review_state'.format(self.form_id) return self.request.get(form_key, 'default')
def get_user(self, user):
    """
    Get user's data (first and last name, email, etc).

    Authenticates the project service with the stored project token
    before issuing the lookup.

    Args:
        user (string): User name.

    Returns:
        (dictionary): User's data encoded in a dictionary.

    Raises:
        requests.HTTPError on failure.
    """
    service = self.project_service
    service.set_auth(self._token_project)
    return service.get_user(user)
def function[get_user, parameter[self, user]]: constant[ Get user's data (first and last name, email, etc). Args: user (string): User name. Returns: (dictionary): User's data encoded in a dictionary. Raises: requests.HTTPError on failure. ] call[name[self].project_service.set_auth, parameter[name[self]._token_project]] return[call[name[self].project_service.get_user, parameter[name[user]]]]
keyword[def] identifier[get_user] ( identifier[self] , identifier[user] ): literal[string] identifier[self] . identifier[project_service] . identifier[set_auth] ( identifier[self] . identifier[_token_project] ) keyword[return] identifier[self] . identifier[project_service] . identifier[get_user] ( identifier[user] )
def get_user(self, user): """ Get user's data (first and last name, email, etc). Args: user (string): User name. Returns: (dictionary): User's data encoded in a dictionary. Raises: requests.HTTPError on failure. """ self.project_service.set_auth(self._token_project) return self.project_service.get_user(user)
def _set_af_vpnv4_neighbor(self, v, load=False):
    """
    Setter method for af_vpnv4_neighbor, mapped from YANG variable /routing_system/router/router_bgp/address_family/vpnv4/vpnv4_unicast/af_vpnv4_neighbor_address_holder/af_vpnv4_neighbor (list)
    If this variable is read-only (config: false) in the source YANG file, then _set_af_vpnv4_neighbor is considered as a private method. Backends looking to populate this variable should
    do so via calling thisObj._set_af_vpnv4_neighbor() directly.

    NOTE(review): this looks like an auto-generated (pyangbind-style)
    YANG binding setter; the ``load`` flag is accepted in the signature
    but never referenced in this body.
    """
    # Let wrapped/typed values coerce themselves to the underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate and wrap the value as a YANG 'list' of af-vpnv4-neighbor
        # entries, keyed by 'af-vpnv4-neighbor-address'. A TypeError or
        # ValueError here means the supplied value is not list-compatible.
        t = YANGDynClass(v,base=YANGListType("af_vpnv4_neighbor_address",af_vpnv4_neighbor.af_vpnv4_neighbor, yang_name="af-vpnv4-neighbor", rest_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='af-vpnv4-neighbor-address', extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'callpoint': u'AfVpnV4Neighbors', u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'neighbor'}}), is_container='list', yang_name="af-vpnv4-neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'callpoint': u'AfVpnV4Neighbors', u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Re-raise as a structured error describing the expected YANG type.
        raise ValueError({
            'error-string': """af_vpnv4_neighbor must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("af_vpnv4_neighbor_address",af_vpnv4_neighbor.af_vpnv4_neighbor, yang_name="af-vpnv4-neighbor", rest_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='af-vpnv4-neighbor-address', extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'callpoint': u'AfVpnV4Neighbors', u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'neighbor'}}), is_container='list', yang_name="af-vpnv4-neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'callpoint': u'AfVpnV4Neighbors', u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='list', is_config=True)""",
        })
    # Store on a name-mangled attribute (resolved against the enclosing class).
    self.__af_vpnv4_neighbor = t
    # Propagate the change if the object exposes a _set() hook.
    if hasattr(self, '_set'):
        self._set()
def function[_set_af_vpnv4_neighbor, parameter[self, v, load]]: constant[ Setter method for af_vpnv4_neighbor, mapped from YANG variable /routing_system/router/router_bgp/address_family/vpnv4/vpnv4_unicast/af_vpnv4_neighbor_address_holder/af_vpnv4_neighbor (list) If this variable is read-only (config: false) in the source YANG file, then _set_af_vpnv4_neighbor is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_af_vpnv4_neighbor() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b2588640> name[self].__af_vpnv4_neighbor assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_af_vpnv4_neighbor] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[af_vpnv4_neighbor] . identifier[af_vpnv4_neighbor] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . 
identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__af_vpnv4_neighbor] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_af_vpnv4_neighbor(self, v, load=False): """ Setter method for af_vpnv4_neighbor, mapped from YANG variable /routing_system/router/router_bgp/address_family/vpnv4/vpnv4_unicast/af_vpnv4_neighbor_address_holder/af_vpnv4_neighbor (list) If this variable is read-only (config: false) in the source YANG file, then _set_af_vpnv4_neighbor is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_af_vpnv4_neighbor() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('af_vpnv4_neighbor_address', af_vpnv4_neighbor.af_vpnv4_neighbor, yang_name='af-vpnv4-neighbor', rest_name='neighbor', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='af-vpnv4-neighbor-address', extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'callpoint': u'AfVpnV4Neighbors', u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'neighbor'}}), is_container='list', yang_name='af-vpnv4-neighbor', rest_name='neighbor', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify a neighbor router', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-incomplete-no': None, u'cli-suppress-list-no': None, u'callpoint': u'AfVpnV4Neighbors', u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'neighbor'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'af_vpnv4_neighbor must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 
'YANGDynClass(base=YANGListType("af_vpnv4_neighbor_address",af_vpnv4_neighbor.af_vpnv4_neighbor, yang_name="af-vpnv4-neighbor", rest_name="neighbor", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'af-vpnv4-neighbor-address\', extensions={u\'tailf-common\': {u\'info\': u\'Specify a neighbor router\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'cli-incomplete-no\': None, u\'cli-suppress-list-no\': None, u\'callpoint\': u\'AfVpnV4Neighbors\', u\'cli-suppress-key-abbreviation\': None, u\'cli-incomplete-command\': None, u\'alt-name\': u\'neighbor\'}}), is_container=\'list\', yang_name="af-vpnv4-neighbor", rest_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Specify a neighbor router\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'cli-incomplete-no\': None, u\'cli-suppress-list-no\': None, u\'callpoint\': u\'AfVpnV4Neighbors\', u\'cli-suppress-key-abbreviation\': None, u\'cli-incomplete-command\': None, u\'alt-name\': u\'neighbor\'}}, namespace=\'urn:brocade.com:mgmt:brocade-bgp\', defining_module=\'brocade-bgp\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__af_vpnv4_neighbor = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def get_sections(self, hdrgo, dflt_section=True):
    """Given a header GO, return the sections that contain it.

    When the header GO appears in no section, return a one-element list
    holding the default section name if *dflt_section* is True, else an
    empty list.
    """
    fallback = [self.secdflt] if dflt_section else []
    return self.hdrgo2sections.get(hdrgo, fallback)
def function[get_sections, parameter[self, hdrgo, dflt_section]]: constant[Given a header GO, return the sections that contain it.] variable[dflt_list] assign[=] list[[]] if name[dflt_section] begin[:] variable[dflt_list] assign[=] list[[<ast.Attribute object at 0x7da20c6a8430>]] return[call[name[self].hdrgo2sections.get, parameter[name[hdrgo], name[dflt_list]]]]
keyword[def] identifier[get_sections] ( identifier[self] , identifier[hdrgo] , identifier[dflt_section] = keyword[True] ): literal[string] identifier[dflt_list] =[] keyword[if] identifier[dflt_section] : identifier[dflt_list] =[ identifier[self] . identifier[secdflt] ] keyword[return] identifier[self] . identifier[hdrgo2sections] . identifier[get] ( identifier[hdrgo] , identifier[dflt_list] )
def get_sections(self, hdrgo, dflt_section=True): """Given a header GO, return the sections that contain it.""" dflt_list = [] # If the hdrgo is not in a section, return the default name for a section if dflt_section: dflt_list = [self.secdflt] # depends on [control=['if'], data=[]] return self.hdrgo2sections.get(hdrgo, dflt_list)
def with_url(self, url):
    """Set the request's URL and return the request for chaining.

    The Host header is refreshed from the parsed URL's host component.

    Keyword arguments:
    url -- a string representing the URL the set for the request
    """
    parsed = URL(url)
    self.url = parsed
    self.header["Host"] = parsed.host
    return self
def function[with_url, parameter[self, url]]: constant[Sets the request's URL and returns the request itself. Automatically sets the Host header according to the URL. Keyword arguments: url -- a string representing the URL the set for the request ] name[self].url assign[=] call[name[URL], parameter[name[url]]] call[name[self].header][constant[Host]] assign[=] name[self].url.host return[name[self]]
keyword[def] identifier[with_url] ( identifier[self] , identifier[url] ): literal[string] identifier[self] . identifier[url] = identifier[URL] ( identifier[url] ) identifier[self] . identifier[header] [ literal[string] ]= identifier[self] . identifier[url] . identifier[host] keyword[return] identifier[self]
def with_url(self, url): """Sets the request's URL and returns the request itself. Automatically sets the Host header according to the URL. Keyword arguments: url -- a string representing the URL the set for the request """ self.url = URL(url) self.header['Host'] = self.url.host return self
def chunk_iter(iterable, n):
    """Yield successive tuples of up to ``n`` items from *iterable*.

    The final chunk may be shorter than ``n`` when the input length is
    not an exact multiple.

    For example you can do

        for a, b in chunk_iter([1, 2, 3, 4, 5, 6], 2):
            print('{} {}'.format(a, b))
        # Prints
        # 1 2
        # 3 4
        # 5 6

    Args:
        iterable - Some iterable
        n - Chunk size (must be greater than 0)

    Raises:
        ValueError: if ``n`` is not greater than 0. (Validated
            explicitly rather than via ``assert``, which is stripped
            under ``python -O``.)
    """
    if n <= 0:
        raise ValueError("chunk size n must be greater than 0, got %r" % (n,))
    iterator = iter(iterable)
    # Two-arg iter(): call the lambda until it returns the () sentinel,
    # i.e. until islice finds no more items to take.
    yield from iter(lambda: tuple(itertools.islice(iterator, n)), ())
def function[chunk_iter, parameter[iterable, n]]: constant[Yields an iterator in chunks For example you can do for a, b in chunk_iter([1, 2, 3, 4, 5, 6], 2): print('{} {}'.format(a, b)) # Prints # 1 2 # 3 4 # 5 6 Args: iterable - Some iterable n - Chunk size (must be greater than 0) ] assert[compare[name[n] greater[>] constant[0]]] variable[iterable] assign[=] call[name[iter], parameter[name[iterable]]] variable[chunk] assign[=] call[name[tuple], parameter[call[name[itertools].islice, parameter[name[iterable], name[n]]]]] while name[chunk] begin[:] <ast.Yield object at 0x7da18eb54eb0> variable[chunk] assign[=] call[name[tuple], parameter[call[name[itertools].islice, parameter[name[iterable], name[n]]]]]
keyword[def] identifier[chunk_iter] ( identifier[iterable] , identifier[n] ): literal[string] keyword[assert] identifier[n] > literal[int] identifier[iterable] = identifier[iter] ( identifier[iterable] ) identifier[chunk] = identifier[tuple] ( identifier[itertools] . identifier[islice] ( identifier[iterable] , identifier[n] )) keyword[while] identifier[chunk] : keyword[yield] identifier[chunk] identifier[chunk] = identifier[tuple] ( identifier[itertools] . identifier[islice] ( identifier[iterable] , identifier[n] ))
def chunk_iter(iterable, n): """Yields an iterator in chunks For example you can do for a, b in chunk_iter([1, 2, 3, 4, 5, 6], 2): print('{} {}'.format(a, b)) # Prints # 1 2 # 3 4 # 5 6 Args: iterable - Some iterable n - Chunk size (must be greater than 0) """ assert n > 0 iterable = iter(iterable) chunk = tuple(itertools.islice(iterable, n)) while chunk: yield chunk chunk = tuple(itertools.islice(iterable, n)) # depends on [control=['while'], data=[]]
def _mean_absolute_error(y, y_pred, w): """Calculate the mean absolute error.""" return np.average(np.abs(y_pred - y), weights=w)
def function[_mean_absolute_error, parameter[y, y_pred, w]]: constant[Calculate the mean absolute error.] return[call[name[np].average, parameter[call[name[np].abs, parameter[binary_operation[name[y_pred] - name[y]]]]]]]
keyword[def] identifier[_mean_absolute_error] ( identifier[y] , identifier[y_pred] , identifier[w] ): literal[string] keyword[return] identifier[np] . identifier[average] ( identifier[np] . identifier[abs] ( identifier[y_pred] - identifier[y] ), identifier[weights] = identifier[w] )
def _mean_absolute_error(y, y_pred, w): """Calculate the mean absolute error.""" return np.average(np.abs(y_pred - y), weights=w)
def ls_recurse(path, include_hidden=False):
    """Finds content of folder recursively

    :param path: directory to get list of files and folders
    :param include_hidden: True iff include hidden files in list
    :return: List of paths in given directory recursively
    """
    found = []
    for name in os.listdir(path):
        # Skip hidden entries unless the caller asked for them.
        if FileSystem(name).is_hidden() and not include_hidden:
            continue
        full_path = os.path.join(path, name)
        found.append(full_path)
        if is_folder(full_path):
            # Descend into subdirectories and collect their contents too.
            found.extend(
                ls_recurse(full_path, include_hidden=include_hidden)
            )
    return list(set(found))
def function[ls_recurse, parameter[path, include_hidden]]: constant[Finds content of folder recursively :param path: directory to get list of files and folders :param include_hidden: True iff include hidden files in list :return: List of paths in given directory recursively ] variable[lst] assign[=] list[[]] for taget[name[file]] in starred[call[name[os].listdir, parameter[name[path]]]] begin[:] variable[hidden_file] assign[=] call[call[name[FileSystem], parameter[name[file]]].is_hidden, parameter[]] if <ast.BoolOp object at 0x7da20c76dab0> begin[:] call[name[lst].append, parameter[call[name[os].path.join, parameter[name[path], name[file]]]]] if call[name[is_folder], parameter[call[name[os].path.join, parameter[name[path], name[file]]]]] begin[:] <ast.AugAssign object at 0x7da20c76ea10> return[call[name[list], parameter[call[name[set], parameter[name[lst]]]]]]
keyword[def] identifier[ls_recurse] ( identifier[path] , identifier[include_hidden] = keyword[False] ): literal[string] identifier[lst] =[] keyword[for] identifier[file] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] ): identifier[hidden_file] = identifier[FileSystem] ( identifier[file] ). identifier[is_hidden] () keyword[if] ( identifier[hidden_file] keyword[and] identifier[include_hidden] ) keyword[or] ( keyword[not] identifier[hidden_file] ): identifier[lst] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[file] )) keyword[if] identifier[is_folder] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[file] )): identifier[lst] += identifier[ls_recurse] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[file] ), identifier[include_hidden] = identifier[include_hidden] ) keyword[return] identifier[list] ( identifier[set] ( identifier[lst] ))
def ls_recurse(path, include_hidden=False): """Finds content of folder recursively :param path: directory to get list of files and folders :param include_hidden: True iff include hidden files in list :return: List of paths in given directory recursively """ lst = [] for file in os.listdir(path): hidden_file = FileSystem(file).is_hidden() if hidden_file and include_hidden or not hidden_file: lst.append(os.path.join(path, file)) if is_folder(os.path.join(path, file)): lst += ls_recurse(os.path.join(path, file), include_hidden=include_hidden) # get list of files in directory # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file']] return list(set(lst))
def format_measure(measure): """Get format and units for data coming from profiler task.""" # Convert to a positive value. measure = abs(measure) # For number of calls if isinstance(measure, int): return to_text_string(measure) # For time measurements if 1.e-9 < measure <= 1.e-6: measure = u"{0:.2f} ns".format(measure / 1.e-9) elif 1.e-6 < measure <= 1.e-3: measure = u"{0:.2f} us".format(measure / 1.e-6) elif 1.e-3 < measure <= 1: measure = u"{0:.2f} ms".format(measure / 1.e-3) elif 1 < measure <= 60: measure = u"{0:.2f} sec".format(measure) elif 60 < measure <= 3600: m, s = divmod(measure, 3600) if s > 60: m, s = divmod(measure, 60) s = to_text_string(s).split(".")[-1] measure = u"{0:.0f}.{1:.2s} min".format(m, s) else: h, m = divmod(measure, 3600) if m > 60: m /= 60 measure = u"{0:.0f}h:{1:.0f}min".format(h, m) return measure
def function[format_measure, parameter[measure]]: constant[Get format and units for data coming from profiler task.] variable[measure] assign[=] call[name[abs], parameter[name[measure]]] if call[name[isinstance], parameter[name[measure], name[int]]] begin[:] return[call[name[to_text_string], parameter[name[measure]]]] if compare[constant[1e-09] less[<] name[measure]] begin[:] variable[measure] assign[=] call[constant[{0:.2f} ns].format, parameter[binary_operation[name[measure] / constant[1e-09]]]] return[name[measure]]
keyword[def] identifier[format_measure] ( identifier[measure] ): literal[string] identifier[measure] = identifier[abs] ( identifier[measure] ) keyword[if] identifier[isinstance] ( identifier[measure] , identifier[int] ): keyword[return] identifier[to_text_string] ( identifier[measure] ) keyword[if] literal[int] < identifier[measure] <= literal[int] : identifier[measure] = literal[string] . identifier[format] ( identifier[measure] / literal[int] ) keyword[elif] literal[int] < identifier[measure] <= literal[int] : identifier[measure] = literal[string] . identifier[format] ( identifier[measure] / literal[int] ) keyword[elif] literal[int] < identifier[measure] <= literal[int] : identifier[measure] = literal[string] . identifier[format] ( identifier[measure] / literal[int] ) keyword[elif] literal[int] < identifier[measure] <= literal[int] : identifier[measure] = literal[string] . identifier[format] ( identifier[measure] ) keyword[elif] literal[int] < identifier[measure] <= literal[int] : identifier[m] , identifier[s] = identifier[divmod] ( identifier[measure] , literal[int] ) keyword[if] identifier[s] > literal[int] : identifier[m] , identifier[s] = identifier[divmod] ( identifier[measure] , literal[int] ) identifier[s] = identifier[to_text_string] ( identifier[s] ). identifier[split] ( literal[string] )[- literal[int] ] identifier[measure] = literal[string] . identifier[format] ( identifier[m] , identifier[s] ) keyword[else] : identifier[h] , identifier[m] = identifier[divmod] ( identifier[measure] , literal[int] ) keyword[if] identifier[m] > literal[int] : identifier[m] /= literal[int] identifier[measure] = literal[string] . identifier[format] ( identifier[h] , identifier[m] ) keyword[return] identifier[measure]
def format_measure(measure): """Get format and units for data coming from profiler task.""" # Convert to a positive value. measure = abs(measure) # For number of calls if isinstance(measure, int): return to_text_string(measure) # depends on [control=['if'], data=[]] # For time measurements if 1e-09 < measure <= 1e-06: measure = u'{0:.2f} ns'.format(measure / 1e-09) # depends on [control=['if'], data=['measure']] elif 1e-06 < measure <= 0.001: measure = u'{0:.2f} us'.format(measure / 1e-06) # depends on [control=['if'], data=['measure']] elif 0.001 < measure <= 1: measure = u'{0:.2f} ms'.format(measure / 0.001) # depends on [control=['if'], data=['measure']] elif 1 < measure <= 60: measure = u'{0:.2f} sec'.format(measure) # depends on [control=['if'], data=['measure']] elif 60 < measure <= 3600: (m, s) = divmod(measure, 3600) if s > 60: (m, s) = divmod(measure, 60) s = to_text_string(s).split('.')[-1] # depends on [control=['if'], data=['s']] measure = u'{0:.0f}.{1:.2s} min'.format(m, s) # depends on [control=['if'], data=['measure']] else: (h, m) = divmod(measure, 3600) if m > 60: m /= 60 # depends on [control=['if'], data=['m']] measure = u'{0:.0f}h:{1:.0f}min'.format(h, m) return measure
def dispatch(self, message):
    """Match *message* against every registered route and collect handlers.

    Returns a list of dicts, one per route that matched the message and
    produced at least one handler, each of the form
    {"handlers": <route dispatch result>, "message": <matched message>}.
    """
    matches = []
    for route in self.routes.values():
        matched = message.match(route)
        if matched is None:
            continue
        handlers = route.dispatch(matched)
        if not handlers:
            continue
        matches.append({
            "handlers": handlers,
            "message": matched
        })
    return matches
def function[dispatch, parameter[self, message]]: constant[ dispatch ] variable[results] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0a2e2c0>, <ast.Name object at 0x7da1b0a2d0c0>]]] in starred[call[name[self].routes.items, parameter[]]] begin[:] variable[__message] assign[=] call[name[message].match, parameter[name[route]]] if compare[name[__message] is constant[None]] begin[:] continue variable[route_result] assign[=] call[name[route].dispatch, parameter[name[__message]]] if compare[call[name[len], parameter[name[route_result]]] equal[==] constant[0]] begin[:] continue call[name[results].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0a2c040>, <ast.Constant object at 0x7da1b0a2d360>], [<ast.Name object at 0x7da1b0a2d480>, <ast.Name object at 0x7da1b0af4130>]]]] return[name[results]]
keyword[def] identifier[dispatch] ( identifier[self] , identifier[message] ): literal[string] identifier[results] =[] keyword[for] identifier[resource] , identifier[route] keyword[in] identifier[self] . identifier[routes] . identifier[items] (): identifier[__message] = identifier[message] . identifier[match] ( identifier[route] ) keyword[if] identifier[__message] keyword[is] keyword[None] : keyword[continue] identifier[route_result] = identifier[route] . identifier[dispatch] ( identifier[__message] ) keyword[if] identifier[len] ( identifier[route_result] )== literal[int] : keyword[continue] identifier[results] . identifier[append] ({ literal[string] : identifier[route_result] , literal[string] : identifier[__message] }) keyword[return] identifier[results]
def dispatch(self, message): """ dispatch """ results = [] # match routes for (resource, route) in self.routes.items(): __message = message.match(route) if __message is None: continue # depends on [control=['if'], data=[]] route_result = route.dispatch(__message) if len(route_result) == 0: continue # depends on [control=['if'], data=[]] results.append({'handlers': route_result, 'message': __message}) # depends on [control=['for'], data=[]] return results
def _find_games(self, week, year, end_week): """ Retrieve all major games played for a given week. Builds a URL based on the requested date and downloads the HTML contents before parsing any and all games played during that week. Any games that are found are added to the boxscores dictionary with high-level game information such as the home and away team names and a link to the boxscore page. Parameters ---------- week : int The week number to pull games from. year : int The 4-digit year to pull games from. end_week : int (optional) Optionally specify an end week to iterate until. All boxscores starting from the week specified in the 'week' parameter up to and including the boxscores specified in the 'end_week' parameter will be pulled. If left empty, or if 'end_week' is prior to 'week', only the games from the day specified in the 'date' parameter will be saved. """ if not end_week or week > end_week: end_week = week while week <= end_week: url = self._create_url(week, year) page = self._get_requested_page(url) games = page('table[class="teams"]').items() boxscores = self._extract_game_info(games) timestamp = '%s-%s' % (week, year) self._boxscores[timestamp] = boxscores week += 1
def function[_find_games, parameter[self, week, year, end_week]]: constant[ Retrieve all major games played for a given week. Builds a URL based on the requested date and downloads the HTML contents before parsing any and all games played during that week. Any games that are found are added to the boxscores dictionary with high-level game information such as the home and away team names and a link to the boxscore page. Parameters ---------- week : int The week number to pull games from. year : int The 4-digit year to pull games from. end_week : int (optional) Optionally specify an end week to iterate until. All boxscores starting from the week specified in the 'week' parameter up to and including the boxscores specified in the 'end_week' parameter will be pulled. If left empty, or if 'end_week' is prior to 'week', only the games from the day specified in the 'date' parameter will be saved. ] if <ast.BoolOp object at 0x7da1b0b36dd0> begin[:] variable[end_week] assign[=] name[week] while compare[name[week] less_or_equal[<=] name[end_week]] begin[:] variable[url] assign[=] call[name[self]._create_url, parameter[name[week], name[year]]] variable[page] assign[=] call[name[self]._get_requested_page, parameter[name[url]]] variable[games] assign[=] call[call[name[page], parameter[constant[table[class="teams"]]]].items, parameter[]] variable[boxscores] assign[=] call[name[self]._extract_game_info, parameter[name[games]]] variable[timestamp] assign[=] binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0b37a60>, <ast.Name object at 0x7da1b0b35510>]]] call[name[self]._boxscores][name[timestamp]] assign[=] name[boxscores] <ast.AugAssign object at 0x7da1b0b372e0>
keyword[def] identifier[_find_games] ( identifier[self] , identifier[week] , identifier[year] , identifier[end_week] ): literal[string] keyword[if] keyword[not] identifier[end_week] keyword[or] identifier[week] > identifier[end_week] : identifier[end_week] = identifier[week] keyword[while] identifier[week] <= identifier[end_week] : identifier[url] = identifier[self] . identifier[_create_url] ( identifier[week] , identifier[year] ) identifier[page] = identifier[self] . identifier[_get_requested_page] ( identifier[url] ) identifier[games] = identifier[page] ( literal[string] ). identifier[items] () identifier[boxscores] = identifier[self] . identifier[_extract_game_info] ( identifier[games] ) identifier[timestamp] = literal[string] %( identifier[week] , identifier[year] ) identifier[self] . identifier[_boxscores] [ identifier[timestamp] ]= identifier[boxscores] identifier[week] += literal[int]
def _find_games(self, week, year, end_week): """ Retrieve all major games played for a given week. Builds a URL based on the requested date and downloads the HTML contents before parsing any and all games played during that week. Any games that are found are added to the boxscores dictionary with high-level game information such as the home and away team names and a link to the boxscore page. Parameters ---------- week : int The week number to pull games from. year : int The 4-digit year to pull games from. end_week : int (optional) Optionally specify an end week to iterate until. All boxscores starting from the week specified in the 'week' parameter up to and including the boxscores specified in the 'end_week' parameter will be pulled. If left empty, or if 'end_week' is prior to 'week', only the games from the day specified in the 'date' parameter will be saved. """ if not end_week or week > end_week: end_week = week # depends on [control=['if'], data=[]] while week <= end_week: url = self._create_url(week, year) page = self._get_requested_page(url) games = page('table[class="teams"]').items() boxscores = self._extract_game_info(games) timestamp = '%s-%s' % (week, year) self._boxscores[timestamp] = boxscores week += 1 # depends on [control=['while'], data=['week']]
def watch(self, key, criteria, callback): """ Registers a new watch under [key] (which can be used with `unwatch()` to remove the watch) that filters messages using [criteria] (may be a predicate or a 'criteria dict' [see the README for more info there]). Matching messages are passed to [callback], which must accept three arguments: the matched incoming message, this instance of `WatchableConnection`, and the key under which the watch was registered. """ if hasattr(criteria, '__call__'): pred = criteria else: pred = lambda incoming: _match_criteria(criteria, incoming) with self._watches_lock: self._watches[key] = (pred, callback)
def function[watch, parameter[self, key, criteria, callback]]: constant[ Registers a new watch under [key] (which can be used with `unwatch()` to remove the watch) that filters messages using [criteria] (may be a predicate or a 'criteria dict' [see the README for more info there]). Matching messages are passed to [callback], which must accept three arguments: the matched incoming message, this instance of `WatchableConnection`, and the key under which the watch was registered. ] if call[name[hasattr], parameter[name[criteria], constant[__call__]]] begin[:] variable[pred] assign[=] name[criteria] with name[self]._watches_lock begin[:] call[name[self]._watches][name[key]] assign[=] tuple[[<ast.Name object at 0x7da1b0c34e50>, <ast.Name object at 0x7da1b0c35870>]]
keyword[def] identifier[watch] ( identifier[self] , identifier[key] , identifier[criteria] , identifier[callback] ): literal[string] keyword[if] identifier[hasattr] ( identifier[criteria] , literal[string] ): identifier[pred] = identifier[criteria] keyword[else] : identifier[pred] = keyword[lambda] identifier[incoming] : identifier[_match_criteria] ( identifier[criteria] , identifier[incoming] ) keyword[with] identifier[self] . identifier[_watches_lock] : identifier[self] . identifier[_watches] [ identifier[key] ]=( identifier[pred] , identifier[callback] )
def watch(self, key, criteria, callback): """ Registers a new watch under [key] (which can be used with `unwatch()` to remove the watch) that filters messages using [criteria] (may be a predicate or a 'criteria dict' [see the README for more info there]). Matching messages are passed to [callback], which must accept three arguments: the matched incoming message, this instance of `WatchableConnection`, and the key under which the watch was registered. """ if hasattr(criteria, '__call__'): pred = criteria # depends on [control=['if'], data=[]] else: pred = lambda incoming: _match_criteria(criteria, incoming) with self._watches_lock: self._watches[key] = (pred, callback) # depends on [control=['with'], data=[]]
def make_vec_env(env_id, env_type, num_env, seed, wrapper_kwargs=None, start_index=0, reward_scale=1.0, flatten_dict_observations=True, gamestate=None): """ Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo. """ wrapper_kwargs = wrapper_kwargs or {} mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0 seed = seed + 10000 * mpi_rank if seed is not None else None logger_dir = logger.get_dir() def make_thunk(rank): return lambda: make_env( env_id=env_id, env_type=env_type, mpi_rank=mpi_rank, subrank=rank, seed=seed, reward_scale=reward_scale, gamestate=gamestate, flatten_dict_observations=flatten_dict_observations, wrapper_kwargs=wrapper_kwargs, logger_dir=logger_dir ) set_global_seeds(seed) if num_env > 1: return SubprocVecEnv([make_thunk(i + start_index) for i in range(num_env)]) else: return DummyVecEnv([make_thunk(start_index)])
def function[make_vec_env, parameter[env_id, env_type, num_env, seed, wrapper_kwargs, start_index, reward_scale, flatten_dict_observations, gamestate]]: constant[ Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo. ] variable[wrapper_kwargs] assign[=] <ast.BoolOp object at 0x7da2047e9780> variable[mpi_rank] assign[=] <ast.IfExp object at 0x7da2047e8b20> variable[seed] assign[=] <ast.IfExp object at 0x7da2047ebaf0> variable[logger_dir] assign[=] call[name[logger].get_dir, parameter[]] def function[make_thunk, parameter[rank]]: return[<ast.Lambda object at 0x7da2047e9a20>] call[name[set_global_seeds], parameter[name[seed]]] if compare[name[num_env] greater[>] constant[1]] begin[:] return[call[name[SubprocVecEnv], parameter[<ast.ListComp object at 0x7da18dc9b490>]]]
keyword[def] identifier[make_vec_env] ( identifier[env_id] , identifier[env_type] , identifier[num_env] , identifier[seed] , identifier[wrapper_kwargs] = keyword[None] , identifier[start_index] = literal[int] , identifier[reward_scale] = literal[int] , identifier[flatten_dict_observations] = keyword[True] , identifier[gamestate] = keyword[None] ): literal[string] identifier[wrapper_kwargs] = identifier[wrapper_kwargs] keyword[or] {} identifier[mpi_rank] = identifier[MPI] . identifier[COMM_WORLD] . identifier[Get_rank] () keyword[if] identifier[MPI] keyword[else] literal[int] identifier[seed] = identifier[seed] + literal[int] * identifier[mpi_rank] keyword[if] identifier[seed] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] identifier[logger_dir] = identifier[logger] . identifier[get_dir] () keyword[def] identifier[make_thunk] ( identifier[rank] ): keyword[return] keyword[lambda] : identifier[make_env] ( identifier[env_id] = identifier[env_id] , identifier[env_type] = identifier[env_type] , identifier[mpi_rank] = identifier[mpi_rank] , identifier[subrank] = identifier[rank] , identifier[seed] = identifier[seed] , identifier[reward_scale] = identifier[reward_scale] , identifier[gamestate] = identifier[gamestate] , identifier[flatten_dict_observations] = identifier[flatten_dict_observations] , identifier[wrapper_kwargs] = identifier[wrapper_kwargs] , identifier[logger_dir] = identifier[logger_dir] ) identifier[set_global_seeds] ( identifier[seed] ) keyword[if] identifier[num_env] > literal[int] : keyword[return] identifier[SubprocVecEnv] ([ identifier[make_thunk] ( identifier[i] + identifier[start_index] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_env] )]) keyword[else] : keyword[return] identifier[DummyVecEnv] ([ identifier[make_thunk] ( identifier[start_index] )])
def make_vec_env(env_id, env_type, num_env, seed, wrapper_kwargs=None, start_index=0, reward_scale=1.0, flatten_dict_observations=True, gamestate=None): """ Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo. """ wrapper_kwargs = wrapper_kwargs or {} mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0 seed = seed + 10000 * mpi_rank if seed is not None else None logger_dir = logger.get_dir() def make_thunk(rank): return lambda : make_env(env_id=env_id, env_type=env_type, mpi_rank=mpi_rank, subrank=rank, seed=seed, reward_scale=reward_scale, gamestate=gamestate, flatten_dict_observations=flatten_dict_observations, wrapper_kwargs=wrapper_kwargs, logger_dir=logger_dir) set_global_seeds(seed) if num_env > 1: return SubprocVecEnv([make_thunk(i + start_index) for i in range(num_env)]) # depends on [control=['if'], data=['num_env']] else: return DummyVecEnv([make_thunk(start_index)])
def to_protobuf(self): """Convert object to a protobuf message""" self._validate() kwargs = {k: _convert(getattr(self, k), 'to_protobuf') for k in self._get_params()} return self._protobuf_cls(**kwargs)
def function[to_protobuf, parameter[self]]: constant[Convert object to a protobuf message] call[name[self]._validate, parameter[]] variable[kwargs] assign[=] <ast.DictComp object at 0x7da1b08da680> return[call[name[self]._protobuf_cls, parameter[]]]
keyword[def] identifier[to_protobuf] ( identifier[self] ): literal[string] identifier[self] . identifier[_validate] () identifier[kwargs] ={ identifier[k] : identifier[_convert] ( identifier[getattr] ( identifier[self] , identifier[k] ), literal[string] ) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_get_params] ()} keyword[return] identifier[self] . identifier[_protobuf_cls] (** identifier[kwargs] )
def to_protobuf(self): """Convert object to a protobuf message""" self._validate() kwargs = {k: _convert(getattr(self, k), 'to_protobuf') for k in self._get_params()} return self._protobuf_cls(**kwargs)
def from_meta(node): """ Helper method that reolves a routing node to element. Rather than doing a lookup and fetch, the routing node provides the information to build the element from meta alone. :rtype: Element """ # Version SMC < 6.4 if 'related_element_type' not in node.data: return Element.from_href( node.data.get('href')) # SMC Version >= 6.4 - more efficient because it builds the # element by meta versus requiring a query return Element.from_meta( name=node.data.get('name'), type=node.related_element_type, href=node.data.get('href'))
def function[from_meta, parameter[node]]: constant[ Helper method that reolves a routing node to element. Rather than doing a lookup and fetch, the routing node provides the information to build the element from meta alone. :rtype: Element ] if compare[constant[related_element_type] <ast.NotIn object at 0x7da2590d7190> name[node].data] begin[:] return[call[name[Element].from_href, parameter[call[name[node].data.get, parameter[constant[href]]]]]] return[call[name[Element].from_meta, parameter[]]]
keyword[def] identifier[from_meta] ( identifier[node] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[node] . identifier[data] : keyword[return] identifier[Element] . identifier[from_href] ( identifier[node] . identifier[data] . identifier[get] ( literal[string] )) keyword[return] identifier[Element] . identifier[from_meta] ( identifier[name] = identifier[node] . identifier[data] . identifier[get] ( literal[string] ), identifier[type] = identifier[node] . identifier[related_element_type] , identifier[href] = identifier[node] . identifier[data] . identifier[get] ( literal[string] ))
def from_meta(node): """ Helper method that reolves a routing node to element. Rather than doing a lookup and fetch, the routing node provides the information to build the element from meta alone. :rtype: Element """ # Version SMC < 6.4 if 'related_element_type' not in node.data: return Element.from_href(node.data.get('href')) # depends on [control=['if'], data=[]] # SMC Version >= 6.4 - more efficient because it builds the # element by meta versus requiring a query return Element.from_meta(name=node.data.get('name'), type=node.related_element_type, href=node.data.get('href'))
def send(self, data, tx_id=None): """Send `data` (raw string or EJSON payload) to WebSocket client.""" # buffer data until we get pre-requisite data if tx_id is None: tx_id = self.get_tx_id() self._tx_buffer[tx_id] = data # de-queue messages from buffer while self._tx_next_id in self._tx_buffer: # pull next message from buffer data = self._tx_buffer.pop(self._tx_next_id) if self._tx_buffer: safe_call(self.logger.debug, 'TX found %d', self._tx_next_id) # advance next message ID self._tx_next_id = next(self._tx_next_id_gen) if not isinstance(data, basestring): # ejson payload msg = data.get('msg', None) if msg in (ADDED, CHANGED, REMOVED): ids = self.remote_ids[data['collection']] meteor_id = data['id'] if msg == ADDED: if meteor_id in ids: msg = data['msg'] = CHANGED else: ids.add(meteor_id) elif msg == CHANGED: if meteor_id not in ids: # object has become visible, treat as `added`. msg = data['msg'] = ADDED ids.add(meteor_id) elif msg == REMOVED: try: ids.remove(meteor_id) except KeyError: continue # client doesn't have this, don't send. data = 'a%s' % ejson.dumps([ejson.dumps(data)]) # send message safe_call(self.logger.debug, '> %s %r', self, data) try: self.ws.send(data) except geventwebsocket.WebSocketError: self.ws.close() self._tx_buffer.clear() break num_waiting = len(self._tx_buffer) if num_waiting > 10: safe_call( self.logger.warn, 'TX received %d, waiting for %d, have %d waiting: %r.', tx_id, self._tx_next_id, num_waiting, self._tx_buffer, )
def function[send, parameter[self, data, tx_id]]: constant[Send `data` (raw string or EJSON payload) to WebSocket client.] if compare[name[tx_id] is constant[None]] begin[:] variable[tx_id] assign[=] call[name[self].get_tx_id, parameter[]] call[name[self]._tx_buffer][name[tx_id]] assign[=] name[data] while compare[name[self]._tx_next_id in name[self]._tx_buffer] begin[:] variable[data] assign[=] call[name[self]._tx_buffer.pop, parameter[name[self]._tx_next_id]] if name[self]._tx_buffer begin[:] call[name[safe_call], parameter[name[self].logger.debug, constant[TX found %d], name[self]._tx_next_id]] name[self]._tx_next_id assign[=] call[name[next], parameter[name[self]._tx_next_id_gen]] if <ast.UnaryOp object at 0x7da18eb56e60> begin[:] variable[msg] assign[=] call[name[data].get, parameter[constant[msg], constant[None]]] if compare[name[msg] in tuple[[<ast.Name object at 0x7da18eb55d20>, <ast.Name object at 0x7da18eb546a0>, <ast.Name object at 0x7da18eb57340>]]] begin[:] variable[ids] assign[=] call[name[self].remote_ids][call[name[data]][constant[collection]]] variable[meteor_id] assign[=] call[name[data]][constant[id]] if compare[name[msg] equal[==] name[ADDED]] begin[:] if compare[name[meteor_id] in name[ids]] begin[:] variable[msg] assign[=] name[CHANGED] variable[data] assign[=] binary_operation[constant[a%s] <ast.Mod object at 0x7da2590d6920> call[name[ejson].dumps, parameter[list[[<ast.Call object at 0x7da20cabc3a0>]]]]] call[name[safe_call], parameter[name[self].logger.debug, constant[> %s %r], name[self], name[data]]] <ast.Try object at 0x7da20cabcd00> variable[num_waiting] assign[=] call[name[len], parameter[name[self]._tx_buffer]] if compare[name[num_waiting] greater[>] constant[10]] begin[:] call[name[safe_call], parameter[name[self].logger.warn, constant[TX received %d, waiting for %d, have %d waiting: %r.], name[tx_id], name[self]._tx_next_id, name[num_waiting], name[self]._tx_buffer]]
keyword[def] identifier[send] ( identifier[self] , identifier[data] , identifier[tx_id] = keyword[None] ): literal[string] keyword[if] identifier[tx_id] keyword[is] keyword[None] : identifier[tx_id] = identifier[self] . identifier[get_tx_id] () identifier[self] . identifier[_tx_buffer] [ identifier[tx_id] ]= identifier[data] keyword[while] identifier[self] . identifier[_tx_next_id] keyword[in] identifier[self] . identifier[_tx_buffer] : identifier[data] = identifier[self] . identifier[_tx_buffer] . identifier[pop] ( identifier[self] . identifier[_tx_next_id] ) keyword[if] identifier[self] . identifier[_tx_buffer] : identifier[safe_call] ( identifier[self] . identifier[logger] . identifier[debug] , literal[string] , identifier[self] . identifier[_tx_next_id] ) identifier[self] . identifier[_tx_next_id] = identifier[next] ( identifier[self] . identifier[_tx_next_id_gen] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[basestring] ): identifier[msg] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[msg] keyword[in] ( identifier[ADDED] , identifier[CHANGED] , identifier[REMOVED] ): identifier[ids] = identifier[self] . identifier[remote_ids] [ identifier[data] [ literal[string] ]] identifier[meteor_id] = identifier[data] [ literal[string] ] keyword[if] identifier[msg] == identifier[ADDED] : keyword[if] identifier[meteor_id] keyword[in] identifier[ids] : identifier[msg] = identifier[data] [ literal[string] ]= identifier[CHANGED] keyword[else] : identifier[ids] . identifier[add] ( identifier[meteor_id] ) keyword[elif] identifier[msg] == identifier[CHANGED] : keyword[if] identifier[meteor_id] keyword[not] keyword[in] identifier[ids] : identifier[msg] = identifier[data] [ literal[string] ]= identifier[ADDED] identifier[ids] . identifier[add] ( identifier[meteor_id] ) keyword[elif] identifier[msg] == identifier[REMOVED] : keyword[try] : identifier[ids] . 
identifier[remove] ( identifier[meteor_id] ) keyword[except] identifier[KeyError] : keyword[continue] identifier[data] = literal[string] % identifier[ejson] . identifier[dumps] ([ identifier[ejson] . identifier[dumps] ( identifier[data] )]) identifier[safe_call] ( identifier[self] . identifier[logger] . identifier[debug] , literal[string] , identifier[self] , identifier[data] ) keyword[try] : identifier[self] . identifier[ws] . identifier[send] ( identifier[data] ) keyword[except] identifier[geventwebsocket] . identifier[WebSocketError] : identifier[self] . identifier[ws] . identifier[close] () identifier[self] . identifier[_tx_buffer] . identifier[clear] () keyword[break] identifier[num_waiting] = identifier[len] ( identifier[self] . identifier[_tx_buffer] ) keyword[if] identifier[num_waiting] > literal[int] : identifier[safe_call] ( identifier[self] . identifier[logger] . identifier[warn] , literal[string] , identifier[tx_id] , identifier[self] . identifier[_tx_next_id] , identifier[num_waiting] , identifier[self] . identifier[_tx_buffer] , )
def send(self, data, tx_id=None): """Send `data` (raw string or EJSON payload) to WebSocket client.""" # buffer data until we get pre-requisite data if tx_id is None: tx_id = self.get_tx_id() # depends on [control=['if'], data=['tx_id']] self._tx_buffer[tx_id] = data # de-queue messages from buffer while self._tx_next_id in self._tx_buffer: # pull next message from buffer data = self._tx_buffer.pop(self._tx_next_id) if self._tx_buffer: safe_call(self.logger.debug, 'TX found %d', self._tx_next_id) # depends on [control=['if'], data=[]] # advance next message ID self._tx_next_id = next(self._tx_next_id_gen) if not isinstance(data, basestring): # ejson payload msg = data.get('msg', None) if msg in (ADDED, CHANGED, REMOVED): ids = self.remote_ids[data['collection']] meteor_id = data['id'] if msg == ADDED: if meteor_id in ids: msg = data['msg'] = CHANGED # depends on [control=['if'], data=[]] else: ids.add(meteor_id) # depends on [control=['if'], data=['msg']] elif msg == CHANGED: if meteor_id not in ids: # object has become visible, treat as `added`. msg = data['msg'] = ADDED ids.add(meteor_id) # depends on [control=['if'], data=['meteor_id', 'ids']] # depends on [control=['if'], data=['msg']] elif msg == REMOVED: try: ids.remove(meteor_id) # depends on [control=['try'], data=[]] except KeyError: continue # client doesn't have this, don't send. 
# depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['msg']] data = 'a%s' % ejson.dumps([ejson.dumps(data)]) # depends on [control=['if'], data=[]] # send message safe_call(self.logger.debug, '> %s %r', self, data) try: self.ws.send(data) # depends on [control=['try'], data=[]] except geventwebsocket.WebSocketError: self.ws.close() self._tx_buffer.clear() break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] num_waiting = len(self._tx_buffer) if num_waiting > 10: safe_call(self.logger.warn, 'TX received %d, waiting for %d, have %d waiting: %r.', tx_id, self._tx_next_id, num_waiting, self._tx_buffer) # depends on [control=['if'], data=['num_waiting']]
def set_edge_attr(self, n, m, attr, value): ''' API: set_edge_attr(self, n, m, attr, value) Description: Sets attr attribute of edge (n,m) to value. Input: n: Source node name. m: Sink node name. attr: Attribute of edge to set. value: New value of attribute. Pre: Graph should have this edge. Post: Edge attribute will be updated. ''' if self.graph_type is DIRECTED_GRAPH: self.edge_attr[(n,m)][attr] = value else: try: self.edge_attr[(n,m)][attr] = value except KeyError: self.edge_attr[(m,n)][attr] = value
def function[set_edge_attr, parameter[self, n, m, attr, value]]: constant[ API: set_edge_attr(self, n, m, attr, value) Description: Sets attr attribute of edge (n,m) to value. Input: n: Source node name. m: Sink node name. attr: Attribute of edge to set. value: New value of attribute. Pre: Graph should have this edge. Post: Edge attribute will be updated. ] if compare[name[self].graph_type is name[DIRECTED_GRAPH]] begin[:] call[call[name[self].edge_attr][tuple[[<ast.Name object at 0x7da1b05c49d0>, <ast.Name object at 0x7da1b05c4b20>]]]][name[attr]] assign[=] name[value]
keyword[def] identifier[set_edge_attr] ( identifier[self] , identifier[n] , identifier[m] , identifier[attr] , identifier[value] ): literal[string] keyword[if] identifier[self] . identifier[graph_type] keyword[is] identifier[DIRECTED_GRAPH] : identifier[self] . identifier[edge_attr] [( identifier[n] , identifier[m] )][ identifier[attr] ]= identifier[value] keyword[else] : keyword[try] : identifier[self] . identifier[edge_attr] [( identifier[n] , identifier[m] )][ identifier[attr] ]= identifier[value] keyword[except] identifier[KeyError] : identifier[self] . identifier[edge_attr] [( identifier[m] , identifier[n] )][ identifier[attr] ]= identifier[value]
def set_edge_attr(self, n, m, attr, value): """ API: set_edge_attr(self, n, m, attr, value) Description: Sets attr attribute of edge (n,m) to value. Input: n: Source node name. m: Sink node name. attr: Attribute of edge to set. value: New value of attribute. Pre: Graph should have this edge. Post: Edge attribute will be updated. """ if self.graph_type is DIRECTED_GRAPH: self.edge_attr[n, m][attr] = value # depends on [control=['if'], data=[]] else: try: self.edge_attr[n, m][attr] = value # depends on [control=['try'], data=[]] except KeyError: self.edge_attr[m, n][attr] = value # depends on [control=['except'], data=[]]
def _validate_incompatibilities(self, incompatibles): """Used by the bowtie _validate method (PRIVATE).""" for element in incompatibles: if type(element) is list: i = [a for a in element if self._get_parameter(a)] if len(i) > 1: raise ValueError("Options {} are incompatible".format(" and ".join(i))) elif type(incompatibles) is dict: if self._get_parameter(element): for b in incompatibles[element]: if self._get_parameter(b): raise ValueError("Options %s and %s are incompatible." % (element, b)) else: for a in element: if self._get_parameter(a): for b in incompatibles[a]: if self._get_parameter(b): raise ValueError("Options %s and %s are incompatible." % (a, b))
def function[_validate_incompatibilities, parameter[self, incompatibles]]: constant[Used by the bowtie _validate method (PRIVATE).] for taget[name[element]] in starred[name[incompatibles]] begin[:] if compare[call[name[type], parameter[name[element]]] is name[list]] begin[:] variable[i] assign[=] <ast.ListComp object at 0x7da1b1fb8eb0> if compare[call[name[len], parameter[name[i]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b1fb9030>
keyword[def] identifier[_validate_incompatibilities] ( identifier[self] , identifier[incompatibles] ): literal[string] keyword[for] identifier[element] keyword[in] identifier[incompatibles] : keyword[if] identifier[type] ( identifier[element] ) keyword[is] identifier[list] : identifier[i] =[ identifier[a] keyword[for] identifier[a] keyword[in] identifier[element] keyword[if] identifier[self] . identifier[_get_parameter] ( identifier[a] )] keyword[if] identifier[len] ( identifier[i] )> literal[int] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[i] ))) keyword[elif] identifier[type] ( identifier[incompatibles] ) keyword[is] identifier[dict] : keyword[if] identifier[self] . identifier[_get_parameter] ( identifier[element] ): keyword[for] identifier[b] keyword[in] identifier[incompatibles] [ identifier[element] ]: keyword[if] identifier[self] . identifier[_get_parameter] ( identifier[b] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[element] , identifier[b] )) keyword[else] : keyword[for] identifier[a] keyword[in] identifier[element] : keyword[if] identifier[self] . identifier[_get_parameter] ( identifier[a] ): keyword[for] identifier[b] keyword[in] identifier[incompatibles] [ identifier[a] ]: keyword[if] identifier[self] . identifier[_get_parameter] ( identifier[b] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[a] , identifier[b] ))
def _validate_incompatibilities(self, incompatibles): """Used by the bowtie _validate method (PRIVATE).""" for element in incompatibles: if type(element) is list: i = [a for a in element if self._get_parameter(a)] if len(i) > 1: raise ValueError('Options {} are incompatible'.format(' and '.join(i))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif type(incompatibles) is dict: if self._get_parameter(element): for b in incompatibles[element]: if self._get_parameter(b): raise ValueError('Options %s and %s are incompatible.' % (element, b)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['b']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: for a in element: if self._get_parameter(a): for b in incompatibles[a]: if self._get_parameter(b): raise ValueError('Options %s and %s are incompatible.' % (a, b)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['b']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] # depends on [control=['for'], data=['element']]
def _flattenPortsSide(side: List[LNode]) -> List[LNode]: """ Flatten hierarchical ports on node side """ new_side = [] for i in side: for new_p in flattenPort(i): new_side.append(new_p) return new_side
def function[_flattenPortsSide, parameter[side]]: constant[ Flatten hierarchical ports on node side ] variable[new_side] assign[=] list[[]] for taget[name[i]] in starred[name[side]] begin[:] for taget[name[new_p]] in starred[call[name[flattenPort], parameter[name[i]]]] begin[:] call[name[new_side].append, parameter[name[new_p]]] return[name[new_side]]
keyword[def] identifier[_flattenPortsSide] ( identifier[side] : identifier[List] [ identifier[LNode] ])-> identifier[List] [ identifier[LNode] ]: literal[string] identifier[new_side] =[] keyword[for] identifier[i] keyword[in] identifier[side] : keyword[for] identifier[new_p] keyword[in] identifier[flattenPort] ( identifier[i] ): identifier[new_side] . identifier[append] ( identifier[new_p] ) keyword[return] identifier[new_side]
def _flattenPortsSide(side: List[LNode]) -> List[LNode]: """ Flatten hierarchical ports on node side """ new_side = [] for i in side: for new_p in flattenPort(i): new_side.append(new_p) # depends on [control=['for'], data=['new_p']] # depends on [control=['for'], data=['i']] return new_side
def _parse(self, stream): """Parse a JSON BUILD file. Args: builddata: dictionary of buildfile data reponame: name of the repo that it came from path: directory path within the repo """ builddata = json.load(stream) log.debug('This is a JSON build file.') if 'targets' not in builddata: log.warn('Warning: No targets defined here.') return for tdata in builddata['targets']: # TODO: validate name target = address.new(target=tdata.pop('name'), repo=self.target.repo, path=self.target.path) # Duplicate target definition? Uh oh. if target in self.node and 'target_obj' in self.node[target]: raise error.ButcherError( 'Target is defined more than once: %s', target) rule_obj = targets.new(name=target, ruletype=tdata.pop('type'), **tdata) log.debug('New target: %s', target) self.add_node(target, {'target_obj': rule_obj}) # dep could be ":blabla" or "//foo:blabla" or "//foo/bar:blabla" for dep in rule_obj.composed_deps() or []: d_target = address.new(dep) if not d_target.repo: # ":blabla" d_target.repo = self.target.repo if d_target.repo == self.target.repo and not d_target.path: d_target.path = self.target.path if d_target not in self.nodes(): self.add_node(d_target) log.debug('New dep: %s -> %s', target, d_target) self.add_edge(target, d_target)
def function[_parse, parameter[self, stream]]: constant[Parse a JSON BUILD file. Args: builddata: dictionary of buildfile data reponame: name of the repo that it came from path: directory path within the repo ] variable[builddata] assign[=] call[name[json].load, parameter[name[stream]]] call[name[log].debug, parameter[constant[This is a JSON build file.]]] if compare[constant[targets] <ast.NotIn object at 0x7da2590d7190> name[builddata]] begin[:] call[name[log].warn, parameter[constant[Warning: No targets defined here.]]] return[None] for taget[name[tdata]] in starred[call[name[builddata]][constant[targets]]] begin[:] variable[target] assign[=] call[name[address].new, parameter[]] if <ast.BoolOp object at 0x7da18f09f400> begin[:] <ast.Raise object at 0x7da18f09d660> variable[rule_obj] assign[=] call[name[targets].new, parameter[]] call[name[log].debug, parameter[constant[New target: %s], name[target]]] call[name[self].add_node, parameter[name[target], dictionary[[<ast.Constant object at 0x7da18f09f100>], [<ast.Name object at 0x7da18f09e920>]]]] for taget[name[dep]] in starred[<ast.BoolOp object at 0x7da18f09c100>] begin[:] variable[d_target] assign[=] call[name[address].new, parameter[name[dep]]] if <ast.UnaryOp object at 0x7da18dc99f90> begin[:] name[d_target].repo assign[=] name[self].target.repo if <ast.BoolOp object at 0x7da18dc9aa10> begin[:] name[d_target].path assign[=] name[self].target.path if compare[name[d_target] <ast.NotIn object at 0x7da2590d7190> call[name[self].nodes, parameter[]]] begin[:] call[name[self].add_node, parameter[name[d_target]]] call[name[log].debug, parameter[constant[New dep: %s -> %s], name[target], name[d_target]]] call[name[self].add_edge, parameter[name[target], name[d_target]]]
keyword[def] identifier[_parse] ( identifier[self] , identifier[stream] ): literal[string] identifier[builddata] = identifier[json] . identifier[load] ( identifier[stream] ) identifier[log] . identifier[debug] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[builddata] : identifier[log] . identifier[warn] ( literal[string] ) keyword[return] keyword[for] identifier[tdata] keyword[in] identifier[builddata] [ literal[string] ]: identifier[target] = identifier[address] . identifier[new] ( identifier[target] = identifier[tdata] . identifier[pop] ( literal[string] ), identifier[repo] = identifier[self] . identifier[target] . identifier[repo] , identifier[path] = identifier[self] . identifier[target] . identifier[path] ) keyword[if] identifier[target] keyword[in] identifier[self] . identifier[node] keyword[and] literal[string] keyword[in] identifier[self] . identifier[node] [ identifier[target] ]: keyword[raise] identifier[error] . identifier[ButcherError] ( literal[string] , identifier[target] ) identifier[rule_obj] = identifier[targets] . identifier[new] ( identifier[name] = identifier[target] , identifier[ruletype] = identifier[tdata] . identifier[pop] ( literal[string] ), ** identifier[tdata] ) identifier[log] . identifier[debug] ( literal[string] , identifier[target] ) identifier[self] . identifier[add_node] ( identifier[target] ,{ literal[string] : identifier[rule_obj] }) keyword[for] identifier[dep] keyword[in] identifier[rule_obj] . identifier[composed_deps] () keyword[or] []: identifier[d_target] = identifier[address] . identifier[new] ( identifier[dep] ) keyword[if] keyword[not] identifier[d_target] . identifier[repo] : identifier[d_target] . identifier[repo] = identifier[self] . identifier[target] . identifier[repo] keyword[if] identifier[d_target] . identifier[repo] == identifier[self] . identifier[target] . identifier[repo] keyword[and] keyword[not] identifier[d_target] . identifier[path] : identifier[d_target] . 
identifier[path] = identifier[self] . identifier[target] . identifier[path] keyword[if] identifier[d_target] keyword[not] keyword[in] identifier[self] . identifier[nodes] (): identifier[self] . identifier[add_node] ( identifier[d_target] ) identifier[log] . identifier[debug] ( literal[string] , identifier[target] , identifier[d_target] ) identifier[self] . identifier[add_edge] ( identifier[target] , identifier[d_target] )
def _parse(self, stream): """Parse a JSON BUILD file. Args: builddata: dictionary of buildfile data reponame: name of the repo that it came from path: directory path within the repo """ builddata = json.load(stream) log.debug('This is a JSON build file.') if 'targets' not in builddata: log.warn('Warning: No targets defined here.') return # depends on [control=['if'], data=[]] for tdata in builddata['targets']: # TODO: validate name target = address.new(target=tdata.pop('name'), repo=self.target.repo, path=self.target.path) # Duplicate target definition? Uh oh. if target in self.node and 'target_obj' in self.node[target]: raise error.ButcherError('Target is defined more than once: %s', target) # depends on [control=['if'], data=[]] rule_obj = targets.new(name=target, ruletype=tdata.pop('type'), **tdata) log.debug('New target: %s', target) self.add_node(target, {'target_obj': rule_obj}) # dep could be ":blabla" or "//foo:blabla" or "//foo/bar:blabla" for dep in rule_obj.composed_deps() or []: d_target = address.new(dep) if not d_target.repo: # ":blabla" d_target.repo = self.target.repo # depends on [control=['if'], data=[]] if d_target.repo == self.target.repo and (not d_target.path): d_target.path = self.target.path # depends on [control=['if'], data=[]] if d_target not in self.nodes(): self.add_node(d_target) # depends on [control=['if'], data=['d_target']] log.debug('New dep: %s -> %s', target, d_target) self.add_edge(target, d_target) # depends on [control=['for'], data=['dep']] # depends on [control=['for'], data=['tdata']]
def cmd_posvel(self, args): '''posvel mapclick vN vE vD''' ignoremask = 511 latlon = None try: latlon = self.module('map').click_position except Exception: pass if latlon is None: print ("set latlon to zeros") latlon = [0, 0] else: ignoremask = ignoremask & 504 print ("found latlon", ignoremask) vN = 0 vE = 0 vD = 0 if (len(args) == 3): vN = float(args[0]) vE = float(args[1]) vD = float(args[2]) ignoremask = ignoremask & 455 print ("ignoremask",ignoremask) print (latlon) self.master.mav.set_position_target_global_int_send( 0, # system time in ms 1, # target system 0, # target component mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, ignoremask, # ignore int(latlon[0] * 1e7), int(latlon[1] * 1e7), 10, vN, vE, vD, # velocity 0, 0, 0, # accel x,y,z 0, 0)
def function[cmd_posvel, parameter[self, args]]: constant[posvel mapclick vN vE vD] variable[ignoremask] assign[=] constant[511] variable[latlon] assign[=] constant[None] <ast.Try object at 0x7da1b26ae530> if compare[name[latlon] is constant[None]] begin[:] call[name[print], parameter[constant[set latlon to zeros]]] variable[latlon] assign[=] list[[<ast.Constant object at 0x7da1b26ad9c0>, <ast.Constant object at 0x7da1b26ae5f0>]] variable[vN] assign[=] constant[0] variable[vE] assign[=] constant[0] variable[vD] assign[=] constant[0] if compare[call[name[len], parameter[name[args]]] equal[==] constant[3]] begin[:] variable[vN] assign[=] call[name[float], parameter[call[name[args]][constant[0]]]] variable[vE] assign[=] call[name[float], parameter[call[name[args]][constant[1]]]] variable[vD] assign[=] call[name[float], parameter[call[name[args]][constant[2]]]] variable[ignoremask] assign[=] binary_operation[name[ignoremask] <ast.BitAnd object at 0x7da2590d6b60> constant[455]] call[name[print], parameter[constant[ignoremask], name[ignoremask]]] call[name[print], parameter[name[latlon]]] call[name[self].master.mav.set_position_target_global_int_send, parameter[constant[0], constant[1], constant[0], name[mavutil].mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, name[ignoremask], call[name[int], parameter[binary_operation[call[name[latlon]][constant[0]] * constant[10000000.0]]]], call[name[int], parameter[binary_operation[call[name[latlon]][constant[1]] * constant[10000000.0]]]], constant[10], name[vN], name[vE], name[vD], constant[0], constant[0], constant[0], constant[0], constant[0]]]
keyword[def] identifier[cmd_posvel] ( identifier[self] , identifier[args] ): literal[string] identifier[ignoremask] = literal[int] identifier[latlon] = keyword[None] keyword[try] : identifier[latlon] = identifier[self] . identifier[module] ( literal[string] ). identifier[click_position] keyword[except] identifier[Exception] : keyword[pass] keyword[if] identifier[latlon] keyword[is] keyword[None] : identifier[print] ( literal[string] ) identifier[latlon] =[ literal[int] , literal[int] ] keyword[else] : identifier[ignoremask] = identifier[ignoremask] & literal[int] identifier[print] ( literal[string] , identifier[ignoremask] ) identifier[vN] = literal[int] identifier[vE] = literal[int] identifier[vD] = literal[int] keyword[if] ( identifier[len] ( identifier[args] )== literal[int] ): identifier[vN] = identifier[float] ( identifier[args] [ literal[int] ]) identifier[vE] = identifier[float] ( identifier[args] [ literal[int] ]) identifier[vD] = identifier[float] ( identifier[args] [ literal[int] ]) identifier[ignoremask] = identifier[ignoremask] & literal[int] identifier[print] ( literal[string] , identifier[ignoremask] ) identifier[print] ( identifier[latlon] ) identifier[self] . identifier[master] . identifier[mav] . identifier[set_position_target_global_int_send] ( literal[int] , literal[int] , literal[int] , identifier[mavutil] . identifier[mavlink] . identifier[MAV_FRAME_GLOBAL_RELATIVE_ALT_INT] , identifier[ignoremask] , identifier[int] ( identifier[latlon] [ literal[int] ]* literal[int] ), identifier[int] ( identifier[latlon] [ literal[int] ]* literal[int] ), literal[int] , identifier[vN] , identifier[vE] , identifier[vD] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )
def cmd_posvel(self, args): """posvel mapclick vN vE vD""" ignoremask = 511 latlon = None try: latlon = self.module('map').click_position # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] if latlon is None: print('set latlon to zeros') latlon = [0, 0] # depends on [control=['if'], data=['latlon']] else: ignoremask = ignoremask & 504 print('found latlon', ignoremask) vN = 0 vE = 0 vD = 0 if len(args) == 3: vN = float(args[0]) vE = float(args[1]) vD = float(args[2]) ignoremask = ignoremask & 455 # depends on [control=['if'], data=[]] print('ignoremask', ignoremask) print(latlon) # system time in ms # target system # target component # ignore # velocity # accel x,y,z self.master.mav.set_position_target_global_int_send(0, 1, 0, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, ignoremask, int(latlon[0] * 10000000.0), int(latlon[1] * 10000000.0), 10, vN, vE, vD, 0, 0, 0, 0, 0)
def __hosting_wechat_img(self, content_info, hosting_callback): """将微信明细中图片托管到云端,同时将html页面中的对应图片替换 Parameters ---------- content_info : dict 微信文章明细字典 { 'content_img_list': [], # 从微信文章解析出的原始图片列表 'content_html': '', # 从微信文章解析出文章的内容 } hosting_callback : callable 托管回调函数,传入单个图片链接,返回托管后的图片链接 Returns ------- dict { 'content_img_list': '', # 托管后的图片列表 'content_html': '', # 图片链接为托管后的图片链接内容 } """ assert callable(hosting_callback) content_img_list = content_info.pop("content_img_list") content_html = content_info.pop("content_html") for idx, img_url in enumerate(content_img_list): hosting_img_url = hosting_callback(img_url) if not hosting_img_url: # todo 定义标准异常 raise Exception() content_img_list[idx] = hosting_img_url content_html = content_html.replace(img_url, hosting_img_url) return dict(content_img_list=content_img_list, content_html=content_html)
def function[__hosting_wechat_img, parameter[self, content_info, hosting_callback]]: constant[将微信明细中图片托管到云端,同时将html页面中的对应图片替换 Parameters ---------- content_info : dict 微信文章明细字典 { 'content_img_list': [], # 从微信文章解析出的原始图片列表 'content_html': '', # 从微信文章解析出文章的内容 } hosting_callback : callable 托管回调函数,传入单个图片链接,返回托管后的图片链接 Returns ------- dict { 'content_img_list': '', # 托管后的图片列表 'content_html': '', # 图片链接为托管后的图片链接内容 } ] assert[call[name[callable], parameter[name[hosting_callback]]]] variable[content_img_list] assign[=] call[name[content_info].pop, parameter[constant[content_img_list]]] variable[content_html] assign[=] call[name[content_info].pop, parameter[constant[content_html]]] for taget[tuple[[<ast.Name object at 0x7da1b2046fb0>, <ast.Name object at 0x7da1b20476d0>]]] in starred[call[name[enumerate], parameter[name[content_img_list]]]] begin[:] variable[hosting_img_url] assign[=] call[name[hosting_callback], parameter[name[img_url]]] if <ast.UnaryOp object at 0x7da1b20459c0> begin[:] <ast.Raise object at 0x7da1b20447c0> call[name[content_img_list]][name[idx]] assign[=] name[hosting_img_url] variable[content_html] assign[=] call[name[content_html].replace, parameter[name[img_url], name[hosting_img_url]]] return[call[name[dict], parameter[]]]
keyword[def] identifier[__hosting_wechat_img] ( identifier[self] , identifier[content_info] , identifier[hosting_callback] ): literal[string] keyword[assert] identifier[callable] ( identifier[hosting_callback] ) identifier[content_img_list] = identifier[content_info] . identifier[pop] ( literal[string] ) identifier[content_html] = identifier[content_info] . identifier[pop] ( literal[string] ) keyword[for] identifier[idx] , identifier[img_url] keyword[in] identifier[enumerate] ( identifier[content_img_list] ): identifier[hosting_img_url] = identifier[hosting_callback] ( identifier[img_url] ) keyword[if] keyword[not] identifier[hosting_img_url] : keyword[raise] identifier[Exception] () identifier[content_img_list] [ identifier[idx] ]= identifier[hosting_img_url] identifier[content_html] = identifier[content_html] . identifier[replace] ( identifier[img_url] , identifier[hosting_img_url] ) keyword[return] identifier[dict] ( identifier[content_img_list] = identifier[content_img_list] , identifier[content_html] = identifier[content_html] )
def __hosting_wechat_img(self, content_info, hosting_callback): """将微信明细中图片托管到云端,同时将html页面中的对应图片替换 Parameters ---------- content_info : dict 微信文章明细字典 { 'content_img_list': [], # 从微信文章解析出的原始图片列表 'content_html': '', # 从微信文章解析出文章的内容 } hosting_callback : callable 托管回调函数,传入单个图片链接,返回托管后的图片链接 Returns ------- dict { 'content_img_list': '', # 托管后的图片列表 'content_html': '', # 图片链接为托管后的图片链接内容 } """ assert callable(hosting_callback) content_img_list = content_info.pop('content_img_list') content_html = content_info.pop('content_html') for (idx, img_url) in enumerate(content_img_list): hosting_img_url = hosting_callback(img_url) if not hosting_img_url: # todo 定义标准异常 raise Exception() # depends on [control=['if'], data=[]] content_img_list[idx] = hosting_img_url content_html = content_html.replace(img_url, hosting_img_url) # depends on [control=['for'], data=[]] return dict(content_img_list=content_img_list, content_html=content_html)
def _getFontChars(self, font_files): """ Returns code points of characters included in given font files. """ code_points = set() for font_file in font_files: face = ft.Face(font_file) charcode, agindex = face.get_first_char() while agindex != 0: code_points.add(charcode) charcode, agindex = face.get_next_char(charcode, agindex) return sorted(code_points)
def function[_getFontChars, parameter[self, font_files]]: constant[ Returns code points of characters included in given font files. ] variable[code_points] assign[=] call[name[set], parameter[]] for taget[name[font_file]] in starred[name[font_files]] begin[:] variable[face] assign[=] call[name[ft].Face, parameter[name[font_file]]] <ast.Tuple object at 0x7da18f00ddb0> assign[=] call[name[face].get_first_char, parameter[]] while compare[name[agindex] not_equal[!=] constant[0]] begin[:] call[name[code_points].add, parameter[name[charcode]]] <ast.Tuple object at 0x7da18f00ec50> assign[=] call[name[face].get_next_char, parameter[name[charcode], name[agindex]]] return[call[name[sorted], parameter[name[code_points]]]]
keyword[def] identifier[_getFontChars] ( identifier[self] , identifier[font_files] ): literal[string] identifier[code_points] = identifier[set] () keyword[for] identifier[font_file] keyword[in] identifier[font_files] : identifier[face] = identifier[ft] . identifier[Face] ( identifier[font_file] ) identifier[charcode] , identifier[agindex] = identifier[face] . identifier[get_first_char] () keyword[while] identifier[agindex] != literal[int] : identifier[code_points] . identifier[add] ( identifier[charcode] ) identifier[charcode] , identifier[agindex] = identifier[face] . identifier[get_next_char] ( identifier[charcode] , identifier[agindex] ) keyword[return] identifier[sorted] ( identifier[code_points] )
def _getFontChars(self, font_files): """ Returns code points of characters included in given font files. """ code_points = set() for font_file in font_files: face = ft.Face(font_file) (charcode, agindex) = face.get_first_char() while agindex != 0: code_points.add(charcode) (charcode, agindex) = face.get_next_char(charcode, agindex) # depends on [control=['while'], data=['agindex']] # depends on [control=['for'], data=['font_file']] return sorted(code_points)
def has_permission(self, request): """Check if user has permission""" if not self.object and not self.permission: return True if not self.permission: return request.user.has_perm('{}_{}'.format( self.model_permission, self.object.__class__.__name__.lower()), self.object ) return request.user.has_perm(self.permission)
def function[has_permission, parameter[self, request]]: constant[Check if user has permission] if <ast.BoolOp object at 0x7da18f00ccd0> begin[:] return[constant[True]] if <ast.UnaryOp object at 0x7da18f00e9b0> begin[:] return[call[name[request].user.has_perm, parameter[call[constant[{}_{}].format, parameter[name[self].model_permission, call[name[self].object.__class__.__name__.lower, parameter[]]]], name[self].object]]] return[call[name[request].user.has_perm, parameter[name[self].permission]]]
keyword[def] identifier[has_permission] ( identifier[self] , identifier[request] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[object] keyword[and] keyword[not] identifier[self] . identifier[permission] : keyword[return] keyword[True] keyword[if] keyword[not] identifier[self] . identifier[permission] : keyword[return] identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] . identifier[format] ( identifier[self] . identifier[model_permission] , identifier[self] . identifier[object] . identifier[__class__] . identifier[__name__] . identifier[lower] ()), identifier[self] . identifier[object] ) keyword[return] identifier[request] . identifier[user] . identifier[has_perm] ( identifier[self] . identifier[permission] )
def has_permission(self, request): """Check if user has permission""" if not self.object and (not self.permission): return True # depends on [control=['if'], data=[]] if not self.permission: return request.user.has_perm('{}_{}'.format(self.model_permission, self.object.__class__.__name__.lower()), self.object) # depends on [control=['if'], data=[]] return request.user.has_perm(self.permission)
def as_iso8601(self, include_millis=False): """ example: 2016-08-13T00:38:05.210+01:00 """ date = self.__datetime.strftime("%Y-%m-%d") time = self.__datetime.strftime("%H:%M:%S") # millis... if include_millis: micros = float(self.__datetime.strftime("%f")) millis = ".%03d" % (micros // 1000) else: millis = "" # time zone... zone = self.__datetime.strftime("%z") # Z format... if float(zone[1:]) == 0.0: return "%sT%s%sZ" % (date, time, millis) # numeric format... zone_hours = zone[:3] zone_mins = zone[3:] return "%sT%s%s%s:%s" % (date, time, millis, zone_hours, zone_mins)
def function[as_iso8601, parameter[self, include_millis]]: constant[ example: 2016-08-13T00:38:05.210+01:00 ] variable[date] assign[=] call[name[self].__datetime.strftime, parameter[constant[%Y-%m-%d]]] variable[time] assign[=] call[name[self].__datetime.strftime, parameter[constant[%H:%M:%S]]] if name[include_millis] begin[:] variable[micros] assign[=] call[name[float], parameter[call[name[self].__datetime.strftime, parameter[constant[%f]]]]] variable[millis] assign[=] binary_operation[constant[.%03d] <ast.Mod object at 0x7da2590d6920> binary_operation[name[micros] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1000]]] variable[zone] assign[=] call[name[self].__datetime.strftime, parameter[constant[%z]]] if compare[call[name[float], parameter[call[name[zone]][<ast.Slice object at 0x7da18bc715d0>]]] equal[==] constant[0.0]] begin[:] return[binary_operation[constant[%sT%s%sZ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc712a0>, <ast.Name object at 0x7da18bc72d10>, <ast.Name object at 0x7da18bc73220>]]]] variable[zone_hours] assign[=] call[name[zone]][<ast.Slice object at 0x7da2044c2290>] variable[zone_mins] assign[=] call[name[zone]][<ast.Slice object at 0x7da2044c2a10>] return[binary_operation[constant[%sT%s%s%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20eb28c10>, <ast.Name object at 0x7da20eb29060>, <ast.Name object at 0x7da20eb29fc0>, <ast.Name object at 0x7da20eb28f70>, <ast.Name object at 0x7da20eb286d0>]]]]
keyword[def] identifier[as_iso8601] ( identifier[self] , identifier[include_millis] = keyword[False] ): literal[string] identifier[date] = identifier[self] . identifier[__datetime] . identifier[strftime] ( literal[string] ) identifier[time] = identifier[self] . identifier[__datetime] . identifier[strftime] ( literal[string] ) keyword[if] identifier[include_millis] : identifier[micros] = identifier[float] ( identifier[self] . identifier[__datetime] . identifier[strftime] ( literal[string] )) identifier[millis] = literal[string] %( identifier[micros] // literal[int] ) keyword[else] : identifier[millis] = literal[string] identifier[zone] = identifier[self] . identifier[__datetime] . identifier[strftime] ( literal[string] ) keyword[if] identifier[float] ( identifier[zone] [ literal[int] :])== literal[int] : keyword[return] literal[string] %( identifier[date] , identifier[time] , identifier[millis] ) identifier[zone_hours] = identifier[zone] [: literal[int] ] identifier[zone_mins] = identifier[zone] [ literal[int] :] keyword[return] literal[string] %( identifier[date] , identifier[time] , identifier[millis] , identifier[zone_hours] , identifier[zone_mins] )
def as_iso8601(self, include_millis=False): """ example: 2016-08-13T00:38:05.210+01:00 """ date = self.__datetime.strftime('%Y-%m-%d') time = self.__datetime.strftime('%H:%M:%S') # millis... if include_millis: micros = float(self.__datetime.strftime('%f')) millis = '.%03d' % (micros // 1000) # depends on [control=['if'], data=[]] else: millis = '' # time zone... zone = self.__datetime.strftime('%z') # Z format... if float(zone[1:]) == 0.0: return '%sT%s%sZ' % (date, time, millis) # depends on [control=['if'], data=[]] # numeric format... zone_hours = zone[:3] zone_mins = zone[3:] return '%sT%s%s%s:%s' % (date, time, millis, zone_hours, zone_mins)
def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, highlight=False, **qualifiers): """ :calls: `GET /search/code <http://developer.github.com/v3/search>`_ :param query: string :param sort: string ('indexed') :param order: string ('asc', 'desc') :param highlight: boolean (True, False) :param qualifiers: keyword dict query qualifiers :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.ContentFile.ContentFile` """ assert isinstance(query, (str, unicode)), query url_parameters = dict() if sort is not github.GithubObject.NotSet: # pragma no branch (Should be covered) assert sort in ('indexed',), sort url_parameters["sort"] = sort if order is not github.GithubObject.NotSet: # pragma no branch (Should be covered) assert order in ('asc', 'desc'), order url_parameters["order"] = order query_chunks = [] if query: # pragma no branch (Should be covered) query_chunks.append(query) for qualifier, value in qualifiers.items(): query_chunks.append("%s:%s" % (qualifier, value)) url_parameters["q"] = ' '.join(query_chunks) assert url_parameters["q"], "need at least one qualifier" headers = {"Accept": Consts.highLightSearchPreview} if highlight else None return github.PaginatedList.PaginatedList( github.ContentFile.ContentFile, self.__requester, "/search/code", url_parameters, headers=headers )
def function[search_code, parameter[self, query, sort, order, highlight]]: constant[ :calls: `GET /search/code <http://developer.github.com/v3/search>`_ :param query: string :param sort: string ('indexed') :param order: string ('asc', 'desc') :param highlight: boolean (True, False) :param qualifiers: keyword dict query qualifiers :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.ContentFile.ContentFile` ] assert[call[name[isinstance], parameter[name[query], tuple[[<ast.Name object at 0x7da1b1f495d0>, <ast.Name object at 0x7da1b1f49ab0>]]]]] variable[url_parameters] assign[=] call[name[dict], parameter[]] if compare[name[sort] is_not name[github].GithubObject.NotSet] begin[:] assert[compare[name[sort] in tuple[[<ast.Constant object at 0x7da1b1f4b5b0>]]]] call[name[url_parameters]][constant[sort]] assign[=] name[sort] if compare[name[order] is_not name[github].GithubObject.NotSet] begin[:] assert[compare[name[order] in tuple[[<ast.Constant object at 0x7da1b1f499c0>, <ast.Constant object at 0x7da1b1f495a0>]]]] call[name[url_parameters]][constant[order]] assign[=] name[order] variable[query_chunks] assign[=] list[[]] if name[query] begin[:] call[name[query_chunks].append, parameter[name[query]]] for taget[tuple[[<ast.Name object at 0x7da1b1f49240>, <ast.Name object at 0x7da1b1f49060>]]] in starred[call[name[qualifiers].items, parameter[]]] begin[:] call[name[query_chunks].append, parameter[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1f481f0>, <ast.Name object at 0x7da1b1f49c30>]]]]] call[name[url_parameters]][constant[q]] assign[=] call[constant[ ].join, parameter[name[query_chunks]]] assert[call[name[url_parameters]][constant[q]]] variable[headers] assign[=] <ast.IfExp object at 0x7da1b1f48d90> return[call[name[github].PaginatedList.PaginatedList, parameter[name[github].ContentFile.ContentFile, name[self].__requester, constant[/search/code], name[url_parameters]]]]
keyword[def] identifier[search_code] ( identifier[self] , identifier[query] , identifier[sort] = identifier[github] . identifier[GithubObject] . identifier[NotSet] , identifier[order] = identifier[github] . identifier[GithubObject] . identifier[NotSet] , identifier[highlight] = keyword[False] ,** identifier[qualifiers] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[query] ,( identifier[str] , identifier[unicode] )), identifier[query] identifier[url_parameters] = identifier[dict] () keyword[if] identifier[sort] keyword[is] keyword[not] identifier[github] . identifier[GithubObject] . identifier[NotSet] : keyword[assert] identifier[sort] keyword[in] ( literal[string] ,), identifier[sort] identifier[url_parameters] [ literal[string] ]= identifier[sort] keyword[if] identifier[order] keyword[is] keyword[not] identifier[github] . identifier[GithubObject] . identifier[NotSet] : keyword[assert] identifier[order] keyword[in] ( literal[string] , literal[string] ), identifier[order] identifier[url_parameters] [ literal[string] ]= identifier[order] identifier[query_chunks] =[] keyword[if] identifier[query] : identifier[query_chunks] . identifier[append] ( identifier[query] ) keyword[for] identifier[qualifier] , identifier[value] keyword[in] identifier[qualifiers] . identifier[items] (): identifier[query_chunks] . identifier[append] ( literal[string] %( identifier[qualifier] , identifier[value] )) identifier[url_parameters] [ literal[string] ]= literal[string] . identifier[join] ( identifier[query_chunks] ) keyword[assert] identifier[url_parameters] [ literal[string] ], literal[string] identifier[headers] ={ literal[string] : identifier[Consts] . identifier[highLightSearchPreview] } keyword[if] identifier[highlight] keyword[else] keyword[None] keyword[return] identifier[github] . identifier[PaginatedList] . identifier[PaginatedList] ( identifier[github] . identifier[ContentFile] . identifier[ContentFile] , identifier[self] . 
identifier[__requester] , literal[string] , identifier[url_parameters] , identifier[headers] = identifier[headers] )
def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, highlight=False, **qualifiers): """ :calls: `GET /search/code <http://developer.github.com/v3/search>`_ :param query: string :param sort: string ('indexed') :param order: string ('asc', 'desc') :param highlight: boolean (True, False) :param qualifiers: keyword dict query qualifiers :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.ContentFile.ContentFile` """ assert isinstance(query, (str, unicode)), query url_parameters = dict() if sort is not github.GithubObject.NotSet: # pragma no branch (Should be covered) assert sort in ('indexed',), sort url_parameters['sort'] = sort # depends on [control=['if'], data=['sort']] if order is not github.GithubObject.NotSet: # pragma no branch (Should be covered) assert order in ('asc', 'desc'), order url_parameters['order'] = order # depends on [control=['if'], data=['order']] query_chunks = [] if query: # pragma no branch (Should be covered) query_chunks.append(query) # depends on [control=['if'], data=[]] for (qualifier, value) in qualifiers.items(): query_chunks.append('%s:%s' % (qualifier, value)) # depends on [control=['for'], data=[]] url_parameters['q'] = ' '.join(query_chunks) assert url_parameters['q'], 'need at least one qualifier' headers = {'Accept': Consts.highLightSearchPreview} if highlight else None return github.PaginatedList.PaginatedList(github.ContentFile.ContentFile, self.__requester, '/search/code', url_parameters, headers=headers)
def loop_in_background(interval, callback): """ When entering the context, spawns a greenlet that sleeps for `interval` seconds between `callback` executions. When leaving the context stops the greenlet. The yielded object is the `GeventLoop` object so the loop can be stopped from within the context. For example: ``` with loop_in_background(60.0, purge_cache) as purge_cache_job: ... ... if should_stop_cache(): purge_cache_job.stop() ``` """ loop = GeventLoop(interval, callback) loop.start() try: yield loop finally: if loop.has_started(): loop.stop()
def function[loop_in_background, parameter[interval, callback]]: constant[ When entering the context, spawns a greenlet that sleeps for `interval` seconds between `callback` executions. When leaving the context stops the greenlet. The yielded object is the `GeventLoop` object so the loop can be stopped from within the context. For example: ``` with loop_in_background(60.0, purge_cache) as purge_cache_job: ... ... if should_stop_cache(): purge_cache_job.stop() ``` ] variable[loop] assign[=] call[name[GeventLoop], parameter[name[interval], name[callback]]] call[name[loop].start, parameter[]] <ast.Try object at 0x7da2054a6bf0>
keyword[def] identifier[loop_in_background] ( identifier[interval] , identifier[callback] ): literal[string] identifier[loop] = identifier[GeventLoop] ( identifier[interval] , identifier[callback] ) identifier[loop] . identifier[start] () keyword[try] : keyword[yield] identifier[loop] keyword[finally] : keyword[if] identifier[loop] . identifier[has_started] (): identifier[loop] . identifier[stop] ()
def loop_in_background(interval, callback): """ When entering the context, spawns a greenlet that sleeps for `interval` seconds between `callback` executions. When leaving the context stops the greenlet. The yielded object is the `GeventLoop` object so the loop can be stopped from within the context. For example: ``` with loop_in_background(60.0, purge_cache) as purge_cache_job: ... ... if should_stop_cache(): purge_cache_job.stop() ``` """ loop = GeventLoop(interval, callback) loop.start() try: yield loop # depends on [control=['try'], data=[]] finally: if loop.has_started(): loop.stop() # depends on [control=['if'], data=[]]
def mkdir_p(path): '''mkdir_p attempts to get the same functionality as mkdir -p :param path: the path to create. ''' try: os.makedirs(path) except OSError as e: if e.errno == errno.EEXIST and os.path.isdir(path): pass else: bot.error("Error creating path %s, exiting." % path) sys.exit(1)
def function[mkdir_p, parameter[path]]: constant[mkdir_p attempts to get the same functionality as mkdir -p :param path: the path to create. ] <ast.Try object at 0x7da1b03bb9d0>
keyword[def] identifier[mkdir_p] ( identifier[path] ): literal[string] keyword[try] : identifier[os] . identifier[makedirs] ( identifier[path] ) keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[errno] == identifier[errno] . identifier[EEXIST] keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): keyword[pass] keyword[else] : identifier[bot] . identifier[error] ( literal[string] % identifier[path] ) identifier[sys] . identifier[exit] ( literal[int] )
def mkdir_p(path): """mkdir_p attempts to get the same functionality as mkdir -p :param path: the path to create. """ try: os.makedirs(path) # depends on [control=['try'], data=[]] except OSError as e: if e.errno == errno.EEXIST and os.path.isdir(path): pass # depends on [control=['if'], data=[]] else: bot.error('Error creating path %s, exiting.' % path) sys.exit(1) # depends on [control=['except'], data=['e']]
def render(self, context, instance, placeholder): """ Update the context with plugin's data """ context = super(CMSRandomEntriesPlugin, self).render( context, instance, placeholder) context['template_to_render'] = (str(instance.template_to_render) or 'zinnia/tags/entries_random.html') return context
def function[render, parameter[self, context, instance, placeholder]]: constant[ Update the context with plugin's data ] variable[context] assign[=] call[call[name[super], parameter[name[CMSRandomEntriesPlugin], name[self]]].render, parameter[name[context], name[instance], name[placeholder]]] call[name[context]][constant[template_to_render]] assign[=] <ast.BoolOp object at 0x7da1b0ca23e0> return[name[context]]
keyword[def] identifier[render] ( identifier[self] , identifier[context] , identifier[instance] , identifier[placeholder] ): literal[string] identifier[context] = identifier[super] ( identifier[CMSRandomEntriesPlugin] , identifier[self] ). identifier[render] ( identifier[context] , identifier[instance] , identifier[placeholder] ) identifier[context] [ literal[string] ]=( identifier[str] ( identifier[instance] . identifier[template_to_render] ) keyword[or] literal[string] ) keyword[return] identifier[context]
def render(self, context, instance, placeholder): """ Update the context with plugin's data """ context = super(CMSRandomEntriesPlugin, self).render(context, instance, placeholder) context['template_to_render'] = str(instance.template_to_render) or 'zinnia/tags/entries_random.html' return context
def split_term_lower(cls, term): """ Like split_term, but also lowercases both parent and record term :param term: combined term text :return: Tuple of parent and record term """ return tuple(e.lower() for e in Term.split_term(term))
def function[split_term_lower, parameter[cls, term]]: constant[ Like split_term, but also lowercases both parent and record term :param term: combined term text :return: Tuple of parent and record term ] return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18c4cecb0>]]]
keyword[def] identifier[split_term_lower] ( identifier[cls] , identifier[term] ): literal[string] keyword[return] identifier[tuple] ( identifier[e] . identifier[lower] () keyword[for] identifier[e] keyword[in] identifier[Term] . identifier[split_term] ( identifier[term] ))
def split_term_lower(cls, term): """ Like split_term, but also lowercases both parent and record term :param term: combined term text :return: Tuple of parent and record term """ return tuple((e.lower() for e in Term.split_term(term)))
def on_open_output_tool_clicked(self): """Autoconnect slot activated when open output tool button is clicked. """ output_path = self.output_path.text() if not output_path: output_path = os.path.expanduser('~') # noinspection PyCallByClass,PyTypeChecker filename, __ = QFileDialog.getSaveFileName( self, tr('Output file'), output_path, tr('Raster file (*.tif)')) if filename: self.output_path.setText(filename)
def function[on_open_output_tool_clicked, parameter[self]]: constant[Autoconnect slot activated when open output tool button is clicked. ] variable[output_path] assign[=] call[name[self].output_path.text, parameter[]] if <ast.UnaryOp object at 0x7da1b0cef190> begin[:] variable[output_path] assign[=] call[name[os].path.expanduser, parameter[constant[~]]] <ast.Tuple object at 0x7da1b0ceee00> assign[=] call[name[QFileDialog].getSaveFileName, parameter[name[self], call[name[tr], parameter[constant[Output file]]], name[output_path], call[name[tr], parameter[constant[Raster file (*.tif)]]]]] if name[filename] begin[:] call[name[self].output_path.setText, parameter[name[filename]]]
keyword[def] identifier[on_open_output_tool_clicked] ( identifier[self] ): literal[string] identifier[output_path] = identifier[self] . identifier[output_path] . identifier[text] () keyword[if] keyword[not] identifier[output_path] : identifier[output_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ) identifier[filename] , identifier[__] = identifier[QFileDialog] . identifier[getSaveFileName] ( identifier[self] , identifier[tr] ( literal[string] ), identifier[output_path] , identifier[tr] ( literal[string] )) keyword[if] identifier[filename] : identifier[self] . identifier[output_path] . identifier[setText] ( identifier[filename] )
def on_open_output_tool_clicked(self): """Autoconnect slot activated when open output tool button is clicked. """ output_path = self.output_path.text() if not output_path: output_path = os.path.expanduser('~') # depends on [control=['if'], data=[]] # noinspection PyCallByClass,PyTypeChecker (filename, __) = QFileDialog.getSaveFileName(self, tr('Output file'), output_path, tr('Raster file (*.tif)')) if filename: self.output_path.setText(filename) # depends on [control=['if'], data=[]]
def clear_breakpoints(self): """Clear breakpoints""" self.breakpoints = [] for data in self.editor.blockuserdata_list[:]: data.breakpoint = False # data.breakpoint_condition = None # not necessary, but logical if data.is_empty(): # This is not calling the __del__ in BlockUserData. Not # sure if it's supposed to or not, but that seems to be the # intent. del data
def function[clear_breakpoints, parameter[self]]: constant[Clear breakpoints] name[self].breakpoints assign[=] list[[]] for taget[name[data]] in starred[call[name[self].editor.blockuserdata_list][<ast.Slice object at 0x7da1b2042980>]] begin[:] name[data].breakpoint assign[=] constant[False] if call[name[data].is_empty, parameter[]] begin[:] <ast.Delete object at 0x7da1b2042cb0>
keyword[def] identifier[clear_breakpoints] ( identifier[self] ): literal[string] identifier[self] . identifier[breakpoints] =[] keyword[for] identifier[data] keyword[in] identifier[self] . identifier[editor] . identifier[blockuserdata_list] [:]: identifier[data] . identifier[breakpoint] = keyword[False] keyword[if] identifier[data] . identifier[is_empty] (): keyword[del] identifier[data]
def clear_breakpoints(self): """Clear breakpoints""" self.breakpoints = [] for data in self.editor.blockuserdata_list[:]: data.breakpoint = False # data.breakpoint_condition = None # not necessary, but logical if data.is_empty(): # This is not calling the __del__ in BlockUserData. Not # sure if it's supposed to or not, but that seems to be the # intent. del data # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['data']]
def create_sysdig_capture(self, hostname, capture_name, duration, capture_filter='', folder='/'): '''**Description** Create a new sysdig capture. The capture will be immediately started. **Arguments** - **hostname**: the hostname of the instrumented host where the capture will be taken. - **capture_name**: the name of the capture. - **duration**: the duration of the capture, in seconds. - **capture_filter**: a sysdig filter expression. - **folder**: directory in the S3 bucket where the capture will be saved. **Success Return Value** A dictionary showing the details of the new capture. **Example** `examples/create_sysdig_capture.py <https://github.com/draios/python-sdc-client/blob/master/examples/create_sysdig_capture.py>`_ ''' res = self.get_connected_agents() if not res[0]: return res capture_agent = None for agent in res[1]: if hostname == agent['hostName']: capture_agent = agent break if capture_agent is None: return [False, hostname + ' not found'] data = { 'agent': capture_agent, 'name': capture_name, 'duration': duration, 'folder': folder, 'filters': capture_filter, 'bucketName': '', 'source': self.product } res = requests.post(self.url + '/api/sysdig', headers=self.hdrs, data=json.dumps(data), verify=self.ssl_verify) return self._request_result(res)
def function[create_sysdig_capture, parameter[self, hostname, capture_name, duration, capture_filter, folder]]: constant[**Description** Create a new sysdig capture. The capture will be immediately started. **Arguments** - **hostname**: the hostname of the instrumented host where the capture will be taken. - **capture_name**: the name of the capture. - **duration**: the duration of the capture, in seconds. - **capture_filter**: a sysdig filter expression. - **folder**: directory in the S3 bucket where the capture will be saved. **Success Return Value** A dictionary showing the details of the new capture. **Example** `examples/create_sysdig_capture.py <https://github.com/draios/python-sdc-client/blob/master/examples/create_sysdig_capture.py>`_ ] variable[res] assign[=] call[name[self].get_connected_agents, parameter[]] if <ast.UnaryOp object at 0x7da1b024dff0> begin[:] return[name[res]] variable[capture_agent] assign[=] constant[None] for taget[name[agent]] in starred[call[name[res]][constant[1]]] begin[:] if compare[name[hostname] equal[==] call[name[agent]][constant[hostName]]] begin[:] variable[capture_agent] assign[=] name[agent] break if compare[name[capture_agent] is constant[None]] begin[:] return[list[[<ast.Constant object at 0x7da1b024e590>, <ast.BinOp object at 0x7da1b024d9f0>]]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b024f2b0>, <ast.Constant object at 0x7da1b024f6d0>, <ast.Constant object at 0x7da1b024fc70>, <ast.Constant object at 0x7da1b024c940>, <ast.Constant object at 0x7da1b024e560>, <ast.Constant object at 0x7da1b024c3a0>, <ast.Constant object at 0x7da1b024e350>], [<ast.Name object at 0x7da1b024dde0>, <ast.Name object at 0x7da1b024eb30>, <ast.Name object at 0x7da1b024ff10>, <ast.Name object at 0x7da1b024fca0>, <ast.Name object at 0x7da1b024c970>, <ast.Constant object at 0x7da1b024fa90>, <ast.Attribute object at 0x7da1b024cc40>]] variable[res] assign[=] call[name[requests].post, parameter[binary_operation[name[self].url + 
constant[/api/sysdig]]]] return[call[name[self]._request_result, parameter[name[res]]]]
keyword[def] identifier[create_sysdig_capture] ( identifier[self] , identifier[hostname] , identifier[capture_name] , identifier[duration] , identifier[capture_filter] = literal[string] , identifier[folder] = literal[string] ): literal[string] identifier[res] = identifier[self] . identifier[get_connected_agents] () keyword[if] keyword[not] identifier[res] [ literal[int] ]: keyword[return] identifier[res] identifier[capture_agent] = keyword[None] keyword[for] identifier[agent] keyword[in] identifier[res] [ literal[int] ]: keyword[if] identifier[hostname] == identifier[agent] [ literal[string] ]: identifier[capture_agent] = identifier[agent] keyword[break] keyword[if] identifier[capture_agent] keyword[is] keyword[None] : keyword[return] [ keyword[False] , identifier[hostname] + literal[string] ] identifier[data] ={ literal[string] : identifier[capture_agent] , literal[string] : identifier[capture_name] , literal[string] : identifier[duration] , literal[string] : identifier[folder] , literal[string] : identifier[capture_filter] , literal[string] : literal[string] , literal[string] : identifier[self] . identifier[product] } identifier[res] = identifier[requests] . identifier[post] ( identifier[self] . identifier[url] + literal[string] , identifier[headers] = identifier[self] . identifier[hdrs] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ), identifier[verify] = identifier[self] . identifier[ssl_verify] ) keyword[return] identifier[self] . identifier[_request_result] ( identifier[res] )
def create_sysdig_capture(self, hostname, capture_name, duration, capture_filter='', folder='/'): """**Description** Create a new sysdig capture. The capture will be immediately started. **Arguments** - **hostname**: the hostname of the instrumented host where the capture will be taken. - **capture_name**: the name of the capture. - **duration**: the duration of the capture, in seconds. - **capture_filter**: a sysdig filter expression. - **folder**: directory in the S3 bucket where the capture will be saved. **Success Return Value** A dictionary showing the details of the new capture. **Example** `examples/create_sysdig_capture.py <https://github.com/draios/python-sdc-client/blob/master/examples/create_sysdig_capture.py>`_ """ res = self.get_connected_agents() if not res[0]: return res # depends on [control=['if'], data=[]] capture_agent = None for agent in res[1]: if hostname == agent['hostName']: capture_agent = agent break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['agent']] if capture_agent is None: return [False, hostname + ' not found'] # depends on [control=['if'], data=[]] data = {'agent': capture_agent, 'name': capture_name, 'duration': duration, 'folder': folder, 'filters': capture_filter, 'bucketName': '', 'source': self.product} res = requests.post(self.url + '/api/sysdig', headers=self.hdrs, data=json.dumps(data), verify=self.ssl_verify) return self._request_result(res)
def remove_feature(self, feature=None, **kwargs): """ [NOT IMPLEMENTED] Remove a 'feature' from the bundle :raises NotImplementedError: because this isn't implemented yet """ self._kwargs_checks(kwargs) # Let's avoid deleting ALL features from the matching contexts if feature is None and not len(kwargs.items()): raise ValueError("must provide some value to filter for features") kwargs['feature'] = feature # Let's avoid the possibility of deleting a single parameter kwargs['qualifier'] = None # Let's also avoid the possibility of accidentally deleting system # parameters, etc kwargs.setdefault('context', ['feature']) self.remove_parameters_all(**kwargs) self._add_history(redo_func='remove_feature', redo_kwargs=kwargs, undo_func=None, undo_kwargs={}) return
def function[remove_feature, parameter[self, feature]]: constant[ [NOT IMPLEMENTED] Remove a 'feature' from the bundle :raises NotImplementedError: because this isn't implemented yet ] call[name[self]._kwargs_checks, parameter[name[kwargs]]] if <ast.BoolOp object at 0x7da18f811ea0> begin[:] <ast.Raise object at 0x7da18f812b30> call[name[kwargs]][constant[feature]] assign[=] name[feature] call[name[kwargs]][constant[qualifier]] assign[=] constant[None] call[name[kwargs].setdefault, parameter[constant[context], list[[<ast.Constant object at 0x7da18f810880>]]]] call[name[self].remove_parameters_all, parameter[]] call[name[self]._add_history, parameter[]] return[None]
keyword[def] identifier[remove_feature] ( identifier[self] , identifier[feature] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[_kwargs_checks] ( identifier[kwargs] ) keyword[if] identifier[feature] keyword[is] keyword[None] keyword[and] keyword[not] identifier[len] ( identifier[kwargs] . identifier[items] ()): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[kwargs] [ literal[string] ]= identifier[feature] identifier[kwargs] [ literal[string] ]= keyword[None] identifier[kwargs] . identifier[setdefault] ( literal[string] ,[ literal[string] ]) identifier[self] . identifier[remove_parameters_all] (** identifier[kwargs] ) identifier[self] . identifier[_add_history] ( identifier[redo_func] = literal[string] , identifier[redo_kwargs] = identifier[kwargs] , identifier[undo_func] = keyword[None] , identifier[undo_kwargs] ={}) keyword[return]
def remove_feature(self, feature=None, **kwargs): """ [NOT IMPLEMENTED] Remove a 'feature' from the bundle :raises NotImplementedError: because this isn't implemented yet """ self._kwargs_checks(kwargs) # Let's avoid deleting ALL features from the matching contexts if feature is None and (not len(kwargs.items())): raise ValueError('must provide some value to filter for features') # depends on [control=['if'], data=[]] kwargs['feature'] = feature # Let's avoid the possibility of deleting a single parameter kwargs['qualifier'] = None # Let's also avoid the possibility of accidentally deleting system # parameters, etc kwargs.setdefault('context', ['feature']) self.remove_parameters_all(**kwargs) self._add_history(redo_func='remove_feature', redo_kwargs=kwargs, undo_func=None, undo_kwargs={}) return
def is_dataset_on_gcs(dataset_name): """If the dataset is available on the GCS bucket gs://tfds-data/datasets.""" dir_name = posixpath.join(GCS_DATASETS_DIR, dataset_name) return len(gcs_files(prefix_filter=dir_name)) > 2
def function[is_dataset_on_gcs, parameter[dataset_name]]: constant[If the dataset is available on the GCS bucket gs://tfds-data/datasets.] variable[dir_name] assign[=] call[name[posixpath].join, parameter[name[GCS_DATASETS_DIR], name[dataset_name]]] return[compare[call[name[len], parameter[call[name[gcs_files], parameter[]]]] greater[>] constant[2]]]
keyword[def] identifier[is_dataset_on_gcs] ( identifier[dataset_name] ): literal[string] identifier[dir_name] = identifier[posixpath] . identifier[join] ( identifier[GCS_DATASETS_DIR] , identifier[dataset_name] ) keyword[return] identifier[len] ( identifier[gcs_files] ( identifier[prefix_filter] = identifier[dir_name] ))> literal[int]
def is_dataset_on_gcs(dataset_name): """If the dataset is available on the GCS bucket gs://tfds-data/datasets.""" dir_name = posixpath.join(GCS_DATASETS_DIR, dataset_name) return len(gcs_files(prefix_filter=dir_name)) > 2
def multi_arange(n): """By example: # 0 1 2 3 4 5 6 7 8 n = [0, 0, 3, 0, 0, 2, 0, 2, 1] res = [0, 1, 2, 0, 1, 0, 1, 0] That is it is equivalent to something like this : hstack((arange(n_i) for n_i in n)) This version seems quite a bit faster, at least for some possible inputs, and at any rate it encapsulates a task in a function. """ if n.ndim != 1: raise ValueError("n is supposed to be 1d array.") n_mask = n.astype(bool) n_cumsum = np.cumsum(n) ret = np.ones(n_cumsum[-1] + 1, dtype=int) ret[n_cumsum[n_mask]] -= n[n_mask] ret[0] -= 1 return np.cumsum(ret)[:-1]
def function[multi_arange, parameter[n]]: constant[By example: # 0 1 2 3 4 5 6 7 8 n = [0, 0, 3, 0, 0, 2, 0, 2, 1] res = [0, 1, 2, 0, 1, 0, 1, 0] That is it is equivalent to something like this : hstack((arange(n_i) for n_i in n)) This version seems quite a bit faster, at least for some possible inputs, and at any rate it encapsulates a task in a function. ] if compare[name[n].ndim not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b2347d90> variable[n_mask] assign[=] call[name[n].astype, parameter[name[bool]]] variable[n_cumsum] assign[=] call[name[np].cumsum, parameter[name[n]]] variable[ret] assign[=] call[name[np].ones, parameter[binary_operation[call[name[n_cumsum]][<ast.UnaryOp object at 0x7da1b2344bb0>] + constant[1]]]] <ast.AugAssign object at 0x7da1b2347dc0> <ast.AugAssign object at 0x7da1b23444f0> return[call[call[name[np].cumsum, parameter[name[ret]]]][<ast.Slice object at 0x7da1b2347fd0>]]
keyword[def] identifier[multi_arange] ( identifier[n] ): literal[string] keyword[if] identifier[n] . identifier[ndim] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[n_mask] = identifier[n] . identifier[astype] ( identifier[bool] ) identifier[n_cumsum] = identifier[np] . identifier[cumsum] ( identifier[n] ) identifier[ret] = identifier[np] . identifier[ones] ( identifier[n_cumsum] [- literal[int] ]+ literal[int] , identifier[dtype] = identifier[int] ) identifier[ret] [ identifier[n_cumsum] [ identifier[n_mask] ]]-= identifier[n] [ identifier[n_mask] ] identifier[ret] [ literal[int] ]-= literal[int] keyword[return] identifier[np] . identifier[cumsum] ( identifier[ret] )[:- literal[int] ]
def multi_arange(n): """By example: # 0 1 2 3 4 5 6 7 8 n = [0, 0, 3, 0, 0, 2, 0, 2, 1] res = [0, 1, 2, 0, 1, 0, 1, 0] That is it is equivalent to something like this : hstack((arange(n_i) for n_i in n)) This version seems quite a bit faster, at least for some possible inputs, and at any rate it encapsulates a task in a function. """ if n.ndim != 1: raise ValueError('n is supposed to be 1d array.') # depends on [control=['if'], data=[]] n_mask = n.astype(bool) n_cumsum = np.cumsum(n) ret = np.ones(n_cumsum[-1] + 1, dtype=int) ret[n_cumsum[n_mask]] -= n[n_mask] ret[0] -= 1 return np.cumsum(ret)[:-1]
def t_fold_end(self, t): r'\n+\ *' column = find_column(t) indent = self.indent_stack[-1] if column < indent: rollback_lexpos(t) if column <= indent: t.lexer.pop_state() t.type = 'B_FOLD_END' if column > indent: t.type = 'SCALAR' return t
def function[t_fold_end, parameter[self, t]]: constant[\n+\ *] variable[column] assign[=] call[name[find_column], parameter[name[t]]] variable[indent] assign[=] call[name[self].indent_stack][<ast.UnaryOp object at 0x7da1b2567220>] if compare[name[column] less[<] name[indent]] begin[:] call[name[rollback_lexpos], parameter[name[t]]] if compare[name[column] less_or_equal[<=] name[indent]] begin[:] call[name[t].lexer.pop_state, parameter[]] name[t].type assign[=] constant[B_FOLD_END] if compare[name[column] greater[>] name[indent]] begin[:] name[t].type assign[=] constant[SCALAR] return[name[t]]
keyword[def] identifier[t_fold_end] ( identifier[self] , identifier[t] ): literal[string] identifier[column] = identifier[find_column] ( identifier[t] ) identifier[indent] = identifier[self] . identifier[indent_stack] [- literal[int] ] keyword[if] identifier[column] < identifier[indent] : identifier[rollback_lexpos] ( identifier[t] ) keyword[if] identifier[column] <= identifier[indent] : identifier[t] . identifier[lexer] . identifier[pop_state] () identifier[t] . identifier[type] = literal[string] keyword[if] identifier[column] > identifier[indent] : identifier[t] . identifier[type] = literal[string] keyword[return] identifier[t]
def t_fold_end(self, t): """\\n+\\ *""" column = find_column(t) indent = self.indent_stack[-1] if column < indent: rollback_lexpos(t) # depends on [control=['if'], data=[]] if column <= indent: t.lexer.pop_state() t.type = 'B_FOLD_END' # depends on [control=['if'], data=[]] if column > indent: t.type = 'SCALAR' # depends on [control=['if'], data=[]] return t
def piano_roll(annotation, **kwargs): '''Plotting wrapper for piano rolls''' times, midi = annotation.to_interval_values() return mir_eval.display.piano_roll(times, midi=midi, **kwargs)
def function[piano_roll, parameter[annotation]]: constant[Plotting wrapper for piano rolls] <ast.Tuple object at 0x7da204622860> assign[=] call[name[annotation].to_interval_values, parameter[]] return[call[name[mir_eval].display.piano_roll, parameter[name[times]]]]
keyword[def] identifier[piano_roll] ( identifier[annotation] ,** identifier[kwargs] ): literal[string] identifier[times] , identifier[midi] = identifier[annotation] . identifier[to_interval_values] () keyword[return] identifier[mir_eval] . identifier[display] . identifier[piano_roll] ( identifier[times] , identifier[midi] = identifier[midi] ,** identifier[kwargs] )
def piano_roll(annotation, **kwargs): """Plotting wrapper for piano rolls""" (times, midi) = annotation.to_interval_values() return mir_eval.display.piano_roll(times, midi=midi, **kwargs)
def upd_size(self, *args): """I'm the same size as any given card in my :class:`DeckLayout`.""" self.size = ( self.parent.card_size_hint_x * self.parent.width, self.parent.card_size_hint_y * self.parent.height )
def function[upd_size, parameter[self]]: constant[I'm the same size as any given card in my :class:`DeckLayout`.] name[self].size assign[=] tuple[[<ast.BinOp object at 0x7da2047e8b20>, <ast.BinOp object at 0x7da2047eac80>]]
keyword[def] identifier[upd_size] ( identifier[self] ,* identifier[args] ): literal[string] identifier[self] . identifier[size] =( identifier[self] . identifier[parent] . identifier[card_size_hint_x] * identifier[self] . identifier[parent] . identifier[width] , identifier[self] . identifier[parent] . identifier[card_size_hint_y] * identifier[self] . identifier[parent] . identifier[height] )
def upd_size(self, *args): """I'm the same size as any given card in my :class:`DeckLayout`.""" self.size = (self.parent.card_size_hint_x * self.parent.width, self.parent.card_size_hint_y * self.parent.height)
def get(self, cluster, environ, topology): ''' :param cluster: :param environ: :param topology: :return: ''' # fetch the execution of the topology asynchronously execution_state = yield access.get_execution_state(cluster, environ, topology) # fetch scheduler location of the topology scheduler_location = yield access.get_scheduler_location(cluster, environ, topology) job_page_link = scheduler_location["job_page_link"] # convert the topology launch time to display format launched_at = datetime.utcfromtimestamp(execution_state['submission_time']) launched_time = launched_at.strftime('%Y-%m-%d %H:%M:%S UTC') # pylint: disable=no-member options = dict( cluster=cluster, environ=environ, topology=topology, execution_state=execution_state, launched=launched_time, status="running" if random.randint(0, 1) else "errors", active="topologies", job_page_link=job_page_link, function=common.className, baseUrl=self.baseUrl ) # send the single topology page self.render("topology.html", **options)
def function[get, parameter[self, cluster, environ, topology]]: constant[ :param cluster: :param environ: :param topology: :return: ] variable[execution_state] assign[=] <ast.Yield object at 0x7da20c9901c0> variable[scheduler_location] assign[=] <ast.Yield object at 0x7da20c991090> variable[job_page_link] assign[=] call[name[scheduler_location]][constant[job_page_link]] variable[launched_at] assign[=] call[name[datetime].utcfromtimestamp, parameter[call[name[execution_state]][constant[submission_time]]]] variable[launched_time] assign[=] call[name[launched_at].strftime, parameter[constant[%Y-%m-%d %H:%M:%S UTC]]] variable[options] assign[=] call[name[dict], parameter[]] call[name[self].render, parameter[constant[topology.html]]]
keyword[def] identifier[get] ( identifier[self] , identifier[cluster] , identifier[environ] , identifier[topology] ): literal[string] identifier[execution_state] = keyword[yield] identifier[access] . identifier[get_execution_state] ( identifier[cluster] , identifier[environ] , identifier[topology] ) identifier[scheduler_location] = keyword[yield] identifier[access] . identifier[get_scheduler_location] ( identifier[cluster] , identifier[environ] , identifier[topology] ) identifier[job_page_link] = identifier[scheduler_location] [ literal[string] ] identifier[launched_at] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[execution_state] [ literal[string] ]) identifier[launched_time] = identifier[launched_at] . identifier[strftime] ( literal[string] ) identifier[options] = identifier[dict] ( identifier[cluster] = identifier[cluster] , identifier[environ] = identifier[environ] , identifier[topology] = identifier[topology] , identifier[execution_state] = identifier[execution_state] , identifier[launched] = identifier[launched_time] , identifier[status] = literal[string] keyword[if] identifier[random] . identifier[randint] ( literal[int] , literal[int] ) keyword[else] literal[string] , identifier[active] = literal[string] , identifier[job_page_link] = identifier[job_page_link] , identifier[function] = identifier[common] . identifier[className] , identifier[baseUrl] = identifier[self] . identifier[baseUrl] ) identifier[self] . identifier[render] ( literal[string] ,** identifier[options] )
def get(self, cluster, environ, topology): """ :param cluster: :param environ: :param topology: :return: """ # fetch the execution of the topology asynchronously execution_state = (yield access.get_execution_state(cluster, environ, topology)) # fetch scheduler location of the topology scheduler_location = (yield access.get_scheduler_location(cluster, environ, topology)) job_page_link = scheduler_location['job_page_link'] # convert the topology launch time to display format launched_at = datetime.utcfromtimestamp(execution_state['submission_time']) launched_time = launched_at.strftime('%Y-%m-%d %H:%M:%S UTC') # pylint: disable=no-member options = dict(cluster=cluster, environ=environ, topology=topology, execution_state=execution_state, launched=launched_time, status='running' if random.randint(0, 1) else 'errors', active='topologies', job_page_link=job_page_link, function=common.className, baseUrl=self.baseUrl) # send the single topology page self.render('topology.html', **options)
def update1(self, key: str, data: np.ndarray, size: int) -> None: """ Update one entry in specific record in datastore """ print(data) if key in self.get_keys(): self.data[key][data[0]] = data else: newdata = np.zeros((size, 6)) newdata[data[0]] = data self.data[key] = newdata
def function[update1, parameter[self, key, data, size]]: constant[ Update one entry in specific record in datastore ] call[name[print], parameter[name[data]]] if compare[name[key] in call[name[self].get_keys, parameter[]]] begin[:] call[call[name[self].data][name[key]]][call[name[data]][constant[0]]] assign[=] name[data]
keyword[def] identifier[update1] ( identifier[self] , identifier[key] : identifier[str] , identifier[data] : identifier[np] . identifier[ndarray] , identifier[size] : identifier[int] )-> keyword[None] : literal[string] identifier[print] ( identifier[data] ) keyword[if] identifier[key] keyword[in] identifier[self] . identifier[get_keys] (): identifier[self] . identifier[data] [ identifier[key] ][ identifier[data] [ literal[int] ]]= identifier[data] keyword[else] : identifier[newdata] = identifier[np] . identifier[zeros] (( identifier[size] , literal[int] )) identifier[newdata] [ identifier[data] [ literal[int] ]]= identifier[data] identifier[self] . identifier[data] [ identifier[key] ]= identifier[newdata]
def update1(self, key: str, data: np.ndarray, size: int) -> None: """ Update one entry in specific record in datastore """ print(data) if key in self.get_keys(): self.data[key][data[0]] = data # depends on [control=['if'], data=['key']] else: newdata = np.zeros((size, 6)) newdata[data[0]] = data self.data[key] = newdata
def sympy_to_py(func, args):
    """
    Turn a symbolic expression into a Python lambda function, which has the
    names of the variables and parameters as its argument names.

    :param func: sympy expression
    :param args: variables and parameters in this model
    :return: lambda function to be used for numerical evaluation of the model.
    """
    # Replace the derivatives with printable variables: lambdify cannot use a
    # sympy.Derivative as an argument name, so each one is substituted by a
    # plain Variable carrying the same name.
    derivatives = {var: Variable(var.name) for var in args
                   if isinstance(var, sympy.Derivative)}
    func = func.xreplace(derivatives)
    args = [derivatives[var] if isinstance(var, sympy.Derivative) else var
            for var in args]
    # dummify=False asks lambdify to keep the original symbol names.
    lambdafunc = lambdify(args, func, printer=SymfitNumPyPrinter,
                          dummify=False)
    # Check if the argument names of the generated lambda match our args.
    signature = inspect_sig.signature(lambdafunc)
    sig_parameters = OrderedDict(signature.parameters)
    for arg, lambda_arg in zip(args, sig_parameters):
        if arg.name != lambda_arg:
            break
    else:
        # for-else: no mismatch found, lambdifying successful!
        return lambdafunc

    # If we are here (very rare), then one of the lambda args is still a
    # Dummy symbol. In this case we will manually handle the naming by
    # mapping our argument names onto the lambda's (dummy) parameter names.
    # NOTE: relies on zip() pairing args and parameters in declaration order.
    lambda_names = sig_parameters.keys()
    arg_names = [arg.name for arg in args]
    conversion = dict(zip(arg_names, lambda_names))

    # Wrap the lambda such that keyword arg names are translated into the
    # correct dummy symbol names before the call.
    @wraps(lambdafunc)
    def wrapped_lambdafunc(*ordered_args, **kwargs):
        converted_kwargs = {conversion[k]: v for k, v in kwargs.items()}
        return lambdafunc(*ordered_args, **converted_kwargs)

    # Update the signature of wrapped_lambdafunc to match our args, so that
    # introspection (and keyword calls) sees the intended names.
    new_sig_parameters = OrderedDict()
    for arg_name, dummy_name in conversion.items():
        if arg_name == dummy_name:
            # Already has the correct name.
            new_sig_parameters[arg_name] = sig_parameters[arg_name]
        else:
            # Change the dummy inspect.Parameter to the correct name.
            param = sig_parameters[dummy_name]
            param = param.replace(name=arg_name)
            new_sig_parameters[arg_name] = param

    wrapped_lambdafunc.__signature__ = signature.replace(
        parameters=new_sig_parameters.values()
    )
    return wrapped_lambdafunc
def function[sympy_to_py, parameter[func, args]]: constant[ Turn a symbolic expression into a Python lambda function, which has the names of the variables and parameters as it's argument names. :param func: sympy expression :param args: variables and parameters in this model :return: lambda function to be used for numerical evaluation of the model. ] variable[derivatives] assign[=] <ast.DictComp object at 0x7da1b1529b70> variable[func] assign[=] call[name[func].xreplace, parameter[name[derivatives]]] variable[args] assign[=] <ast.ListComp object at 0x7da1b15280d0> variable[lambdafunc] assign[=] call[name[lambdify], parameter[name[args], name[func]]] variable[signature] assign[=] call[name[inspect_sig].signature, parameter[name[lambdafunc]]] variable[sig_parameters] assign[=] call[name[OrderedDict], parameter[name[signature].parameters]] for taget[tuple[[<ast.Name object at 0x7da1b15286d0>, <ast.Name object at 0x7da1b1529570>]]] in starred[call[name[zip], parameter[name[args], name[sig_parameters]]]] begin[:] if compare[name[arg].name not_equal[!=] name[lambda_arg]] begin[:] break variable[lambda_names] assign[=] call[name[sig_parameters].keys, parameter[]] variable[arg_names] assign[=] <ast.ListComp object at 0x7da1b1529db0> variable[conversion] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[arg_names], name[lambda_names]]]]] def function[wrapped_lambdafunc, parameter[]]: variable[converted_kwargs] assign[=] <ast.DictComp object at 0x7da1b152a230> return[call[name[lambdafunc], parameter[<ast.Starred object at 0x7da1b15292d0>]]] variable[new_sig_parameters] assign[=] call[name[OrderedDict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b152a380>, <ast.Name object at 0x7da1b1528d90>]]] in starred[call[name[conversion].items, parameter[]]] begin[:] if compare[name[arg_name] equal[==] name[dummy_name]] begin[:] call[name[new_sig_parameters]][name[arg_name]] assign[=] call[name[sig_parameters]][name[arg_name]] 
name[wrapped_lambdafunc].__signature__ assign[=] call[name[signature].replace, parameter[]] return[name[wrapped_lambdafunc]]
keyword[def] identifier[sympy_to_py] ( identifier[func] , identifier[args] ): literal[string] identifier[derivatives] ={ identifier[var] : identifier[Variable] ( identifier[var] . identifier[name] ) keyword[for] identifier[var] keyword[in] identifier[args] keyword[if] identifier[isinstance] ( identifier[var] , identifier[sympy] . identifier[Derivative] )} identifier[func] = identifier[func] . identifier[xreplace] ( identifier[derivatives] ) identifier[args] =[ identifier[derivatives] [ identifier[var] ] keyword[if] identifier[isinstance] ( identifier[var] , identifier[sympy] . identifier[Derivative] ) keyword[else] identifier[var] keyword[for] identifier[var] keyword[in] identifier[args] ] identifier[lambdafunc] = identifier[lambdify] ( identifier[args] , identifier[func] , identifier[printer] = identifier[SymfitNumPyPrinter] , identifier[dummify] = keyword[False] ) identifier[signature] = identifier[inspect_sig] . identifier[signature] ( identifier[lambdafunc] ) identifier[sig_parameters] = identifier[OrderedDict] ( identifier[signature] . identifier[parameters] ) keyword[for] identifier[arg] , identifier[lambda_arg] keyword[in] identifier[zip] ( identifier[args] , identifier[sig_parameters] ): keyword[if] identifier[arg] . identifier[name] != identifier[lambda_arg] : keyword[break] keyword[else] : keyword[return] identifier[lambdafunc] identifier[lambda_names] = identifier[sig_parameters] . identifier[keys] () identifier[arg_names] =[ identifier[arg] . identifier[name] keyword[for] identifier[arg] keyword[in] identifier[args] ] identifier[conversion] = identifier[dict] ( identifier[zip] ( identifier[arg_names] , identifier[lambda_names] )) @ identifier[wraps] ( identifier[lambdafunc] ) keyword[def] identifier[wrapped_lambdafunc] (* identifier[ordered_args] ,** identifier[kwargs] ): identifier[converted_kwargs] ={ identifier[conversion] [ identifier[k] ]: identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . 
identifier[items] ()} keyword[return] identifier[lambdafunc] (* identifier[ordered_args] ,** identifier[converted_kwargs] ) identifier[new_sig_parameters] = identifier[OrderedDict] () keyword[for] identifier[arg_name] , identifier[dummy_name] keyword[in] identifier[conversion] . identifier[items] (): keyword[if] identifier[arg_name] == identifier[dummy_name] : identifier[new_sig_parameters] [ identifier[arg_name] ]= identifier[sig_parameters] [ identifier[arg_name] ] keyword[else] : identifier[param] = identifier[sig_parameters] [ identifier[dummy_name] ] identifier[param] = identifier[param] . identifier[replace] ( identifier[name] = identifier[arg_name] ) identifier[new_sig_parameters] [ identifier[arg_name] ]= identifier[param] identifier[wrapped_lambdafunc] . identifier[__signature__] = identifier[signature] . identifier[replace] ( identifier[parameters] = identifier[new_sig_parameters] . identifier[values] () ) keyword[return] identifier[wrapped_lambdafunc]
def sympy_to_py(func, args): """ Turn a symbolic expression into a Python lambda function, which has the names of the variables and parameters as it's argument names. :param func: sympy expression :param args: variables and parameters in this model :return: lambda function to be used for numerical evaluation of the model. """ # replace the derivatives with printable variables. derivatives = {var: Variable(var.name) for var in args if isinstance(var, sympy.Derivative)} func = func.xreplace(derivatives) args = [derivatives[var] if isinstance(var, sympy.Derivative) else var for var in args] lambdafunc = lambdify(args, func, printer=SymfitNumPyPrinter, dummify=False) # Check if the names of the lambda function are what we expect signature = inspect_sig.signature(lambdafunc) sig_parameters = OrderedDict(signature.parameters) for (arg, lambda_arg) in zip(args, sig_parameters): if arg.name != lambda_arg: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] else: # Lambdifying succesful! return lambdafunc # If we are here (very rare), then one of the lambda arg is still a Dummy. # In this case we will manually handle the naming. 
lambda_names = sig_parameters.keys() arg_names = [arg.name for arg in args] conversion = dict(zip(arg_names, lambda_names)) # Wrap the lambda such that arg names are translated into the correct dummy # symbol names @wraps(lambdafunc) def wrapped_lambdafunc(*ordered_args, **kwargs): converted_kwargs = {conversion[k]: v for (k, v) in kwargs.items()} return lambdafunc(*ordered_args, **converted_kwargs) # Update the signature of wrapped_lambdafunc to math our args new_sig_parameters = OrderedDict() for (arg_name, dummy_name) in conversion.items(): if arg_name == dummy_name: # Already has the correct name new_sig_parameters[arg_name] = sig_parameters[arg_name] # depends on [control=['if'], data=['arg_name']] else: # Change the dummy inspect.Parameter to the correct name param = sig_parameters[dummy_name] param = param.replace(name=arg_name) new_sig_parameters[arg_name] = param # depends on [control=['for'], data=[]] wrapped_lambdafunc.__signature__ = signature.replace(parameters=new_sig_parameters.values()) return wrapped_lambdafunc
def send_offset_fetch_request(self, group, payloads=None,
                              fail_on_error=True, callback=None):
    """
    Takes a group (string) and list of OffsetFetchRequest and returns
    a list of OffsetFetchResponse objects

    Generator-based coroutine: yields the broker-aware request and
    delivers its result via ``returnValue`` (Twisted ``inlineCallbacks``
    style — presumably decorated at the definition site; confirm).

    :param group: consumer group name; used both to encode the request
        and as the ``consumer_group`` for broker routing.
    :param payloads: list of OffsetFetchRequest payloads.
    :param fail_on_error: forwarded to ``_handle_responses``.
    :param callback: optional callable forwarded to ``_handle_responses``.
    """
    # Pre-bind the group into the encoder; the decoder parses raw responses.
    encoder = partial(KafkaCodec.encode_offset_fetch_request,
                      group=group)
    decoder = KafkaCodec.decode_offset_fetch_response
    resps = yield self._send_broker_aware_request(
        payloads, encoder, decoder, consumer_group=group)
    returnValue(self._handle_responses(
        resps, fail_on_error, callback, group))
def function[send_offset_fetch_request, parameter[self, group, payloads, fail_on_error, callback]]: constant[ Takes a group (string) and list of OffsetFetchRequest and returns a list of OffsetFetchResponse objects ] variable[encoder] assign[=] call[name[partial], parameter[name[KafkaCodec].encode_offset_fetch_request]] variable[decoder] assign[=] name[KafkaCodec].decode_offset_fetch_response variable[resps] assign[=] <ast.Yield object at 0x7da1b04d1b40> call[name[returnValue], parameter[call[name[self]._handle_responses, parameter[name[resps], name[fail_on_error], name[callback], name[group]]]]]
keyword[def] identifier[send_offset_fetch_request] ( identifier[self] , identifier[group] , identifier[payloads] = keyword[None] , identifier[fail_on_error] = keyword[True] , identifier[callback] = keyword[None] ): literal[string] identifier[encoder] = identifier[partial] ( identifier[KafkaCodec] . identifier[encode_offset_fetch_request] , identifier[group] = identifier[group] ) identifier[decoder] = identifier[KafkaCodec] . identifier[decode_offset_fetch_response] identifier[resps] = keyword[yield] identifier[self] . identifier[_send_broker_aware_request] ( identifier[payloads] , identifier[encoder] , identifier[decoder] , identifier[consumer_group] = identifier[group] ) identifier[returnValue] ( identifier[self] . identifier[_handle_responses] ( identifier[resps] , identifier[fail_on_error] , identifier[callback] , identifier[group] ))
def send_offset_fetch_request(self, group, payloads=None, fail_on_error=True, callback=None): """ Takes a group (string) and list of OffsetFetchRequest and returns a list of OffsetFetchResponse objects """ encoder = partial(KafkaCodec.encode_offset_fetch_request, group=group) decoder = KafkaCodec.decode_offset_fetch_response resps = (yield self._send_broker_aware_request(payloads, encoder, decoder, consumer_group=group)) returnValue(self._handle_responses(resps, fail_on_error, callback, group))
def to_potential(f):
    '''
    to_potential(f) yields f if f is a potential function; if f is not, but f
      can be converted to a potential function, that conversion is performed
      then the result is yielded.
    to_potential(Ellipsis) yields a potential function whose output is simply
      its input (i.e., the identity function).
    to_potential(None) is equivalent to to_potential(0).

    The following can be converted into potential functions:
      * Anything for which pimms.is_array(x, 'number') yields True (i.e.,
        arrays of constants).
      * Any tuple (g, h) where g(x) yields a potential value and h(x) yields a
        jacobian matrix for the parameter vector x.
    '''
    if is_potential(f):
        return f
    elif f is Ellipsis:
        return identity
    elif f is None:
        # BUG FIX: the docstring promises to_potential(None) == to_potential(0),
        # but None previously fell through to the ValueError below.
        return const_potential(0)
    elif pimms.is_array(f, 'number'):
        return const_potential(f)
    elif isinstance(f, tuple) and len(f) == 2:
        return PotentialLambda(f[0], f[1])
    else:
        raise ValueError('Could not convert object to potential function')
def function[to_potential, parameter[f]]: constant[ to_potential(f) yields f if f is a potential function; if f is not, but f can be converted to a potential function, that conversion is performed then the result is yielded. to_potential(Ellipsis) yields a potential function whose output is simply its input (i.e., the identity function). to_potential(None) is equivalent to to_potential(0). The following can be converted into potential functions: * Anything for which pimms.is_array(x, 'number') yields True (i.e., arrays of constants). * Any tuple (g, h) where g(x) yields a potential value and h(x) yields a jacobian matrix for the parameter vector x. ] if call[name[is_potential], parameter[name[f]]] begin[:] return[name[f]]
keyword[def] identifier[to_potential] ( identifier[f] ): literal[string] keyword[if] identifier[is_potential] ( identifier[f] ): keyword[return] identifier[f] keyword[elif] identifier[f] keyword[is] identifier[Ellipsis] : keyword[return] identifier[identity] keyword[elif] identifier[pimms] . identifier[is_array] ( identifier[f] , literal[string] ): keyword[return] identifier[const_potential] ( identifier[f] ) keyword[elif] identifier[isinstance] ( identifier[f] , identifier[tuple] ) keyword[and] identifier[len] ( identifier[f] )== literal[int] : keyword[return] identifier[PotentialLambda] ( identifier[f] [ literal[int] ], identifier[f] [ literal[int] ]) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def to_potential(f): """ to_potential(f) yields f if f is a potential function; if f is not, but f can be converted to a potential function, that conversion is performed then the result is yielded. to_potential(Ellipsis) yields a potential function whose output is simply its input (i.e., the identity function). to_potential(None) is equivalent to to_potential(0). The following can be converted into potential functions: * Anything for which pimms.is_array(x, 'number') yields True (i.e., arrays of constants). * Any tuple (g, h) where g(x) yields a potential value and h(x) yields a jacobian matrix for the parameter vector x. """ if is_potential(f): return f # depends on [control=['if'], data=[]] elif f is Ellipsis: return identity # depends on [control=['if'], data=[]] elif pimms.is_array(f, 'number'): return const_potential(f) # depends on [control=['if'], data=[]] elif isinstance(f, tuple) and len(f) == 2: return PotentialLambda(f[0], f[1]) # depends on [control=['if'], data=[]] else: raise ValueError('Could not convert object to potential function')
def upsert_all(cls, engine, obj_or_data):
    """
    ORM-syntax counterpart of :meth:`sqlalchemy.crud.updating.upsert_all`.

    Simply delegates to :meth:`update_all` with ``upsert=True``.

    :param engine: an engine created by ``sqlalchemy.create_engine``.
    :param obj_or_data: single object or list of object
    """
    kwargs = dict(engine=engine, obj_or_data=obj_or_data, upsert=True)
    cls.update_all(**kwargs)
def function[upsert_all, parameter[cls, engine, obj_or_data]]: constant[ The :meth:`sqlalchemy.crud.updating.upsert_all` function in ORM syntax. :param engine: an engine created by``sqlalchemy.create_engine``. :param obj_or_data: single object or list of object ] call[name[cls].update_all, parameter[]]
keyword[def] identifier[upsert_all] ( identifier[cls] , identifier[engine] , identifier[obj_or_data] ): literal[string] identifier[cls] . identifier[update_all] ( identifier[engine] = identifier[engine] , identifier[obj_or_data] = identifier[obj_or_data] , identifier[upsert] = keyword[True] , )
def upsert_all(cls, engine, obj_or_data): """ The :meth:`sqlalchemy.crud.updating.upsert_all` function in ORM syntax. :param engine: an engine created by``sqlalchemy.create_engine``. :param obj_or_data: single object or list of object """ cls.update_all(engine=engine, obj_or_data=obj_or_data, upsert=True)
def get_one(seq, default=None, skip_string_iter=True):
    """
    Return the first item of ``seq``, or ``default`` when none is available.

    :param seq: iterable to take one item from.
    :param default: value returned when ``seq`` is empty/falsy or not
        iterable (None by default).
    :param skip_string_iter: when true, str/bytes/bytearray are treated as
        scalars and returned unchanged instead of being iterated
        character-by-character.
    """
    if skip_string_iter and isinstance(seq, (str, bytes, bytearray)):
        # BUG FIX: dropped the Py2-only `unicode` name, which raises
        # NameError on Python 3 (the file uses Py3 annotations elsewhere).
        return seq
    if not seq:
        # BUG FIX: previously returned '' here, ignoring `default` and
        # contradicting the documented behavior.
        return default
    try:
        return next(iter(seq))
    except TypeError:  # no __iter__/__getitem__
        return default
def function[get_one, parameter[seq, default, skip_string_iter]]: constant[ Return one item from seq or None(by default). ] if <ast.BoolOp object at 0x7da18bcc8a00> begin[:] return[name[seq]] if <ast.UnaryOp object at 0x7da18bcc9360> begin[:] return[constant[]] <ast.Try object at 0x7da18bcc82e0>
keyword[def] identifier[get_one] ( identifier[seq] , identifier[default] = keyword[None] , identifier[skip_string_iter] = keyword[True] ): literal[string] keyword[if] identifier[skip_string_iter] keyword[and] identifier[isinstance] ( identifier[seq] ,( identifier[str] , identifier[unicode] , identifier[bytes] , identifier[bytearray] )): keyword[return] identifier[seq] keyword[if] keyword[not] identifier[seq] : keyword[return] literal[string] keyword[try] : keyword[return] identifier[next] ( identifier[iter] ( identifier[seq] )) keyword[except] identifier[TypeError] : keyword[return] identifier[default]
def get_one(seq, default=None, skip_string_iter=True): """ Return one item from seq or None(by default). """ if skip_string_iter and isinstance(seq, (str, unicode, bytes, bytearray)): return seq # depends on [control=['if'], data=[]] if not seq: return '' # depends on [control=['if'], data=[]] try: return next(iter(seq)) # depends on [control=['try'], data=[]] except TypeError: # not hasattr __iter__/__getitem__ return default # depends on [control=['except'], data=[]]
def avoid(self) -> Tuple[Tuple[int], Tuple[int]]:
    """
    Collect (hypothesis, word-id) pairs that must not be selected.

    Both the global avoid-states and the sentence-local ones are consulted;
    word id 0 is never blocked. The pairs are (x, y) indices into the scores
    array of shape (beam_size, target_vocab_size); the caller sets those
    entries to np.inf so they cannot be chosen.

    :return: two parallel tuples (x coordinates, y coordinates), or an
        empty tuple when nothing is blocked.
    """
    blocked = set()
    for states in (self.global_avoid_states, self.local_avoid_states):
        for hyp_idx, state in enumerate(states):
            blocked.update((hyp_idx, word_id)
                           for word_id in state.avoid()
                           if word_id > 0)
    return tuple(zip(*blocked))
def function[avoid, parameter[self]]: constant[ Assembles a list of per-hypothesis words to avoid. The indices are (x, y) pairs into the scores array, which has dimensions (beam_size, target_vocab_size). These values are then used by the caller to set these items to np.inf so they won't be selected. Words to be avoided are selected by consulting both the global trie of phrases and the sentence-specific one. :return: Two lists of indices: the x coordinates and y coordinates. ] variable[to_avoid] assign[=] call[name[set], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b1d76080>, <ast.Name object at 0x7da1b1d743a0>]]] in starred[call[name[enumerate], parameter[name[self].global_avoid_states]]] begin[:] for taget[name[word_id]] in starred[call[name[state].avoid, parameter[]]] begin[:] if compare[name[word_id] greater[>] constant[0]] begin[:] call[name[to_avoid].add, parameter[tuple[[<ast.Name object at 0x7da1b1d75a20>, <ast.Name object at 0x7da1b1d77850>]]]] for taget[tuple[[<ast.Name object at 0x7da1b1d74d60>, <ast.Name object at 0x7da1b1d75f60>]]] in starred[call[name[enumerate], parameter[name[self].local_avoid_states]]] begin[:] for taget[name[word_id]] in starred[call[name[state].avoid, parameter[]]] begin[:] if compare[name[word_id] greater[>] constant[0]] begin[:] call[name[to_avoid].add, parameter[tuple[[<ast.Name object at 0x7da1b1d74760>, <ast.Name object at 0x7da1b1d74df0>]]]] return[call[name[tuple], parameter[call[name[zip], parameter[<ast.Starred object at 0x7da1b1d770a0>]]]]]
keyword[def] identifier[avoid] ( identifier[self] )-> identifier[Tuple] [ identifier[Tuple] [ identifier[int] ], identifier[Tuple] [ identifier[int] ]]: literal[string] identifier[to_avoid] = identifier[set] () keyword[for] identifier[i] , identifier[state] keyword[in] identifier[enumerate] ( identifier[self] . identifier[global_avoid_states] ): keyword[for] identifier[word_id] keyword[in] identifier[state] . identifier[avoid] (): keyword[if] identifier[word_id] > literal[int] : identifier[to_avoid] . identifier[add] (( identifier[i] , identifier[word_id] )) keyword[for] identifier[i] , identifier[state] keyword[in] identifier[enumerate] ( identifier[self] . identifier[local_avoid_states] ): keyword[for] identifier[word_id] keyword[in] identifier[state] . identifier[avoid] (): keyword[if] identifier[word_id] > literal[int] : identifier[to_avoid] . identifier[add] (( identifier[i] , identifier[word_id] )) keyword[return] identifier[tuple] ( identifier[zip] (* identifier[to_avoid] ))
def avoid(self) -> Tuple[Tuple[int], Tuple[int]]: """ Assembles a list of per-hypothesis words to avoid. The indices are (x, y) pairs into the scores array, which has dimensions (beam_size, target_vocab_size). These values are then used by the caller to set these items to np.inf so they won't be selected. Words to be avoided are selected by consulting both the global trie of phrases and the sentence-specific one. :return: Two lists of indices: the x coordinates and y coordinates. """ to_avoid = set() # type: Set[Tuple[int, int]] for (i, state) in enumerate(self.global_avoid_states): for word_id in state.avoid(): if word_id > 0: to_avoid.add((i, word_id)) # depends on [control=['if'], data=['word_id']] # depends on [control=['for'], data=['word_id']] # depends on [control=['for'], data=[]] for (i, state) in enumerate(self.local_avoid_states): for word_id in state.avoid(): if word_id > 0: to_avoid.add((i, word_id)) # depends on [control=['if'], data=['word_id']] # depends on [control=['for'], data=['word_id']] # depends on [control=['for'], data=[]] return tuple(zip(*to_avoid))
def clear_modified_data(self):
    """
    Reset the modified data only, recursing into any original-data items
    that themselves support ``clear_modified_data``.
    """
    self.__modified_data__ = None
    for item in self.__original_data__:
        try:
            item.clear_modified_data()
        except AttributeError:
            # Plain values (no clear_modified_data) are simply skipped.
            continue
def function[clear_modified_data, parameter[self]]: constant[ Clears only the modified data ] name[self].__modified_data__ assign[=] constant[None] for taget[name[value]] in starred[name[self].__original_data__] begin[:] <ast.Try object at 0x7da1b0aa6d40>
keyword[def] identifier[clear_modified_data] ( identifier[self] ): literal[string] identifier[self] . identifier[__modified_data__] = keyword[None] keyword[for] identifier[value] keyword[in] identifier[self] . identifier[__original_data__] : keyword[try] : identifier[value] . identifier[clear_modified_data] () keyword[except] identifier[AttributeError] : keyword[pass]
def clear_modified_data(self): """ Clears only the modified data """ self.__modified_data__ = None for value in self.__original_data__: try: value.clear_modified_data() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['value']]
def render(self, use_reloader=False):
    """Generate the site.

    :param use_reloader: if given, reload templates on modification
    """
    self.render_templates(self.templates)
    self.copy_static(self.static_names)
    if not use_reloader:
        return
    self.logger.info("Watching '%s' for changes..." % self.searchpath)
    self.logger.info("Press Ctrl+C to stop.")
    Reloader(self).watch()
def function[render, parameter[self, use_reloader]]: constant[Generate the site. :param use_reloader: if given, reload templates on modification ] call[name[self].render_templates, parameter[name[self].templates]] call[name[self].copy_static, parameter[name[self].static_names]] if name[use_reloader] begin[:] call[name[self].logger.info, parameter[binary_operation[constant[Watching '%s' for changes...] <ast.Mod object at 0x7da2590d6920> name[self].searchpath]]] call[name[self].logger.info, parameter[constant[Press Ctrl+C to stop.]]] call[call[name[Reloader], parameter[name[self]]].watch, parameter[]]
keyword[def] identifier[render] ( identifier[self] , identifier[use_reloader] = keyword[False] ): literal[string] identifier[self] . identifier[render_templates] ( identifier[self] . identifier[templates] ) identifier[self] . identifier[copy_static] ( identifier[self] . identifier[static_names] ) keyword[if] identifier[use_reloader] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[self] . identifier[searchpath] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) identifier[Reloader] ( identifier[self] ). identifier[watch] ()
def render(self, use_reloader=False): """Generate the site. :param use_reloader: if given, reload templates on modification """ self.render_templates(self.templates) self.copy_static(self.static_names) if use_reloader: self.logger.info("Watching '%s' for changes..." % self.searchpath) self.logger.info('Press Ctrl+C to stop.') Reloader(self).watch() # depends on [control=['if'], data=[]]
def _sign_operation(op):
    """Obtains a signature for an operation in a ReportRequest.

    The digest covers the consumer id and operation name separated by a
    NUL byte, plus the operation labels when present.

    Args:
      op (:class:`endpoints_management.gen.servicecontrol_v1_messages.Operation`):
        an operation used in a `ReportRequest`

    Returns:
      string: a unique signature for that operation
    """
    hasher = hashlib.md5()
    # Feeding one concatenated buffer is byte-for-byte equivalent to three
    # incremental update() calls.
    hasher.update(b'\x00'.join((op.consumerId.encode('utf-8'),
                                op.operationName.encode('utf-8'))))
    if op.labels:
        signing.add_dict_to_hash(hasher, encoding.MessageToPyValue(op.labels))
    return hasher.digest()
def function[_sign_operation, parameter[op]]: constant[Obtains a signature for an operation in a ReportRequest. Args: op (:class:`endpoints_management.gen.servicecontrol_v1_messages.Operation`): an operation used in a `ReportRequest` Returns: string: a unique signature for that operation ] variable[md5] assign[=] call[name[hashlib].md5, parameter[]] call[name[md5].update, parameter[call[name[op].consumerId.encode, parameter[constant[utf-8]]]]] call[name[md5].update, parameter[constant[b'\x00']]] call[name[md5].update, parameter[call[name[op].operationName.encode, parameter[constant[utf-8]]]]] if name[op].labels begin[:] call[name[signing].add_dict_to_hash, parameter[name[md5], call[name[encoding].MessageToPyValue, parameter[name[op].labels]]]] return[call[name[md5].digest, parameter[]]]
keyword[def] identifier[_sign_operation] ( identifier[op] ): literal[string] identifier[md5] = identifier[hashlib] . identifier[md5] () identifier[md5] . identifier[update] ( identifier[op] . identifier[consumerId] . identifier[encode] ( literal[string] )) identifier[md5] . identifier[update] ( literal[string] ) identifier[md5] . identifier[update] ( identifier[op] . identifier[operationName] . identifier[encode] ( literal[string] )) keyword[if] identifier[op] . identifier[labels] : identifier[signing] . identifier[add_dict_to_hash] ( identifier[md5] , identifier[encoding] . identifier[MessageToPyValue] ( identifier[op] . identifier[labels] )) keyword[return] identifier[md5] . identifier[digest] ()
def _sign_operation(op): """Obtains a signature for an operation in a ReportRequest. Args: op (:class:`endpoints_management.gen.servicecontrol_v1_messages.Operation`): an operation used in a `ReportRequest` Returns: string: a unique signature for that operation """ md5 = hashlib.md5() md5.update(op.consumerId.encode('utf-8')) md5.update(b'\x00') md5.update(op.operationName.encode('utf-8')) if op.labels: signing.add_dict_to_hash(md5, encoding.MessageToPyValue(op.labels)) # depends on [control=['if'], data=[]] return md5.digest()
def contact_number(self):
    """
    Return the advertiser's phone number, or 'N/A' on any failure.

    The number is stored base64-encoded in the 'data-p' attribute of the
    page's phone-number button.

    :return: decoded phone-number string, or 'N/A' if it cannot be read
    """
    try:
        number = self._ad_page_content.find(
            'button', {'class': 'phone-number'})
        return (base64.b64decode(number.attrs['data-p'])).decode('ascii')
    except Exception as e:
        if self._debug:
            # BUG FIX: `"..." + e.args[0]` raised IndexError for
            # argument-less exceptions (and TypeError for non-str args),
            # crashing inside the error handler. Let logging format the
            # exception lazily instead.
            logging.error(
                "Error getting contact_number. Error message: %s", e)
        return 'N/A'
def function[contact_number, parameter[self]]: constant[ This method returns the contact phone number. :return: ] <ast.Try object at 0x7da18c4ccbe0>
keyword[def] identifier[contact_number] ( identifier[self] ): literal[string] keyword[try] : identifier[number] = identifier[self] . identifier[_ad_page_content] . identifier[find] ( literal[string] ,{ literal[string] : literal[string] }) keyword[return] ( identifier[base64] . identifier[b64decode] ( identifier[number] . identifier[attrs] [ literal[string] ])). identifier[decode] ( literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] identifier[self] . identifier[_debug] : identifier[logging] . identifier[error] ( literal[string] + identifier[e] . identifier[args] [ literal[int] ]) keyword[return] literal[string]
def contact_number(self): """ This method returns the contact phone number. :return: """ try: number = self._ad_page_content.find('button', {'class': 'phone-number'}) return base64.b64decode(number.attrs['data-p']).decode('ascii') # depends on [control=['try'], data=[]] except Exception as e: if self._debug: logging.error('Error getting contact_number. Error message: ' + e.args[0]) # depends on [control=['if'], data=[]] return 'N/A' # depends on [control=['except'], data=['e']]
def _update_batches_if_needed(self) -> None:
    "one_batch function is extremely slow with large datasets. This is caching the result as an optimization."
    if self.learn.data.valid_dl is None:
        # Running the learning-rate finder, so nothing to cache.
        return
    if self.data is self.learn.data:
        # Cache is still valid for the current DataBunch.
        return
    self.data = self.learn.data
    self.trn_batch = self._get_new_batch(ds_type=DatasetType.Train)
    self.val_batch = self._get_new_batch(ds_type=DatasetType.Valid)
def function[_update_batches_if_needed, parameter[self]]: constant[one_batch function is extremely slow with large datasets. This is caching the result as an optimization.] if compare[name[self].learn.data.valid_dl is constant[None]] begin[:] return[None] variable[update_batches] assign[=] compare[name[self].data is_not name[self].learn.data] if <ast.UnaryOp object at 0x7da1b1ec5180> begin[:] return[None] name[self].data assign[=] name[self].learn.data name[self].trn_batch assign[=] call[name[self]._get_new_batch, parameter[]] name[self].val_batch assign[=] call[name[self]._get_new_batch, parameter[]]
keyword[def] identifier[_update_batches_if_needed] ( identifier[self] )-> keyword[None] : literal[string] keyword[if] identifier[self] . identifier[learn] . identifier[data] . identifier[valid_dl] keyword[is] keyword[None] : keyword[return] identifier[update_batches] = identifier[self] . identifier[data] keyword[is] keyword[not] identifier[self] . identifier[learn] . identifier[data] keyword[if] keyword[not] identifier[update_batches] : keyword[return] identifier[self] . identifier[data] = identifier[self] . identifier[learn] . identifier[data] identifier[self] . identifier[trn_batch] = identifier[self] . identifier[_get_new_batch] ( identifier[ds_type] = identifier[DatasetType] . identifier[Train] ) identifier[self] . identifier[val_batch] = identifier[self] . identifier[_get_new_batch] ( identifier[ds_type] = identifier[DatasetType] . identifier[Valid] )
def _update_batches_if_needed(self) -> None: """one_batch function is extremely slow with large datasets. This is caching the result as an optimization.""" if self.learn.data.valid_dl is None: return # Running learning rate finder, so return # depends on [control=['if'], data=[]] update_batches = self.data is not self.learn.data if not update_batches: return # depends on [control=['if'], data=[]] self.data = self.learn.data self.trn_batch = self._get_new_batch(ds_type=DatasetType.Train) self.val_batch = self._get_new_batch(ds_type=DatasetType.Valid)
def by_col(cls, df, events, populations, w=None, inplace=False, pvalue='sim', outvals=None, swapname='', **stat_kws): """ Function to compute a Moran_Rate statistic on a dataframe Arguments --------- df : pandas.DataFrame a pandas dataframe with a geometry column events : string or list of strings one or more names where events are stored populations : string or list of strings one or more names where the populations corresponding to the events are stored. If one population column is provided, it is used for all event columns. If more than one population column is provided but there is not a population for every event column, an exception will be raised. w : pysal weights object a weights object aligned with the dataframe. If not provided, this is searched for in the dataframe's metadata inplace : bool a boolean denoting whether to operate on the dataframe inplace or to return a series contaning the results of the computation. If operating inplace, the derived columns will be named 'column_moran_rate' pvalue : string a string denoting which pvalue should be returned. Refer to the the Moran_Rate statistic's documentation for available p-values outvals : list of strings list of arbitrary attributes to return as columns from the Moran_Rate statistic **stat_kws : keyword arguments options to pass to the underlying statistic. For this, see the documentation for the Moran_Rate statistic. Returns -------- If inplace, None, and operation is conducted on dataframe in memory. Otherwise, returns a copy of the dataframe with the relevant columns attached. 
See Also --------- For further documentation, refer to the Moran_Rate class in pysal.esda """ if not inplace: new = df.copy() cls.by_col(new, events, populations, w=w, inplace=True, pvalue=pvalue, outvals=outvals, swapname=swapname, **stat_kws) return new if isinstance(events, str): events = [events] if isinstance(populations, str): populations = [populations] if len(populations) < len(events): populations = populations * len(events) if len(events) != len(populations): raise ValueError('There is not a one-to-one matching between events and ' 'populations!\nEvents: {}\n\nPopulations:' ' {}'.format(events, populations)) adjusted = stat_kws.pop('adjusted', True) if isinstance(adjusted, bool): adjusted = [adjusted] * len(events) if swapname is '': swapname = cls.__name__.lower() rates = [assuncao_rate(df[e], df[pop]) if adj else df[e].astype(float) / df[pop] for e,pop,adj in zip(events, populations, adjusted)] names = ['-'.join((e,p)) for e,p in zip(events, populations)] out_df = df.copy() rate_df = out_df.from_items(list(zip(names, rates))) #trick to avoid importing pandas stat_df = _univariate_handler(rate_df, names, w=w, inplace=False, pvalue = pvalue, outvals = outvals, swapname=swapname, stat=Moran, #how would this get done w/super? **stat_kws) for col in stat_df.columns: df[col] = stat_df[col]
def function[by_col, parameter[cls, df, events, populations, w, inplace, pvalue, outvals, swapname]]: constant[ Function to compute a Moran_Rate statistic on a dataframe Arguments --------- df : pandas.DataFrame a pandas dataframe with a geometry column events : string or list of strings one or more names where events are stored populations : string or list of strings one or more names where the populations corresponding to the events are stored. If one population column is provided, it is used for all event columns. If more than one population column is provided but there is not a population for every event column, an exception will be raised. w : pysal weights object a weights object aligned with the dataframe. If not provided, this is searched for in the dataframe's metadata inplace : bool a boolean denoting whether to operate on the dataframe inplace or to return a series contaning the results of the computation. If operating inplace, the derived columns will be named 'column_moran_rate' pvalue : string a string denoting which pvalue should be returned. Refer to the the Moran_Rate statistic's documentation for available p-values outvals : list of strings list of arbitrary attributes to return as columns from the Moran_Rate statistic **stat_kws : keyword arguments options to pass to the underlying statistic. For this, see the documentation for the Moran_Rate statistic. Returns -------- If inplace, None, and operation is conducted on dataframe in memory. Otherwise, returns a copy of the dataframe with the relevant columns attached. 
See Also --------- For further documentation, refer to the Moran_Rate class in pysal.esda ] if <ast.UnaryOp object at 0x7da1b1108af0> begin[:] variable[new] assign[=] call[name[df].copy, parameter[]] call[name[cls].by_col, parameter[name[new], name[events], name[populations]]] return[name[new]] if call[name[isinstance], parameter[name[events], name[str]]] begin[:] variable[events] assign[=] list[[<ast.Name object at 0x7da1b1108c70>]] if call[name[isinstance], parameter[name[populations], name[str]]] begin[:] variable[populations] assign[=] list[[<ast.Name object at 0x7da1b1108850>]] if compare[call[name[len], parameter[name[populations]]] less[<] call[name[len], parameter[name[events]]]] begin[:] variable[populations] assign[=] binary_operation[name[populations] * call[name[len], parameter[name[events]]]] if compare[call[name[len], parameter[name[events]]] not_equal[!=] call[name[len], parameter[name[populations]]]] begin[:] <ast.Raise object at 0x7da1b110bd90> variable[adjusted] assign[=] call[name[stat_kws].pop, parameter[constant[adjusted], constant[True]]] if call[name[isinstance], parameter[name[adjusted], name[bool]]] begin[:] variable[adjusted] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b1109c90>]] * call[name[len], parameter[name[events]]]] if compare[name[swapname] is constant[]] begin[:] variable[swapname] assign[=] call[name[cls].__name__.lower, parameter[]] variable[rates] assign[=] <ast.ListComp object at 0x7da1b110bb80> variable[names] assign[=] <ast.ListComp object at 0x7da1b110bfd0> variable[out_df] assign[=] call[name[df].copy, parameter[]] variable[rate_df] assign[=] call[name[out_df].from_items, parameter[call[name[list], parameter[call[name[zip], parameter[name[names], name[rates]]]]]]] variable[stat_df] assign[=] call[name[_univariate_handler], parameter[name[rate_df], name[names]]] for taget[name[col]] in starred[name[stat_df].columns] begin[:] call[name[df]][name[col]] assign[=] call[name[stat_df]][name[col]]
keyword[def] identifier[by_col] ( identifier[cls] , identifier[df] , identifier[events] , identifier[populations] , identifier[w] = keyword[None] , identifier[inplace] = keyword[False] , identifier[pvalue] = literal[string] , identifier[outvals] = keyword[None] , identifier[swapname] = literal[string] ,** identifier[stat_kws] ): literal[string] keyword[if] keyword[not] identifier[inplace] : identifier[new] = identifier[df] . identifier[copy] () identifier[cls] . identifier[by_col] ( identifier[new] , identifier[events] , identifier[populations] , identifier[w] = identifier[w] , identifier[inplace] = keyword[True] , identifier[pvalue] = identifier[pvalue] , identifier[outvals] = identifier[outvals] , identifier[swapname] = identifier[swapname] , ** identifier[stat_kws] ) keyword[return] identifier[new] keyword[if] identifier[isinstance] ( identifier[events] , identifier[str] ): identifier[events] =[ identifier[events] ] keyword[if] identifier[isinstance] ( identifier[populations] , identifier[str] ): identifier[populations] =[ identifier[populations] ] keyword[if] identifier[len] ( identifier[populations] )< identifier[len] ( identifier[events] ): identifier[populations] = identifier[populations] * identifier[len] ( identifier[events] ) keyword[if] identifier[len] ( identifier[events] )!= identifier[len] ( identifier[populations] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[events] , identifier[populations] )) identifier[adjusted] = identifier[stat_kws] . identifier[pop] ( literal[string] , keyword[True] ) keyword[if] identifier[isinstance] ( identifier[adjusted] , identifier[bool] ): identifier[adjusted] =[ identifier[adjusted] ]* identifier[len] ( identifier[events] ) keyword[if] identifier[swapname] keyword[is] literal[string] : identifier[swapname] = identifier[cls] . identifier[__name__] . 
identifier[lower] () identifier[rates] =[ identifier[assuncao_rate] ( identifier[df] [ identifier[e] ], identifier[df] [ identifier[pop] ]) keyword[if] identifier[adj] keyword[else] identifier[df] [ identifier[e] ]. identifier[astype] ( identifier[float] )/ identifier[df] [ identifier[pop] ] keyword[for] identifier[e] , identifier[pop] , identifier[adj] keyword[in] identifier[zip] ( identifier[events] , identifier[populations] , identifier[adjusted] )] identifier[names] =[ literal[string] . identifier[join] (( identifier[e] , identifier[p] )) keyword[for] identifier[e] , identifier[p] keyword[in] identifier[zip] ( identifier[events] , identifier[populations] )] identifier[out_df] = identifier[df] . identifier[copy] () identifier[rate_df] = identifier[out_df] . identifier[from_items] ( identifier[list] ( identifier[zip] ( identifier[names] , identifier[rates] ))) identifier[stat_df] = identifier[_univariate_handler] ( identifier[rate_df] , identifier[names] , identifier[w] = identifier[w] , identifier[inplace] = keyword[False] , identifier[pvalue] = identifier[pvalue] , identifier[outvals] = identifier[outvals] , identifier[swapname] = identifier[swapname] , identifier[stat] = identifier[Moran] , ** identifier[stat_kws] ) keyword[for] identifier[col] keyword[in] identifier[stat_df] . identifier[columns] : identifier[df] [ identifier[col] ]= identifier[stat_df] [ identifier[col] ]
def by_col(cls, df, events, populations, w=None, inplace=False, pvalue='sim', outvals=None, swapname='', **stat_kws): """ Function to compute a Moran_Rate statistic on a dataframe Arguments --------- df : pandas.DataFrame a pandas dataframe with a geometry column events : string or list of strings one or more names where events are stored populations : string or list of strings one or more names where the populations corresponding to the events are stored. If one population column is provided, it is used for all event columns. If more than one population column is provided but there is not a population for every event column, an exception will be raised. w : pysal weights object a weights object aligned with the dataframe. If not provided, this is searched for in the dataframe's metadata inplace : bool a boolean denoting whether to operate on the dataframe inplace or to return a series contaning the results of the computation. If operating inplace, the derived columns will be named 'column_moran_rate' pvalue : string a string denoting which pvalue should be returned. Refer to the the Moran_Rate statistic's documentation for available p-values outvals : list of strings list of arbitrary attributes to return as columns from the Moran_Rate statistic **stat_kws : keyword arguments options to pass to the underlying statistic. For this, see the documentation for the Moran_Rate statistic. Returns -------- If inplace, None, and operation is conducted on dataframe in memory. Otherwise, returns a copy of the dataframe with the relevant columns attached. 
See Also --------- For further documentation, refer to the Moran_Rate class in pysal.esda """ if not inplace: new = df.copy() cls.by_col(new, events, populations, w=w, inplace=True, pvalue=pvalue, outvals=outvals, swapname=swapname, **stat_kws) return new # depends on [control=['if'], data=[]] if isinstance(events, str): events = [events] # depends on [control=['if'], data=[]] if isinstance(populations, str): populations = [populations] # depends on [control=['if'], data=[]] if len(populations) < len(events): populations = populations * len(events) # depends on [control=['if'], data=[]] if len(events) != len(populations): raise ValueError('There is not a one-to-one matching between events and populations!\nEvents: {}\n\nPopulations: {}'.format(events, populations)) # depends on [control=['if'], data=[]] adjusted = stat_kws.pop('adjusted', True) if isinstance(adjusted, bool): adjusted = [adjusted] * len(events) # depends on [control=['if'], data=[]] if swapname is '': swapname = cls.__name__.lower() # depends on [control=['if'], data=['swapname']] rates = [assuncao_rate(df[e], df[pop]) if adj else df[e].astype(float) / df[pop] for (e, pop, adj) in zip(events, populations, adjusted)] names = ['-'.join((e, p)) for (e, p) in zip(events, populations)] out_df = df.copy() rate_df = out_df.from_items(list(zip(names, rates))) #trick to avoid importing pandas #how would this get done w/super? stat_df = _univariate_handler(rate_df, names, w=w, inplace=False, pvalue=pvalue, outvals=outvals, swapname=swapname, stat=Moran, **stat_kws) for col in stat_df.columns: df[col] = stat_df[col] # depends on [control=['for'], data=['col']]
def _load_ontology(self, filename, preview_mode=False): """ Loads an ontology Unless preview_mode=True, it is always loaded from the local repository note: if the ontology does not have a cached version, it is created preview_mode: used to pass a URI/path to be inspected without saving it locally """ if not preview_mode: fullpath = self.LOCAL_MODELS + filename g = manager.get_pickled_ontology(filename) if not g: g = manager.do_pickle_ontology(filename) else: fullpath = filename filename = os.path.basename(os.path.normpath(fullpath)) g = Ontospy(fullpath, verbose=True) self.current = {'file': filename, 'fullpath': fullpath, 'graph': g} self.currentEntity = None self._print_entity_intro(g)
def function[_load_ontology, parameter[self, filename, preview_mode]]: constant[ Loads an ontology Unless preview_mode=True, it is always loaded from the local repository note: if the ontology does not have a cached version, it is created preview_mode: used to pass a URI/path to be inspected without saving it locally ] if <ast.UnaryOp object at 0x7da1b1115300> begin[:] variable[fullpath] assign[=] binary_operation[name[self].LOCAL_MODELS + name[filename]] variable[g] assign[=] call[name[manager].get_pickled_ontology, parameter[name[filename]]] if <ast.UnaryOp object at 0x7da1b1115c90> begin[:] variable[g] assign[=] call[name[manager].do_pickle_ontology, parameter[name[filename]]] name[self].current assign[=] dictionary[[<ast.Constant object at 0x7da1b11152d0>, <ast.Constant object at 0x7da1b1116680>, <ast.Constant object at 0x7da1b11164a0>], [<ast.Name object at 0x7da1b11b5540>, <ast.Name object at 0x7da1b11a1900>, <ast.Name object at 0x7da1b11a23b0>]] name[self].currentEntity assign[=] constant[None] call[name[self]._print_entity_intro, parameter[name[g]]]
keyword[def] identifier[_load_ontology] ( identifier[self] , identifier[filename] , identifier[preview_mode] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[preview_mode] : identifier[fullpath] = identifier[self] . identifier[LOCAL_MODELS] + identifier[filename] identifier[g] = identifier[manager] . identifier[get_pickled_ontology] ( identifier[filename] ) keyword[if] keyword[not] identifier[g] : identifier[g] = identifier[manager] . identifier[do_pickle_ontology] ( identifier[filename] ) keyword[else] : identifier[fullpath] = identifier[filename] identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[normpath] ( identifier[fullpath] )) identifier[g] = identifier[Ontospy] ( identifier[fullpath] , identifier[verbose] = keyword[True] ) identifier[self] . identifier[current] ={ literal[string] : identifier[filename] , literal[string] : identifier[fullpath] , literal[string] : identifier[g] } identifier[self] . identifier[currentEntity] = keyword[None] identifier[self] . identifier[_print_entity_intro] ( identifier[g] )
def _load_ontology(self, filename, preview_mode=False): """ Loads an ontology Unless preview_mode=True, it is always loaded from the local repository note: if the ontology does not have a cached version, it is created preview_mode: used to pass a URI/path to be inspected without saving it locally """ if not preview_mode: fullpath = self.LOCAL_MODELS + filename g = manager.get_pickled_ontology(filename) if not g: g = manager.do_pickle_ontology(filename) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: fullpath = filename filename = os.path.basename(os.path.normpath(fullpath)) g = Ontospy(fullpath, verbose=True) self.current = {'file': filename, 'fullpath': fullpath, 'graph': g} self.currentEntity = None self._print_entity_intro(g)
def create(self, domain_name, partner_id=None): """Register a domain you control with netki as a Gem-managed domain. Note: After registering a domain, unless you have already set up its DNSSEC/DS Records, you'll need to do so: http://docs.netki.apiary.io The information required will be an attribute of the returned NetkiDomain object. Args: domain_name (str): Domain to add (e.g. 'gem.co') partner_id (str, optional): your netki partner_id (if you have one) Returns: The new round.NetkiDomain """ params = dict(domain_name=domain_name) if partner_id: params['partner_id'] = partner_id domain = self.wrap(self.resource.create(params)) self.add(domain) return domain
def function[create, parameter[self, domain_name, partner_id]]: constant[Register a domain you control with netki as a Gem-managed domain. Note: After registering a domain, unless you have already set up its DNSSEC/DS Records, you'll need to do so: http://docs.netki.apiary.io The information required will be an attribute of the returned NetkiDomain object. Args: domain_name (str): Domain to add (e.g. 'gem.co') partner_id (str, optional): your netki partner_id (if you have one) Returns: The new round.NetkiDomain ] variable[params] assign[=] call[name[dict], parameter[]] if name[partner_id] begin[:] call[name[params]][constant[partner_id]] assign[=] name[partner_id] variable[domain] assign[=] call[name[self].wrap, parameter[call[name[self].resource.create, parameter[name[params]]]]] call[name[self].add, parameter[name[domain]]] return[name[domain]]
keyword[def] identifier[create] ( identifier[self] , identifier[domain_name] , identifier[partner_id] = keyword[None] ): literal[string] identifier[params] = identifier[dict] ( identifier[domain_name] = identifier[domain_name] ) keyword[if] identifier[partner_id] : identifier[params] [ literal[string] ]= identifier[partner_id] identifier[domain] = identifier[self] . identifier[wrap] ( identifier[self] . identifier[resource] . identifier[create] ( identifier[params] )) identifier[self] . identifier[add] ( identifier[domain] ) keyword[return] identifier[domain]
def create(self, domain_name, partner_id=None): """Register a domain you control with netki as a Gem-managed domain. Note: After registering a domain, unless you have already set up its DNSSEC/DS Records, you'll need to do so: http://docs.netki.apiary.io The information required will be an attribute of the returned NetkiDomain object. Args: domain_name (str): Domain to add (e.g. 'gem.co') partner_id (str, optional): your netki partner_id (if you have one) Returns: The new round.NetkiDomain """ params = dict(domain_name=domain_name) if partner_id: params['partner_id'] = partner_id # depends on [control=['if'], data=[]] domain = self.wrap(self.resource.create(params)) self.add(domain) return domain
def tintWith(self, red, green, blue): """tintWith(self, red, green, blue)""" if not self.colorspace or self.colorspace.n > 3: print("warning: colorspace invalid for function") return return _fitz.Pixmap_tintWith(self, red, green, blue)
def function[tintWith, parameter[self, red, green, blue]]: constant[tintWith(self, red, green, blue)] if <ast.BoolOp object at 0x7da1b167c7c0> begin[:] call[name[print], parameter[constant[warning: colorspace invalid for function]]] return[None] return[call[name[_fitz].Pixmap_tintWith, parameter[name[self], name[red], name[green], name[blue]]]]
keyword[def] identifier[tintWith] ( identifier[self] , identifier[red] , identifier[green] , identifier[blue] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[colorspace] keyword[or] identifier[self] . identifier[colorspace] . identifier[n] > literal[int] : identifier[print] ( literal[string] ) keyword[return] keyword[return] identifier[_fitz] . identifier[Pixmap_tintWith] ( identifier[self] , identifier[red] , identifier[green] , identifier[blue] )
def tintWith(self, red, green, blue): """tintWith(self, red, green, blue)""" if not self.colorspace or self.colorspace.n > 3: print('warning: colorspace invalid for function') return # depends on [control=['if'], data=[]] return _fitz.Pixmap_tintWith(self, red, green, blue)
def sample(self, maxiter=None, maxcall=None, dlogz=0.01, logl_max=np.inf, save_bounds=True, save_samples=True): """ **The main nested sampling loop.** Iteratively replace the worst live point with a sample drawn uniformly from the prior until the provided stopping criteria are reached. Instantiates a generator that will be called by the user. Parameters ---------- maxiter : int, optional Maximum number of iterations. Iteration may stop earlier if the termination condition is reached. Default is `sys.maxsize` (no limit). maxcall : int, optional Maximum number of likelihood evaluations. Iteration may stop earlier if termination condition is reached. Default is `sys.maxsize` (no limit). dlogz : float, optional Iteration will stop when the estimated contribution of the remaining prior volume to the total evidence falls below this threshold. Explicitly, the stopping criterion is `ln(z + z_est) - ln(z) < dlogz`, where `z` is the current evidence from all saved samples and `z_est` is the estimated contribution from the remaining volume. Default is `0.01`. logl_max : float, optional Iteration will stop when the sampled ln(likelihood) exceeds the threshold set by `logl_max`. Default is no bound (`np.inf`). save_bounds : bool, optional Whether or not to save past distributions used to bound the live points internally. Default is `True`. save_samples : bool, optional Whether or not to save past samples from the nested sampling run (along with other ancillary quantities) internally. Default is `True`. Returns ------- worst : int Index of the live point with the worst likelihood. This is our new dead point sample. ustar : `~numpy.ndarray` with shape (npdim,) Position of the sample. vstar : `~numpy.ndarray` with shape (ndim,) Transformed position of the sample. loglstar : float Ln(likelihood) of the sample. logvol : float Ln(prior volume) within the sample. logwt : float Ln(weight) of the sample. logz : float Cumulative ln(evidence) up to the sample (inclusive). 
logzvar : float Estimated cumulative variance on `logz` (inclusive). h : float Cumulative information up to the sample (inclusive). nc : int Number of likelihood calls performed before the new live point was accepted. worst_it : int Iteration when the live (now dead) point was originally proposed. boundidx : int Index of the bound the dead point was originally drawn from. bounditer : int Index of the bound being used at the current iteration. eff : float The cumulative sampling efficiency (in percent). delta_logz : float The estimated remaining evidence expressed as the ln(ratio) of the current evidence. """ # Initialize quantities. if maxcall is None: maxcall = sys.maxsize if maxiter is None: maxiter = sys.maxsize self.save_samples = save_samples self.save_bounds = save_bounds ncall = 0 # Check whether we're starting fresh or continuing a previous run. if self.it == 1: # Initialize values for nested sampling loop. h = 0. # information, initially *0.* logz = -1.e300 # ln(evidence), initially *0.* logzvar = 0. # var[ln(evidence)], initially *0.* logvol = 0. # initially contains the whole prior (volume=1.) loglstar = -1.e300 # initial ln(likelihood) delta_logz = 1.e300 # ln(ratio) of total/current evidence # Check if we should initialize a different bounding distribution # instead of using the unit cube. pointvol = 1. / self.nlive if self._beyond_unit_bound(loglstar): bound = self.update(pointvol) if self.save_bounds: self.bound.append(bound) self.nbound += 1 self.since_update = 0 else: # Remove live points (if added) from previous run. if self.added_live: self._remove_live_points() # Get final state from previous run. h = self.saved_h[-1] # information logz = self.saved_logz[-1] # ln(evidence) logzvar = self.saved_logzvar[-1] # var[ln(evidence)] logvol = self.saved_logvol[-1] # ln(volume) loglstar = min(self.live_logl) # ln(likelihood) delta_logz = np.logaddexp(logz, np.max(self.live_logl) + logvol) - logz # log-evidence ratio # The main nested sampling loop. 
for it in range(sys.maxsize): # Stopping criterion 1: current number of iterations # exceeds `maxiter`. if it > maxiter: # If dumping past states, save only the required quantities. if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) break # Stopping criterion 2: current number of `loglikelihood` # calls exceeds `maxcall`. if ncall > maxcall: if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) break # Stopping criterion 3: estimated (fractional) remaining evidence # lies below some threshold set by `dlogz`. logz_remain = np.max(self.live_logl) + logvol delta_logz = np.logaddexp(logz, logz_remain) - logz if dlogz is not None: if delta_logz < dlogz: if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) break # Stopping criterion 4: last dead point exceeded the upper # `logl_max` bound. if loglstar > logl_max: if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) break # Expected ln(volume) shrinkage. logvol -= self.dlv # After `update_interval` interations have passed *and* we meet # the criteria for moving beyond sampling from the unit cube, # update the bound using the current set of live points. ucheck = self.since_update >= self.update_interval bcheck = self._beyond_unit_bound(loglstar) if ucheck and bcheck: pointvol = math.exp(logvol) / self.nlive bound = self.update(pointvol) if self.save_bounds: self.bound.append(bound) self.nbound += 1 self.since_update = 0 # Locate the "live" point with the lowest `logl`. 
worst = np.argmin(self.live_logl) # index worst_it = self.live_it[worst] # when point was proposed boundidx = self.live_bound[worst] # associated bound index # Set our new worst likelihood constraint. ustar = np.array(self.live_u[worst]) # unit cube position vstar = np.array(self.live_v[worst]) # transformed position loglstar_new = self.live_logl[worst] # new likelihood # Set our new weight using quadratic estimates (trapezoid rule). logdvol = logsumexp(a=[logvol + self.dlv, logvol], b=[0.5, -0.5]) # ln(dvol) logwt = np.logaddexp(loglstar_new, loglstar) + logdvol # ln(wt) # Sample a new live point from within the likelihood constraint # `logl > loglstar` using the bounding distribution and sampling # method from our sampler. u, v, logl, nc = self._new_point(loglstar_new, logvol) ncall += nc self.ncall += nc self.since_update += nc # Update evidence `logz` and information `h`. logz_new = np.logaddexp(logz, logwt) lzterm = (math.exp(loglstar - logz_new) * loglstar + math.exp(loglstar_new - logz_new) * loglstar_new) h_new = (math.exp(logdvol) * lzterm + math.exp(logz - logz_new) * (h + logz) - logz_new) dh = h_new - h h = h_new logz = logz_new logzvar += dh * self.dlv loglstar = loglstar_new # Compute bound index at the current iteration. if self._beyond_unit_bound(loglstar): bounditer = self.nbound - 1 else: bounditer = 0 # Save the worst live point. It is now a "dead" point. if self.save_samples: self.saved_id.append(worst) self.saved_u.append(ustar) self.saved_v.append(vstar) self.saved_logl.append(loglstar) self.saved_logvol.append(logvol) self.saved_logwt.append(logwt) self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_nc.append(nc) self.saved_boundidx.append(boundidx) self.saved_it.append(worst_it) self.saved_bounditer.append(bounditer) self.saved_scale.append(self.scale) # Update the live point (previously our "worst" point). 
self.live_u[worst] = u self.live_v[worst] = v self.live_logl[worst] = logl self.live_bound[worst] = bounditer self.live_it[worst] = self.it # Compute our sampling efficiency. self.eff = 100. * self.it / self.ncall # Increment total number of iterations. self.it += 1 # Return dead point and ancillary quantities. yield (worst, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, nc, worst_it, boundidx, bounditer, self.eff, delta_logz)
def function[sample, parameter[self, maxiter, maxcall, dlogz, logl_max, save_bounds, save_samples]]: constant[ **The main nested sampling loop.** Iteratively replace the worst live point with a sample drawn uniformly from the prior until the provided stopping criteria are reached. Instantiates a generator that will be called by the user. Parameters ---------- maxiter : int, optional Maximum number of iterations. Iteration may stop earlier if the termination condition is reached. Default is `sys.maxsize` (no limit). maxcall : int, optional Maximum number of likelihood evaluations. Iteration may stop earlier if termination condition is reached. Default is `sys.maxsize` (no limit). dlogz : float, optional Iteration will stop when the estimated contribution of the remaining prior volume to the total evidence falls below this threshold. Explicitly, the stopping criterion is `ln(z + z_est) - ln(z) < dlogz`, where `z` is the current evidence from all saved samples and `z_est` is the estimated contribution from the remaining volume. Default is `0.01`. logl_max : float, optional Iteration will stop when the sampled ln(likelihood) exceeds the threshold set by `logl_max`. Default is no bound (`np.inf`). save_bounds : bool, optional Whether or not to save past distributions used to bound the live points internally. Default is `True`. save_samples : bool, optional Whether or not to save past samples from the nested sampling run (along with other ancillary quantities) internally. Default is `True`. Returns ------- worst : int Index of the live point with the worst likelihood. This is our new dead point sample. ustar : `~numpy.ndarray` with shape (npdim,) Position of the sample. vstar : `~numpy.ndarray` with shape (ndim,) Transformed position of the sample. loglstar : float Ln(likelihood) of the sample. logvol : float Ln(prior volume) within the sample. logwt : float Ln(weight) of the sample. logz : float Cumulative ln(evidence) up to the sample (inclusive). 
logzvar : float Estimated cumulative variance on `logz` (inclusive). h : float Cumulative information up to the sample (inclusive). nc : int Number of likelihood calls performed before the new live point was accepted. worst_it : int Iteration when the live (now dead) point was originally proposed. boundidx : int Index of the bound the dead point was originally drawn from. bounditer : int Index of the bound being used at the current iteration. eff : float The cumulative sampling efficiency (in percent). delta_logz : float The estimated remaining evidence expressed as the ln(ratio) of the current evidence. ] if compare[name[maxcall] is constant[None]] begin[:] variable[maxcall] assign[=] name[sys].maxsize if compare[name[maxiter] is constant[None]] begin[:] variable[maxiter] assign[=] name[sys].maxsize name[self].save_samples assign[=] name[save_samples] name[self].save_bounds assign[=] name[save_bounds] variable[ncall] assign[=] constant[0] if compare[name[self].it equal[==] constant[1]] begin[:] variable[h] assign[=] constant[0.0] variable[logz] assign[=] <ast.UnaryOp object at 0x7da1b1e034f0> variable[logzvar] assign[=] constant[0.0] variable[logvol] assign[=] constant[0.0] variable[loglstar] assign[=] <ast.UnaryOp object at 0x7da1b1e03310> variable[delta_logz] assign[=] constant[1e+300] variable[pointvol] assign[=] binary_operation[constant[1.0] / name[self].nlive] if call[name[self]._beyond_unit_bound, parameter[name[loglstar]]] begin[:] variable[bound] assign[=] call[name[self].update, parameter[name[pointvol]]] if name[self].save_bounds begin[:] call[name[self].bound.append, parameter[name[bound]]] <ast.AugAssign object at 0x7da1b1e02d10> name[self].since_update assign[=] constant[0] for taget[name[it]] in starred[call[name[range], parameter[name[sys].maxsize]]] begin[:] if compare[name[it] greater[>] name[maxiter]] begin[:] if <ast.UnaryOp object at 0x7da1b1d98970> begin[:] call[name[self].saved_logz.append, parameter[name[logz]]] 
call[name[self].saved_logzvar.append, parameter[name[logzvar]]] call[name[self].saved_h.append, parameter[name[h]]] call[name[self].saved_logvol.append, parameter[name[logvol]]] call[name[self].saved_logl.append, parameter[name[loglstar]]] break if compare[name[ncall] greater[>] name[maxcall]] begin[:] if <ast.UnaryOp object at 0x7da1b1d99420> begin[:] call[name[self].saved_logz.append, parameter[name[logz]]] call[name[self].saved_logzvar.append, parameter[name[logzvar]]] call[name[self].saved_h.append, parameter[name[h]]] call[name[self].saved_logvol.append, parameter[name[logvol]]] call[name[self].saved_logl.append, parameter[name[loglstar]]] break variable[logz_remain] assign[=] binary_operation[call[name[np].max, parameter[name[self].live_logl]] + name[logvol]] variable[delta_logz] assign[=] binary_operation[call[name[np].logaddexp, parameter[name[logz], name[logz_remain]]] - name[logz]] if compare[name[dlogz] is_not constant[None]] begin[:] if compare[name[delta_logz] less[<] name[dlogz]] begin[:] if <ast.UnaryOp object at 0x7da1b1d99480> begin[:] call[name[self].saved_logz.append, parameter[name[logz]]] call[name[self].saved_logzvar.append, parameter[name[logzvar]]] call[name[self].saved_h.append, parameter[name[h]]] call[name[self].saved_logvol.append, parameter[name[logvol]]] call[name[self].saved_logl.append, parameter[name[loglstar]]] break if compare[name[loglstar] greater[>] name[logl_max]] begin[:] if <ast.UnaryOp object at 0x7da1b1d98430> begin[:] call[name[self].saved_logz.append, parameter[name[logz]]] call[name[self].saved_logzvar.append, parameter[name[logzvar]]] call[name[self].saved_h.append, parameter[name[h]]] call[name[self].saved_logvol.append, parameter[name[logvol]]] call[name[self].saved_logl.append, parameter[name[loglstar]]] break <ast.AugAssign object at 0x7da1b1d9b130> variable[ucheck] assign[=] compare[name[self].since_update greater_or_equal[>=] name[self].update_interval] variable[bcheck] assign[=] 
call[name[self]._beyond_unit_bound, parameter[name[loglstar]]] if <ast.BoolOp object at 0x7da1b1d9b7c0> begin[:] variable[pointvol] assign[=] binary_operation[call[name[math].exp, parameter[name[logvol]]] / name[self].nlive] variable[bound] assign[=] call[name[self].update, parameter[name[pointvol]]] if name[self].save_bounds begin[:] call[name[self].bound.append, parameter[name[bound]]] <ast.AugAssign object at 0x7da18bc704c0> name[self].since_update assign[=] constant[0] variable[worst] assign[=] call[name[np].argmin, parameter[name[self].live_logl]] variable[worst_it] assign[=] call[name[self].live_it][name[worst]] variable[boundidx] assign[=] call[name[self].live_bound][name[worst]] variable[ustar] assign[=] call[name[np].array, parameter[call[name[self].live_u][name[worst]]]] variable[vstar] assign[=] call[name[np].array, parameter[call[name[self].live_v][name[worst]]]] variable[loglstar_new] assign[=] call[name[self].live_logl][name[worst]] variable[logdvol] assign[=] call[name[logsumexp], parameter[]] variable[logwt] assign[=] binary_operation[call[name[np].logaddexp, parameter[name[loglstar_new], name[loglstar]]] + name[logdvol]] <ast.Tuple object at 0x7da1b1eea290> assign[=] call[name[self]._new_point, parameter[name[loglstar_new], name[logvol]]] <ast.AugAssign object at 0x7da1b1eead10> <ast.AugAssign object at 0x7da1b1eeada0> <ast.AugAssign object at 0x7da1b1eeb040> variable[logz_new] assign[=] call[name[np].logaddexp, parameter[name[logz], name[logwt]]] variable[lzterm] assign[=] binary_operation[binary_operation[call[name[math].exp, parameter[binary_operation[name[loglstar] - name[logz_new]]]] * name[loglstar]] + binary_operation[call[name[math].exp, parameter[binary_operation[name[loglstar_new] - name[logz_new]]]] * name[loglstar_new]]] variable[h_new] assign[=] binary_operation[binary_operation[binary_operation[call[name[math].exp, parameter[name[logdvol]]] * name[lzterm]] + binary_operation[call[name[math].exp, parameter[binary_operation[name[logz] - 
name[logz_new]]]] * binary_operation[name[h] + name[logz]]]] - name[logz_new]] variable[dh] assign[=] binary_operation[name[h_new] - name[h]] variable[h] assign[=] name[h_new] variable[logz] assign[=] name[logz_new] <ast.AugAssign object at 0x7da1b1ee92d0> variable[loglstar] assign[=] name[loglstar_new] if call[name[self]._beyond_unit_bound, parameter[name[loglstar]]] begin[:] variable[bounditer] assign[=] binary_operation[name[self].nbound - constant[1]] if name[self].save_samples begin[:] call[name[self].saved_id.append, parameter[name[worst]]] call[name[self].saved_u.append, parameter[name[ustar]]] call[name[self].saved_v.append, parameter[name[vstar]]] call[name[self].saved_logl.append, parameter[name[loglstar]]] call[name[self].saved_logvol.append, parameter[name[logvol]]] call[name[self].saved_logwt.append, parameter[name[logwt]]] call[name[self].saved_logz.append, parameter[name[logz]]] call[name[self].saved_logzvar.append, parameter[name[logzvar]]] call[name[self].saved_h.append, parameter[name[h]]] call[name[self].saved_nc.append, parameter[name[nc]]] call[name[self].saved_boundidx.append, parameter[name[boundidx]]] call[name[self].saved_it.append, parameter[name[worst_it]]] call[name[self].saved_bounditer.append, parameter[name[bounditer]]] call[name[self].saved_scale.append, parameter[name[self].scale]] call[name[self].live_u][name[worst]] assign[=] name[u] call[name[self].live_v][name[worst]] assign[=] name[v] call[name[self].live_logl][name[worst]] assign[=] name[logl] call[name[self].live_bound][name[worst]] assign[=] name[bounditer] call[name[self].live_it][name[worst]] assign[=] name[self].it name[self].eff assign[=] binary_operation[binary_operation[constant[100.0] * name[self].it] / name[self].ncall] <ast.AugAssign object at 0x7da1b1d48970> <ast.Yield object at 0x7da1b1d49f30>
keyword[def] identifier[sample] ( identifier[self] , identifier[maxiter] = keyword[None] , identifier[maxcall] = keyword[None] , identifier[dlogz] = literal[int] , identifier[logl_max] = identifier[np] . identifier[inf] , identifier[save_bounds] = keyword[True] , identifier[save_samples] = keyword[True] ): literal[string] keyword[if] identifier[maxcall] keyword[is] keyword[None] : identifier[maxcall] = identifier[sys] . identifier[maxsize] keyword[if] identifier[maxiter] keyword[is] keyword[None] : identifier[maxiter] = identifier[sys] . identifier[maxsize] identifier[self] . identifier[save_samples] = identifier[save_samples] identifier[self] . identifier[save_bounds] = identifier[save_bounds] identifier[ncall] = literal[int] keyword[if] identifier[self] . identifier[it] == literal[int] : identifier[h] = literal[int] identifier[logz] =- literal[int] identifier[logzvar] = literal[int] identifier[logvol] = literal[int] identifier[loglstar] =- literal[int] identifier[delta_logz] = literal[int] identifier[pointvol] = literal[int] / identifier[self] . identifier[nlive] keyword[if] identifier[self] . identifier[_beyond_unit_bound] ( identifier[loglstar] ): identifier[bound] = identifier[self] . identifier[update] ( identifier[pointvol] ) keyword[if] identifier[self] . identifier[save_bounds] : identifier[self] . identifier[bound] . identifier[append] ( identifier[bound] ) identifier[self] . identifier[nbound] += literal[int] identifier[self] . identifier[since_update] = literal[int] keyword[else] : keyword[if] identifier[self] . identifier[added_live] : identifier[self] . identifier[_remove_live_points] () identifier[h] = identifier[self] . identifier[saved_h] [- literal[int] ] identifier[logz] = identifier[self] . identifier[saved_logz] [- literal[int] ] identifier[logzvar] = identifier[self] . identifier[saved_logzvar] [- literal[int] ] identifier[logvol] = identifier[self] . 
identifier[saved_logvol] [- literal[int] ] identifier[loglstar] = identifier[min] ( identifier[self] . identifier[live_logl] ) identifier[delta_logz] = identifier[np] . identifier[logaddexp] ( identifier[logz] , identifier[np] . identifier[max] ( identifier[self] . identifier[live_logl] )+ identifier[logvol] )- identifier[logz] keyword[for] identifier[it] keyword[in] identifier[range] ( identifier[sys] . identifier[maxsize] ): keyword[if] identifier[it] > identifier[maxiter] : keyword[if] keyword[not] identifier[self] . identifier[save_samples] : identifier[self] . identifier[saved_logz] . identifier[append] ( identifier[logz] ) identifier[self] . identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] ) identifier[self] . identifier[saved_h] . identifier[append] ( identifier[h] ) identifier[self] . identifier[saved_logvol] . identifier[append] ( identifier[logvol] ) identifier[self] . identifier[saved_logl] . identifier[append] ( identifier[loglstar] ) keyword[break] keyword[if] identifier[ncall] > identifier[maxcall] : keyword[if] keyword[not] identifier[self] . identifier[save_samples] : identifier[self] . identifier[saved_logz] . identifier[append] ( identifier[logz] ) identifier[self] . identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] ) identifier[self] . identifier[saved_h] . identifier[append] ( identifier[h] ) identifier[self] . identifier[saved_logvol] . identifier[append] ( identifier[logvol] ) identifier[self] . identifier[saved_logl] . identifier[append] ( identifier[loglstar] ) keyword[break] identifier[logz_remain] = identifier[np] . identifier[max] ( identifier[self] . identifier[live_logl] )+ identifier[logvol] identifier[delta_logz] = identifier[np] . identifier[logaddexp] ( identifier[logz] , identifier[logz_remain] )- identifier[logz] keyword[if] identifier[dlogz] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[delta_logz] < identifier[dlogz] : keyword[if] keyword[not] identifier[self] . 
identifier[save_samples] : identifier[self] . identifier[saved_logz] . identifier[append] ( identifier[logz] ) identifier[self] . identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] ) identifier[self] . identifier[saved_h] . identifier[append] ( identifier[h] ) identifier[self] . identifier[saved_logvol] . identifier[append] ( identifier[logvol] ) identifier[self] . identifier[saved_logl] . identifier[append] ( identifier[loglstar] ) keyword[break] keyword[if] identifier[loglstar] > identifier[logl_max] : keyword[if] keyword[not] identifier[self] . identifier[save_samples] : identifier[self] . identifier[saved_logz] . identifier[append] ( identifier[logz] ) identifier[self] . identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] ) identifier[self] . identifier[saved_h] . identifier[append] ( identifier[h] ) identifier[self] . identifier[saved_logvol] . identifier[append] ( identifier[logvol] ) identifier[self] . identifier[saved_logl] . identifier[append] ( identifier[loglstar] ) keyword[break] identifier[logvol] -= identifier[self] . identifier[dlv] identifier[ucheck] = identifier[self] . identifier[since_update] >= identifier[self] . identifier[update_interval] identifier[bcheck] = identifier[self] . identifier[_beyond_unit_bound] ( identifier[loglstar] ) keyword[if] identifier[ucheck] keyword[and] identifier[bcheck] : identifier[pointvol] = identifier[math] . identifier[exp] ( identifier[logvol] )/ identifier[self] . identifier[nlive] identifier[bound] = identifier[self] . identifier[update] ( identifier[pointvol] ) keyword[if] identifier[self] . identifier[save_bounds] : identifier[self] . identifier[bound] . identifier[append] ( identifier[bound] ) identifier[self] . identifier[nbound] += literal[int] identifier[self] . identifier[since_update] = literal[int] identifier[worst] = identifier[np] . identifier[argmin] ( identifier[self] . identifier[live_logl] ) identifier[worst_it] = identifier[self] . 
identifier[live_it] [ identifier[worst] ] identifier[boundidx] = identifier[self] . identifier[live_bound] [ identifier[worst] ] identifier[ustar] = identifier[np] . identifier[array] ( identifier[self] . identifier[live_u] [ identifier[worst] ]) identifier[vstar] = identifier[np] . identifier[array] ( identifier[self] . identifier[live_v] [ identifier[worst] ]) identifier[loglstar_new] = identifier[self] . identifier[live_logl] [ identifier[worst] ] identifier[logdvol] = identifier[logsumexp] ( identifier[a] =[ identifier[logvol] + identifier[self] . identifier[dlv] , identifier[logvol] ], identifier[b] =[ literal[int] ,- literal[int] ]) identifier[logwt] = identifier[np] . identifier[logaddexp] ( identifier[loglstar_new] , identifier[loglstar] )+ identifier[logdvol] identifier[u] , identifier[v] , identifier[logl] , identifier[nc] = identifier[self] . identifier[_new_point] ( identifier[loglstar_new] , identifier[logvol] ) identifier[ncall] += identifier[nc] identifier[self] . identifier[ncall] += identifier[nc] identifier[self] . identifier[since_update] += identifier[nc] identifier[logz_new] = identifier[np] . identifier[logaddexp] ( identifier[logz] , identifier[logwt] ) identifier[lzterm] =( identifier[math] . identifier[exp] ( identifier[loglstar] - identifier[logz_new] )* identifier[loglstar] + identifier[math] . identifier[exp] ( identifier[loglstar_new] - identifier[logz_new] )* identifier[loglstar_new] ) identifier[h_new] =( identifier[math] . identifier[exp] ( identifier[logdvol] )* identifier[lzterm] + identifier[math] . identifier[exp] ( identifier[logz] - identifier[logz_new] )*( identifier[h] + identifier[logz] )- identifier[logz_new] ) identifier[dh] = identifier[h_new] - identifier[h] identifier[h] = identifier[h_new] identifier[logz] = identifier[logz_new] identifier[logzvar] += identifier[dh] * identifier[self] . identifier[dlv] identifier[loglstar] = identifier[loglstar_new] keyword[if] identifier[self] . 
identifier[_beyond_unit_bound] ( identifier[loglstar] ): identifier[bounditer] = identifier[self] . identifier[nbound] - literal[int] keyword[else] : identifier[bounditer] = literal[int] keyword[if] identifier[self] . identifier[save_samples] : identifier[self] . identifier[saved_id] . identifier[append] ( identifier[worst] ) identifier[self] . identifier[saved_u] . identifier[append] ( identifier[ustar] ) identifier[self] . identifier[saved_v] . identifier[append] ( identifier[vstar] ) identifier[self] . identifier[saved_logl] . identifier[append] ( identifier[loglstar] ) identifier[self] . identifier[saved_logvol] . identifier[append] ( identifier[logvol] ) identifier[self] . identifier[saved_logwt] . identifier[append] ( identifier[logwt] ) identifier[self] . identifier[saved_logz] . identifier[append] ( identifier[logz] ) identifier[self] . identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] ) identifier[self] . identifier[saved_h] . identifier[append] ( identifier[h] ) identifier[self] . identifier[saved_nc] . identifier[append] ( identifier[nc] ) identifier[self] . identifier[saved_boundidx] . identifier[append] ( identifier[boundidx] ) identifier[self] . identifier[saved_it] . identifier[append] ( identifier[worst_it] ) identifier[self] . identifier[saved_bounditer] . identifier[append] ( identifier[bounditer] ) identifier[self] . identifier[saved_scale] . identifier[append] ( identifier[self] . identifier[scale] ) identifier[self] . identifier[live_u] [ identifier[worst] ]= identifier[u] identifier[self] . identifier[live_v] [ identifier[worst] ]= identifier[v] identifier[self] . identifier[live_logl] [ identifier[worst] ]= identifier[logl] identifier[self] . identifier[live_bound] [ identifier[worst] ]= identifier[bounditer] identifier[self] . identifier[live_it] [ identifier[worst] ]= identifier[self] . identifier[it] identifier[self] . identifier[eff] = literal[int] * identifier[self] . identifier[it] / identifier[self] . 
identifier[ncall] identifier[self] . identifier[it] += literal[int] keyword[yield] ( identifier[worst] , identifier[ustar] , identifier[vstar] , identifier[loglstar] , identifier[logvol] , identifier[logwt] , identifier[logz] , identifier[logzvar] , identifier[h] , identifier[nc] , identifier[worst_it] , identifier[boundidx] , identifier[bounditer] , identifier[self] . identifier[eff] , identifier[delta_logz] )
def sample(self, maxiter=None, maxcall=None, dlogz=0.01, logl_max=np.inf, save_bounds=True, save_samples=True): """ **The main nested sampling loop.** Iteratively replace the worst live point with a sample drawn uniformly from the prior until the provided stopping criteria are reached. Instantiates a generator that will be called by the user. Parameters ---------- maxiter : int, optional Maximum number of iterations. Iteration may stop earlier if the termination condition is reached. Default is `sys.maxsize` (no limit). maxcall : int, optional Maximum number of likelihood evaluations. Iteration may stop earlier if termination condition is reached. Default is `sys.maxsize` (no limit). dlogz : float, optional Iteration will stop when the estimated contribution of the remaining prior volume to the total evidence falls below this threshold. Explicitly, the stopping criterion is `ln(z + z_est) - ln(z) < dlogz`, where `z` is the current evidence from all saved samples and `z_est` is the estimated contribution from the remaining volume. Default is `0.01`. logl_max : float, optional Iteration will stop when the sampled ln(likelihood) exceeds the threshold set by `logl_max`. Default is no bound (`np.inf`). save_bounds : bool, optional Whether or not to save past distributions used to bound the live points internally. Default is `True`. save_samples : bool, optional Whether or not to save past samples from the nested sampling run (along with other ancillary quantities) internally. Default is `True`. Returns ------- worst : int Index of the live point with the worst likelihood. This is our new dead point sample. ustar : `~numpy.ndarray` with shape (npdim,) Position of the sample. vstar : `~numpy.ndarray` with shape (ndim,) Transformed position of the sample. loglstar : float Ln(likelihood) of the sample. logvol : float Ln(prior volume) within the sample. logwt : float Ln(weight) of the sample. logz : float Cumulative ln(evidence) up to the sample (inclusive). 
logzvar : float Estimated cumulative variance on `logz` (inclusive). h : float Cumulative information up to the sample (inclusive). nc : int Number of likelihood calls performed before the new live point was accepted. worst_it : int Iteration when the live (now dead) point was originally proposed. boundidx : int Index of the bound the dead point was originally drawn from. bounditer : int Index of the bound being used at the current iteration. eff : float The cumulative sampling efficiency (in percent). delta_logz : float The estimated remaining evidence expressed as the ln(ratio) of the current evidence. """ # Initialize quantities. if maxcall is None: maxcall = sys.maxsize # depends on [control=['if'], data=['maxcall']] if maxiter is None: maxiter = sys.maxsize # depends on [control=['if'], data=['maxiter']] self.save_samples = save_samples self.save_bounds = save_bounds ncall = 0 # Check whether we're starting fresh or continuing a previous run. if self.it == 1: # Initialize values for nested sampling loop. h = 0.0 # information, initially *0.* logz = -1e+300 # ln(evidence), initially *0.* logzvar = 0.0 # var[ln(evidence)], initially *0.* logvol = 0.0 # initially contains the whole prior (volume=1.) loglstar = -1e+300 # initial ln(likelihood) delta_logz = 1e+300 # ln(ratio) of total/current evidence # Check if we should initialize a different bounding distribution # instead of using the unit cube. pointvol = 1.0 / self.nlive if self._beyond_unit_bound(loglstar): bound = self.update(pointvol) if self.save_bounds: self.bound.append(bound) self.nbound += 1 # depends on [control=['if'], data=[]] self.since_update = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # Remove live points (if added) from previous run. if self.added_live: self._remove_live_points() # depends on [control=['if'], data=[]] # Get final state from previous run. 
h = self.saved_h[-1] # information logz = self.saved_logz[-1] # ln(evidence) logzvar = self.saved_logzvar[-1] # var[ln(evidence)] logvol = self.saved_logvol[-1] # ln(volume) loglstar = min(self.live_logl) # ln(likelihood) delta_logz = np.logaddexp(logz, np.max(self.live_logl) + logvol) - logz # log-evidence ratio # The main nested sampling loop. for it in range(sys.maxsize): # Stopping criterion 1: current number of iterations # exceeds `maxiter`. if it > maxiter: # If dumping past states, save only the required quantities. if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] # Stopping criterion 2: current number of `loglikelihood` # calls exceeds `maxcall`. if ncall > maxcall: if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] # Stopping criterion 3: estimated (fractional) remaining evidence # lies below some threshold set by `dlogz`. logz_remain = np.max(self.live_logl) + logvol delta_logz = np.logaddexp(logz, logz_remain) - logz if dlogz is not None: if delta_logz < dlogz: if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dlogz']] # Stopping criterion 4: last dead point exceeded the upper # `logl_max` bound. 
if loglstar > logl_max: if not self.save_samples: self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_logvol.append(logvol) self.saved_logl.append(loglstar) # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=['loglstar']] # Expected ln(volume) shrinkage. logvol -= self.dlv # After `update_interval` interations have passed *and* we meet # the criteria for moving beyond sampling from the unit cube, # update the bound using the current set of live points. ucheck = self.since_update >= self.update_interval bcheck = self._beyond_unit_bound(loglstar) if ucheck and bcheck: pointvol = math.exp(logvol) / self.nlive bound = self.update(pointvol) if self.save_bounds: self.bound.append(bound) # depends on [control=['if'], data=[]] self.nbound += 1 self.since_update = 0 # depends on [control=['if'], data=[]] # Locate the "live" point with the lowest `logl`. worst = np.argmin(self.live_logl) # index worst_it = self.live_it[worst] # when point was proposed boundidx = self.live_bound[worst] # associated bound index # Set our new worst likelihood constraint. ustar = np.array(self.live_u[worst]) # unit cube position vstar = np.array(self.live_v[worst]) # transformed position loglstar_new = self.live_logl[worst] # new likelihood # Set our new weight using quadratic estimates (trapezoid rule). logdvol = logsumexp(a=[logvol + self.dlv, logvol], b=[0.5, -0.5]) # ln(dvol) logwt = np.logaddexp(loglstar_new, loglstar) + logdvol # ln(wt) # Sample a new live point from within the likelihood constraint # `logl > loglstar` using the bounding distribution and sampling # method from our sampler. (u, v, logl, nc) = self._new_point(loglstar_new, logvol) ncall += nc self.ncall += nc self.since_update += nc # Update evidence `logz` and information `h`. 
logz_new = np.logaddexp(logz, logwt) lzterm = math.exp(loglstar - logz_new) * loglstar + math.exp(loglstar_new - logz_new) * loglstar_new h_new = math.exp(logdvol) * lzterm + math.exp(logz - logz_new) * (h + logz) - logz_new dh = h_new - h h = h_new logz = logz_new logzvar += dh * self.dlv loglstar = loglstar_new # Compute bound index at the current iteration. if self._beyond_unit_bound(loglstar): bounditer = self.nbound - 1 # depends on [control=['if'], data=[]] else: bounditer = 0 # Save the worst live point. It is now a "dead" point. if self.save_samples: self.saved_id.append(worst) self.saved_u.append(ustar) self.saved_v.append(vstar) self.saved_logl.append(loglstar) self.saved_logvol.append(logvol) self.saved_logwt.append(logwt) self.saved_logz.append(logz) self.saved_logzvar.append(logzvar) self.saved_h.append(h) self.saved_nc.append(nc) self.saved_boundidx.append(boundidx) self.saved_it.append(worst_it) self.saved_bounditer.append(bounditer) self.saved_scale.append(self.scale) # depends on [control=['if'], data=[]] # Update the live point (previously our "worst" point). self.live_u[worst] = u self.live_v[worst] = v self.live_logl[worst] = logl self.live_bound[worst] = bounditer self.live_it[worst] = self.it # Compute our sampling efficiency. self.eff = 100.0 * self.it / self.ncall # Increment total number of iterations. self.it += 1 # Return dead point and ancillary quantities. yield (worst, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, nc, worst_it, boundidx, bounditer, self.eff, delta_logz) # depends on [control=['for'], data=['it']]
def get_page_content(self, page_id, page_info=0): """ PageInfo 0 - Returns only basic page content, without selection markup and binary data objects. This is the standard value to pass. 1 - Returns page content with no selection markup, but with all binary data. 2 - Returns page content with selection markup, but no binary data. 3 - Returns page content with selection markup and all binary data. """ try: return(self.process.GetPageContent(page_id, "", page_info)) except Exception as e: print(e) print("Could not get Page Content")
def function[get_page_content, parameter[self, page_id, page_info]]: constant[ PageInfo 0 - Returns only basic page content, without selection markup and binary data objects. This is the standard value to pass. 1 - Returns page content with no selection markup, but with all binary data. 2 - Returns page content with selection markup, but no binary data. 3 - Returns page content with selection markup and all binary data. ] <ast.Try object at 0x7da204565fc0>
keyword[def] identifier[get_page_content] ( identifier[self] , identifier[page_id] , identifier[page_info] = literal[int] ): literal[string] keyword[try] : keyword[return] ( identifier[self] . identifier[process] . identifier[GetPageContent] ( identifier[page_id] , literal[string] , identifier[page_info] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( identifier[e] ) identifier[print] ( literal[string] )
def get_page_content(self, page_id, page_info=0): """ PageInfo 0 - Returns only basic page content, without selection markup and binary data objects. This is the standard value to pass. 1 - Returns page content with no selection markup, but with all binary data. 2 - Returns page content with selection markup, but no binary data. 3 - Returns page content with selection markup and all binary data. """ try: return self.process.GetPageContent(page_id, '', page_info) # depends on [control=['try'], data=[]] except Exception as e: print(e) print('Could not get Page Content') # depends on [control=['except'], data=['e']]
def make_repr(*args, **kwargs): """Returns __repr__ method which returns ASCII representaion of the object with given fields. Without arguments, ``make_repr`` generates a method which outputs all object's non-protected (non-undercored) arguments which are not callables. Accepts ``*args``, which should be a names of object's attributes to be included in the output:: __repr__ = make_repr('foo', 'bar') If you want to generate attribute's content on the fly, then you should use keyword arguments and pass a callable of one argument:: __repr__ = make_repr(foo=lambda obj: obj.blah + 100500) """ def method(self): cls_name = self.__class__.__name__ if args: field_names = args else: def undercored(name): return name.startswith('_') def is_method(name): return callable(getattr(self, name)) def good_name(name): return not undercored(name) and not is_method(name) field_names = filter(good_name, dir(self)) field_names = sorted(field_names) # on this stage, we make from field_names an # attribute getters field_getters = zip(field_names, map(attrgetter, field_names)) # now process keyword args, they must # contain callables of one argument # and callable should return a field's value field_getters = chain( field_getters, kwargs.items()) fields = ((name, format_value(getter(self))) for name, getter in field_getters) # prepare key strings fields = ((u'{0}='.format(name), value) for name, value in fields) # join values with they respective keys fields = list(starmap(serialize_text, fields)) beginning = u'<{cls_name} '.format( cls_name=cls_name, ) result = serialize_list( beginning, fields) # append closing braket result += u'>' if ON_PYTHON2: # on python 2.x repr returns bytes, but on python3 - unicode strings result = result.encode('utf-8') return result return method
def function[make_repr, parameter[]]: constant[Returns __repr__ method which returns ASCII representaion of the object with given fields. Without arguments, ``make_repr`` generates a method which outputs all object's non-protected (non-undercored) arguments which are not callables. Accepts ``*args``, which should be a names of object's attributes to be included in the output:: __repr__ = make_repr('foo', 'bar') If you want to generate attribute's content on the fly, then you should use keyword arguments and pass a callable of one argument:: __repr__ = make_repr(foo=lambda obj: obj.blah + 100500) ] def function[method, parameter[self]]: variable[cls_name] assign[=] name[self].__class__.__name__ if name[args] begin[:] variable[field_names] assign[=] name[args] variable[field_getters] assign[=] call[name[zip], parameter[name[field_names], call[name[map], parameter[name[attrgetter], name[field_names]]]]] variable[field_getters] assign[=] call[name[chain], parameter[name[field_getters], call[name[kwargs].items, parameter[]]]] variable[fields] assign[=] <ast.GeneratorExp object at 0x7da1affd6ec0> variable[fields] assign[=] <ast.GeneratorExp object at 0x7da1affd7250> variable[fields] assign[=] call[name[list], parameter[call[name[starmap], parameter[name[serialize_text], name[fields]]]]] variable[beginning] assign[=] call[constant[<{cls_name} ].format, parameter[]] variable[result] assign[=] call[name[serialize_list], parameter[name[beginning], name[fields]]] <ast.AugAssign object at 0x7da1affd7190> if name[ON_PYTHON2] begin[:] variable[result] assign[=] call[name[result].encode, parameter[constant[utf-8]]] return[name[result]] return[name[method]]
keyword[def] identifier[make_repr] (* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[method] ( identifier[self] ): identifier[cls_name] = identifier[self] . identifier[__class__] . identifier[__name__] keyword[if] identifier[args] : identifier[field_names] = identifier[args] keyword[else] : keyword[def] identifier[undercored] ( identifier[name] ): keyword[return] identifier[name] . identifier[startswith] ( literal[string] ) keyword[def] identifier[is_method] ( identifier[name] ): keyword[return] identifier[callable] ( identifier[getattr] ( identifier[self] , identifier[name] )) keyword[def] identifier[good_name] ( identifier[name] ): keyword[return] keyword[not] identifier[undercored] ( identifier[name] ) keyword[and] keyword[not] identifier[is_method] ( identifier[name] ) identifier[field_names] = identifier[filter] ( identifier[good_name] , identifier[dir] ( identifier[self] )) identifier[field_names] = identifier[sorted] ( identifier[field_names] ) identifier[field_getters] = identifier[zip] ( identifier[field_names] , identifier[map] ( identifier[attrgetter] , identifier[field_names] )) identifier[field_getters] = identifier[chain] ( identifier[field_getters] , identifier[kwargs] . identifier[items] ()) identifier[fields] =(( identifier[name] , identifier[format_value] ( identifier[getter] ( identifier[self] ))) keyword[for] identifier[name] , identifier[getter] keyword[in] identifier[field_getters] ) identifier[fields] =(( literal[string] . identifier[format] ( identifier[name] ), identifier[value] ) keyword[for] identifier[name] , identifier[value] keyword[in] identifier[fields] ) identifier[fields] = identifier[list] ( identifier[starmap] ( identifier[serialize_text] , identifier[fields] )) identifier[beginning] = literal[string] . 
identifier[format] ( identifier[cls_name] = identifier[cls_name] , ) identifier[result] = identifier[serialize_list] ( identifier[beginning] , identifier[fields] ) identifier[result] += literal[string] keyword[if] identifier[ON_PYTHON2] : identifier[result] = identifier[result] . identifier[encode] ( literal[string] ) keyword[return] identifier[result] keyword[return] identifier[method]
def make_repr(*args, **kwargs): """Returns __repr__ method which returns ASCII representaion of the object with given fields. Without arguments, ``make_repr`` generates a method which outputs all object's non-protected (non-undercored) arguments which are not callables. Accepts ``*args``, which should be a names of object's attributes to be included in the output:: __repr__ = make_repr('foo', 'bar') If you want to generate attribute's content on the fly, then you should use keyword arguments and pass a callable of one argument:: __repr__ = make_repr(foo=lambda obj: obj.blah + 100500) """ def method(self): cls_name = self.__class__.__name__ if args: field_names = args # depends on [control=['if'], data=[]] else: def undercored(name): return name.startswith('_') def is_method(name): return callable(getattr(self, name)) def good_name(name): return not undercored(name) and (not is_method(name)) field_names = filter(good_name, dir(self)) field_names = sorted(field_names) # on this stage, we make from field_names an # attribute getters field_getters = zip(field_names, map(attrgetter, field_names)) # now process keyword args, they must # contain callables of one argument # and callable should return a field's value field_getters = chain(field_getters, kwargs.items()) fields = ((name, format_value(getter(self))) for (name, getter) in field_getters) # prepare key strings fields = ((u'{0}='.format(name), value) for (name, value) in fields) # join values with they respective keys fields = list(starmap(serialize_text, fields)) beginning = u'<{cls_name} '.format(cls_name=cls_name) result = serialize_list(beginning, fields) # append closing braket result += u'>' if ON_PYTHON2: # on python 2.x repr returns bytes, but on python3 - unicode strings result = result.encode('utf-8') # depends on [control=['if'], data=[]] return result return method
def activate_license(self, key):
    """Apply an iLO license key to the server.

    :param key: iLO license key.
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedError, if the command is not
        supported on the server.
    """
    manager, uri = self._get_ilo_details()
    try:
        license_uri = (
            manager['Oem']['Hp']['links']['LicenseService']['href'])
    except KeyError:
        # The manager resource does not expose a license service.
        raise exception.IloCommandNotSupportedError(
            '"LicenseService" section in Manager/Oem/Hp does not exist')

    payload = {'LicenseKey': key}
    # POST the key to the license service URI to activate it.
    status, headers, response = self._rest_post(license_uri, None, payload)

    if status >= 300:
        raise exception.IloError(self._get_extended_error(response))
def function[activate_license, parameter[self, key]]: constant[Activates iLO license. :param key: iLO license key. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. ] <ast.Tuple object at 0x7da1b197d300> assign[=] call[name[self]._get_ilo_details, parameter[]] <ast.Try object at 0x7da1b197d150> variable[lic_key] assign[=] dictionary[[], []] call[name[lic_key]][constant[LicenseKey]] assign[=] name[key] <ast.Tuple object at 0x7da1b197e170> assign[=] call[name[self]._rest_post, parameter[name[lic_uri], constant[None], name[lic_key]]] if compare[name[status] greater_or_equal[>=] constant[300]] begin[:] variable[msg] assign[=] call[name[self]._get_extended_error, parameter[name[response]]] <ast.Raise object at 0x7da1b197e470>
keyword[def] identifier[activate_license] ( identifier[self] , identifier[key] ): literal[string] identifier[manager] , identifier[uri] = identifier[self] . identifier[_get_ilo_details] () keyword[try] : identifier[lic_uri] = identifier[manager] [ literal[string] ][ literal[string] ][ literal[string] ][ literal[string] ][ literal[string] ] keyword[except] identifier[KeyError] : identifier[msg] =( literal[string] ) keyword[raise] identifier[exception] . identifier[IloCommandNotSupportedError] ( identifier[msg] ) identifier[lic_key] ={} identifier[lic_key] [ literal[string] ]= identifier[key] identifier[status] , identifier[headers] , identifier[response] = identifier[self] . identifier[_rest_post] ( identifier[lic_uri] , keyword[None] , identifier[lic_key] ) keyword[if] identifier[status] >= literal[int] : identifier[msg] = identifier[self] . identifier[_get_extended_error] ( identifier[response] ) keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
def activate_license(self, key): """Activates iLO license. :param key: iLO license key. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. """ (manager, uri) = self._get_ilo_details() try: lic_uri = manager['Oem']['Hp']['links']['LicenseService']['href'] # depends on [control=['try'], data=[]] except KeyError: msg = '"LicenseService" section in Manager/Oem/Hp does not exist' raise exception.IloCommandNotSupportedError(msg) # depends on [control=['except'], data=[]] lic_key = {} lic_key['LicenseKey'] = key # Perform POST to activate license (status, headers, response) = self._rest_post(lic_uri, None, lic_key) if status >= 300: msg = self._get_extended_error(response) raise exception.IloError(msg) # depends on [control=['if'], data=[]]
def read(self, nml_fname, nml_patch_in=None, patch_fname=None):
    """Parse a Fortran namelist file and store the contents.

    >>> parser = f90nml.Parser()
    >>> data_nml = parser.read('data.nml')
    """
    # Anything exposing a ``read`` attribute is treated as an already
    # open stream; everything else is assumed to be a filesystem path.
    nml_is_path = not hasattr(nml_fname, 'read')
    patch_is_path = not hasattr(patch_fname, 'read')

    if nml_patch_in is None:
        nml_patch = Namelist()
    else:
        if not isinstance(nml_patch_in, dict):
            raise TypeError('Input patch must be a dict or a Namelist.')

        # Deep-copy so that applying the patch never mutates the input.
        nml_patch = copy.deepcopy(Namelist(nml_patch_in))

        if not patch_fname:
            if not nml_is_path:
                raise ValueError('f90nml: error: No output file for patch.')
            # Default the patch output to sit alongside the input file.
            patch_fname = nml_fname + '~'
        elif nml_fname == patch_fname:
            raise ValueError('f90nml: error: Patch filepath cannot be the '
                             'same as the original filepath.')

        if patch_is_path:
            self.pfile = open(patch_fname, 'w')
        else:
            self.pfile = patch_fname

    try:
        nml_file = nml_fname if not nml_is_path else open(nml_fname, 'r')
        try:
            return self._readstream(nml_file, nml_patch)
        finally:
            # Only close handles that this method itself opened;
            # caller-supplied streams stay open.
            if nml_is_path:
                nml_file.close()
    finally:
        if self.pfile and patch_is_path:
            self.pfile.close()
def function[read, parameter[self, nml_fname, nml_patch_in, patch_fname]]: constant[Parse a Fortran namelist file and store the contents. >>> parser = f90nml.Parser() >>> data_nml = parser.read('data.nml') ] variable[nml_is_path] assign[=] <ast.UnaryOp object at 0x7da1b035b9a0> variable[patch_is_path] assign[=] <ast.UnaryOp object at 0x7da1b035a890> if compare[name[nml_patch_in] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da1b035b160> begin[:] <ast.Raise object at 0x7da1b035b3d0> variable[nml_patch] assign[=] call[name[copy].deepcopy, parameter[call[name[Namelist], parameter[name[nml_patch_in]]]]] if <ast.BoolOp object at 0x7da1b0359de0> begin[:] variable[patch_fname] assign[=] binary_operation[name[nml_fname] + constant[~]] if name[patch_is_path] begin[:] name[self].pfile assign[=] call[name[open], parameter[name[patch_fname], constant[w]]] <ast.Try object at 0x7da1b0359600>
keyword[def] identifier[read] ( identifier[self] , identifier[nml_fname] , identifier[nml_patch_in] = keyword[None] , identifier[patch_fname] = keyword[None] ): literal[string] identifier[nml_is_path] = keyword[not] identifier[hasattr] ( identifier[nml_fname] , literal[string] ) identifier[patch_is_path] = keyword[not] identifier[hasattr] ( identifier[patch_fname] , literal[string] ) keyword[if] identifier[nml_patch_in] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] identifier[isinstance] ( identifier[nml_patch_in] , identifier[dict] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[nml_patch] = identifier[copy] . identifier[deepcopy] ( identifier[Namelist] ( identifier[nml_patch_in] )) keyword[if] keyword[not] identifier[patch_fname] keyword[and] identifier[nml_is_path] : identifier[patch_fname] = identifier[nml_fname] + literal[string] keyword[elif] keyword[not] identifier[patch_fname] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[elif] identifier[nml_fname] == identifier[patch_fname] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] identifier[patch_is_path] : identifier[self] . identifier[pfile] = identifier[open] ( identifier[patch_fname] , literal[string] ) keyword[else] : identifier[self] . identifier[pfile] = identifier[patch_fname] keyword[else] : identifier[nml_patch] = identifier[Namelist] () keyword[try] : identifier[nml_file] = identifier[open] ( identifier[nml_fname] , literal[string] ) keyword[if] identifier[nml_is_path] keyword[else] identifier[nml_fname] keyword[try] : keyword[return] identifier[self] . identifier[_readstream] ( identifier[nml_file] , identifier[nml_patch] ) keyword[finally] : keyword[if] identifier[nml_is_path] : identifier[nml_file] . identifier[close] () keyword[finally] : keyword[if] identifier[self] . identifier[pfile] keyword[and] identifier[patch_is_path] : identifier[self] . identifier[pfile] . identifier[close] ()
def read(self, nml_fname, nml_patch_in=None, patch_fname=None): """Parse a Fortran namelist file and store the contents. >>> parser = f90nml.Parser() >>> data_nml = parser.read('data.nml') """ # For switching based on files versus paths nml_is_path = not hasattr(nml_fname, 'read') patch_is_path = not hasattr(patch_fname, 'read') # Convert patch data to a Namelist object if nml_patch_in is not None: if not isinstance(nml_patch_in, dict): raise TypeError('Input patch must be a dict or a Namelist.') # depends on [control=['if'], data=[]] nml_patch = copy.deepcopy(Namelist(nml_patch_in)) if not patch_fname and nml_is_path: patch_fname = nml_fname + '~' # depends on [control=['if'], data=[]] elif not patch_fname: raise ValueError('f90nml: error: No output file for patch.') # depends on [control=['if'], data=[]] elif nml_fname == patch_fname: raise ValueError('f90nml: error: Patch filepath cannot be the same as the original filepath.') # depends on [control=['if'], data=[]] if patch_is_path: self.pfile = open(patch_fname, 'w') # depends on [control=['if'], data=[]] else: self.pfile = patch_fname # depends on [control=['if'], data=['nml_patch_in']] else: nml_patch = Namelist() try: nml_file = open(nml_fname, 'r') if nml_is_path else nml_fname try: return self._readstream(nml_file, nml_patch) # depends on [control=['try'], data=[]] finally: # Close the files we opened on any exceptions within readstream if nml_is_path: nml_file.close() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: if self.pfile and patch_is_path: self.pfile.close() # depends on [control=['if'], data=[]]
def _init_actions(self, create_standard_actions):
    """Create the editor's context-menu actions and shortcuts.

    Always creates the line-manipulation actions (duplicate, swap
    up/down), select-all and go-to-line; each action is stored on
    ``self`` as ``action_<name>`` after being registered with
    ``add_action``.

    :param create_standard_actions: when True, also create the
        standard edit actions (undo, redo, copy, cut, paste, indent,
        un-indent).
    """
    # "Advanced" sub-menu, kept in _sub_menus so later add_action
    # calls can target it by name.
    menu_advanced = QtWidgets.QMenu(_('Advanced'))
    self.add_menu(menu_advanced)
    self._sub_menus = {
        'Advanced': menu_advanced
    }
    if create_standard_actions:
        # Undo: hidden until the document signals undo availability.
        action = QtWidgets.QAction(_('Undo'), self)
        action.setShortcut('Ctrl+Z')
        action.setIcon(icons.icon(
            'edit-undo', ':/pyqode-icons/rc/edit-undo.png', 'fa.undo'))
        action.triggered.connect(self.undo)
        self.undoAvailable.connect(action.setVisible)
        action.setVisible(False)
        self.add_action(action, sub_menu=None)
        self.action_undo = action
        # Redo: same visibility scheme as undo.
        action = QtWidgets.QAction(_('Redo'), self)
        action.setShortcut('Ctrl+Y')
        action.setIcon(icons.icon(
            'edit-redo', ':/pyqode-icons/rc/edit-redo.png', 'fa.repeat'))
        action.triggered.connect(self.redo)
        self.redoAvailable.connect(action.setVisible)
        action.setVisible(False)
        self.add_action(action, sub_menu=None)
        self.action_redo = action
        # Copy (platform-standard shortcut).
        action = QtWidgets.QAction(_('Copy'), self)
        action.setShortcut(QtGui.QKeySequence.Copy)
        action.setIcon(icons.icon(
            'edit-copy', ':/pyqode-icons/rc/edit-copy.png', 'fa.copy'))
        action.triggered.connect(self.copy)
        self.add_action(action, sub_menu=None)
        self.action_copy = action
        # Cut (platform-standard shortcut).
        action = QtWidgets.QAction(_('Cut'), self)
        action.setShortcut(QtGui.QKeySequence.Cut)
        action.setIcon(icons.icon(
            'edit-cut', ':/pyqode-icons/rc/edit-cut.png', 'fa.cut'))
        action.triggered.connect(self.cut)
        self.add_action(action, sub_menu=None)
        self.action_cut = action
        # Paste (platform-standard shortcut).
        action = QtWidgets.QAction(_('Paste'), self)
        action.setShortcut(QtGui.QKeySequence.Paste)
        action.setIcon(icons.icon(
            'edit-paste', ':/pyqode-icons/rc/edit-paste.png', 'fa.paste'))
        action.triggered.connect(self.paste)
        self.add_action(action, sub_menu=None)
        self.action_paste = action
    # Duplicate line (always available).
    action = QtWidgets.QAction(_('Duplicate line'), self)
    action.setShortcut('Ctrl+D')
    action.triggered.connect(self.duplicate_line)
    self.add_action(action, sub_menu=None)
    self.action_duplicate_line = action
    # Swap current line with the one above.
    action = QtWidgets.QAction(_('Swap line up'), self)
    action.setShortcut("Alt++")
    action.triggered.connect(self.swapLineUp)
    self.add_action(action, sub_menu=None)
    self.action_swap_line_up = action
    # Swap current line with the one below.
    action = QtWidgets.QAction(_('Swap line down'), self)
    action.setShortcut("Alt+-")
    action.triggered.connect(self.swapLineDown)
    self.add_action(action, sub_menu=None)
    self.action_swap_line_down = action
    # Select all (platform-standard shortcut).
    action = QtWidgets.QAction(_('Select all'), self)
    action.setShortcut(QtGui.QKeySequence.SelectAll)
    action.triggered.connect(self.selectAll)
    self.action_select_all = action
    self.add_action(self.action_select_all, sub_menu=None)
    self.add_separator(sub_menu=None)
    if create_standard_actions:
        # Indent selection/line.
        action = QtWidgets.QAction(_('Indent'), self)
        action.setShortcut('Tab')
        action.setIcon(icons.icon(
            'format-indent-more',
            ':/pyqode-icons/rc/format-indent-more.png',
            'fa.indent'))
        action.triggered.connect(self.indent)
        self.add_action(action)
        self.action_indent = action
        # Un-indent selection/line.
        action = QtWidgets.QAction(_('Un-indent'), self)
        action.setShortcut('Shift+Tab')
        action.setIcon(icons.icon(
            'format-indent-less',
            ':/pyqode-icons/rc/format-indent-less.png',
            'fa.dedent'))
        action.triggered.connect(self.un_indent)
        self.add_action(action)
        self.action_un_indent = action
        self.add_separator()
    # Go to line.
    action = QtWidgets.QAction(_('Go to line'), self)
    action.setShortcut('Ctrl+G')
    action.setIcon(icons.icon(
        'go-jump', ':/pyqode-icons/rc/goto-line.png', 'fa.share'))
    action.triggered.connect(self.goto_line)
    self.add_action(action)
    self.action_goto_line = action
def function[_init_actions, parameter[self, create_standard_actions]]: constant[ Init context menu action ] variable[menu_advanced] assign[=] call[name[QtWidgets].QMenu, parameter[call[name[_], parameter[constant[Advanced]]]]] call[name[self].add_menu, parameter[name[menu_advanced]]] name[self]._sub_menus assign[=] dictionary[[<ast.Constant object at 0x7da20c7c95a0>], [<ast.Name object at 0x7da20c7ca0e0>]] if name[create_standard_actions] begin[:] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Undo]]], name[self]]] call[name[action].setShortcut, parameter[constant[Ctrl+Z]]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[edit-undo], constant[:/pyqode-icons/rc/edit-undo.png], constant[fa.undo]]]]] call[name[action].triggered.connect, parameter[name[self].undo]] call[name[self].undoAvailable.connect, parameter[name[action].setVisible]] call[name[action].setVisible, parameter[constant[False]]] call[name[self].add_action, parameter[name[action]]] name[self].action_undo assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Redo]]], name[self]]] call[name[action].setShortcut, parameter[constant[Ctrl+Y]]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[edit-redo], constant[:/pyqode-icons/rc/edit-redo.png], constant[fa.repeat]]]]] call[name[action].triggered.connect, parameter[name[self].redo]] call[name[self].redoAvailable.connect, parameter[name[action].setVisible]] call[name[action].setVisible, parameter[constant[False]]] call[name[self].add_action, parameter[name[action]]] name[self].action_redo assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Copy]]], name[self]]] call[name[action].setShortcut, parameter[name[QtGui].QKeySequence.Copy]] call[name[action].setIcon, parameter[call[name[icons].icon, 
parameter[constant[edit-copy], constant[:/pyqode-icons/rc/edit-copy.png], constant[fa.copy]]]]] call[name[action].triggered.connect, parameter[name[self].copy]] call[name[self].add_action, parameter[name[action]]] name[self].action_copy assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Cut]]], name[self]]] call[name[action].setShortcut, parameter[name[QtGui].QKeySequence.Cut]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[edit-cut], constant[:/pyqode-icons/rc/edit-cut.png], constant[fa.cut]]]]] call[name[action].triggered.connect, parameter[name[self].cut]] call[name[self].add_action, parameter[name[action]]] name[self].action_cut assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Paste]]], name[self]]] call[name[action].setShortcut, parameter[name[QtGui].QKeySequence.Paste]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[edit-paste], constant[:/pyqode-icons/rc/edit-paste.png], constant[fa.paste]]]]] call[name[action].triggered.connect, parameter[name[self].paste]] call[name[self].add_action, parameter[name[action]]] name[self].action_paste assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Duplicate line]]], name[self]]] call[name[action].setShortcut, parameter[constant[Ctrl+D]]] call[name[action].triggered.connect, parameter[name[self].duplicate_line]] call[name[self].add_action, parameter[name[action]]] name[self].action_duplicate_line assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Swap line up]]], name[self]]] call[name[action].setShortcut, parameter[constant[Alt++]]] call[name[action].triggered.connect, parameter[name[self].swapLineUp]] call[name[self].add_action, parameter[name[action]]] 
name[self].action_swap_line_up assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Swap line down]]], name[self]]] call[name[action].setShortcut, parameter[constant[Alt+-]]] call[name[action].triggered.connect, parameter[name[self].swapLineDown]] call[name[self].add_action, parameter[name[action]]] name[self].action_swap_line_down assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Select all]]], name[self]]] call[name[action].setShortcut, parameter[name[QtGui].QKeySequence.SelectAll]] call[name[action].triggered.connect, parameter[name[self].selectAll]] name[self].action_select_all assign[=] name[action] call[name[self].add_action, parameter[name[self].action_select_all]] call[name[self].add_separator, parameter[]] if name[create_standard_actions] begin[:] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Indent]]], name[self]]] call[name[action].setShortcut, parameter[constant[Tab]]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[format-indent-more], constant[:/pyqode-icons/rc/format-indent-more.png], constant[fa.indent]]]]] call[name[action].triggered.connect, parameter[name[self].indent]] call[name[self].add_action, parameter[name[action]]] name[self].action_indent assign[=] name[action] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Un-indent]]], name[self]]] call[name[action].setShortcut, parameter[constant[Shift+Tab]]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[format-indent-less], constant[:/pyqode-icons/rc/format-indent-less.png], constant[fa.dedent]]]]] call[name[action].triggered.connect, parameter[name[self].un_indent]] call[name[self].add_action, parameter[name[action]]] name[self].action_un_indent assign[=] name[action] 
call[name[self].add_separator, parameter[]] variable[action] assign[=] call[name[QtWidgets].QAction, parameter[call[name[_], parameter[constant[Go to line]]], name[self]]] call[name[action].setShortcut, parameter[constant[Ctrl+G]]] call[name[action].setIcon, parameter[call[name[icons].icon, parameter[constant[go-jump], constant[:/pyqode-icons/rc/goto-line.png], constant[fa.share]]]]] call[name[action].triggered.connect, parameter[name[self].goto_line]] call[name[self].add_action, parameter[name[action]]] name[self].action_goto_line assign[=] name[action]
keyword[def] identifier[_init_actions] ( identifier[self] , identifier[create_standard_actions] ): literal[string] identifier[menu_advanced] = identifier[QtWidgets] . identifier[QMenu] ( identifier[_] ( literal[string] )) identifier[self] . identifier[add_menu] ( identifier[menu_advanced] ) identifier[self] . identifier[_sub_menus] ={ literal[string] : identifier[menu_advanced] } keyword[if] identifier[create_standard_actions] : identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[undo] ) identifier[self] . identifier[undoAvailable] . identifier[connect] ( identifier[action] . identifier[setVisible] ) identifier[action] . identifier[setVisible] ( keyword[False] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[action_undo] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[redo] ) identifier[self] . identifier[redoAvailable] . identifier[connect] ( identifier[action] . identifier[setVisible] ) identifier[action] . identifier[setVisible] ( keyword[False] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . 
identifier[action_redo] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( identifier[QtGui] . identifier[QKeySequence] . identifier[Copy] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[copy] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[action_copy] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( identifier[QtGui] . identifier[QKeySequence] . identifier[Cut] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[cut] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[action_cut] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( identifier[QtGui] . identifier[QKeySequence] . identifier[Paste] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[paste] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . 
identifier[action_paste] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[duplicate_line] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[action_duplicate_line] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[swapLineUp] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[action_swap_line_up] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[swapLineDown] ) identifier[self] . identifier[add_action] ( identifier[action] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[action_swap_line_down] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( identifier[QtGui] . identifier[QKeySequence] . identifier[SelectAll] ) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[selectAll] ) identifier[self] . identifier[action_select_all] = identifier[action] identifier[self] . identifier[add_action] ( identifier[self] . 
identifier[action_select_all] , identifier[sub_menu] = keyword[None] ) identifier[self] . identifier[add_separator] ( identifier[sub_menu] = keyword[None] ) keyword[if] identifier[create_standard_actions] : identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[indent] ) identifier[self] . identifier[add_action] ( identifier[action] ) identifier[self] . identifier[action_indent] = identifier[action] identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[un_indent] ) identifier[self] . identifier[add_action] ( identifier[action] ) identifier[self] . identifier[action_un_indent] = identifier[action] identifier[self] . identifier[add_separator] () identifier[action] = identifier[QtWidgets] . identifier[QAction] ( identifier[_] ( literal[string] ), identifier[self] ) identifier[action] . identifier[setShortcut] ( literal[string] ) identifier[action] . identifier[setIcon] ( identifier[icons] . identifier[icon] ( literal[string] , literal[string] , literal[string] )) identifier[action] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[goto_line] ) identifier[self] . identifier[add_action] ( identifier[action] ) identifier[self] . identifier[action_goto_line] = identifier[action]
def _init_actions(self, create_standard_actions): """ Init context menu action """ menu_advanced = QtWidgets.QMenu(_('Advanced')) self.add_menu(menu_advanced) self._sub_menus = {'Advanced': menu_advanced} if create_standard_actions: # Undo action = QtWidgets.QAction(_('Undo'), self) action.setShortcut('Ctrl+Z') action.setIcon(icons.icon('edit-undo', ':/pyqode-icons/rc/edit-undo.png', 'fa.undo')) action.triggered.connect(self.undo) self.undoAvailable.connect(action.setVisible) action.setVisible(False) self.add_action(action, sub_menu=None) self.action_undo = action # Redo action = QtWidgets.QAction(_('Redo'), self) action.setShortcut('Ctrl+Y') action.setIcon(icons.icon('edit-redo', ':/pyqode-icons/rc/edit-redo.png', 'fa.repeat')) action.triggered.connect(self.redo) self.redoAvailable.connect(action.setVisible) action.setVisible(False) self.add_action(action, sub_menu=None) self.action_redo = action # Copy action = QtWidgets.QAction(_('Copy'), self) action.setShortcut(QtGui.QKeySequence.Copy) action.setIcon(icons.icon('edit-copy', ':/pyqode-icons/rc/edit-copy.png', 'fa.copy')) action.triggered.connect(self.copy) self.add_action(action, sub_menu=None) self.action_copy = action # cut action = QtWidgets.QAction(_('Cut'), self) action.setShortcut(QtGui.QKeySequence.Cut) action.setIcon(icons.icon('edit-cut', ':/pyqode-icons/rc/edit-cut.png', 'fa.cut')) action.triggered.connect(self.cut) self.add_action(action, sub_menu=None) self.action_cut = action # paste action = QtWidgets.QAction(_('Paste'), self) action.setShortcut(QtGui.QKeySequence.Paste) action.setIcon(icons.icon('edit-paste', ':/pyqode-icons/rc/edit-paste.png', 'fa.paste')) action.triggered.connect(self.paste) self.add_action(action, sub_menu=None) self.action_paste = action # depends on [control=['if'], data=[]] # duplicate line action = QtWidgets.QAction(_('Duplicate line'), self) action.setShortcut('Ctrl+D') action.triggered.connect(self.duplicate_line) self.add_action(action, sub_menu=None) 
self.action_duplicate_line = action # swap line up action = QtWidgets.QAction(_('Swap line up'), self) action.setShortcut('Alt++') action.triggered.connect(self.swapLineUp) self.add_action(action, sub_menu=None) self.action_swap_line_up = action # swap line down action = QtWidgets.QAction(_('Swap line down'), self) action.setShortcut('Alt+-') action.triggered.connect(self.swapLineDown) self.add_action(action, sub_menu=None) self.action_swap_line_down = action # select all action = QtWidgets.QAction(_('Select all'), self) action.setShortcut(QtGui.QKeySequence.SelectAll) action.triggered.connect(self.selectAll) self.action_select_all = action self.add_action(self.action_select_all, sub_menu=None) self.add_separator(sub_menu=None) if create_standard_actions: # indent action = QtWidgets.QAction(_('Indent'), self) action.setShortcut('Tab') action.setIcon(icons.icon('format-indent-more', ':/pyqode-icons/rc/format-indent-more.png', 'fa.indent')) action.triggered.connect(self.indent) self.add_action(action) self.action_indent = action # unindent action = QtWidgets.QAction(_('Un-indent'), self) action.setShortcut('Shift+Tab') action.setIcon(icons.icon('format-indent-less', ':/pyqode-icons/rc/format-indent-less.png', 'fa.dedent')) action.triggered.connect(self.un_indent) self.add_action(action) self.action_un_indent = action self.add_separator() # depends on [control=['if'], data=[]] # goto action = QtWidgets.QAction(_('Go to line'), self) action.setShortcut('Ctrl+G') action.setIcon(icons.icon('go-jump', ':/pyqode-icons/rc/goto-line.png', 'fa.share')) action.triggered.connect(self.goto_line) self.add_action(action) self.action_goto_line = action
def observe(M, C, obs_mesh, obs_vals, obs_V=0, lintrans=None, cross_validate=True):
    """
    (M, C, obs_mesh, obs_vals[, obs_V = 0, lintrans = None,
    cross_validate = True])

    Condition the GP (M, C) on observations

        obs_vals ~ N(lintrans * f(obs_mesh), V)
        f ~ GP(M, C)

    :Arguments:

        -   `M`: The mean function

        -   `C`: The covariance function

        -   `obs_mesh`: The places where f has been evaluated.

        -   `obs_vals`: The values of f that were observed there.

        -   `obs_V`: The observation variance. If None, assumed to be
            infinite (observations made with no error).

        -   `lintrans`: A linear transformation. If None, assumed to be the
            identity transformation (pretend it doesn't exist).

        -   `cross_validate`: A flag indicating whether a check should be
            done to see if the data could have arisen from M and C with
            positive probability.
    """
    mesh = regularize_array(obs_mesh)
    n_obs = mesh.shape[0]
    variance = resize(obs_V, n_obs)
    values = resize(obs_vals, n_obs)

    # Condition the covariance first; it reports which observations were
    # actually informative via `relevant_slice`.
    relevant_slice, obs_mesh_new = C.observe(mesh, variance, output_type='o')

    # Then condition the mean against the already-observed covariance.
    M.observe(C, obs_mesh_new, values.ravel()[relevant_slice])

    # Some observations were redundant: optionally verify the data are
    # plausible under the prior before accepting them silently.
    if obs_mesh_new.shape[0] < n_obs and cross_validate:
        plausible = predictive_check(values, mesh, M, C.obs_piv,
                                     sqrt(C.relative_precision))
        if not plausible:
            raise ValueError(
                "These data seem extremely improbable given your GP prior. \n Suggestions: decrease observation precision, or adjust the covariance to \n allow the function to be less smooth.")
def function[observe, parameter[M, C, obs_mesh, obs_vals, obs_V, lintrans, cross_validate]]: constant[ (M, C, obs_mesh, obs_vals[, obs_V = 0, lintrans = None, cross_validate = True]) Imposes observation of the value of obs_vals on M and C, where obs_vals ~ N(lintrans * f(obs_mesh), V) f ~ GP(M,C) :Arguments: - `M`: The mean function - `C`: The covariance function - `obs_mesh`: The places where f has been evaluated. - `obs_vals`: The values of f that were observed there. - `obs_V`: The observation variance. If None, assumed to be infinite (observations made with no error). - `lintrans`: A linear transformation. If None, assumed to be the identity transformation (pretend it doesn't exist). - `cross_validate`: A flag indicating whether a check should be done to see if the data could have arisen from M and C with positive probability. ] variable[obs_mesh] assign[=] call[name[regularize_array], parameter[name[obs_mesh]]] variable[obs_V] assign[=] call[name[resize], parameter[name[obs_V], call[name[obs_mesh].shape][constant[0]]]] variable[obs_vals] assign[=] call[name[resize], parameter[name[obs_vals], call[name[obs_mesh].shape][constant[0]]]] <ast.Tuple object at 0x7da1b18492a0> assign[=] call[name[C].observe, parameter[name[obs_mesh], name[obs_V]]] call[name[M].observe, parameter[name[C], name[obs_mesh_new], call[call[name[obs_vals].ravel, parameter[]]][name[relevant_slice]]]] if compare[call[name[obs_mesh_new].shape][constant[0]] less[<] call[name[obs_mesh].shape][constant[0]]] begin[:] if name[cross_validate] begin[:] if <ast.UnaryOp object at 0x7da1b1849780> begin[:] <ast.Raise object at 0x7da1b184a230>
keyword[def] identifier[observe] ( identifier[M] , identifier[C] , identifier[obs_mesh] , identifier[obs_vals] , identifier[obs_V] = literal[int] , identifier[lintrans] = keyword[None] , identifier[cross_validate] = keyword[True] ): literal[string] identifier[obs_mesh] = identifier[regularize_array] ( identifier[obs_mesh] ) identifier[obs_V] = identifier[resize] ( identifier[obs_V] , identifier[obs_mesh] . identifier[shape] [ literal[int] ]) identifier[obs_vals] = identifier[resize] ( identifier[obs_vals] , identifier[obs_mesh] . identifier[shape] [ literal[int] ]) identifier[relevant_slice] , identifier[obs_mesh_new] = identifier[C] . identifier[observe] ( identifier[obs_mesh] , identifier[obs_V] , identifier[output_type] = literal[string] ) identifier[M] . identifier[observe] ( identifier[C] , identifier[obs_mesh_new] , identifier[obs_vals] . identifier[ravel] ()[ identifier[relevant_slice] ]) keyword[if] identifier[obs_mesh_new] . identifier[shape] [ literal[int] ]< identifier[obs_mesh] . identifier[shape] [ literal[int] ]: keyword[if] identifier[cross_validate] : keyword[if] keyword[not] identifier[predictive_check] ( identifier[obs_vals] , identifier[obs_mesh] , identifier[M] , identifier[C] . identifier[obs_piv] , identifier[sqrt] ( identifier[C] . identifier[relative_precision] )): keyword[raise] identifier[ValueError] ( literal[string] )
def observe(M, C, obs_mesh, obs_vals, obs_V=0, lintrans=None, cross_validate=True): """ (M, C, obs_mesh, obs_vals[, obs_V = 0, lintrans = None, cross_validate = True]) Imposes observation of the value of obs_vals on M and C, where obs_vals ~ N(lintrans * f(obs_mesh), V) f ~ GP(M,C) :Arguments: - `M`: The mean function - `C`: The covariance function - `obs_mesh`: The places where f has been evaluated. - `obs_vals`: The values of f that were observed there. - `obs_V`: The observation variance. If None, assumed to be infinite (observations made with no error). - `lintrans`: A linear transformation. If None, assumed to be the identity transformation (pretend it doesn't exist). - `cross_validate`: A flag indicating whether a check should be done to see if the data could have arisen from M and C with positive probability. """ obs_mesh = regularize_array(obs_mesh) # print_(obs_mesh) obs_V = resize(obs_V, obs_mesh.shape[0]) obs_vals = resize(obs_vals, obs_mesh.shape[0]) # First observe C. (relevant_slice, obs_mesh_new) = C.observe(obs_mesh, obs_V, output_type='o') # Then observe M from C. M.observe(C, obs_mesh_new, obs_vals.ravel()[relevant_slice]) # Cross-validate if not asked not to. if obs_mesh_new.shape[0] < obs_mesh.shape[0]: if cross_validate: if not predictive_check(obs_vals, obs_mesh, M, C.obs_piv, sqrt(C.relative_precision)): raise ValueError('These data seem extremely improbable given your GP prior. \n Suggestions: decrease observation precision, or adjust the covariance to \n allow the function to be less smooth.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def get_i_text(node):
    """
    Get the text for an Indicator node.

    :param node: Indicator node.
    :return: The node's ``operator`` attribute, upper-cased.
    :raises IOCParseError: If the node's tag is not ``Indicator``.
    """
    if node.tag != 'Indicator':
        raise IOCParseError('Invalid tag: {}'.format(node.tag))
    operator = node.get('operator')
    return operator.upper()
def function[get_i_text, parameter[node]]: constant[ Get the text for an Indicator node. :param node: Indicator node. :return: ] if compare[name[node].tag not_equal[!=] constant[Indicator]] begin[:] <ast.Raise object at 0x7da1b1037d00> variable[s] assign[=] call[call[name[node].get, parameter[constant[operator]]].upper, parameter[]] return[name[s]]
keyword[def] identifier[get_i_text] ( identifier[node] ): literal[string] keyword[if] identifier[node] . identifier[tag] != literal[string] : keyword[raise] identifier[IOCParseError] ( literal[string] . identifier[format] ( identifier[node] . identifier[tag] )) identifier[s] = identifier[node] . identifier[get] ( literal[string] ). identifier[upper] () keyword[return] identifier[s]
def get_i_text(node): """ Get the text for an Indicator node. :param node: Indicator node. :return: """ if node.tag != 'Indicator': raise IOCParseError('Invalid tag: {}'.format(node.tag)) # depends on [control=['if'], data=[]] s = node.get('operator').upper() return s
def info_post_request(self, node, info):
    """Run when a request to create an info is complete."""
    # Broadcast the freshly created info to every neighboring agent.
    recipients = node.neighbors()
    for recipient in recipients:
        node.transmit(what=info, to_whom=recipient)
def function[info_post_request, parameter[self, node, info]]: constant[Run when a request to create an info is complete.] for taget[name[agent]] in starred[call[name[node].neighbors, parameter[]]] begin[:] call[name[node].transmit, parameter[]]
keyword[def] identifier[info_post_request] ( identifier[self] , identifier[node] , identifier[info] ): literal[string] keyword[for] identifier[agent] keyword[in] identifier[node] . identifier[neighbors] (): identifier[node] . identifier[transmit] ( identifier[what] = identifier[info] , identifier[to_whom] = identifier[agent] )
def info_post_request(self, node, info): """Run when a request to create an info is complete.""" for agent in node.neighbors(): node.transmit(what=info, to_whom=agent) # depends on [control=['for'], data=['agent']]
def world_series_logs():
    """
    Pull Retrosheet World Series Game Logs.

    Downloads the game-log archive, parses the World Series member file,
    and returns it as a DataFrame with the standard game-log columns.
    """
    archive = get_zip_file(world_series_url)
    # The archive member holding the World Series logs (headerless CSV).
    frame = pd.read_csv(archive.open('GLWS.TXT'),
                        header=None, sep=',', quotechar='"')
    frame.columns = gamelog_columns
    return frame
def function[world_series_logs, parameter[]]: constant[ Pull Retrosheet World Series Game Logs ] variable[file_name] assign[=] constant[GLWS.TXT] variable[z] assign[=] call[name[get_zip_file], parameter[name[world_series_url]]] variable[data] assign[=] call[name[pd].read_csv, parameter[call[name[z].open, parameter[name[file_name]]]]] name[data].columns assign[=] name[gamelog_columns] return[name[data]]
keyword[def] identifier[world_series_logs] (): literal[string] identifier[file_name] = literal[string] identifier[z] = identifier[get_zip_file] ( identifier[world_series_url] ) identifier[data] = identifier[pd] . identifier[read_csv] ( identifier[z] . identifier[open] ( identifier[file_name] ), identifier[header] = keyword[None] , identifier[sep] = literal[string] , identifier[quotechar] = literal[string] ) identifier[data] . identifier[columns] = identifier[gamelog_columns] keyword[return] identifier[data]
def world_series_logs(): """ Pull Retrosheet World Series Game Logs """ file_name = 'GLWS.TXT' z = get_zip_file(world_series_url) data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"') data.columns = gamelog_columns return data
def from_string(data_str):
    """Creates a MonsoonData object from a string representation generated
    by __str__.

    Args:
        data_str: The string representation of a MonsoonData.

    Returns:
        A MonsoonData object.

    Raises:
        MonsoonError: If data_str is not in the format produced by
            MonsoonData's __str__.
    """
    lines = data_str.strip().split('\n')
    err_msg = ("Invalid input string format. Is this string generated by "
               "MonsoonData class?")
    # Validate the line count BEFORE indexing any header line. The
    # previous version built the condition list first, so a short input
    # raised IndexError instead of MonsoonError; it also only required
    # 5 lines while lines[5] is inspected below.
    if len(lines) < 6:
        raise MonsoonError(err_msg)
    conditions = [
        "Average Current:" not in lines[1],
        "Voltage: " not in lines[2],
        "Total Power: " not in lines[3],
        "samples taken at " not in lines[4],
        lines[5] != "Time" + ' ' * 7 + "Amp"
    ]
    if any(conditions):
        raise MonsoonError(err_msg)
    hz_str = lines[4].split()[2]
    # Drop the trailing unit suffix (two chars, presumably "Hz") before
    # parsing -- TODO confirm against MonsoonData.__str__.
    hz = int(hz_str[:-2])
    voltage_str = lines[2].split()[1]
    # Drop the trailing one-char unit (presumably "V").
    voltage = int(voltage_str[:-1])
    t = []
    v = []
    # Remaining lines are "<timestamp> <value>" sample pairs.
    for line in lines[6:]:
        try:
            timestamp, value = line.split(' ')
            t.append(int(timestamp))
            v.append(float(value))
        except ValueError:
            raise MonsoonError(err_msg)
    return MonsoonData(v, t, hz, voltage)
def function[from_string, parameter[data_str]]: constant[Creates a MonsoonData object from a string representation generated by __str__. Args: str: The string representation of a MonsoonData. Returns: A MonsoonData object. ] variable[lines] assign[=] call[call[name[data_str].strip, parameter[]].split, parameter[constant[ ]]] variable[err_msg] assign[=] constant[Invalid input string format. Is this string generated by MonsoonData class?] variable[conditions] assign[=] list[[<ast.Compare object at 0x7da1b08c9c00>, <ast.Compare object at 0x7da1b08c8760>, <ast.Compare object at 0x7da1b08c9000>, <ast.Compare object at 0x7da1b08cbe80>, <ast.Compare object at 0x7da1b08c92a0>, <ast.Compare object at 0x7da1b08c94e0>]] if call[name[any], parameter[name[conditions]]] begin[:] <ast.Raise object at 0x7da1b08c8d00> variable[hz_str] assign[=] call[call[call[name[lines]][constant[4]].split, parameter[]]][constant[2]] variable[hz] assign[=] call[name[int], parameter[call[name[hz_str]][<ast.Slice object at 0x7da1b08ca110>]]] variable[voltage_str] assign[=] call[call[call[name[lines]][constant[2]].split, parameter[]]][constant[1]] variable[voltage] assign[=] call[name[int], parameter[call[name[voltage_str]][<ast.Slice object at 0x7da1b08ca620>]]] variable[lines] assign[=] call[name[lines]][<ast.Slice object at 0x7da1b0863190>] variable[t] assign[=] list[[]] variable[v] assign[=] list[[]] for taget[name[l]] in starred[name[lines]] begin[:] <ast.Try object at 0x7da1b0883880> return[call[name[MonsoonData], parameter[name[v], name[t], name[hz], name[voltage]]]]
keyword[def] identifier[from_string] ( identifier[data_str] ): literal[string] identifier[lines] = identifier[data_str] . identifier[strip] (). identifier[split] ( literal[string] ) identifier[err_msg] =( literal[string] literal[string] ) identifier[conditions] =[ identifier[len] ( identifier[lines] )<= literal[int] , literal[string] keyword[not] keyword[in] identifier[lines] [ literal[int] ], literal[string] keyword[not] keyword[in] identifier[lines] [ literal[int] ], literal[string] keyword[not] keyword[in] identifier[lines] [ literal[int] ], literal[string] keyword[not] keyword[in] identifier[lines] [ literal[int] ], identifier[lines] [ literal[int] ]!= literal[string] + literal[string] * literal[int] + literal[string] ] keyword[if] identifier[any] ( identifier[conditions] ): keyword[raise] identifier[MonsoonError] ( identifier[err_msg] ) identifier[hz_str] = identifier[lines] [ literal[int] ]. identifier[split] ()[ literal[int] ] identifier[hz] = identifier[int] ( identifier[hz_str] [:- literal[int] ]) identifier[voltage_str] = identifier[lines] [ literal[int] ]. identifier[split] ()[ literal[int] ] identifier[voltage] = identifier[int] ( identifier[voltage_str] [:- literal[int] ]) identifier[lines] = identifier[lines] [ literal[int] :] identifier[t] =[] identifier[v] =[] keyword[for] identifier[l] keyword[in] identifier[lines] : keyword[try] : identifier[timestamp] , identifier[value] = identifier[l] . identifier[split] ( literal[string] ) identifier[t] . identifier[append] ( identifier[int] ( identifier[timestamp] )) identifier[v] . identifier[append] ( identifier[float] ( identifier[value] )) keyword[except] identifier[ValueError] : keyword[raise] identifier[MonsoonError] ( identifier[err_msg] ) keyword[return] identifier[MonsoonData] ( identifier[v] , identifier[t] , identifier[hz] , identifier[voltage] )
def from_string(data_str): """Creates a MonsoonData object from a string representation generated by __str__. Args: str: The string representation of a MonsoonData. Returns: A MonsoonData object. """ lines = data_str.strip().split('\n') err_msg = 'Invalid input string format. Is this string generated by MonsoonData class?' conditions = [len(lines) <= 4, 'Average Current:' not in lines[1], 'Voltage: ' not in lines[2], 'Total Power: ' not in lines[3], 'samples taken at ' not in lines[4], lines[5] != 'Time' + ' ' * 7 + 'Amp'] if any(conditions): raise MonsoonError(err_msg) # depends on [control=['if'], data=[]] hz_str = lines[4].split()[2] hz = int(hz_str[:-2]) voltage_str = lines[2].split()[1] voltage = int(voltage_str[:-1]) lines = lines[6:] t = [] v = [] for l in lines: try: (timestamp, value) = l.split(' ') t.append(int(timestamp)) v.append(float(value)) # depends on [control=['try'], data=[]] except ValueError: raise MonsoonError(err_msg) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['l']] return MonsoonData(v, t, hz, voltage)
def _ph2f(self, placeholder):
    """Lookup a field given a field placeholder"""
    target_cls = placeholder.cls
    # Accessor placeholders resolve themselves through their class.
    if issubclass(target_cls, FieldAccessor):
        return target_cls.access(self._parent, placeholder)
    # Plain placeholders are resolved by the parent object.
    return self._parent.lookup_field_by_placeholder(placeholder)
def function[_ph2f, parameter[self, placeholder]]: constant[Lookup a field given a field placeholder] if call[name[issubclass], parameter[name[placeholder].cls, name[FieldAccessor]]] begin[:] return[call[name[placeholder].cls.access, parameter[name[self]._parent, name[placeholder]]]] return[call[name[self]._parent.lookup_field_by_placeholder, parameter[name[placeholder]]]]
keyword[def] identifier[_ph2f] ( identifier[self] , identifier[placeholder] ): literal[string] keyword[if] identifier[issubclass] ( identifier[placeholder] . identifier[cls] , identifier[FieldAccessor] ): keyword[return] identifier[placeholder] . identifier[cls] . identifier[access] ( identifier[self] . identifier[_parent] , identifier[placeholder] ) keyword[return] identifier[self] . identifier[_parent] . identifier[lookup_field_by_placeholder] ( identifier[placeholder] )
def _ph2f(self, placeholder): """Lookup a field given a field placeholder""" if issubclass(placeholder.cls, FieldAccessor): return placeholder.cls.access(self._parent, placeholder) # depends on [control=['if'], data=[]] return self._parent.lookup_field_by_placeholder(placeholder)
def get_file(self, filename):
    """Get a file from the repo.

    Returns a file-like stream with the data.

    Raises:
        GitError: If `filename` is not present in the repository HEAD.
    """
    log.debug('[%s]: reading: //%s/%s', self.name, self.name, filename)
    try:
        # Tree path lookup raises KeyError when the path is absent.
        # Keep the try body to just this lookup so an unrelated KeyError
        # from downstream code cannot be misreported as a missing file.
        blob = self.repo.head.commit.tree / filename
    except KeyError as err:
        # Chain explicitly so the original traceback is preserved.
        raise GitError(err) from err
    return blob.data_stream
def function[get_file, parameter[self, filename]]: constant[Get a file from the repo. Returns a file-like stream with the data. ] call[name[log].debug, parameter[constant[[%s]: reading: //%s/%s], name[self].name, name[self].name, name[filename]]] <ast.Try object at 0x7da204566bc0>
keyword[def] identifier[get_file] ( identifier[self] , identifier[filename] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[name] , identifier[filename] ) keyword[try] : identifier[blob] = identifier[self] . identifier[repo] . identifier[head] . identifier[commit] . identifier[tree] / identifier[filename] keyword[return] identifier[blob] . identifier[data_stream] keyword[except] identifier[KeyError] keyword[as] identifier[err] : keyword[raise] identifier[GitError] ( identifier[err] )
def get_file(self, filename): """Get a file from the repo. Returns a file-like stream with the data. """ log.debug('[%s]: reading: //%s/%s', self.name, self.name, filename) try: blob = self.repo.head.commit.tree / filename return blob.data_stream # depends on [control=['try'], data=[]] except KeyError as err: raise GitError(err) # depends on [control=['except'], data=['err']]
def add_to_matching_blacklist(session, term):
    """Add term to the matching blacklist.

    This function adds a `term` to the matching blacklist.
    The term to add cannot have a `None` or empty value, on this
    case an `ValueError` will be raised.

    :param session: database session
    :param term: term, word or value to blacklist

    :return: a new entry in the blacklist

    :raises ValueError: raised when `term` is `None` or an empty string
    """
    # Reject invalid terms up front, before touching the session.
    if term is None:
        raise ValueError("'term' to blacklist cannot be None")
    if term == '':
        raise ValueError("'term' to blacklist cannot be an empty string")

    entry = MatchingBlacklist(excluded=term)
    session.add(entry)
    return entry
def function[add_to_matching_blacklist, parameter[session, term]]: constant[Add term to the matching blacklist. This function adds a `term` to the matching blacklist. The term to add cannot have a `None` or empty value, on this case an `ValueError` will be raised. :param session: database session :param term: term, word or value to blacklist :return: a new entry in the blacklist :raises ValueError: raised when `term` is `None` or an empty string ] if compare[name[term] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0e9f430> if compare[name[term] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da1b0e9d210> variable[mb] assign[=] call[name[MatchingBlacklist], parameter[]] call[name[session].add, parameter[name[mb]]] return[name[mb]]
keyword[def] identifier[add_to_matching_blacklist] ( identifier[session] , identifier[term] ): literal[string] keyword[if] identifier[term] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[term] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[mb] = identifier[MatchingBlacklist] ( identifier[excluded] = identifier[term] ) identifier[session] . identifier[add] ( identifier[mb] ) keyword[return] identifier[mb]
def add_to_matching_blacklist(session, term): """Add term to the matching blacklist. This function adds a `term` to the matching blacklist. The term to add cannot have a `None` or empty value, on this case an `ValueError` will be raised. :param session: database session :param term: term, word or value to blacklist :return: a new entry in the blacklist :raises ValueError: raised when `term` is `None` or an empty string """ if term is None: raise ValueError("'term' to blacklist cannot be None") # depends on [control=['if'], data=[]] if term == '': raise ValueError("'term' to blacklist cannot be an empty string") # depends on [control=['if'], data=[]] mb = MatchingBlacklist(excluded=term) session.add(mb) return mb
def document_stat(stat): """ Create a structured documentation for the stat It replaces `{usage}`, `{common_parameters}` and `{aesthetics}` with generated documentation. """ # Dedented so that it lineups (in sphinx) with the part # generated parts when put together docstring = dedent(stat.__doc__) # usage: signature = make_signature(stat.__name__, stat.DEFAULT_PARAMS, common_stat_params, common_stat_param_values) usage = STAT_SIGNATURE_TPL.format(signature=signature) # aesthetics contents = OrderedDict(('**{}**'.format(ae), '') for ae in sorted(stat.REQUIRED_AES)) contents.update(sorted(stat.DEFAULT_AES.items())) table = dict_to_table(('Aesthetic', 'Default value'), contents) aesthetics_table = AESTHETICS_TABLE_TPL.format(table=table) tpl = dedent(stat._aesthetics_doc.lstrip('\n')) aesthetics_doc = tpl.format(aesthetics_table=aesthetics_table) aesthetics_doc = indent(aesthetics_doc, ' '*4) # common_parameters d = stat.DEFAULT_PARAMS common_parameters = STAT_PARAMS_TPL.format( default_geom=d['geom'], default_position=d['position'], default_na_rm=d['na_rm'], _aesthetics_doc=aesthetics_doc, **common_params_doc) docstring = docstring.replace('{usage}', usage) docstring = docstring.replace('{common_parameters}', common_parameters) stat.__doc__ = docstring return stat
def function[document_stat, parameter[stat]]: constant[ Create a structured documentation for the stat It replaces `{usage}`, `{common_parameters}` and `{aesthetics}` with generated documentation. ] variable[docstring] assign[=] call[name[dedent], parameter[name[stat].__doc__]] variable[signature] assign[=] call[name[make_signature], parameter[name[stat].__name__, name[stat].DEFAULT_PARAMS, name[common_stat_params], name[common_stat_param_values]]] variable[usage] assign[=] call[name[STAT_SIGNATURE_TPL].format, parameter[]] variable[contents] assign[=] call[name[OrderedDict], parameter[<ast.GeneratorExp object at 0x7da20e9601c0>]] call[name[contents].update, parameter[call[name[sorted], parameter[call[name[stat].DEFAULT_AES.items, parameter[]]]]]] variable[table] assign[=] call[name[dict_to_table], parameter[tuple[[<ast.Constant object at 0x7da18f721000>, <ast.Constant object at 0x7da18f720d00>]], name[contents]]] variable[aesthetics_table] assign[=] call[name[AESTHETICS_TABLE_TPL].format, parameter[]] variable[tpl] assign[=] call[name[dedent], parameter[call[name[stat]._aesthetics_doc.lstrip, parameter[constant[ ]]]]] variable[aesthetics_doc] assign[=] call[name[tpl].format, parameter[]] variable[aesthetics_doc] assign[=] call[name[indent], parameter[name[aesthetics_doc], binary_operation[constant[ ] * constant[4]]]] variable[d] assign[=] name[stat].DEFAULT_PARAMS variable[common_parameters] assign[=] call[name[STAT_PARAMS_TPL].format, parameter[]] variable[docstring] assign[=] call[name[docstring].replace, parameter[constant[{usage}], name[usage]]] variable[docstring] assign[=] call[name[docstring].replace, parameter[constant[{common_parameters}], name[common_parameters]]] name[stat].__doc__ assign[=] name[docstring] return[name[stat]]
keyword[def] identifier[document_stat] ( identifier[stat] ): literal[string] identifier[docstring] = identifier[dedent] ( identifier[stat] . identifier[__doc__] ) identifier[signature] = identifier[make_signature] ( identifier[stat] . identifier[__name__] , identifier[stat] . identifier[DEFAULT_PARAMS] , identifier[common_stat_params] , identifier[common_stat_param_values] ) identifier[usage] = identifier[STAT_SIGNATURE_TPL] . identifier[format] ( identifier[signature] = identifier[signature] ) identifier[contents] = identifier[OrderedDict] (( literal[string] . identifier[format] ( identifier[ae] ), literal[string] ) keyword[for] identifier[ae] keyword[in] identifier[sorted] ( identifier[stat] . identifier[REQUIRED_AES] )) identifier[contents] . identifier[update] ( identifier[sorted] ( identifier[stat] . identifier[DEFAULT_AES] . identifier[items] ())) identifier[table] = identifier[dict_to_table] (( literal[string] , literal[string] ), identifier[contents] ) identifier[aesthetics_table] = identifier[AESTHETICS_TABLE_TPL] . identifier[format] ( identifier[table] = identifier[table] ) identifier[tpl] = identifier[dedent] ( identifier[stat] . identifier[_aesthetics_doc] . identifier[lstrip] ( literal[string] )) identifier[aesthetics_doc] = identifier[tpl] . identifier[format] ( identifier[aesthetics_table] = identifier[aesthetics_table] ) identifier[aesthetics_doc] = identifier[indent] ( identifier[aesthetics_doc] , literal[string] * literal[int] ) identifier[d] = identifier[stat] . identifier[DEFAULT_PARAMS] identifier[common_parameters] = identifier[STAT_PARAMS_TPL] . identifier[format] ( identifier[default_geom] = identifier[d] [ literal[string] ], identifier[default_position] = identifier[d] [ literal[string] ], identifier[default_na_rm] = identifier[d] [ literal[string] ], identifier[_aesthetics_doc] = identifier[aesthetics_doc] , ** identifier[common_params_doc] ) identifier[docstring] = identifier[docstring] . 
identifier[replace] ( literal[string] , identifier[usage] ) identifier[docstring] = identifier[docstring] . identifier[replace] ( literal[string] , identifier[common_parameters] ) identifier[stat] . identifier[__doc__] = identifier[docstring] keyword[return] identifier[stat]
def document_stat(stat): """ Create a structured documentation for the stat It replaces `{usage}`, `{common_parameters}` and `{aesthetics}` with generated documentation. """ # Dedented so that it lineups (in sphinx) with the part # generated parts when put together docstring = dedent(stat.__doc__) # usage: signature = make_signature(stat.__name__, stat.DEFAULT_PARAMS, common_stat_params, common_stat_param_values) usage = STAT_SIGNATURE_TPL.format(signature=signature) # aesthetics contents = OrderedDict((('**{}**'.format(ae), '') for ae in sorted(stat.REQUIRED_AES))) contents.update(sorted(stat.DEFAULT_AES.items())) table = dict_to_table(('Aesthetic', 'Default value'), contents) aesthetics_table = AESTHETICS_TABLE_TPL.format(table=table) tpl = dedent(stat._aesthetics_doc.lstrip('\n')) aesthetics_doc = tpl.format(aesthetics_table=aesthetics_table) aesthetics_doc = indent(aesthetics_doc, ' ' * 4) # common_parameters d = stat.DEFAULT_PARAMS common_parameters = STAT_PARAMS_TPL.format(default_geom=d['geom'], default_position=d['position'], default_na_rm=d['na_rm'], _aesthetics_doc=aesthetics_doc, **common_params_doc) docstring = docstring.replace('{usage}', usage) docstring = docstring.replace('{common_parameters}', common_parameters) stat.__doc__ = docstring return stat
def register_series_method(method):
    """Register a function as a method attached to the Pandas Series.

    The function's first positional argument receives the Series; its
    ``__name__`` becomes the accessor name, so after registration it is
    callable as ``series.<name>(*args, **kwargs)``.

    :param method: function taking a Series as its first argument
    :return: the original, unmodified function
    """
    # Note: the previous version wrapped this body in an `inner` closure
    # that was defined and immediately called with no arguments -- dead
    # indirection with identical behavior, removed here.
    class AccessorMethod(object):
        __doc__ = method.__doc__

        def __init__(self, pandas_obj):
            # The Series instance the accessor was obtained from.
            self._obj = pandas_obj

        @wraps(method)
        def __call__(self, *args, **kwargs):
            return method(self._obj, *args, **kwargs)

    register_series_accessor(method.__name__)(AccessorMethod)
    return method
def function[register_series_method, parameter[method]]: constant[Register a function as a method attached to the Pandas Series. ] def function[inner, parameter[]]: class class[AccessorMethod, parameter[]] begin[:] variable[__doc__] assign[=] name[method].__doc__ def function[__init__, parameter[self, pandas_obj]]: name[self]._obj assign[=] name[pandas_obj] def function[__call__, parameter[self]]: return[call[name[method], parameter[name[self]._obj, <ast.Starred object at 0x7da1b1039d80>]]] call[call[name[register_series_accessor], parameter[name[method].__name__]], parameter[name[AccessorMethod]]] return[name[method]] return[call[name[inner], parameter[]]]
keyword[def] identifier[register_series_method] ( identifier[method] ): literal[string] keyword[def] identifier[inner] (* identifier[args] ,** identifier[kwargs] ): keyword[class] identifier[AccessorMethod] ( identifier[object] ): identifier[__doc__] = identifier[method] . identifier[__doc__] keyword[def] identifier[__init__] ( identifier[self] , identifier[pandas_obj] ): identifier[self] . identifier[_obj] = identifier[pandas_obj] @ identifier[wraps] ( identifier[method] ) keyword[def] identifier[__call__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): keyword[return] identifier[method] ( identifier[self] . identifier[_obj] ,* identifier[args] ,** identifier[kwargs] ) identifier[register_series_accessor] ( identifier[method] . identifier[__name__] )( identifier[AccessorMethod] ) keyword[return] identifier[method] keyword[return] identifier[inner] ()
def register_series_method(method):
    """Register a function as a method attached to the Pandas Series.
    """

    def inner(*args, **kwargs):

        class AccessorMethod(object):
            # Expose the wrapped function's docstring on the accessor.
            __doc__ = method.__doc__

            def __init__(self, series_obj):
                # Remember the Series the accessor is attached to.
                self._obj = series_obj

            @wraps(method)
            def __call__(self, *call_args, **call_kwargs):
                # Dispatch to the wrapped function with the stored Series.
                return method(self._obj, *call_args, **call_kwargs)

        # Register the accessor under the wrapped function's own name.
        register_series_accessor(method.__name__)(AccessorMethod)
        return method

    return inner()
def get_action_meanings(self):
    """Return a list of actions meanings.

    Meanings are returned in the order of their sorted action keys.
    """
    meanings = self._action_meanings
    # Iterate the keys in sorted order for a deterministic result.
    return [meanings[key] for key in sorted(meanings)]
def function[get_action_meanings, parameter[self]]: constant[Return a list of actions meanings.] variable[actions] assign[=] call[name[sorted], parameter[call[name[self]._action_meanings.keys, parameter[]]]] return[<ast.ListComp object at 0x7da1b26ad840>]
keyword[def] identifier[get_action_meanings] ( identifier[self] ): literal[string] identifier[actions] = identifier[sorted] ( identifier[self] . identifier[_action_meanings] . identifier[keys] ()) keyword[return] [ identifier[self] . identifier[_action_meanings] [ identifier[action] ] keyword[for] identifier[action] keyword[in] identifier[actions] ]
def get_action_meanings(self):
    """Return a list of actions meanings.

    The meanings come back ordered by their sorted action keys in
    ``self._action_meanings``.
    """
    # Sort the action identifiers so the returned list has a
    # deterministic order.
    actions = sorted(self._action_meanings.keys())
    return [self._action_meanings[action] for action in actions]
def combine(self, other, mean_free=False):
    """Merge the moments accumulated in ``other`` into this object.

    Updates the total weight ``w``, the sums ``sx``/``sy`` and the
    cross-moment ``Mxy`` in place so the result equals the statistics of
    the concatenated data, using the pairwise update formulae of [1]_.

    Parameters
    ----------
    other : same type as self
        Accumulator to merge in; it is not modified.
    mean_free : bool, optional
        If True, ``Mxy`` holds mean-free (centered) moments and a
        correction term built from the two sums is added.

    Returns
    -------
    self, updated in place.

    References
    ----------
    [1] http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf
    """
    w1 = self.w
    w2 = other.w
    w = w1 + w2
    # Fixes the old TODO: q = w2 / w1 was computed unconditionally and
    # divided by zero whenever self.w == 0 (and w1 / (w2 * w) divided by
    # zero when other.w == 0).  When either side carries no weight the
    # correction term is exactly zero, so it is simply skipped.
    if mean_free and w1 > 0 and w2 > 0:
        # dsx/dsy must be taken from the *pre-update* sums, so compute
        # them before sx/sy are overwritten below.
        q = w2 / w1
        dsx = q * self.sx - other.sx
        dsy = q * self.sy - other.sy
        if len(self.Mxy.shape) == 1:
            # diagonal only
            d = dsx * dsy
        else:
            d = np.outer(dsx, dsy)
        self.Mxy += other.Mxy + (w1 / (w2 * w)) * d
    else:
        # Raw (non-centered) moments, or a zero-weight operand: the
        # moments simply add.
        self.Mxy += other.Mxy
    # update the accumulated weight and the two sums
    self.w = w
    self.sx = self.sx + other.sx
    self.sy = self.sy + other.sy
    return self
def function[combine, parameter[self, other, mean_free]]: constant[ References ---------- [1] http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf ] variable[w1] assign[=] name[self].w variable[w2] assign[=] name[other].w variable[w] assign[=] binary_operation[name[w1] + name[w2]] variable[q] assign[=] binary_operation[name[w2] / name[w1]] variable[dsx] assign[=] binary_operation[binary_operation[name[q] * name[self].sx] - name[other].sx] variable[dsy] assign[=] binary_operation[binary_operation[name[q] * name[self].sy] - name[other].sy] name[self].w assign[=] binary_operation[name[w1] + name[w2]] name[self].sx assign[=] binary_operation[name[self].sx + name[other].sx] name[self].sy assign[=] binary_operation[name[self].sy + name[other].sy] if name[mean_free] begin[:] if compare[call[name[len], parameter[name[self].Mxy.shape]] equal[==] constant[1]] begin[:] variable[d] assign[=] binary_operation[name[dsx] * name[dsy]] <ast.AugAssign object at 0x7da204623c70> return[name[self]]
keyword[def] identifier[combine] ( identifier[self] , identifier[other] , identifier[mean_free] = keyword[False] ): literal[string] identifier[w1] = identifier[self] . identifier[w] identifier[w2] = identifier[other] . identifier[w] identifier[w] = identifier[w1] + identifier[w2] identifier[q] = identifier[w2] / identifier[w1] identifier[dsx] = identifier[q] * identifier[self] . identifier[sx] - identifier[other] . identifier[sx] identifier[dsy] = identifier[q] * identifier[self] . identifier[sy] - identifier[other] . identifier[sy] identifier[self] . identifier[w] = identifier[w1] + identifier[w2] identifier[self] . identifier[sx] = identifier[self] . identifier[sx] + identifier[other] . identifier[sx] identifier[self] . identifier[sy] = identifier[self] . identifier[sy] + identifier[other] . identifier[sy] keyword[if] identifier[mean_free] : keyword[if] identifier[len] ( identifier[self] . identifier[Mxy] . identifier[shape] )== literal[int] : identifier[d] = identifier[dsx] * identifier[dsy] keyword[else] : identifier[d] = identifier[np] . identifier[outer] ( identifier[dsx] , identifier[dsy] ) identifier[self] . identifier[Mxy] += identifier[other] . identifier[Mxy] +( identifier[w1] /( identifier[w2] * identifier[w] ))* identifier[d] keyword[else] : identifier[self] . identifier[Mxy] += identifier[other] . identifier[Mxy] keyword[return] identifier[self]
def combine(self, other, mean_free=False):
    """Merge the moments accumulated in ``other`` into this object.

    Updates ``w``, ``sx``, ``sy`` and ``Mxy`` in place and returns self.

    References
    ----------
    [1] http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf
    """
    w1 = self.w
    w2 = other.w
    w = w1 + w2 # TODO: fix this div by zero error
    # NOTE(review): q is a division by zero when w1 == 0 (see the TODO
    # above); callers must currently ensure self.w > 0.
    q = w2 / w1
    # Differences of the (pre-update) sums, used by the mean-free
    # correction term below.
    dsx = q * self.sx - other.sx
    dsy = q * self.sy - other.sy
    # update the accumulated weight and the two sums
    self.w = w1 + w2
    self.sx = self.sx + other.sx
    self.sy = self.sy + other.sy
    # combine the second moments
    if mean_free:
        # Mean-free moments need a correction term built from the
        # weighted difference of the sums (pairwise update, see [1]).
        if len(self.Mxy.shape) == 1:
            # diagonal only
            d = dsx * dsy
        else:
            d = np.outer(dsx, dsy)
        self.Mxy += other.Mxy + w1 / (w2 * w) * d
    else:
        self.Mxy += other.Mxy
    return self
def strip_prompt(self, *args, **kwargs):
    """Strip the trailing router prompt from the output.

    Delegates to the base class to remove the prompt, then runs the
    result through ``self.strip_context_items`` as well.
    """
    return self.strip_context_items(
        super(JuniperBase, self).strip_prompt(*args, **kwargs)
    )
def function[strip_prompt, parameter[self]]: constant[Strip the trailing router prompt from the output.] variable[a_string] assign[=] call[call[name[super], parameter[name[JuniperBase], name[self]]].strip_prompt, parameter[<ast.Starred object at 0x7da1b1f0b220>]] return[call[name[self].strip_context_items, parameter[name[a_string]]]]
keyword[def] identifier[strip_prompt] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[a_string] = identifier[super] ( identifier[JuniperBase] , identifier[self] ). identifier[strip_prompt] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[self] . identifier[strip_context_items] ( identifier[a_string] )
def strip_prompt(self, *args, **kwargs):
    """Strip the trailing router prompt from the output."""
    # Let the base class remove the trailing prompt first ...
    a_string = super(JuniperBase, self).strip_prompt(*args, **kwargs)
    # ... then strip Juniper-specific context items from the result.
    return self.strip_context_items(a_string)
def create_or_update_role(self, name, azure_roles, ttl="", max_ttl="", mount_point=DEFAULT_MOUNT_POINT):
    """Create or update a Vault role.

    The provided Azure roles must exist for this call to succeed. See
    the Azure secrets roles docs for more information about roles.

    Supported methods:
        POST: /{mount_point}/roles/{name}. Produces: 204 (empty body)

    :param name: Name of the role.
    :type name: str | unicode
    :param azure_roles: List of Azure roles to be assigned to the generated
        service principal.
    :type azure_roles: list(dict)
    :param ttl: Specifies the default TTL for service principals generated
        using this role. Accepts time suffixed strings ("1h") or an integer
        number of seconds. Defaults to the system/engine default TTL time.
    :type ttl: str | unicode
    :param max_ttl: Specifies the maximum TTL for service principals
        generated using this role. Accepts time suffixed strings ("1h") or
        an integer number of seconds. Defaults to the system/engine max TTL
        time.
    :type max_ttl: str | unicode
    :param mount_point: The "path" the method/backend was mounted on.
    :type mount_point: str | unicode
    :return: The response of the request.
    :rtype: requests.Response
    """
    # Build the endpoint, then the payload; the Azure role list is
    # serialized to a JSON string for the request body.
    api_path = '/v1/{mount_point}/roles/{name}'.format(
        mount_point=mount_point,
        name=name,
    )
    params = {
        'azure_roles': json.dumps(azure_roles),
        'ttl': ttl,
        'max_ttl': max_ttl,
    }
    return self._adapter.post(url=api_path, json=params)
def function[create_or_update_role, parameter[self, name, azure_roles, ttl, max_ttl, mount_point]]: constant[Create or update a Vault role. The provided Azure roles must exist for this call to succeed. See the Azure secrets roles docs for more information about roles. Supported methods: POST: /{mount_point}/roles/{name}. Produces: 204 (empty body) :param name: Name of the role. :type name: str | unicode :param azure_roles: List of Azure roles to be assigned to the generated service principal. :type azure_roles: list(dict) :param ttl: Specifies the default TTL for service principals generated using this role. Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine default TTL time. :type ttl: str | unicode :param max_ttl: Specifies the maximum TTL for service principals generated using this role. Accepts time suffixed strings ("1h") or an integer number of seconds. Defaults to the system/engine max TTL time. :type max_ttl: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The response of the request. :rtype: requests.Response ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2041db940>, <ast.Constant object at 0x7da2041dbb20>, <ast.Constant object at 0x7da2041dbdc0>], [<ast.Call object at 0x7da2041d82b0>, <ast.Name object at 0x7da2041d87c0>, <ast.Name object at 0x7da2041d9b10>]] variable[api_path] assign[=] call[constant[/v1/{mount_point}/roles/{name}].format, parameter[]] return[call[name[self]._adapter.post, parameter[]]]
keyword[def] identifier[create_or_update_role] ( identifier[self] , identifier[name] , identifier[azure_roles] , identifier[ttl] = literal[string] , identifier[max_ttl] = literal[string] , identifier[mount_point] = identifier[DEFAULT_MOUNT_POINT] ): literal[string] identifier[params] ={ literal[string] : identifier[json] . identifier[dumps] ( identifier[azure_roles] ), literal[string] : identifier[ttl] , literal[string] : identifier[max_ttl] , } identifier[api_path] = literal[string] . identifier[format] ( identifier[mount_point] = identifier[mount_point] , identifier[name] = identifier[name] ) keyword[return] identifier[self] . identifier[_adapter] . identifier[post] ( identifier[url] = identifier[api_path] , identifier[json] = identifier[params] , )
def create_or_update_role(self, name, azure_roles, ttl='', max_ttl='', mount_point=DEFAULT_MOUNT_POINT):
    """Create or update a Vault role.

    The provided Azure roles must exist for this call to succeed. See
    the Azure secrets roles docs for more information about roles.

    Supported methods:
        POST: /{mount_point}/roles/{name}. Produces: 204 (empty body)

    :param name: Name of the role.
    :type name: str | unicode
    :param azure_roles: List of Azure roles to be assigned to the generated
        service principal.
    :type azure_roles: list(dict)
    :param ttl: Specifies the default TTL for service principals generated
        using this role. Accepts time suffixed strings ("1h") or an integer
        number of seconds. Defaults to the system/engine default TTL time.
    :type ttl: str | unicode
    :param max_ttl: Specifies the maximum TTL for service principals
        generated using this role. Accepts time suffixed strings ("1h") or
        an integer number of seconds. Defaults to the system/engine max TTL
        time.
    :type max_ttl: str | unicode
    :param mount_point: The "path" the method/backend was mounted on.
    :type mount_point: str | unicode
    :return: The response of the request.
    :rtype: requests.Response
    """
    # The role list is serialized to a JSON string for the request body.
    params = {'azure_roles': json.dumps(azure_roles), 'ttl': ttl, 'max_ttl': max_ttl}
    # e.g. '/v1/azure/roles/my-role'
    api_path = '/v1/{mount_point}/roles/{name}'.format(mount_point=mount_point, name=name)
    return self._adapter.post(url=api_path, json=params)