code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def update(self, field_dict, where_clause=None): ''' update db entry :param field_dict: dictionary of fields and values :param where_clause: where clause for the update ''' query = ''' UPDATE %s SET %s ''' % ( self._name, ','.join('%s=:%s' % (k, k) for k in field_dict) ) if where_clause: query += ' WHERE %s' % (where_clause) self._cursor.execute(query, field_dict) self._connection.commit()
def function[update, parameter[self, field_dict, where_clause]]: constant[ update db entry :param field_dict: dictionary of fields and values :param where_clause: where clause for the update ] variable[query] assign[=] binary_operation[constant[ UPDATE %s SET %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e954a90>, <ast.Call object at 0x7da20e955cc0>]]] if name[where_clause] begin[:] <ast.AugAssign object at 0x7da207f998a0> call[name[self]._cursor.execute, parameter[name[query], name[field_dict]]] call[name[self]._connection.commit, parameter[]]
keyword[def] identifier[update] ( identifier[self] , identifier[field_dict] , identifier[where_clause] = keyword[None] ): literal[string] identifier[query] = literal[string] %( identifier[self] . identifier[_name] , literal[string] . identifier[join] ( literal[string] %( identifier[k] , identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[field_dict] ) ) keyword[if] identifier[where_clause] : identifier[query] += literal[string] %( identifier[where_clause] ) identifier[self] . identifier[_cursor] . identifier[execute] ( identifier[query] , identifier[field_dict] ) identifier[self] . identifier[_connection] . identifier[commit] ()
def update(self, field_dict, where_clause=None): """ update db entry :param field_dict: dictionary of fields and values :param where_clause: where clause for the update """ query = '\n UPDATE %s SET %s\n ' % (self._name, ','.join(('%s=:%s' % (k, k) for k in field_dict))) if where_clause: query += ' WHERE %s' % where_clause # depends on [control=['if'], data=[]] self._cursor.execute(query, field_dict) self._connection.commit()
def HasDateExceptionOn(self, date, exception_type=_EXCEPTION_TYPE_ADD): """Test if this service period has a date exception of the given type. Args: date: a string of form "YYYYMMDD" exception_type: the exception type the date should have. Defaults to _EXCEPTION_TYPE_ADD Returns: True iff this service has service exception of specified type at date. """ if date in self.date_exceptions: return exception_type == self.date_exceptions[date][0] return False
def function[HasDateExceptionOn, parameter[self, date, exception_type]]: constant[Test if this service period has a date exception of the given type. Args: date: a string of form "YYYYMMDD" exception_type: the exception type the date should have. Defaults to _EXCEPTION_TYPE_ADD Returns: True iff this service has service exception of specified type at date. ] if compare[name[date] in name[self].date_exceptions] begin[:] return[compare[name[exception_type] equal[==] call[call[name[self].date_exceptions][name[date]]][constant[0]]]] return[constant[False]]
keyword[def] identifier[HasDateExceptionOn] ( identifier[self] , identifier[date] , identifier[exception_type] = identifier[_EXCEPTION_TYPE_ADD] ): literal[string] keyword[if] identifier[date] keyword[in] identifier[self] . identifier[date_exceptions] : keyword[return] identifier[exception_type] == identifier[self] . identifier[date_exceptions] [ identifier[date] ][ literal[int] ] keyword[return] keyword[False]
def HasDateExceptionOn(self, date, exception_type=_EXCEPTION_TYPE_ADD): """Test if this service period has a date exception of the given type. Args: date: a string of form "YYYYMMDD" exception_type: the exception type the date should have. Defaults to _EXCEPTION_TYPE_ADD Returns: True iff this service has service exception of specified type at date. """ if date in self.date_exceptions: return exception_type == self.date_exceptions[date][0] # depends on [control=['if'], data=['date']] return False
def stringmethod(func): """ Validator factory which call a single method on the string. """ method_name = func() @wraps(func) def factory(): def validator(v): if not isinstance(v, six.string_types): raise Invalid(_(u'Not a string'), get_type_name(six.text_type), get_type_name(type(v))) return getattr(v, method_name)() return validator return factory
def function[stringmethod, parameter[func]]: constant[ Validator factory which call a single method on the string. ] variable[method_name] assign[=] call[name[func], parameter[]] def function[factory, parameter[]]: def function[validator, parameter[v]]: if <ast.UnaryOp object at 0x7da1b265a410> begin[:] <ast.Raise object at 0x7da1b265b940> return[call[call[name[getattr], parameter[name[v], name[method_name]]], parameter[]]] return[name[validator]] return[name[factory]]
keyword[def] identifier[stringmethod] ( identifier[func] ): literal[string] identifier[method_name] = identifier[func] () @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[factory] (): keyword[def] identifier[validator] ( identifier[v] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[v] , identifier[six] . identifier[string_types] ): keyword[raise] identifier[Invalid] ( identifier[_] ( literal[string] ), identifier[get_type_name] ( identifier[six] . identifier[text_type] ), identifier[get_type_name] ( identifier[type] ( identifier[v] ))) keyword[return] identifier[getattr] ( identifier[v] , identifier[method_name] )() keyword[return] identifier[validator] keyword[return] identifier[factory]
def stringmethod(func): """ Validator factory which call a single method on the string. """ method_name = func() @wraps(func) def factory(): def validator(v): if not isinstance(v, six.string_types): raise Invalid(_(u'Not a string'), get_type_name(six.text_type), get_type_name(type(v))) # depends on [control=['if'], data=[]] return getattr(v, method_name)() return validator return factory
def from_call(foo, args, kwargs, hints, call_by_value=True): """Takes a function and a set of arguments it needs to run on. Returns a newly constructed workflow representing the promised value from the evaluation of the function with said arguments. These arguments are stored in a BoundArguments object matching to the signature of the given function ``foo``. That is, bound_args was constructed by doing:: inspect.signature(foo).bind(*args, **kwargs) The arguments stored in the ``bound_args`` object are filtered on being either plain, or promised. If an argument is promised, the value it represents is not actually available and needs to be computed by evaluating a workflow. If an argument is a promised value, the workflow representing the value is added to the new workflow. First all the nodes in the original workflow, if not already present in the new workflow from an earlier argument, are copied to the new workflow, and a new entry is made into the link dictionary. Then the links in the old workflow are also added to the link dictionary. Since the link dictionary points from nodes to a :py:class:`set` of :py:class:`ArgumentAddress` es, no links are duplicated. In the ``bound_args`` object the promised value is replaced by the ``Empty`` object, so that we can see which arguments still have to be evaluated. Doing this for all promised value arguments in the bound_args object, results in a new workflow with all the correct dependencies represented as links in the graph. :param foo: Function (or object) being called. :type foo: Callable :param args: Normal arguments to call :param kwargs: Keyword arguments to call :param hints: Hints that can be passed to the scheduler on where or how to schedule this job. :returns: New workflow. :rtype: Workflow """ # create the bound_args object bound_args = signature(foo).bind(*args, **kwargs) bound_args.apply_defaults() # get the name of the variadic argument if there is one variadic = next((x.name for x in bound_args.signature.parameters.values() if x.kind == Parameter.VAR_POSITIONAL), None) # *HACK* # the BoundArguments class uses a tuple to store the # variadic arguments. Since we need to modify them, # we have to replace the tuple with a list. This works, for now... if variadic: if variadic not in bound_args.arguments: bound_args.arguments[variadic] = [] else: bound_args.arguments[variadic] = \ list(bound_args.arguments[variadic]) # create the node and initialise hash key node = FunctionNode(foo, bound_args, hints) # setup the new workflow root = id(node) nodes = {root: node} links = {root: set()} # walk the arguments to the function call for address in serialize_arguments(node.bound_args): arg = ref_argument(node.bound_args, address) # the argument may still become a workflow if it # has the __deepcopy__ operator overloaded to return a workflow call_by_ref = 'call_by_ref' in hints and \ (hints['call_by_ref'] is True or address.name in hints['call_by_ref']) if not is_workflow(arg) and call_by_value and not call_by_ref: arg = deepcopy(arg) # if still not a workflow, we have a plain value! if not is_workflow(arg): set_argument(node.bound_args, address, arg) continue # merge the argument workflow into the new workflow workflow = get_workflow(arg) set_argument(node.bound_args, address, Empty) for n in workflow.nodes: if n not in nodes: nodes[n] = workflow.nodes[n] links[n] = set() links[n].update(workflow.links[n]) links[workflow.root].add((root, address)) return Workflow(root, nodes, links)
def function[from_call, parameter[foo, args, kwargs, hints, call_by_value]]: constant[Takes a function and a set of arguments it needs to run on. Returns a newly constructed workflow representing the promised value from the evaluation of the function with said arguments. These arguments are stored in a BoundArguments object matching to the signature of the given function ``foo``. That is, bound_args was constructed by doing:: inspect.signature(foo).bind(*args, **kwargs) The arguments stored in the ``bound_args`` object are filtered on being either plain, or promised. If an argument is promised, the value it represents is not actually available and needs to be computed by evaluating a workflow. If an argument is a promised value, the workflow representing the value is added to the new workflow. First all the nodes in the original workflow, if not already present in the new workflow from an earlier argument, are copied to the new workflow, and a new entry is made into the link dictionary. Then the links in the old workflow are also added to the link dictionary. Since the link dictionary points from nodes to a :py:class:`set` of :py:class:`ArgumentAddress` es, no links are duplicated. In the ``bound_args`` object the promised value is replaced by the ``Empty`` object, so that we can see which arguments still have to be evaluated. Doing this for all promised value arguments in the bound_args object, results in a new workflow with all the correct dependencies represented as links in the graph. :param foo: Function (or object) being called. :type foo: Callable :param args: Normal arguments to call :param kwargs: Keyword arguments to call :param hints: Hints that can be passed to the scheduler on where or how to schedule this job. :returns: New workflow. :rtype: Workflow ] variable[bound_args] assign[=] call[call[name[signature], parameter[name[foo]]].bind, parameter[<ast.Starred object at 0x7da18ede4580>]] call[name[bound_args].apply_defaults, parameter[]] variable[variadic] assign[=] call[name[next], parameter[<ast.GeneratorExp object at 0x7da18ede5600>, constant[None]]] if name[variadic] begin[:] if compare[name[variadic] <ast.NotIn object at 0x7da2590d7190> name[bound_args].arguments] begin[:] call[name[bound_args].arguments][name[variadic]] assign[=] list[[]] variable[node] assign[=] call[name[FunctionNode], parameter[name[foo], name[bound_args], name[hints]]] variable[root] assign[=] call[name[id], parameter[name[node]]] variable[nodes] assign[=] dictionary[[<ast.Name object at 0x7da18ede4040>], [<ast.Name object at 0x7da18ede7ee0>]] variable[links] assign[=] dictionary[[<ast.Name object at 0x7da18ede60e0>], [<ast.Call object at 0x7da18ede6620>]] for taget[name[address]] in starred[call[name[serialize_arguments], parameter[name[node].bound_args]]] begin[:] variable[arg] assign[=] call[name[ref_argument], parameter[name[node].bound_args, name[address]]] variable[call_by_ref] assign[=] <ast.BoolOp object at 0x7da18ede6b30> if <ast.BoolOp object at 0x7da18ede5c60> begin[:] variable[arg] assign[=] call[name[deepcopy], parameter[name[arg]]] if <ast.UnaryOp object at 0x7da18ede7460> begin[:] call[name[set_argument], parameter[name[node].bound_args, name[address], name[arg]]] continue variable[workflow] assign[=] call[name[get_workflow], parameter[name[arg]]] call[name[set_argument], parameter[name[node].bound_args, name[address], name[Empty]]] for taget[name[n]] in starred[name[workflow].nodes] begin[:] if compare[name[n] <ast.NotIn object at 0x7da2590d7190> name[nodes]] begin[:] call[name[nodes]][name[n]] assign[=] call[name[workflow].nodes][name[n]] call[name[links]][name[n]] assign[=] call[name[set], parameter[]] call[call[name[links]][name[n]].update, parameter[call[name[workflow].links][name[n]]]] call[call[name[links]][name[workflow].root].add, parameter[tuple[[<ast.Name object at 0x7da18eb54550>, <ast.Name object at 0x7da18eb56c80>]]]] return[call[name[Workflow], parameter[name[root], name[nodes], name[links]]]]
keyword[def] identifier[from_call] ( identifier[foo] , identifier[args] , identifier[kwargs] , identifier[hints] , identifier[call_by_value] = keyword[True] ): literal[string] identifier[bound_args] = identifier[signature] ( identifier[foo] ). identifier[bind] (* identifier[args] ,** identifier[kwargs] ) identifier[bound_args] . identifier[apply_defaults] () identifier[variadic] = identifier[next] (( identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[bound_args] . identifier[signature] . identifier[parameters] . identifier[values] () keyword[if] identifier[x] . identifier[kind] == identifier[Parameter] . identifier[VAR_POSITIONAL] ), keyword[None] ) keyword[if] identifier[variadic] : keyword[if] identifier[variadic] keyword[not] keyword[in] identifier[bound_args] . identifier[arguments] : identifier[bound_args] . identifier[arguments] [ identifier[variadic] ]=[] keyword[else] : identifier[bound_args] . identifier[arguments] [ identifier[variadic] ]= identifier[list] ( identifier[bound_args] . identifier[arguments] [ identifier[variadic] ]) identifier[node] = identifier[FunctionNode] ( identifier[foo] , identifier[bound_args] , identifier[hints] ) identifier[root] = identifier[id] ( identifier[node] ) identifier[nodes] ={ identifier[root] : identifier[node] } identifier[links] ={ identifier[root] : identifier[set] ()} keyword[for] identifier[address] keyword[in] identifier[serialize_arguments] ( identifier[node] . identifier[bound_args] ): identifier[arg] = identifier[ref_argument] ( identifier[node] . identifier[bound_args] , identifier[address] ) identifier[call_by_ref] = literal[string] keyword[in] identifier[hints] keyword[and] ( identifier[hints] [ literal[string] ] keyword[is] keyword[True] keyword[or] identifier[address] . identifier[name] keyword[in] identifier[hints] [ literal[string] ]) keyword[if] keyword[not] identifier[is_workflow] ( identifier[arg] ) keyword[and] identifier[call_by_value] keyword[and] keyword[not] identifier[call_by_ref] : identifier[arg] = identifier[deepcopy] ( identifier[arg] ) keyword[if] keyword[not] identifier[is_workflow] ( identifier[arg] ): identifier[set_argument] ( identifier[node] . identifier[bound_args] , identifier[address] , identifier[arg] ) keyword[continue] identifier[workflow] = identifier[get_workflow] ( identifier[arg] ) identifier[set_argument] ( identifier[node] . identifier[bound_args] , identifier[address] , identifier[Empty] ) keyword[for] identifier[n] keyword[in] identifier[workflow] . identifier[nodes] : keyword[if] identifier[n] keyword[not] keyword[in] identifier[nodes] : identifier[nodes] [ identifier[n] ]= identifier[workflow] . identifier[nodes] [ identifier[n] ] identifier[links] [ identifier[n] ]= identifier[set] () identifier[links] [ identifier[n] ]. identifier[update] ( identifier[workflow] . identifier[links] [ identifier[n] ]) identifier[links] [ identifier[workflow] . identifier[root] ]. identifier[add] (( identifier[root] , identifier[address] )) keyword[return] identifier[Workflow] ( identifier[root] , identifier[nodes] , identifier[links] )
def from_call(foo, args, kwargs, hints, call_by_value=True): """Takes a function and a set of arguments it needs to run on. Returns a newly constructed workflow representing the promised value from the evaluation of the function with said arguments. These arguments are stored in a BoundArguments object matching to the signature of the given function ``foo``. That is, bound_args was constructed by doing:: inspect.signature(foo).bind(*args, **kwargs) The arguments stored in the ``bound_args`` object are filtered on being either plain, or promised. If an argument is promised, the value it represents is not actually available and needs to be computed by evaluating a workflow. If an argument is a promised value, the workflow representing the value is added to the new workflow. First all the nodes in the original workflow, if not already present in the new workflow from an earlier argument, are copied to the new workflow, and a new entry is made into the link dictionary. Then the links in the old workflow are also added to the link dictionary. Since the link dictionary points from nodes to a :py:class:`set` of :py:class:`ArgumentAddress` es, no links are duplicated. In the ``bound_args`` object the promised value is replaced by the ``Empty`` object, so that we can see which arguments still have to be evaluated. Doing this for all promised value arguments in the bound_args object, results in a new workflow with all the correct dependencies represented as links in the graph. :param foo: Function (or object) being called. :type foo: Callable :param args: Normal arguments to call :param kwargs: Keyword arguments to call :param hints: Hints that can be passed to the scheduler on where or how to schedule this job. :returns: New workflow. :rtype: Workflow """ # create the bound_args object bound_args = signature(foo).bind(*args, **kwargs) bound_args.apply_defaults() # get the name of the variadic argument if there is one variadic = next((x.name for x in bound_args.signature.parameters.values() if x.kind == Parameter.VAR_POSITIONAL), None) # *HACK* # the BoundArguments class uses a tuple to store the # variadic arguments. Since we need to modify them, # we have to replace the tuple with a list. This works, for now... if variadic: if variadic not in bound_args.arguments: bound_args.arguments[variadic] = [] # depends on [control=['if'], data=['variadic']] else: bound_args.arguments[variadic] = list(bound_args.arguments[variadic]) # depends on [control=['if'], data=[]] # create the node and initialise hash key node = FunctionNode(foo, bound_args, hints) # setup the new workflow root = id(node) nodes = {root: node} links = {root: set()} # walk the arguments to the function call for address in serialize_arguments(node.bound_args): arg = ref_argument(node.bound_args, address) # the argument may still become a workflow if it # has the __deepcopy__ operator overloaded to return a workflow call_by_ref = 'call_by_ref' in hints and (hints['call_by_ref'] is True or address.name in hints['call_by_ref']) if not is_workflow(arg) and call_by_value and (not call_by_ref): arg = deepcopy(arg) # depends on [control=['if'], data=[]] # if still not a workflow, we have a plain value! if not is_workflow(arg): set_argument(node.bound_args, address, arg) continue # depends on [control=['if'], data=[]] # merge the argument workflow into the new workflow workflow = get_workflow(arg) set_argument(node.bound_args, address, Empty) for n in workflow.nodes: if n not in nodes: nodes[n] = workflow.nodes[n] links[n] = set() # depends on [control=['if'], data=['n', 'nodes']] links[n].update(workflow.links[n]) # depends on [control=['for'], data=['n']] links[workflow.root].add((root, address)) # depends on [control=['for'], data=['address']] return Workflow(root, nodes, links)
def high_density_tuples(row_tuple: Tuple) -> dict: """Color rows with extremely high population density red.""" opts = dict() if len(row_tuple) >= 7 and row_tuple[6] > EXTREMELY_HIGH_POULATION_DENSITY: opts[tf.TableFormatter.ROW_OPT_TEXT_COLOR] = tf.TableColors.TEXT_COLOR_RED return opts
def function[high_density_tuples, parameter[row_tuple]]: constant[Color rows with extremely high population density red.] variable[opts] assign[=] call[name[dict], parameter[]] if <ast.BoolOp object at 0x7da20e9558d0> begin[:] call[name[opts]][name[tf].TableFormatter.ROW_OPT_TEXT_COLOR] assign[=] name[tf].TableColors.TEXT_COLOR_RED return[name[opts]]
keyword[def] identifier[high_density_tuples] ( identifier[row_tuple] : identifier[Tuple] )-> identifier[dict] : literal[string] identifier[opts] = identifier[dict] () keyword[if] identifier[len] ( identifier[row_tuple] )>= literal[int] keyword[and] identifier[row_tuple] [ literal[int] ]> identifier[EXTREMELY_HIGH_POULATION_DENSITY] : identifier[opts] [ identifier[tf] . identifier[TableFormatter] . identifier[ROW_OPT_TEXT_COLOR] ]= identifier[tf] . identifier[TableColors] . identifier[TEXT_COLOR_RED] keyword[return] identifier[opts]
def high_density_tuples(row_tuple: Tuple) -> dict: """Color rows with extremely high population density red.""" opts = dict() if len(row_tuple) >= 7 and row_tuple[6] > EXTREMELY_HIGH_POULATION_DENSITY: opts[tf.TableFormatter.ROW_OPT_TEXT_COLOR] = tf.TableColors.TEXT_COLOR_RED # depends on [control=['if'], data=[]] return opts
def get_colorscale(scale): """ Returns a color scale to be used for a plotly figure Parameters: ----------- scale : str or list Color scale name If the color name is preceded by a minus (-) then the scale is inversed. Also accepts a list of colors (rgb,rgba,hex) Example: get_colorscale('accent') get_colorscale(['rgb(127,201,127)','rgb(190,174,212)','rgb(253,192,134)']) """ if type(scale) in string_types: scale = get_scales(scale) else: if type(scale) != list: raise Exception( "scale needs to be either a scale name or list of colors") cs = [[1.0 * c / (len(scale) - 1), scale[c]] for c in range(len(scale))] cs.sort() return cs
def function[get_colorscale, parameter[scale]]: constant[ Returns a color scale to be used for a plotly figure Parameters: ----------- scale : str or list Color scale name If the color name is preceded by a minus (-) then the scale is inversed. Also accepts a list of colors (rgb,rgba,hex) Example: get_colorscale('accent') get_colorscale(['rgb(127,201,127)','rgb(190,174,212)','rgb(253,192,134)']) ] if compare[call[name[type], parameter[name[scale]]] in name[string_types]] begin[:] variable[scale] assign[=] call[name[get_scales], parameter[name[scale]]] variable[cs] assign[=] <ast.ListComp object at 0x7da1b1cb35e0> call[name[cs].sort, parameter[]] return[name[cs]]
keyword[def] identifier[get_colorscale] ( identifier[scale] ): literal[string] keyword[if] identifier[type] ( identifier[scale] ) keyword[in] identifier[string_types] : identifier[scale] = identifier[get_scales] ( identifier[scale] ) keyword[else] : keyword[if] identifier[type] ( identifier[scale] )!= identifier[list] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[cs] =[[ literal[int] * identifier[c] /( identifier[len] ( identifier[scale] )- literal[int] ), identifier[scale] [ identifier[c] ]] keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[len] ( identifier[scale] ))] identifier[cs] . identifier[sort] () keyword[return] identifier[cs]
def get_colorscale(scale): """ Returns a color scale to be used for a plotly figure Parameters: ----------- scale : str or list Color scale name If the color name is preceded by a minus (-) then the scale is inversed. Also accepts a list of colors (rgb,rgba,hex) Example: get_colorscale('accent') get_colorscale(['rgb(127,201,127)','rgb(190,174,212)','rgb(253,192,134)']) """ if type(scale) in string_types: scale = get_scales(scale) # depends on [control=['if'], data=[]] elif type(scale) != list: raise Exception('scale needs to be either a scale name or list of colors') # depends on [control=['if'], data=[]] cs = [[1.0 * c / (len(scale) - 1), scale[c]] for c in range(len(scale))] cs.sort() return cs
def remove_menu(self, name): """Remove a top-level menu. Only removes menus created by the same menu manager. """ if name not in self._menus: raise exceptions.MenuNotFound( "Menu {!r} was not found. It might be deleted, or belong to another menu manager.".format(name)) self._menu.removeAction(self._menus[name].menuAction()) del self._menus[name]
def function[remove_menu, parameter[self, name]]: constant[Remove a top-level menu. Only removes menus created by the same menu manager. ] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._menus] begin[:] <ast.Raise object at 0x7da1b1292e00> call[name[self]._menu.removeAction, parameter[call[call[name[self]._menus][name[name]].menuAction, parameter[]]]] <ast.Delete object at 0x7da1b12939a0>
keyword[def] identifier[remove_menu] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_menus] : keyword[raise] identifier[exceptions] . identifier[MenuNotFound] ( literal[string] . identifier[format] ( identifier[name] )) identifier[self] . identifier[_menu] . identifier[removeAction] ( identifier[self] . identifier[_menus] [ identifier[name] ]. identifier[menuAction] ()) keyword[del] identifier[self] . identifier[_menus] [ identifier[name] ]
def remove_menu(self, name): """Remove a top-level menu. Only removes menus created by the same menu manager. """ if name not in self._menus: raise exceptions.MenuNotFound('Menu {!r} was not found. It might be deleted, or belong to another menu manager.'.format(name)) # depends on [control=['if'], data=['name']] self._menu.removeAction(self._menus[name].menuAction()) del self._menus[name]
def login(self, sendspec): """Logs the user in with the passed-in password and command. Tracks the login. If used, used logout to log out again. Assumes you are root when logging in, so no password required. If not, override the default command for multi-level logins. If passwords are required, see setup_prompt() and revert_prompt() @type param: see shutit_sendspec.ShutItSendSpec @type sendspec: shutit_sendspec.ShutItSendSpec """ user = sendspec.user command = sendspec.send prompt_prefix = sendspec.prompt_prefix shutit = self.shutit # We don't get the default expect here, as it's either passed in, or a base default regexp. if isinstance(sendspec.password,str): shutit_global.shutit_global_object.secret_words_set.add(sendspec.password) r_id = shutit_util.random_id() if prompt_prefix is None: prompt_prefix = r_id # Be helpful - if this looks like a command that requires a user, then suggest user provides one. if user is None: user = self.whoami() if 'bash' not in command: shutit.log('No user supplied to login function, so retrieving who I am (' + user + '). You may want to override.', level=logging.WARNING) if ' ' in user: self.shutit.fail('user has space in it - did you mean: login(command="' + user + '")?') # pragma: no cover if self.shutit.build['delivery'] == 'bash' and command == 'su -': # We want to retain the current working directory command = 'su' # If this is a su-type command, add the user, else assume user is in the command. if command == 'su -' or command == 'su' or command == 'login': send = command + ' ' + user else: send = command login_expect = sendspec.expect or shutit_global.shutit_global_object.base_prompt # We don't fail on empty before as many login programs mess with the output. # In this special case of login we expect either the prompt, or 'user@' as this has been seen to work. general_expect = [login_expect] # Add in a match if we see user+ and then the login matches. Be careful not to match against 'user+@...password:' general_expect = general_expect + [user+'@.*'+'[#$]'] # If not an ssh login, then we can match against user + @sign because it won't clash with 'user@adasdas password:' if (sendspec.is_ssh != None and sendspec.is_ssh) or command.find('ssh ') != -1: shutit.log('Assumed to be an ssh command, is_ssh: ' + str(sendspec.is_ssh) + ', command: ' + command, level=logging.DEBUG) # If user@ already there, remove it, as it can conflict with password lines in ssh calls. if user+'@' in general_expect: general_expect.remove(user+'@') # Adding the space to avoid commands which embed eg $(whoami) or ${var} general_expect.append('.*[#$] ') # Don't match 'Last login:' or 'Last failed login:' send_dict={'ontinue connecting':['yes', False]} if sendspec.password is not None: send_dict.update({'assword:':[sendspec.password, True]}) send_dict.update({r'[^dt] login:':[sendspec.password, True]}) else: send_dict={'ontinue connecting':['yes', False]} if sendspec.password is not None: send_dict.update({'assword:':[sendspec.password, True]}) send_dict.update({r'[^dt] login:':[sendspec.password, True]}) send_dict.update({user+'@':[sendspec.password, True]}) if user == 'bash' and command == 'su -': shutit.log('WARNING! user is bash - if you see problems below, did you mean: login(command="' + user + '")?', level=logging.WARNING) self.shutit.handle_note(sendspec.note,command=command + '\n\n[as user: "' + user + '"]',training_input=send) echo = self.shutit.get_echo_override(sendspec.echo) shutit.log('Logging in to new ShutIt environment.' + user, level=logging.DEBUG) shutit.log('Logging in with command: ' + send + ' as user: ' + user, level=logging.DEBUG) shutit.log('Login stack before login: ' + str(self.login_stack), level=logging.DEBUG) # check_sudo - set to false if the password has been supplied. check_sudo = False if sendspec.password is None and send.strip().find('sudo') == 0: check_sudo = True res = self.multisend(ShutItSendSpec(self, send=send, send_dict=send_dict, expect=general_expect, check_exit=False, timeout=sendspec.timeout, fail_on_empty_before=False, escape=sendspec.escape, echo=echo, remove_on_match=True, nonewline=sendspec.nonewline, check_sudo=check_sudo, loglevel=sendspec.loglevel)) if res == -1: # Should not get here as login should not be blocked. assert False, shutit_util.print_debug() # Setup prompt if prompt_prefix != None: self.setup_prompt(r_id,prefix=prompt_prefix,capture_exit_code=True) else: self.setup_prompt(r_id,capture_exit_code=True) self.login_stack.append(r_id) shutit.log('Login stack after login: ' + str(self.login_stack), level=logging.DEBUG) if self.send_and_get_output(''' echo $SHUTIT_EC && unset SHUTIT_EC''', loglevel=logging.DEBUG, echo=False) != '0': # TODO: remove just-added login stack item (since we failed to log in successfully)? if sendspec.fail_on_fail: # pragma: no cover self.shutit.fail('Login failure! You may want to re-run shutit with --echo or -l debug and scroll up to see what the problem was.') else: return False if sendspec.go_home: self.send(ShutItSendSpec(self, send='cd', check_exit=False, echo=False, ignore_background=True, loglevel=sendspec.loglevel)) self.shutit.handle_note_after(note=sendspec.note,training_input=send) return True
def function[login, parameter[self, sendspec]]: constant[Logs the user in with the passed-in password and command. Tracks the login. If used, used logout to log out again. Assumes you are root when logging in, so no password required. If not, override the default command for multi-level logins. If passwords are required, see setup_prompt() and revert_prompt() @type param: see shutit_sendspec.ShutItSendSpec @type sendspec: shutit_sendspec.ShutItSendSpec ] variable[user] assign[=] name[sendspec].user variable[command] assign[=] name[sendspec].send variable[prompt_prefix] assign[=] name[sendspec].prompt_prefix variable[shutit] assign[=] name[self].shutit if call[name[isinstance], parameter[name[sendspec].password, name[str]]] begin[:] call[name[shutit_global].shutit_global_object.secret_words_set.add, parameter[name[sendspec].password]] variable[r_id] assign[=] call[name[shutit_util].random_id, parameter[]] if compare[name[prompt_prefix] is constant[None]] begin[:] variable[prompt_prefix] assign[=] name[r_id] if compare[name[user] is constant[None]] begin[:] variable[user] assign[=] call[name[self].whoami, parameter[]] if compare[constant[bash] <ast.NotIn object at 0x7da2590d7190> name[command]] begin[:] call[name[shutit].log, parameter[binary_operation[binary_operation[constant[No user supplied to login function, so retrieving who I am (] + name[user]] + constant[). You may want to override.]]]] if compare[constant[ ] in name[user]] begin[:] call[name[self].shutit.fail, parameter[binary_operation[binary_operation[constant[user has space in it - did you mean: login(command="] + name[user]] + constant[")?]]]] if <ast.BoolOp object at 0x7da18f09e9e0> begin[:] variable[command] assign[=] constant[su] if <ast.BoolOp object at 0x7da18f09f0a0> begin[:] variable[send] assign[=] binary_operation[binary_operation[name[command] + constant[ ]] + name[user]] variable[login_expect] assign[=] <ast.BoolOp object at 0x7da18bc701f0> variable[general_expect] assign[=] list[[<ast.Name object at 0x7da18bc71b10>]] variable[general_expect] assign[=] binary_operation[name[general_expect] + list[[<ast.BinOp object at 0x7da18bc71180>]]] if <ast.BoolOp object at 0x7da18bc73d90> begin[:] call[name[shutit].log, parameter[binary_operation[binary_operation[binary_operation[constant[Assumed to be an ssh command, is_ssh: ] + call[name[str], parameter[name[sendspec].is_ssh]]] + constant[, command: ]] + name[command]]]] if compare[binary_operation[name[user] + constant[@]] in name[general_expect]] begin[:] call[name[general_expect].remove, parameter[binary_operation[name[user] + constant[@]]]] call[name[general_expect].append, parameter[constant[.*[#$] ]]] variable[send_dict] assign[=] dictionary[[<ast.Constant object at 0x7da18bc71960>], [<ast.List object at 0x7da18bc70370>]] if compare[name[sendspec].password is_not constant[None]] begin[:] call[name[send_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18bc73370>], [<ast.List object at 0x7da18bc712d0>]]]] call[name[send_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da18bc71240>], [<ast.List object at 0x7da18bc72b90>]]]] if <ast.BoolOp object at 0x7da18bc73d00> begin[:] call[name[shutit].log, parameter[binary_operation[binary_operation[constant[WARNING! user is bash - if you see problems below, did you mean: login(command="] + name[user]] + constant[")?]]]] call[name[self].shutit.handle_note, parameter[name[sendspec].note]] variable[echo] assign[=] call[name[self].shutit.get_echo_override, parameter[name[sendspec].echo]] call[name[shutit].log, parameter[binary_operation[constant[Logging in to new ShutIt environment.] + name[user]]]] call[name[shutit].log, parameter[binary_operation[binary_operation[binary_operation[constant[Logging in with command: ] + name[send]] + constant[ as user: ]] + name[user]]]] call[name[shutit].log, parameter[binary_operation[constant[Login stack before login: ] + call[name[str], parameter[name[self].login_stack]]]]] variable[check_sudo] assign[=] constant[False] if <ast.BoolOp object at 0x7da18bc712a0> begin[:] variable[check_sudo] assign[=] constant[True] variable[res] assign[=] call[name[self].multisend, parameter[call[name[ShutItSendSpec], parameter[name[self]]]]] if compare[name[res] equal[==] <ast.UnaryOp object at 0x7da2044c1f90>] begin[:] assert[constant[False]] if compare[name[prompt_prefix] not_equal[!=] constant[None]] begin[:] call[name[self].setup_prompt, parameter[name[r_id]]] call[name[self].login_stack.append, parameter[name[r_id]]] call[name[shutit].log, parameter[binary_operation[constant[Login stack after login: ] + call[name[str], parameter[name[self].login_stack]]]]] if compare[call[name[self].send_and_get_output, parameter[constant[ echo $SHUTIT_EC && unset SHUTIT_EC]]] not_equal[!=] constant[0]] begin[:] if name[sendspec].fail_on_fail begin[:] call[name[self].shutit.fail, parameter[constant[Login failure! You may want to re-run shutit with --echo or -l debug and scroll up to see what the problem was.]]] if name[sendspec].go_home begin[:] call[name[self].send, parameter[call[name[ShutItSendSpec], parameter[name[self]]]]] call[name[self].shutit.handle_note_after, parameter[]] return[constant[True]]
keyword[def] identifier[login] ( identifier[self] , identifier[sendspec] ): literal[string] identifier[user] = identifier[sendspec] . identifier[user] identifier[command] = identifier[sendspec] . identifier[send] identifier[prompt_prefix] = identifier[sendspec] . identifier[prompt_prefix] identifier[shutit] = identifier[self] . identifier[shutit] keyword[if] identifier[isinstance] ( identifier[sendspec] . identifier[password] , identifier[str] ): identifier[shutit_global] . identifier[shutit_global_object] . identifier[secret_words_set] . identifier[add] ( identifier[sendspec] . identifier[password] ) identifier[r_id] = identifier[shutit_util] . identifier[random_id] () keyword[if] identifier[prompt_prefix] keyword[is] keyword[None] : identifier[prompt_prefix] = identifier[r_id] keyword[if] identifier[user] keyword[is] keyword[None] : identifier[user] = identifier[self] . identifier[whoami] () keyword[if] literal[string] keyword[not] keyword[in] identifier[command] : identifier[shutit] . identifier[log] ( literal[string] + identifier[user] + literal[string] , identifier[level] = identifier[logging] . identifier[WARNING] ) keyword[if] literal[string] keyword[in] identifier[user] : identifier[self] . identifier[shutit] . identifier[fail] ( literal[string] + identifier[user] + literal[string] ) keyword[if] identifier[self] . identifier[shutit] . identifier[build] [ literal[string] ]== literal[string] keyword[and] identifier[command] == literal[string] : identifier[command] = literal[string] keyword[if] identifier[command] == literal[string] keyword[or] identifier[command] == literal[string] keyword[or] identifier[command] == literal[string] : identifier[send] = identifier[command] + literal[string] + identifier[user] keyword[else] : identifier[send] = identifier[command] identifier[login_expect] = identifier[sendspec] . identifier[expect] keyword[or] identifier[shutit_global] . identifier[shutit_global_object] . identifier[base_prompt] identifier[general_expect] =[ identifier[login_expect] ] identifier[general_expect] = identifier[general_expect] +[ identifier[user] + literal[string] + literal[string] ] keyword[if] ( identifier[sendspec] . identifier[is_ssh] != keyword[None] keyword[and] identifier[sendspec] . identifier[is_ssh] ) keyword[or] identifier[command] . identifier[find] ( literal[string] )!=- literal[int] : identifier[shutit] . identifier[log] ( literal[string] + identifier[str] ( identifier[sendspec] . identifier[is_ssh] )+ literal[string] + identifier[command] , identifier[level] = identifier[logging] . identifier[DEBUG] ) keyword[if] identifier[user] + literal[string] keyword[in] identifier[general_expect] : identifier[general_expect] . identifier[remove] ( identifier[user] + literal[string] ) identifier[general_expect] . identifier[append] ( literal[string] ) identifier[send_dict] ={ literal[string] :[ literal[string] , keyword[False] ]} keyword[if] identifier[sendspec] . identifier[password] keyword[is] keyword[not] keyword[None] : identifier[send_dict] . identifier[update] ({ literal[string] :[ identifier[sendspec] . identifier[password] , keyword[True] ]}) identifier[send_dict] . identifier[update] ({ literal[string] :[ identifier[sendspec] . identifier[password] , keyword[True] ]}) keyword[else] : identifier[send_dict] ={ literal[string] :[ literal[string] , keyword[False] ]} keyword[if] identifier[sendspec] . identifier[password] keyword[is] keyword[not] keyword[None] : identifier[send_dict] . identifier[update] ({ literal[string] :[ identifier[sendspec] . identifier[password] , keyword[True] ]}) identifier[send_dict] . identifier[update] ({ literal[string] :[ identifier[sendspec] . identifier[password] , keyword[True] ]}) identifier[send_dict] . identifier[update] ({ identifier[user] + literal[string] :[ identifier[sendspec] . identifier[password] , keyword[True] ]}) keyword[if] identifier[user] == literal[string] keyword[and] identifier[command] == literal[string] : identifier[shutit] . identifier[log] ( literal[string] + identifier[user] + literal[string] , identifier[level] = identifier[logging] . identifier[WARNING] ) identifier[self] . identifier[shutit] . identifier[handle_note] ( identifier[sendspec] . identifier[note] , identifier[command] = identifier[command] + literal[string] + identifier[user] + literal[string] , identifier[training_input] = identifier[send] ) identifier[echo] = identifier[self] . identifier[shutit] . identifier[get_echo_override] ( identifier[sendspec] . identifier[echo] ) identifier[shutit] . identifier[log] ( literal[string] + identifier[user] , identifier[level] = identifier[logging] . identifier[DEBUG] ) identifier[shutit] . identifier[log] ( literal[string] + identifier[send] + literal[string] + identifier[user] , identifier[level] = identifier[logging] . identifier[DEBUG] ) identifier[shutit] . identifier[log] ( literal[string] + identifier[str] ( identifier[self] . identifier[login_stack] ), identifier[level] = identifier[logging] . identifier[DEBUG] ) identifier[check_sudo] = keyword[False] keyword[if] identifier[sendspec] . identifier[password] keyword[is] keyword[None] keyword[and] identifier[send] . identifier[strip] (). identifier[find] ( literal[string] )== literal[int] : identifier[check_sudo] = keyword[True] identifier[res] = identifier[self] . identifier[multisend] ( identifier[ShutItSendSpec] ( identifier[self] , identifier[send] = identifier[send] , identifier[send_dict] = identifier[send_dict] , identifier[expect] = identifier[general_expect] , identifier[check_exit] = keyword[False] , identifier[timeout] = identifier[sendspec] . identifier[timeout] , identifier[fail_on_empty_before] = keyword[False] , identifier[escape] = identifier[sendspec] . identifier[escape] , identifier[echo] = identifier[echo] , identifier[remove_on_match] = keyword[True] , identifier[nonewline] = identifier[sendspec] . identifier[nonewline] , identifier[check_sudo] = identifier[check_sudo] , identifier[loglevel] = identifier[sendspec] . identifier[loglevel] )) keyword[if] identifier[res] ==- literal[int] : keyword[assert] keyword[False] , identifier[shutit_util] . identifier[print_debug] () keyword[if] identifier[prompt_prefix] != keyword[None] : identifier[self] . identifier[setup_prompt] ( identifier[r_id] , identifier[prefix] = identifier[prompt_prefix] , identifier[capture_exit_code] = keyword[True] ) keyword[else] : identifier[self] . identifier[setup_prompt] ( identifier[r_id] , identifier[capture_exit_code] = keyword[True] ) identifier[self] . identifier[login_stack] . identifier[append] ( identifier[r_id] ) identifier[shutit] . identifier[log] ( literal[string] + identifier[str] ( identifier[self] . identifier[login_stack] ), identifier[level] = identifier[logging] . identifier[DEBUG] ) keyword[if] identifier[self] . identifier[send_and_get_output] ( literal[string] , identifier[loglevel] = identifier[logging] . identifier[DEBUG] , identifier[echo] = keyword[False] )!= literal[string] : keyword[if] identifier[sendspec] . identifier[fail_on_fail] : identifier[self] . identifier[shutit] . identifier[fail] ( literal[string] ) keyword[else] : keyword[return] keyword[False] keyword[if] identifier[sendspec] . identifier[go_home] : identifier[self] . identifier[send] ( identifier[ShutItSendSpec] ( identifier[self] , identifier[send] = literal[string] , identifier[check_exit] = keyword[False] , identifier[echo] = keyword[False] , identifier[ignore_background] = keyword[True] , identifier[loglevel] = identifier[sendspec] . identifier[loglevel] )) identifier[self] . identifier[shutit] . identifier[handle_note_after] ( identifier[note] = identifier[sendspec] . identifier[note] , identifier[training_input] = identifier[send] ) keyword[return] keyword[True]
def login(self, sendspec): """Logs the user in with the passed-in password and command. Tracks the login. If used, used logout to log out again. Assumes you are root when logging in, so no password required. If not, override the default command for multi-level logins. If passwords are required, see setup_prompt() and revert_prompt() @type param: see shutit_sendspec.ShutItSendSpec @type sendspec: shutit_sendspec.ShutItSendSpec """ user = sendspec.user command = sendspec.send prompt_prefix = sendspec.prompt_prefix shutit = self.shutit # We don't get the default expect here, as it's either passed in, or a base default regexp. if isinstance(sendspec.password, str): shutit_global.shutit_global_object.secret_words_set.add(sendspec.password) # depends on [control=['if'], data=[]] r_id = shutit_util.random_id() if prompt_prefix is None: prompt_prefix = r_id # depends on [control=['if'], data=['prompt_prefix']] # Be helpful - if this looks like a command that requires a user, then suggest user provides one. if user is None: user = self.whoami() if 'bash' not in command: shutit.log('No user supplied to login function, so retrieving who I am (' + user + '). You may want to override.', level=logging.WARNING) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['user']] if ' ' in user: self.shutit.fail('user has space in it - did you mean: login(command="' + user + '")?') # pragma: no cover # depends on [control=['if'], data=['user']] if self.shutit.build['delivery'] == 'bash' and command == 'su -': # We want to retain the current working directory command = 'su' # depends on [control=['if'], data=[]] # If this is a su-type command, add the user, else assume user is in the command. if command == 'su -' or command == 'su' or command == 'login': send = command + ' ' + user # depends on [control=['if'], data=[]] else: send = command login_expect = sendspec.expect or shutit_global.shutit_global_object.base_prompt # We don't fail on empty before as many login programs mess with the output. # In this special case of login we expect either the prompt, or 'user@' as this has been seen to work. general_expect = [login_expect] # Add in a match if we see user+ and then the login matches. Be careful not to match against 'user+@...password:' general_expect = general_expect + [user + '@.*' + '[#$]'] # If not an ssh login, then we can match against user + @sign because it won't clash with 'user@adasdas password:' if sendspec.is_ssh != None and sendspec.is_ssh or command.find('ssh ') != -1: shutit.log('Assumed to be an ssh command, is_ssh: ' + str(sendspec.is_ssh) + ', command: ' + command, level=logging.DEBUG) # If user@ already there, remove it, as it can conflict with password lines in ssh calls. if user + '@' in general_expect: general_expect.remove(user + '@') # depends on [control=['if'], data=['general_expect']] # Adding the space to avoid commands which embed eg $(whoami) or ${var} general_expect.append('.*[#$] ') # Don't match 'Last login:' or 'Last failed login:' send_dict = {'ontinue connecting': ['yes', False]} if sendspec.password is not None: send_dict.update({'assword:': [sendspec.password, True]}) send_dict.update({'[^dt] login:': [sendspec.password, True]}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: send_dict = {'ontinue connecting': ['yes', False]} if sendspec.password is not None: send_dict.update({'assword:': [sendspec.password, True]}) send_dict.update({'[^dt] login:': [sendspec.password, True]}) send_dict.update({user + '@': [sendspec.password, True]}) # depends on [control=['if'], data=[]] if user == 'bash' and command == 'su -': shutit.log('WARNING! user is bash - if you see problems below, did you mean: login(command="' + user + '")?', level=logging.WARNING) # depends on [control=['if'], data=[]] self.shutit.handle_note(sendspec.note, command=command + '\n\n[as user: "' + user + '"]', training_input=send) echo = self.shutit.get_echo_override(sendspec.echo) shutit.log('Logging in to new ShutIt environment.' + user, level=logging.DEBUG) shutit.log('Logging in with command: ' + send + ' as user: ' + user, level=logging.DEBUG) shutit.log('Login stack before login: ' + str(self.login_stack), level=logging.DEBUG) # check_sudo - set to false if the password has been supplied. check_sudo = False if sendspec.password is None and send.strip().find('sudo') == 0: check_sudo = True # depends on [control=['if'], data=[]] res = self.multisend(ShutItSendSpec(self, send=send, send_dict=send_dict, expect=general_expect, check_exit=False, timeout=sendspec.timeout, fail_on_empty_before=False, escape=sendspec.escape, echo=echo, remove_on_match=True, nonewline=sendspec.nonewline, check_sudo=check_sudo, loglevel=sendspec.loglevel)) if res == -1: # Should not get here as login should not be blocked. assert False, shutit_util.print_debug() # depends on [control=['if'], data=[]] # Setup prompt if prompt_prefix != None: self.setup_prompt(r_id, prefix=prompt_prefix, capture_exit_code=True) # depends on [control=['if'], data=['prompt_prefix']] else: self.setup_prompt(r_id, capture_exit_code=True) self.login_stack.append(r_id) shutit.log('Login stack after login: ' + str(self.login_stack), level=logging.DEBUG) if self.send_and_get_output(' echo $SHUTIT_EC && unset SHUTIT_EC', loglevel=logging.DEBUG, echo=False) != '0': # TODO: remove just-added login stack item (since we failed to log in successfully)? if sendspec.fail_on_fail: # pragma: no cover self.shutit.fail('Login failure! You may want to re-run shutit with --echo or -l debug and scroll up to see what the problem was.') # depends on [control=['if'], data=[]] else: return False # depends on [control=['if'], data=[]] if sendspec.go_home: self.send(ShutItSendSpec(self, send='cd', check_exit=False, echo=False, ignore_background=True, loglevel=sendspec.loglevel)) # depends on [control=['if'], data=[]] self.shutit.handle_note_after(note=sendspec.note, training_input=send) return True
def creator(self, pathobj): """ Returns file creator This makes little sense for Artifactory, but to be consistent with pathlib, we return created_by instead, if available """ stat = self.stat(pathobj) if not stat.is_dir: return stat.created_by else: return 'nobody'
def function[creator, parameter[self, pathobj]]: constant[ Returns file creator This makes little sense for Artifactory, but to be consistent with pathlib, we return created_by instead, if available ] variable[stat] assign[=] call[name[self].stat, parameter[name[pathobj]]] if <ast.UnaryOp object at 0x7da18eb54970> begin[:] return[name[stat].created_by]
keyword[def] identifier[creator] ( identifier[self] , identifier[pathobj] ): literal[string] identifier[stat] = identifier[self] . identifier[stat] ( identifier[pathobj] ) keyword[if] keyword[not] identifier[stat] . identifier[is_dir] : keyword[return] identifier[stat] . identifier[created_by] keyword[else] : keyword[return] literal[string]
def creator(self, pathobj): """ Returns file creator This makes little sense for Artifactory, but to be consistent with pathlib, we return created_by instead, if available """ stat = self.stat(pathobj) if not stat.is_dir: return stat.created_by # depends on [control=['if'], data=[]] else: return 'nobody'
def get_records(self, ids): """Return records by their identifiers. :param ids: A list of record identifier. :returns: A list of records. """ return self.query(Ids(values=[str(id_) for id_ in ids]))
def function[get_records, parameter[self, ids]]: constant[Return records by their identifiers. :param ids: A list of record identifier. :returns: A list of records. ] return[call[name[self].query, parameter[call[name[Ids], parameter[]]]]]
keyword[def] identifier[get_records] ( identifier[self] , identifier[ids] ): literal[string] keyword[return] identifier[self] . identifier[query] ( identifier[Ids] ( identifier[values] =[ identifier[str] ( identifier[id_] ) keyword[for] identifier[id_] keyword[in] identifier[ids] ]))
def get_records(self, ids): """Return records by their identifiers. :param ids: A list of record identifier. :returns: A list of records. """ return self.query(Ids(values=[str(id_) for id_ in ids]))
def update(self, data): """Updates the object information based on live data, if there were any changes made. Any changes will be automatically applied to the object, but will not be automatically persisted. You must manually call `db.session.add(instance)` on the object. Args: data (:obj:): AWS API Resource object fetched from AWS API Returns: `bool` """ updated = False if 'missing_tags' in data: updated |= self.set_property('missing_tags', data['missing_tags']) if 'notes' in data: updated |= self.set_property('notes', data['notes']) if 'state' in data: updated |= self.set_property('state', data['state']) if 'last_alert' in data: updated |= self.set_property('last_alert', data['last_alert']) if updated: now = datetime.now() self.set_property('last_change', now) return updated
def function[update, parameter[self, data]]: constant[Updates the object information based on live data, if there were any changes made. Any changes will be automatically applied to the object, but will not be automatically persisted. You must manually call `db.session.add(instance)` on the object. Args: data (:obj:): AWS API Resource object fetched from AWS API Returns: `bool` ] variable[updated] assign[=] constant[False] if compare[constant[missing_tags] in name[data]] begin[:] <ast.AugAssign object at 0x7da1b2042860> if compare[constant[notes] in name[data]] begin[:] <ast.AugAssign object at 0x7da1b2040fd0> if compare[constant[state] in name[data]] begin[:] <ast.AugAssign object at 0x7da1b2040820> if compare[constant[last_alert] in name[data]] begin[:] <ast.AugAssign object at 0x7da1b2043b80> if name[updated] begin[:] variable[now] assign[=] call[name[datetime].now, parameter[]] call[name[self].set_property, parameter[constant[last_change], name[now]]] return[name[updated]]
keyword[def] identifier[update] ( identifier[self] , identifier[data] ): literal[string] identifier[updated] = keyword[False] keyword[if] literal[string] keyword[in] identifier[data] : identifier[updated] |= identifier[self] . identifier[set_property] ( literal[string] , identifier[data] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[data] : identifier[updated] |= identifier[self] . identifier[set_property] ( literal[string] , identifier[data] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[data] : identifier[updated] |= identifier[self] . identifier[set_property] ( literal[string] , identifier[data] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[data] : identifier[updated] |= identifier[self] . identifier[set_property] ( literal[string] , identifier[data] [ literal[string] ]) keyword[if] identifier[updated] : identifier[now] = identifier[datetime] . identifier[now] () identifier[self] . identifier[set_property] ( literal[string] , identifier[now] ) keyword[return] identifier[updated]
def update(self, data): """Updates the object information based on live data, if there were any changes made. Any changes will be automatically applied to the object, but will not be automatically persisted. You must manually call `db.session.add(instance)` on the object. Args: data (:obj:): AWS API Resource object fetched from AWS API Returns: `bool` """ updated = False if 'missing_tags' in data: updated |= self.set_property('missing_tags', data['missing_tags']) # depends on [control=['if'], data=['data']] if 'notes' in data: updated |= self.set_property('notes', data['notes']) # depends on [control=['if'], data=['data']] if 'state' in data: updated |= self.set_property('state', data['state']) # depends on [control=['if'], data=['data']] if 'last_alert' in data: updated |= self.set_property('last_alert', data['last_alert']) # depends on [control=['if'], data=['data']] if updated: now = datetime.now() self.set_property('last_change', now) # depends on [control=['if'], data=[]] return updated
def QA_fetch_lhb(date, db=DATABASE): '获取某一天龙虎榜数据' try: collections = db.lhb return pd.DataFrame([item for item in collections.find( {'date': date}, {"_id": 0})]).set_index('code', drop=False).sort_index() except Exception as e: raise e
def function[QA_fetch_lhb, parameter[date, db]]: constant[获取某一天龙虎榜数据] <ast.Try object at 0x7da18bc72440>
keyword[def] identifier[QA_fetch_lhb] ( identifier[date] , identifier[db] = identifier[DATABASE] ): literal[string] keyword[try] : identifier[collections] = identifier[db] . identifier[lhb] keyword[return] identifier[pd] . identifier[DataFrame] ([ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collections] . identifier[find] ( { literal[string] : identifier[date] },{ literal[string] : literal[int] })]). identifier[set_index] ( literal[string] , identifier[drop] = keyword[False] ). identifier[sort_index] () keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[e]
def QA_fetch_lhb(date, db=DATABASE): """获取某一天龙虎榜数据""" try: collections = db.lhb return pd.DataFrame([item for item in collections.find({'date': date}, {'_id': 0})]).set_index('code', drop=False).sort_index() # depends on [control=['try'], data=[]] except Exception as e: raise e # depends on [control=['except'], data=['e']]
def buildPolyData(vertices, faces=None, indexOffset=0): """ Build a ``vtkPolyData`` object from a list of vertices where faces represents the connectivity of the polygonal mesh. E.g. : - ``vertices=[[x1,y1,z1],[x2,y2,z2], ...]`` - ``faces=[[0,1,2], [1,2,3], ...]`` Use ``indexOffset=1`` if face numbering starts from 1 instead of 0. .. hint:: |buildpolydata| |buildpolydata.py|_ """ if not utils.isSequence(vertices): # assume a dolfin.Mesh faces = vertices.cells() vertices = vertices.coordinates() sourcePoints = vtk.vtkPoints() sourcePolygons = vtk.vtkCellArray() sourceVertices = vtk.vtkCellArray() isgt2 = len(vertices[0]) > 2 is1 = len(vertices[0]) == 1 for pt in vertices: if isgt2: aid = sourcePoints.InsertNextPoint(pt[0], pt[1], pt[2]) elif is1: aid = sourcePoints.InsertNextPoint(pt[0], 0, 0) else: aid = sourcePoints.InsertNextPoint(pt[0], pt[1], 0) if faces is None: sourceVertices.InsertNextCell(1) sourceVertices.InsertCellPoint(aid) if faces is not None: showbar = False if len(faces) > 25000: showbar = True pb = ProgressBar(0, len(faces), ETA=False) for f in faces: n = len(f) if n == 4: ele0 = vtk.vtkTriangle() ele1 = vtk.vtkTriangle() ele2 = vtk.vtkTriangle() ele3 = vtk.vtkTriangle() f0, f1, f2, f3 = f if indexOffset: # for speed.. ele0.GetPointIds().SetId(0, f0 - indexOffset) ele0.GetPointIds().SetId(1, f1 - indexOffset) ele0.GetPointIds().SetId(2, f2 - indexOffset) ele1.GetPointIds().SetId(0, f0 - indexOffset) ele1.GetPointIds().SetId(1, f1 - indexOffset) ele1.GetPointIds().SetId(2, f3 - indexOffset) ele2.GetPointIds().SetId(0, f1 - indexOffset) ele2.GetPointIds().SetId(1, f2 - indexOffset) ele2.GetPointIds().SetId(2, f3 - indexOffset) ele3.GetPointIds().SetId(0, f2 - indexOffset) ele3.GetPointIds().SetId(1, f3 - indexOffset) ele3.GetPointIds().SetId(2, f0 - indexOffset) else: ele0.GetPointIds().SetId(0, f0) ele0.GetPointIds().SetId(1, f1) ele0.GetPointIds().SetId(2, f2) ele1.GetPointIds().SetId(0, f0) ele1.GetPointIds().SetId(1, f1) ele1.GetPointIds().SetId(2, f3) ele2.GetPointIds().SetId(0, f1) ele2.GetPointIds().SetId(1, f2) ele2.GetPointIds().SetId(2, f3) ele3.GetPointIds().SetId(0, f2) ele3.GetPointIds().SetId(1, f3) ele3.GetPointIds().SetId(2, f0) sourcePolygons.InsertNextCell(ele0) sourcePolygons.InsertNextCell(ele1) sourcePolygons.InsertNextCell(ele2) sourcePolygons.InsertNextCell(ele3) elif n == 3: ele = vtk.vtkTriangle() for i in range(3): ele.GetPointIds().SetId(i, f[i] - indexOffset) sourcePolygons.InsertNextCell(ele) else: ele = vtk.vtkPolygon() ele.GetPointIds().SetNumberOfIds(n) for i in range(n): ele.GetPointIds().SetId(i, f[i] - indexOffset) sourcePolygons.InsertNextCell(ele) if showbar: pb.print("converting mesh..") poly = vtk.vtkPolyData() poly.SetPoints(sourcePoints) if faces is None: poly.SetVerts(sourceVertices) else: poly.SetPolys(sourcePolygons) return poly
def function[buildPolyData, parameter[vertices, faces, indexOffset]]: constant[ Build a ``vtkPolyData`` object from a list of vertices where faces represents the connectivity of the polygonal mesh. E.g. : - ``vertices=[[x1,y1,z1],[x2,y2,z2], ...]`` - ``faces=[[0,1,2], [1,2,3], ...]`` Use ``indexOffset=1`` if face numbering starts from 1 instead of 0. .. hint:: |buildpolydata| |buildpolydata.py|_ ] if <ast.UnaryOp object at 0x7da18c4cf310> begin[:] variable[faces] assign[=] call[name[vertices].cells, parameter[]] variable[vertices] assign[=] call[name[vertices].coordinates, parameter[]] variable[sourcePoints] assign[=] call[name[vtk].vtkPoints, parameter[]] variable[sourcePolygons] assign[=] call[name[vtk].vtkCellArray, parameter[]] variable[sourceVertices] assign[=] call[name[vtk].vtkCellArray, parameter[]] variable[isgt2] assign[=] compare[call[name[len], parameter[call[name[vertices]][constant[0]]]] greater[>] constant[2]] variable[is1] assign[=] compare[call[name[len], parameter[call[name[vertices]][constant[0]]]] equal[==] constant[1]] for taget[name[pt]] in starred[name[vertices]] begin[:] if name[isgt2] begin[:] variable[aid] assign[=] call[name[sourcePoints].InsertNextPoint, parameter[call[name[pt]][constant[0]], call[name[pt]][constant[1]], call[name[pt]][constant[2]]]] if compare[name[faces] is constant[None]] begin[:] call[name[sourceVertices].InsertNextCell, parameter[constant[1]]] call[name[sourceVertices].InsertCellPoint, parameter[name[aid]]] if compare[name[faces] is_not constant[None]] begin[:] variable[showbar] assign[=] constant[False] if compare[call[name[len], parameter[name[faces]]] greater[>] constant[25000]] begin[:] variable[showbar] assign[=] constant[True] variable[pb] assign[=] call[name[ProgressBar], parameter[constant[0], call[name[len], parameter[name[faces]]]]] for taget[name[f]] in starred[name[faces]] begin[:] variable[n] assign[=] call[name[len], parameter[name[f]]] if compare[name[n] equal[==] constant[4]] begin[:] variable[ele0] assign[=] call[name[vtk].vtkTriangle, parameter[]] variable[ele1] assign[=] call[name[vtk].vtkTriangle, parameter[]] variable[ele2] assign[=] call[name[vtk].vtkTriangle, parameter[]] variable[ele3] assign[=] call[name[vtk].vtkTriangle, parameter[]] <ast.Tuple object at 0x7da18dc9ba90> assign[=] name[f] if name[indexOffset] begin[:] call[call[name[ele0].GetPointIds, parameter[]].SetId, parameter[constant[0], binary_operation[name[f0] - name[indexOffset]]]] call[call[name[ele0].GetPointIds, parameter[]].SetId, parameter[constant[1], binary_operation[name[f1] - name[indexOffset]]]] call[call[name[ele0].GetPointIds, parameter[]].SetId, parameter[constant[2], binary_operation[name[f2] - name[indexOffset]]]] call[call[name[ele1].GetPointIds, parameter[]].SetId, parameter[constant[0], binary_operation[name[f0] - name[indexOffset]]]] call[call[name[ele1].GetPointIds, parameter[]].SetId, parameter[constant[1], binary_operation[name[f1] - name[indexOffset]]]] call[call[name[ele1].GetPointIds, parameter[]].SetId, parameter[constant[2], binary_operation[name[f3] - name[indexOffset]]]] call[call[name[ele2].GetPointIds, parameter[]].SetId, parameter[constant[0], binary_operation[name[f1] - name[indexOffset]]]] call[call[name[ele2].GetPointIds, parameter[]].SetId, parameter[constant[1], binary_operation[name[f2] - name[indexOffset]]]] call[call[name[ele2].GetPointIds, parameter[]].SetId, parameter[constant[2], binary_operation[name[f3] - name[indexOffset]]]] call[call[name[ele3].GetPointIds, parameter[]].SetId, parameter[constant[0], binary_operation[name[f2] - name[indexOffset]]]] call[call[name[ele3].GetPointIds, parameter[]].SetId, parameter[constant[1], binary_operation[name[f3] - name[indexOffset]]]] call[call[name[ele3].GetPointIds, parameter[]].SetId, parameter[constant[2], binary_operation[name[f0] - name[indexOffset]]]] call[name[sourcePolygons].InsertNextCell, parameter[name[ele0]]] call[name[sourcePolygons].InsertNextCell, parameter[name[ele1]]] call[name[sourcePolygons].InsertNextCell, parameter[name[ele2]]] call[name[sourcePolygons].InsertNextCell, parameter[name[ele3]]] if name[showbar] begin[:] call[name[pb].print, parameter[constant[converting mesh..]]] variable[poly] assign[=] call[name[vtk].vtkPolyData, parameter[]] call[name[poly].SetPoints, parameter[name[sourcePoints]]] if compare[name[faces] is constant[None]] begin[:] call[name[poly].SetVerts, parameter[name[sourceVertices]]] return[name[poly]]
keyword[def] identifier[buildPolyData] ( identifier[vertices] , identifier[faces] = keyword[None] , identifier[indexOffset] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[utils] . identifier[isSequence] ( identifier[vertices] ): identifier[faces] = identifier[vertices] . identifier[cells] () identifier[vertices] = identifier[vertices] . identifier[coordinates] () identifier[sourcePoints] = identifier[vtk] . identifier[vtkPoints] () identifier[sourcePolygons] = identifier[vtk] . identifier[vtkCellArray] () identifier[sourceVertices] = identifier[vtk] . identifier[vtkCellArray] () identifier[isgt2] = identifier[len] ( identifier[vertices] [ literal[int] ])> literal[int] identifier[is1] = identifier[len] ( identifier[vertices] [ literal[int] ])== literal[int] keyword[for] identifier[pt] keyword[in] identifier[vertices] : keyword[if] identifier[isgt2] : identifier[aid] = identifier[sourcePoints] . identifier[InsertNextPoint] ( identifier[pt] [ literal[int] ], identifier[pt] [ literal[int] ], identifier[pt] [ literal[int] ]) keyword[elif] identifier[is1] : identifier[aid] = identifier[sourcePoints] . identifier[InsertNextPoint] ( identifier[pt] [ literal[int] ], literal[int] , literal[int] ) keyword[else] : identifier[aid] = identifier[sourcePoints] . identifier[InsertNextPoint] ( identifier[pt] [ literal[int] ], identifier[pt] [ literal[int] ], literal[int] ) keyword[if] identifier[faces] keyword[is] keyword[None] : identifier[sourceVertices] . identifier[InsertNextCell] ( literal[int] ) identifier[sourceVertices] . identifier[InsertCellPoint] ( identifier[aid] ) keyword[if] identifier[faces] keyword[is] keyword[not] keyword[None] : identifier[showbar] = keyword[False] keyword[if] identifier[len] ( identifier[faces] )> literal[int] : identifier[showbar] = keyword[True] identifier[pb] = identifier[ProgressBar] ( literal[int] , identifier[len] ( identifier[faces] ), identifier[ETA] = keyword[False] ) keyword[for] identifier[f] keyword[in] identifier[faces] : identifier[n] = identifier[len] ( identifier[f] ) keyword[if] identifier[n] == literal[int] : identifier[ele0] = identifier[vtk] . identifier[vtkTriangle] () identifier[ele1] = identifier[vtk] . identifier[vtkTriangle] () identifier[ele2] = identifier[vtk] . identifier[vtkTriangle] () identifier[ele3] = identifier[vtk] . identifier[vtkTriangle] () identifier[f0] , identifier[f1] , identifier[f2] , identifier[f3] = identifier[f] keyword[if] identifier[indexOffset] : identifier[ele0] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f0] - identifier[indexOffset] ) identifier[ele0] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f1] - identifier[indexOffset] ) identifier[ele0] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f2] - identifier[indexOffset] ) identifier[ele1] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f0] - identifier[indexOffset] ) identifier[ele1] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f1] - identifier[indexOffset] ) identifier[ele1] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f3] - identifier[indexOffset] ) identifier[ele2] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f1] - identifier[indexOffset] ) identifier[ele2] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f2] - identifier[indexOffset] ) identifier[ele2] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f3] - identifier[indexOffset] ) identifier[ele3] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f2] - identifier[indexOffset] ) identifier[ele3] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f3] - identifier[indexOffset] ) identifier[ele3] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f0] - identifier[indexOffset] ) keyword[else] : identifier[ele0] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f0] ) identifier[ele0] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f1] ) identifier[ele0] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f2] ) identifier[ele1] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f0] ) identifier[ele1] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f1] ) identifier[ele1] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f3] ) identifier[ele2] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f1] ) identifier[ele2] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f2] ) identifier[ele2] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f3] ) identifier[ele3] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f2] ) identifier[ele3] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f3] ) identifier[ele3] . identifier[GetPointIds] (). identifier[SetId] ( literal[int] , identifier[f0] ) identifier[sourcePolygons] . identifier[InsertNextCell] ( identifier[ele0] ) identifier[sourcePolygons] . identifier[InsertNextCell] ( identifier[ele1] ) identifier[sourcePolygons] . identifier[InsertNextCell] ( identifier[ele2] ) identifier[sourcePolygons] . identifier[InsertNextCell] ( identifier[ele3] ) keyword[elif] identifier[n] == literal[int] : identifier[ele] = identifier[vtk] . identifier[vtkTriangle] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[ele] . identifier[GetPointIds] (). identifier[SetId] ( identifier[i] , identifier[f] [ identifier[i] ]- identifier[indexOffset] ) identifier[sourcePolygons] . identifier[InsertNextCell] ( identifier[ele] ) keyword[else] : identifier[ele] = identifier[vtk] . identifier[vtkPolygon] () identifier[ele] . identifier[GetPointIds] (). identifier[SetNumberOfIds] ( identifier[n] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ): identifier[ele] . identifier[GetPointIds] (). identifier[SetId] ( identifier[i] , identifier[f] [ identifier[i] ]- identifier[indexOffset] ) identifier[sourcePolygons] . identifier[InsertNextCell] ( identifier[ele] ) keyword[if] identifier[showbar] : identifier[pb] . identifier[print] ( literal[string] ) identifier[poly] = identifier[vtk] . identifier[vtkPolyData] () identifier[poly] . identifier[SetPoints] ( identifier[sourcePoints] ) keyword[if] identifier[faces] keyword[is] keyword[None] : identifier[poly] . identifier[SetVerts] ( identifier[sourceVertices] ) keyword[else] : identifier[poly] . identifier[SetPolys] ( identifier[sourcePolygons] ) keyword[return] identifier[poly]
def buildPolyData(vertices, faces=None, indexOffset=0): """ Build a ``vtkPolyData`` object from a list of vertices where faces represents the connectivity of the polygonal mesh. E.g. : - ``vertices=[[x1,y1,z1],[x2,y2,z2], ...]`` - ``faces=[[0,1,2], [1,2,3], ...]`` Use ``indexOffset=1`` if face numbering starts from 1 instead of 0. .. hint:: |buildpolydata| |buildpolydata.py|_ """ if not utils.isSequence(vertices): # assume a dolfin.Mesh faces = vertices.cells() vertices = vertices.coordinates() # depends on [control=['if'], data=[]] sourcePoints = vtk.vtkPoints() sourcePolygons = vtk.vtkCellArray() sourceVertices = vtk.vtkCellArray() isgt2 = len(vertices[0]) > 2 is1 = len(vertices[0]) == 1 for pt in vertices: if isgt2: aid = sourcePoints.InsertNextPoint(pt[0], pt[1], pt[2]) # depends on [control=['if'], data=[]] elif is1: aid = sourcePoints.InsertNextPoint(pt[0], 0, 0) # depends on [control=['if'], data=[]] else: aid = sourcePoints.InsertNextPoint(pt[0], pt[1], 0) if faces is None: sourceVertices.InsertNextCell(1) sourceVertices.InsertCellPoint(aid) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pt']] if faces is not None: showbar = False if len(faces) > 25000: showbar = True pb = ProgressBar(0, len(faces), ETA=False) # depends on [control=['if'], data=[]] for f in faces: n = len(f) if n == 4: ele0 = vtk.vtkTriangle() ele1 = vtk.vtkTriangle() ele2 = vtk.vtkTriangle() ele3 = vtk.vtkTriangle() (f0, f1, f2, f3) = f if indexOffset: # for speed.. ele0.GetPointIds().SetId(0, f0 - indexOffset) ele0.GetPointIds().SetId(1, f1 - indexOffset) ele0.GetPointIds().SetId(2, f2 - indexOffset) ele1.GetPointIds().SetId(0, f0 - indexOffset) ele1.GetPointIds().SetId(1, f1 - indexOffset) ele1.GetPointIds().SetId(2, f3 - indexOffset) ele2.GetPointIds().SetId(0, f1 - indexOffset) ele2.GetPointIds().SetId(1, f2 - indexOffset) ele2.GetPointIds().SetId(2, f3 - indexOffset) ele3.GetPointIds().SetId(0, f2 - indexOffset) ele3.GetPointIds().SetId(1, f3 - indexOffset) ele3.GetPointIds().SetId(2, f0 - indexOffset) # depends on [control=['if'], data=[]] else: ele0.GetPointIds().SetId(0, f0) ele0.GetPointIds().SetId(1, f1) ele0.GetPointIds().SetId(2, f2) ele1.GetPointIds().SetId(0, f0) ele1.GetPointIds().SetId(1, f1) ele1.GetPointIds().SetId(2, f3) ele2.GetPointIds().SetId(0, f1) ele2.GetPointIds().SetId(1, f2) ele2.GetPointIds().SetId(2, f3) ele3.GetPointIds().SetId(0, f2) ele3.GetPointIds().SetId(1, f3) ele3.GetPointIds().SetId(2, f0) sourcePolygons.InsertNextCell(ele0) sourcePolygons.InsertNextCell(ele1) sourcePolygons.InsertNextCell(ele2) sourcePolygons.InsertNextCell(ele3) # depends on [control=['if'], data=[]] elif n == 3: ele = vtk.vtkTriangle() for i in range(3): ele.GetPointIds().SetId(i, f[i] - indexOffset) # depends on [control=['for'], data=['i']] sourcePolygons.InsertNextCell(ele) # depends on [control=['if'], data=[]] else: ele = vtk.vtkPolygon() ele.GetPointIds().SetNumberOfIds(n) for i in range(n): ele.GetPointIds().SetId(i, f[i] - indexOffset) # depends on [control=['for'], data=['i']] sourcePolygons.InsertNextCell(ele) if showbar: pb.print('converting mesh..') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=['faces']] poly = vtk.vtkPolyData() poly.SetPoints(sourcePoints) if faces is None: poly.SetVerts(sourceVertices) # depends on [control=['if'], data=[]] else: poly.SetPolys(sourcePolygons) return poly
def url(self): '''url of the item Notes ----- if remote-adderes was given, then that is used as the base ''' path = '/web/itemdetails.html?id={}'.format(self.id) return self.connector.get_url(path, attach_api_key=False)
def function[url, parameter[self]]: constant[url of the item Notes ----- if remote-adderes was given, then that is used as the base ] variable[path] assign[=] call[constant[/web/itemdetails.html?id={}].format, parameter[name[self].id]] return[call[name[self].connector.get_url, parameter[name[path]]]]
keyword[def] identifier[url] ( identifier[self] ): literal[string] identifier[path] = literal[string] . identifier[format] ( identifier[self] . identifier[id] ) keyword[return] identifier[self] . identifier[connector] . identifier[get_url] ( identifier[path] , identifier[attach_api_key] = keyword[False] )
def url(self): """url of the item Notes ----- if remote-adderes was given, then that is used as the base """ path = '/web/itemdetails.html?id={}'.format(self.id) return self.connector.get_url(path, attach_api_key=False)
def get_values(self, lst, list_columns): """ Get Values: formats values for list template. returns [{'col_name':'col_value',....},{'col_name':'col_value',....}] :param lst: The list of item objects from query :param list_columns: The list of columns to include """ for item in lst: retdict = {} for col in list_columns: retdict[col] = self._get_attr_value(item, col) yield retdict
def function[get_values, parameter[self, lst, list_columns]]: constant[ Get Values: formats values for list template. returns [{'col_name':'col_value',....},{'col_name':'col_value',....}] :param lst: The list of item objects from query :param list_columns: The list of columns to include ] for taget[name[item]] in starred[name[lst]] begin[:] variable[retdict] assign[=] dictionary[[], []] for taget[name[col]] in starred[name[list_columns]] begin[:] call[name[retdict]][name[col]] assign[=] call[name[self]._get_attr_value, parameter[name[item], name[col]]] <ast.Yield object at 0x7da18f09fc10>
keyword[def] identifier[get_values] ( identifier[self] , identifier[lst] , identifier[list_columns] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[lst] : identifier[retdict] ={} keyword[for] identifier[col] keyword[in] identifier[list_columns] : identifier[retdict] [ identifier[col] ]= identifier[self] . identifier[_get_attr_value] ( identifier[item] , identifier[col] ) keyword[yield] identifier[retdict]
def get_values(self, lst, list_columns): """ Get Values: formats values for list template. returns [{'col_name':'col_value',....},{'col_name':'col_value',....}] :param lst: The list of item objects from query :param list_columns: The list of columns to include """ for item in lst: retdict = {} for col in list_columns: retdict[col] = self._get_attr_value(item, col) # depends on [control=['for'], data=['col']] yield retdict # depends on [control=['for'], data=['item']]
def parse_region(region): """ Parse region of type chr1:10-20 or chr1:10-20:+ Parameters: ----------- region : str Region of type chr1:10-20 or chr1:10-20:+. Returns ------- groups : tuple Tuple of groups from regex e.g. (chr1, 10, 20) or (chr1, 10, 20, +). """ m = R_REGEX_STRAND.search(region) if not m: m = R_REGEX.search(region) if m: groups = m.groups() return groups else: return None
def function[parse_region, parameter[region]]: constant[ Parse region of type chr1:10-20 or chr1:10-20:+ Parameters: ----------- region : str Region of type chr1:10-20 or chr1:10-20:+. Returns ------- groups : tuple Tuple of groups from regex e.g. (chr1, 10, 20) or (chr1, 10, 20, +). ] variable[m] assign[=] call[name[R_REGEX_STRAND].search, parameter[name[region]]] if <ast.UnaryOp object at 0x7da1b1616350> begin[:] variable[m] assign[=] call[name[R_REGEX].search, parameter[name[region]]] if name[m] begin[:] variable[groups] assign[=] call[name[m].groups, parameter[]] return[name[groups]]
keyword[def] identifier[parse_region] ( identifier[region] ): literal[string] identifier[m] = identifier[R_REGEX_STRAND] . identifier[search] ( identifier[region] ) keyword[if] keyword[not] identifier[m] : identifier[m] = identifier[R_REGEX] . identifier[search] ( identifier[region] ) keyword[if] identifier[m] : identifier[groups] = identifier[m] . identifier[groups] () keyword[return] identifier[groups] keyword[else] : keyword[return] keyword[None]
def parse_region(region): """ Parse region of type chr1:10-20 or chr1:10-20:+ Parameters: ----------- region : str Region of type chr1:10-20 or chr1:10-20:+. Returns ------- groups : tuple Tuple of groups from regex e.g. (chr1, 10, 20) or (chr1, 10, 20, +). """ m = R_REGEX_STRAND.search(region) if not m: m = R_REGEX.search(region) # depends on [control=['if'], data=[]] if m: groups = m.groups() return groups # depends on [control=['if'], data=[]] else: return None
def start_scan(self, scan_id): """ Starts the scan identified by the scan_id.s """ requests.post(self.url + 'scans/{}/launch'.format(scan_id), verify=False, headers=self.headers)
def function[start_scan, parameter[self, scan_id]]: constant[ Starts the scan identified by the scan_id.s ] call[name[requests].post, parameter[binary_operation[name[self].url + call[constant[scans/{}/launch].format, parameter[name[scan_id]]]]]]
keyword[def] identifier[start_scan] ( identifier[self] , identifier[scan_id] ): literal[string] identifier[requests] . identifier[post] ( identifier[self] . identifier[url] + literal[string] . identifier[format] ( identifier[scan_id] ), identifier[verify] = keyword[False] , identifier[headers] = identifier[self] . identifier[headers] )
def start_scan(self, scan_id): """ Starts the scan identified by the scan_id.s """ requests.post(self.url + 'scans/{}/launch'.format(scan_id), verify=False, headers=self.headers)
def calculate_leaf_paths(self): """Build map of reverse xrefs then traverse backwards marking path to leaf for all leaves. """ reverse_xref = {} leaves = set() for v in self.value.values(): if v.leaf: leaves.add(v) for xref in v.value_xref: reverse_xref.setdefault(xref, []).append(v.ident) for leaf in leaves: self.calculate_leaf_path(leaf, reverse_xref)
def function[calculate_leaf_paths, parameter[self]]: constant[Build map of reverse xrefs then traverse backwards marking path to leaf for all leaves. ] variable[reverse_xref] assign[=] dictionary[[], []] variable[leaves] assign[=] call[name[set], parameter[]] for taget[name[v]] in starred[call[name[self].value.values, parameter[]]] begin[:] if name[v].leaf begin[:] call[name[leaves].add, parameter[name[v]]] for taget[name[xref]] in starred[name[v].value_xref] begin[:] call[call[name[reverse_xref].setdefault, parameter[name[xref], list[[]]]].append, parameter[name[v].ident]] for taget[name[leaf]] in starred[name[leaves]] begin[:] call[name[self].calculate_leaf_path, parameter[name[leaf], name[reverse_xref]]]
keyword[def] identifier[calculate_leaf_paths] ( identifier[self] ): literal[string] identifier[reverse_xref] ={} identifier[leaves] = identifier[set] () keyword[for] identifier[v] keyword[in] identifier[self] . identifier[value] . identifier[values] (): keyword[if] identifier[v] . identifier[leaf] : identifier[leaves] . identifier[add] ( identifier[v] ) keyword[for] identifier[xref] keyword[in] identifier[v] . identifier[value_xref] : identifier[reverse_xref] . identifier[setdefault] ( identifier[xref] ,[]). identifier[append] ( identifier[v] . identifier[ident] ) keyword[for] identifier[leaf] keyword[in] identifier[leaves] : identifier[self] . identifier[calculate_leaf_path] ( identifier[leaf] , identifier[reverse_xref] )
def calculate_leaf_paths(self): """Build map of reverse xrefs then traverse backwards marking path to leaf for all leaves. """ reverse_xref = {} leaves = set() for v in self.value.values(): if v.leaf: leaves.add(v) # depends on [control=['if'], data=[]] for xref in v.value_xref: reverse_xref.setdefault(xref, []).append(v.ident) # depends on [control=['for'], data=['xref']] # depends on [control=['for'], data=['v']] for leaf in leaves: self.calculate_leaf_path(leaf, reverse_xref) # depends on [control=['for'], data=['leaf']]
def request(self, method, uri, params=None, data=None, headers=None, auth=None, timeout=None, allow_redirects=False): """ Make an HTTP request. """ url = self.relative_uri(uri) return self.domain.request( method, url, params=params, data=data, headers=headers, auth=auth, timeout=timeout, allow_redirects=allow_redirects )
def function[request, parameter[self, method, uri, params, data, headers, auth, timeout, allow_redirects]]: constant[ Make an HTTP request. ] variable[url] assign[=] call[name[self].relative_uri, parameter[name[uri]]] return[call[name[self].domain.request, parameter[name[method], name[url]]]]
keyword[def] identifier[request] ( identifier[self] , identifier[method] , identifier[uri] , identifier[params] = keyword[None] , identifier[data] = keyword[None] , identifier[headers] = keyword[None] , identifier[auth] = keyword[None] , identifier[timeout] = keyword[None] , identifier[allow_redirects] = keyword[False] ): literal[string] identifier[url] = identifier[self] . identifier[relative_uri] ( identifier[uri] ) keyword[return] identifier[self] . identifier[domain] . identifier[request] ( identifier[method] , identifier[url] , identifier[params] = identifier[params] , identifier[data] = identifier[data] , identifier[headers] = identifier[headers] , identifier[auth] = identifier[auth] , identifier[timeout] = identifier[timeout] , identifier[allow_redirects] = identifier[allow_redirects] )
def request(self, method, uri, params=None, data=None, headers=None, auth=None, timeout=None, allow_redirects=False): """ Make an HTTP request. """ url = self.relative_uri(uri) return self.domain.request(method, url, params=params, data=data, headers=headers, auth=auth, timeout=timeout, allow_redirects=allow_redirects)
def evaluate(data_eval, model, nsp_loss, mlm_loss, vocab_size, ctx, log_interval, dtype): """Evaluation function.""" mlm_metric = MaskedAccuracy() nsp_metric = MaskedAccuracy() mlm_metric.reset() nsp_metric.reset() eval_begin_time = time.time() begin_time = time.time() step_num = 0 running_mlm_loss = running_nsp_loss = 0 total_mlm_loss = total_nsp_loss = 0 running_num_tks = 0 for _, dataloader in enumerate(data_eval): for _, data_batch in enumerate(dataloader): step_num += 1 data_list = split_and_load(data_batch, ctx) loss_list = [] ns_label_list, ns_pred_list = [], [] mask_label_list, mask_pred_list, mask_weight_list = [], [], [] for data in data_list: out = forward(data, model, mlm_loss, nsp_loss, vocab_size, dtype) (ls, next_sentence_label, classified, masked_id, decoded, masked_weight, ls1, ls2, valid_length) = out loss_list.append(ls) ns_label_list.append(next_sentence_label) ns_pred_list.append(classified) mask_label_list.append(masked_id) mask_pred_list.append(decoded) mask_weight_list.append(masked_weight) running_mlm_loss += ls1.as_in_context(mx.cpu()) running_nsp_loss += ls2.as_in_context(mx.cpu()) running_num_tks += valid_length.sum().as_in_context(mx.cpu()) nsp_metric.update(ns_label_list, ns_pred_list) mlm_metric.update(mask_label_list, mask_pred_list, mask_weight_list) # logging if (step_num + 1) % (log_interval) == 0: total_mlm_loss += running_mlm_loss total_nsp_loss += running_nsp_loss log(begin_time, running_num_tks, running_mlm_loss, running_nsp_loss, step_num, mlm_metric, nsp_metric, None, log_interval) begin_time = time.time() running_mlm_loss = running_nsp_loss = running_num_tks = 0 mlm_metric.reset_local() nsp_metric.reset_local() mx.nd.waitall() eval_end_time = time.time() total_mlm_loss /= step_num total_nsp_loss /= step_num logging.info('mlm_loss={:.3f}\tmlm_acc={:.1f}\tnsp_loss={:.3f}\tnsp_acc={:.1f}\t' .format(total_mlm_loss.asscalar(), mlm_metric.get_global()[1] * 100, total_nsp_loss.asscalar(), nsp_metric.get_global()[1] * 100)) logging.info('Eval cost={:.1f}s'.format(eval_end_time - eval_begin_time))
def function[evaluate, parameter[data_eval, model, nsp_loss, mlm_loss, vocab_size, ctx, log_interval, dtype]]: constant[Evaluation function.] variable[mlm_metric] assign[=] call[name[MaskedAccuracy], parameter[]] variable[nsp_metric] assign[=] call[name[MaskedAccuracy], parameter[]] call[name[mlm_metric].reset, parameter[]] call[name[nsp_metric].reset, parameter[]] variable[eval_begin_time] assign[=] call[name[time].time, parameter[]] variable[begin_time] assign[=] call[name[time].time, parameter[]] variable[step_num] assign[=] constant[0] variable[running_mlm_loss] assign[=] constant[0] variable[total_mlm_loss] assign[=] constant[0] variable[running_num_tks] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da18dc04250>, <ast.Name object at 0x7da18dc05ae0>]]] in starred[call[name[enumerate], parameter[name[data_eval]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18dc06e90>, <ast.Name object at 0x7da18dc04910>]]] in starred[call[name[enumerate], parameter[name[dataloader]]]] begin[:] <ast.AugAssign object at 0x7da18dc07340> variable[data_list] assign[=] call[name[split_and_load], parameter[name[data_batch], name[ctx]]] variable[loss_list] assign[=] list[[]] <ast.Tuple object at 0x7da18dc067d0> assign[=] tuple[[<ast.List object at 0x7da18dc04550>, <ast.List object at 0x7da18dc06c80>]] <ast.Tuple object at 0x7da18dc046a0> assign[=] tuple[[<ast.List object at 0x7da18dc075e0>, <ast.List object at 0x7da18dc04220>, <ast.List object at 0x7da18dc04ee0>]] for taget[name[data]] in starred[name[data_list]] begin[:] variable[out] assign[=] call[name[forward], parameter[name[data], name[model], name[mlm_loss], name[nsp_loss], name[vocab_size], name[dtype]]] <ast.Tuple object at 0x7da18dc04a30> assign[=] name[out] call[name[loss_list].append, parameter[name[ls]]] call[name[ns_label_list].append, parameter[name[next_sentence_label]]] call[name[ns_pred_list].append, parameter[name[classified]]] call[name[mask_label_list].append, parameter[name[masked_id]]] call[name[mask_pred_list].append, parameter[name[decoded]]] call[name[mask_weight_list].append, parameter[name[masked_weight]]] <ast.AugAssign object at 0x7da18dc045e0> <ast.AugAssign object at 0x7da18dc05630> <ast.AugAssign object at 0x7da18dc07790> call[name[nsp_metric].update, parameter[name[ns_label_list], name[ns_pred_list]]] call[name[mlm_metric].update, parameter[name[mask_label_list], name[mask_pred_list], name[mask_weight_list]]] if compare[binary_operation[binary_operation[name[step_num] + constant[1]] <ast.Mod object at 0x7da2590d6920> name[log_interval]] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da18dc07610> <ast.AugAssign object at 0x7da18dc07ac0> call[name[log], parameter[name[begin_time], name[running_num_tks], name[running_mlm_loss], name[running_nsp_loss], name[step_num], name[mlm_metric], name[nsp_metric], constant[None], name[log_interval]]] variable[begin_time] assign[=] call[name[time].time, parameter[]] variable[running_mlm_loss] assign[=] constant[0] call[name[mlm_metric].reset_local, parameter[]] call[name[nsp_metric].reset_local, parameter[]] call[name[mx].nd.waitall, parameter[]] variable[eval_end_time] assign[=] call[name[time].time, parameter[]] <ast.AugAssign object at 0x7da2041da5c0> <ast.AugAssign object at 0x7da2041db5b0> call[name[logging].info, parameter[call[constant[mlm_loss={:.3f} mlm_acc={:.1f} nsp_loss={:.3f} nsp_acc={:.1f} ].format, parameter[call[name[total_mlm_loss].asscalar, parameter[]], binary_operation[call[call[name[mlm_metric].get_global, parameter[]]][constant[1]] * constant[100]], call[name[total_nsp_loss].asscalar, parameter[]], binary_operation[call[call[name[nsp_metric].get_global, parameter[]]][constant[1]] * constant[100]]]]]] call[name[logging].info, parameter[call[constant[Eval cost={:.1f}s].format, parameter[binary_operation[name[eval_end_time] - name[eval_begin_time]]]]]]
keyword[def] identifier[evaluate] ( identifier[data_eval] , identifier[model] , identifier[nsp_loss] , identifier[mlm_loss] , identifier[vocab_size] , identifier[ctx] , identifier[log_interval] , identifier[dtype] ): literal[string] identifier[mlm_metric] = identifier[MaskedAccuracy] () identifier[nsp_metric] = identifier[MaskedAccuracy] () identifier[mlm_metric] . identifier[reset] () identifier[nsp_metric] . identifier[reset] () identifier[eval_begin_time] = identifier[time] . identifier[time] () identifier[begin_time] = identifier[time] . identifier[time] () identifier[step_num] = literal[int] identifier[running_mlm_loss] = identifier[running_nsp_loss] = literal[int] identifier[total_mlm_loss] = identifier[total_nsp_loss] = literal[int] identifier[running_num_tks] = literal[int] keyword[for] identifier[_] , identifier[dataloader] keyword[in] identifier[enumerate] ( identifier[data_eval] ): keyword[for] identifier[_] , identifier[data_batch] keyword[in] identifier[enumerate] ( identifier[dataloader] ): identifier[step_num] += literal[int] identifier[data_list] = identifier[split_and_load] ( identifier[data_batch] , identifier[ctx] ) identifier[loss_list] =[] identifier[ns_label_list] , identifier[ns_pred_list] =[],[] identifier[mask_label_list] , identifier[mask_pred_list] , identifier[mask_weight_list] =[],[],[] keyword[for] identifier[data] keyword[in] identifier[data_list] : identifier[out] = identifier[forward] ( identifier[data] , identifier[model] , identifier[mlm_loss] , identifier[nsp_loss] , identifier[vocab_size] , identifier[dtype] ) ( identifier[ls] , identifier[next_sentence_label] , identifier[classified] , identifier[masked_id] , identifier[decoded] , identifier[masked_weight] , identifier[ls1] , identifier[ls2] , identifier[valid_length] )= identifier[out] identifier[loss_list] . identifier[append] ( identifier[ls] ) identifier[ns_label_list] . identifier[append] ( identifier[next_sentence_label] ) identifier[ns_pred_list] . identifier[append] ( identifier[classified] ) identifier[mask_label_list] . identifier[append] ( identifier[masked_id] ) identifier[mask_pred_list] . identifier[append] ( identifier[decoded] ) identifier[mask_weight_list] . identifier[append] ( identifier[masked_weight] ) identifier[running_mlm_loss] += identifier[ls1] . identifier[as_in_context] ( identifier[mx] . identifier[cpu] ()) identifier[running_nsp_loss] += identifier[ls2] . identifier[as_in_context] ( identifier[mx] . identifier[cpu] ()) identifier[running_num_tks] += identifier[valid_length] . identifier[sum] (). identifier[as_in_context] ( identifier[mx] . identifier[cpu] ()) identifier[nsp_metric] . identifier[update] ( identifier[ns_label_list] , identifier[ns_pred_list] ) identifier[mlm_metric] . identifier[update] ( identifier[mask_label_list] , identifier[mask_pred_list] , identifier[mask_weight_list] ) keyword[if] ( identifier[step_num] + literal[int] )%( identifier[log_interval] )== literal[int] : identifier[total_mlm_loss] += identifier[running_mlm_loss] identifier[total_nsp_loss] += identifier[running_nsp_loss] identifier[log] ( identifier[begin_time] , identifier[running_num_tks] , identifier[running_mlm_loss] , identifier[running_nsp_loss] , identifier[step_num] , identifier[mlm_metric] , identifier[nsp_metric] , keyword[None] , identifier[log_interval] ) identifier[begin_time] = identifier[time] . identifier[time] () identifier[running_mlm_loss] = identifier[running_nsp_loss] = identifier[running_num_tks] = literal[int] identifier[mlm_metric] . identifier[reset_local] () identifier[nsp_metric] . identifier[reset_local] () identifier[mx] . identifier[nd] . identifier[waitall] () identifier[eval_end_time] = identifier[time] . identifier[time] () identifier[total_mlm_loss] /= identifier[step_num] identifier[total_nsp_loss] /= identifier[step_num] identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[total_mlm_loss] . identifier[asscalar] (), identifier[mlm_metric] . identifier[get_global] ()[ literal[int] ]* literal[int] , identifier[total_nsp_loss] . identifier[asscalar] (), identifier[nsp_metric] . identifier[get_global] ()[ literal[int] ]* literal[int] )) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[eval_end_time] - identifier[eval_begin_time] ))
def evaluate(data_eval, model, nsp_loss, mlm_loss, vocab_size, ctx, log_interval, dtype): """Evaluation function.""" mlm_metric = MaskedAccuracy() nsp_metric = MaskedAccuracy() mlm_metric.reset() nsp_metric.reset() eval_begin_time = time.time() begin_time = time.time() step_num = 0 running_mlm_loss = running_nsp_loss = 0 total_mlm_loss = total_nsp_loss = 0 running_num_tks = 0 for (_, dataloader) in enumerate(data_eval): for (_, data_batch) in enumerate(dataloader): step_num += 1 data_list = split_and_load(data_batch, ctx) loss_list = [] (ns_label_list, ns_pred_list) = ([], []) (mask_label_list, mask_pred_list, mask_weight_list) = ([], [], []) for data in data_list: out = forward(data, model, mlm_loss, nsp_loss, vocab_size, dtype) (ls, next_sentence_label, classified, masked_id, decoded, masked_weight, ls1, ls2, valid_length) = out loss_list.append(ls) ns_label_list.append(next_sentence_label) ns_pred_list.append(classified) mask_label_list.append(masked_id) mask_pred_list.append(decoded) mask_weight_list.append(masked_weight) running_mlm_loss += ls1.as_in_context(mx.cpu()) running_nsp_loss += ls2.as_in_context(mx.cpu()) running_num_tks += valid_length.sum().as_in_context(mx.cpu()) # depends on [control=['for'], data=['data']] nsp_metric.update(ns_label_list, ns_pred_list) mlm_metric.update(mask_label_list, mask_pred_list, mask_weight_list) # logging if (step_num + 1) % log_interval == 0: total_mlm_loss += running_mlm_loss total_nsp_loss += running_nsp_loss log(begin_time, running_num_tks, running_mlm_loss, running_nsp_loss, step_num, mlm_metric, nsp_metric, None, log_interval) begin_time = time.time() running_mlm_loss = running_nsp_loss = running_num_tks = 0 mlm_metric.reset_local() nsp_metric.reset_local() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] mx.nd.waitall() eval_end_time = time.time() total_mlm_loss /= step_num total_nsp_loss /= step_num logging.info('mlm_loss={:.3f}\tmlm_acc={:.1f}\tnsp_loss={:.3f}\tnsp_acc={:.1f}\t'.format(total_mlm_loss.asscalar(), mlm_metric.get_global()[1] * 100, total_nsp_loss.asscalar(), nsp_metric.get_global()[1] * 100)) logging.info('Eval cost={:.1f}s'.format(eval_end_time - eval_begin_time))
def _handle_system_status_event(self, event: SystemStatusEvent) -> None: """ DISARMED -> ARMED_AWAY -> EXIT_DELAY_START -> EXIT_DELAY_END (trip): -> ALARM -> OUTPUT_ON -> ALARM_RESTORE (disarm): -> DISARMED -> OUTPUT_OFF (disarm): -> DISARMED (disarm before EXIT_DELAY_END): -> DISARMED -> EXIT_DELAY_END TODO(NW): Check ALARM_RESTORE state transition to move back into ARMED_AWAY state """ if event.type == SystemStatusEvent.EventType.UNSEALED: return self._update_zone(event.zone, True) elif event.type == SystemStatusEvent.EventType.SEALED: return self._update_zone(event.zone, False) elif event.type == SystemStatusEvent.EventType.ALARM: return self._update_arming_state(ArmingState.TRIGGERED) elif event.type == SystemStatusEvent.EventType.ALARM_RESTORE: if self.arming_state != ArmingState.DISARMED: return self._update_arming_state(ArmingState.ARMED) elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_START: return self._update_arming_state(ArmingState.ENTRY_DELAY) elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_END: pass elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_START: return self._update_arming_state(ArmingState.EXIT_DELAY) elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_END: # Exit delay finished - if we were in the process of arming update # state to armed if self.arming_state == ArmingState.EXIT_DELAY: return self._update_arming_state(ArmingState.ARMED) elif event.type in Alarm.ARM_EVENTS: return self._update_arming_state(ArmingState.ARMING) elif event.type == SystemStatusEvent.EventType.DISARMED: return self._update_arming_state(ArmingState.DISARMED) elif event.type == SystemStatusEvent.EventType.ARMING_DELAYED: pass
def function[_handle_system_status_event, parameter[self, event]]: constant[ DISARMED -> ARMED_AWAY -> EXIT_DELAY_START -> EXIT_DELAY_END (trip): -> ALARM -> OUTPUT_ON -> ALARM_RESTORE (disarm): -> DISARMED -> OUTPUT_OFF (disarm): -> DISARMED (disarm before EXIT_DELAY_END): -> DISARMED -> EXIT_DELAY_END TODO(NW): Check ALARM_RESTORE state transition to move back into ARMED_AWAY state ] if compare[name[event].type equal[==] name[SystemStatusEvent].EventType.UNSEALED] begin[:] return[call[name[self]._update_zone, parameter[name[event].zone, constant[True]]]]
keyword[def] identifier[_handle_system_status_event] ( identifier[self] , identifier[event] : identifier[SystemStatusEvent] )-> keyword[None] : literal[string] keyword[if] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[UNSEALED] : keyword[return] identifier[self] . identifier[_update_zone] ( identifier[event] . identifier[zone] , keyword[True] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[SEALED] : keyword[return] identifier[self] . identifier[_update_zone] ( identifier[event] . identifier[zone] , keyword[False] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[ALARM] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[TRIGGERED] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[ALARM_RESTORE] : keyword[if] identifier[self] . identifier[arming_state] != identifier[ArmingState] . identifier[DISARMED] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[ARMED] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[ENTRY_DELAY_START] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[ENTRY_DELAY] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[ENTRY_DELAY_END] : keyword[pass] keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[EXIT_DELAY_START] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[EXIT_DELAY] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[EXIT_DELAY_END] : keyword[if] identifier[self] . identifier[arming_state] == identifier[ArmingState] . identifier[EXIT_DELAY] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[ARMED] ) keyword[elif] identifier[event] . identifier[type] keyword[in] identifier[Alarm] . identifier[ARM_EVENTS] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[ARMING] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[DISARMED] : keyword[return] identifier[self] . identifier[_update_arming_state] ( identifier[ArmingState] . identifier[DISARMED] ) keyword[elif] identifier[event] . identifier[type] == identifier[SystemStatusEvent] . identifier[EventType] . identifier[ARMING_DELAYED] : keyword[pass]
def _handle_system_status_event(self, event: SystemStatusEvent) -> None: """ DISARMED -> ARMED_AWAY -> EXIT_DELAY_START -> EXIT_DELAY_END (trip): -> ALARM -> OUTPUT_ON -> ALARM_RESTORE (disarm): -> DISARMED -> OUTPUT_OFF (disarm): -> DISARMED (disarm before EXIT_DELAY_END): -> DISARMED -> EXIT_DELAY_END TODO(NW): Check ALARM_RESTORE state transition to move back into ARMED_AWAY state """ if event.type == SystemStatusEvent.EventType.UNSEALED: return self._update_zone(event.zone, True) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.SEALED: return self._update_zone(event.zone, False) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.ALARM: return self._update_arming_state(ArmingState.TRIGGERED) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.ALARM_RESTORE: if self.arming_state != ArmingState.DISARMED: return self._update_arming_state(ArmingState.ARMED) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_START: return self._update_arming_state(ArmingState.ENTRY_DELAY) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_END: pass # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_START: return self._update_arming_state(ArmingState.EXIT_DELAY) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_END: # Exit delay finished - if we were in the process of arming update # state to armed if self.arming_state == ArmingState.EXIT_DELAY: return self._update_arming_state(ArmingState.ARMED) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif event.type in Alarm.ARM_EVENTS: return self._update_arming_state(ArmingState.ARMING) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.DISARMED: return self._update_arming_state(ArmingState.DISARMED) # depends on [control=['if'], data=[]] elif event.type == SystemStatusEvent.EventType.ARMING_DELAYED: pass # depends on [control=['if'], data=[]]
def get_merge_requests(self): "http://doc.gitlab.com/ce/api/merge_requests.html" g = self.gitlab merges = self.get(g['url'] + "/projects/" + g['repo'] + "/merge_requests", {'private_token': g['token'], 'state': 'all'}, cache=False) return dict([(str(merge['id']), merge) for merge in merges])
def function[get_merge_requests, parameter[self]]: constant[http://doc.gitlab.com/ce/api/merge_requests.html] variable[g] assign[=] name[self].gitlab variable[merges] assign[=] call[name[self].get, parameter[binary_operation[binary_operation[binary_operation[call[name[g]][constant[url]] + constant[/projects/]] + call[name[g]][constant[repo]]] + constant[/merge_requests]], dictionary[[<ast.Constant object at 0x7da1b0a4c340>, <ast.Constant object at 0x7da1b0a4eb60>], [<ast.Subscript object at 0x7da1b0a4fca0>, <ast.Constant object at 0x7da1b0b81030>]]]] return[call[name[dict], parameter[<ast.ListComp object at 0x7da1b0b80f40>]]]
keyword[def] identifier[get_merge_requests] ( identifier[self] ): literal[string] identifier[g] = identifier[self] . identifier[gitlab] identifier[merges] = identifier[self] . identifier[get] ( identifier[g] [ literal[string] ]+ literal[string] + identifier[g] [ literal[string] ]+ literal[string] , { literal[string] : identifier[g] [ literal[string] ], literal[string] : literal[string] }, identifier[cache] = keyword[False] ) keyword[return] identifier[dict] ([( identifier[str] ( identifier[merge] [ literal[string] ]), identifier[merge] ) keyword[for] identifier[merge] keyword[in] identifier[merges] ])
def get_merge_requests(self): """http://doc.gitlab.com/ce/api/merge_requests.html""" g = self.gitlab merges = self.get(g['url'] + '/projects/' + g['repo'] + '/merge_requests', {'private_token': g['token'], 'state': 'all'}, cache=False) return dict([(str(merge['id']), merge) for merge in merges])
def plot(self, ax=None, legend=None, return_fig=False, **kwargs): """ Plot a curve. Args: ax (ax): A matplotlib axis. legend (striplog.legend): A legend. Optional. return_fig (bool): whether to return the matplotlib figure. Default False. kwargs: Arguments for ``ax.set()`` Returns: ax. If you passed in an ax, otherwise None. """ if ax is None: fig = plt.figure(figsize=(2, 10)) ax = fig.add_subplot(111) return_ax = False else: return_ax = True d = None if legend is not None: try: d = legend.get_decor(self) except: pass if d is not None: kwargs['color'] = d.colour kwargs['lw'] = getattr(d, 'lineweight', None) or getattr(d, 'lw', 1) kwargs['ls'] = getattr(d, 'linestyle', None) or getattr(d, 'ls', '-') # Attempt to get axis parameters from decor. axkwargs = {} xlim = getattr(d, 'xlim', None) if xlim is not None: axkwargs['xlim'] = list(map(float, xlim.split(','))) xticks = getattr(d, 'xticks', None) if xticks is not None: axkwargs['xticks'] = list(map(float, xticks.split(','))) xscale = getattr(d, 'xscale', None) if xscale is not None: axkwargs['xscale'] = xscale ax.set(**axkwargs) ax.plot(self, self.basis, **kwargs) ax.set_title(self.mnemonic) # no longer needed ax.set_xlabel(self.units) if False: # labeltop of axes? ax.xaxis.tick_top() if True: # rotate x-tick labels labels = ax.get_xticklabels() for label in labels: label.set_rotation(90) ax.set_ylim([self.stop, self.start]) ax.grid('on', color='k', alpha=0.33, lw=0.33, linestyle='-') if return_ax: return ax elif return_fig: return fig else: return None
def function[plot, parameter[self, ax, legend, return_fig]]: constant[ Plot a curve. Args: ax (ax): A matplotlib axis. legend (striplog.legend): A legend. Optional. return_fig (bool): whether to return the matplotlib figure. Default False. kwargs: Arguments for ``ax.set()`` Returns: ax. If you passed in an ax, otherwise None. ] if compare[name[ax] is constant[None]] begin[:] variable[fig] assign[=] call[name[plt].figure, parameter[]] variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]] variable[return_ax] assign[=] constant[False] variable[d] assign[=] constant[None] if compare[name[legend] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b1d49e40> if compare[name[d] is_not constant[None]] begin[:] call[name[kwargs]][constant[color]] assign[=] name[d].colour call[name[kwargs]][constant[lw]] assign[=] <ast.BoolOp object at 0x7da1b1d48dc0> call[name[kwargs]][constant[ls]] assign[=] <ast.BoolOp object at 0x7da1b1d4bc40> variable[axkwargs] assign[=] dictionary[[], []] variable[xlim] assign[=] call[name[getattr], parameter[name[d], constant[xlim], constant[None]]] if compare[name[xlim] is_not constant[None]] begin[:] call[name[axkwargs]][constant[xlim]] assign[=] call[name[list], parameter[call[name[map], parameter[name[float], call[name[xlim].split, parameter[constant[,]]]]]]] variable[xticks] assign[=] call[name[getattr], parameter[name[d], constant[xticks], constant[None]]] if compare[name[xticks] is_not constant[None]] begin[:] call[name[axkwargs]][constant[xticks]] assign[=] call[name[list], parameter[call[name[map], parameter[name[float], call[name[xticks].split, parameter[constant[,]]]]]]] variable[xscale] assign[=] call[name[getattr], parameter[name[d], constant[xscale], constant[None]]] if compare[name[xscale] is_not constant[None]] begin[:] call[name[axkwargs]][constant[xscale]] assign[=] name[xscale] call[name[ax].set, parameter[]] call[name[ax].plot, parameter[name[self], name[self].basis]] call[name[ax].set_title, parameter[name[self].mnemonic]] call[name[ax].set_xlabel, parameter[name[self].units]] if constant[False] begin[:] call[name[ax].xaxis.tick_top, parameter[]] if constant[True] begin[:] variable[labels] assign[=] call[name[ax].get_xticklabels, parameter[]] for taget[name[label]] in starred[name[labels]] begin[:] call[name[label].set_rotation, parameter[constant[90]]] call[name[ax].set_ylim, parameter[list[[<ast.Attribute object at 0x7da1b23638b0>, <ast.Attribute object at 0x7da1b23634f0>]]]] call[name[ax].grid, parameter[constant[on]]] if name[return_ax] begin[:] return[name[ax]]
keyword[def] identifier[plot] ( identifier[self] , identifier[ax] = keyword[None] , identifier[legend] = keyword[None] , identifier[return_fig] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[ax] keyword[is] keyword[None] : identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] )) identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] ) identifier[return_ax] = keyword[False] keyword[else] : identifier[return_ax] = keyword[True] identifier[d] = keyword[None] keyword[if] identifier[legend] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[d] = identifier[legend] . identifier[get_decor] ( identifier[self] ) keyword[except] : keyword[pass] keyword[if] identifier[d] keyword[is] keyword[not] keyword[None] : identifier[kwargs] [ literal[string] ]= identifier[d] . identifier[colour] identifier[kwargs] [ literal[string] ]= identifier[getattr] ( identifier[d] , literal[string] , keyword[None] ) keyword[or] identifier[getattr] ( identifier[d] , literal[string] , literal[int] ) identifier[kwargs] [ literal[string] ]= identifier[getattr] ( identifier[d] , literal[string] , keyword[None] ) keyword[or] identifier[getattr] ( identifier[d] , literal[string] , literal[string] ) identifier[axkwargs] ={} identifier[xlim] = identifier[getattr] ( identifier[d] , literal[string] , keyword[None] ) keyword[if] identifier[xlim] keyword[is] keyword[not] keyword[None] : identifier[axkwargs] [ literal[string] ]= identifier[list] ( identifier[map] ( identifier[float] , identifier[xlim] . identifier[split] ( literal[string] ))) identifier[xticks] = identifier[getattr] ( identifier[d] , literal[string] , keyword[None] ) keyword[if] identifier[xticks] keyword[is] keyword[not] keyword[None] : identifier[axkwargs] [ literal[string] ]= identifier[list] ( identifier[map] ( identifier[float] , identifier[xticks] . identifier[split] ( literal[string] ))) identifier[xscale] = identifier[getattr] ( identifier[d] , literal[string] , keyword[None] ) keyword[if] identifier[xscale] keyword[is] keyword[not] keyword[None] : identifier[axkwargs] [ literal[string] ]= identifier[xscale] identifier[ax] . identifier[set] (** identifier[axkwargs] ) identifier[ax] . identifier[plot] ( identifier[self] , identifier[self] . identifier[basis] ,** identifier[kwargs] ) identifier[ax] . identifier[set_title] ( identifier[self] . identifier[mnemonic] ) identifier[ax] . identifier[set_xlabel] ( identifier[self] . identifier[units] ) keyword[if] keyword[False] : identifier[ax] . identifier[xaxis] . identifier[tick_top] () keyword[if] keyword[True] : identifier[labels] = identifier[ax] . identifier[get_xticklabels] () keyword[for] identifier[label] keyword[in] identifier[labels] : identifier[label] . identifier[set_rotation] ( literal[int] ) identifier[ax] . identifier[set_ylim] ([ identifier[self] . identifier[stop] , identifier[self] . identifier[start] ]) identifier[ax] . identifier[grid] ( literal[string] , identifier[color] = literal[string] , identifier[alpha] = literal[int] , identifier[lw] = literal[int] , identifier[linestyle] = literal[string] ) keyword[if] identifier[return_ax] : keyword[return] identifier[ax] keyword[elif] identifier[return_fig] : keyword[return] identifier[fig] keyword[else] : keyword[return] keyword[None]
def plot(self, ax=None, legend=None, return_fig=False, **kwargs): """ Plot a curve. Args: ax (ax): A matplotlib axis. legend (striplog.legend): A legend. Optional. return_fig (bool): whether to return the matplotlib figure. Default False. kwargs: Arguments for ``ax.set()`` Returns: ax. If you passed in an ax, otherwise None. """ if ax is None: fig = plt.figure(figsize=(2, 10)) ax = fig.add_subplot(111) return_ax = False # depends on [control=['if'], data=['ax']] else: return_ax = True d = None if legend is not None: try: d = legend.get_decor(self) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['legend']] if d is not None: kwargs['color'] = d.colour kwargs['lw'] = getattr(d, 'lineweight', None) or getattr(d, 'lw', 1) kwargs['ls'] = getattr(d, 'linestyle', None) or getattr(d, 'ls', '-') # Attempt to get axis parameters from decor. axkwargs = {} xlim = getattr(d, 'xlim', None) if xlim is not None: axkwargs['xlim'] = list(map(float, xlim.split(','))) # depends on [control=['if'], data=['xlim']] xticks = getattr(d, 'xticks', None) if xticks is not None: axkwargs['xticks'] = list(map(float, xticks.split(','))) # depends on [control=['if'], data=['xticks']] xscale = getattr(d, 'xscale', None) if xscale is not None: axkwargs['xscale'] = xscale # depends on [control=['if'], data=['xscale']] ax.set(**axkwargs) # depends on [control=['if'], data=['d']] ax.plot(self, self.basis, **kwargs) ax.set_title(self.mnemonic) # no longer needed ax.set_xlabel(self.units) if False: # labeltop of axes? ax.xaxis.tick_top() # depends on [control=['if'], data=[]] if True: # rotate x-tick labels labels = ax.get_xticklabels() for label in labels: label.set_rotation(90) # depends on [control=['for'], data=['label']] # depends on [control=['if'], data=[]] ax.set_ylim([self.stop, self.start]) ax.grid('on', color='k', alpha=0.33, lw=0.33, linestyle='-') if return_ax: return ax # depends on [control=['if'], data=[]] elif return_fig: return fig # depends on [control=['if'], data=[]] else: return None
def removeItem( self, item ): """ Overloads the default QGraphicsScene method to handle cleanup and \ additional removal options for nodes. :param item <QGraphicsItem> :return <bool> """ # for nodes and connections, call the prepareToRemove method before # removing if ( isinstance( item, XNode ) or isinstance( item, XNodeConnection ) ): # make sure this item is ok to remove if ( not item.prepareToRemove() ): return False # remove the item using the base class method try: self._cache.remove(item) except KeyError: pass # mark the scene as modified self.setModified(True) super(XNodeScene, self).removeItem(item) if not self.signalsBlocked(): self.itemsRemoved.emit() return True
def function[removeItem, parameter[self, item]]: constant[ Overloads the default QGraphicsScene method to handle cleanup and additional removal options for nodes. :param item <QGraphicsItem> :return <bool> ] if <ast.BoolOp object at 0x7da2054a6c20> begin[:] if <ast.UnaryOp object at 0x7da18f09ce20> begin[:] return[constant[False]] <ast.Try object at 0x7da18f09dba0> call[name[self].setModified, parameter[constant[True]]] call[call[name[super], parameter[name[XNodeScene], name[self]]].removeItem, parameter[name[item]]] if <ast.UnaryOp object at 0x7da18f09e440> begin[:] call[name[self].itemsRemoved.emit, parameter[]] return[constant[True]]
keyword[def] identifier[removeItem] ( identifier[self] , identifier[item] ): literal[string] keyword[if] ( identifier[isinstance] ( identifier[item] , identifier[XNode] ) keyword[or] identifier[isinstance] ( identifier[item] , identifier[XNodeConnection] )): keyword[if] ( keyword[not] identifier[item] . identifier[prepareToRemove] ()): keyword[return] keyword[False] keyword[try] : identifier[self] . identifier[_cache] . identifier[remove] ( identifier[item] ) keyword[except] identifier[KeyError] : keyword[pass] identifier[self] . identifier[setModified] ( keyword[True] ) identifier[super] ( identifier[XNodeScene] , identifier[self] ). identifier[removeItem] ( identifier[item] ) keyword[if] keyword[not] identifier[self] . identifier[signalsBlocked] (): identifier[self] . identifier[itemsRemoved] . identifier[emit] () keyword[return] keyword[True]
def removeItem(self, item): """ Overloads the default QGraphicsScene method to handle cleanup and additional removal options for nodes. :param item <QGraphicsItem> :return <bool> """ # for nodes and connections, call the prepareToRemove method before # removing if isinstance(item, XNode) or isinstance(item, XNodeConnection): # make sure this item is ok to remove if not item.prepareToRemove(): return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # remove the item using the base class method try: self._cache.remove(item) # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # mark the scene as modified self.setModified(True) super(XNodeScene, self).removeItem(item) if not self.signalsBlocked(): self.itemsRemoved.emit() # depends on [control=['if'], data=[]] return True
def send_static_file(self, filename): ''' Override default static handling: - raises 404 if not debug - handle static aliases ''' if not self.debug: self.logger.error('Static files are only served in debug') abort(404) cache_timeout = self.get_send_file_max_age(filename) # Default behavior if isfile(join(self.static_folder, filename)): return send_static(self.static_folder, filename, cache_timeout=cache_timeout) # Handle aliases for prefix, directory in self.config.get('STATIC_DIRS', tuple()): if filename.startswith(prefix): real_filename = filename[len(prefix):] if real_filename.startswith('/'): real_filename = real_filename[1:] if isfile(join(directory, real_filename)): return send_static(directory, real_filename, cache_timeout=cache_timeout) abort(404)
def function[send_static_file, parameter[self, filename]]: constant[ Override default static handling: - raises 404 if not debug - handle static aliases ] if <ast.UnaryOp object at 0x7da20c6a8250> begin[:] call[name[self].logger.error, parameter[constant[Static files are only served in debug]]] call[name[abort], parameter[constant[404]]] variable[cache_timeout] assign[=] call[name[self].get_send_file_max_age, parameter[name[filename]]] if call[name[isfile], parameter[call[name[join], parameter[name[self].static_folder, name[filename]]]]] begin[:] return[call[name[send_static], parameter[name[self].static_folder, name[filename]]]] for taget[tuple[[<ast.Name object at 0x7da20c6a8610>, <ast.Name object at 0x7da20c6a9330>]]] in starred[call[name[self].config.get, parameter[constant[STATIC_DIRS], call[name[tuple], parameter[]]]]] begin[:] if call[name[filename].startswith, parameter[name[prefix]]] begin[:] variable[real_filename] assign[=] call[name[filename]][<ast.Slice object at 0x7da20c6a9480>] if call[name[real_filename].startswith, parameter[constant[/]]] begin[:] variable[real_filename] assign[=] call[name[real_filename]][<ast.Slice object at 0x7da20c6a91b0>] if call[name[isfile], parameter[call[name[join], parameter[name[directory], name[real_filename]]]]] begin[:] return[call[name[send_static], parameter[name[directory], name[real_filename]]]] call[name[abort], parameter[constant[404]]]
keyword[def] identifier[send_static_file] ( identifier[self] , identifier[filename] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[debug] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] ) identifier[abort] ( literal[int] ) identifier[cache_timeout] = identifier[self] . identifier[get_send_file_max_age] ( identifier[filename] ) keyword[if] identifier[isfile] ( identifier[join] ( identifier[self] . identifier[static_folder] , identifier[filename] )): keyword[return] identifier[send_static] ( identifier[self] . identifier[static_folder] , identifier[filename] , identifier[cache_timeout] = identifier[cache_timeout] ) keyword[for] identifier[prefix] , identifier[directory] keyword[in] identifier[self] . identifier[config] . identifier[get] ( literal[string] , identifier[tuple] ()): keyword[if] identifier[filename] . identifier[startswith] ( identifier[prefix] ): identifier[real_filename] = identifier[filename] [ identifier[len] ( identifier[prefix] ):] keyword[if] identifier[real_filename] . identifier[startswith] ( literal[string] ): identifier[real_filename] = identifier[real_filename] [ literal[int] :] keyword[if] identifier[isfile] ( identifier[join] ( identifier[directory] , identifier[real_filename] )): keyword[return] identifier[send_static] ( identifier[directory] , identifier[real_filename] , identifier[cache_timeout] = identifier[cache_timeout] ) identifier[abort] ( literal[int] )
def send_static_file(self, filename): """ Override default static handling: - raises 404 if not debug - handle static aliases """ if not self.debug: self.logger.error('Static files are only served in debug') abort(404) # depends on [control=['if'], data=[]] cache_timeout = self.get_send_file_max_age(filename) # Default behavior if isfile(join(self.static_folder, filename)): return send_static(self.static_folder, filename, cache_timeout=cache_timeout) # depends on [control=['if'], data=[]] # Handle aliases for (prefix, directory) in self.config.get('STATIC_DIRS', tuple()): if filename.startswith(prefix): real_filename = filename[len(prefix):] if real_filename.startswith('/'): real_filename = real_filename[1:] # depends on [control=['if'], data=[]] if isfile(join(directory, real_filename)): return send_static(directory, real_filename, cache_timeout=cache_timeout) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] abort(404)
def _grab_concretization_results(cls, state): """ Grabs the concretized result so we can add the constraint ourselves. """ # only grab ones that match the constrained addrs if cls._should_add_constraints(state): addr = state.inspect.address_concretization_expr result = state.inspect.address_concretization_result if result is None: l.warning("addr concretization result is None") return state.preconstrainer.address_concretization.append((addr, result))
def function[_grab_concretization_results, parameter[cls, state]]: constant[ Grabs the concretized result so we can add the constraint ourselves. ] if call[name[cls]._should_add_constraints, parameter[name[state]]] begin[:] variable[addr] assign[=] name[state].inspect.address_concretization_expr variable[result] assign[=] name[state].inspect.address_concretization_result if compare[name[result] is constant[None]] begin[:] call[name[l].warning, parameter[constant[addr concretization result is None]]] return[None] call[name[state].preconstrainer.address_concretization.append, parameter[tuple[[<ast.Name object at 0x7da18ede6110>, <ast.Name object at 0x7da18ede49d0>]]]]
keyword[def] identifier[_grab_concretization_results] ( identifier[cls] , identifier[state] ): literal[string] keyword[if] identifier[cls] . identifier[_should_add_constraints] ( identifier[state] ): identifier[addr] = identifier[state] . identifier[inspect] . identifier[address_concretization_expr] identifier[result] = identifier[state] . identifier[inspect] . identifier[address_concretization_result] keyword[if] identifier[result] keyword[is] keyword[None] : identifier[l] . identifier[warning] ( literal[string] ) keyword[return] identifier[state] . identifier[preconstrainer] . identifier[address_concretization] . identifier[append] (( identifier[addr] , identifier[result] ))
def _grab_concretization_results(cls, state): """ Grabs the concretized result so we can add the constraint ourselves. """ # only grab ones that match the constrained addrs if cls._should_add_constraints(state): addr = state.inspect.address_concretization_expr result = state.inspect.address_concretization_result if result is None: l.warning('addr concretization result is None') return # depends on [control=['if'], data=[]] state.preconstrainer.address_concretization.append((addr, result)) # depends on [control=['if'], data=[]]
def headers_to_sign(self, http_request): """ Select the headers from the request that need to be included in the StringToSign. """ headers_to_sign = {} headers_to_sign = {'Host' : self.host} for name, value in http_request.headers.items(): lname = name.lower() if lname.startswith('x-amz'): headers_to_sign[name] = value return headers_to_sign
def function[headers_to_sign, parameter[self, http_request]]: constant[ Select the headers from the request that need to be included in the StringToSign. ] variable[headers_to_sign] assign[=] dictionary[[], []] variable[headers_to_sign] assign[=] dictionary[[<ast.Constant object at 0x7da1b26a7b80>], [<ast.Attribute object at 0x7da1b26a5870>]] for taget[tuple[[<ast.Name object at 0x7da1b26a70a0>, <ast.Name object at 0x7da1b26a5000>]]] in starred[call[name[http_request].headers.items, parameter[]]] begin[:] variable[lname] assign[=] call[name[name].lower, parameter[]] if call[name[lname].startswith, parameter[constant[x-amz]]] begin[:] call[name[headers_to_sign]][name[name]] assign[=] name[value] return[name[headers_to_sign]]
keyword[def] identifier[headers_to_sign] ( identifier[self] , identifier[http_request] ): literal[string] identifier[headers_to_sign] ={} identifier[headers_to_sign] ={ literal[string] : identifier[self] . identifier[host] } keyword[for] identifier[name] , identifier[value] keyword[in] identifier[http_request] . identifier[headers] . identifier[items] (): identifier[lname] = identifier[name] . identifier[lower] () keyword[if] identifier[lname] . identifier[startswith] ( literal[string] ): identifier[headers_to_sign] [ identifier[name] ]= identifier[value] keyword[return] identifier[headers_to_sign]
def headers_to_sign(self, http_request): """ Select the headers from the request that need to be included in the StringToSign. """ headers_to_sign = {} headers_to_sign = {'Host': self.host} for (name, value) in http_request.headers.items(): lname = name.lower() if lname.startswith('x-amz'): headers_to_sign[name] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return headers_to_sign
def _unlock_cache(w_lock): ''' Unlock a FS file/dir based lock ''' if not os.path.exists(w_lock): return try: if os.path.isdir(w_lock): os.rmdir(w_lock) elif os.path.isfile(w_lock): os.unlink(w_lock) except (OSError, IOError) as exc: log.trace('Error removing lockfile %s: %s', w_lock, exc)
def function[_unlock_cache, parameter[w_lock]]: constant[ Unlock a FS file/dir based lock ] if <ast.UnaryOp object at 0x7da18f813d90> begin[:] return[None] <ast.Try object at 0x7da18f8129e0>
keyword[def] identifier[_unlock_cache] ( identifier[w_lock] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[w_lock] ): keyword[return] keyword[try] : keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[w_lock] ): identifier[os] . identifier[rmdir] ( identifier[w_lock] ) keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[w_lock] ): identifier[os] . identifier[unlink] ( identifier[w_lock] ) keyword[except] ( identifier[OSError] , identifier[IOError] ) keyword[as] identifier[exc] : identifier[log] . identifier[trace] ( literal[string] , identifier[w_lock] , identifier[exc] )
def _unlock_cache(w_lock): """ Unlock a FS file/dir based lock """ if not os.path.exists(w_lock): return # depends on [control=['if'], data=[]] try: if os.path.isdir(w_lock): os.rmdir(w_lock) # depends on [control=['if'], data=[]] elif os.path.isfile(w_lock): os.unlink(w_lock) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except (OSError, IOError) as exc: log.trace('Error removing lockfile %s: %s', w_lock, exc) # depends on [control=['except'], data=['exc']]
def get_members(self, access=None): """ returns list of members according to access type If access equals to None, then returned list will contain all members. You should not modify the list content, otherwise different optimization data will stop work and may to give you wrong results. :param access: describes desired members :type access: :class:ACCESS_TYPES :rtype: [ members ] """ if access == ACCESS_TYPES.PUBLIC: return self.public_members elif access == ACCESS_TYPES.PROTECTED: return self.protected_members elif access == ACCESS_TYPES.PRIVATE: return self.private_members all_members = [] all_members.extend(self.public_members) all_members.extend(self.protected_members) all_members.extend(self.private_members) return all_members
def function[get_members, parameter[self, access]]: constant[ returns list of members according to access type If access equals to None, then returned list will contain all members. You should not modify the list content, otherwise different optimization data will stop work and may to give you wrong results. :param access: describes desired members :type access: :class:ACCESS_TYPES :rtype: [ members ] ] if compare[name[access] equal[==] name[ACCESS_TYPES].PUBLIC] begin[:] return[name[self].public_members] variable[all_members] assign[=] list[[]] call[name[all_members].extend, parameter[name[self].public_members]] call[name[all_members].extend, parameter[name[self].protected_members]] call[name[all_members].extend, parameter[name[self].private_members]] return[name[all_members]]
keyword[def] identifier[get_members] ( identifier[self] , identifier[access] = keyword[None] ): literal[string] keyword[if] identifier[access] == identifier[ACCESS_TYPES] . identifier[PUBLIC] : keyword[return] identifier[self] . identifier[public_members] keyword[elif] identifier[access] == identifier[ACCESS_TYPES] . identifier[PROTECTED] : keyword[return] identifier[self] . identifier[protected_members] keyword[elif] identifier[access] == identifier[ACCESS_TYPES] . identifier[PRIVATE] : keyword[return] identifier[self] . identifier[private_members] identifier[all_members] =[] identifier[all_members] . identifier[extend] ( identifier[self] . identifier[public_members] ) identifier[all_members] . identifier[extend] ( identifier[self] . identifier[protected_members] ) identifier[all_members] . identifier[extend] ( identifier[self] . identifier[private_members] ) keyword[return] identifier[all_members]
def get_members(self, access=None): """ returns list of members according to access type If access equals to None, then returned list will contain all members. You should not modify the list content, otherwise different optimization data will stop work and may to give you wrong results. :param access: describes desired members :type access: :class:ACCESS_TYPES :rtype: [ members ] """ if access == ACCESS_TYPES.PUBLIC: return self.public_members # depends on [control=['if'], data=[]] elif access == ACCESS_TYPES.PROTECTED: return self.protected_members # depends on [control=['if'], data=[]] elif access == ACCESS_TYPES.PRIVATE: return self.private_members # depends on [control=['if'], data=[]] all_members = [] all_members.extend(self.public_members) all_members.extend(self.protected_members) all_members.extend(self.private_members) return all_members
def get_product(membersuite_id, client=None): """Return a Product object by ID. """ if not membersuite_id: return None client = client or get_new_client(request_session=True) object_query = "SELECT Object() FROM PRODUCT WHERE ID = '{}'".format( membersuite_id) result = client.execute_object_query(object_query) msql_result = result["body"]["ExecuteMSQLResult"] if msql_result["Success"]: membersuite_object_data = (msql_result["ResultValue"] ["SingleObject"]) else: raise ExecuteMSQLError(result=result) return Product(membersuite_object_data=membersuite_object_data)
def function[get_product, parameter[membersuite_id, client]]: constant[Return a Product object by ID. ] if <ast.UnaryOp object at 0x7da18f00c6a0> begin[:] return[constant[None]] variable[client] assign[=] <ast.BoolOp object at 0x7da20e954d90> variable[object_query] assign[=] call[constant[SELECT Object() FROM PRODUCT WHERE ID = '{}'].format, parameter[name[membersuite_id]]] variable[result] assign[=] call[name[client].execute_object_query, parameter[name[object_query]]] variable[msql_result] assign[=] call[call[name[result]][constant[body]]][constant[ExecuteMSQLResult]] if call[name[msql_result]][constant[Success]] begin[:] variable[membersuite_object_data] assign[=] call[call[name[msql_result]][constant[ResultValue]]][constant[SingleObject]] return[call[name[Product], parameter[]]]
keyword[def] identifier[get_product] ( identifier[membersuite_id] , identifier[client] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[membersuite_id] : keyword[return] keyword[None] identifier[client] = identifier[client] keyword[or] identifier[get_new_client] ( identifier[request_session] = keyword[True] ) identifier[object_query] = literal[string] . identifier[format] ( identifier[membersuite_id] ) identifier[result] = identifier[client] . identifier[execute_object_query] ( identifier[object_query] ) identifier[msql_result] = identifier[result] [ literal[string] ][ literal[string] ] keyword[if] identifier[msql_result] [ literal[string] ]: identifier[membersuite_object_data] =( identifier[msql_result] [ literal[string] ] [ literal[string] ]) keyword[else] : keyword[raise] identifier[ExecuteMSQLError] ( identifier[result] = identifier[result] ) keyword[return] identifier[Product] ( identifier[membersuite_object_data] = identifier[membersuite_object_data] )
def get_product(membersuite_id, client=None): """Return a Product object by ID. """ if not membersuite_id: return None # depends on [control=['if'], data=[]] client = client or get_new_client(request_session=True) object_query = "SELECT Object() FROM PRODUCT WHERE ID = '{}'".format(membersuite_id) result = client.execute_object_query(object_query) msql_result = result['body']['ExecuteMSQLResult'] if msql_result['Success']: membersuite_object_data = msql_result['ResultValue']['SingleObject'] # depends on [control=['if'], data=[]] else: raise ExecuteMSQLError(result=result) return Product(membersuite_object_data=membersuite_object_data)
def change_path(self, path): """ Return a new LuminosoClient for a subpath of this one. For example, you might want to start with a LuminosoClient for `https://analytics.luminoso.com/api/v4/`, then get a new one for `https://analytics.luminoso.com/api/v4/projects/myaccount/myprojectid`. You accomplish that with the following call: newclient = client.change_path('projects/myaccount/myproject_id') If you start the path with `/`, it will start from the root_url instead of the current url: project_area = newclient.change_path('/projects/myaccount') The advantage of using `.change_path` is that you will not need to re-authenticate like you would if you ran `.connect` again. You can use `.change_path` to split off as many sub-clients as you want, and you don't have to stop using the old one just because you got a new one with `.change_path`. """ if path.startswith('/'): url = self.root_url + path else: url = self.url + path return self.__class__(self.session, url)
def function[change_path, parameter[self, path]]: constant[ Return a new LuminosoClient for a subpath of this one. For example, you might want to start with a LuminosoClient for `https://analytics.luminoso.com/api/v4/`, then get a new one for `https://analytics.luminoso.com/api/v4/projects/myaccount/myprojectid`. You accomplish that with the following call: newclient = client.change_path('projects/myaccount/myproject_id') If you start the path with `/`, it will start from the root_url instead of the current url: project_area = newclient.change_path('/projects/myaccount') The advantage of using `.change_path` is that you will not need to re-authenticate like you would if you ran `.connect` again. You can use `.change_path` to split off as many sub-clients as you want, and you don't have to stop using the old one just because you got a new one with `.change_path`. ] if call[name[path].startswith, parameter[constant[/]]] begin[:] variable[url] assign[=] binary_operation[name[self].root_url + name[path]] return[call[name[self].__class__, parameter[name[self].session, name[url]]]]
keyword[def] identifier[change_path] ( identifier[self] , identifier[path] ): literal[string] keyword[if] identifier[path] . identifier[startswith] ( literal[string] ): identifier[url] = identifier[self] . identifier[root_url] + identifier[path] keyword[else] : identifier[url] = identifier[self] . identifier[url] + identifier[path] keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[session] , identifier[url] )
def change_path(self, path): """ Return a new LuminosoClient for a subpath of this one. For example, you might want to start with a LuminosoClient for `https://analytics.luminoso.com/api/v4/`, then get a new one for `https://analytics.luminoso.com/api/v4/projects/myaccount/myprojectid`. You accomplish that with the following call: newclient = client.change_path('projects/myaccount/myproject_id') If you start the path with `/`, it will start from the root_url instead of the current url: project_area = newclient.change_path('/projects/myaccount') The advantage of using `.change_path` is that you will not need to re-authenticate like you would if you ran `.connect` again. You can use `.change_path` to split off as many sub-clients as you want, and you don't have to stop using the old one just because you got a new one with `.change_path`. """ if path.startswith('/'): url = self.root_url + path # depends on [control=['if'], data=[]] else: url = self.url + path return self.__class__(self.session, url)
def main(): """Run the LockingProtocol.""" args = parse_input() args.lock = True args.question = [] args.all = False args.timeout = 0 args.verbose = False args.interactive = False try: assign = assignment.load_assignment(args.config, args) msgs = messages.Messages() lock.protocol(args, assign).run(msgs) except (ex.LoadingException, ex.SerializeException) as e: log.warning('Assignment could not instantiate', exc_info=True) print('Error: ' + str(e).strip()) exit(1) except (KeyboardInterrupt, EOFError): log.info('Quitting...') else: assign.dump_tests()
def function[main, parameter[]]: constant[Run the LockingProtocol.] variable[args] assign[=] call[name[parse_input], parameter[]] name[args].lock assign[=] constant[True] name[args].question assign[=] list[[]] name[args].all assign[=] constant[False] name[args].timeout assign[=] constant[0] name[args].verbose assign[=] constant[False] name[args].interactive assign[=] constant[False] <ast.Try object at 0x7da204963c40>
keyword[def] identifier[main] (): literal[string] identifier[args] = identifier[parse_input] () identifier[args] . identifier[lock] = keyword[True] identifier[args] . identifier[question] =[] identifier[args] . identifier[all] = keyword[False] identifier[args] . identifier[timeout] = literal[int] identifier[args] . identifier[verbose] = keyword[False] identifier[args] . identifier[interactive] = keyword[False] keyword[try] : identifier[assign] = identifier[assignment] . identifier[load_assignment] ( identifier[args] . identifier[config] , identifier[args] ) identifier[msgs] = identifier[messages] . identifier[Messages] () identifier[lock] . identifier[protocol] ( identifier[args] , identifier[assign] ). identifier[run] ( identifier[msgs] ) keyword[except] ( identifier[ex] . identifier[LoadingException] , identifier[ex] . identifier[SerializeException] ) keyword[as] identifier[e] : identifier[log] . identifier[warning] ( literal[string] , identifier[exc_info] = keyword[True] ) identifier[print] ( literal[string] + identifier[str] ( identifier[e] ). identifier[strip] ()) identifier[exit] ( literal[int] ) keyword[except] ( identifier[KeyboardInterrupt] , identifier[EOFError] ): identifier[log] . identifier[info] ( literal[string] ) keyword[else] : identifier[assign] . identifier[dump_tests] ()
def main(): """Run the LockingProtocol.""" args = parse_input() args.lock = True args.question = [] args.all = False args.timeout = 0 args.verbose = False args.interactive = False try: assign = assignment.load_assignment(args.config, args) msgs = messages.Messages() lock.protocol(args, assign).run(msgs) # depends on [control=['try'], data=[]] except (ex.LoadingException, ex.SerializeException) as e: log.warning('Assignment could not instantiate', exc_info=True) print('Error: ' + str(e).strip()) exit(1) # depends on [control=['except'], data=['e']] except (KeyboardInterrupt, EOFError): log.info('Quitting...') # depends on [control=['except'], data=[]] else: assign.dump_tests()
def iterable(value, allow_empty = False, forbid_literals = (str, bytes), minimum_length = None, maximum_length = None, **kwargs): """Validate that ``value`` is a valid iterable. :param value: The value to validate. :param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value`` is empty. If ``False``, raises a :class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if ``value`` is empty. Defaults to ``False``. :type allow_empty: :class:`bool <python:bool>` :param forbid_literals: A collection of literals that will be considered invalid even if they are (actually) iterable. Defaults to :class:`str <python:str>` and :class:`bytes <python:bytes>`. :type forbid_literals: iterable :param minimum_length: If supplied, indicates the minimum number of members needed to be valid. :type minimum_length: :class:`int <python:int>` :param maximum_length: If supplied, indicates the minimum number of members needed to be valid. :type maximum_length: :class:`int <python:int>` :returns: ``value`` / :obj:`None <python:None>` :rtype: iterable / :obj:`None <python:None>` :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False`` :raises NotAnIterableError: if ``value`` is not a valid iterable or :obj:`None <python:None>` :raises MinimumLengthError: if ``minimum_length`` is supplied and the length of ``value`` is less than ``minimum_length`` and ``whitespace_padding`` is ``False`` :raises MaximumLengthError: if ``maximum_length`` is supplied and the length of ``value`` is more than the ``maximum_length`` """ if not value and not allow_empty: raise errors.EmptyValueError('value (%s) was empty' % value) elif value is None: return None minimum_length = integer(minimum_length, allow_empty = True, force_run = True) # pylint: disable=E1123 maximum_length = integer(maximum_length, allow_empty = True, force_run = True) # pylint: disable=E1123 if isinstance(value, forbid_literals) or not hasattr(value, '__iter__'): raise errors.NotAnIterableError('value type (%s) not iterable' % type(value)) if value and minimum_length is not None and len(value) < minimum_length: raise errors.MinimumLengthError( 'value has fewer items than the minimum length %s' % minimum_length ) if value and maximum_length is not None and len(value) > maximum_length: raise errors.MaximumLengthError( 'value has more items than the maximum length %s' % maximum_length ) return value
def function[iterable, parameter[value, allow_empty, forbid_literals, minimum_length, maximum_length]]: constant[Validate that ``value`` is a valid iterable. :param value: The value to validate. :param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value`` is empty. If ``False``, raises a :class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if ``value`` is empty. Defaults to ``False``. :type allow_empty: :class:`bool <python:bool>` :param forbid_literals: A collection of literals that will be considered invalid even if they are (actually) iterable. Defaults to :class:`str <python:str>` and :class:`bytes <python:bytes>`. :type forbid_literals: iterable :param minimum_length: If supplied, indicates the minimum number of members needed to be valid. :type minimum_length: :class:`int <python:int>` :param maximum_length: If supplied, indicates the minimum number of members needed to be valid. :type maximum_length: :class:`int <python:int>` :returns: ``value`` / :obj:`None <python:None>` :rtype: iterable / :obj:`None <python:None>` :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False`` :raises NotAnIterableError: if ``value`` is not a valid iterable or :obj:`None <python:None>` :raises MinimumLengthError: if ``minimum_length`` is supplied and the length of ``value`` is less than ``minimum_length`` and ``whitespace_padding`` is ``False`` :raises MaximumLengthError: if ``maximum_length`` is supplied and the length of ``value`` is more than the ``maximum_length`` ] if <ast.BoolOp object at 0x7da1b06fc2b0> begin[:] <ast.Raise object at 0x7da1b06ff280> variable[minimum_length] assign[=] call[name[integer], parameter[name[minimum_length]]] variable[maximum_length] assign[=] call[name[integer], parameter[name[maximum_length]]] if <ast.BoolOp object at 0x7da1b06ffb50> begin[:] <ast.Raise object at 0x7da1b06ffc70> if <ast.BoolOp object at 0x7da1b06fcb80> begin[:] <ast.Raise object at 0x7da1b06fdd80> if <ast.BoolOp object at 0x7da1b06fd450> begin[:] <ast.Raise object at 0x7da1b06fc760> return[name[value]]
keyword[def] identifier[iterable] ( identifier[value] , identifier[allow_empty] = keyword[False] , identifier[forbid_literals] =( identifier[str] , identifier[bytes] ), identifier[minimum_length] = keyword[None] , identifier[maximum_length] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[value] keyword[and] keyword[not] identifier[allow_empty] : keyword[raise] identifier[errors] . identifier[EmptyValueError] ( literal[string] % identifier[value] ) keyword[elif] identifier[value] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[minimum_length] = identifier[integer] ( identifier[minimum_length] , identifier[allow_empty] = keyword[True] , identifier[force_run] = keyword[True] ) identifier[maximum_length] = identifier[integer] ( identifier[maximum_length] , identifier[allow_empty] = keyword[True] , identifier[force_run] = keyword[True] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[forbid_literals] ) keyword[or] keyword[not] identifier[hasattr] ( identifier[value] , literal[string] ): keyword[raise] identifier[errors] . identifier[NotAnIterableError] ( literal[string] % identifier[type] ( identifier[value] )) keyword[if] identifier[value] keyword[and] identifier[minimum_length] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[value] )< identifier[minimum_length] : keyword[raise] identifier[errors] . identifier[MinimumLengthError] ( literal[string] % identifier[minimum_length] ) keyword[if] identifier[value] keyword[and] identifier[maximum_length] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[value] )> identifier[maximum_length] : keyword[raise] identifier[errors] . identifier[MaximumLengthError] ( literal[string] % identifier[maximum_length] ) keyword[return] identifier[value]
def iterable(value, allow_empty=False, forbid_literals=(str, bytes), minimum_length=None, maximum_length=None, **kwargs): """Validate that ``value`` is a valid iterable. :param value: The value to validate. :param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value`` is empty. If ``False``, raises a :class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if ``value`` is empty. Defaults to ``False``. :type allow_empty: :class:`bool <python:bool>` :param forbid_literals: A collection of literals that will be considered invalid even if they are (actually) iterable. Defaults to :class:`str <python:str>` and :class:`bytes <python:bytes>`. :type forbid_literals: iterable :param minimum_length: If supplied, indicates the minimum number of members needed to be valid. :type minimum_length: :class:`int <python:int>` :param maximum_length: If supplied, indicates the minimum number of members needed to be valid. :type maximum_length: :class:`int <python:int>` :returns: ``value`` / :obj:`None <python:None>` :rtype: iterable / :obj:`None <python:None>` :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False`` :raises NotAnIterableError: if ``value`` is not a valid iterable or :obj:`None <python:None>` :raises MinimumLengthError: if ``minimum_length`` is supplied and the length of ``value`` is less than ``minimum_length`` and ``whitespace_padding`` is ``False`` :raises MaximumLengthError: if ``maximum_length`` is supplied and the length of ``value`` is more than the ``maximum_length`` """ if not value and (not allow_empty): raise errors.EmptyValueError('value (%s) was empty' % value) # depends on [control=['if'], data=[]] elif value is None: return None # depends on [control=['if'], data=[]] minimum_length = integer(minimum_length, allow_empty=True, force_run=True) # pylint: disable=E1123 maximum_length = integer(maximum_length, allow_empty=True, force_run=True) # pylint: disable=E1123 if isinstance(value, forbid_literals) or not hasattr(value, '__iter__'): raise errors.NotAnIterableError('value type (%s) not iterable' % type(value)) # depends on [control=['if'], data=[]] if value and minimum_length is not None and (len(value) < minimum_length): raise errors.MinimumLengthError('value has fewer items than the minimum length %s' % minimum_length) # depends on [control=['if'], data=[]] if value and maximum_length is not None and (len(value) > maximum_length): raise errors.MaximumLengthError('value has more items than the maximum length %s' % maximum_length) # depends on [control=['if'], data=[]] return value
def _create_and_rotate_coordinate_arrays(self, x, y, orientation): """ Create pattern matrices from x and y vectors, and rotate them to the specified orientation. """ # Using this two-liner requires that x increase from left to # right and y decrease from left to right; I don't think it # can be rewritten in so little code otherwise - but please # prove me wrong. pattern_y = np.subtract.outer(np.cos(orientation)*y, np.sin(orientation)*x) pattern_x = np.add.outer(np.sin(orientation)*y, np.cos(orientation)*x) return pattern_x, pattern_y
def function[_create_and_rotate_coordinate_arrays, parameter[self, x, y, orientation]]: constant[ Create pattern matrices from x and y vectors, and rotate them to the specified orientation. ] variable[pattern_y] assign[=] call[name[np].subtract.outer, parameter[binary_operation[call[name[np].cos, parameter[name[orientation]]] * name[y]], binary_operation[call[name[np].sin, parameter[name[orientation]]] * name[x]]]] variable[pattern_x] assign[=] call[name[np].add.outer, parameter[binary_operation[call[name[np].sin, parameter[name[orientation]]] * name[y]], binary_operation[call[name[np].cos, parameter[name[orientation]]] * name[x]]]] return[tuple[[<ast.Name object at 0x7da1b253e230>, <ast.Name object at 0x7da1b253e500>]]]
keyword[def] identifier[_create_and_rotate_coordinate_arrays] ( identifier[self] , identifier[x] , identifier[y] , identifier[orientation] ): literal[string] identifier[pattern_y] = identifier[np] . identifier[subtract] . identifier[outer] ( identifier[np] . identifier[cos] ( identifier[orientation] )* identifier[y] , identifier[np] . identifier[sin] ( identifier[orientation] )* identifier[x] ) identifier[pattern_x] = identifier[np] . identifier[add] . identifier[outer] ( identifier[np] . identifier[sin] ( identifier[orientation] )* identifier[y] , identifier[np] . identifier[cos] ( identifier[orientation] )* identifier[x] ) keyword[return] identifier[pattern_x] , identifier[pattern_y]
def _create_and_rotate_coordinate_arrays(self, x, y, orientation): """ Create pattern matrices from x and y vectors, and rotate them to the specified orientation. """ # Using this two-liner requires that x increase from left to # right and y decrease from left to right; I don't think it # can be rewritten in so little code otherwise - but please # prove me wrong. pattern_y = np.subtract.outer(np.cos(orientation) * y, np.sin(orientation) * x) pattern_x = np.add.outer(np.sin(orientation) * y, np.cos(orientation) * x) return (pattern_x, pattern_y)
def backup(self): """ Backup the developer state of `output/` in order to make it restorable and portable for user. """ # We set the current output directory path. output_path = self.base + PyFunceble.OUTPUTS["parent_directory"] # We initiate the structure base. result = {PyFunceble.OUTPUTS["parent_directory"]: {}} for root, _, files in PyFunceble.walk(output_path): # We loop through the current output directory structure. # We get the currently read directory name. directories = Directory(root.split(output_path)[1]).fix_path() # We initiate a local variable which will get the structure of the subdirectory. local_result = result[PyFunceble.OUTPUTS["parent_directory"]] for file in files: # We loop through the list of files. # We construct the file path. file_path = root + PyFunceble.directory_separator + file # We get the hash of the file. file_hash = Hash(file_path, "sha512", True).get() # We convert the file content to a list. lines_in_list = [line.rstrip("\n") for line in open(file_path)] # We convert the file content into a more flat format. # We use `@@@` as glue and implicitly replacement for `\n`. formatted_content = "@@@".join(lines_in_list) # We update the local result (and implicitly the global result) # with the files and directory informations/structure. local_result = local_result.setdefault( directories, {file: {"sha512": file_hash, "content": formatted_content}}, ) # We finally save the directory structure into the production file. Dict(result).to_json(self.base + "dir_structure_production.json")
def function[backup, parameter[self]]: constant[ Backup the developer state of `output/` in order to make it restorable and portable for user. ] variable[output_path] assign[=] binary_operation[name[self].base + call[name[PyFunceble].OUTPUTS][constant[parent_directory]]] variable[result] assign[=] dictionary[[<ast.Subscript object at 0x7da20e954310>], [<ast.Dict object at 0x7da20e955ae0>]] for taget[tuple[[<ast.Name object at 0x7da20e9541c0>, <ast.Name object at 0x7da20e956f80>, <ast.Name object at 0x7da20e956620>]]] in starred[call[name[PyFunceble].walk, parameter[name[output_path]]]] begin[:] variable[directories] assign[=] call[call[name[Directory], parameter[call[call[name[root].split, parameter[name[output_path]]]][constant[1]]]].fix_path, parameter[]] variable[local_result] assign[=] call[name[result]][call[name[PyFunceble].OUTPUTS][constant[parent_directory]]] for taget[name[file]] in starred[name[files]] begin[:] variable[file_path] assign[=] binary_operation[binary_operation[name[root] + name[PyFunceble].directory_separator] + name[file]] variable[file_hash] assign[=] call[call[name[Hash], parameter[name[file_path], constant[sha512], constant[True]]].get, parameter[]] variable[lines_in_list] assign[=] <ast.ListComp object at 0x7da20e955ab0> variable[formatted_content] assign[=] call[constant[@@@].join, parameter[name[lines_in_list]]] variable[local_result] assign[=] call[name[local_result].setdefault, parameter[name[directories], dictionary[[<ast.Name object at 0x7da20e955db0>], [<ast.Dict object at 0x7da20e955b40>]]]] call[call[name[Dict], parameter[name[result]]].to_json, parameter[binary_operation[name[self].base + constant[dir_structure_production.json]]]]
keyword[def] identifier[backup] ( identifier[self] ): literal[string] identifier[output_path] = identifier[self] . identifier[base] + identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ] identifier[result] ={ identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ]:{}} keyword[for] identifier[root] , identifier[_] , identifier[files] keyword[in] identifier[PyFunceble] . identifier[walk] ( identifier[output_path] ): identifier[directories] = identifier[Directory] ( identifier[root] . identifier[split] ( identifier[output_path] )[ literal[int] ]). identifier[fix_path] () identifier[local_result] = identifier[result] [ identifier[PyFunceble] . identifier[OUTPUTS] [ literal[string] ]] keyword[for] identifier[file] keyword[in] identifier[files] : identifier[file_path] = identifier[root] + identifier[PyFunceble] . identifier[directory_separator] + identifier[file] identifier[file_hash] = identifier[Hash] ( identifier[file_path] , literal[string] , keyword[True] ). identifier[get] () identifier[lines_in_list] =[ identifier[line] . identifier[rstrip] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[file_path] )] identifier[formatted_content] = literal[string] . identifier[join] ( identifier[lines_in_list] ) identifier[local_result] = identifier[local_result] . identifier[setdefault] ( identifier[directories] , { identifier[file] :{ literal[string] : identifier[file_hash] , literal[string] : identifier[formatted_content] }}, ) identifier[Dict] ( identifier[result] ). identifier[to_json] ( identifier[self] . identifier[base] + literal[string] )
def backup(self): """ Backup the developer state of `output/` in order to make it restorable and portable for user. """ # We set the current output directory path. output_path = self.base + PyFunceble.OUTPUTS['parent_directory'] # We initiate the structure base. result = {PyFunceble.OUTPUTS['parent_directory']: {}} for (root, _, files) in PyFunceble.walk(output_path): # We loop through the current output directory structure. # We get the currently read directory name. directories = Directory(root.split(output_path)[1]).fix_path() # We initiate a local variable which will get the structure of the subdirectory. local_result = result[PyFunceble.OUTPUTS['parent_directory']] for file in files: # We loop through the list of files. # We construct the file path. file_path = root + PyFunceble.directory_separator + file # We get the hash of the file. file_hash = Hash(file_path, 'sha512', True).get() # We convert the file content to a list. lines_in_list = [line.rstrip('\n') for line in open(file_path)] # We convert the file content into a more flat format. # We use `@@@` as glue and implicitly replacement for `\n`. formatted_content = '@@@'.join(lines_in_list) # We update the local result (and implicitly the global result) # with the files and directory informations/structure. local_result = local_result.setdefault(directories, {file: {'sha512': file_hash, 'content': formatted_content}}) # depends on [control=['for'], data=['file']] # depends on [control=['for'], data=[]] # We finally save the directory structure into the production file. Dict(result).to_json(self.base + 'dir_structure_production.json')
def authenticate(self, request): """ Authenticate a user from a token form field Errors thrown here will be swallowed by django-rest-framework, and it expects us to return None if authentication fails. """ try: key = request.data['token'] except KeyError: return try: token = AuthToken.objects.get(key=key) except AuthToken.DoesNotExist: return return (token.user, token)
def function[authenticate, parameter[self, request]]: constant[ Authenticate a user from a token form field Errors thrown here will be swallowed by django-rest-framework, and it expects us to return None if authentication fails. ] <ast.Try object at 0x7da1b03493c0> <ast.Try object at 0x7da1b0349c00> return[tuple[[<ast.Attribute object at 0x7da1b025e0e0>, <ast.Name object at 0x7da1b025d690>]]]
keyword[def] identifier[authenticate] ( identifier[self] , identifier[request] ): literal[string] keyword[try] : identifier[key] = identifier[request] . identifier[data] [ literal[string] ] keyword[except] identifier[KeyError] : keyword[return] keyword[try] : identifier[token] = identifier[AuthToken] . identifier[objects] . identifier[get] ( identifier[key] = identifier[key] ) keyword[except] identifier[AuthToken] . identifier[DoesNotExist] : keyword[return] keyword[return] ( identifier[token] . identifier[user] , identifier[token] )
def authenticate(self, request): """ Authenticate a user from a token form field Errors thrown here will be swallowed by django-rest-framework, and it expects us to return None if authentication fails. """ try: key = request.data['token'] # depends on [control=['try'], data=[]] except KeyError: return # depends on [control=['except'], data=[]] try: token = AuthToken.objects.get(key=key) # depends on [control=['try'], data=[]] except AuthToken.DoesNotExist: return # depends on [control=['except'], data=[]] return (token.user, token)
def file_uptodate(fname, cmp_fname): """Check if a file exists, is non-empty and is more recent than cmp_fname. """ try: return (file_exists(fname) and file_exists(cmp_fname) and getmtime(fname) >= getmtime(cmp_fname)) except OSError: return False
def function[file_uptodate, parameter[fname, cmp_fname]]: constant[Check if a file exists, is non-empty and is more recent than cmp_fname. ] <ast.Try object at 0x7da20c6a82b0>
keyword[def] identifier[file_uptodate] ( identifier[fname] , identifier[cmp_fname] ): literal[string] keyword[try] : keyword[return] ( identifier[file_exists] ( identifier[fname] ) keyword[and] identifier[file_exists] ( identifier[cmp_fname] ) keyword[and] identifier[getmtime] ( identifier[fname] )>= identifier[getmtime] ( identifier[cmp_fname] )) keyword[except] identifier[OSError] : keyword[return] keyword[False]
def file_uptodate(fname, cmp_fname): """Check if a file exists, is non-empty and is more recent than cmp_fname. """ try: return file_exists(fname) and file_exists(cmp_fname) and (getmtime(fname) >= getmtime(cmp_fname)) # depends on [control=['try'], data=[]] except OSError: return False # depends on [control=['except'], data=[]]
def find_stop(self, query, direction=""): """ Search the list of stops, optionally in a direction (inbound or outbound), for the term passed to the function. Case insensitive, searches both the stop name and ID. Yields a generator. Defaults to both directions. """ _directions = ["inbound", "outbound", ""] direction = direction.lower() if direction == "inbound": stops = self.inbound_stops elif direction == "outbound": stops = self.outbound_stops else: stops = self.inbound_stops + self.outbound_stops found = [] for stop in stops: q = str(query).lower() if q in stop.name.lower() or q in str(stop.id).lower(): found.append(stop) return found
def function[find_stop, parameter[self, query, direction]]: constant[ Search the list of stops, optionally in a direction (inbound or outbound), for the term passed to the function. Case insensitive, searches both the stop name and ID. Yields a generator. Defaults to both directions. ] variable[_directions] assign[=] list[[<ast.Constant object at 0x7da1b2361270>, <ast.Constant object at 0x7da1b2362aa0>, <ast.Constant object at 0x7da1b23603a0>]] variable[direction] assign[=] call[name[direction].lower, parameter[]] if compare[name[direction] equal[==] constant[inbound]] begin[:] variable[stops] assign[=] name[self].inbound_stops variable[found] assign[=] list[[]] for taget[name[stop]] in starred[name[stops]] begin[:] variable[q] assign[=] call[call[name[str], parameter[name[query]]].lower, parameter[]] if <ast.BoolOp object at 0x7da1b2360400> begin[:] call[name[found].append, parameter[name[stop]]] return[name[found]]
keyword[def] identifier[find_stop] ( identifier[self] , identifier[query] , identifier[direction] = literal[string] ): literal[string] identifier[_directions] =[ literal[string] , literal[string] , literal[string] ] identifier[direction] = identifier[direction] . identifier[lower] () keyword[if] identifier[direction] == literal[string] : identifier[stops] = identifier[self] . identifier[inbound_stops] keyword[elif] identifier[direction] == literal[string] : identifier[stops] = identifier[self] . identifier[outbound_stops] keyword[else] : identifier[stops] = identifier[self] . identifier[inbound_stops] + identifier[self] . identifier[outbound_stops] identifier[found] =[] keyword[for] identifier[stop] keyword[in] identifier[stops] : identifier[q] = identifier[str] ( identifier[query] ). identifier[lower] () keyword[if] identifier[q] keyword[in] identifier[stop] . identifier[name] . identifier[lower] () keyword[or] identifier[q] keyword[in] identifier[str] ( identifier[stop] . identifier[id] ). identifier[lower] (): identifier[found] . identifier[append] ( identifier[stop] ) keyword[return] identifier[found]
def find_stop(self, query, direction=''): """ Search the list of stops, optionally in a direction (inbound or outbound), for the term passed to the function. Case insensitive, searches both the stop name and ID. Yields a generator. Defaults to both directions. """ _directions = ['inbound', 'outbound', ''] direction = direction.lower() if direction == 'inbound': stops = self.inbound_stops # depends on [control=['if'], data=[]] elif direction == 'outbound': stops = self.outbound_stops # depends on [control=['if'], data=[]] else: stops = self.inbound_stops + self.outbound_stops found = [] for stop in stops: q = str(query).lower() if q in stop.name.lower() or q in str(stop.id).lower(): found.append(stop) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stop']] return found
def read_namespaced_secret(self, name, namespace, **kwargs): # noqa: E501 """read_namespaced_secret # noqa: E501 read the specified Secret # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_secret(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Secret (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. :param bool export: Should this value be exported. Export strips fields that a user can not specify. :return: V1Secret If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_secret_with_http_info(name, namespace, **kwargs) # noqa: E501 else: (data) = self.read_namespaced_secret_with_http_info(name, namespace, **kwargs) # noqa: E501 return data
def function[read_namespaced_secret, parameter[self, name, namespace]]: constant[read_namespaced_secret # noqa: E501 read the specified Secret # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_secret(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Secret (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. :param bool export: Should this value be exported. Export strips fields that a user can not specify. :return: V1Secret If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].read_namespaced_secret_with_http_info, parameter[name[name], name[namespace]]]]
keyword[def] identifier[read_namespaced_secret] ( identifier[self] , identifier[name] , identifier[namespace] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[read_namespaced_secret_with_http_info] ( identifier[name] , identifier[namespace] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[read_namespaced_secret_with_http_info] ( identifier[name] , identifier[namespace] ,** identifier[kwargs] ) keyword[return] identifier[data]
def read_namespaced_secret(self, name, namespace, **kwargs): # noqa: E501 "read_namespaced_secret # noqa: E501\n\n read the specified Secret # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_secret(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Secret (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.\n :param bool export: Should this value be exported. Export strips fields that a user can not specify.\n :return: V1Secret\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_secret_with_http_info(name, namespace, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.read_namespaced_secret_with_http_info(name, namespace, **kwargs) # noqa: E501 return data
def folder_path_for_package(cls, package: ecore.EPackage): """Returns path to folder holding generated artifact for given element.""" parent = package.eContainer() if parent: return os.path.join(cls.folder_path_for_package(parent), package.name) return package.name
def function[folder_path_for_package, parameter[cls, package]]: constant[Returns path to folder holding generated artifact for given element.] variable[parent] assign[=] call[name[package].eContainer, parameter[]] if name[parent] begin[:] return[call[name[os].path.join, parameter[call[name[cls].folder_path_for_package, parameter[name[parent]]], name[package].name]]] return[name[package].name]
keyword[def] identifier[folder_path_for_package] ( identifier[cls] , identifier[package] : identifier[ecore] . identifier[EPackage] ): literal[string] identifier[parent] = identifier[package] . identifier[eContainer] () keyword[if] identifier[parent] : keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[cls] . identifier[folder_path_for_package] ( identifier[parent] ), identifier[package] . identifier[name] ) keyword[return] identifier[package] . identifier[name]
def folder_path_for_package(cls, package: ecore.EPackage): """Returns path to folder holding generated artifact for given element.""" parent = package.eContainer() if parent: return os.path.join(cls.folder_path_for_package(parent), package.name) # depends on [control=['if'], data=[]] return package.name
def tags(self, name=None) -> List['Tag']: """Return all tags with the given name.""" lststr = self._lststr type_to_spans = self._type_to_spans if name: if name in _tag_extensions: string = lststr[0] return [ Tag(lststr, type_to_spans, span, 'ExtensionTag') for span in type_to_spans['ExtensionTag'] if string.startswith('<' + name, span[0])] tags = [] # type: List['Tag'] else: # There is no name, add all extension tags. Before using shadow. tags = [ Tag(lststr, type_to_spans, span, 'ExtensionTag') for span in type_to_spans['ExtensionTag']] tags_append = tags.append # Get the left-most start tag, match it to right-most end tag # and so on. ss = self._span[0] shadow = self._shadow if name: # There is a name but it is not in TAG_EXTENSIONS. reversed_start_matches = reversed([m for m in regex_compile( START_TAG_PATTERN.replace( rb'{name}', rb'(?P<name>' + name.encode() + rb')') ).finditer(shadow)]) end_search = regex_compile(END_TAG_PATTERN .replace( b'{name}', name.encode())).search else: reversed_start_matches = reversed( [m for m in START_TAG_FINDITER(shadow)]) shadow_copy = shadow[:] spans = type_to_spans.setdefault('Tag', []) span_tuple_to_span_get = {(s[0], s[1]): s for s in spans}.get spans_append = spans.append for start_match in reversed_start_matches: if start_match['self_closing']: # Don't look for the end tag s, e = start_match.span() span = [ss + s, ss + e] else: # look for the end-tag if name: # the end_search is already available # noinspection PyUnboundLocalVariable end_match = end_search(shadow_copy, start_match.end()) else: # build end_search according to start tag name end_match = search( END_TAG_PATTERN.replace( b'{name}', start_match['name']), shadow_copy) if end_match: s, e = end_match.span() shadow_copy[s:e] = b'_' * (e - s) span = [ss + start_match.start(), ss + e] else: # Assume start-only tag. s, e = start_match.span() span = [ss + s, ss + e] old_span = span_tuple_to_span_get((span[0], span[1])) if old_span is None: spans_append(span) else: span = old_span tags_append(Tag(lststr, type_to_spans, span, 'Tag')) return sorted(tags, key=attrgetter('_span'))
def function[tags, parameter[self, name]]: constant[Return all tags with the given name.] variable[lststr] assign[=] name[self]._lststr variable[type_to_spans] assign[=] name[self]._type_to_spans if name[name] begin[:] if compare[name[name] in name[_tag_extensions]] begin[:] variable[string] assign[=] call[name[lststr]][constant[0]] return[<ast.ListComp object at 0x7da1b26ad540>] variable[tags] assign[=] list[[]] variable[tags_append] assign[=] name[tags].append variable[ss] assign[=] call[name[self]._span][constant[0]] variable[shadow] assign[=] name[self]._shadow if name[name] begin[:] variable[reversed_start_matches] assign[=] call[name[reversed], parameter[<ast.ListComp object at 0x7da18f720f10>]] variable[end_search] assign[=] call[name[regex_compile], parameter[call[name[END_TAG_PATTERN].replace, parameter[constant[b'{name}'], call[name[name].encode, parameter[]]]]]].search variable[shadow_copy] assign[=] call[name[shadow]][<ast.Slice object at 0x7da18f723e80>] variable[spans] assign[=] call[name[type_to_spans].setdefault, parameter[constant[Tag], list[[]]]] variable[span_tuple_to_span_get] assign[=] <ast.DictComp object at 0x7da18f721ff0>.get variable[spans_append] assign[=] name[spans].append for taget[name[start_match]] in starred[name[reversed_start_matches]] begin[:] if call[name[start_match]][constant[self_closing]] begin[:] <ast.Tuple object at 0x7da20c6e5000> assign[=] call[name[start_match].span, parameter[]] variable[span] assign[=] list[[<ast.BinOp object at 0x7da20c6e7580>, <ast.BinOp object at 0x7da20c6e7ca0>]] variable[old_span] assign[=] call[name[span_tuple_to_span_get], parameter[tuple[[<ast.Subscript object at 0x7da20c6e62f0>, <ast.Subscript object at 0x7da20c6e5120>]]]] if compare[name[old_span] is constant[None]] begin[:] call[name[spans_append], parameter[name[span]]] call[name[tags_append], parameter[call[name[Tag], parameter[name[lststr], name[type_to_spans], name[span], constant[Tag]]]]] return[call[name[sorted], parameter[name[tags]]]]
keyword[def] identifier[tags] ( identifier[self] , identifier[name] = keyword[None] )-> identifier[List] [ literal[string] ]: literal[string] identifier[lststr] = identifier[self] . identifier[_lststr] identifier[type_to_spans] = identifier[self] . identifier[_type_to_spans] keyword[if] identifier[name] : keyword[if] identifier[name] keyword[in] identifier[_tag_extensions] : identifier[string] = identifier[lststr] [ literal[int] ] keyword[return] [ identifier[Tag] ( identifier[lststr] , identifier[type_to_spans] , identifier[span] , literal[string] ) keyword[for] identifier[span] keyword[in] identifier[type_to_spans] [ literal[string] ] keyword[if] identifier[string] . identifier[startswith] ( literal[string] + identifier[name] , identifier[span] [ literal[int] ])] identifier[tags] =[] keyword[else] : identifier[tags] =[ identifier[Tag] ( identifier[lststr] , identifier[type_to_spans] , identifier[span] , literal[string] ) keyword[for] identifier[span] keyword[in] identifier[type_to_spans] [ literal[string] ]] identifier[tags_append] = identifier[tags] . identifier[append] identifier[ss] = identifier[self] . identifier[_span] [ literal[int] ] identifier[shadow] = identifier[self] . identifier[_shadow] keyword[if] identifier[name] : identifier[reversed_start_matches] = identifier[reversed] ([ identifier[m] keyword[for] identifier[m] keyword[in] identifier[regex_compile] ( identifier[START_TAG_PATTERN] . identifier[replace] ( literal[string] , literal[string] + identifier[name] . identifier[encode] ()+ literal[string] ) ). identifier[finditer] ( identifier[shadow] )]) identifier[end_search] = identifier[regex_compile] ( identifier[END_TAG_PATTERN] . identifier[replace] ( literal[string] , identifier[name] . identifier[encode] ())). identifier[search] keyword[else] : identifier[reversed_start_matches] = identifier[reversed] ( [ identifier[m] keyword[for] identifier[m] keyword[in] identifier[START_TAG_FINDITER] ( identifier[shadow] )]) identifier[shadow_copy] = identifier[shadow] [:] identifier[spans] = identifier[type_to_spans] . identifier[setdefault] ( literal[string] ,[]) identifier[span_tuple_to_span_get] ={( identifier[s] [ literal[int] ], identifier[s] [ literal[int] ]): identifier[s] keyword[for] identifier[s] keyword[in] identifier[spans] }. identifier[get] identifier[spans_append] = identifier[spans] . identifier[append] keyword[for] identifier[start_match] keyword[in] identifier[reversed_start_matches] : keyword[if] identifier[start_match] [ literal[string] ]: identifier[s] , identifier[e] = identifier[start_match] . identifier[span] () identifier[span] =[ identifier[ss] + identifier[s] , identifier[ss] + identifier[e] ] keyword[else] : keyword[if] identifier[name] : identifier[end_match] = identifier[end_search] ( identifier[shadow_copy] , identifier[start_match] . identifier[end] ()) keyword[else] : identifier[end_match] = identifier[search] ( identifier[END_TAG_PATTERN] . identifier[replace] ( literal[string] , identifier[start_match] [ literal[string] ]), identifier[shadow_copy] ) keyword[if] identifier[end_match] : identifier[s] , identifier[e] = identifier[end_match] . identifier[span] () identifier[shadow_copy] [ identifier[s] : identifier[e] ]= literal[string] *( identifier[e] - identifier[s] ) identifier[span] =[ identifier[ss] + identifier[start_match] . identifier[start] (), identifier[ss] + identifier[e] ] keyword[else] : identifier[s] , identifier[e] = identifier[start_match] . identifier[span] () identifier[span] =[ identifier[ss] + identifier[s] , identifier[ss] + identifier[e] ] identifier[old_span] = identifier[span_tuple_to_span_get] (( identifier[span] [ literal[int] ], identifier[span] [ literal[int] ])) keyword[if] identifier[old_span] keyword[is] keyword[None] : identifier[spans_append] ( identifier[span] ) keyword[else] : identifier[span] = identifier[old_span] identifier[tags_append] ( identifier[Tag] ( identifier[lststr] , identifier[type_to_spans] , identifier[span] , literal[string] )) keyword[return] identifier[sorted] ( identifier[tags] , identifier[key] = identifier[attrgetter] ( literal[string] ))
def tags(self, name=None) -> List['Tag']: """Return all tags with the given name.""" lststr = self._lststr type_to_spans = self._type_to_spans if name: if name in _tag_extensions: string = lststr[0] return [Tag(lststr, type_to_spans, span, 'ExtensionTag') for span in type_to_spans['ExtensionTag'] if string.startswith('<' + name, span[0])] # depends on [control=['if'], data=['name']] tags = [] # type: List['Tag'] # depends on [control=['if'], data=[]] else: # There is no name, add all extension tags. Before using shadow. tags = [Tag(lststr, type_to_spans, span, 'ExtensionTag') for span in type_to_spans['ExtensionTag']] tags_append = tags.append # Get the left-most start tag, match it to right-most end tag # and so on. ss = self._span[0] shadow = self._shadow if name: # There is a name but it is not in TAG_EXTENSIONS. reversed_start_matches = reversed([m for m in regex_compile(START_TAG_PATTERN.replace(b'{name}', b'(?P<name>' + name.encode() + b')')).finditer(shadow)]) end_search = regex_compile(END_TAG_PATTERN.replace(b'{name}', name.encode())).search # depends on [control=['if'], data=[]] else: reversed_start_matches = reversed([m for m in START_TAG_FINDITER(shadow)]) shadow_copy = shadow[:] spans = type_to_spans.setdefault('Tag', []) span_tuple_to_span_get = {(s[0], s[1]): s for s in spans}.get spans_append = spans.append for start_match in reversed_start_matches: if start_match['self_closing']: # Don't look for the end tag (s, e) = start_match.span() span = [ss + s, ss + e] # depends on [control=['if'], data=[]] else: # look for the end-tag if name: # the end_search is already available # noinspection PyUnboundLocalVariable end_match = end_search(shadow_copy, start_match.end()) # depends on [control=['if'], data=[]] else: # build end_search according to start tag name end_match = search(END_TAG_PATTERN.replace(b'{name}', start_match['name']), shadow_copy) if end_match: (s, e) = end_match.span() shadow_copy[s:e] = b'_' * (e - s) span = [ss + start_match.start(), ss + e] # depends on [control=['if'], data=[]] else: # Assume start-only tag. (s, e) = start_match.span() span = [ss + s, ss + e] old_span = span_tuple_to_span_get((span[0], span[1])) if old_span is None: spans_append(span) # depends on [control=['if'], data=[]] else: span = old_span tags_append(Tag(lststr, type_to_spans, span, 'Tag')) # depends on [control=['for'], data=['start_match']] return sorted(tags, key=attrgetter('_span'))
def format_image(path, options): '''Formats an image. Args: path (str): Path to the image file. options (dict): Options to apply to the image. Returns: (list) A list of PIL images. The list will always be of length 1 unless resolutions for resizing are provided in the options. ''' image = Image.open(path) image_pipeline_results = __pipeline_image(image, options) return image_pipeline_results
def function[format_image, parameter[path, options]]: constant[Formats an image. Args: path (str): Path to the image file. options (dict): Options to apply to the image. Returns: (list) A list of PIL images. The list will always be of length 1 unless resolutions for resizing are provided in the options. ] variable[image] assign[=] call[name[Image].open, parameter[name[path]]] variable[image_pipeline_results] assign[=] call[name[__pipeline_image], parameter[name[image], name[options]]] return[name[image_pipeline_results]]
keyword[def] identifier[format_image] ( identifier[path] , identifier[options] ): literal[string] identifier[image] = identifier[Image] . identifier[open] ( identifier[path] ) identifier[image_pipeline_results] = identifier[__pipeline_image] ( identifier[image] , identifier[options] ) keyword[return] identifier[image_pipeline_results]
def format_image(path, options): """Formats an image. Args: path (str): Path to the image file. options (dict): Options to apply to the image. Returns: (list) A list of PIL images. The list will always be of length 1 unless resolutions for resizing are provided in the options. """ image = Image.open(path) image_pipeline_results = __pipeline_image(image, options) return image_pipeline_results
def _update_fobj(self): """Updates fobj from GUI. Opposite of _update_gui().""" # print("PPPPPPPPPPPPPPPPPPPRINTANDO O STACK") # traceback.print_stack() emsg, flag_error = "", False fieldname = None try: self._before_update_fobj() for item in self._map: self._f.obj[item.fieldname] = item.get_value() self._after_update_fobj() except Exception as E: flag_error = True if fieldname is not None: emsg = "Field '{}': {}".format(fieldname, str(E)) else: emsg = str(E) self.add_log_error(emsg) self._flag_valid = not flag_error if not flag_error: self.status("")
def function[_update_fobj, parameter[self]]: constant[Updates fobj from GUI. Opposite of _update_gui().] <ast.Tuple object at 0x7da18f09e0e0> assign[=] tuple[[<ast.Constant object at 0x7da18f09e170>, <ast.Constant object at 0x7da18f09ee30>]] variable[fieldname] assign[=] constant[None] <ast.Try object at 0x7da18f09e0b0> name[self]._flag_valid assign[=] <ast.UnaryOp object at 0x7da20c990bb0> if <ast.UnaryOp object at 0x7da20c992c50> begin[:] call[name[self].status, parameter[constant[]]]
keyword[def] identifier[_update_fobj] ( identifier[self] ): literal[string] identifier[emsg] , identifier[flag_error] = literal[string] , keyword[False] identifier[fieldname] = keyword[None] keyword[try] : identifier[self] . identifier[_before_update_fobj] () keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_map] : identifier[self] . identifier[_f] . identifier[obj] [ identifier[item] . identifier[fieldname] ]= identifier[item] . identifier[get_value] () identifier[self] . identifier[_after_update_fobj] () keyword[except] identifier[Exception] keyword[as] identifier[E] : identifier[flag_error] = keyword[True] keyword[if] identifier[fieldname] keyword[is] keyword[not] keyword[None] : identifier[emsg] = literal[string] . identifier[format] ( identifier[fieldname] , identifier[str] ( identifier[E] )) keyword[else] : identifier[emsg] = identifier[str] ( identifier[E] ) identifier[self] . identifier[add_log_error] ( identifier[emsg] ) identifier[self] . identifier[_flag_valid] = keyword[not] identifier[flag_error] keyword[if] keyword[not] identifier[flag_error] : identifier[self] . identifier[status] ( literal[string] )
def _update_fobj(self): """Updates fobj from GUI. Opposite of _update_gui().""" # print("PPPPPPPPPPPPPPPPPPPRINTANDO O STACK") # traceback.print_stack() (emsg, flag_error) = ('', False) fieldname = None try: self._before_update_fobj() for item in self._map: self._f.obj[item.fieldname] = item.get_value() # depends on [control=['for'], data=['item']] self._after_update_fobj() # depends on [control=['try'], data=[]] except Exception as E: flag_error = True if fieldname is not None: emsg = "Field '{}': {}".format(fieldname, str(E)) # depends on [control=['if'], data=['fieldname']] else: emsg = str(E) self.add_log_error(emsg) # depends on [control=['except'], data=['E']] self._flag_valid = not flag_error if not flag_error: self.status('') # depends on [control=['if'], data=[]]
def list_markets_by_currency(self, currency): """ Helper function to see which markets exist for a currency. Endpoint: /public/getmarkets Example :: >>> Bittrex(None, None).list_markets_by_currency('LTC') ['BTC-LTC', 'ETH-LTC', 'USDT-LTC'] :param currency: String literal for the currency (ex: LTC) :type currency: str :return: List of markets that the currency appears in :rtype: list """ return [market['MarketName'] for market in self.get_markets()['result'] if market['MarketName'].lower().endswith(currency.lower())]
def function[list_markets_by_currency, parameter[self, currency]]: constant[ Helper function to see which markets exist for a currency. Endpoint: /public/getmarkets Example :: >>> Bittrex(None, None).list_markets_by_currency('LTC') ['BTC-LTC', 'ETH-LTC', 'USDT-LTC'] :param currency: String literal for the currency (ex: LTC) :type currency: str :return: List of markets that the currency appears in :rtype: list ] return[<ast.ListComp object at 0x7da18eb54340>]
keyword[def] identifier[list_markets_by_currency] ( identifier[self] , identifier[currency] ): literal[string] keyword[return] [ identifier[market] [ literal[string] ] keyword[for] identifier[market] keyword[in] identifier[self] . identifier[get_markets] ()[ literal[string] ] keyword[if] identifier[market] [ literal[string] ]. identifier[lower] (). identifier[endswith] ( identifier[currency] . identifier[lower] ())]
def list_markets_by_currency(self, currency): """ Helper function to see which markets exist for a currency. Endpoint: /public/getmarkets Example :: >>> Bittrex(None, None).list_markets_by_currency('LTC') ['BTC-LTC', 'ETH-LTC', 'USDT-LTC'] :param currency: String literal for the currency (ex: LTC) :type currency: str :return: List of markets that the currency appears in :rtype: list """ return [market['MarketName'] for market in self.get_markets()['result'] if market['MarketName'].lower().endswith(currency.lower())]
def add_intspin(self, setting): '''add a spin control''' tab = self.panel(setting.tab) default = setting.value (minv, maxv) = setting.range ctrl = wx.SpinCtrl(tab, -1, initial = default, min = minv, max = maxv) self._add_input(setting, ctrl, value=default)
def function[add_intspin, parameter[self, setting]]: constant[add a spin control] variable[tab] assign[=] call[name[self].panel, parameter[name[setting].tab]] variable[default] assign[=] name[setting].value <ast.Tuple object at 0x7da1b2347280> assign[=] name[setting].range variable[ctrl] assign[=] call[name[wx].SpinCtrl, parameter[name[tab], <ast.UnaryOp object at 0x7da20c76f5e0>]] call[name[self]._add_input, parameter[name[setting], name[ctrl]]]
keyword[def] identifier[add_intspin] ( identifier[self] , identifier[setting] ): literal[string] identifier[tab] = identifier[self] . identifier[panel] ( identifier[setting] . identifier[tab] ) identifier[default] = identifier[setting] . identifier[value] ( identifier[minv] , identifier[maxv] )= identifier[setting] . identifier[range] identifier[ctrl] = identifier[wx] . identifier[SpinCtrl] ( identifier[tab] ,- literal[int] , identifier[initial] = identifier[default] , identifier[min] = identifier[minv] , identifier[max] = identifier[maxv] ) identifier[self] . identifier[_add_input] ( identifier[setting] , identifier[ctrl] , identifier[value] = identifier[default] )
def add_intspin(self, setting): """add a spin control""" tab = self.panel(setting.tab) default = setting.value (minv, maxv) = setting.range ctrl = wx.SpinCtrl(tab, -1, initial=default, min=minv, max=maxv) self._add_input(setting, ctrl, value=default)
def delims(self, delims): """Set the delimiters for line splitting.""" expr = '[' + ''.join('\\'+ c for c in delims) + ']' self._delim_re = re.compile(expr) self._delims = delims self._delim_expr = expr
def function[delims, parameter[self, delims]]: constant[Set the delimiters for line splitting.] variable[expr] assign[=] binary_operation[binary_operation[constant[[] + call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b021f640>]]] + constant[]]] name[self]._delim_re assign[=] call[name[re].compile, parameter[name[expr]]] name[self]._delims assign[=] name[delims] name[self]._delim_expr assign[=] name[expr]
keyword[def] identifier[delims] ( identifier[self] , identifier[delims] ): literal[string] identifier[expr] = literal[string] + literal[string] . identifier[join] ( literal[string] + identifier[c] keyword[for] identifier[c] keyword[in] identifier[delims] )+ literal[string] identifier[self] . identifier[_delim_re] = identifier[re] . identifier[compile] ( identifier[expr] ) identifier[self] . identifier[_delims] = identifier[delims] identifier[self] . identifier[_delim_expr] = identifier[expr]
def delims(self, delims): """Set the delimiters for line splitting.""" expr = '[' + ''.join(('\\' + c for c in delims)) + ']' self._delim_re = re.compile(expr) self._delims = delims self._delim_expr = expr
def _find_curr_line(self, prev=False): """ Internal helper function. Find the current (or previous if prev=True) line in a log file based on the current seek position. """ curr_pos = self.filehandle.tell() # jump back 15k characters (at most) and find last newline char jump_back = min(self.filehandle.tell(), 15000) self.filehandle.seek(-jump_back, 1) buff = self.filehandle.read(jump_back) self.filehandle.seek(curr_pos, 0) if prev and self.prev_pos is not None and self.prev_pos == curr_pos: # Number of characters to show before/after the log offset error_context = 300 self.filehandle.seek(-error_context, 1) buff = self.filehandle.read(curr_pos) hr = "-" * 60 print("Fatal log parsing loop detected trying to find previous " "log line near offset %s in %s:\n\n%s\n%s\n" "<--- (current log parsing offset) \n%s\n%s\n" % (curr_pos, self.name, hr, buff[:error_context], buff[error_context:error_context + 1], hr), file=sys.stderr) raise SystemExit("Cannot parse %s with requested options" % self.filehandle.name) else: self.prev_pos = curr_pos buff = buff.decode("utf-8", "replace") newline_pos = buff.rfind('\n') if prev: newline_pos = buff[:newline_pos].rfind('\n') # move back to last newline char if newline_pos == -1: self.filehandle.seek(0) return self.next() self.filehandle.seek(newline_pos - jump_back + 1, 1) # roll forward until we found a line with a datetime try: logevent = self.next() while not logevent.datetime: logevent = self.next() return logevent except StopIteration: # reached end of file return None
def function[_find_curr_line, parameter[self, prev]]: constant[ Internal helper function. Find the current (or previous if prev=True) line in a log file based on the current seek position. ] variable[curr_pos] assign[=] call[name[self].filehandle.tell, parameter[]] variable[jump_back] assign[=] call[name[min], parameter[call[name[self].filehandle.tell, parameter[]], constant[15000]]] call[name[self].filehandle.seek, parameter[<ast.UnaryOp object at 0x7da1b1642f50>, constant[1]]] variable[buff] assign[=] call[name[self].filehandle.read, parameter[name[jump_back]]] call[name[self].filehandle.seek, parameter[name[curr_pos], constant[0]]] if <ast.BoolOp object at 0x7da1b18e4070> begin[:] variable[error_context] assign[=] constant[300] call[name[self].filehandle.seek, parameter[<ast.UnaryOp object at 0x7da1b18e4e20>, constant[1]]] variable[buff] assign[=] call[name[self].filehandle.read, parameter[name[curr_pos]]] variable[hr] assign[=] binary_operation[constant[-] * constant[60]] call[name[print], parameter[binary_operation[constant[Fatal log parsing loop detected trying to find previous log line near offset %s in %s: %s %s <--- (current log parsing offset) %s %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b16426b0>, <ast.Attribute object at 0x7da1b16402e0>, <ast.Name object at 0x7da1b1640c40>, <ast.Subscript object at 0x7da1b1643250>, <ast.Subscript object at 0x7da1b1641420>, <ast.Name object at 0x7da1b1643bb0>]]]]] <ast.Raise object at 0x7da1b16416f0> variable[buff] assign[=] call[name[buff].decode, parameter[constant[utf-8], constant[replace]]] variable[newline_pos] assign[=] call[name[buff].rfind, parameter[constant[ ]]] if name[prev] begin[:] variable[newline_pos] assign[=] call[call[name[buff]][<ast.Slice object at 0x7da1b17fb970>].rfind, parameter[constant[ ]]] if compare[name[newline_pos] equal[==] <ast.UnaryOp object at 0x7da1b17f9a80>] begin[:] call[name[self].filehandle.seek, parameter[constant[0]]] return[call[name[self].next, parameter[]]] call[name[self].filehandle.seek, parameter[binary_operation[binary_operation[name[newline_pos] - name[jump_back]] + constant[1]], constant[1]]] <ast.Try object at 0x7da1b17f89a0>
keyword[def] identifier[_find_curr_line] ( identifier[self] , identifier[prev] = keyword[False] ): literal[string] identifier[curr_pos] = identifier[self] . identifier[filehandle] . identifier[tell] () identifier[jump_back] = identifier[min] ( identifier[self] . identifier[filehandle] . identifier[tell] (), literal[int] ) identifier[self] . identifier[filehandle] . identifier[seek] (- identifier[jump_back] , literal[int] ) identifier[buff] = identifier[self] . identifier[filehandle] . identifier[read] ( identifier[jump_back] ) identifier[self] . identifier[filehandle] . identifier[seek] ( identifier[curr_pos] , literal[int] ) keyword[if] identifier[prev] keyword[and] identifier[self] . identifier[prev_pos] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[prev_pos] == identifier[curr_pos] : identifier[error_context] = literal[int] identifier[self] . identifier[filehandle] . identifier[seek] (- identifier[error_context] , literal[int] ) identifier[buff] = identifier[self] . identifier[filehandle] . identifier[read] ( identifier[curr_pos] ) identifier[hr] = literal[string] * literal[int] identifier[print] ( literal[string] literal[string] literal[string] %( identifier[curr_pos] , identifier[self] . identifier[name] , identifier[hr] , identifier[buff] [: identifier[error_context] ], identifier[buff] [ identifier[error_context] : identifier[error_context] + literal[int] ], identifier[hr] ), identifier[file] = identifier[sys] . identifier[stderr] ) keyword[raise] identifier[SystemExit] ( literal[string] % identifier[self] . identifier[filehandle] . identifier[name] ) keyword[else] : identifier[self] . identifier[prev_pos] = identifier[curr_pos] identifier[buff] = identifier[buff] . identifier[decode] ( literal[string] , literal[string] ) identifier[newline_pos] = identifier[buff] . identifier[rfind] ( literal[string] ) keyword[if] identifier[prev] : identifier[newline_pos] = identifier[buff] [: identifier[newline_pos] ]. identifier[rfind] ( literal[string] ) keyword[if] identifier[newline_pos] ==- literal[int] : identifier[self] . identifier[filehandle] . identifier[seek] ( literal[int] ) keyword[return] identifier[self] . identifier[next] () identifier[self] . identifier[filehandle] . identifier[seek] ( identifier[newline_pos] - identifier[jump_back] + literal[int] , literal[int] ) keyword[try] : identifier[logevent] = identifier[self] . identifier[next] () keyword[while] keyword[not] identifier[logevent] . identifier[datetime] : identifier[logevent] = identifier[self] . identifier[next] () keyword[return] identifier[logevent] keyword[except] identifier[StopIteration] : keyword[return] keyword[None]
def _find_curr_line(self, prev=False): """ Internal helper function. Find the current (or previous if prev=True) line in a log file based on the current seek position. """ curr_pos = self.filehandle.tell() # jump back 15k characters (at most) and find last newline char jump_back = min(self.filehandle.tell(), 15000) self.filehandle.seek(-jump_back, 1) buff = self.filehandle.read(jump_back) self.filehandle.seek(curr_pos, 0) if prev and self.prev_pos is not None and (self.prev_pos == curr_pos): # Number of characters to show before/after the log offset error_context = 300 self.filehandle.seek(-error_context, 1) buff = self.filehandle.read(curr_pos) hr = '-' * 60 print('Fatal log parsing loop detected trying to find previous log line near offset %s in %s:\n\n%s\n%s\n<--- (current log parsing offset) \n%s\n%s\n' % (curr_pos, self.name, hr, buff[:error_context], buff[error_context:error_context + 1], hr), file=sys.stderr) raise SystemExit('Cannot parse %s with requested options' % self.filehandle.name) # depends on [control=['if'], data=[]] else: self.prev_pos = curr_pos buff = buff.decode('utf-8', 'replace') newline_pos = buff.rfind('\n') if prev: newline_pos = buff[:newline_pos].rfind('\n') # depends on [control=['if'], data=[]] # move back to last newline char if newline_pos == -1: self.filehandle.seek(0) return self.next() # depends on [control=['if'], data=[]] self.filehandle.seek(newline_pos - jump_back + 1, 1) # roll forward until we found a line with a datetime try: logevent = self.next() while not logevent.datetime: logevent = self.next() # depends on [control=['while'], data=[]] return logevent # depends on [control=['try'], data=[]] except StopIteration: # reached end of file return None # depends on [control=['except'], data=[]]
def dump(*args, **kwargs): """Dump a numpy.ndarray to file stream. This works exactly like the usual `json.dump()` function, but it uses our custom serializer. """ kwargs.update(dict(cls=NumpyEncoder, sort_keys=True, indent=4, separators=(',', ': '))) return _json.dump(*args, **kwargs)
def function[dump, parameter[]]: constant[Dump a numpy.ndarray to file stream. This works exactly like the usual `json.dump()` function, but it uses our custom serializer. ] call[name[kwargs].update, parameter[call[name[dict], parameter[]]]] return[call[name[_json].dump, parameter[<ast.Starred object at 0x7da1b121bf70>]]]
keyword[def] identifier[dump] (* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[kwargs] . identifier[update] ( identifier[dict] ( identifier[cls] = identifier[NumpyEncoder] , identifier[sort_keys] = keyword[True] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] ))) keyword[return] identifier[_json] . identifier[dump] (* identifier[args] ,** identifier[kwargs] )
def dump(*args, **kwargs): """Dump a numpy.ndarray to file stream. This works exactly like the usual `json.dump()` function, but it uses our custom serializer. """ kwargs.update(dict(cls=NumpyEncoder, sort_keys=True, indent=4, separators=(',', ': '))) return _json.dump(*args, **kwargs)
def points_random_3d(count, range_x=(-10.0, 10.0), range_y=(-10.0, 10.0), range_z=(-10.0, 10.0), seed=None) -> VAO: """ Generates random positions inside a confied box. Args: count (int): Number of points to generate Keyword Args: range_x (tuple): min-max range for x axis: Example (-10.0. 10.0) range_y (tuple): min-max range for y axis: Example (-10.0. 10.0) range_z (tuple): min-max range for z axis: Example (-10.0. 10.0) seed (int): The random seed Returns: A :py:class:`demosys.opengl.vao.VAO` instance """ random.seed(seed) def gen(): for _ in range(count): yield random.uniform(*range_x) yield random.uniform(*range_y) yield random.uniform(*range_z) data = numpy.fromiter(gen(), count=count * 3, dtype=numpy.float32) vao = VAO("geometry:points_random_3d", mode=moderngl.POINTS) vao.buffer(data, '3f', ['in_position']) return vao
def function[points_random_3d, parameter[count, range_x, range_y, range_z, seed]]: constant[ Generates random positions inside a confied box. Args: count (int): Number of points to generate Keyword Args: range_x (tuple): min-max range for x axis: Example (-10.0. 10.0) range_y (tuple): min-max range for y axis: Example (-10.0. 10.0) range_z (tuple): min-max range for z axis: Example (-10.0. 10.0) seed (int): The random seed Returns: A :py:class:`demosys.opengl.vao.VAO` instance ] call[name[random].seed, parameter[name[seed]]] def function[gen, parameter[]]: for taget[name[_]] in starred[call[name[range], parameter[name[count]]]] begin[:] <ast.Yield object at 0x7da2046238e0> <ast.Yield object at 0x7da204620b80> <ast.Yield object at 0x7da2046223e0> variable[data] assign[=] call[name[numpy].fromiter, parameter[call[name[gen], parameter[]]]] variable[vao] assign[=] call[name[VAO], parameter[constant[geometry:points_random_3d]]] call[name[vao].buffer, parameter[name[data], constant[3f], list[[<ast.Constant object at 0x7da2046236d0>]]]] return[name[vao]]
keyword[def] identifier[points_random_3d] ( identifier[count] , identifier[range_x] =(- literal[int] , literal[int] ), identifier[range_y] =(- literal[int] , literal[int] ), identifier[range_z] =(- literal[int] , literal[int] ), identifier[seed] = keyword[None] )-> identifier[VAO] : literal[string] identifier[random] . identifier[seed] ( identifier[seed] ) keyword[def] identifier[gen] (): keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[count] ): keyword[yield] identifier[random] . identifier[uniform] (* identifier[range_x] ) keyword[yield] identifier[random] . identifier[uniform] (* identifier[range_y] ) keyword[yield] identifier[random] . identifier[uniform] (* identifier[range_z] ) identifier[data] = identifier[numpy] . identifier[fromiter] ( identifier[gen] (), identifier[count] = identifier[count] * literal[int] , identifier[dtype] = identifier[numpy] . identifier[float32] ) identifier[vao] = identifier[VAO] ( literal[string] , identifier[mode] = identifier[moderngl] . identifier[POINTS] ) identifier[vao] . identifier[buffer] ( identifier[data] , literal[string] ,[ literal[string] ]) keyword[return] identifier[vao]
def points_random_3d(count, range_x=(-10.0, 10.0), range_y=(-10.0, 10.0), range_z=(-10.0, 10.0), seed=None) -> VAO: """ Generates random positions inside a confied box. Args: count (int): Number of points to generate Keyword Args: range_x (tuple): min-max range for x axis: Example (-10.0. 10.0) range_y (tuple): min-max range for y axis: Example (-10.0. 10.0) range_z (tuple): min-max range for z axis: Example (-10.0. 10.0) seed (int): The random seed Returns: A :py:class:`demosys.opengl.vao.VAO` instance """ random.seed(seed) def gen(): for _ in range(count): yield random.uniform(*range_x) yield random.uniform(*range_y) yield random.uniform(*range_z) # depends on [control=['for'], data=[]] data = numpy.fromiter(gen(), count=count * 3, dtype=numpy.float32) vao = VAO('geometry:points_random_3d', mode=moderngl.POINTS) vao.buffer(data, '3f', ['in_position']) return vao
def mmGetMetricFromTrace(self, trace): """ Convenience method to compute a metric over an indices trace, excluding resets. @param (IndicesTrace) Trace of indices @return (Metric) Metric over trace excluding resets """ return Metric.createFromTrace(trace.makeCountsTrace(), excludeResets=self.mmGetTraceResets())
def function[mmGetMetricFromTrace, parameter[self, trace]]: constant[ Convenience method to compute a metric over an indices trace, excluding resets. @param (IndicesTrace) Trace of indices @return (Metric) Metric over trace excluding resets ] return[call[name[Metric].createFromTrace, parameter[call[name[trace].makeCountsTrace, parameter[]]]]]
keyword[def] identifier[mmGetMetricFromTrace] ( identifier[self] , identifier[trace] ): literal[string] keyword[return] identifier[Metric] . identifier[createFromTrace] ( identifier[trace] . identifier[makeCountsTrace] (), identifier[excludeResets] = identifier[self] . identifier[mmGetTraceResets] ())
def mmGetMetricFromTrace(self, trace): """ Convenience method to compute a metric over an indices trace, excluding resets. @param (IndicesTrace) Trace of indices @return (Metric) Metric over trace excluding resets """ return Metric.createFromTrace(trace.makeCountsTrace(), excludeResets=self.mmGetTraceResets())
def remove_codeblock_syntax_sentinals(code_text): r""" Removes template comments and vim sentinals Args: code_text (str): Returns: str: code_text_ """ flags = re.MULTILINE | re.DOTALL code_text_ = code_text code_text_ = re.sub(r'^ *# *REM [^\n]*$\n?', '', code_text_, flags=flags) code_text_ = re.sub(r'^ *# STARTBLOCK *$\n', '', code_text_, flags=flags) code_text_ = re.sub(r'^ *# ENDBLOCK *$\n?', '', code_text_, flags=flags) code_text_ = code_text_.rstrip() return code_text_
def function[remove_codeblock_syntax_sentinals, parameter[code_text]]: constant[ Removes template comments and vim sentinals Args: code_text (str): Returns: str: code_text_ ] variable[flags] assign[=] binary_operation[name[re].MULTILINE <ast.BitOr object at 0x7da2590d6aa0> name[re].DOTALL] variable[code_text_] assign[=] name[code_text] variable[code_text_] assign[=] call[name[re].sub, parameter[constant[^ *# *REM [^\n]*$\n?], constant[], name[code_text_]]] variable[code_text_] assign[=] call[name[re].sub, parameter[constant[^ *# STARTBLOCK *$\n], constant[], name[code_text_]]] variable[code_text_] assign[=] call[name[re].sub, parameter[constant[^ *# ENDBLOCK *$\n?], constant[], name[code_text_]]] variable[code_text_] assign[=] call[name[code_text_].rstrip, parameter[]] return[name[code_text_]]
keyword[def] identifier[remove_codeblock_syntax_sentinals] ( identifier[code_text] ): literal[string] identifier[flags] = identifier[re] . identifier[MULTILINE] | identifier[re] . identifier[DOTALL] identifier[code_text_] = identifier[code_text] identifier[code_text_] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[code_text_] , identifier[flags] = identifier[flags] ) identifier[code_text_] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[code_text_] , identifier[flags] = identifier[flags] ) identifier[code_text_] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[code_text_] , identifier[flags] = identifier[flags] ) identifier[code_text_] = identifier[code_text_] . identifier[rstrip] () keyword[return] identifier[code_text_]
def remove_codeblock_syntax_sentinals(code_text): """ Removes template comments and vim sentinals Args: code_text (str): Returns: str: code_text_ """ flags = re.MULTILINE | re.DOTALL code_text_ = code_text code_text_ = re.sub('^ *# *REM [^\\n]*$\\n?', '', code_text_, flags=flags) code_text_ = re.sub('^ *# STARTBLOCK *$\\n', '', code_text_, flags=flags) code_text_ = re.sub('^ *# ENDBLOCK *$\\n?', '', code_text_, flags=flags) code_text_ = code_text_.rstrip() return code_text_
def for_account_hash(parent, account_hash): """ Returns a new AccountProxy that acquires the account with the given hash, if such an account is known to the account manager. It is an error if the account manager does not have such an account. """ account = AccountProxy(parent) account.account_hash = account_hash if account.acquire(): return account return None
def function[for_account_hash, parameter[parent, account_hash]]: constant[ Returns a new AccountProxy that acquires the account with the given hash, if such an account is known to the account manager. It is an error if the account manager does not have such an account. ] variable[account] assign[=] call[name[AccountProxy], parameter[name[parent]]] name[account].account_hash assign[=] name[account_hash] if call[name[account].acquire, parameter[]] begin[:] return[name[account]] return[constant[None]]
keyword[def] identifier[for_account_hash] ( identifier[parent] , identifier[account_hash] ): literal[string] identifier[account] = identifier[AccountProxy] ( identifier[parent] ) identifier[account] . identifier[account_hash] = identifier[account_hash] keyword[if] identifier[account] . identifier[acquire] (): keyword[return] identifier[account] keyword[return] keyword[None]
def for_account_hash(parent, account_hash): """ Returns a new AccountProxy that acquires the account with the given hash, if such an account is known to the account manager. It is an error if the account manager does not have such an account. """ account = AccountProxy(parent) account.account_hash = account_hash if account.acquire(): return account # depends on [control=['if'], data=[]] return None
def fit_transform(self, X, y=None): """Fit model to X and perform dimensionality reduction on X. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : Ignored Returns ------- X_new : array, shape (n_samples, n_components) Reduced version of X. This will always be a dense array, of the same type as the input array. If ``X`` was a ``dask.array``, then ``X_new`` will be a ``dask.array`` with the same chunks along the first dimension. """ X = self._check_array(X) if self.algorithm not in {"tsqr", "randomized"}: raise ValueError() if self.algorithm == "tsqr": u, s, v = da.linalg.svd(X) u = u[:, : self.n_components] s = s[: self.n_components] v = v[: self.n_components] else: u, s, v = da.linalg.svd_compressed( X, self.n_components, self.n_iter, seed=self.random_state ) u, v = svd_flip(u, v) X_transformed = u * s explained_var = X_transformed.var(axis=0) full_var = X.var(axis=0).sum() explained_variance_ratio = explained_var / full_var components, ev, evr, sv = compute(v, explained_var, explained_variance_ratio, s) self.components_ = components self.explained_variance_ = ev self.explained_variance_ratio_ = evr self.singular_values_ = sv return X_transformed
def function[fit_transform, parameter[self, X, y]]: constant[Fit model to X and perform dimensionality reduction on X. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : Ignored Returns ------- X_new : array, shape (n_samples, n_components) Reduced version of X. This will always be a dense array, of the same type as the input array. If ``X`` was a ``dask.array``, then ``X_new`` will be a ``dask.array`` with the same chunks along the first dimension. ] variable[X] assign[=] call[name[self]._check_array, parameter[name[X]]] if compare[name[self].algorithm <ast.NotIn object at 0x7da2590d7190> <ast.Set object at 0x7da1b18bf130>] begin[:] <ast.Raise object at 0x7da1b18bf100> if compare[name[self].algorithm equal[==] constant[tsqr]] begin[:] <ast.Tuple object at 0x7da1b18bd840> assign[=] call[name[da].linalg.svd, parameter[name[X]]] variable[u] assign[=] call[name[u]][tuple[[<ast.Slice object at 0x7da1b18bc100>, <ast.Slice object at 0x7da1b18bccd0>]]] variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da1b18bcac0>] variable[v] assign[=] call[name[v]][<ast.Slice object at 0x7da1b18bc9d0>] <ast.Tuple object at 0x7da1b18bd5a0> assign[=] call[name[svd_flip], parameter[name[u], name[v]]] variable[X_transformed] assign[=] binary_operation[name[u] * name[s]] variable[explained_var] assign[=] call[name[X_transformed].var, parameter[]] variable[full_var] assign[=] call[call[name[X].var, parameter[]].sum, parameter[]] variable[explained_variance_ratio] assign[=] binary_operation[name[explained_var] / name[full_var]] <ast.Tuple object at 0x7da1b1981750> assign[=] call[name[compute], parameter[name[v], name[explained_var], name[explained_variance_ratio], name[s]]] name[self].components_ assign[=] name[components] name[self].explained_variance_ assign[=] name[ev] name[self].explained_variance_ratio_ assign[=] name[evr] name[self].singular_values_ assign[=] name[sv] return[name[X_transformed]]
keyword[def] identifier[fit_transform] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] ): literal[string] identifier[X] = identifier[self] . identifier[_check_array] ( identifier[X] ) keyword[if] identifier[self] . identifier[algorithm] keyword[not] keyword[in] { literal[string] , literal[string] }: keyword[raise] identifier[ValueError] () keyword[if] identifier[self] . identifier[algorithm] == literal[string] : identifier[u] , identifier[s] , identifier[v] = identifier[da] . identifier[linalg] . identifier[svd] ( identifier[X] ) identifier[u] = identifier[u] [:,: identifier[self] . identifier[n_components] ] identifier[s] = identifier[s] [: identifier[self] . identifier[n_components] ] identifier[v] = identifier[v] [: identifier[self] . identifier[n_components] ] keyword[else] : identifier[u] , identifier[s] , identifier[v] = identifier[da] . identifier[linalg] . identifier[svd_compressed] ( identifier[X] , identifier[self] . identifier[n_components] , identifier[self] . identifier[n_iter] , identifier[seed] = identifier[self] . identifier[random_state] ) identifier[u] , identifier[v] = identifier[svd_flip] ( identifier[u] , identifier[v] ) identifier[X_transformed] = identifier[u] * identifier[s] identifier[explained_var] = identifier[X_transformed] . identifier[var] ( identifier[axis] = literal[int] ) identifier[full_var] = identifier[X] . identifier[var] ( identifier[axis] = literal[int] ). identifier[sum] () identifier[explained_variance_ratio] = identifier[explained_var] / identifier[full_var] identifier[components] , identifier[ev] , identifier[evr] , identifier[sv] = identifier[compute] ( identifier[v] , identifier[explained_var] , identifier[explained_variance_ratio] , identifier[s] ) identifier[self] . identifier[components_] = identifier[components] identifier[self] . identifier[explained_variance_] = identifier[ev] identifier[self] . identifier[explained_variance_ratio_] = identifier[evr] identifier[self] . identifier[singular_values_] = identifier[sv] keyword[return] identifier[X_transformed]
def fit_transform(self, X, y=None): """Fit model to X and perform dimensionality reduction on X. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : Ignored Returns ------- X_new : array, shape (n_samples, n_components) Reduced version of X. This will always be a dense array, of the same type as the input array. If ``X`` was a ``dask.array``, then ``X_new`` will be a ``dask.array`` with the same chunks along the first dimension. """ X = self._check_array(X) if self.algorithm not in {'tsqr', 'randomized'}: raise ValueError() # depends on [control=['if'], data=[]] if self.algorithm == 'tsqr': (u, s, v) = da.linalg.svd(X) u = u[:, :self.n_components] s = s[:self.n_components] v = v[:self.n_components] # depends on [control=['if'], data=[]] else: (u, s, v) = da.linalg.svd_compressed(X, self.n_components, self.n_iter, seed=self.random_state) (u, v) = svd_flip(u, v) X_transformed = u * s explained_var = X_transformed.var(axis=0) full_var = X.var(axis=0).sum() explained_variance_ratio = explained_var / full_var (components, ev, evr, sv) = compute(v, explained_var, explained_variance_ratio, s) self.components_ = components self.explained_variance_ = ev self.explained_variance_ratio_ = evr self.singular_values_ = sv return X_transformed
def serialize(self, content): """ Serialize to xml. :return string: """ return self.xmldoc_tpl % ( 'true' if self.response.status_code == HTTP_200_OK else 'false', str(self.resource.api or ''), int(mktime(datetime.now().timetuple())), super(XMLTemplateEmitter, self).serialize(content) )
def function[serialize, parameter[self, content]]: constant[ Serialize to xml. :return string: ] return[binary_operation[name[self].xmldoc_tpl <ast.Mod object at 0x7da2590d6920> tuple[[<ast.IfExp object at 0x7da2047ebb20>, <ast.Call object at 0x7da2047eba30>, <ast.Call object at 0x7da2047eac50>, <ast.Call object at 0x7da2047eaa70>]]]]
keyword[def] identifier[serialize] ( identifier[self] , identifier[content] ): literal[string] keyword[return] identifier[self] . identifier[xmldoc_tpl] %( literal[string] keyword[if] identifier[self] . identifier[response] . identifier[status_code] == identifier[HTTP_200_OK] keyword[else] literal[string] , identifier[str] ( identifier[self] . identifier[resource] . identifier[api] keyword[or] literal[string] ), identifier[int] ( identifier[mktime] ( identifier[datetime] . identifier[now] (). identifier[timetuple] ())), identifier[super] ( identifier[XMLTemplateEmitter] , identifier[self] ). identifier[serialize] ( identifier[content] ) )
def serialize(self, content): """ Serialize to xml. :return string: """ return self.xmldoc_tpl % ('true' if self.response.status_code == HTTP_200_OK else 'false', str(self.resource.api or ''), int(mktime(datetime.now().timetuple())), super(XMLTemplateEmitter, self).serialize(content))
def Text(text): """ provide a wrapper for python string map byte to str (python 3) all string in utf-8 encoding normalize string to NFC """ if not is_unicode(text): text = text.decode("utf-8") text = unicodedata.normalize("NFC", text) return text
def function[Text, parameter[text]]: constant[ provide a wrapper for python string map byte to str (python 3) all string in utf-8 encoding normalize string to NFC ] if <ast.UnaryOp object at 0x7da1b23093f0> begin[:] variable[text] assign[=] call[name[text].decode, parameter[constant[utf-8]]] variable[text] assign[=] call[name[unicodedata].normalize, parameter[constant[NFC], name[text]]] return[name[text]]
keyword[def] identifier[Text] ( identifier[text] ): literal[string] keyword[if] keyword[not] identifier[is_unicode] ( identifier[text] ): identifier[text] = identifier[text] . identifier[decode] ( literal[string] ) identifier[text] = identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[text] ) keyword[return] identifier[text]
def Text(text): """ provide a wrapper for python string map byte to str (python 3) all string in utf-8 encoding normalize string to NFC """ if not is_unicode(text): text = text.decode('utf-8') # depends on [control=['if'], data=[]] text = unicodedata.normalize('NFC', text) return text
def clear(self): """ Clears out all the settings for this instance. """ if self._customFormat: self._customFormat.clear() else: super(XSettings, self).clear()
def function[clear, parameter[self]]: constant[ Clears out all the settings for this instance. ] if name[self]._customFormat begin[:] call[name[self]._customFormat.clear, parameter[]]
keyword[def] identifier[clear] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_customFormat] : identifier[self] . identifier[_customFormat] . identifier[clear] () keyword[else] : identifier[super] ( identifier[XSettings] , identifier[self] ). identifier[clear] ()
def clear(self): """ Clears out all the settings for this instance. """ if self._customFormat: self._customFormat.clear() # depends on [control=['if'], data=[]] else: super(XSettings, self).clear()
def add_content(obj, language, slot, content): """ Adds a TextPlugin with given content to given slot """ placeholder = obj.placeholders.get(slot=slot) add_plugin(placeholder, TextPlugin, language, body=content)
def function[add_content, parameter[obj, language, slot, content]]: constant[ Adds a TextPlugin with given content to given slot ] variable[placeholder] assign[=] call[name[obj].placeholders.get, parameter[]] call[name[add_plugin], parameter[name[placeholder], name[TextPlugin], name[language]]]
keyword[def] identifier[add_content] ( identifier[obj] , identifier[language] , identifier[slot] , identifier[content] ): literal[string] identifier[placeholder] = identifier[obj] . identifier[placeholders] . identifier[get] ( identifier[slot] = identifier[slot] ) identifier[add_plugin] ( identifier[placeholder] , identifier[TextPlugin] , identifier[language] , identifier[body] = identifier[content] )
def add_content(obj, language, slot, content): """ Adds a TextPlugin with given content to given slot """ placeholder = obj.placeholders.get(slot=slot) add_plugin(placeholder, TextPlugin, language, body=content)
def delete(self, key=None): """Deletes the given key, or the whole bucket.""" # Delete the whole bucket. if key is None: # Delete everything in the bucket. for key in self.all(): key.delete() # Delete the bucket. return self._boto_bucket.delete() # If a key was passed, delete they key. k = self.key(key) return k.delete()
def function[delete, parameter[self, key]]: constant[Deletes the given key, or the whole bucket.] if compare[name[key] is constant[None]] begin[:] for taget[name[key]] in starred[call[name[self].all, parameter[]]] begin[:] call[name[key].delete, parameter[]] return[call[name[self]._boto_bucket.delete, parameter[]]] variable[k] assign[=] call[name[self].key, parameter[name[key]]] return[call[name[k].delete, parameter[]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[key] = keyword[None] ): literal[string] keyword[if] identifier[key] keyword[is] keyword[None] : keyword[for] identifier[key] keyword[in] identifier[self] . identifier[all] (): identifier[key] . identifier[delete] () keyword[return] identifier[self] . identifier[_boto_bucket] . identifier[delete] () identifier[k] = identifier[self] . identifier[key] ( identifier[key] ) keyword[return] identifier[k] . identifier[delete] ()
def delete(self, key=None): """Deletes the given key, or the whole bucket.""" # Delete the whole bucket. if key is None: # Delete everything in the bucket. for key in self.all(): key.delete() # depends on [control=['for'], data=['key']] # Delete the bucket. return self._boto_bucket.delete() # depends on [control=['if'], data=['key']] # If a key was passed, delete they key. k = self.key(key) return k.delete()
def main(args=None): # type: (Optional[List[str]]) -> int """ Main logic. """ cli_args = ArgumentParser() cli_args.add_argument( "-c", "--coordinates", default="", type=str, help="the part of the screen to capture: top, left, width, height", ) cli_args.add_argument( "-l", "--level", default=6, type=int, choices=list(range(10)), help="the PNG compression level", ) cli_args.add_argument( "-m", "--monitor", default=0, type=int, help="the monitor to screen shot" ) cli_args.add_argument( "-o", "--output", default="monitor-{mon}.png", help="the output file name" ) cli_args.add_argument( "-q", "--quiet", default=False, action="store_true", help="do not print created files", ) cli_args.add_argument("-v", "--version", action="version", version=__version__) options = cli_args.parse_args(args) kwargs = {"mon": options.monitor, "output": options.output} if options.coordinates: try: top, left, width, height = options.coordinates.split(",") except ValueError: print("Coordinates syntax: top, left, width, height") return 2 kwargs["mon"] = { "top": int(top), "left": int(left), "width": int(width), "height": int(height), } if options.output == "monitor-{mon}.png": kwargs["output"] = "sct-{top}x{left}_{width}x{height}.png" try: with mss() as sct: if options.coordinates: output = kwargs["output"].format(**kwargs["mon"]) sct_img = sct.grab(kwargs["mon"]) to_png(sct_img.rgb, sct_img.size, level=options.level, output=output) if not options.quiet: print(os.path.realpath(output)) else: for file_name in sct.save(**kwargs): if not options.quiet: print(os.path.realpath(file_name)) return 0 except ScreenShotError: return 1
def function[main, parameter[args]]: constant[ Main logic. ] variable[cli_args] assign[=] call[name[ArgumentParser], parameter[]] call[name[cli_args].add_argument, parameter[constant[-c], constant[--coordinates]]] call[name[cli_args].add_argument, parameter[constant[-l], constant[--level]]] call[name[cli_args].add_argument, parameter[constant[-m], constant[--monitor]]] call[name[cli_args].add_argument, parameter[constant[-o], constant[--output]]] call[name[cli_args].add_argument, parameter[constant[-q], constant[--quiet]]] call[name[cli_args].add_argument, parameter[constant[-v], constant[--version]]] variable[options] assign[=] call[name[cli_args].parse_args, parameter[name[args]]] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b07e10c0>, <ast.Constant object at 0x7da1b07e11e0>], [<ast.Attribute object at 0x7da1b07e1150>, <ast.Attribute object at 0x7da1b07e1c90>]] if name[options].coordinates begin[:] <ast.Try object at 0x7da1b07e0ee0> call[name[kwargs]][constant[mon]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0832ec0>, <ast.Constant object at 0x7da1b0830a60>, <ast.Constant object at 0x7da1b0830fa0>, <ast.Constant object at 0x7da1b0830f70>], [<ast.Call object at 0x7da1b0833610>, <ast.Call object at 0x7da1b0832290>, <ast.Call object at 0x7da1b08330a0>, <ast.Call object at 0x7da1b0831720>]] if compare[name[options].output equal[==] constant[monitor-{mon}.png]] begin[:] call[name[kwargs]][constant[output]] assign[=] constant[sct-{top}x{left}_{width}x{height}.png] <ast.Try object at 0x7da1b0799420>
keyword[def] identifier[main] ( identifier[args] = keyword[None] ): literal[string] identifier[cli_args] = identifier[ArgumentParser] () identifier[cli_args] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] , ) identifier[cli_args] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = literal[int] , identifier[type] = identifier[int] , identifier[choices] = identifier[list] ( identifier[range] ( literal[int] )), identifier[help] = literal[string] , ) identifier[cli_args] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = literal[int] , identifier[type] = identifier[int] , identifier[help] = literal[string] ) identifier[cli_args] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[cli_args] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] , ) identifier[cli_args] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[version] = identifier[__version__] ) identifier[options] = identifier[cli_args] . identifier[parse_args] ( identifier[args] ) identifier[kwargs] ={ literal[string] : identifier[options] . identifier[monitor] , literal[string] : identifier[options] . identifier[output] } keyword[if] identifier[options] . identifier[coordinates] : keyword[try] : identifier[top] , identifier[left] , identifier[width] , identifier[height] = identifier[options] . identifier[coordinates] . identifier[split] ( literal[string] ) keyword[except] identifier[ValueError] : identifier[print] ( literal[string] ) keyword[return] literal[int] identifier[kwargs] [ literal[string] ]={ literal[string] : identifier[int] ( identifier[top] ), literal[string] : identifier[int] ( identifier[left] ), literal[string] : identifier[int] ( identifier[width] ), literal[string] : identifier[int] ( identifier[height] ), } keyword[if] identifier[options] . identifier[output] == literal[string] : identifier[kwargs] [ literal[string] ]= literal[string] keyword[try] : keyword[with] identifier[mss] () keyword[as] identifier[sct] : keyword[if] identifier[options] . identifier[coordinates] : identifier[output] = identifier[kwargs] [ literal[string] ]. identifier[format] (** identifier[kwargs] [ literal[string] ]) identifier[sct_img] = identifier[sct] . identifier[grab] ( identifier[kwargs] [ literal[string] ]) identifier[to_png] ( identifier[sct_img] . identifier[rgb] , identifier[sct_img] . identifier[size] , identifier[level] = identifier[options] . identifier[level] , identifier[output] = identifier[output] ) keyword[if] keyword[not] identifier[options] . identifier[quiet] : identifier[print] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[output] )) keyword[else] : keyword[for] identifier[file_name] keyword[in] identifier[sct] . identifier[save] (** identifier[kwargs] ): keyword[if] keyword[not] identifier[options] . identifier[quiet] : identifier[print] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[file_name] )) keyword[return] literal[int] keyword[except] identifier[ScreenShotError] : keyword[return] literal[int]
def main(args=None): # type: (Optional[List[str]]) -> int ' Main logic. ' cli_args = ArgumentParser() cli_args.add_argument('-c', '--coordinates', default='', type=str, help='the part of the screen to capture: top, left, width, height') cli_args.add_argument('-l', '--level', default=6, type=int, choices=list(range(10)), help='the PNG compression level') cli_args.add_argument('-m', '--monitor', default=0, type=int, help='the monitor to screen shot') cli_args.add_argument('-o', '--output', default='monitor-{mon}.png', help='the output file name') cli_args.add_argument('-q', '--quiet', default=False, action='store_true', help='do not print created files') cli_args.add_argument('-v', '--version', action='version', version=__version__) options = cli_args.parse_args(args) kwargs = {'mon': options.monitor, 'output': options.output} if options.coordinates: try: (top, left, width, height) = options.coordinates.split(',') # depends on [control=['try'], data=[]] except ValueError: print('Coordinates syntax: top, left, width, height') return 2 # depends on [control=['except'], data=[]] kwargs['mon'] = {'top': int(top), 'left': int(left), 'width': int(width), 'height': int(height)} if options.output == 'monitor-{mon}.png': kwargs['output'] = 'sct-{top}x{left}_{width}x{height}.png' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] try: with mss() as sct: if options.coordinates: output = kwargs['output'].format(**kwargs['mon']) sct_img = sct.grab(kwargs['mon']) to_png(sct_img.rgb, sct_img.size, level=options.level, output=output) if not options.quiet: print(os.path.realpath(output)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: for file_name in sct.save(**kwargs): if not options.quiet: print(os.path.realpath(file_name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file_name']] return 0 # depends on [control=['with'], data=['sct']] # depends on [control=['try'], data=[]] except ScreenShotError: return 1 # depends on [control=['except'], data=[]]
def update(self, results): """ Compute the new metrics values, given the next inference/ground-truth values :param results: (:class:`~nupic.frameworks.opf.opf_utils.ModelResult`) object that was computed during the last iteration of the model. :returns: (dict) where each key is the metric-name, and the values are it scalar value. """ #print "\n\n---------------------------------------------------------------" #print "Model results: \nrawInput:%s \ninferences:%s" % \ # (pprint.pformat(results.rawInput), pprint.pformat(results.inferences)) self._addResults(results) if not self.__metricSpecs \ or self.__currentInference is None: return {} metricResults = {} for metric, spec, label in zip(self.__metrics, self.__metricSpecs, self.__metricLabels): inferenceElement = spec.inferenceElement field = spec.field groundTruth = self._getGroundTruth(inferenceElement) inference = self._getInference(inferenceElement) rawRecord = self._getRawGroundTruth() result = self.__currentResult if field: if type(inference) in (list, tuple): if field in self.__fieldNameIndexMap: # NOTE: If the predicted field is not fed in at the bottom, we # won't have it in our fieldNameIndexMap fieldIndex = self.__fieldNameIndexMap[field] inference = inference[fieldIndex] else: inference = None if groundTruth is not None: if type(groundTruth) in (list, tuple): if field in self.__fieldNameIndexMap: # NOTE: If the predicted field is not fed in at the bottom, we # won't have it in our fieldNameIndexMap fieldIndex = self.__fieldNameIndexMap[field] groundTruth = groundTruth[fieldIndex] else: groundTruth = None else: # groundTruth could be a dict based off of field names groundTruth = groundTruth[field] metric.addInstance(groundTruth=groundTruth, prediction=inference, record=rawRecord, result=result) metricResults[label] = metric.getMetric()['value'] return metricResults
def function[update, parameter[self, results]]: constant[ Compute the new metrics values, given the next inference/ground-truth values :param results: (:class:`~nupic.frameworks.opf.opf_utils.ModelResult`) object that was computed during the last iteration of the model. :returns: (dict) where each key is the metric-name, and the values are it scalar value. ] call[name[self]._addResults, parameter[name[results]]] if <ast.BoolOp object at 0x7da20c993820> begin[:] return[dictionary[[], []]] variable[metricResults] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da2047e8730>, <ast.Name object at 0x7da2047e9210>, <ast.Name object at 0x7da2047e9f60>]]] in starred[call[name[zip], parameter[name[self].__metrics, name[self].__metricSpecs, name[self].__metricLabels]]] begin[:] variable[inferenceElement] assign[=] name[spec].inferenceElement variable[field] assign[=] name[spec].field variable[groundTruth] assign[=] call[name[self]._getGroundTruth, parameter[name[inferenceElement]]] variable[inference] assign[=] call[name[self]._getInference, parameter[name[inferenceElement]]] variable[rawRecord] assign[=] call[name[self]._getRawGroundTruth, parameter[]] variable[result] assign[=] name[self].__currentResult if name[field] begin[:] if compare[call[name[type], parameter[name[inference]]] in tuple[[<ast.Name object at 0x7da18f09f0d0>, <ast.Name object at 0x7da18f09e470>]]] begin[:] if compare[name[field] in name[self].__fieldNameIndexMap] begin[:] variable[fieldIndex] assign[=] call[name[self].__fieldNameIndexMap][name[field]] variable[inference] assign[=] call[name[inference]][name[fieldIndex]] if compare[name[groundTruth] is_not constant[None]] begin[:] if compare[call[name[type], parameter[name[groundTruth]]] in tuple[[<ast.Name object at 0x7da18f09e9e0>, <ast.Name object at 0x7da18f09e170>]]] begin[:] if compare[name[field] in name[self].__fieldNameIndexMap] begin[:] variable[fieldIndex] assign[=] call[name[self].__fieldNameIndexMap][name[field]] variable[groundTruth] assign[=] call[name[groundTruth]][name[fieldIndex]] call[name[metric].addInstance, parameter[]] call[name[metricResults]][name[label]] assign[=] call[call[name[metric].getMetric, parameter[]]][constant[value]] return[name[metricResults]]
keyword[def] identifier[update] ( identifier[self] , identifier[results] ): literal[string] identifier[self] . identifier[_addResults] ( identifier[results] ) keyword[if] keyword[not] identifier[self] . identifier[__metricSpecs] keyword[or] identifier[self] . identifier[__currentInference] keyword[is] keyword[None] : keyword[return] {} identifier[metricResults] ={} keyword[for] identifier[metric] , identifier[spec] , identifier[label] keyword[in] identifier[zip] ( identifier[self] . identifier[__metrics] , identifier[self] . identifier[__metricSpecs] , identifier[self] . identifier[__metricLabels] ): identifier[inferenceElement] = identifier[spec] . identifier[inferenceElement] identifier[field] = identifier[spec] . identifier[field] identifier[groundTruth] = identifier[self] . identifier[_getGroundTruth] ( identifier[inferenceElement] ) identifier[inference] = identifier[self] . identifier[_getInference] ( identifier[inferenceElement] ) identifier[rawRecord] = identifier[self] . identifier[_getRawGroundTruth] () identifier[result] = identifier[self] . identifier[__currentResult] keyword[if] identifier[field] : keyword[if] identifier[type] ( identifier[inference] ) keyword[in] ( identifier[list] , identifier[tuple] ): keyword[if] identifier[field] keyword[in] identifier[self] . identifier[__fieldNameIndexMap] : identifier[fieldIndex] = identifier[self] . identifier[__fieldNameIndexMap] [ identifier[field] ] identifier[inference] = identifier[inference] [ identifier[fieldIndex] ] keyword[else] : identifier[inference] = keyword[None] keyword[if] identifier[groundTruth] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[type] ( identifier[groundTruth] ) keyword[in] ( identifier[list] , identifier[tuple] ): keyword[if] identifier[field] keyword[in] identifier[self] . identifier[__fieldNameIndexMap] : identifier[fieldIndex] = identifier[self] . identifier[__fieldNameIndexMap] [ identifier[field] ] identifier[groundTruth] = identifier[groundTruth] [ identifier[fieldIndex] ] keyword[else] : identifier[groundTruth] = keyword[None] keyword[else] : identifier[groundTruth] = identifier[groundTruth] [ identifier[field] ] identifier[metric] . identifier[addInstance] ( identifier[groundTruth] = identifier[groundTruth] , identifier[prediction] = identifier[inference] , identifier[record] = identifier[rawRecord] , identifier[result] = identifier[result] ) identifier[metricResults] [ identifier[label] ]= identifier[metric] . identifier[getMetric] ()[ literal[string] ] keyword[return] identifier[metricResults]
def update(self, results): """ Compute the new metrics values, given the next inference/ground-truth values :param results: (:class:`~nupic.frameworks.opf.opf_utils.ModelResult`) object that was computed during the last iteration of the model. :returns: (dict) where each key is the metric-name, and the values are it scalar value. """ #print "\n\n---------------------------------------------------------------" #print "Model results: \nrawInput:%s \ninferences:%s" % \ # (pprint.pformat(results.rawInput), pprint.pformat(results.inferences)) self._addResults(results) if not self.__metricSpecs or self.__currentInference is None: return {} # depends on [control=['if'], data=[]] metricResults = {} for (metric, spec, label) in zip(self.__metrics, self.__metricSpecs, self.__metricLabels): inferenceElement = spec.inferenceElement field = spec.field groundTruth = self._getGroundTruth(inferenceElement) inference = self._getInference(inferenceElement) rawRecord = self._getRawGroundTruth() result = self.__currentResult if field: if type(inference) in (list, tuple): if field in self.__fieldNameIndexMap: # NOTE: If the predicted field is not fed in at the bottom, we # won't have it in our fieldNameIndexMap fieldIndex = self.__fieldNameIndexMap[field] inference = inference[fieldIndex] # depends on [control=['if'], data=['field']] else: inference = None # depends on [control=['if'], data=[]] if groundTruth is not None: if type(groundTruth) in (list, tuple): if field in self.__fieldNameIndexMap: # NOTE: If the predicted field is not fed in at the bottom, we # won't have it in our fieldNameIndexMap fieldIndex = self.__fieldNameIndexMap[field] groundTruth = groundTruth[fieldIndex] # depends on [control=['if'], data=['field']] else: groundTruth = None # depends on [control=['if'], data=[]] else: # groundTruth could be a dict based off of field names groundTruth = groundTruth[field] # depends on [control=['if'], data=['groundTruth']] # depends on [control=['if'], data=[]] metric.addInstance(groundTruth=groundTruth, prediction=inference, record=rawRecord, result=result) metricResults[label] = metric.getMetric()['value'] # depends on [control=['for'], data=[]] return metricResults
def lp_pooling(attrs, inputs, proto_obj): """LP Pooling""" p_value = attrs.get('p', 2) new_attrs = translation_utils._fix_attribute_names(attrs, {'kernel_shape': 'kernel', 'strides': 'stride', 'pads': 'pad' }) new_attrs = translation_utils._remove_attributes(new_attrs, ['p']) new_attrs = translation_utils._add_extra_attributes(new_attrs, {'pooling_convention': 'valid', 'p_value': p_value }) new_op = translation_utils._fix_pooling('lp', inputs, new_attrs) return new_op, new_attrs, inputs
def function[lp_pooling, parameter[attrs, inputs, proto_obj]]: constant[LP Pooling] variable[p_value] assign[=] call[name[attrs].get, parameter[constant[p], constant[2]]] variable[new_attrs] assign[=] call[name[translation_utils]._fix_attribute_names, parameter[name[attrs], dictionary[[<ast.Constant object at 0x7da1b2064ca0>, <ast.Constant object at 0x7da1b2064c40>, <ast.Constant object at 0x7da1b2066260>], [<ast.Constant object at 0x7da1b20641c0>, <ast.Constant object at 0x7da1b20640a0>, <ast.Constant object at 0x7da1b2065540>]]]] variable[new_attrs] assign[=] call[name[translation_utils]._remove_attributes, parameter[name[new_attrs], list[[<ast.Constant object at 0x7da1b2067070>]]]] variable[new_attrs] assign[=] call[name[translation_utils]._add_extra_attributes, parameter[name[new_attrs], dictionary[[<ast.Constant object at 0x7da1b2065840>, <ast.Constant object at 0x7da1b2065ae0>], [<ast.Constant object at 0x7da1b20671f0>, <ast.Name object at 0x7da1b2064c70>]]]] variable[new_op] assign[=] call[name[translation_utils]._fix_pooling, parameter[constant[lp], name[inputs], name[new_attrs]]] return[tuple[[<ast.Name object at 0x7da1b2067d60>, <ast.Name object at 0x7da1b20672b0>, <ast.Name object at 0x7da1b2065930>]]]
keyword[def] identifier[lp_pooling] ( identifier[attrs] , identifier[inputs] , identifier[proto_obj] ): literal[string] identifier[p_value] = identifier[attrs] . identifier[get] ( literal[string] , literal[int] ) identifier[new_attrs] = identifier[translation_utils] . identifier[_fix_attribute_names] ( identifier[attrs] , { literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }) identifier[new_attrs] = identifier[translation_utils] . identifier[_remove_attributes] ( identifier[new_attrs] ,[ literal[string] ]) identifier[new_attrs] = identifier[translation_utils] . identifier[_add_extra_attributes] ( identifier[new_attrs] , { literal[string] : literal[string] , literal[string] : identifier[p_value] }) identifier[new_op] = identifier[translation_utils] . identifier[_fix_pooling] ( literal[string] , identifier[inputs] , identifier[new_attrs] ) keyword[return] identifier[new_op] , identifier[new_attrs] , identifier[inputs]
def lp_pooling(attrs, inputs, proto_obj): """LP Pooling""" p_value = attrs.get('p', 2) new_attrs = translation_utils._fix_attribute_names(attrs, {'kernel_shape': 'kernel', 'strides': 'stride', 'pads': 'pad'}) new_attrs = translation_utils._remove_attributes(new_attrs, ['p']) new_attrs = translation_utils._add_extra_attributes(new_attrs, {'pooling_convention': 'valid', 'p_value': p_value}) new_op = translation_utils._fix_pooling('lp', inputs, new_attrs) return (new_op, new_attrs, inputs)
def send_keys(self, text: str = 'cerium') -> None: '''Simulates typing keys.''' self.click() self._parent.send_keys(text)
def function[send_keys, parameter[self, text]]: constant[Simulates typing keys.] call[name[self].click, parameter[]] call[name[self]._parent.send_keys, parameter[name[text]]]
keyword[def] identifier[send_keys] ( identifier[self] , identifier[text] : identifier[str] = literal[string] )-> keyword[None] : literal[string] identifier[self] . identifier[click] () identifier[self] . identifier[_parent] . identifier[send_keys] ( identifier[text] )
def send_keys(self, text: str='cerium') -> None: """Simulates typing keys.""" self.click() self._parent.send_keys(text)
def GroupEncoder(field_number, is_repeated, is_packed): """Returns an encoder for a group field.""" start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP) end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP) assert not is_packed if is_repeated: def EncodeRepeatedField(write, value): for element in value: write(start_tag) element._InternalSerialize(write) write(end_tag) return EncodeRepeatedField else: def EncodeField(write, value): write(start_tag) value._InternalSerialize(write) return write(end_tag) return EncodeField
def function[GroupEncoder, parameter[field_number, is_repeated, is_packed]]: constant[Returns an encoder for a group field.] variable[start_tag] assign[=] call[name[TagBytes], parameter[name[field_number], name[wire_format].WIRETYPE_START_GROUP]] variable[end_tag] assign[=] call[name[TagBytes], parameter[name[field_number], name[wire_format].WIRETYPE_END_GROUP]] assert[<ast.UnaryOp object at 0x7da1b205bd60>] if name[is_repeated] begin[:] def function[EncodeRepeatedField, parameter[write, value]]: for taget[name[element]] in starred[name[value]] begin[:] call[name[write], parameter[name[start_tag]]] call[name[element]._InternalSerialize, parameter[name[write]]] call[name[write], parameter[name[end_tag]]] return[name[EncodeRepeatedField]]
keyword[def] identifier[GroupEncoder] ( identifier[field_number] , identifier[is_repeated] , identifier[is_packed] ): literal[string] identifier[start_tag] = identifier[TagBytes] ( identifier[field_number] , identifier[wire_format] . identifier[WIRETYPE_START_GROUP] ) identifier[end_tag] = identifier[TagBytes] ( identifier[field_number] , identifier[wire_format] . identifier[WIRETYPE_END_GROUP] ) keyword[assert] keyword[not] identifier[is_packed] keyword[if] identifier[is_repeated] : keyword[def] identifier[EncodeRepeatedField] ( identifier[write] , identifier[value] ): keyword[for] identifier[element] keyword[in] identifier[value] : identifier[write] ( identifier[start_tag] ) identifier[element] . identifier[_InternalSerialize] ( identifier[write] ) identifier[write] ( identifier[end_tag] ) keyword[return] identifier[EncodeRepeatedField] keyword[else] : keyword[def] identifier[EncodeField] ( identifier[write] , identifier[value] ): identifier[write] ( identifier[start_tag] ) identifier[value] . identifier[_InternalSerialize] ( identifier[write] ) keyword[return] identifier[write] ( identifier[end_tag] ) keyword[return] identifier[EncodeField]
def GroupEncoder(field_number, is_repeated, is_packed): """Returns an encoder for a group field.""" start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP) end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP) assert not is_packed if is_repeated: def EncodeRepeatedField(write, value): for element in value: write(start_tag) element._InternalSerialize(write) write(end_tag) # depends on [control=['for'], data=['element']] return EncodeRepeatedField # depends on [control=['if'], data=[]] else: def EncodeField(write, value): write(start_tag) value._InternalSerialize(write) return write(end_tag) return EncodeField
def _get_rom_firmware_version(self, data): """Gets the rom firmware version for server capabilities Parse the get_host_health_data() to retreive the firmware details. :param data: the output returned by get_host_health_data() :returns: a dictionary of rom firmware version. """ firmware_details = self._get_firmware_embedded_health(data) if firmware_details: try: rom_firmware_version = ( firmware_details['HP ProLiant System ROM']) return {'rom_firmware_version': rom_firmware_version} except KeyError: return None
def function[_get_rom_firmware_version, parameter[self, data]]: constant[Gets the rom firmware version for server capabilities Parse the get_host_health_data() to retreive the firmware details. :param data: the output returned by get_host_health_data() :returns: a dictionary of rom firmware version. ] variable[firmware_details] assign[=] call[name[self]._get_firmware_embedded_health, parameter[name[data]]] if name[firmware_details] begin[:] <ast.Try object at 0x7da20c6c7dc0>
keyword[def] identifier[_get_rom_firmware_version] ( identifier[self] , identifier[data] ): literal[string] identifier[firmware_details] = identifier[self] . identifier[_get_firmware_embedded_health] ( identifier[data] ) keyword[if] identifier[firmware_details] : keyword[try] : identifier[rom_firmware_version] =( identifier[firmware_details] [ literal[string] ]) keyword[return] { literal[string] : identifier[rom_firmware_version] } keyword[except] identifier[KeyError] : keyword[return] keyword[None]
def _get_rom_firmware_version(self, data): """Gets the rom firmware version for server capabilities Parse the get_host_health_data() to retreive the firmware details. :param data: the output returned by get_host_health_data() :returns: a dictionary of rom firmware version. """ firmware_details = self._get_firmware_embedded_health(data) if firmware_details: try: rom_firmware_version = firmware_details['HP ProLiant System ROM'] return {'rom_firmware_version': rom_firmware_version} # depends on [control=['try'], data=[]] except KeyError: return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def random_mixed_actions(nums_actions, random_state=None): """ Return a tuple of random mixed actions (vectors of floats). Parameters ---------- nums_actions : tuple(int) Tuple of the numbers of actions, one for each player. random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- action_profile : tuple(ndarray(float, ndim=1)) Tuple of mixed_actions, one for each player. """ random_state = check_random_state(random_state) action_profile = tuple( [probvec(1, num_actions, random_state).ravel() for num_actions in nums_actions] ) return action_profile
def function[random_mixed_actions, parameter[nums_actions, random_state]]: constant[ Return a tuple of random mixed actions (vectors of floats). Parameters ---------- nums_actions : tuple(int) Tuple of the numbers of actions, one for each player. random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- action_profile : tuple(ndarray(float, ndim=1)) Tuple of mixed_actions, one for each player. ] variable[random_state] assign[=] call[name[check_random_state], parameter[name[random_state]]] variable[action_profile] assign[=] call[name[tuple], parameter[<ast.ListComp object at 0x7da1b26ad5d0>]] return[name[action_profile]]
keyword[def] identifier[random_mixed_actions] ( identifier[nums_actions] , identifier[random_state] = keyword[None] ): literal[string] identifier[random_state] = identifier[check_random_state] ( identifier[random_state] ) identifier[action_profile] = identifier[tuple] ( [ identifier[probvec] ( literal[int] , identifier[num_actions] , identifier[random_state] ). identifier[ravel] () keyword[for] identifier[num_actions] keyword[in] identifier[nums_actions] ] ) keyword[return] identifier[action_profile]
def random_mixed_actions(nums_actions, random_state=None): """ Return a tuple of random mixed actions (vectors of floats). Parameters ---------- nums_actions : tuple(int) Tuple of the numbers of actions, one for each player. random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- action_profile : tuple(ndarray(float, ndim=1)) Tuple of mixed_actions, one for each player. """ random_state = check_random_state(random_state) action_profile = tuple([probvec(1, num_actions, random_state).ravel() for num_actions in nums_actions]) return action_profile
def read_into(self, buf: bytearray, partial: bool = False) -> Awaitable[int]: """Asynchronously read a number of bytes. ``buf`` must be a writable buffer into which data will be read. If ``partial`` is true, the callback is run as soon as any bytes have been read. Otherwise, it is run when the ``buf`` has been entirely filled with read data. .. versionadded:: 5.0 .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned `.Future` instead. """ future = self._start_read() # First copy data already in read buffer available_bytes = self._read_buffer_size n = len(buf) if available_bytes >= n: end = self._read_buffer_pos + n buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos : end] del self._read_buffer[:end] self._after_user_read_buffer = self._read_buffer elif available_bytes > 0: buf[:available_bytes] = memoryview(self._read_buffer)[ self._read_buffer_pos : ] # Set up the supplied buffer as our temporary read buffer. # The original (if it had any data remaining) has been # saved for later. self._user_read_buffer = True self._read_buffer = buf self._read_buffer_pos = 0 self._read_buffer_size = available_bytes self._read_bytes = n self._read_partial = partial try: self._try_inline_read() except: future.add_done_callback(lambda f: f.exception()) raise return future
def function[read_into, parameter[self, buf, partial]]: constant[Asynchronously read a number of bytes. ``buf`` must be a writable buffer into which data will be read. If ``partial`` is true, the callback is run as soon as any bytes have been read. Otherwise, it is run when the ``buf`` has been entirely filled with read data. .. versionadded:: 5.0 .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned `.Future` instead. ] variable[future] assign[=] call[name[self]._start_read, parameter[]] variable[available_bytes] assign[=] name[self]._read_buffer_size variable[n] assign[=] call[name[len], parameter[name[buf]]] if compare[name[available_bytes] greater_or_equal[>=] name[n]] begin[:] variable[end] assign[=] binary_operation[name[self]._read_buffer_pos + name[n]] call[name[buf]][<ast.Slice object at 0x7da1b1f18490>] assign[=] call[call[name[memoryview], parameter[name[self]._read_buffer]]][<ast.Slice object at 0x7da1b1f1aa10>] <ast.Delete object at 0x7da1b1f1a260> name[self]._after_user_read_buffer assign[=] name[self]._read_buffer name[self]._user_read_buffer assign[=] constant[True] name[self]._read_buffer assign[=] name[buf] name[self]._read_buffer_pos assign[=] constant[0] name[self]._read_buffer_size assign[=] name[available_bytes] name[self]._read_bytes assign[=] name[n] name[self]._read_partial assign[=] name[partial] <ast.Try object at 0x7da1b1f19cc0> return[name[future]]
keyword[def] identifier[read_into] ( identifier[self] , identifier[buf] : identifier[bytearray] , identifier[partial] : identifier[bool] = keyword[False] )-> identifier[Awaitable] [ identifier[int] ]: literal[string] identifier[future] = identifier[self] . identifier[_start_read] () identifier[available_bytes] = identifier[self] . identifier[_read_buffer_size] identifier[n] = identifier[len] ( identifier[buf] ) keyword[if] identifier[available_bytes] >= identifier[n] : identifier[end] = identifier[self] . identifier[_read_buffer_pos] + identifier[n] identifier[buf] [:]= identifier[memoryview] ( identifier[self] . identifier[_read_buffer] )[ identifier[self] . identifier[_read_buffer_pos] : identifier[end] ] keyword[del] identifier[self] . identifier[_read_buffer] [: identifier[end] ] identifier[self] . identifier[_after_user_read_buffer] = identifier[self] . identifier[_read_buffer] keyword[elif] identifier[available_bytes] > literal[int] : identifier[buf] [: identifier[available_bytes] ]= identifier[memoryview] ( identifier[self] . identifier[_read_buffer] )[ identifier[self] . identifier[_read_buffer_pos] : ] identifier[self] . identifier[_user_read_buffer] = keyword[True] identifier[self] . identifier[_read_buffer] = identifier[buf] identifier[self] . identifier[_read_buffer_pos] = literal[int] identifier[self] . identifier[_read_buffer_size] = identifier[available_bytes] identifier[self] . identifier[_read_bytes] = identifier[n] identifier[self] . identifier[_read_partial] = identifier[partial] keyword[try] : identifier[self] . identifier[_try_inline_read] () keyword[except] : identifier[future] . identifier[add_done_callback] ( keyword[lambda] identifier[f] : identifier[f] . identifier[exception] ()) keyword[raise] keyword[return] identifier[future]
def read_into(self, buf: bytearray, partial: bool=False) -> Awaitable[int]: """Asynchronously read a number of bytes. ``buf`` must be a writable buffer into which data will be read. If ``partial`` is true, the callback is run as soon as any bytes have been read. Otherwise, it is run when the ``buf`` has been entirely filled with read data. .. versionadded:: 5.0 .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned `.Future` instead. """ future = self._start_read() # First copy data already in read buffer available_bytes = self._read_buffer_size n = len(buf) if available_bytes >= n: end = self._read_buffer_pos + n buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos:end] del self._read_buffer[:end] self._after_user_read_buffer = self._read_buffer # depends on [control=['if'], data=['n']] elif available_bytes > 0: buf[:available_bytes] = memoryview(self._read_buffer)[self._read_buffer_pos:] # depends on [control=['if'], data=['available_bytes']] # Set up the supplied buffer as our temporary read buffer. # The original (if it had any data remaining) has been # saved for later. self._user_read_buffer = True self._read_buffer = buf self._read_buffer_pos = 0 self._read_buffer_size = available_bytes self._read_bytes = n self._read_partial = partial try: self._try_inline_read() # depends on [control=['try'], data=[]] except: future.add_done_callback(lambda f: f.exception()) raise # depends on [control=['except'], data=[]] return future
def action_ipset(reader, *args): """Show the set of IPs seen in Flow Log records.""" ip_set = set() for record in reader: if record.log_status in (SKIPDATA, NODATA): continue ip_set.add(record.srcaddr) ip_set.add(record.dstaddr) for ip in ip_set: print(ip)
def function[action_ipset, parameter[reader]]: constant[Show the set of IPs seen in Flow Log records.] variable[ip_set] assign[=] call[name[set], parameter[]] for taget[name[record]] in starred[name[reader]] begin[:] if compare[name[record].log_status in tuple[[<ast.Name object at 0x7da204622050>, <ast.Name object at 0x7da18fe909d0>]]] begin[:] continue call[name[ip_set].add, parameter[name[record].srcaddr]] call[name[ip_set].add, parameter[name[record].dstaddr]] for taget[name[ip]] in starred[name[ip_set]] begin[:] call[name[print], parameter[name[ip]]]
keyword[def] identifier[action_ipset] ( identifier[reader] ,* identifier[args] ): literal[string] identifier[ip_set] = identifier[set] () keyword[for] identifier[record] keyword[in] identifier[reader] : keyword[if] identifier[record] . identifier[log_status] keyword[in] ( identifier[SKIPDATA] , identifier[NODATA] ): keyword[continue] identifier[ip_set] . identifier[add] ( identifier[record] . identifier[srcaddr] ) identifier[ip_set] . identifier[add] ( identifier[record] . identifier[dstaddr] ) keyword[for] identifier[ip] keyword[in] identifier[ip_set] : identifier[print] ( identifier[ip] )
def action_ipset(reader, *args): """Show the set of IPs seen in Flow Log records.""" ip_set = set() for record in reader: if record.log_status in (SKIPDATA, NODATA): continue # depends on [control=['if'], data=[]] ip_set.add(record.srcaddr) ip_set.add(record.dstaddr) # depends on [control=['for'], data=['record']] for ip in ip_set: print(ip) # depends on [control=['for'], data=['ip']]
def rc4_encrypt(key, data): """ Encrypts plaintext using RC4 with a 40-128 bit key :param key: The encryption key - a byte string 5-16 bytes long :param data: The plaintext - a byte string :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by OpenSSL :return: A byte string of the ciphertext """ if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' key must be 5 to 16 bytes (40 to 128 bits) long - is %s ''', len(key) )) return _encrypt('rc4', key, data, None, None)
def function[rc4_encrypt, parameter[key, data]]: constant[ Encrypts plaintext using RC4 with a 40-128 bit key :param key: The encryption key - a byte string 5-16 bytes long :param data: The plaintext - a byte string :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by OpenSSL :return: A byte string of the ciphertext ] if <ast.BoolOp object at 0x7da1b00eb6a0> begin[:] <ast.Raise object at 0x7da1b00313f0> return[call[name[_encrypt], parameter[constant[rc4], name[key], name[data], constant[None], constant[None]]]]
keyword[def] identifier[rc4_encrypt] ( identifier[key] , identifier[data] ): literal[string] keyword[if] identifier[len] ( identifier[key] )< literal[int] keyword[or] identifier[len] ( identifier[key] )> literal[int] : keyword[raise] identifier[ValueError] ( identifier[pretty_message] ( literal[string] , identifier[len] ( identifier[key] ) )) keyword[return] identifier[_encrypt] ( literal[string] , identifier[key] , identifier[data] , keyword[None] , keyword[None] )
def rc4_encrypt(key, data): """ Encrypts plaintext using RC4 with a 40-128 bit key :param key: The encryption key - a byte string 5-16 bytes long :param data: The plaintext - a byte string :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by OpenSSL :return: A byte string of the ciphertext """ if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message('\n key must be 5 to 16 bytes (40 to 128 bits) long - is %s\n ', len(key))) # depends on [control=['if'], data=[]] return _encrypt('rc4', key, data, None, None)
def _cache_is_expired(): """Indica si la caché está caducada""" now = timezone.now() timediff = TransCache.SINGLETON_CREATION_DATETIME - now return (timediff.total_seconds() > TransCache.SINGLETON_EXPIRATION_MAX_SECONDS)
def function[_cache_is_expired, parameter[]]: constant[Indica si la caché está caducada] variable[now] assign[=] call[name[timezone].now, parameter[]] variable[timediff] assign[=] binary_operation[name[TransCache].SINGLETON_CREATION_DATETIME - name[now]] return[compare[call[name[timediff].total_seconds, parameter[]] greater[>] name[TransCache].SINGLETON_EXPIRATION_MAX_SECONDS]]
keyword[def] identifier[_cache_is_expired] (): literal[string] identifier[now] = identifier[timezone] . identifier[now] () identifier[timediff] = identifier[TransCache] . identifier[SINGLETON_CREATION_DATETIME] - identifier[now] keyword[return] ( identifier[timediff] . identifier[total_seconds] ()> identifier[TransCache] . identifier[SINGLETON_EXPIRATION_MAX_SECONDS] )
def _cache_is_expired(): """Indica si la caché está caducada""" now = timezone.now() timediff = TransCache.SINGLETON_CREATION_DATETIME - now return timediff.total_seconds() > TransCache.SINGLETON_EXPIRATION_MAX_SECONDS
def pretty_size(value): """Convert a number of bytes into a human-readable string. Output is 2...5 characters. Values >= 1000 always produce output in form: x.xxxU, xx.xxU, xxxU, xxxxU. """ exp = int(math.log(value, 1024)) if value > 0 else 0 unit = 'bkMGTPEZY'[exp] if exp == 0: return '%d%s' % (value, unit) # value < 1024, result is always without fractions unit_value = value / (1024.0 ** exp) # value in the relevant units places = int(math.log(unit_value, 10)) # number of digits before decimal point return '%.*f%s' % (2 - places, unit_value, unit)
def function[pretty_size, parameter[value]]: constant[Convert a number of bytes into a human-readable string. Output is 2...5 characters. Values >= 1000 always produce output in form: x.xxxU, xx.xxU, xxxU, xxxxU. ] variable[exp] assign[=] <ast.IfExp object at 0x7da20e955b10> variable[unit] assign[=] call[constant[bkMGTPEZY]][name[exp]] if compare[name[exp] equal[==] constant[0]] begin[:] return[binary_operation[constant[%d%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e956c80>, <ast.Name object at 0x7da20e954be0>]]]] variable[unit_value] assign[=] binary_operation[name[value] / binary_operation[constant[1024.0] ** name[exp]]] variable[places] assign[=] call[name[int], parameter[call[name[math].log, parameter[name[unit_value], constant[10]]]]] return[binary_operation[constant[%.*f%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da18f813910>, <ast.Name object at 0x7da18f810940>, <ast.Name object at 0x7da18f810310>]]]]
keyword[def] identifier[pretty_size] ( identifier[value] ): literal[string] identifier[exp] = identifier[int] ( identifier[math] . identifier[log] ( identifier[value] , literal[int] )) keyword[if] identifier[value] > literal[int] keyword[else] literal[int] identifier[unit] = literal[string] [ identifier[exp] ] keyword[if] identifier[exp] == literal[int] : keyword[return] literal[string] %( identifier[value] , identifier[unit] ) identifier[unit_value] = identifier[value] /( literal[int] ** identifier[exp] ) identifier[places] = identifier[int] ( identifier[math] . identifier[log] ( identifier[unit_value] , literal[int] )) keyword[return] literal[string] %( literal[int] - identifier[places] , identifier[unit_value] , identifier[unit] )
def pretty_size(value): """Convert a number of bytes into a human-readable string. Output is 2...5 characters. Values >= 1000 always produce output in form: x.xxxU, xx.xxU, xxxU, xxxxU. """ exp = int(math.log(value, 1024)) if value > 0 else 0 unit = 'bkMGTPEZY'[exp] if exp == 0: return '%d%s' % (value, unit) # value < 1024, result is always without fractions # depends on [control=['if'], data=[]] unit_value = value / 1024.0 ** exp # value in the relevant units places = int(math.log(unit_value, 10)) # number of digits before decimal point return '%.*f%s' % (2 - places, unit_value, unit)
def add_member_by_id(self, member_id, membership_type='normal'): ''' Add a member to the board using the id. Membership type can be normal or admin. Returns JSON of all members if successful or raises an Unauthorised exception if not. ''' return self.fetch_json( uri_path=self.base_uri + '/members/%s' % member_id, http_method='PUT', query_params={ 'type': membership_type } )
def function[add_member_by_id, parameter[self, member_id, membership_type]]: constant[ Add a member to the board using the id. Membership type can be normal or admin. Returns JSON of all members if successful or raises an Unauthorised exception if not. ] return[call[name[self].fetch_json, parameter[]]]
keyword[def] identifier[add_member_by_id] ( identifier[self] , identifier[member_id] , identifier[membership_type] = literal[string] ): literal[string] keyword[return] identifier[self] . identifier[fetch_json] ( identifier[uri_path] = identifier[self] . identifier[base_uri] + literal[string] % identifier[member_id] , identifier[http_method] = literal[string] , identifier[query_params] ={ literal[string] : identifier[membership_type] } )
def add_member_by_id(self, member_id, membership_type='normal'): """ Add a member to the board using the id. Membership type can be normal or admin. Returns JSON of all members if successful or raises an Unauthorised exception if not. """ return self.fetch_json(uri_path=self.base_uri + '/members/%s' % member_id, http_method='PUT', query_params={'type': membership_type})
def save(self, idempotency_key=None): """Return a deferred.""" updated_params = self.serialize(None) headers = populate_headers(idempotency_key) if not updated_params: util.logger.debug("Trying to save already saved object %r", self) return defer.succeed(self) d = self.request('post', self.instance_url(), updated_params, headers) return d.addCallback(self.refresh_from).addCallback(lambda _: self)
def function[save, parameter[self, idempotency_key]]: constant[Return a deferred.] variable[updated_params] assign[=] call[name[self].serialize, parameter[constant[None]]] variable[headers] assign[=] call[name[populate_headers], parameter[name[idempotency_key]]] if <ast.UnaryOp object at 0x7da1b1623b50> begin[:] call[name[util].logger.debug, parameter[constant[Trying to save already saved object %r], name[self]]] return[call[name[defer].succeed, parameter[name[self]]]] variable[d] assign[=] call[name[self].request, parameter[constant[post], call[name[self].instance_url, parameter[]], name[updated_params], name[headers]]] return[call[call[name[d].addCallback, parameter[name[self].refresh_from]].addCallback, parameter[<ast.Lambda object at 0x7da1b16427d0>]]]
keyword[def] identifier[save] ( identifier[self] , identifier[idempotency_key] = keyword[None] ): literal[string] identifier[updated_params] = identifier[self] . identifier[serialize] ( keyword[None] ) identifier[headers] = identifier[populate_headers] ( identifier[idempotency_key] ) keyword[if] keyword[not] identifier[updated_params] : identifier[util] . identifier[logger] . identifier[debug] ( literal[string] , identifier[self] ) keyword[return] identifier[defer] . identifier[succeed] ( identifier[self] ) identifier[d] = identifier[self] . identifier[request] ( literal[string] , identifier[self] . identifier[instance_url] (), identifier[updated_params] , identifier[headers] ) keyword[return] identifier[d] . identifier[addCallback] ( identifier[self] . identifier[refresh_from] ). identifier[addCallback] ( keyword[lambda] identifier[_] : identifier[self] )
def save(self, idempotency_key=None): """Return a deferred.""" updated_params = self.serialize(None) headers = populate_headers(idempotency_key) if not updated_params: util.logger.debug('Trying to save already saved object %r', self) return defer.succeed(self) # depends on [control=['if'], data=[]] d = self.request('post', self.instance_url(), updated_params, headers) return d.addCallback(self.refresh_from).addCallback(lambda _: self)
def notebooks_in_git_index(fmt): """Return the list of modified and deleted ipynb files in the git index that match the given format""" git_status = system('git', 'status', '--porcelain') re_modified = re.compile(r'^[AM]+\s+(?P<name>.*)', re.MULTILINE) modified_files_in_git_index = re_modified.findall(git_status) files = [] for nb_file in modified_files_in_git_index: if nb_file.startswith('"') and nb_file.endswith('"'): nb_file = nb_file[1:-1] try: base_path(nb_file, fmt) files.append(nb_file) except InconsistentPath: continue return files
def function[notebooks_in_git_index, parameter[fmt]]: constant[Return the list of modified and deleted ipynb files in the git index that match the given format] variable[git_status] assign[=] call[name[system], parameter[constant[git], constant[status], constant[--porcelain]]] variable[re_modified] assign[=] call[name[re].compile, parameter[constant[^[AM]+\s+(?P<name>.*)], name[re].MULTILINE]] variable[modified_files_in_git_index] assign[=] call[name[re_modified].findall, parameter[name[git_status]]] variable[files] assign[=] list[[]] for taget[name[nb_file]] in starred[name[modified_files_in_git_index]] begin[:] if <ast.BoolOp object at 0x7da18fe924d0> begin[:] variable[nb_file] assign[=] call[name[nb_file]][<ast.Slice object at 0x7da18fe92530>] <ast.Try object at 0x7da18fe93fd0> return[name[files]]
keyword[def] identifier[notebooks_in_git_index] ( identifier[fmt] ): literal[string] identifier[git_status] = identifier[system] ( literal[string] , literal[string] , literal[string] ) identifier[re_modified] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[MULTILINE] ) identifier[modified_files_in_git_index] = identifier[re_modified] . identifier[findall] ( identifier[git_status] ) identifier[files] =[] keyword[for] identifier[nb_file] keyword[in] identifier[modified_files_in_git_index] : keyword[if] identifier[nb_file] . identifier[startswith] ( literal[string] ) keyword[and] identifier[nb_file] . identifier[endswith] ( literal[string] ): identifier[nb_file] = identifier[nb_file] [ literal[int] :- literal[int] ] keyword[try] : identifier[base_path] ( identifier[nb_file] , identifier[fmt] ) identifier[files] . identifier[append] ( identifier[nb_file] ) keyword[except] identifier[InconsistentPath] : keyword[continue] keyword[return] identifier[files]
def notebooks_in_git_index(fmt): """Return the list of modified and deleted ipynb files in the git index that match the given format""" git_status = system('git', 'status', '--porcelain') re_modified = re.compile('^[AM]+\\s+(?P<name>.*)', re.MULTILINE) modified_files_in_git_index = re_modified.findall(git_status) files = [] for nb_file in modified_files_in_git_index: if nb_file.startswith('"') and nb_file.endswith('"'): nb_file = nb_file[1:-1] # depends on [control=['if'], data=[]] try: base_path(nb_file, fmt) files.append(nb_file) # depends on [control=['try'], data=[]] except InconsistentPath: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['nb_file']] return files
def process_metadata(meta): """ Merge metadata of run on multiple grid districts Parameters ---------- meta: list of dict Metadata of run of each MV grid district Returns ------- dict Single metadata dict including merge metadata """ mvgds = [] metadata = meta[0] for mvgd in meta: if isinstance(mvgd['mv_grid_districts'], list): mvgds.extend(mvgd['mv_grid_districts']) else: mvgds.append(mvgd['mv_grid_districts']) metadata['mv_grid_districts'] = mvgds return metadata
def function[process_metadata, parameter[meta]]: constant[ Merge metadata of run on multiple grid districts Parameters ---------- meta: list of dict Metadata of run of each MV grid district Returns ------- dict Single metadata dict including merge metadata ] variable[mvgds] assign[=] list[[]] variable[metadata] assign[=] call[name[meta]][constant[0]] for taget[name[mvgd]] in starred[name[meta]] begin[:] if call[name[isinstance], parameter[call[name[mvgd]][constant[mv_grid_districts]], name[list]]] begin[:] call[name[mvgds].extend, parameter[call[name[mvgd]][constant[mv_grid_districts]]]] call[name[metadata]][constant[mv_grid_districts]] assign[=] name[mvgds] return[name[metadata]]
keyword[def] identifier[process_metadata] ( identifier[meta] ): literal[string] identifier[mvgds] =[] identifier[metadata] = identifier[meta] [ literal[int] ] keyword[for] identifier[mvgd] keyword[in] identifier[meta] : keyword[if] identifier[isinstance] ( identifier[mvgd] [ literal[string] ], identifier[list] ): identifier[mvgds] . identifier[extend] ( identifier[mvgd] [ literal[string] ]) keyword[else] : identifier[mvgds] . identifier[append] ( identifier[mvgd] [ literal[string] ]) identifier[metadata] [ literal[string] ]= identifier[mvgds] keyword[return] identifier[metadata]
def process_metadata(meta): """ Merge metadata of run on multiple grid districts Parameters ---------- meta: list of dict Metadata of run of each MV grid district Returns ------- dict Single metadata dict including merge metadata """ mvgds = [] metadata = meta[0] for mvgd in meta: if isinstance(mvgd['mv_grid_districts'], list): mvgds.extend(mvgd['mv_grid_districts']) # depends on [control=['if'], data=[]] else: mvgds.append(mvgd['mv_grid_districts']) # depends on [control=['for'], data=['mvgd']] metadata['mv_grid_districts'] = mvgds return metadata
def send_feedback(cls, type=FeedbackType.IDEA, referrer=None, text=None, api=None): """ Sends feedback to sevenbridges. :param type: FeedbackType wither IDEA, PROBLEM or THOUGHT. :param text: Feedback text. :param referrer: Feedback referrer. :param api: Api instance. """ api = api if api else cls._API data = {'type': type, 'text': text, 'referrer': referrer if referrer else six.text_type( client_info )} extra = { 'resource': cls.__name__, 'query': data } logger.info('Sending feedback', extra=extra) api.post(url=cls._URL['send_feedback'], data=data)
def function[send_feedback, parameter[cls, type, referrer, text, api]]: constant[ Sends feedback to sevenbridges. :param type: FeedbackType wither IDEA, PROBLEM or THOUGHT. :param text: Feedback text. :param referrer: Feedback referrer. :param api: Api instance. ] variable[api] assign[=] <ast.IfExp object at 0x7da18bc70ca0> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18bc70d90>, <ast.Constant object at 0x7da18bc72fb0>, <ast.Constant object at 0x7da18bc72110>], [<ast.Name object at 0x7da18bc71ae0>, <ast.Name object at 0x7da18bc732e0>, <ast.IfExp object at 0x7da18bc70220>]] variable[extra] assign[=] dictionary[[<ast.Constant object at 0x7da18bc72d10>, <ast.Constant object at 0x7da18bc71930>], [<ast.Attribute object at 0x7da18bc72650>, <ast.Name object at 0x7da18bc727a0>]] call[name[logger].info, parameter[constant[Sending feedback]]] call[name[api].post, parameter[]]
keyword[def] identifier[send_feedback] ( identifier[cls] , identifier[type] = identifier[FeedbackType] . identifier[IDEA] , identifier[referrer] = keyword[None] , identifier[text] = keyword[None] , identifier[api] = keyword[None] ): literal[string] identifier[api] = identifier[api] keyword[if] identifier[api] keyword[else] identifier[cls] . identifier[_API] identifier[data] ={ literal[string] : identifier[type] , literal[string] : identifier[text] , literal[string] : identifier[referrer] keyword[if] identifier[referrer] keyword[else] identifier[six] . identifier[text_type] ( identifier[client_info] )} identifier[extra] ={ literal[string] : identifier[cls] . identifier[__name__] , literal[string] : identifier[data] } identifier[logger] . identifier[info] ( literal[string] , identifier[extra] = identifier[extra] ) identifier[api] . identifier[post] ( identifier[url] = identifier[cls] . identifier[_URL] [ literal[string] ], identifier[data] = identifier[data] )
def send_feedback(cls, type=FeedbackType.IDEA, referrer=None, text=None, api=None): """ Sends feedback to sevenbridges. :param type: FeedbackType wither IDEA, PROBLEM or THOUGHT. :param text: Feedback text. :param referrer: Feedback referrer. :param api: Api instance. """ api = api if api else cls._API data = {'type': type, 'text': text, 'referrer': referrer if referrer else six.text_type(client_info)} extra = {'resource': cls.__name__, 'query': data} logger.info('Sending feedback', extra=extra) api.post(url=cls._URL['send_feedback'], data=data)
def get_cities(self): """Get a dictionary of cities keyed by geonameid.""" if self.cities is None: self.cities = self._load_data(self.cities, 'cities.json') return self.cities
def function[get_cities, parameter[self]]: constant[Get a dictionary of cities keyed by geonameid.] if compare[name[self].cities is constant[None]] begin[:] name[self].cities assign[=] call[name[self]._load_data, parameter[name[self].cities, constant[cities.json]]] return[name[self].cities]
keyword[def] identifier[get_cities] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[cities] keyword[is] keyword[None] : identifier[self] . identifier[cities] = identifier[self] . identifier[_load_data] ( identifier[self] . identifier[cities] , literal[string] ) keyword[return] identifier[self] . identifier[cities]
def get_cities(self): """Get a dictionary of cities keyed by geonameid.""" if self.cities is None: self.cities = self._load_data(self.cities, 'cities.json') # depends on [control=['if'], data=[]] return self.cities
def table( self, dirPath=None): """*Render the results as an ascii table* **Key Arguments:** - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None* **Return:** - `tableSources` -- the top-level transient data - `tablePhot` -- all photometry associated with the transients - `tableSpec` -- all spectral data associated with the transients - `tableFiles` -- all files associated with the matched transients found on the tns **Usage:** To render the results in ascii table format: .. code-block:: python tableSources, tablePhot, tableSpec, tableFiles = tns.table() print tableSources .. code-block:: text +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ | TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec | +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ | 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | http://wis-tns.weizmann.ac.il/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 | +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`. .. code-block:: python tns.table("~/tns") .. image:: https://i.imgur.com/m09M0ho.png :width: 800px :alt: ascii files """ if dirPath: p = self._file_prefix() tableSources = self.sourceResults.table( filepath=dirPath + "/" + p + "sources.ascii") tablePhot = self.photResults.table( filepath=dirPath + "/" + p + "phot.ascii") tableSpec = self.specResults.table( filepath=dirPath + "/" + p + "spec.ascii") tableFiles = self.relatedFilesResults.table( filepath=dirPath + "/" + p + "relatedFiles.ascii") else: tableSources = self.sourceResults.table() tablePhot = self.photResults.table() tableSpec = self.specResults.table() tableFiles = self.relatedFilesResults.table() return tableSources, tablePhot, tableSpec, tableFiles
def function[table, parameter[self, dirPath]]: constant[*Render the results as an ascii table* **Key Arguments:** - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None* **Return:** - `tableSources` -- the top-level transient data - `tablePhot` -- all photometry associated with the transients - `tableSpec` -- all spectral data associated with the transients - `tableFiles` -- all files associated with the matched transients found on the tns **Usage:** To render the results in ascii table format: .. code-block:: python tableSources, tablePhot, tableSpec, tableFiles = tns.table() print tableSources .. code-block:: text +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ | TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec | +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ | 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | http://wis-tns.weizmann.ac.il/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 | +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`. .. code-block:: python tns.table("~/tns") .. image:: https://i.imgur.com/m09M0ho.png :width: 800px :alt: ascii files ] if name[dirPath] begin[:] variable[p] assign[=] call[name[self]._file_prefix, parameter[]] variable[tableSources] assign[=] call[name[self].sourceResults.table, parameter[]] variable[tablePhot] assign[=] call[name[self].photResults.table, parameter[]] variable[tableSpec] assign[=] call[name[self].specResults.table, parameter[]] variable[tableFiles] assign[=] call[name[self].relatedFilesResults.table, parameter[]] return[tuple[[<ast.Name object at 0x7da1b1309a50>, <ast.Name object at 0x7da1b130a8c0>, <ast.Name object at 0x7da1b1308a90>, <ast.Name object at 0x7da1b1308c40>]]]
keyword[def] identifier[table] ( identifier[self] , identifier[dirPath] = keyword[None] ): literal[string] keyword[if] identifier[dirPath] : identifier[p] = identifier[self] . identifier[_file_prefix] () identifier[tableSources] = identifier[self] . identifier[sourceResults] . identifier[table] ( identifier[filepath] = identifier[dirPath] + literal[string] + identifier[p] + literal[string] ) identifier[tablePhot] = identifier[self] . identifier[photResults] . identifier[table] ( identifier[filepath] = identifier[dirPath] + literal[string] + identifier[p] + literal[string] ) identifier[tableSpec] = identifier[self] . identifier[specResults] . identifier[table] ( identifier[filepath] = identifier[dirPath] + literal[string] + identifier[p] + literal[string] ) identifier[tableFiles] = identifier[self] . identifier[relatedFilesResults] . identifier[table] ( identifier[filepath] = identifier[dirPath] + literal[string] + identifier[p] + literal[string] ) keyword[else] : identifier[tableSources] = identifier[self] . identifier[sourceResults] . identifier[table] () identifier[tablePhot] = identifier[self] . identifier[photResults] . identifier[table] () identifier[tableSpec] = identifier[self] . identifier[specResults] . identifier[table] () identifier[tableFiles] = identifier[self] . identifier[relatedFilesResults] . identifier[table] () keyword[return] identifier[tableSources] , identifier[tablePhot] , identifier[tableSpec] , identifier[tableFiles]
def table(self, dirPath=None): """*Render the results as an ascii table* **Key Arguments:** - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None* **Return:** - `tableSources` -- the top-level transient data - `tablePhot` -- all photometry associated with the transients - `tableSpec` -- all spectral data associated with the transients - `tableFiles` -- all files associated with the matched transients found on the tns **Usage:** To render the results in ascii table format: .. code-block:: python tableSources, tablePhot, tableSpec, tableFiles = tns.table() print tableSources .. code-block:: text +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ | TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec | +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ | 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | http://wis-tns.weizmann.ac.il/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 | +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+ You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`. .. code-block:: python tns.table("~/tns") .. image:: https://i.imgur.com/m09M0ho.png :width: 800px :alt: ascii files """ if dirPath: p = self._file_prefix() tableSources = self.sourceResults.table(filepath=dirPath + '/' + p + 'sources.ascii') tablePhot = self.photResults.table(filepath=dirPath + '/' + p + 'phot.ascii') tableSpec = self.specResults.table(filepath=dirPath + '/' + p + 'spec.ascii') tableFiles = self.relatedFilesResults.table(filepath=dirPath + '/' + p + 'relatedFiles.ascii') # depends on [control=['if'], data=[]] else: tableSources = self.sourceResults.table() tablePhot = self.photResults.table() tableSpec = self.specResults.table() tableFiles = self.relatedFilesResults.table() return (tableSources, tablePhot, tableSpec, tableFiles)
def ask(question, default=True, exact=False): """Ask the question in y/n form and return True/False. If you don't want a default 'yes', set default to None (or to False if you want a default 'no'). With exact=True, we want to get a literal 'yes' or 'no', at least when it does not match the default. """ if AUTO_RESPONSE: if default is None: msg = ("The question '%s' requires a manual answer, but " + "we're running in --no-input mode.") msg = msg % question raise RuntimeError(msg) logger.debug("Auto-responding '%s' to the question below." % ( default and "yes" or "no")) logger.debug(question) return default while True: yn = 'y/n' if default is True: yn = 'Y/n' if default is False: yn = 'y/N' q = question + " (%s)? " % yn answer = input(q) if answer: answer = answer else: answer = '' if not answer and default is not None: return default if exact and answer.lower() not in ('yes', 'no'): print("Please explicitly answer yes/no in full " "(or accept the default)") continue if answer: answer = answer[0].lower() if answer == 'y': return True if answer == 'n': return False # We really want an answer. print('Please explicitly answer y/n') continue
def function[ask, parameter[question, default, exact]]: constant[Ask the question in y/n form and return True/False. If you don't want a default 'yes', set default to None (or to False if you want a default 'no'). With exact=True, we want to get a literal 'yes' or 'no', at least when it does not match the default. ] if name[AUTO_RESPONSE] begin[:] if compare[name[default] is constant[None]] begin[:] variable[msg] assign[=] binary_operation[constant[The question '%s' requires a manual answer, but ] + constant[we're running in --no-input mode.]] variable[msg] assign[=] binary_operation[name[msg] <ast.Mod object at 0x7da2590d6920> name[question]] <ast.Raise object at 0x7da1b1601cc0> call[name[logger].debug, parameter[binary_operation[constant[Auto-responding '%s' to the question below.] <ast.Mod object at 0x7da2590d6920> <ast.BoolOp object at 0x7da1b1601b40>]]] call[name[logger].debug, parameter[name[question]]] return[name[default]] while constant[True] begin[:] variable[yn] assign[=] constant[y/n] if compare[name[default] is constant[True]] begin[:] variable[yn] assign[=] constant[Y/n] if compare[name[default] is constant[False]] begin[:] variable[yn] assign[=] constant[y/N] variable[q] assign[=] binary_operation[name[question] + binary_operation[constant[ (%s)? ] <ast.Mod object at 0x7da2590d6920> name[yn]]] variable[answer] assign[=] call[name[input], parameter[name[q]]] if name[answer] begin[:] variable[answer] assign[=] name[answer] if <ast.BoolOp object at 0x7da20c6ab250> begin[:] return[name[default]] if <ast.BoolOp object at 0x7da20c6a8f70> begin[:] call[name[print], parameter[constant[Please explicitly answer yes/no in full (or accept the default)]]] continue if name[answer] begin[:] variable[answer] assign[=] call[call[name[answer]][constant[0]].lower, parameter[]] if compare[name[answer] equal[==] constant[y]] begin[:] return[constant[True]] if compare[name[answer] equal[==] constant[n]] begin[:] return[constant[False]] call[name[print], parameter[constant[Please explicitly answer y/n]]] continue
keyword[def] identifier[ask] ( identifier[question] , identifier[default] = keyword[True] , identifier[exact] = keyword[False] ): literal[string] keyword[if] identifier[AUTO_RESPONSE] : keyword[if] identifier[default] keyword[is] keyword[None] : identifier[msg] =( literal[string] + literal[string] ) identifier[msg] = identifier[msg] % identifier[question] keyword[raise] identifier[RuntimeError] ( identifier[msg] ) identifier[logger] . identifier[debug] ( literal[string] %( identifier[default] keyword[and] literal[string] keyword[or] literal[string] )) identifier[logger] . identifier[debug] ( identifier[question] ) keyword[return] identifier[default] keyword[while] keyword[True] : identifier[yn] = literal[string] keyword[if] identifier[default] keyword[is] keyword[True] : identifier[yn] = literal[string] keyword[if] identifier[default] keyword[is] keyword[False] : identifier[yn] = literal[string] identifier[q] = identifier[question] + literal[string] % identifier[yn] identifier[answer] = identifier[input] ( identifier[q] ) keyword[if] identifier[answer] : identifier[answer] = identifier[answer] keyword[else] : identifier[answer] = literal[string] keyword[if] keyword[not] identifier[answer] keyword[and] identifier[default] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[default] keyword[if] identifier[exact] keyword[and] identifier[answer] . identifier[lower] () keyword[not] keyword[in] ( literal[string] , literal[string] ): identifier[print] ( literal[string] literal[string] ) keyword[continue] keyword[if] identifier[answer] : identifier[answer] = identifier[answer] [ literal[int] ]. identifier[lower] () keyword[if] identifier[answer] == literal[string] : keyword[return] keyword[True] keyword[if] identifier[answer] == literal[string] : keyword[return] keyword[False] identifier[print] ( literal[string] ) keyword[continue]
def ask(question, default=True, exact=False): """Ask the question in y/n form and return True/False. If you don't want a default 'yes', set default to None (or to False if you want a default 'no'). With exact=True, we want to get a literal 'yes' or 'no', at least when it does not match the default. """ if AUTO_RESPONSE: if default is None: msg = "The question '%s' requires a manual answer, but " + "we're running in --no-input mode." msg = msg % question raise RuntimeError(msg) # depends on [control=['if'], data=[]] logger.debug("Auto-responding '%s' to the question below." % (default and 'yes' or 'no')) logger.debug(question) return default # depends on [control=['if'], data=[]] while True: yn = 'y/n' if default is True: yn = 'Y/n' # depends on [control=['if'], data=[]] if default is False: yn = 'y/N' # depends on [control=['if'], data=[]] q = question + ' (%s)? ' % yn answer = input(q) if answer: answer = answer # depends on [control=['if'], data=[]] else: answer = '' if not answer and default is not None: return default # depends on [control=['if'], data=[]] if exact and answer.lower() not in ('yes', 'no'): print('Please explicitly answer yes/no in full (or accept the default)') continue # depends on [control=['if'], data=[]] if answer: answer = answer[0].lower() if answer == 'y': return True # depends on [control=['if'], data=[]] if answer == 'n': return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # We really want an answer. print('Please explicitly answer y/n') continue # depends on [control=['while'], data=[]]
def _read(self): """Get next packet from transport. :return: parsed packet in a tuple with message type and payload :rtype: :py:class:`collections.namedtuple` """ raw_response = self.transport.receive() response = Packet.parse(raw_response) # FIXME if response.response_type == Packet.EVENT and response.event_type == "log": # queue up any debug log messages, and get next self.log_events.append(response) # do something? self._read() else: return response
def function[_read, parameter[self]]: constant[Get next packet from transport. :return: parsed packet in a tuple with message type and payload :rtype: :py:class:`collections.namedtuple` ] variable[raw_response] assign[=] call[name[self].transport.receive, parameter[]] variable[response] assign[=] call[name[Packet].parse, parameter[name[raw_response]]] if <ast.BoolOp object at 0x7da1b2546ec0> begin[:] call[name[self].log_events.append, parameter[name[response]]] call[name[self]._read, parameter[]]
keyword[def] identifier[_read] ( identifier[self] ): literal[string] identifier[raw_response] = identifier[self] . identifier[transport] . identifier[receive] () identifier[response] = identifier[Packet] . identifier[parse] ( identifier[raw_response] ) keyword[if] identifier[response] . identifier[response_type] == identifier[Packet] . identifier[EVENT] keyword[and] identifier[response] . identifier[event_type] == literal[string] : identifier[self] . identifier[log_events] . identifier[append] ( identifier[response] ) identifier[self] . identifier[_read] () keyword[else] : keyword[return] identifier[response]
def _read(self): """Get next packet from transport. :return: parsed packet in a tuple with message type and payload :rtype: :py:class:`collections.namedtuple` """ raw_response = self.transport.receive() response = Packet.parse(raw_response) # FIXME if response.response_type == Packet.EVENT and response.event_type == 'log': # queue up any debug log messages, and get next self.log_events.append(response) # do something? self._read() # depends on [control=['if'], data=[]] else: return response
def __update_keyboard(self, milliseconds): """ Use the keyboard to control selection of the buttons. """ if Ragnarok.get_world().Keyboard.is_clicked(self.move_up_button): self.move_up() elif Ragnarok.get_world().Keyboard.is_clicked(self.move_down_button): self.move_down() elif Ragnarok.get_world().Keyboard.is_clicked(self.select_button): self.gui_buttons[self.current_index].clicked_action() for button in self.gui_buttons: button.update(milliseconds)
def function[__update_keyboard, parameter[self, milliseconds]]: constant[ Use the keyboard to control selection of the buttons. ] if call[call[name[Ragnarok].get_world, parameter[]].Keyboard.is_clicked, parameter[name[self].move_up_button]] begin[:] call[name[self].move_up, parameter[]] for taget[name[button]] in starred[name[self].gui_buttons] begin[:] call[name[button].update, parameter[name[milliseconds]]]
keyword[def] identifier[__update_keyboard] ( identifier[self] , identifier[milliseconds] ): literal[string] keyword[if] identifier[Ragnarok] . identifier[get_world] (). identifier[Keyboard] . identifier[is_clicked] ( identifier[self] . identifier[move_up_button] ): identifier[self] . identifier[move_up] () keyword[elif] identifier[Ragnarok] . identifier[get_world] (). identifier[Keyboard] . identifier[is_clicked] ( identifier[self] . identifier[move_down_button] ): identifier[self] . identifier[move_down] () keyword[elif] identifier[Ragnarok] . identifier[get_world] (). identifier[Keyboard] . identifier[is_clicked] ( identifier[self] . identifier[select_button] ): identifier[self] . identifier[gui_buttons] [ identifier[self] . identifier[current_index] ]. identifier[clicked_action] () keyword[for] identifier[button] keyword[in] identifier[self] . identifier[gui_buttons] : identifier[button] . identifier[update] ( identifier[milliseconds] )
def __update_keyboard(self, milliseconds): """ Use the keyboard to control selection of the buttons. """ if Ragnarok.get_world().Keyboard.is_clicked(self.move_up_button): self.move_up() # depends on [control=['if'], data=[]] elif Ragnarok.get_world().Keyboard.is_clicked(self.move_down_button): self.move_down() # depends on [control=['if'], data=[]] elif Ragnarok.get_world().Keyboard.is_clicked(self.select_button): self.gui_buttons[self.current_index].clicked_action() # depends on [control=['if'], data=[]] for button in self.gui_buttons: button.update(milliseconds) # depends on [control=['for'], data=['button']]
def ngram_diff(args, parser): """Outputs the results of performing a diff query.""" store = utils.get_data_store(args) corpus = utils.get_corpus(args) catalogue = utils.get_catalogue(args) tokenizer = utils.get_tokenizer(args) store.validate(corpus, catalogue) if args.asymmetric: store.diff_asymmetric(catalogue, args.asymmetric, tokenizer, sys.stdout) else: store.diff(catalogue, tokenizer, sys.stdout)
def function[ngram_diff, parameter[args, parser]]: constant[Outputs the results of performing a diff query.] variable[store] assign[=] call[name[utils].get_data_store, parameter[name[args]]] variable[corpus] assign[=] call[name[utils].get_corpus, parameter[name[args]]] variable[catalogue] assign[=] call[name[utils].get_catalogue, parameter[name[args]]] variable[tokenizer] assign[=] call[name[utils].get_tokenizer, parameter[name[args]]] call[name[store].validate, parameter[name[corpus], name[catalogue]]] if name[args].asymmetric begin[:] call[name[store].diff_asymmetric, parameter[name[catalogue], name[args].asymmetric, name[tokenizer], name[sys].stdout]]
keyword[def] identifier[ngram_diff] ( identifier[args] , identifier[parser] ): literal[string] identifier[store] = identifier[utils] . identifier[get_data_store] ( identifier[args] ) identifier[corpus] = identifier[utils] . identifier[get_corpus] ( identifier[args] ) identifier[catalogue] = identifier[utils] . identifier[get_catalogue] ( identifier[args] ) identifier[tokenizer] = identifier[utils] . identifier[get_tokenizer] ( identifier[args] ) identifier[store] . identifier[validate] ( identifier[corpus] , identifier[catalogue] ) keyword[if] identifier[args] . identifier[asymmetric] : identifier[store] . identifier[diff_asymmetric] ( identifier[catalogue] , identifier[args] . identifier[asymmetric] , identifier[tokenizer] , identifier[sys] . identifier[stdout] ) keyword[else] : identifier[store] . identifier[diff] ( identifier[catalogue] , identifier[tokenizer] , identifier[sys] . identifier[stdout] )
def ngram_diff(args, parser): """Outputs the results of performing a diff query.""" store = utils.get_data_store(args) corpus = utils.get_corpus(args) catalogue = utils.get_catalogue(args) tokenizer = utils.get_tokenizer(args) store.validate(corpus, catalogue) if args.asymmetric: store.diff_asymmetric(catalogue, args.asymmetric, tokenizer, sys.stdout) # depends on [control=['if'], data=[]] else: store.diff(catalogue, tokenizer, sys.stdout)
def unflatten(guide, falttened_input): """Unflatten a falttened generator. Args: guide: A guide list to follow the structure falttened_input: A flattened iterator object Usage: guide = [["a"], ["b","c","d"], [["e"]], ["f"]] input_list = [0, 1, 2, 3, 4, 5, 6, 7] unflatten(guide, iter(input_list)) >> [[0], [1, 2, 3], [[4]], [5]] """ return [unflatten(sub_list, falttened_input) if isinstance(sub_list, list) else next(falttened_input) for sub_list in guide]
def function[unflatten, parameter[guide, falttened_input]]: constant[Unflatten a falttened generator. Args: guide: A guide list to follow the structure falttened_input: A flattened iterator object Usage: guide = [["a"], ["b","c","d"], [["e"]], ["f"]] input_list = [0, 1, 2, 3, 4, 5, 6, 7] unflatten(guide, iter(input_list)) >> [[0], [1, 2, 3], [[4]], [5]] ] return[<ast.ListComp object at 0x7da1b12765f0>]
keyword[def] identifier[unflatten] ( identifier[guide] , identifier[falttened_input] ): literal[string] keyword[return] [ identifier[unflatten] ( identifier[sub_list] , identifier[falttened_input] ) keyword[if] identifier[isinstance] ( identifier[sub_list] , identifier[list] ) keyword[else] identifier[next] ( identifier[falttened_input] ) keyword[for] identifier[sub_list] keyword[in] identifier[guide] ]
def unflatten(guide, falttened_input): """Unflatten a falttened generator. Args: guide: A guide list to follow the structure falttened_input: A flattened iterator object Usage: guide = [["a"], ["b","c","d"], [["e"]], ["f"]] input_list = [0, 1, 2, 3, 4, 5, 6, 7] unflatten(guide, iter(input_list)) >> [[0], [1, 2, 3], [[4]], [5]] """ return [unflatten(sub_list, falttened_input) if isinstance(sub_list, list) else next(falttened_input) for sub_list in guide]
def bottleneck_block(cnn, depth, depth_bottleneck, stride, pre_activation): """Bottleneck block with identity short-cut. Args: cnn: the network to append bottleneck blocks. depth: the number of output filters for this bottleneck block. depth_bottleneck: the number of bottleneck filters for this block. stride: Stride used in the first layer of the bottleneck block. pre_activation: use pre_activation structure used in v2 or not. """ if pre_activation: bottleneck_block_v2(cnn, depth, depth_bottleneck, stride) else: bottleneck_block_v1(cnn, depth, depth_bottleneck, stride)
def function[bottleneck_block, parameter[cnn, depth, depth_bottleneck, stride, pre_activation]]: constant[Bottleneck block with identity short-cut. Args: cnn: the network to append bottleneck blocks. depth: the number of output filters for this bottleneck block. depth_bottleneck: the number of bottleneck filters for this block. stride: Stride used in the first layer of the bottleneck block. pre_activation: use pre_activation structure used in v2 or not. ] if name[pre_activation] begin[:] call[name[bottleneck_block_v2], parameter[name[cnn], name[depth], name[depth_bottleneck], name[stride]]]
keyword[def] identifier[bottleneck_block] ( identifier[cnn] , identifier[depth] , identifier[depth_bottleneck] , identifier[stride] , identifier[pre_activation] ): literal[string] keyword[if] identifier[pre_activation] : identifier[bottleneck_block_v2] ( identifier[cnn] , identifier[depth] , identifier[depth_bottleneck] , identifier[stride] ) keyword[else] : identifier[bottleneck_block_v1] ( identifier[cnn] , identifier[depth] , identifier[depth_bottleneck] , identifier[stride] )
def bottleneck_block(cnn, depth, depth_bottleneck, stride, pre_activation): """Bottleneck block with identity short-cut. Args: cnn: the network to append bottleneck blocks. depth: the number of output filters for this bottleneck block. depth_bottleneck: the number of bottleneck filters for this block. stride: Stride used in the first layer of the bottleneck block. pre_activation: use pre_activation structure used in v2 or not. """ if pre_activation: bottleneck_block_v2(cnn, depth, depth_bottleneck, stride) # depends on [control=['if'], data=[]] else: bottleneck_block_v1(cnn, depth, depth_bottleneck, stride)
def set_x10_address(self, x10address): """Set the X10 address for the current group/button.""" set_cmd = self._create_set_property_msg('_x10_house_code', 0x04, x10address) self._send_method(set_cmd, self._property_set)
def function[set_x10_address, parameter[self, x10address]]: constant[Set the X10 address for the current group/button.] variable[set_cmd] assign[=] call[name[self]._create_set_property_msg, parameter[constant[_x10_house_code], constant[4], name[x10address]]] call[name[self]._send_method, parameter[name[set_cmd], name[self]._property_set]]
keyword[def] identifier[set_x10_address] ( identifier[self] , identifier[x10address] ): literal[string] identifier[set_cmd] = identifier[self] . identifier[_create_set_property_msg] ( literal[string] , literal[int] , identifier[x10address] ) identifier[self] . identifier[_send_method] ( identifier[set_cmd] , identifier[self] . identifier[_property_set] )
def set_x10_address(self, x10address): """Set the X10 address for the current group/button.""" set_cmd = self._create_set_property_msg('_x10_house_code', 4, x10address) self._send_method(set_cmd, self._property_set)
def _delete_device(device): ''' Returns a vim.vm.device.VirtualDeviceSpec specifying to remove a virtual machine device device Device data type object ''' log.trace('Deleting device with type %s', type(device)) device_spec = vim.vm.device.VirtualDeviceSpec() device_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove device_spec.device = device return device_spec
def function[_delete_device, parameter[device]]: constant[ Returns a vim.vm.device.VirtualDeviceSpec specifying to remove a virtual machine device device Device data type object ] call[name[log].trace, parameter[constant[Deleting device with type %s], call[name[type], parameter[name[device]]]]] variable[device_spec] assign[=] call[name[vim].vm.device.VirtualDeviceSpec, parameter[]] name[device_spec].operation assign[=] name[vim].vm.device.VirtualDeviceSpec.Operation.remove name[device_spec].device assign[=] name[device] return[name[device_spec]]
keyword[def] identifier[_delete_device] ( identifier[device] ): literal[string] identifier[log] . identifier[trace] ( literal[string] , identifier[type] ( identifier[device] )) identifier[device_spec] = identifier[vim] . identifier[vm] . identifier[device] . identifier[VirtualDeviceSpec] () identifier[device_spec] . identifier[operation] = identifier[vim] . identifier[vm] . identifier[device] . identifier[VirtualDeviceSpec] . identifier[Operation] . identifier[remove] identifier[device_spec] . identifier[device] = identifier[device] keyword[return] identifier[device_spec]
def _delete_device(device): """ Returns a vim.vm.device.VirtualDeviceSpec specifying to remove a virtual machine device device Device data type object """ log.trace('Deleting device with type %s', type(device)) device_spec = vim.vm.device.VirtualDeviceSpec() device_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove device_spec.device = device return device_spec
def runAddAnnouncement(self, flaskrequest): """ Takes a flask request from the frontend and attempts to parse into an AnnouncePeerRequest. If successful, it will log the announcement to the `announcement` table with some other metadata gathered from the request. """ announcement = {} # We want to parse the request ourselves to collect a little more # data about it. try: requestData = protocol.fromJson( flaskrequest.get_data(), protocol.AnnouncePeerRequest) announcement['hostname'] = flaskrequest.host_url announcement['remote_addr'] = flaskrequest.remote_addr announcement['user_agent'] = flaskrequest.headers.get('User-Agent') except AttributeError: # Sometimes in testing we will send protocol requests instead # of flask requests and so the hostname and user agent won't # be present. try: requestData = protocol.fromJson( flaskrequest, protocol.AnnouncePeerRequest) except Exception as e: raise exceptions.InvalidJsonException(e) except Exception as e: raise exceptions.InvalidJsonException(e) # Validate the url before accepting the announcement peer = datamodel.peers.Peer(requestData.peer.url) peer.setAttributesJson(protocol.toJson( requestData.peer.attributes)) announcement['url'] = peer.getUrl() announcement['attributes'] = peer.getAttributes() try: self.getDataRepository().insertAnnouncement(announcement) except: raise exceptions.BadRequestException(announcement['url']) return protocol.toJson( protocol.AnnouncePeerResponse(success=True))
def function[runAddAnnouncement, parameter[self, flaskrequest]]: constant[ Takes a flask request from the frontend and attempts to parse into an AnnouncePeerRequest. If successful, it will log the announcement to the `announcement` table with some other metadata gathered from the request. ] variable[announcement] assign[=] dictionary[[], []] <ast.Try object at 0x7da207f01990> variable[peer] assign[=] call[name[datamodel].peers.Peer, parameter[name[requestData].peer.url]] call[name[peer].setAttributesJson, parameter[call[name[protocol].toJson, parameter[name[requestData].peer.attributes]]]] call[name[announcement]][constant[url]] assign[=] call[name[peer].getUrl, parameter[]] call[name[announcement]][constant[attributes]] assign[=] call[name[peer].getAttributes, parameter[]] <ast.Try object at 0x7da207f02ef0> return[call[name[protocol].toJson, parameter[call[name[protocol].AnnouncePeerResponse, parameter[]]]]]
keyword[def] identifier[runAddAnnouncement] ( identifier[self] , identifier[flaskrequest] ): literal[string] identifier[announcement] ={} keyword[try] : identifier[requestData] = identifier[protocol] . identifier[fromJson] ( identifier[flaskrequest] . identifier[get_data] (), identifier[protocol] . identifier[AnnouncePeerRequest] ) identifier[announcement] [ literal[string] ]= identifier[flaskrequest] . identifier[host_url] identifier[announcement] [ literal[string] ]= identifier[flaskrequest] . identifier[remote_addr] identifier[announcement] [ literal[string] ]= identifier[flaskrequest] . identifier[headers] . identifier[get] ( literal[string] ) keyword[except] identifier[AttributeError] : keyword[try] : identifier[requestData] = identifier[protocol] . identifier[fromJson] ( identifier[flaskrequest] , identifier[protocol] . identifier[AnnouncePeerRequest] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[exceptions] . identifier[InvalidJsonException] ( identifier[e] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[exceptions] . identifier[InvalidJsonException] ( identifier[e] ) identifier[peer] = identifier[datamodel] . identifier[peers] . identifier[Peer] ( identifier[requestData] . identifier[peer] . identifier[url] ) identifier[peer] . identifier[setAttributesJson] ( identifier[protocol] . identifier[toJson] ( identifier[requestData] . identifier[peer] . identifier[attributes] )) identifier[announcement] [ literal[string] ]= identifier[peer] . identifier[getUrl] () identifier[announcement] [ literal[string] ]= identifier[peer] . identifier[getAttributes] () keyword[try] : identifier[self] . identifier[getDataRepository] (). identifier[insertAnnouncement] ( identifier[announcement] ) keyword[except] : keyword[raise] identifier[exceptions] . identifier[BadRequestException] ( identifier[announcement] [ literal[string] ]) keyword[return] identifier[protocol] . identifier[toJson] ( identifier[protocol] . identifier[AnnouncePeerResponse] ( identifier[success] = keyword[True] ))
def runAddAnnouncement(self, flaskrequest): """ Takes a flask request from the frontend and attempts to parse into an AnnouncePeerRequest. If successful, it will log the announcement to the `announcement` table with some other metadata gathered from the request. """ announcement = {} # We want to parse the request ourselves to collect a little more # data about it. try: requestData = protocol.fromJson(flaskrequest.get_data(), protocol.AnnouncePeerRequest) announcement['hostname'] = flaskrequest.host_url announcement['remote_addr'] = flaskrequest.remote_addr announcement['user_agent'] = flaskrequest.headers.get('User-Agent') # depends on [control=['try'], data=[]] except AttributeError: # Sometimes in testing we will send protocol requests instead # of flask requests and so the hostname and user agent won't # be present. try: requestData = protocol.fromJson(flaskrequest, protocol.AnnouncePeerRequest) # depends on [control=['try'], data=[]] except Exception as e: raise exceptions.InvalidJsonException(e) # depends on [control=['except'], data=['e']] # depends on [control=['except'], data=[]] except Exception as e: raise exceptions.InvalidJsonException(e) # depends on [control=['except'], data=['e']] # Validate the url before accepting the announcement peer = datamodel.peers.Peer(requestData.peer.url) peer.setAttributesJson(protocol.toJson(requestData.peer.attributes)) announcement['url'] = peer.getUrl() announcement['attributes'] = peer.getAttributes() try: self.getDataRepository().insertAnnouncement(announcement) # depends on [control=['try'], data=[]] except: raise exceptions.BadRequestException(announcement['url']) # depends on [control=['except'], data=[]] return protocol.toJson(protocol.AnnouncePeerResponse(success=True))
def delete_key_recursive(hive, key, use_32bit_registry=False): r''' .. versionadded:: 2015.5.4 Delete a registry key to include all subkeys and value/data pairs. Args: hive (str): The name of the hive. Can be one of the following - HKEY_LOCAL_MACHINE or HKLM - HKEY_CURRENT_USER or HKCU - HKEY_USER or HKU - HKEY_CLASSES_ROOT or HKCR - HKEY_CURRENT_CONFIG or HKCC key (str): The key to remove (looks like a path) use_32bit_registry (bool): Deletes the 32bit portion of the registry on 64bit installations. On 32bit machines this is ignored. Returns: dict: A dictionary listing the keys that deleted successfully as well as those that failed to delete. CLI Example: The following example will remove ``delete_me`` and all its subkeys from the ``SOFTWARE`` key in ``HKEY_LOCAL_MACHINE``: .. code-block:: bash salt '*' reg.delete_key_recursive HKLM SOFTWARE\\delete_me ''' return __utils__['reg.delete_key_recursive'](hive=hive, key=key, use_32bit_registry=use_32bit_registry)
def function[delete_key_recursive, parameter[hive, key, use_32bit_registry]]: constant[ .. versionadded:: 2015.5.4 Delete a registry key to include all subkeys and value/data pairs. Args: hive (str): The name of the hive. Can be one of the following - HKEY_LOCAL_MACHINE or HKLM - HKEY_CURRENT_USER or HKCU - HKEY_USER or HKU - HKEY_CLASSES_ROOT or HKCR - HKEY_CURRENT_CONFIG or HKCC key (str): The key to remove (looks like a path) use_32bit_registry (bool): Deletes the 32bit portion of the registry on 64bit installations. On 32bit machines this is ignored. Returns: dict: A dictionary listing the keys that deleted successfully as well as those that failed to delete. CLI Example: The following example will remove ``delete_me`` and all its subkeys from the ``SOFTWARE`` key in ``HKEY_LOCAL_MACHINE``: .. code-block:: bash salt '*' reg.delete_key_recursive HKLM SOFTWARE\\delete_me ] return[call[call[name[__utils__]][constant[reg.delete_key_recursive]], parameter[]]]
keyword[def] identifier[delete_key_recursive] ( identifier[hive] , identifier[key] , identifier[use_32bit_registry] = keyword[False] ): literal[string] keyword[return] identifier[__utils__] [ literal[string] ]( identifier[hive] = identifier[hive] , identifier[key] = identifier[key] , identifier[use_32bit_registry] = identifier[use_32bit_registry] )
def delete_key_recursive(hive, key, use_32bit_registry=False): """ .. versionadded:: 2015.5.4 Delete a registry key to include all subkeys and value/data pairs. Args: hive (str): The name of the hive. Can be one of the following - HKEY_LOCAL_MACHINE or HKLM - HKEY_CURRENT_USER or HKCU - HKEY_USER or HKU - HKEY_CLASSES_ROOT or HKCR - HKEY_CURRENT_CONFIG or HKCC key (str): The key to remove (looks like a path) use_32bit_registry (bool): Deletes the 32bit portion of the registry on 64bit installations. On 32bit machines this is ignored. Returns: dict: A dictionary listing the keys that deleted successfully as well as those that failed to delete. CLI Example: The following example will remove ``delete_me`` and all its subkeys from the ``SOFTWARE`` key in ``HKEY_LOCAL_MACHINE``: .. code-block:: bash salt '*' reg.delete_key_recursive HKLM SOFTWARE\\\\delete_me """ return __utils__['reg.delete_key_recursive'](hive=hive, key=key, use_32bit_registry=use_32bit_registry)
def shell_exec(command, **kwargs): # from gitapi.py """Excecutes the given command silently. """ proc = Popen(shlex.split(command), stdout=PIPE, stderr=PIPE, **kwargs) out, err = [x.decode("utf-8") for x in proc.communicate()] return {'out': out, 'err': err, 'code': proc.returncode}
def function[shell_exec, parameter[command]]: constant[Excecutes the given command silently. ] variable[proc] assign[=] call[name[Popen], parameter[call[name[shlex].split, parameter[name[command]]]]] <ast.Tuple object at 0x7da204565f60> assign[=] <ast.ListComp object at 0x7da2045663e0> return[dictionary[[<ast.Constant object at 0x7da204347700>, <ast.Constant object at 0x7da18ede66e0>, <ast.Constant object at 0x7da18ede62f0>], [<ast.Name object at 0x7da18ede7640>, <ast.Name object at 0x7da18ede4dc0>, <ast.Attribute object at 0x7da18ede7400>]]]
keyword[def] identifier[shell_exec] ( identifier[command] ,** identifier[kwargs] ): literal[string] identifier[proc] = identifier[Popen] ( identifier[shlex] . identifier[split] ( identifier[command] ), identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] ,** identifier[kwargs] ) identifier[out] , identifier[err] =[ identifier[x] . identifier[decode] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[proc] . identifier[communicate] ()] keyword[return] { literal[string] : identifier[out] , literal[string] : identifier[err] , literal[string] : identifier[proc] . identifier[returncode] }
def shell_exec(command, **kwargs): # from gitapi.py 'Excecutes the given command silently.\n ' proc = Popen(shlex.split(command), stdout=PIPE, stderr=PIPE, **kwargs) (out, err) = [x.decode('utf-8') for x in proc.communicate()] return {'out': out, 'err': err, 'code': proc.returncode}
def change_status(request, page_id): """ Switch the status of a page. """ perm = request.user.has_perm('pages.change_page') if perm and request.method == 'POST': page = Page.objects.get(pk=page_id) page.status = int(request.POST['status']) page.invalidate() page.save() return HttpResponse(str(page.status)) raise Http404
def function[change_status, parameter[request, page_id]]: constant[ Switch the status of a page. ] variable[perm] assign[=] call[name[request].user.has_perm, parameter[constant[pages.change_page]]] if <ast.BoolOp object at 0x7da2041d9180> begin[:] variable[page] assign[=] call[name[Page].objects.get, parameter[]] name[page].status assign[=] call[name[int], parameter[call[name[request].POST][constant[status]]]] call[name[page].invalidate, parameter[]] call[name[page].save, parameter[]] return[call[name[HttpResponse], parameter[call[name[str], parameter[name[page].status]]]]] <ast.Raise object at 0x7da2041db880>
keyword[def] identifier[change_status] ( identifier[request] , identifier[page_id] ): literal[string] identifier[perm] = identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] ) keyword[if] identifier[perm] keyword[and] identifier[request] . identifier[method] == literal[string] : identifier[page] = identifier[Page] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[page_id] ) identifier[page] . identifier[status] = identifier[int] ( identifier[request] . identifier[POST] [ literal[string] ]) identifier[page] . identifier[invalidate] () identifier[page] . identifier[save] () keyword[return] identifier[HttpResponse] ( identifier[str] ( identifier[page] . identifier[status] )) keyword[raise] identifier[Http404]
def change_status(request, page_id): """ Switch the status of a page. """ perm = request.user.has_perm('pages.change_page') if perm and request.method == 'POST': page = Page.objects.get(pk=page_id) page.status = int(request.POST['status']) page.invalidate() page.save() return HttpResponse(str(page.status)) # depends on [control=['if'], data=[]] raise Http404
def parse_course_key(course_identifier): """ Return the serialized course key given either a course run ID or course key. """ try: course_run_key = CourseKey.from_string(course_identifier) except InvalidKeyError: # Assume we already have a course key. return course_identifier return quote_plus(' '.join([course_run_key.org, course_run_key.course]))
def function[parse_course_key, parameter[course_identifier]]: constant[ Return the serialized course key given either a course run ID or course key. ] <ast.Try object at 0x7da18f09e6b0> return[call[name[quote_plus], parameter[call[constant[ ].join, parameter[list[[<ast.Attribute object at 0x7da18f09f550>, <ast.Attribute object at 0x7da18f09c190>]]]]]]]
keyword[def] identifier[parse_course_key] ( identifier[course_identifier] ): literal[string] keyword[try] : identifier[course_run_key] = identifier[CourseKey] . identifier[from_string] ( identifier[course_identifier] ) keyword[except] identifier[InvalidKeyError] : keyword[return] identifier[course_identifier] keyword[return] identifier[quote_plus] ( literal[string] . identifier[join] ([ identifier[course_run_key] . identifier[org] , identifier[course_run_key] . identifier[course] ]))
def parse_course_key(course_identifier): """ Return the serialized course key given either a course run ID or course key. """ try: course_run_key = CourseKey.from_string(course_identifier) # depends on [control=['try'], data=[]] except InvalidKeyError: # Assume we already have a course key. return course_identifier # depends on [control=['except'], data=[]] return quote_plus(' '.join([course_run_key.org, course_run_key.course]))
def rerun(client, run, job): """Re-run existing workflow or tool using CWL runner.""" from renku.models.provenance import ProcessRun activity = client.process_commmit() if not isinstance(activity, ProcessRun): click.secho('No tool was found.', fg='red', file=sys.stderr) return try: args = ['cwl-runner', activity.path] if job: job_file = tempfile.NamedTemporaryFile( suffix='.yml', dir=os.getcwd(), delete=False ) args.append(job_file.name) with job_file as fp: yaml.dump(yaml.safe_load(job), stream=fp, encoding='utf-8') if run: return call(args, cwd=os.getcwd()) finally: if job: os.unlink(job_file.name)
def function[rerun, parameter[client, run, job]]: constant[Re-run existing workflow or tool using CWL runner.] from relative_module[renku.models.provenance] import module[ProcessRun] variable[activity] assign[=] call[name[client].process_commmit, parameter[]] if <ast.UnaryOp object at 0x7da18f7202b0> begin[:] call[name[click].secho, parameter[constant[No tool was found.]]] return[None] <ast.Try object at 0x7da18f720310>
keyword[def] identifier[rerun] ( identifier[client] , identifier[run] , identifier[job] ): literal[string] keyword[from] identifier[renku] . identifier[models] . identifier[provenance] keyword[import] identifier[ProcessRun] identifier[activity] = identifier[client] . identifier[process_commmit] () keyword[if] keyword[not] identifier[isinstance] ( identifier[activity] , identifier[ProcessRun] ): identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) keyword[return] keyword[try] : identifier[args] =[ literal[string] , identifier[activity] . identifier[path] ] keyword[if] identifier[job] : identifier[job_file] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[suffix] = literal[string] , identifier[dir] = identifier[os] . identifier[getcwd] (), identifier[delete] = keyword[False] ) identifier[args] . identifier[append] ( identifier[job_file] . identifier[name] ) keyword[with] identifier[job_file] keyword[as] identifier[fp] : identifier[yaml] . identifier[dump] ( identifier[yaml] . identifier[safe_load] ( identifier[job] ), identifier[stream] = identifier[fp] , identifier[encoding] = literal[string] ) keyword[if] identifier[run] : keyword[return] identifier[call] ( identifier[args] , identifier[cwd] = identifier[os] . identifier[getcwd] ()) keyword[finally] : keyword[if] identifier[job] : identifier[os] . identifier[unlink] ( identifier[job_file] . identifier[name] )
def rerun(client, run, job): """Re-run existing workflow or tool using CWL runner.""" from renku.models.provenance import ProcessRun activity = client.process_commmit() if not isinstance(activity, ProcessRun): click.secho('No tool was found.', fg='red', file=sys.stderr) return # depends on [control=['if'], data=[]] try: args = ['cwl-runner', activity.path] if job: job_file = tempfile.NamedTemporaryFile(suffix='.yml', dir=os.getcwd(), delete=False) args.append(job_file.name) with job_file as fp: yaml.dump(yaml.safe_load(job), stream=fp, encoding='utf-8') # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]] if run: return call(args, cwd=os.getcwd()) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: if job: os.unlink(job_file.name) # depends on [control=['if'], data=[]]
def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" with self.engine.begin() as db: return move_remote_checkpoints( db, self.user_id, old_path, new_path, )
def function[rename_all_checkpoints, parameter[self, old_path, new_path]]: constant[Rename all checkpoints for old_path to new_path.] with call[name[self].engine.begin, parameter[]] begin[:] return[call[name[move_remote_checkpoints], parameter[name[db], name[self].user_id, name[old_path], name[new_path]]]]
keyword[def] identifier[rename_all_checkpoints] ( identifier[self] , identifier[old_path] , identifier[new_path] ): literal[string] keyword[with] identifier[self] . identifier[engine] . identifier[begin] () keyword[as] identifier[db] : keyword[return] identifier[move_remote_checkpoints] ( identifier[db] , identifier[self] . identifier[user_id] , identifier[old_path] , identifier[new_path] , )
def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" with self.engine.begin() as db: return move_remote_checkpoints(db, self.user_id, old_path, new_path) # depends on [control=['with'], data=['db']]
def _expectation(p, kern, feat, none2, none3, nghp=None): r""" Compute the expectation: <\Sum_i Ki_{X, Z}>_p(X) - \Sum_i Ki_{.,.} :: Sum kernel :return: NxM """ return functools.reduce(tf.add, [ expectation(p, (k, feat), nghp=nghp) for k in kern.kernels])
def function[_expectation, parameter[p, kern, feat, none2, none3, nghp]]: constant[ Compute the expectation: <\Sum_i Ki_{X, Z}>_p(X) - \Sum_i Ki_{.,.} :: Sum kernel :return: NxM ] return[call[name[functools].reduce, parameter[name[tf].add, <ast.ListComp object at 0x7da1b1c3c4f0>]]]
keyword[def] identifier[_expectation] ( identifier[p] , identifier[kern] , identifier[feat] , identifier[none2] , identifier[none3] , identifier[nghp] = keyword[None] ): literal[string] keyword[return] identifier[functools] . identifier[reduce] ( identifier[tf] . identifier[add] ,[ identifier[expectation] ( identifier[p] ,( identifier[k] , identifier[feat] ), identifier[nghp] = identifier[nghp] ) keyword[for] identifier[k] keyword[in] identifier[kern] . identifier[kernels] ])
def _expectation(p, kern, feat, none2, none3, nghp=None): """ Compute the expectation: <\\Sum_i Ki_{X, Z}>_p(X) - \\Sum_i Ki_{.,.} :: Sum kernel :return: NxM """ return functools.reduce(tf.add, [expectation(p, (k, feat), nghp=nghp) for k in kern.kernels])
def reverseCommit(self): """ Re-insert the previously deleted line. """ if self.markerPos is None: return # Remove the specified string from the same position in every line # in between the mark and the cursor (inclusive). col = min((self.markerPos[1], self.cursorPos[1])) rng = range(self.markerPos[0], self.cursorPos[0] + 1) for idx, line in enumerate(rng): text = self.removedText[idx] if text != '': self.baseClass.insertAt(text, line, col) self.qteWidget.setCursorPosition(*self.cursorPos)
def function[reverseCommit, parameter[self]]: constant[ Re-insert the previously deleted line. ] if compare[name[self].markerPos is constant[None]] begin[:] return[None] variable[col] assign[=] call[name[min], parameter[tuple[[<ast.Subscript object at 0x7da2054a4700>, <ast.Subscript object at 0x7da2054a4a60>]]]] variable[rng] assign[=] call[name[range], parameter[call[name[self].markerPos][constant[0]], binary_operation[call[name[self].cursorPos][constant[0]] + constant[1]]]] for taget[tuple[[<ast.Name object at 0x7da2054a53f0>, <ast.Name object at 0x7da2054a5870>]]] in starred[call[name[enumerate], parameter[name[rng]]]] begin[:] variable[text] assign[=] call[name[self].removedText][name[idx]] if compare[name[text] not_equal[!=] constant[]] begin[:] call[name[self].baseClass.insertAt, parameter[name[text], name[line], name[col]]] call[name[self].qteWidget.setCursorPosition, parameter[<ast.Starred object at 0x7da204962e00>]]
keyword[def] identifier[reverseCommit] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[markerPos] keyword[is] keyword[None] : keyword[return] identifier[col] = identifier[min] (( identifier[self] . identifier[markerPos] [ literal[int] ], identifier[self] . identifier[cursorPos] [ literal[int] ])) identifier[rng] = identifier[range] ( identifier[self] . identifier[markerPos] [ literal[int] ], identifier[self] . identifier[cursorPos] [ literal[int] ]+ literal[int] ) keyword[for] identifier[idx] , identifier[line] keyword[in] identifier[enumerate] ( identifier[rng] ): identifier[text] = identifier[self] . identifier[removedText] [ identifier[idx] ] keyword[if] identifier[text] != literal[string] : identifier[self] . identifier[baseClass] . identifier[insertAt] ( identifier[text] , identifier[line] , identifier[col] ) identifier[self] . identifier[qteWidget] . identifier[setCursorPosition] (* identifier[self] . identifier[cursorPos] )
def reverseCommit(self): """ Re-insert the previously deleted line. """ if self.markerPos is None: return # depends on [control=['if'], data=[]] # Remove the specified string from the same position in every line # in between the mark and the cursor (inclusive). col = min((self.markerPos[1], self.cursorPos[1])) rng = range(self.markerPos[0], self.cursorPos[0] + 1) for (idx, line) in enumerate(rng): text = self.removedText[idx] if text != '': self.baseClass.insertAt(text, line, col) # depends on [control=['if'], data=['text']] # depends on [control=['for'], data=[]] self.qteWidget.setCursorPosition(*self.cursorPos)
def unlock(self, source_node, check_status=True): """ Unlock the task, set its status to `S_READY` so that the scheduler can submit it. source_node is the :class:`Node` that removed the lock Call task.check_status if check_status is True. """ if self.status != self.S_LOCKED: raise RuntimeError("Trying to unlock a task with status %s" % self.status) self._status = self.S_READY if check_status: self.check_status() self.history.info("Unlocked by %s", source_node)
def function[unlock, parameter[self, source_node, check_status]]: constant[ Unlock the task, set its status to `S_READY` so that the scheduler can submit it. source_node is the :class:`Node` that removed the lock Call task.check_status if check_status is True. ] if compare[name[self].status not_equal[!=] name[self].S_LOCKED] begin[:] <ast.Raise object at 0x7da18c4cd3c0> name[self]._status assign[=] name[self].S_READY if name[check_status] begin[:] call[name[self].check_status, parameter[]] call[name[self].history.info, parameter[constant[Unlocked by %s], name[source_node]]]
keyword[def] identifier[unlock] ( identifier[self] , identifier[source_node] , identifier[check_status] = keyword[True] ): literal[string] keyword[if] identifier[self] . identifier[status] != identifier[self] . identifier[S_LOCKED] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[self] . identifier[status] ) identifier[self] . identifier[_status] = identifier[self] . identifier[S_READY] keyword[if] identifier[check_status] : identifier[self] . identifier[check_status] () identifier[self] . identifier[history] . identifier[info] ( literal[string] , identifier[source_node] )
def unlock(self, source_node, check_status=True): """ Unlock the task, set its status to `S_READY` so that the scheduler can submit it. source_node is the :class:`Node` that removed the lock Call task.check_status if check_status is True. """ if self.status != self.S_LOCKED: raise RuntimeError('Trying to unlock a task with status %s' % self.status) # depends on [control=['if'], data=[]] self._status = self.S_READY if check_status: self.check_status() # depends on [control=['if'], data=[]] self.history.info('Unlocked by %s', source_node)
def body(self): """The body of the packet.""" view = ffi.buffer(self.packet.m_body, self.packet.m_nBodySize) return view[:]
def function[body, parameter[self]]: constant[The body of the packet.] variable[view] assign[=] call[name[ffi].buffer, parameter[name[self].packet.m_body, name[self].packet.m_nBodySize]] return[call[name[view]][<ast.Slice object at 0x7da18f09df60>]]
keyword[def] identifier[body] ( identifier[self] ): literal[string] identifier[view] = identifier[ffi] . identifier[buffer] ( identifier[self] . identifier[packet] . identifier[m_body] , identifier[self] . identifier[packet] . identifier[m_nBodySize] ) keyword[return] identifier[view] [:]
def body(self): """The body of the packet.""" view = ffi.buffer(self.packet.m_body, self.packet.m_nBodySize) return view[:]
def edit_line(self, line): """Edit a single line using the code expression.""" for code, code_obj in self.code_objs.items(): line = self.__edit_line(line, code, code_obj) return line
def function[edit_line, parameter[self, line]]: constant[Edit a single line using the code expression.] for taget[tuple[[<ast.Name object at 0x7da1b0d759f0>, <ast.Name object at 0x7da1b0d76560>]]] in starred[call[name[self].code_objs.items, parameter[]]] begin[:] variable[line] assign[=] call[name[self].__edit_line, parameter[name[line], name[code], name[code_obj]]] return[name[line]]
keyword[def] identifier[edit_line] ( identifier[self] , identifier[line] ): literal[string] keyword[for] identifier[code] , identifier[code_obj] keyword[in] identifier[self] . identifier[code_objs] . identifier[items] (): identifier[line] = identifier[self] . identifier[__edit_line] ( identifier[line] , identifier[code] , identifier[code_obj] ) keyword[return] identifier[line]
def edit_line(self, line): """Edit a single line using the code expression.""" for (code, code_obj) in self.code_objs.items(): line = self.__edit_line(line, code, code_obj) # depends on [control=['for'], data=[]] return line
def generateKey(self, template, mecha=MechanismAESGENERATEKEY): """ generate a secret key :param template: template for the secret key :param mecha: mechanism to use :return: handle of the generated key :rtype: PyKCS11.LowLevel.CK_OBJECT_HANDLE """ t = self._template2ckattrlist(template) ck_handle = PyKCS11.LowLevel.CK_OBJECT_HANDLE() m = mecha.to_native() rv = self.lib.C_GenerateKey(self.session, m, t, ck_handle) if rv != CKR_OK: raise PyKCS11Error(rv) return ck_handle
def function[generateKey, parameter[self, template, mecha]]: constant[ generate a secret key :param template: template for the secret key :param mecha: mechanism to use :return: handle of the generated key :rtype: PyKCS11.LowLevel.CK_OBJECT_HANDLE ] variable[t] assign[=] call[name[self]._template2ckattrlist, parameter[name[template]]] variable[ck_handle] assign[=] call[name[PyKCS11].LowLevel.CK_OBJECT_HANDLE, parameter[]] variable[m] assign[=] call[name[mecha].to_native, parameter[]] variable[rv] assign[=] call[name[self].lib.C_GenerateKey, parameter[name[self].session, name[m], name[t], name[ck_handle]]] if compare[name[rv] not_equal[!=] name[CKR_OK]] begin[:] <ast.Raise object at 0x7da20e956710> return[name[ck_handle]]
keyword[def] identifier[generateKey] ( identifier[self] , identifier[template] , identifier[mecha] = identifier[MechanismAESGENERATEKEY] ): literal[string] identifier[t] = identifier[self] . identifier[_template2ckattrlist] ( identifier[template] ) identifier[ck_handle] = identifier[PyKCS11] . identifier[LowLevel] . identifier[CK_OBJECT_HANDLE] () identifier[m] = identifier[mecha] . identifier[to_native] () identifier[rv] = identifier[self] . identifier[lib] . identifier[C_GenerateKey] ( identifier[self] . identifier[session] , identifier[m] , identifier[t] , identifier[ck_handle] ) keyword[if] identifier[rv] != identifier[CKR_OK] : keyword[raise] identifier[PyKCS11Error] ( identifier[rv] ) keyword[return] identifier[ck_handle]
def generateKey(self, template, mecha=MechanismAESGENERATEKEY): """ generate a secret key :param template: template for the secret key :param mecha: mechanism to use :return: handle of the generated key :rtype: PyKCS11.LowLevel.CK_OBJECT_HANDLE """ t = self._template2ckattrlist(template) ck_handle = PyKCS11.LowLevel.CK_OBJECT_HANDLE() m = mecha.to_native() rv = self.lib.C_GenerateKey(self.session, m, t, ck_handle) if rv != CKR_OK: raise PyKCS11Error(rv) # depends on [control=['if'], data=['rv']] return ck_handle
def parse_workflow_declaration(self, wf_declaration_subAST): ''' Parses a WDL declaration AST subtree into a string and a python dictionary containing its 'type' and 'value'. For example: var_name = refIndex var_map = {'type': File, 'value': bamIndex} :param wf_declaration_subAST: An AST subtree of a workflow declaration. :return: var_name, which is the name of the declared variable :return: var_map, a dictionary with keys for type and value. e.g. {'type': File, 'value': bamIndex} ''' var_map = OrderedDict() var_name = self.parse_declaration_name(wf_declaration_subAST.attr("name")) var_type = self.parse_declaration_type(wf_declaration_subAST.attr("type")) var_expressn = self.parse_declaration_expressn(wf_declaration_subAST.attr("expression"), es='') var_map['name'] = var_name var_map['type'] = var_type var_map['value'] = var_expressn return var_name, var_map
def function[parse_workflow_declaration, parameter[self, wf_declaration_subAST]]: constant[ Parses a WDL declaration AST subtree into a string and a python dictionary containing its 'type' and 'value'. For example: var_name = refIndex var_map = {'type': File, 'value': bamIndex} :param wf_declaration_subAST: An AST subtree of a workflow declaration. :return: var_name, which is the name of the declared variable :return: var_map, a dictionary with keys for type and value. e.g. {'type': File, 'value': bamIndex} ] variable[var_map] assign[=] call[name[OrderedDict], parameter[]] variable[var_name] assign[=] call[name[self].parse_declaration_name, parameter[call[name[wf_declaration_subAST].attr, parameter[constant[name]]]]] variable[var_type] assign[=] call[name[self].parse_declaration_type, parameter[call[name[wf_declaration_subAST].attr, parameter[constant[type]]]]] variable[var_expressn] assign[=] call[name[self].parse_declaration_expressn, parameter[call[name[wf_declaration_subAST].attr, parameter[constant[expression]]]]] call[name[var_map]][constant[name]] assign[=] name[var_name] call[name[var_map]][constant[type]] assign[=] name[var_type] call[name[var_map]][constant[value]] assign[=] name[var_expressn] return[tuple[[<ast.Name object at 0x7da18dc051b0>, <ast.Name object at 0x7da18dc069e0>]]]
keyword[def] identifier[parse_workflow_declaration] ( identifier[self] , identifier[wf_declaration_subAST] ): literal[string] identifier[var_map] = identifier[OrderedDict] () identifier[var_name] = identifier[self] . identifier[parse_declaration_name] ( identifier[wf_declaration_subAST] . identifier[attr] ( literal[string] )) identifier[var_type] = identifier[self] . identifier[parse_declaration_type] ( identifier[wf_declaration_subAST] . identifier[attr] ( literal[string] )) identifier[var_expressn] = identifier[self] . identifier[parse_declaration_expressn] ( identifier[wf_declaration_subAST] . identifier[attr] ( literal[string] ), identifier[es] = literal[string] ) identifier[var_map] [ literal[string] ]= identifier[var_name] identifier[var_map] [ literal[string] ]= identifier[var_type] identifier[var_map] [ literal[string] ]= identifier[var_expressn] keyword[return] identifier[var_name] , identifier[var_map]
def parse_workflow_declaration(self, wf_declaration_subAST): """ Parses a WDL declaration AST subtree into a string and a python dictionary containing its 'type' and 'value'. For example: var_name = refIndex var_map = {'type': File, 'value': bamIndex} :param wf_declaration_subAST: An AST subtree of a workflow declaration. :return: var_name, which is the name of the declared variable :return: var_map, a dictionary with keys for type and value. e.g. {'type': File, 'value': bamIndex} """ var_map = OrderedDict() var_name = self.parse_declaration_name(wf_declaration_subAST.attr('name')) var_type = self.parse_declaration_type(wf_declaration_subAST.attr('type')) var_expressn = self.parse_declaration_expressn(wf_declaration_subAST.attr('expression'), es='') var_map['name'] = var_name var_map['type'] = var_type var_map['value'] = var_expressn return (var_name, var_map)
def _update_page(self, uri, path): """Update page content.""" if uri in self._pages: self._pages[uri].update() else: self._pages[uri] = Page(uri=uri, path=path)
def function[_update_page, parameter[self, uri, path]]: constant[Update page content.] if compare[name[uri] in name[self]._pages] begin[:] call[call[name[self]._pages][name[uri]].update, parameter[]]
keyword[def] identifier[_update_page] ( identifier[self] , identifier[uri] , identifier[path] ): literal[string] keyword[if] identifier[uri] keyword[in] identifier[self] . identifier[_pages] : identifier[self] . identifier[_pages] [ identifier[uri] ]. identifier[update] () keyword[else] : identifier[self] . identifier[_pages] [ identifier[uri] ]= identifier[Page] ( identifier[uri] = identifier[uri] , identifier[path] = identifier[path] )
def _update_page(self, uri, path): """Update page content.""" if uri in self._pages: self._pages[uri].update() # depends on [control=['if'], data=['uri']] else: self._pages[uri] = Page(uri=uri, path=path)