code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def reportProgress(self, state, action, text=None, tick=None):
    """Notify interested code about our generation progress.

    state:  'prep'      reading sources
            'generate'  making instances
            'done'      wrapping up
            'error'     reporting a problem
    action: 'start'     begin generating
            'stop'      end generating
            'source'    which ufo we're reading
    text:   <file.ufo> ufoname (for instance)
    tick:   a float between 0 and 1 indicating progress.
    """
    # No observer registered: nothing to report.
    if self.progressFunc is None:
        return
    self.progressFunc(state=state, action=action, text=text, tick=tick)
def function[reportProgress, parameter[self, state, action, text, tick]]: constant[ If we want to keep other code updated about our progress. state: 'prep' reading sources 'generate' making instances 'done' wrapping up 'error' reporting a problem action: 'start' begin generating 'stop' end generating 'source' which ufo we're reading text: <file.ufo> ufoname (for instance) tick: a float between 0 and 1 indicating progress. ] if compare[name[self].progressFunc is_not constant[None]] begin[:] call[name[self].progressFunc, parameter[]]
keyword[def] identifier[reportProgress] ( identifier[self] , identifier[state] , identifier[action] , identifier[text] = keyword[None] , identifier[tick] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[progressFunc] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[progressFunc] ( identifier[state] = identifier[state] , identifier[action] = identifier[action] , identifier[text] = identifier[text] , identifier[tick] = identifier[tick] )
def reportProgress(self, state, action, text=None, tick=None): """ If we want to keep other code updated about our progress. state: 'prep' reading sources 'generate' making instances 'done' wrapping up 'error' reporting a problem action: 'start' begin generating 'stop' end generating 'source' which ufo we're reading text: <file.ufo> ufoname (for instance) tick: a float between 0 and 1 indicating progress. """ if self.progressFunc is not None: self.progressFunc(state=state, action=action, text=text, tick=tick) # depends on [control=['if'], data=[]]
def detectWebOSTV(self):
    """Return detection of a WebOS smart TV

    Detects if the current browser is on a WebOS smart TV.
    """
    # Both markers must be present in the user-agent string.
    agent = self.__userAgent
    return (UAgentInfo.deviceWebOStv in agent
            and UAgentInfo.smartTV2 in agent)
def function[detectWebOSTV, parameter[self]]: constant[Return detection of a WebOS smart TV Detects if the current browser is on a WebOS smart TV. ] return[<ast.BoolOp object at 0x7da1b0af81f0>]
keyword[def] identifier[detectWebOSTV] ( identifier[self] ): literal[string] keyword[return] identifier[UAgentInfo] . identifier[deviceWebOStv] keyword[in] identifier[self] . identifier[__userAgent] keyword[and] identifier[UAgentInfo] . identifier[smartTV2] keyword[in] identifier[self] . identifier[__userAgent]
def detectWebOSTV(self): """Return detection of a WebOS smart TV Detects if the current browser is on a WebOS smart TV. """ return UAgentInfo.deviceWebOStv in self.__userAgent and UAgentInfo.smartTV2 in self.__userAgent
def getDigitalID(self, num):
    """ Reads the COMTRADE ID of a given channel number.
    The number to be given is the same of the COMTRADE header.
    """
    # Locate the channel number in the ordered channel list, then return
    # the ID stored at the matching position.
    position = self.Dn.index(num)
    return self.Dch_id[position]
def function[getDigitalID, parameter[self, num]]: constant[ Reads the COMTRADE ID of a given channel number. The number to be given is the same of the COMTRADE header. ] variable[listidx] assign[=] call[name[self].Dn.index, parameter[name[num]]] return[call[name[self].Dch_id][name[listidx]]]
keyword[def] identifier[getDigitalID] ( identifier[self] , identifier[num] ): literal[string] identifier[listidx] = identifier[self] . identifier[Dn] . identifier[index] ( identifier[num] ) keyword[return] identifier[self] . identifier[Dch_id] [ identifier[listidx] ]
def getDigitalID(self, num): """ Reads the COMTRADE ID of a given channel number. The number to be given is the same of the COMTRADE header. """ listidx = self.Dn.index(num) # Get the position of the channel number. return self.Dch_id[listidx]
def on_error(self, error, items):
    """
    Handles error callbacks when using Segment with segment_debug_mode set to True
    """
    # Log full context first, then surface the failure to the caller.
    template = 'Encountered Segment error: {segment_error} with ' \
               'items: {with_items}'
    self.log.error(template.format(segment_error=error, with_items=items))
    raise AirflowException('Segment error: {}'.format(error))
def function[on_error, parameter[self, error, items]]: constant[ Handles error callbacks when using Segment with segment_debug_mode set to True ] call[name[self].log.error, parameter[call[constant[Encountered Segment error: {segment_error} with items: {with_items}].format, parameter[]]]] <ast.Raise object at 0x7da20e961ab0>
keyword[def] identifier[on_error] ( identifier[self] , identifier[error] , identifier[items] ): literal[string] identifier[self] . identifier[log] . identifier[error] ( literal[string] literal[string] . identifier[format] ( identifier[segment_error] = identifier[error] , identifier[with_items] = identifier[items] )) keyword[raise] identifier[AirflowException] ( literal[string] . identifier[format] ( identifier[error] ))
def on_error(self, error, items): """ Handles error callbacks when using Segment with segment_debug_mode set to True """ self.log.error('Encountered Segment error: {segment_error} with items: {with_items}'.format(segment_error=error, with_items=items)) raise AirflowException('Segment error: {}'.format(error))
def add_available_action(self, name, metadata, cls):
    """
    Add an available action.

    name -- name of the action
    metadata -- action metadata, i.e. type, description, etc., as a dict
    cls -- class to instantiate for this action
    """
    # A missing metadata argument is normalized to an empty dict so the
    # registry entry always carries a mapping.
    entry = {
        'metadata': metadata if metadata is not None else {},
        'class': cls,
    }
    self.available_actions[name] = entry
    self.actions[name] = []
def function[add_available_action, parameter[self, name, metadata, cls]]: constant[ Add an available action. name -- name of the action metadata -- action metadata, i.e. type, description, etc., as a dict cls -- class to instantiate for this action ] if compare[name[metadata] is constant[None]] begin[:] variable[metadata] assign[=] dictionary[[], []] call[name[self].available_actions][name[name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b033a1a0>, <ast.Constant object at 0x7da1b033a050>], [<ast.Name object at 0x7da1b033ad40>, <ast.Name object at 0x7da1b0339cc0>]] call[name[self].actions][name[name]] assign[=] list[[]]
keyword[def] identifier[add_available_action] ( identifier[self] , identifier[name] , identifier[metadata] , identifier[cls] ): literal[string] keyword[if] identifier[metadata] keyword[is] keyword[None] : identifier[metadata] ={} identifier[self] . identifier[available_actions] [ identifier[name] ]={ literal[string] : identifier[metadata] , literal[string] : identifier[cls] , } identifier[self] . identifier[actions] [ identifier[name] ]=[]
def add_available_action(self, name, metadata, cls): """ Add an available action. name -- name of the action metadata -- action metadata, i.e. type, description, etc., as a dict cls -- class to instantiate for this action """ if metadata is None: metadata = {} # depends on [control=['if'], data=['metadata']] self.available_actions[name] = {'metadata': metadata, 'class': cls} self.actions[name] = []
def new_message(self, recipient=None, *, recipient_type=RecipientType.TO):
    """ This method returns a new draft Message instance with contacts
     first email as a recipient

    :param Recipient recipient: a Recipient instance where to send this
     message. If None first email of this contact will be used
    :param RecipientType recipient_type: section to add recipient into
    :return: newly created message
    :rtype: Message or None
    """
    # Preventing the contact lookup to explode for big organizations.
    if self.main_resource == GAL_MAIN_RESOURCE:
        raise RuntimeError('Sending a message to all users within an '
                           'Organization is not allowed')

    if isinstance(recipient_type, str):
        recipient_type = RecipientType(recipient_type)

    # Fall back to the contact's first email address when no explicit
    # recipient was supplied.
    recipient = recipient or self.emails.get_first_recipient_with_address()
    if not recipient:
        return None

    draft = self.message_constructor(parent=self, is_draft=True)
    section = getattr(draft, str(recipient_type.value))
    section.add(recipient)
    return draft
def function[new_message, parameter[self, recipient]]: constant[ This method returns a new draft Message instance with contacts first email as a recipient :param Recipient recipient: a Recipient instance where to send this message. If None first email of this contact will be used :param RecipientType recipient_type: section to add recipient into :return: newly created message :rtype: Message or None ] if compare[name[self].main_resource equal[==] name[GAL_MAIN_RESOURCE]] begin[:] <ast.Raise object at 0x7da1b1baf250> if call[name[isinstance], parameter[name[recipient_type], name[str]]] begin[:] variable[recipient_type] assign[=] call[name[RecipientType], parameter[name[recipient_type]]] variable[recipient] assign[=] <ast.BoolOp object at 0x7da1b1b79630> if <ast.UnaryOp object at 0x7da1b1b78790> begin[:] return[constant[None]] variable[new_message] assign[=] call[name[self].message_constructor, parameter[]] variable[target_recipients] assign[=] call[name[getattr], parameter[name[new_message], call[name[str], parameter[name[recipient_type].value]]]] call[name[target_recipients].add, parameter[name[recipient]]] return[name[new_message]]
keyword[def] identifier[new_message] ( identifier[self] , identifier[recipient] = keyword[None] ,*, identifier[recipient_type] = identifier[RecipientType] . identifier[TO] ): literal[string] keyword[if] identifier[self] . identifier[main_resource] == identifier[GAL_MAIN_RESOURCE] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] ) keyword[if] identifier[isinstance] ( identifier[recipient_type] , identifier[str] ): identifier[recipient_type] = identifier[RecipientType] ( identifier[recipient_type] ) identifier[recipient] = identifier[recipient] keyword[or] identifier[self] . identifier[emails] . identifier[get_first_recipient_with_address] () keyword[if] keyword[not] identifier[recipient] : keyword[return] keyword[None] identifier[new_message] = identifier[self] . identifier[message_constructor] ( identifier[parent] = identifier[self] , identifier[is_draft] = keyword[True] ) identifier[target_recipients] = identifier[getattr] ( identifier[new_message] , identifier[str] ( identifier[recipient_type] . identifier[value] )) identifier[target_recipients] . identifier[add] ( identifier[recipient] ) keyword[return] identifier[new_message]
def new_message(self, recipient=None, *, recipient_type=RecipientType.TO): """ This method returns a new draft Message instance with contacts first email as a recipient :param Recipient recipient: a Recipient instance where to send this message. If None first email of this contact will be used :param RecipientType recipient_type: section to add recipient into :return: newly created message :rtype: Message or None """ if self.main_resource == GAL_MAIN_RESOURCE: # preventing the contact lookup to explode for big organizations.. raise RuntimeError('Sending a message to all users within an Organization is not allowed') # depends on [control=['if'], data=[]] if isinstance(recipient_type, str): recipient_type = RecipientType(recipient_type) # depends on [control=['if'], data=[]] recipient = recipient or self.emails.get_first_recipient_with_address() if not recipient: return None # depends on [control=['if'], data=[]] new_message = self.message_constructor(parent=self, is_draft=True) target_recipients = getattr(new_message, str(recipient_type.value)) target_recipients.add(recipient) return new_message
def peek(quantity, min_type=EventType.firstevent, max_type=EventType.lastevent):
    """Return events at the front of the event queue, within the specified
    minimum and maximum type, and do not remove them from the queue.

    Args:
        quantity (int): The maximum number of events to return.
        min_type (int): The minimum value for the event type of the
            returned events.
        max_type (int): The maximum value for the event type of the
            returned events.

    Returns:
        List[Event]: Events from the front of the event queue.

    Raises:
        SDLError: If there was an error retrieving the events.
    """
    # Delegate to the shared peep helper in non-destructive (peek) mode.
    action = lib.SDL_PEEKEVENT
    return _peep(quantity, action, min_type, max_type)
def function[peek, parameter[quantity, min_type, max_type]]: constant[Return events at the front of the event queue, within the specified minimum and maximum type, and do not remove them from the queue. Args: quantity (int): The maximum number of events to return. min_type (int): The minimum value for the event type of the returned events. max_type (int): The maximum value for the event type of the returned events. Returns: List[Event]: Events from the front of the event queue. Raises: SDLError: If there was an error retrieving the events. ] return[call[name[_peep], parameter[name[quantity], name[lib].SDL_PEEKEVENT, name[min_type], name[max_type]]]]
keyword[def] identifier[peek] ( identifier[quantity] , identifier[min_type] = identifier[EventType] . identifier[firstevent] , identifier[max_type] = identifier[EventType] . identifier[lastevent] ): literal[string] keyword[return] identifier[_peep] ( identifier[quantity] , identifier[lib] . identifier[SDL_PEEKEVENT] , identifier[min_type] , identifier[max_type] )
def peek(quantity, min_type=EventType.firstevent, max_type=EventType.lastevent): """Return events at the front of the event queue, within the specified minimum and maximum type, and do not remove them from the queue. Args: quantity (int): The maximum number of events to return. min_type (int): The minimum value for the event type of the returned events. max_type (int): The maximum value for the event type of the returned events. Returns: List[Event]: Events from the front of the event queue. Raises: SDLError: If there was an error retrieving the events. """ return _peep(quantity, lib.SDL_PEEKEVENT, min_type, max_type)
def web_task(f):
    """
    Checks if the task is called through the web interface.
    Task return value should be in format {'data': ...}.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        # Only web-originated jobs carry a WebJobContext.
        jc = JobContext.get_current_context()
        if not isinstance(jc, WebJobContext):
            raise Exception(
                "The WebTask is not called through the web interface.")
        data = f(*args, **kwargs)
        jc.add_responder(WebTaskResponder(data))
        if 'data' in data:
            return data['data']
        return ""
    return wrapper
def function[web_task, parameter[f]]: constant[ Checks if the task is called through the web interface. Task return value should be in format {'data': ...}. ] def function[web_task_decorator, parameter[]]: variable[jc] assign[=] call[name[JobContext].get_current_context, parameter[]] if <ast.UnaryOp object at 0x7da20c6e6830> begin[:] <ast.Raise object at 0x7da20c6e4e20> variable[data] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da20c6e6140>]] call[name[jc].add_responder, parameter[call[name[WebTaskResponder], parameter[name[data]]]]] return[<ast.IfExp object at 0x7da20c6e5360>] return[name[web_task_decorator]]
keyword[def] identifier[web_task] ( identifier[f] ): literal[string] @ identifier[wraps] ( identifier[f] ) keyword[def] identifier[web_task_decorator] (* identifier[args] ,** identifier[kwargs] ): identifier[jc] = identifier[JobContext] . identifier[get_current_context] () keyword[if] keyword[not] identifier[isinstance] ( identifier[jc] , identifier[WebJobContext] ): keyword[raise] identifier[Exception] ( literal[string] ) identifier[data] = identifier[f] (* identifier[args] ,** identifier[kwargs] ) identifier[jc] . identifier[add_responder] ( identifier[WebTaskResponder] ( identifier[data] )) keyword[return] identifier[data] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[data] keyword[else] literal[string] keyword[return] identifier[web_task_decorator]
def web_task(f): """ Checks if the task is called through the web interface. Task return value should be in format {'data': ...}. """ @wraps(f) def web_task_decorator(*args, **kwargs): jc = JobContext.get_current_context() if not isinstance(jc, WebJobContext): raise Exception('The WebTask is not called through the web interface.') # depends on [control=['if'], data=[]] data = f(*args, **kwargs) jc.add_responder(WebTaskResponder(data)) return data['data'] if 'data' in data else '' return web_task_decorator
def getRejectionReasonsItems(self):
    """Return the list of predefined rejection reasons.

    Reads the first record returned by ``getRejectionReasons`` and
    returns its values sorted by key, skipping the "checkbox" entry
    (UI state, not a reason). Returns an empty list when no reasons
    are configured.
    """
    reasons = self.getRejectionReasons()
    if not reasons:
        return []
    # Only the first record holds the predefined reasons.
    reasons = reasons[0]
    # NOTE: the previous implementation used `map(...) or []`; under
    # Python 3 `map` returns a lazy, always-truthy iterator, so the
    # `or []` fallback was dead code and callers received a map object
    # instead of a list. A comprehension always yields a real list.
    keys = sorted(key for key in reasons.keys() if key != "checkbox")
    return [reasons[key] for key in keys]
def function[getRejectionReasonsItems, parameter[self]]: constant[Return the list of predefined rejection reasons ] variable[reasons] assign[=] call[name[self].getRejectionReasons, parameter[]] if <ast.UnaryOp object at 0x7da1b1d49a50> begin[:] return[list[[]]] variable[reasons] assign[=] call[name[reasons]][constant[0]] variable[keys] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b2311030>, call[name[reasons].keys, parameter[]]]] return[<ast.BoolOp object at 0x7da1b2313160>]
keyword[def] identifier[getRejectionReasonsItems] ( identifier[self] ): literal[string] identifier[reasons] = identifier[self] . identifier[getRejectionReasons] () keyword[if] keyword[not] identifier[reasons] : keyword[return] [] identifier[reasons] = identifier[reasons] [ literal[int] ] identifier[keys] = identifier[filter] ( keyword[lambda] identifier[key] : identifier[key] != literal[string] , identifier[reasons] . identifier[keys] ()) keyword[return] identifier[map] ( keyword[lambda] identifier[key] : identifier[reasons] [ identifier[key] ], identifier[sorted] ( identifier[keys] )) keyword[or] []
def getRejectionReasonsItems(self): """Return the list of predefined rejection reasons """ reasons = self.getRejectionReasons() if not reasons: return [] # depends on [control=['if'], data=[]] reasons = reasons[0] keys = filter(lambda key: key != 'checkbox', reasons.keys()) return map(lambda key: reasons[key], sorted(keys)) or []
def resolve(self, parameters):
    """ Resolve given variable """
    # A real default exists, so a missing parameter is not an error.
    if self.default_value != DUMMY_VALUE:
        return parameters.get(self.name, self.default_value)
    # No default: the parameter is mandatory.
    if self.name not in parameters:
        raise VelException(f"Undefined parameter: {self.name}")
    return parameters[self.name]
def function[resolve, parameter[self, parameters]]: constant[ Resolve given variable ] if compare[name[self].default_value equal[==] name[DUMMY_VALUE]] begin[:] if compare[name[self].name in name[parameters]] begin[:] return[call[name[parameters]][name[self].name]]
keyword[def] identifier[resolve] ( identifier[self] , identifier[parameters] ): literal[string] keyword[if] identifier[self] . identifier[default_value] == identifier[DUMMY_VALUE] : keyword[if] identifier[self] . identifier[name] keyword[in] identifier[parameters] : keyword[return] identifier[parameters] [ identifier[self] . identifier[name] ] keyword[else] : keyword[raise] identifier[VelException] ( literal[string] ) keyword[else] : keyword[return] identifier[parameters] . identifier[get] ( identifier[self] . identifier[name] , identifier[self] . identifier[default_value] )
def resolve(self, parameters): """ Resolve given variable """ if self.default_value == DUMMY_VALUE: if self.name in parameters: return parameters[self.name] # depends on [control=['if'], data=['parameters']] else: raise VelException(f'Undefined parameter: {self.name}') # depends on [control=['if'], data=[]] else: return parameters.get(self.name, self.default_value)
def get_instance(cls, instance_or_pk):
    """Return a model instance in ``db.session``.

    :param instance_or_pk: An instance of this model class, or a
      primary key. A composite primary key can be passed as a tuple.

    Example::

      @db.atomic
      def increase_account_balance(account, amount):
          # Here `Account` is a subclass of `db.Model`.
          account = Account.get_instance(account)
          account.balance += amount

      # Now `increase_account_balance` can be
      # called with an account instance:
      increase_account_balance(my_account, 100.00)

      # or with an account primary key (1234):
      increase_account_balance(1234, 100.00)
    """
    lookup = instance_or_pk
    if isinstance(lookup, cls):
        # Already tracked by the session: hand the same object back.
        if lookup in cls._flask_signalbus_sa.session:
            return lookup
        # Detached instance: fall back to a primary-key lookup.
        lookup = inspect(cls).primary_key_from_instance(lookup)
    return cls.query.get(lookup)
def function[get_instance, parameter[cls, instance_or_pk]]: constant[Return a model instance in ``db.session``. :param instance_or_pk: An instance of this model class, or a primary key. A composite primary key can be passed as a tuple. Example:: @db.atomic def increase_account_balance(account, amount): # Here `Account` is a subclass of `db.Model`. account = Account.get_instance(account) account.balance += amount # Now `increase_account_balance` can be # called with an account instance: increase_account_balance(my_account, 100.00) # or with an account primary key (1234): increase_account_balance(1234, 100.00) ] if call[name[isinstance], parameter[name[instance_or_pk], name[cls]]] begin[:] if compare[name[instance_or_pk] in name[cls]._flask_signalbus_sa.session] begin[:] return[name[instance_or_pk]] variable[instance_or_pk] assign[=] call[call[name[inspect], parameter[name[cls]]].primary_key_from_instance, parameter[name[instance_or_pk]]] return[call[name[cls].query.get, parameter[name[instance_or_pk]]]]
keyword[def] identifier[get_instance] ( identifier[cls] , identifier[instance_or_pk] ): literal[string] keyword[if] identifier[isinstance] ( identifier[instance_or_pk] , identifier[cls] ): keyword[if] identifier[instance_or_pk] keyword[in] identifier[cls] . identifier[_flask_signalbus_sa] . identifier[session] : keyword[return] identifier[instance_or_pk] identifier[instance_or_pk] = identifier[inspect] ( identifier[cls] ). identifier[primary_key_from_instance] ( identifier[instance_or_pk] ) keyword[return] identifier[cls] . identifier[query] . identifier[get] ( identifier[instance_or_pk] )
def get_instance(cls, instance_or_pk): """Return a model instance in ``db.session``. :param instance_or_pk: An instance of this model class, or a primary key. A composite primary key can be passed as a tuple. Example:: @db.atomic def increase_account_balance(account, amount): # Here `Account` is a subclass of `db.Model`. account = Account.get_instance(account) account.balance += amount # Now `increase_account_balance` can be # called with an account instance: increase_account_balance(my_account, 100.00) # or with an account primary key (1234): increase_account_balance(1234, 100.00) """ if isinstance(instance_or_pk, cls): if instance_or_pk in cls._flask_signalbus_sa.session: return instance_or_pk # depends on [control=['if'], data=['instance_or_pk']] instance_or_pk = inspect(cls).primary_key_from_instance(instance_or_pk) # depends on [control=['if'], data=[]] return cls.query.get(instance_or_pk)
def t384(args):
    """
    %prog t384

    Print out a table converting between 96 well to 384 well
    """
    p = OptionParser(t384.__doc__)
    opts, args = p.parse_args(args)

    plate, splate = get_plate()

    out = sys.stdout
    # One output line per plate row, cells separated by '|'.
    for row in plate:
        out.write('|'.join(row))
        out.write('\n')
def function[t384, parameter[args]]: constant[ %prog t384 Print out a table converting between 96 well to 384 well ] variable[p] assign[=] call[name[OptionParser], parameter[name[t384].__doc__]] <ast.Tuple object at 0x7da1b077d840> assign[=] call[name[p].parse_args, parameter[name[args]]] <ast.Tuple object at 0x7da1b077d6c0> assign[=] call[name[get_plate], parameter[]] variable[fw] assign[=] name[sys].stdout for taget[name[i]] in starred[name[plate]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b07bb250>, <ast.Name object at 0x7da1b07bb280>]]] in starred[call[name[enumerate], parameter[name[i]]]] begin[:] if compare[name[j] not_equal[!=] constant[0]] begin[:] call[name[fw].write, parameter[constant[|]]] call[name[fw].write, parameter[name[p]]] call[name[fw].write, parameter[constant[ ]]]
keyword[def] identifier[t384] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[t384] . identifier[__doc__] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) identifier[plate] , identifier[splate] = identifier[get_plate] () identifier[fw] = identifier[sys] . identifier[stdout] keyword[for] identifier[i] keyword[in] identifier[plate] : keyword[for] identifier[j] , identifier[p] keyword[in] identifier[enumerate] ( identifier[i] ): keyword[if] identifier[j] != literal[int] : identifier[fw] . identifier[write] ( literal[string] ) identifier[fw] . identifier[write] ( identifier[p] ) identifier[fw] . identifier[write] ( literal[string] )
def t384(args): """ %prog t384 Print out a table converting between 96 well to 384 well """ p = OptionParser(t384.__doc__) (opts, args) = p.parse_args(args) (plate, splate) = get_plate() fw = sys.stdout for i in plate: for (j, p) in enumerate(i): if j != 0: fw.write('|') # depends on [control=['if'], data=[]] fw.write(p) # depends on [control=['for'], data=[]] fw.write('\n') # depends on [control=['for'], data=['i']]
def create(
    name,
    attributes=None,
    region=None,
    key=None,
    keyid=None,
    profile=None,
):
    '''
    Create an SQS queue.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_sqs.create myqueue region=us-east-1
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)

    # None means "no attributes"; normalize before the API call.
    attrs = _preprocess_attributes(attributes if attributes is not None else {})

    try:
        conn.create_queue(QueueName=name, Attributes=attrs)
    except botocore.exceptions.ClientError as e:
        return {'error': __utils__['boto3.get_error'](e)}
    return {'result': True}
def function[create, parameter[name, attributes, region, key, keyid, profile]]: constant[ Create an SQS queue. CLI Example: .. code-block:: bash salt myminion boto_sqs.create myqueue region=us-east-1 ] variable[conn] assign[=] call[name[_get_conn], parameter[]] if compare[name[attributes] is constant[None]] begin[:] variable[attributes] assign[=] dictionary[[], []] variable[attributes] assign[=] call[name[_preprocess_attributes], parameter[name[attributes]]] <ast.Try object at 0x7da18eb578e0> return[dictionary[[<ast.Constant object at 0x7da18eb573a0>], [<ast.Constant object at 0x7da18eb573d0>]]]
keyword[def] identifier[create] ( identifier[name] , identifier[attributes] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] , ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] identifier[attributes] keyword[is] keyword[None] : identifier[attributes] ={} identifier[attributes] = identifier[_preprocess_attributes] ( identifier[attributes] ) keyword[try] : identifier[conn] . identifier[create_queue] ( identifier[QueueName] = identifier[name] , identifier[Attributes] = identifier[attributes] ) keyword[except] identifier[botocore] . identifier[exceptions] . identifier[ClientError] keyword[as] identifier[e] : keyword[return] { literal[string] : identifier[__utils__] [ literal[string] ]( identifier[e] )} keyword[return] { literal[string] : keyword[True] }
def create(name, attributes=None, region=None, key=None, keyid=None, profile=None): """ Create an SQS queue. CLI Example: .. code-block:: bash salt myminion boto_sqs.create myqueue region=us-east-1 """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if attributes is None: attributes = {} # depends on [control=['if'], data=['attributes']] attributes = _preprocess_attributes(attributes) try: conn.create_queue(QueueName=name, Attributes=attributes) # depends on [control=['try'], data=[]] except botocore.exceptions.ClientError as e: return {'error': __utils__['boto3.get_error'](e)} # depends on [control=['except'], data=['e']] return {'result': True}
def import_from_json(self, data):
    """
    Replace the current roster with the
    :meth:`export_as_json`-compatible dictionary in `data`.

    No events are fired during this activity. After this method
    completes, the whole roster contents are exchanged with the
    contents from `data`.

    Also, no data is transferred to the server; this method is
    intended to be used for roster versioning. See below (in the
    docs of :class:`Service`).
    """
    self.version = data.get("ver", None)

    # Wipe current state before loading the imported snapshot.
    self.items.clear()
    self.groups.clear()

    for jid_str, item_data in data.get("items", {}).items():
        jid = structs.JID.fromstr(jid_str)
        item = Item(jid)
        item.update_from_json(item_data)
        self.items[jid] = item
        # Index the item under each of its groups.
        for group in item.groups:
            self.groups.setdefault(group, set()).add(item)
def function[import_from_json, parameter[self, data]]: constant[ Replace the current roster with the :meth:`export_as_json`-compatible dictionary in `data`. No events are fired during this activity. After this method completes, the whole roster contents are exchanged with the contents from `data`. Also, no data is transferred to the server; this method is intended to be used for roster versioning. See below (in the docs of :class:`Service`). ] name[self].version assign[=] call[name[data].get, parameter[constant[ver], constant[None]]] call[name[self].items.clear, parameter[]] call[name[self].groups.clear, parameter[]] for taget[tuple[[<ast.Name object at 0x7da18f723e80>, <ast.Name object at 0x7da18f722f80>]]] in starred[call[call[name[data].get, parameter[constant[items], dictionary[[], []]]].items, parameter[]]] begin[:] variable[jid] assign[=] call[name[structs].JID.fromstr, parameter[name[jid]]] variable[item] assign[=] call[name[Item], parameter[name[jid]]] call[name[item].update_from_json, parameter[name[data]]] call[name[self].items][name[jid]] assign[=] name[item] for taget[name[group]] in starred[name[item].groups] begin[:] call[call[name[self].groups.setdefault, parameter[name[group], call[name[set], parameter[]]]].add, parameter[name[item]]]
keyword[def] identifier[import_from_json] ( identifier[self] , identifier[data] ): literal[string] identifier[self] . identifier[version] = identifier[data] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[items] . identifier[clear] () identifier[self] . identifier[groups] . identifier[clear] () keyword[for] identifier[jid] , identifier[data] keyword[in] identifier[data] . identifier[get] ( literal[string] ,{}). identifier[items] (): identifier[jid] = identifier[structs] . identifier[JID] . identifier[fromstr] ( identifier[jid] ) identifier[item] = identifier[Item] ( identifier[jid] ) identifier[item] . identifier[update_from_json] ( identifier[data] ) identifier[self] . identifier[items] [ identifier[jid] ]= identifier[item] keyword[for] identifier[group] keyword[in] identifier[item] . identifier[groups] : identifier[self] . identifier[groups] . identifier[setdefault] ( identifier[group] , identifier[set] ()). identifier[add] ( identifier[item] )
def import_from_json(self, data): """ Replace the current roster with the :meth:`export_as_json`-compatible dictionary in `data`. No events are fired during this activity. After this method completes, the whole roster contents are exchanged with the contents from `data`. Also, no data is transferred to the server; this method is intended to be used for roster versioning. See below (in the docs of :class:`Service`). """ self.version = data.get('ver', None) self.items.clear() self.groups.clear() for (jid, data) in data.get('items', {}).items(): jid = structs.JID.fromstr(jid) item = Item(jid) item.update_from_json(data) self.items[jid] = item for group in item.groups: self.groups.setdefault(group, set()).add(item) # depends on [control=['for'], data=['group']] # depends on [control=['for'], data=[]]
def remove(self, decoration): """ Removes a text decoration from the editor. :param decoration: Text decoration to remove :type decoration: spyder.api.TextDecoration """ try: self._decorations.remove(decoration) self.update() return True except ValueError: return False except RuntimeError: # This is needed to fix issue 9173 pass
def function[remove, parameter[self, decoration]]: constant[ Removes a text decoration from the editor. :param decoration: Text decoration to remove :type decoration: spyder.api.TextDecoration ] <ast.Try object at 0x7da2041d9360>
keyword[def] identifier[remove] ( identifier[self] , identifier[decoration] ): literal[string] keyword[try] : identifier[self] . identifier[_decorations] . identifier[remove] ( identifier[decoration] ) identifier[self] . identifier[update] () keyword[return] keyword[True] keyword[except] identifier[ValueError] : keyword[return] keyword[False] keyword[except] identifier[RuntimeError] : keyword[pass]
def remove(self, decoration): """ Removes a text decoration from the editor. :param decoration: Text decoration to remove :type decoration: spyder.api.TextDecoration """ try: self._decorations.remove(decoration) self.update() return True # depends on [control=['try'], data=[]] except ValueError: return False # depends on [control=['except'], data=[]] except RuntimeError: # This is needed to fix issue 9173 pass # depends on [control=['except'], data=[]]
def create(self): """Create a single instance of notebook.""" # Point to chart repo. out = helm( "repo", "add", "jupyterhub", self.helm_repo ) out = helm("repo", "update") # Get token to secure Jupyterhub secret_yaml = self.get_security_yaml() # Get Jupyterhub. out = helm( "upgrade", "--install", self.release, "jupyterhub/jupyterhub", namespace=self.namespace, version=self.version, input=secret_yaml ) if out.returncode != 0: print(out.stderr) else: print(out.stdout)
def function[create, parameter[self]]: constant[Create a single instance of notebook.] variable[out] assign[=] call[name[helm], parameter[constant[repo], constant[add], constant[jupyterhub], name[self].helm_repo]] variable[out] assign[=] call[name[helm], parameter[constant[repo], constant[update]]] variable[secret_yaml] assign[=] call[name[self].get_security_yaml, parameter[]] variable[out] assign[=] call[name[helm], parameter[constant[upgrade], constant[--install], name[self].release, constant[jupyterhub/jupyterhub]]] if compare[name[out].returncode not_equal[!=] constant[0]] begin[:] call[name[print], parameter[name[out].stderr]]
keyword[def] identifier[create] ( identifier[self] ): literal[string] identifier[out] = identifier[helm] ( literal[string] , literal[string] , literal[string] , identifier[self] . identifier[helm_repo] ) identifier[out] = identifier[helm] ( literal[string] , literal[string] ) identifier[secret_yaml] = identifier[self] . identifier[get_security_yaml] () identifier[out] = identifier[helm] ( literal[string] , literal[string] , identifier[self] . identifier[release] , literal[string] , identifier[namespace] = identifier[self] . identifier[namespace] , identifier[version] = identifier[self] . identifier[version] , identifier[input] = identifier[secret_yaml] ) keyword[if] identifier[out] . identifier[returncode] != literal[int] : identifier[print] ( identifier[out] . identifier[stderr] ) keyword[else] : identifier[print] ( identifier[out] . identifier[stdout] )
def create(self): """Create a single instance of notebook.""" # Point to chart repo. out = helm('repo', 'add', 'jupyterhub', self.helm_repo) out = helm('repo', 'update') # Get token to secure Jupyterhub secret_yaml = self.get_security_yaml() # Get Jupyterhub. out = helm('upgrade', '--install', self.release, 'jupyterhub/jupyterhub', namespace=self.namespace, version=self.version, input=secret_yaml) if out.returncode != 0: print(out.stderr) # depends on [control=['if'], data=[]] else: print(out.stdout)
def audio_interfaces(): """ Extracts audio interfaces data :return list[AudioInterface]: Audio interfaces data """ p = pyaudio.PyAudio() interfaces = [] for i in range(p.get_device_count()): data = p.get_device_info_by_index(i) if 'hw' not in data['name']: interfaces.append(AudioInterface(data)) p.terminate() return interfaces
def function[audio_interfaces, parameter[]]: constant[ Extracts audio interfaces data :return list[AudioInterface]: Audio interfaces data ] variable[p] assign[=] call[name[pyaudio].PyAudio, parameter[]] variable[interfaces] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[p].get_device_count, parameter[]]]]] begin[:] variable[data] assign[=] call[name[p].get_device_info_by_index, parameter[name[i]]] if compare[constant[hw] <ast.NotIn object at 0x7da2590d7190> call[name[data]][constant[name]]] begin[:] call[name[interfaces].append, parameter[call[name[AudioInterface], parameter[name[data]]]]] call[name[p].terminate, parameter[]] return[name[interfaces]]
keyword[def] identifier[audio_interfaces] (): literal[string] identifier[p] = identifier[pyaudio] . identifier[PyAudio] () identifier[interfaces] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[p] . identifier[get_device_count] ()): identifier[data] = identifier[p] . identifier[get_device_info_by_index] ( identifier[i] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[data] [ literal[string] ]: identifier[interfaces] . identifier[append] ( identifier[AudioInterface] ( identifier[data] )) identifier[p] . identifier[terminate] () keyword[return] identifier[interfaces]
def audio_interfaces(): """ Extracts audio interfaces data :return list[AudioInterface]: Audio interfaces data """ p = pyaudio.PyAudio() interfaces = [] for i in range(p.get_device_count()): data = p.get_device_info_by_index(i) if 'hw' not in data['name']: interfaces.append(AudioInterface(data)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] p.terminate() return interfaces
def get_history(self): """get all msg_ids, ordered by time submitted.""" cursor = self._records.find({},{'msg_id':1}).sort('submitted') return [ rec['msg_id'] for rec in cursor ]
def function[get_history, parameter[self]]: constant[get all msg_ids, ordered by time submitted.] variable[cursor] assign[=] call[call[name[self]._records.find, parameter[dictionary[[], []], dictionary[[<ast.Constant object at 0x7da2045668c0>], [<ast.Constant object at 0x7da204564340>]]]].sort, parameter[constant[submitted]]] return[<ast.ListComp object at 0x7da2045647f0>]
keyword[def] identifier[get_history] ( identifier[self] ): literal[string] identifier[cursor] = identifier[self] . identifier[_records] . identifier[find] ({},{ literal[string] : literal[int] }). identifier[sort] ( literal[string] ) keyword[return] [ identifier[rec] [ literal[string] ] keyword[for] identifier[rec] keyword[in] identifier[cursor] ]
def get_history(self): """get all msg_ids, ordered by time submitted.""" cursor = self._records.find({}, {'msg_id': 1}).sort('submitted') return [rec['msg_id'] for rec in cursor]
def height(self, height=None): """Returns or sets (if a value is provided) the chart's height. :param height: If given, the chart's height will be set to this.""" if height is None: return self._height else: if not is_numeric(height): raise TypeError("height must be numeric, not '%s'" % str(height)) self._height = height
def function[height, parameter[self, height]]: constant[Returns or sets (if a value is provided) the chart's height. :param height: If given, the chart's height will be set to this.] if compare[name[height] is constant[None]] begin[:] return[name[self]._height]
keyword[def] identifier[height] ( identifier[self] , identifier[height] = keyword[None] ): literal[string] keyword[if] identifier[height] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[_height] keyword[else] : keyword[if] keyword[not] identifier[is_numeric] ( identifier[height] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[str] ( identifier[height] )) identifier[self] . identifier[_height] = identifier[height]
def height(self, height=None): """Returns or sets (if a value is provided) the chart's height. :param height: If given, the chart's height will be set to this.""" if height is None: return self._height # depends on [control=['if'], data=[]] else: if not is_numeric(height): raise TypeError("height must be numeric, not '%s'" % str(height)) # depends on [control=['if'], data=[]] self._height = height
def run(self): """Run.""" self.count += 1 message = Paragraph('IF2 run %i - running' % self.count) dispatcher.send( signal=DYNAMIC_MESSAGE_SIGNAL, sender=self, message=message)
def function[run, parameter[self]]: constant[Run.] <ast.AugAssign object at 0x7da18dc06200> variable[message] assign[=] call[name[Paragraph], parameter[binary_operation[constant[IF2 run %i - running] <ast.Mod object at 0x7da2590d6920> name[self].count]]] call[name[dispatcher].send, parameter[]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[self] . identifier[count] += literal[int] identifier[message] = identifier[Paragraph] ( literal[string] % identifier[self] . identifier[count] ) identifier[dispatcher] . identifier[send] ( identifier[signal] = identifier[DYNAMIC_MESSAGE_SIGNAL] , identifier[sender] = identifier[self] , identifier[message] = identifier[message] )
def run(self): """Run.""" self.count += 1 message = Paragraph('IF2 run %i - running' % self.count) dispatcher.send(signal=DYNAMIC_MESSAGE_SIGNAL, sender=self, message=message)
def loop(self): """ check for mails and send them """ for mail in Mail.objects.filter(done=False, send_fail_count__lt=3): # send all emails that are not already send or failed to send less # then three times mail.send_mail() for mail in Mail.objects.filter(done=True, timestamp__lt=time() - 60 * 60 * 24 * 7): # delete all done emails older then one week mail.delete() return 1, None
def function[loop, parameter[self]]: constant[ check for mails and send them ] for taget[name[mail]] in starred[call[name[Mail].objects.filter, parameter[]]] begin[:] call[name[mail].send_mail, parameter[]] for taget[name[mail]] in starred[call[name[Mail].objects.filter, parameter[]]] begin[:] call[name[mail].delete, parameter[]] return[tuple[[<ast.Constant object at 0x7da18f00dd80>, <ast.Constant object at 0x7da18f00fca0>]]]
keyword[def] identifier[loop] ( identifier[self] ): literal[string] keyword[for] identifier[mail] keyword[in] identifier[Mail] . identifier[objects] . identifier[filter] ( identifier[done] = keyword[False] , identifier[send_fail_count__lt] = literal[int] ): identifier[mail] . identifier[send_mail] () keyword[for] identifier[mail] keyword[in] identifier[Mail] . identifier[objects] . identifier[filter] ( identifier[done] = keyword[True] , identifier[timestamp__lt] = identifier[time] ()- literal[int] * literal[int] * literal[int] * literal[int] ): identifier[mail] . identifier[delete] () keyword[return] literal[int] , keyword[None]
def loop(self): """ check for mails and send them """ for mail in Mail.objects.filter(done=False, send_fail_count__lt=3): # send all emails that are not already send or failed to send less # then three times mail.send_mail() # depends on [control=['for'], data=['mail']] for mail in Mail.objects.filter(done=True, timestamp__lt=time() - 60 * 60 * 24 * 7): # delete all done emails older then one week mail.delete() # depends on [control=['for'], data=['mail']] return (1, None)
def get_empty_dimension(**kwargs): """ Returns a dimension object initialized with empty values """ dimension = JSONObject(Dimension()) dimension.id = None dimension.name = '' dimension.description = '' dimension.project_id = None dimension.units = [] return dimension
def function[get_empty_dimension, parameter[]]: constant[ Returns a dimension object initialized with empty values ] variable[dimension] assign[=] call[name[JSONObject], parameter[call[name[Dimension], parameter[]]]] name[dimension].id assign[=] constant[None] name[dimension].name assign[=] constant[] name[dimension].description assign[=] constant[] name[dimension].project_id assign[=] constant[None] name[dimension].units assign[=] list[[]] return[name[dimension]]
keyword[def] identifier[get_empty_dimension] (** identifier[kwargs] ): literal[string] identifier[dimension] = identifier[JSONObject] ( identifier[Dimension] ()) identifier[dimension] . identifier[id] = keyword[None] identifier[dimension] . identifier[name] = literal[string] identifier[dimension] . identifier[description] = literal[string] identifier[dimension] . identifier[project_id] = keyword[None] identifier[dimension] . identifier[units] =[] keyword[return] identifier[dimension]
def get_empty_dimension(**kwargs): """ Returns a dimension object initialized with empty values """ dimension = JSONObject(Dimension()) dimension.id = None dimension.name = '' dimension.description = '' dimension.project_id = None dimension.units = [] return dimension
def convertShpToExtend(pathToShp): """ reprojette en WGS84 et recupere l'extend """ driver = ogr.GetDriverByName('ESRI Shapefile') dataset = driver.Open(pathToShp) if dataset is not None: # from Layer layer = dataset.GetLayer() spatialRef = layer.GetSpatialRef() # from Geometry feature = layer.GetNextFeature() geom = feature.GetGeometryRef() spatialRef = geom.GetSpatialReference() #WGS84 outSpatialRef = osr.SpatialReference() outSpatialRef.ImportFromEPSG(4326) coordTrans = osr.CoordinateTransformation(spatialRef, outSpatialRef) env = geom.GetEnvelope() pointMAX = ogr.Geometry(ogr.wkbPoint) pointMAX.AddPoint(env[1], env[3]) pointMAX.Transform(coordTrans) pointMIN = ogr.Geometry(ogr.wkbPoint) pointMIN.AddPoint(env[0], env[2]) pointMIN.Transform(coordTrans) return [pointMAX.GetPoint()[1],pointMIN.GetPoint()[0],pointMIN.GetPoint()[1],pointMAX.GetPoint()[0]] else: exit(" shapefile not found. Please verify your path to the shapefile")
def function[convertShpToExtend, parameter[pathToShp]]: constant[ reprojette en WGS84 et recupere l'extend ] variable[driver] assign[=] call[name[ogr].GetDriverByName, parameter[constant[ESRI Shapefile]]] variable[dataset] assign[=] call[name[driver].Open, parameter[name[pathToShp]]] if compare[name[dataset] is_not constant[None]] begin[:] variable[layer] assign[=] call[name[dataset].GetLayer, parameter[]] variable[spatialRef] assign[=] call[name[layer].GetSpatialRef, parameter[]] variable[feature] assign[=] call[name[layer].GetNextFeature, parameter[]] variable[geom] assign[=] call[name[feature].GetGeometryRef, parameter[]] variable[spatialRef] assign[=] call[name[geom].GetSpatialReference, parameter[]] variable[outSpatialRef] assign[=] call[name[osr].SpatialReference, parameter[]] call[name[outSpatialRef].ImportFromEPSG, parameter[constant[4326]]] variable[coordTrans] assign[=] call[name[osr].CoordinateTransformation, parameter[name[spatialRef], name[outSpatialRef]]] variable[env] assign[=] call[name[geom].GetEnvelope, parameter[]] variable[pointMAX] assign[=] call[name[ogr].Geometry, parameter[name[ogr].wkbPoint]] call[name[pointMAX].AddPoint, parameter[call[name[env]][constant[1]], call[name[env]][constant[3]]]] call[name[pointMAX].Transform, parameter[name[coordTrans]]] variable[pointMIN] assign[=] call[name[ogr].Geometry, parameter[name[ogr].wkbPoint]] call[name[pointMIN].AddPoint, parameter[call[name[env]][constant[0]], call[name[env]][constant[2]]]] call[name[pointMIN].Transform, parameter[name[coordTrans]]] return[list[[<ast.Subscript object at 0x7da1afe38460>, <ast.Subscript object at 0x7da1afe3a320>, <ast.Subscript object at 0x7da1afe39b70>, <ast.Subscript object at 0x7da1afe38e80>]]]
keyword[def] identifier[convertShpToExtend] ( identifier[pathToShp] ): literal[string] identifier[driver] = identifier[ogr] . identifier[GetDriverByName] ( literal[string] ) identifier[dataset] = identifier[driver] . identifier[Open] ( identifier[pathToShp] ) keyword[if] identifier[dataset] keyword[is] keyword[not] keyword[None] : identifier[layer] = identifier[dataset] . identifier[GetLayer] () identifier[spatialRef] = identifier[layer] . identifier[GetSpatialRef] () identifier[feature] = identifier[layer] . identifier[GetNextFeature] () identifier[geom] = identifier[feature] . identifier[GetGeometryRef] () identifier[spatialRef] = identifier[geom] . identifier[GetSpatialReference] () identifier[outSpatialRef] = identifier[osr] . identifier[SpatialReference] () identifier[outSpatialRef] . identifier[ImportFromEPSG] ( literal[int] ) identifier[coordTrans] = identifier[osr] . identifier[CoordinateTransformation] ( identifier[spatialRef] , identifier[outSpatialRef] ) identifier[env] = identifier[geom] . identifier[GetEnvelope] () identifier[pointMAX] = identifier[ogr] . identifier[Geometry] ( identifier[ogr] . identifier[wkbPoint] ) identifier[pointMAX] . identifier[AddPoint] ( identifier[env] [ literal[int] ], identifier[env] [ literal[int] ]) identifier[pointMAX] . identifier[Transform] ( identifier[coordTrans] ) identifier[pointMIN] = identifier[ogr] . identifier[Geometry] ( identifier[ogr] . identifier[wkbPoint] ) identifier[pointMIN] . identifier[AddPoint] ( identifier[env] [ literal[int] ], identifier[env] [ literal[int] ]) identifier[pointMIN] . identifier[Transform] ( identifier[coordTrans] ) keyword[return] [ identifier[pointMAX] . identifier[GetPoint] ()[ literal[int] ], identifier[pointMIN] . identifier[GetPoint] ()[ literal[int] ], identifier[pointMIN] . identifier[GetPoint] ()[ literal[int] ], identifier[pointMAX] . identifier[GetPoint] ()[ literal[int] ]] keyword[else] : identifier[exit] ( literal[string] )
def convertShpToExtend(pathToShp): """ reprojette en WGS84 et recupere l'extend """ driver = ogr.GetDriverByName('ESRI Shapefile') dataset = driver.Open(pathToShp) if dataset is not None: # from Layer layer = dataset.GetLayer() spatialRef = layer.GetSpatialRef() # from Geometry feature = layer.GetNextFeature() geom = feature.GetGeometryRef() spatialRef = geom.GetSpatialReference() #WGS84 outSpatialRef = osr.SpatialReference() outSpatialRef.ImportFromEPSG(4326) coordTrans = osr.CoordinateTransformation(spatialRef, outSpatialRef) env = geom.GetEnvelope() pointMAX = ogr.Geometry(ogr.wkbPoint) pointMAX.AddPoint(env[1], env[3]) pointMAX.Transform(coordTrans) pointMIN = ogr.Geometry(ogr.wkbPoint) pointMIN.AddPoint(env[0], env[2]) pointMIN.Transform(coordTrans) return [pointMAX.GetPoint()[1], pointMIN.GetPoint()[0], pointMIN.GetPoint()[1], pointMAX.GetPoint()[0]] # depends on [control=['if'], data=['dataset']] else: exit(' shapefile not found. Please verify your path to the shapefile')
def create_translation(self, language_code, **fields): """ Add a translation to the model. The :func:`save_translations` function is called afterwards. The object will be saved immediately, similar to calling :func:`~django.db.models.manager.Manager.create` or :func:`~django.db.models.fields.related.RelatedManager.create` on related fields. """ if language_code is None: raise ValueError(get_null_language_error()) meta = self._parler_meta if self._translations_cache[meta.root_model].get(language_code, None): # MISSING evaluates to False too raise ValueError("Translation already exists: {0}".format(language_code)) # Save all fields in the proper translated model. for translation in self._set_translated_fields(language_code, **fields): self.save_translation(translation)
def function[create_translation, parameter[self, language_code]]: constant[ Add a translation to the model. The :func:`save_translations` function is called afterwards. The object will be saved immediately, similar to calling :func:`~django.db.models.manager.Manager.create` or :func:`~django.db.models.fields.related.RelatedManager.create` on related fields. ] if compare[name[language_code] is constant[None]] begin[:] <ast.Raise object at 0x7da204620f10> variable[meta] assign[=] name[self]._parler_meta if call[call[name[self]._translations_cache][name[meta].root_model].get, parameter[name[language_code], constant[None]]] begin[:] <ast.Raise object at 0x7da2046231f0> for taget[name[translation]] in starred[call[name[self]._set_translated_fields, parameter[name[language_code]]]] begin[:] call[name[self].save_translation, parameter[name[translation]]]
keyword[def] identifier[create_translation] ( identifier[self] , identifier[language_code] ,** identifier[fields] ): literal[string] keyword[if] identifier[language_code] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( identifier[get_null_language_error] ()) identifier[meta] = identifier[self] . identifier[_parler_meta] keyword[if] identifier[self] . identifier[_translations_cache] [ identifier[meta] . identifier[root_model] ]. identifier[get] ( identifier[language_code] , keyword[None] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[language_code] )) keyword[for] identifier[translation] keyword[in] identifier[self] . identifier[_set_translated_fields] ( identifier[language_code] ,** identifier[fields] ): identifier[self] . identifier[save_translation] ( identifier[translation] )
def create_translation(self, language_code, **fields): """ Add a translation to the model. The :func:`save_translations` function is called afterwards. The object will be saved immediately, similar to calling :func:`~django.db.models.manager.Manager.create` or :func:`~django.db.models.fields.related.RelatedManager.create` on related fields. """ if language_code is None: raise ValueError(get_null_language_error()) # depends on [control=['if'], data=[]] meta = self._parler_meta if self._translations_cache[meta.root_model].get(language_code, None): # MISSING evaluates to False too raise ValueError('Translation already exists: {0}'.format(language_code)) # depends on [control=['if'], data=[]] # Save all fields in the proper translated model. for translation in self._set_translated_fields(language_code, **fields): self.save_translation(translation) # depends on [control=['for'], data=['translation']]
def set_names(self, names): """ Change names of all columns in the frame. :param List[str] names: The list of new names for every column in the frame. """ assert_is_type(names, [str]) assert_satisfies(names, len(names) == self.ncol) self._ex = ExprNode("colnames=", self, range(self.ncol), names) # Update-in-place, but still lazy return self
def function[set_names, parameter[self, names]]: constant[ Change names of all columns in the frame. :param List[str] names: The list of new names for every column in the frame. ] call[name[assert_is_type], parameter[name[names], list[[<ast.Name object at 0x7da204345e70>]]]] call[name[assert_satisfies], parameter[name[names], compare[call[name[len], parameter[name[names]]] equal[==] name[self].ncol]]] name[self]._ex assign[=] call[name[ExprNode], parameter[constant[colnames=], name[self], call[name[range], parameter[name[self].ncol]], name[names]]] return[name[self]]
keyword[def] identifier[set_names] ( identifier[self] , identifier[names] ): literal[string] identifier[assert_is_type] ( identifier[names] ,[ identifier[str] ]) identifier[assert_satisfies] ( identifier[names] , identifier[len] ( identifier[names] )== identifier[self] . identifier[ncol] ) identifier[self] . identifier[_ex] = identifier[ExprNode] ( literal[string] , identifier[self] , identifier[range] ( identifier[self] . identifier[ncol] ), identifier[names] ) keyword[return] identifier[self]
def set_names(self, names): """ Change names of all columns in the frame. :param List[str] names: The list of new names for every column in the frame. """ assert_is_type(names, [str]) assert_satisfies(names, len(names) == self.ncol) self._ex = ExprNode('colnames=', self, range(self.ncol), names) # Update-in-place, but still lazy return self
def _validate_cert_chain(self, cert_chain): # type: (Certificate) -> None """Validate the certificate chain. This method checks if the passed in certificate chain is valid, i.e it is not expired and the Alexa domain is present in the SAN extensions of the certificate chain. A :py:class:`VerificationException` is raised if the certificate chain is not valid. :param cert_chain: Certificate chain to be validated :type cert_chain: cryptography.x509.Certificate :return: None :raises: :py:class:`VerificationException` if certificated is not valid """ now = datetime.utcnow() if not (cert_chain.not_valid_before <= now <= cert_chain.not_valid_after): raise VerificationException("Signing Certificate expired") ext = cert_chain.extensions.get_extension_for_oid( ExtensionOID.SUBJECT_ALTERNATIVE_NAME) if CERT_CHAIN_DOMAIN not in ext.value.get_values_for_type( DNSName): raise VerificationException( "{} domain missing in Signature Certificate Chain".format( CERT_CHAIN_DOMAIN))
def function[_validate_cert_chain, parameter[self, cert_chain]]: constant[Validate the certificate chain. This method checks if the passed in certificate chain is valid, i.e it is not expired and the Alexa domain is present in the SAN extensions of the certificate chain. A :py:class:`VerificationException` is raised if the certificate chain is not valid. :param cert_chain: Certificate chain to be validated :type cert_chain: cryptography.x509.Certificate :return: None :raises: :py:class:`VerificationException` if certificated is not valid ] variable[now] assign[=] call[name[datetime].utcnow, parameter[]] if <ast.UnaryOp object at 0x7da1b18ae1a0> begin[:] <ast.Raise object at 0x7da1b18aeef0> variable[ext] assign[=] call[name[cert_chain].extensions.get_extension_for_oid, parameter[name[ExtensionOID].SUBJECT_ALTERNATIVE_NAME]] if compare[name[CERT_CHAIN_DOMAIN] <ast.NotIn object at 0x7da2590d7190> call[name[ext].value.get_values_for_type, parameter[name[DNSName]]]] begin[:] <ast.Raise object at 0x7da1b18ad2d0>
keyword[def] identifier[_validate_cert_chain] ( identifier[self] , identifier[cert_chain] ): literal[string] identifier[now] = identifier[datetime] . identifier[utcnow] () keyword[if] keyword[not] ( identifier[cert_chain] . identifier[not_valid_before] <= identifier[now] <= identifier[cert_chain] . identifier[not_valid_after] ): keyword[raise] identifier[VerificationException] ( literal[string] ) identifier[ext] = identifier[cert_chain] . identifier[extensions] . identifier[get_extension_for_oid] ( identifier[ExtensionOID] . identifier[SUBJECT_ALTERNATIVE_NAME] ) keyword[if] identifier[CERT_CHAIN_DOMAIN] keyword[not] keyword[in] identifier[ext] . identifier[value] . identifier[get_values_for_type] ( identifier[DNSName] ): keyword[raise] identifier[VerificationException] ( literal[string] . identifier[format] ( identifier[CERT_CHAIN_DOMAIN] ))
def _validate_cert_chain(self, cert_chain): # type: (Certificate) -> None 'Validate the certificate chain.\n\n This method checks if the passed in certificate chain is valid,\n i.e it is not expired and the Alexa domain is present in the\n SAN extensions of the certificate chain. A\n :py:class:`VerificationException` is raised if the certificate\n chain is not valid.\n\n :param cert_chain: Certificate chain to be validated\n :type cert_chain: cryptography.x509.Certificate\n :return: None\n :raises: :py:class:`VerificationException` if certificated is\n not valid\n ' now = datetime.utcnow() if not cert_chain.not_valid_before <= now <= cert_chain.not_valid_after: raise VerificationException('Signing Certificate expired') # depends on [control=['if'], data=[]] ext = cert_chain.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME) if CERT_CHAIN_DOMAIN not in ext.value.get_values_for_type(DNSName): raise VerificationException('{} domain missing in Signature Certificate Chain'.format(CERT_CHAIN_DOMAIN)) # depends on [control=['if'], data=['CERT_CHAIN_DOMAIN']]
def check_file_for_aws_keys(filenames, keys): # type: (Sequence[str], Set[str]) -> List[Dict[str, str]] """Check if files contain AWS secrets. Return a list of all files containing AWS secrets and keys found, with all but the first four characters obfuscated to ease debugging. """ bad_files = [] for filename in filenames: with open(filename, 'r') as content: text_body = content.read() for key in keys: # naively match the entire file, low chance of incorrect # collision if key in text_body: bad_files.append({ 'filename': filename, 'key': key[:4] + '*' * 28, }) return bad_files
def function[check_file_for_aws_keys, parameter[filenames, keys]]: constant[Check if files contain AWS secrets. Return a list of all files containing AWS secrets and keys found, with all but the first four characters obfuscated to ease debugging. ] variable[bad_files] assign[=] list[[]] for taget[name[filename]] in starred[name[filenames]] begin[:] with call[name[open], parameter[name[filename], constant[r]]] begin[:] variable[text_body] assign[=] call[name[content].read, parameter[]] for taget[name[key]] in starred[name[keys]] begin[:] if compare[name[key] in name[text_body]] begin[:] call[name[bad_files].append, parameter[dictionary[[<ast.Constant object at 0x7da2054a59f0>, <ast.Constant object at 0x7da2054a7fd0>], [<ast.Name object at 0x7da2054a6e30>, <ast.BinOp object at 0x7da2054a78b0>]]]] return[name[bad_files]]
keyword[def] identifier[check_file_for_aws_keys] ( identifier[filenames] , identifier[keys] ): literal[string] identifier[bad_files] =[] keyword[for] identifier[filename] keyword[in] identifier[filenames] : keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[content] : identifier[text_body] = identifier[content] . identifier[read] () keyword[for] identifier[key] keyword[in] identifier[keys] : keyword[if] identifier[key] keyword[in] identifier[text_body] : identifier[bad_files] . identifier[append] ({ literal[string] : identifier[filename] , literal[string] : identifier[key] [: literal[int] ]+ literal[string] * literal[int] , }) keyword[return] identifier[bad_files]
def check_file_for_aws_keys(filenames, keys): # type: (Sequence[str], Set[str]) -> List[Dict[str, str]] 'Check if files contain AWS secrets.\n\n Return a list of all files containing AWS secrets and keys found, with all\n but the first four characters obfuscated to ease debugging.\n ' bad_files = [] for filename in filenames: with open(filename, 'r') as content: text_body = content.read() for key in keys: # naively match the entire file, low chance of incorrect # collision if key in text_body: bad_files.append({'filename': filename, 'key': key[:4] + '*' * 28}) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['with'], data=['content']] # depends on [control=['for'], data=['filename']] return bad_files
def subscribe(self, obj, handler): """Subscribes to status updates of the requested object. DEPRECATED The handler will be invoked when the controller sends a notification regarding changed state. The user can then further query the object for the state itself.""" if not isinstance(obj, LutronEntity): raise InvalidSubscription("Subscription target not a LutronEntity") _LOGGER.warning("DEPRECATED: Subscribing via Lutron.subscribe is obsolete. " "Please use LutronEntity.subscribe") if obj not in self._legacy_subscribers: self._legacy_subscribers[obj] = handler obj.subscribe(self._dispatch_legacy_subscriber, None)
def function[subscribe, parameter[self, obj, handler]]: constant[Subscribes to status updates of the requested object. DEPRECATED The handler will be invoked when the controller sends a notification regarding changed state. The user can then further query the object for the state itself.] if <ast.UnaryOp object at 0x7da1b05bdde0> begin[:] <ast.Raise object at 0x7da1b05bd1b0> call[name[_LOGGER].warning, parameter[constant[DEPRECATED: Subscribing via Lutron.subscribe is obsolete. Please use LutronEntity.subscribe]]] if compare[name[obj] <ast.NotIn object at 0x7da2590d7190> name[self]._legacy_subscribers] begin[:] call[name[self]._legacy_subscribers][name[obj]] assign[=] name[handler] call[name[obj].subscribe, parameter[name[self]._dispatch_legacy_subscriber, constant[None]]]
keyword[def] identifier[subscribe] ( identifier[self] , identifier[obj] , identifier[handler] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[obj] , identifier[LutronEntity] ): keyword[raise] identifier[InvalidSubscription] ( literal[string] ) identifier[_LOGGER] . identifier[warning] ( literal[string] literal[string] ) keyword[if] identifier[obj] keyword[not] keyword[in] identifier[self] . identifier[_legacy_subscribers] : identifier[self] . identifier[_legacy_subscribers] [ identifier[obj] ]= identifier[handler] identifier[obj] . identifier[subscribe] ( identifier[self] . identifier[_dispatch_legacy_subscriber] , keyword[None] )
def subscribe(self, obj, handler): """Subscribes to status updates of the requested object. DEPRECATED The handler will be invoked when the controller sends a notification regarding changed state. The user can then further query the object for the state itself.""" if not isinstance(obj, LutronEntity): raise InvalidSubscription('Subscription target not a LutronEntity') # depends on [control=['if'], data=[]] _LOGGER.warning('DEPRECATED: Subscribing via Lutron.subscribe is obsolete. Please use LutronEntity.subscribe') if obj not in self._legacy_subscribers: self._legacy_subscribers[obj] = handler obj.subscribe(self._dispatch_legacy_subscriber, None) # depends on [control=['if'], data=['obj']]
def split_input(cls, mapper_spec):
    """Returns a list of input readers for the given input specification.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Returns:
      A list of InputReaders.
    """
    params = _get_params(mapper_spec)
    n_shards = mapper_spec.shard_count

    # Overall time window and the slice of it each shard will cover.
    window_start = params[cls.START_TIME_PARAM]
    window_end = params[cls.END_TIME_PARAM]
    step = (window_end - window_start) / n_shards

    # Walk the window, carving off one step per reader; the params dict is
    # mutated in place between constructions.
    readers = []
    for _ in xrange(n_shards - 1):
        params[cls.END_TIME_PARAM] = params[cls.START_TIME_PARAM] + step
        readers.append(LogInputReader(**params))
        params[cls.START_TIME_PARAM] = params[cls.END_TIME_PARAM]

    # The final reader runs to the true end so rounding never drops time.
    params[cls.END_TIME_PARAM] = window_end
    readers.append(LogInputReader(**params))
    return readers
def function[split_input, parameter[cls, mapper_spec]]: constant[Returns a list of input readers for the given input specification. Args: mapper_spec: The MapperSpec for this InputReader. Returns: A list of InputReaders. ] variable[params] assign[=] call[name[_get_params], parameter[name[mapper_spec]]] variable[shard_count] assign[=] name[mapper_spec].shard_count variable[start_time] assign[=] call[name[params]][name[cls].START_TIME_PARAM] variable[end_time] assign[=] call[name[params]][name[cls].END_TIME_PARAM] variable[seconds_per_shard] assign[=] binary_operation[binary_operation[name[end_time] - name[start_time]] / name[shard_count]] variable[shards] assign[=] list[[]] for taget[name[_]] in starred[call[name[xrange], parameter[binary_operation[name[shard_count] - constant[1]]]]] begin[:] call[name[params]][name[cls].END_TIME_PARAM] assign[=] binary_operation[call[name[params]][name[cls].START_TIME_PARAM] + name[seconds_per_shard]] call[name[shards].append, parameter[call[name[LogInputReader], parameter[]]]] call[name[params]][name[cls].START_TIME_PARAM] assign[=] call[name[params]][name[cls].END_TIME_PARAM] call[name[params]][name[cls].END_TIME_PARAM] assign[=] name[end_time] return[binary_operation[name[shards] + list[[<ast.Call object at 0x7da18eb541c0>]]]]
keyword[def] identifier[split_input] ( identifier[cls] , identifier[mapper_spec] ): literal[string] identifier[params] = identifier[_get_params] ( identifier[mapper_spec] ) identifier[shard_count] = identifier[mapper_spec] . identifier[shard_count] identifier[start_time] = identifier[params] [ identifier[cls] . identifier[START_TIME_PARAM] ] identifier[end_time] = identifier[params] [ identifier[cls] . identifier[END_TIME_PARAM] ] identifier[seconds_per_shard] =( identifier[end_time] - identifier[start_time] )/ identifier[shard_count] identifier[shards] =[] keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[shard_count] - literal[int] ): identifier[params] [ identifier[cls] . identifier[END_TIME_PARAM] ]=( identifier[params] [ identifier[cls] . identifier[START_TIME_PARAM] ]+ identifier[seconds_per_shard] ) identifier[shards] . identifier[append] ( identifier[LogInputReader] (** identifier[params] )) identifier[params] [ identifier[cls] . identifier[START_TIME_PARAM] ]= identifier[params] [ identifier[cls] . identifier[END_TIME_PARAM] ] identifier[params] [ identifier[cls] . identifier[END_TIME_PARAM] ]= identifier[end_time] keyword[return] identifier[shards] +[ identifier[LogInputReader] (** identifier[params] )]
def split_input(cls, mapper_spec): """Returns a list of input readers for the given input specification. Args: mapper_spec: The MapperSpec for this InputReader. Returns: A list of InputReaders. """ params = _get_params(mapper_spec) shard_count = mapper_spec.shard_count # Pick out the overall start and end times and time step per shard. start_time = params[cls.START_TIME_PARAM] end_time = params[cls.END_TIME_PARAM] seconds_per_shard = (end_time - start_time) / shard_count # Create a LogInputReader for each shard, modulating the params as we go. shards = [] for _ in xrange(shard_count - 1): params[cls.END_TIME_PARAM] = params[cls.START_TIME_PARAM] + seconds_per_shard shards.append(LogInputReader(**params)) params[cls.START_TIME_PARAM] = params[cls.END_TIME_PARAM] # depends on [control=['for'], data=[]] # Create a final shard to complete the time range. params[cls.END_TIME_PARAM] = end_time return shards + [LogInputReader(**params)]
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # Coefficients specific to the required intensity measure type.
    coeffs = self.COEFFS[imt]

    # Mean is the sum of the individual model terms: style of faulting,
    # magnitude scaling, distance attenuation and site response.
    mean = self._compute_style_of_faulting_term(rup, coeffs)
    mean += self._compute_magnitude_scaling(rup.mag, coeffs)
    mean += self._compute_distance_scaling(dists.rjb, coeffs)
    mean += self._compute_site_term(sites.vs30, coeffs)

    stddevs = self._get_stddevs(coeffs, stddev_types,
                                num_sites=len(sites.vs30))
    return mean, stddevs
def function[get_mean_and_stddevs, parameter[self, sites, rup, dists, imt, stddev_types]]: constant[ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. ] variable[C] assign[=] call[name[self].COEFFS][name[imt]] variable[mean] assign[=] binary_operation[binary_operation[binary_operation[call[name[self]._compute_style_of_faulting_term, parameter[name[rup], name[C]]] + call[name[self]._compute_magnitude_scaling, parameter[name[rup].mag, name[C]]]] + call[name[self]._compute_distance_scaling, parameter[name[dists].rjb, name[C]]]] + call[name[self]._compute_site_term, parameter[name[sites].vs30, name[C]]]] variable[stddevs] assign[=] call[name[self]._get_stddevs, parameter[name[C], name[stddev_types]]] return[tuple[[<ast.Name object at 0x7da18bccbc10>, <ast.Name object at 0x7da18bcca0e0>]]]
keyword[def] identifier[get_mean_and_stddevs] ( identifier[self] , identifier[sites] , identifier[rup] , identifier[dists] , identifier[imt] , identifier[stddev_types] ): literal[string] identifier[C] = identifier[self] . identifier[COEFFS] [ identifier[imt] ] identifier[mean] =( identifier[self] . identifier[_compute_style_of_faulting_term] ( identifier[rup] , identifier[C] )+ identifier[self] . identifier[_compute_magnitude_scaling] ( identifier[rup] . identifier[mag] , identifier[C] )+ identifier[self] . identifier[_compute_distance_scaling] ( identifier[dists] . identifier[rjb] , identifier[C] )+ identifier[self] . identifier[_compute_site_term] ( identifier[sites] . identifier[vs30] , identifier[C] )) identifier[stddevs] = identifier[self] . identifier[_get_stddevs] ( identifier[C] , identifier[stddev_types] , identifier[num_sites] = identifier[len] ( identifier[sites] . identifier[vs30] )) keyword[return] identifier[mean] , identifier[stddevs]
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # extracting dictionary of coefficients specific to required # intensity measure type. C = self.COEFFS[imt] mean = self._compute_style_of_faulting_term(rup, C) + self._compute_magnitude_scaling(rup.mag, C) + self._compute_distance_scaling(dists.rjb, C) + self._compute_site_term(sites.vs30, C) stddevs = self._get_stddevs(C, stddev_types, num_sites=len(sites.vs30)) return (mean, stddevs)
def index(args):
    """
    %prog index database.fasta

    Wrapper for `gmap_build`. Same interface.
    """
    # NOTE: this docstring doubles as the CLI usage text via
    # OptionParser(index.__doc__); a stray lone backtick line that
    # corrupted the help output has been removed.
    p = OptionParser(index.__doc__)
    p.add_option("--supercat", default=False, action="store_true",
                 help="Concatenate reference to speed up alignment")
    opts, args = p.parse_args(args)

    # Exactly one positional argument (the FASTA database) is required.
    if len(args) != 1:
        sys.exit(not p.print_help())

    dbfile, = args
    check_index(dbfile, supercat=opts.supercat)
def function[index, parameter[args]]: constant[ %prog index database.fasta ` Wrapper for `gmap_build`. Same interface. ] variable[p] assign[=] call[name[OptionParser], parameter[name[index].__doc__]] call[name[p].add_option, parameter[constant[--supercat]]] <ast.Tuple object at 0x7da20e956dd0> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20e955240>]] <ast.Tuple object at 0x7da20e9549d0> assign[=] name[args] call[name[check_index], parameter[name[dbfile]]]
keyword[def] identifier[index] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[index] . identifier[__doc__] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[dbfile] ,= identifier[args] identifier[check_index] ( identifier[dbfile] , identifier[supercat] = identifier[opts] . identifier[supercat] )
def index(args): """ %prog index database.fasta ` Wrapper for `gmap_build`. Same interface. """ p = OptionParser(index.__doc__) p.add_option('--supercat', default=False, action='store_true', help='Concatenate reference to speed up alignment') (opts, args) = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (dbfile,) = args check_index(dbfile, supercat=opts.supercat)
def complete_handle(self) -> Generator[Any, None, None]:
    """Completion callback: build the response and dispatch the request.

    Invoked once the parser has a complete request.  Creates a Response
    bound to the current transport, delegates to ``self._handle`` and,
    when the client did not ask for keep-alive, closes the transport
    afterwards.  Per-request state is cleared at the end either way.
    """
    # Nothing parsed (e.g. connection dropped mid-request) — nothing to do.
    if self._request is None:
        return
    self._response = Response(
        self._loop,
        cast(asyncio.Transport, self._transport),
        # Fall back to the default HTTP version if the parser gave none.
        self._request.version or DEFAULT_HTTP_VERSION,
        self._response_charset,
    )
    # Capture keep-alive before handling; the handler may mutate the request.
    keep_alive = self._request.should_keep_alive
    if not keep_alive:
        # Announce that the connection closes after this response.
        self._response.set("Connection", "close")
    yield from self._handle(self._request, self._response)
    if not keep_alive and self._transport is not None:
        self._transport.close()
    # self._request_parser = None
    # Drop per-request state so the next request on this connection
    # starts clean.
    self._request = None
    self._response = None
def function[complete_handle, parameter[self]]: constant[ 完成回调 ] if compare[name[self]._request is constant[None]] begin[:] return[None] name[self]._response assign[=] call[name[Response], parameter[name[self]._loop, call[name[cast], parameter[name[asyncio].Transport, name[self]._transport]], <ast.BoolOp object at 0x7da1b162b940>, name[self]._response_charset]] variable[keep_alive] assign[=] name[self]._request.should_keep_alive if <ast.UnaryOp object at 0x7da1b16280a0> begin[:] call[name[self]._response.set, parameter[constant[Connection], constant[close]]] <ast.YieldFrom object at 0x7da1b162aec0> if <ast.BoolOp object at 0x7da1b1628490> begin[:] call[name[self]._transport.close, parameter[]] name[self]._request assign[=] constant[None] name[self]._response assign[=] constant[None]
keyword[def] identifier[complete_handle] ( identifier[self] )-> identifier[Generator] [ identifier[Any] , keyword[None] , keyword[None] ]: literal[string] keyword[if] identifier[self] . identifier[_request] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[_response] = identifier[Response] ( identifier[self] . identifier[_loop] , identifier[cast] ( identifier[asyncio] . identifier[Transport] , identifier[self] . identifier[_transport] ), identifier[self] . identifier[_request] . identifier[version] keyword[or] identifier[DEFAULT_HTTP_VERSION] , identifier[self] . identifier[_response_charset] , ) identifier[keep_alive] = identifier[self] . identifier[_request] . identifier[should_keep_alive] keyword[if] keyword[not] identifier[keep_alive] : identifier[self] . identifier[_response] . identifier[set] ( literal[string] , literal[string] ) keyword[yield] keyword[from] identifier[self] . identifier[_handle] ( identifier[self] . identifier[_request] , identifier[self] . identifier[_response] ) keyword[if] keyword[not] identifier[keep_alive] keyword[and] identifier[self] . identifier[_transport] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_transport] . identifier[close] () identifier[self] . identifier[_request] = keyword[None] identifier[self] . identifier[_response] = keyword[None]
def complete_handle(self) -> Generator[Any, None, None]: """ 完成回调 """ if self._request is None: return # depends on [control=['if'], data=[]] self._response = Response(self._loop, cast(asyncio.Transport, self._transport), self._request.version or DEFAULT_HTTP_VERSION, self._response_charset) keep_alive = self._request.should_keep_alive if not keep_alive: self._response.set('Connection', 'close') # depends on [control=['if'], data=[]] yield from self._handle(self._request, self._response) if not keep_alive and self._transport is not None: self._transport.close() # depends on [control=['if'], data=[]] # self._request_parser = None self._request = None self._response = None
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'status') and self.status is not None: _dict['status'] = self.status if hasattr(self, 'type') and self.type is not None: _dict['type'] = self.type return _dict
def function[_to_dict, parameter[self]]: constant[Return a json dictionary representing this model.] variable[_dict] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da18f09fbb0> begin[:] call[name[_dict]][constant[status]] assign[=] name[self].status if <ast.BoolOp object at 0x7da18f09cc40> begin[:] call[name[_dict]][constant[type]] assign[=] name[self].type return[name[_dict]]
keyword[def] identifier[_to_dict] ( identifier[self] ): literal[string] identifier[_dict] ={} keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[status] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[status] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[type] keyword[is] keyword[not] keyword[None] : identifier[_dict] [ literal[string] ]= identifier[self] . identifier[type] keyword[return] identifier[_dict]
def _to_dict(self): """Return a json dictionary representing this model.""" _dict = {} if hasattr(self, 'status') and self.status is not None: _dict['status'] = self.status # depends on [control=['if'], data=[]] if hasattr(self, 'type') and self.type is not None: _dict['type'] = self.type # depends on [control=['if'], data=[]] return _dict
def data_to_sys_base(self):
    """
    Converts parameters to system base. Stores a copy in ``self._store``.
    Sets the flag ``self.flag['sysbase']`` to True.

    :return: None
    """
    # Skip if there are no devices or conversion was already performed
    # (the method is not idempotent, so the flag guards double scaling).
    if (not self.n) or self._flags['sysbase']:
        return
    # System power base (MVA).
    Sb = self.system.mva
    # AC voltage base per device, looked up from the connected bus.
    Vb = matrix([])
    if 'bus' in self._ac.keys():
        Vb = self.read_data_ext('Bus', 'Vn', idx=self.bus)
    elif 'bus1' in self._ac.keys():
        Vb = self.read_data_ext('Bus', 'Vn', idx=self.bus1)
    # Voltages: device base -> system base (multiply by Vn, divide by Vb).
    for var in self._voltages:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = mul(self.__dict__[var], self.Vn)
        self.__dict__[var] = div(self.__dict__[var], Vb)
    # Powers: scale by device Sn, then divide by system Sb.
    for var in self._powers:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = mul(self.__dict__[var], self.Sn)
        self.__dict__[var] /= Sb
    # Currents: I_sys = I_dev * (Sn / Vn) * Vb / Sb.
    for var in self._currents:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = mul(self.__dict__[var], self.Sn)
        self.__dict__[var] = div(self.__dict__[var], self.Vn)
        self.__dict__[var] = mul(self.__dict__[var], Vb)
        self.__dict__[var] /= Sb
    # Impedances and admittances, if any, use Z bases Zn (device) / Zb (system).
    if len(self._z) or len(self._y):
        Zn = div(self.Vn**2, self.Sn)
        Zb = (Vb**2) / Sb
        for var in self._z:
            self._store[var] = self.__dict__[var]
            self.__dict__[var] = mul(self.__dict__[var], Zn)
            self.__dict__[var] = div(self.__dict__[var], Zb)
        for var in self._y:
            self._store[var] = self.__dict__[var]
            # Admittance scales inversely to impedance; complex-typed
            # matrices ('z') need complex operands for cvxopt mul/div.
            if self.__dict__[var].typecode == 'd':
                self.__dict__[var] = div(self.__dict__[var], Zn)
                self.__dict__[var] = mul(self.__dict__[var], Zb)
            elif self.__dict__[var].typecode == 'z':
                self.__dict__[var] = div(self.__dict__[var], Zn + 0j)
                self.__dict__[var] = mul(self.__dict__[var], Zb + 0j)
    # DC quantities: bases derived from the connected DC node voltage.
    if len(self._dcvoltages) or len(self._dccurrents) or len(
            self._r) or len(self._g):
        dckey = sorted(self._dc.keys())[0]
        Vbdc = self.read_data_ext('Node', 'Vdcn', self.__dict__[dckey])
        Ib = div(Sb, Vbdc)
        Rb = div(Vbdc, Ib)
    # The loops below iterate zero times when the lists above are empty,
    # so Vbdc/Ib/Rb are only referenced when they were computed.
    for var in self._dcvoltages:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = mul(self.__dict__[var], self.Vdcn)
        self.__dict__[var] = div(self.__dict__[var], Vbdc)
    for var in self._dccurrents:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = mul(self.__dict__[var], self.Idcn)
        self.__dict__[var] = div(self.__dict__[var], Ib)
    for var in self._r:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = div(self.__dict__[var], Rb)
    for var in self._g:
        self._store[var] = self.__dict__[var]
        self.__dict__[var] = mul(self.__dict__[var], Rb)
    # Mark conversion done so repeated calls do not rescale.
    self._flags['sysbase'] = True
def function[data_to_sys_base, parameter[self]]: constant[ Converts parameters to system base. Stores a copy in ``self._store``. Sets the flag ``self.flag['sysbase']`` to True. :return: None ] if <ast.BoolOp object at 0x7da18f58fe50> begin[:] return[None] variable[Sb] assign[=] name[self].system.mva variable[Vb] assign[=] call[name[matrix], parameter[list[[]]]] if compare[constant[bus] in call[name[self]._ac.keys, parameter[]]] begin[:] variable[Vb] assign[=] call[name[self].read_data_ext, parameter[constant[Bus], constant[Vn]]] for taget[name[var]] in starred[name[self]._voltages] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[self].Vn]] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[Vb]]] for taget[name[var]] in starred[name[self]._powers] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[self].Sn]] <ast.AugAssign object at 0x7da20e74a6e0> for taget[name[var]] in starred[name[self]._currents] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[self].Sn]] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[self].Vn]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[Vb]]] <ast.AugAssign object at 0x7da1b0534fd0> if <ast.BoolOp object at 0x7da1b0536fe0> begin[:] variable[Zn] assign[=] call[name[div], parameter[binary_operation[name[self].Vn ** constant[2]], name[self].Sn]] variable[Zb] assign[=] 
binary_operation[binary_operation[name[Vb] ** constant[2]] / name[Sb]] for taget[name[var]] in starred[name[self]._z] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[Zn]]] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[Zb]]] for taget[name[var]] in starred[name[self]._y] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] if compare[call[name[self].__dict__][name[var]].typecode equal[==] constant[d]] begin[:] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[Zn]]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[Zb]]] if <ast.BoolOp object at 0x7da20cabd870> begin[:] variable[dckey] assign[=] call[call[name[sorted], parameter[call[name[self]._dc.keys, parameter[]]]]][constant[0]] variable[Vbdc] assign[=] call[name[self].read_data_ext, parameter[constant[Node], constant[Vdcn], call[name[self].__dict__][name[dckey]]]] variable[Ib] assign[=] call[name[div], parameter[name[Sb], name[Vbdc]]] variable[Rb] assign[=] call[name[div], parameter[name[Vbdc], name[Ib]]] for taget[name[var]] in starred[name[self]._dcvoltages] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[self].Vdcn]] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[Vbdc]]] for taget[name[var]] in starred[name[self]._dccurrents] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], 
parameter[call[name[self].__dict__][name[var]], name[self].Idcn]] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[Ib]]] for taget[name[var]] in starred[name[self]._r] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[div], parameter[call[name[self].__dict__][name[var]], name[Rb]]] for taget[name[var]] in starred[name[self]._g] begin[:] call[name[self]._store][name[var]] assign[=] call[name[self].__dict__][name[var]] call[name[self].__dict__][name[var]] assign[=] call[name[mul], parameter[call[name[self].__dict__][name[var]], name[Rb]]] call[name[self]._flags][constant[sysbase]] assign[=] constant[True]
keyword[def] identifier[data_to_sys_base] ( identifier[self] ): literal[string] keyword[if] ( keyword[not] identifier[self] . identifier[n] ) keyword[or] identifier[self] . identifier[_flags] [ literal[string] ]: keyword[return] identifier[Sb] = identifier[self] . identifier[system] . identifier[mva] identifier[Vb] = identifier[matrix] ([]) keyword[if] literal[string] keyword[in] identifier[self] . identifier[_ac] . identifier[keys] (): identifier[Vb] = identifier[self] . identifier[read_data_ext] ( literal[string] , literal[string] , identifier[idx] = identifier[self] . identifier[bus] ) keyword[elif] literal[string] keyword[in] identifier[self] . identifier[_ac] . identifier[keys] (): identifier[Vb] = identifier[self] . identifier[read_data_ext] ( literal[string] , literal[string] , identifier[idx] = identifier[self] . identifier[bus1] ) keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_voltages] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[self] . identifier[Vn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Vb] ) keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_powers] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[self] . identifier[Sn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]/= identifier[Sb] keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_currents] : identifier[self] . 
identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[self] . identifier[Sn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[self] . identifier[Vn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Vb] ) identifier[self] . identifier[__dict__] [ identifier[var] ]/= identifier[Sb] keyword[if] identifier[len] ( identifier[self] . identifier[_z] ) keyword[or] identifier[len] ( identifier[self] . identifier[_y] ): identifier[Zn] = identifier[div] ( identifier[self] . identifier[Vn] ** literal[int] , identifier[self] . identifier[Sn] ) identifier[Zb] =( identifier[Vb] ** literal[int] )/ identifier[Sb] keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_z] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Zn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Zb] ) keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_y] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] keyword[if] identifier[self] . identifier[__dict__] [ identifier[var] ]. identifier[typecode] == literal[string] : identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . 
identifier[__dict__] [ identifier[var] ], identifier[Zn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Zb] ) keyword[elif] identifier[self] . identifier[__dict__] [ identifier[var] ]. identifier[typecode] == literal[string] : identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Zn] + literal[int] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Zb] + literal[int] ) keyword[if] identifier[len] ( identifier[self] . identifier[_dcvoltages] ) keyword[or] identifier[len] ( identifier[self] . identifier[_dccurrents] ) keyword[or] identifier[len] ( identifier[self] . identifier[_r] ) keyword[or] identifier[len] ( identifier[self] . identifier[_g] ): identifier[dckey] = identifier[sorted] ( identifier[self] . identifier[_dc] . identifier[keys] ())[ literal[int] ] identifier[Vbdc] = identifier[self] . identifier[read_data_ext] ( literal[string] , literal[string] , identifier[self] . identifier[__dict__] [ identifier[dckey] ]) identifier[Ib] = identifier[div] ( identifier[Sb] , identifier[Vbdc] ) identifier[Rb] = identifier[div] ( identifier[Vbdc] , identifier[Ib] ) keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_dcvoltages] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[self] . identifier[Vdcn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Vbdc] ) keyword[for] identifier[var] keyword[in] identifier[self] . 
identifier[_dccurrents] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[self] . identifier[Idcn] ) identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Ib] ) keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_r] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[div] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Rb] ) keyword[for] identifier[var] keyword[in] identifier[self] . identifier[_g] : identifier[self] . identifier[_store] [ identifier[var] ]= identifier[self] . identifier[__dict__] [ identifier[var] ] identifier[self] . identifier[__dict__] [ identifier[var] ]= identifier[mul] ( identifier[self] . identifier[__dict__] [ identifier[var] ], identifier[Rb] ) identifier[self] . identifier[_flags] [ literal[string] ]= keyword[True]
def data_to_sys_base(self): """ Converts parameters to system base. Stores a copy in ``self._store``. Sets the flag ``self.flag['sysbase']`` to True. :return: None """ if not self.n or self._flags['sysbase']: return # depends on [control=['if'], data=[]] Sb = self.system.mva Vb = matrix([]) if 'bus' in self._ac.keys(): Vb = self.read_data_ext('Bus', 'Vn', idx=self.bus) # depends on [control=['if'], data=[]] elif 'bus1' in self._ac.keys(): Vb = self.read_data_ext('Bus', 'Vn', idx=self.bus1) # depends on [control=['if'], data=[]] for var in self._voltages: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], self.Vn) self.__dict__[var] = div(self.__dict__[var], Vb) # depends on [control=['for'], data=['var']] for var in self._powers: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], self.Sn) self.__dict__[var] /= Sb # depends on [control=['for'], data=['var']] for var in self._currents: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], self.Sn) self.__dict__[var] = div(self.__dict__[var], self.Vn) self.__dict__[var] = mul(self.__dict__[var], Vb) self.__dict__[var] /= Sb # depends on [control=['for'], data=['var']] if len(self._z) or len(self._y): Zn = div(self.Vn ** 2, self.Sn) Zb = Vb ** 2 / Sb for var in self._z: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], Zn) self.__dict__[var] = div(self.__dict__[var], Zb) # depends on [control=['for'], data=['var']] for var in self._y: self._store[var] = self.__dict__[var] if self.__dict__[var].typecode == 'd': self.__dict__[var] = div(self.__dict__[var], Zn) self.__dict__[var] = mul(self.__dict__[var], Zb) # depends on [control=['if'], data=[]] elif self.__dict__[var].typecode == 'z': self.__dict__[var] = div(self.__dict__[var], Zn + 0j) self.__dict__[var] = mul(self.__dict__[var], Zb + 0j) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['var']] # depends on 
[control=['if'], data=[]] if len(self._dcvoltages) or len(self._dccurrents) or len(self._r) or len(self._g): dckey = sorted(self._dc.keys())[0] Vbdc = self.read_data_ext('Node', 'Vdcn', self.__dict__[dckey]) Ib = div(Sb, Vbdc) Rb = div(Vbdc, Ib) # depends on [control=['if'], data=[]] for var in self._dcvoltages: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], self.Vdcn) self.__dict__[var] = div(self.__dict__[var], Vbdc) # depends on [control=['for'], data=['var']] for var in self._dccurrents: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], self.Idcn) self.__dict__[var] = div(self.__dict__[var], Ib) # depends on [control=['for'], data=['var']] for var in self._r: self._store[var] = self.__dict__[var] self.__dict__[var] = div(self.__dict__[var], Rb) # depends on [control=['for'], data=['var']] for var in self._g: self._store[var] = self.__dict__[var] self.__dict__[var] = mul(self.__dict__[var], Rb) # depends on [control=['for'], data=['var']] self._flags['sysbase'] = True
def set_http_application_url_output_status_string(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") set_http_application_url = ET.Element("set_http_application_url") config = set_http_application_url output = ET.SubElement(set_http_application_url, "output") status_string = ET.SubElement(output, "status-string") status_string.text = kwargs.pop('status_string') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[set_http_application_url_output_status_string, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[set_http_application_url] assign[=] call[name[ET].Element, parameter[constant[set_http_application_url]]] variable[config] assign[=] name[set_http_application_url] variable[output] assign[=] call[name[ET].SubElement, parameter[name[set_http_application_url], constant[output]]] variable[status_string] assign[=] call[name[ET].SubElement, parameter[name[output], constant[status-string]]] name[status_string].text assign[=] call[name[kwargs].pop, parameter[constant[status_string]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[set_http_application_url_output_status_string] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[set_http_application_url] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[set_http_application_url] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[set_http_application_url] , literal[string] ) identifier[status_string] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[status_string] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def set_http_application_url_output_status_string(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') set_http_application_url = ET.Element('set_http_application_url') config = set_http_application_url output = ET.SubElement(set_http_application_url, 'output') status_string = ET.SubElement(output, 'status-string') status_string.text = kwargs.pop('status_string') callback = kwargs.pop('callback', self._callback) return callback(config)
def gap_to_sorl(time_gap): """ P1D to +1DAY :param time_gap: :return: solr's format duration. """ quantity, unit = parse_ISO8601(time_gap) if unit[0] == "WEEKS": return "+{0}DAYS".format(quantity * 7) else: return "+{0}{1}".format(quantity, unit[0])
def function[gap_to_sorl, parameter[time_gap]]: constant[ P1D to +1DAY :param time_gap: :return: solr's format duration. ] <ast.Tuple object at 0x7da18dc99f90> assign[=] call[name[parse_ISO8601], parameter[name[time_gap]]] if compare[call[name[unit]][constant[0]] equal[==] constant[WEEKS]] begin[:] return[call[constant[+{0}DAYS].format, parameter[binary_operation[name[quantity] * constant[7]]]]]
keyword[def] identifier[gap_to_sorl] ( identifier[time_gap] ): literal[string] identifier[quantity] , identifier[unit] = identifier[parse_ISO8601] ( identifier[time_gap] ) keyword[if] identifier[unit] [ literal[int] ]== literal[string] : keyword[return] literal[string] . identifier[format] ( identifier[quantity] * literal[int] ) keyword[else] : keyword[return] literal[string] . identifier[format] ( identifier[quantity] , identifier[unit] [ literal[int] ])
def gap_to_sorl(time_gap): """ P1D to +1DAY :param time_gap: :return: solr's format duration. """ (quantity, unit) = parse_ISO8601(time_gap) if unit[0] == 'WEEKS': return '+{0}DAYS'.format(quantity * 7) # depends on [control=['if'], data=[]] else: return '+{0}{1}'.format(quantity, unit[0])
def utcnow_ts(): """Timestamp version of our utcnow function.""" if utcnow.override_time is None: # NOTE(kgriffs): This is several times faster # than going through calendar.timegm(...) return int(time.time()) return calendar.timegm(utcnow().timetuple())
def function[utcnow_ts, parameter[]]: constant[Timestamp version of our utcnow function.] if compare[name[utcnow].override_time is constant[None]] begin[:] return[call[name[int], parameter[call[name[time].time, parameter[]]]]] return[call[name[calendar].timegm, parameter[call[call[name[utcnow], parameter[]].timetuple, parameter[]]]]]
keyword[def] identifier[utcnow_ts] (): literal[string] keyword[if] identifier[utcnow] . identifier[override_time] keyword[is] keyword[None] : keyword[return] identifier[int] ( identifier[time] . identifier[time] ()) keyword[return] identifier[calendar] . identifier[timegm] ( identifier[utcnow] (). identifier[timetuple] ())
def utcnow_ts(): """Timestamp version of our utcnow function.""" if utcnow.override_time is None: # NOTE(kgriffs): This is several times faster # than going through calendar.timegm(...) return int(time.time()) # depends on [control=['if'], data=[]] return calendar.timegm(utcnow().timetuple())
def element_or_none(self, using, value): """Check if an element in the current element. Support: Android iOS Web(WebView) Args: using(str): The element location strategy. value(str): The value of the location strategy. Returns: Return Element if the element does exists and return None otherwise. Raises: WebDriverException. """ try: return self._execute(Command.FIND_CHILD_ELEMENT, { 'using': using, 'value': value }) except: return None
def function[element_or_none, parameter[self, using, value]]: constant[Check if an element in the current element. Support: Android iOS Web(WebView) Args: using(str): The element location strategy. value(str): The value of the location strategy. Returns: Return Element if the element does exists and return None otherwise. Raises: WebDriverException. ] <ast.Try object at 0x7da1aff8ecb0>
keyword[def] identifier[element_or_none] ( identifier[self] , identifier[using] , identifier[value] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[_execute] ( identifier[Command] . identifier[FIND_CHILD_ELEMENT] ,{ literal[string] : identifier[using] , literal[string] : identifier[value] }) keyword[except] : keyword[return] keyword[None]
def element_or_none(self, using, value): """Check if an element in the current element. Support: Android iOS Web(WebView) Args: using(str): The element location strategy. value(str): The value of the location strategy. Returns: Return Element if the element does exists and return None otherwise. Raises: WebDriverException. """ try: return self._execute(Command.FIND_CHILD_ELEMENT, {'using': using, 'value': value}) # depends on [control=['try'], data=[]] except: return None # depends on [control=['except'], data=[]]
def code_div(self): """The string for creating a code example for the gallery""" code_example = self.code_example if code_example is None: return None return self.CODE_TEMPLATE.format( snippet=self.get_description()[1], code=code_example, ref_name=self.reference)
def function[code_div, parameter[self]]: constant[The string for creating a code example for the gallery] variable[code_example] assign[=] name[self].code_example if compare[name[code_example] is constant[None]] begin[:] return[constant[None]] return[call[name[self].CODE_TEMPLATE.format, parameter[]]]
keyword[def] identifier[code_div] ( identifier[self] ): literal[string] identifier[code_example] = identifier[self] . identifier[code_example] keyword[if] identifier[code_example] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[return] identifier[self] . identifier[CODE_TEMPLATE] . identifier[format] ( identifier[snippet] = identifier[self] . identifier[get_description] ()[ literal[int] ], identifier[code] = identifier[code_example] , identifier[ref_name] = identifier[self] . identifier[reference] )
def code_div(self): """The string for creating a code example for the gallery""" code_example = self.code_example if code_example is None: return None # depends on [control=['if'], data=[]] return self.CODE_TEMPLATE.format(snippet=self.get_description()[1], code=code_example, ref_name=self.reference)
def open_submission(self): """ Open the full submission and comment tree for the selected comment. """ url = self.get_selected_item().get('submission_permalink') if url: self.selected_page = self.open_submission_page(url)
def function[open_submission, parameter[self]]: constant[ Open the full submission and comment tree for the selected comment. ] variable[url] assign[=] call[call[name[self].get_selected_item, parameter[]].get, parameter[constant[submission_permalink]]] if name[url] begin[:] name[self].selected_page assign[=] call[name[self].open_submission_page, parameter[name[url]]]
keyword[def] identifier[open_submission] ( identifier[self] ): literal[string] identifier[url] = identifier[self] . identifier[get_selected_item] (). identifier[get] ( literal[string] ) keyword[if] identifier[url] : identifier[self] . identifier[selected_page] = identifier[self] . identifier[open_submission_page] ( identifier[url] )
def open_submission(self): """ Open the full submission and comment tree for the selected comment. """ url = self.get_selected_item().get('submission_permalink') if url: self.selected_page = self.open_submission_page(url) # depends on [control=['if'], data=[]]
def chgid(name, gid): ''' Change the gid for a named group CLI Example: .. code-block:: bash salt '*' group.chgid foo 4376 ''' if not isinstance(gid, int): raise SaltInvocationError('gid must be an integer') pre_gid = __salt__['file.group_to_gid'](name) pre_info = info(name) if not pre_info: raise CommandExecutionError( 'Group \'{0}\' does not exist'.format(name) ) if gid == pre_info['gid']: return True cmd = ['dseditgroup', '-o', 'edit', '-i', gid, name] return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
def function[chgid, parameter[name, gid]]: constant[ Change the gid for a named group CLI Example: .. code-block:: bash salt '*' group.chgid foo 4376 ] if <ast.UnaryOp object at 0x7da20c7c8f70> begin[:] <ast.Raise object at 0x7da20c7cab00> variable[pre_gid] assign[=] call[call[name[__salt__]][constant[file.group_to_gid]], parameter[name[name]]] variable[pre_info] assign[=] call[name[info], parameter[name[name]]] if <ast.UnaryOp object at 0x7da20c6aabc0> begin[:] <ast.Raise object at 0x7da20c6a8ee0> if compare[name[gid] equal[==] call[name[pre_info]][constant[gid]]] begin[:] return[constant[True]] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18ede53f0>, <ast.Constant object at 0x7da18ede4100>, <ast.Constant object at 0x7da18ede5f90>, <ast.Constant object at 0x7da18ede7130>, <ast.Name object at 0x7da18ede7a60>, <ast.Name object at 0x7da18ede5030>]] return[compare[call[call[name[__salt__]][constant[cmd.retcode]], parameter[name[cmd]]] equal[==] constant[0]]]
keyword[def] identifier[chgid] ( identifier[name] , identifier[gid] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[gid] , identifier[int] ): keyword[raise] identifier[SaltInvocationError] ( literal[string] ) identifier[pre_gid] = identifier[__salt__] [ literal[string] ]( identifier[name] ) identifier[pre_info] = identifier[info] ( identifier[name] ) keyword[if] keyword[not] identifier[pre_info] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[name] ) ) keyword[if] identifier[gid] == identifier[pre_info] [ literal[string] ]: keyword[return] keyword[True] identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] , identifier[gid] , identifier[name] ] keyword[return] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )== literal[int]
def chgid(name, gid): """ Change the gid for a named group CLI Example: .. code-block:: bash salt '*' group.chgid foo 4376 """ if not isinstance(gid, int): raise SaltInvocationError('gid must be an integer') # depends on [control=['if'], data=[]] pre_gid = __salt__['file.group_to_gid'](name) pre_info = info(name) if not pre_info: raise CommandExecutionError("Group '{0}' does not exist".format(name)) # depends on [control=['if'], data=[]] if gid == pre_info['gid']: return True # depends on [control=['if'], data=[]] cmd = ['dseditgroup', '-o', 'edit', '-i', gid, name] return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
def run(self, calc_bleu=True, epoch=None, iteration=None, eval_path=None, summary=False, reference_path=None): """ Runs translation on test dataset. :param calc_bleu: if True compares results with reference and computes BLEU score :param epoch: index of the current epoch :param iteration: index of the current iteration :param eval_path: path to the file for saving results :param summary: if True prints summary :param reference_path: path to the file with reference translation """ if self.cuda: test_bleu = torch.cuda.FloatTensor([0]) break_training = torch.cuda.LongTensor([0]) else: test_bleu = torch.FloatTensor([0]) break_training = torch.LongTensor([0]) if eval_path is None: eval_path = self.build_eval_path(epoch, iteration) detok_eval_path = eval_path + '.detok' with contextlib.suppress(FileNotFoundError): os.remove(eval_path) os.remove(detok_eval_path) rank = get_rank() logging.info(f'Running evaluation on test set') self.model.eval() torch.cuda.empty_cache() output = self.evaluate(epoch, iteration, summary) output = output[:len(self.loader.dataset)] output = self.loader.dataset.unsort(output) if rank == 0: with open(eval_path, 'a') as eval_file: eval_file.writelines(output) if calc_bleu: self.run_detokenizer(eval_path) test_bleu[0] = self.run_sacrebleu(detok_eval_path, reference_path) if summary: logging.info(f'BLEU on test dataset: {test_bleu[0]:.2f}') if self.target_bleu and test_bleu[0] >= self.target_bleu: logging.info(f'Target accuracy reached') break_training[0] = 1 barrier() torch.cuda.empty_cache() logging.info(f'Finished evaluation on test set') if self.distributed: dist.broadcast(break_training, 0) dist.broadcast(test_bleu, 0) return test_bleu[0].item(), break_training[0].item()
def function[run, parameter[self, calc_bleu, epoch, iteration, eval_path, summary, reference_path]]: constant[ Runs translation on test dataset. :param calc_bleu: if True compares results with reference and computes BLEU score :param epoch: index of the current epoch :param iteration: index of the current iteration :param eval_path: path to the file for saving results :param summary: if True prints summary :param reference_path: path to the file with reference translation ] if name[self].cuda begin[:] variable[test_bleu] assign[=] call[name[torch].cuda.FloatTensor, parameter[list[[<ast.Constant object at 0x7da1b21ee140>]]]] variable[break_training] assign[=] call[name[torch].cuda.LongTensor, parameter[list[[<ast.Constant object at 0x7da1b21efe50>]]]] if compare[name[eval_path] is constant[None]] begin[:] variable[eval_path] assign[=] call[name[self].build_eval_path, parameter[name[epoch], name[iteration]]] variable[detok_eval_path] assign[=] binary_operation[name[eval_path] + constant[.detok]] with call[name[contextlib].suppress, parameter[name[FileNotFoundError]]] begin[:] call[name[os].remove, parameter[name[eval_path]]] call[name[os].remove, parameter[name[detok_eval_path]]] variable[rank] assign[=] call[name[get_rank], parameter[]] call[name[logging].info, parameter[<ast.JoinedStr object at 0x7da1b21edcf0>]] call[name[self].model.eval, parameter[]] call[name[torch].cuda.empty_cache, parameter[]] variable[output] assign[=] call[name[self].evaluate, parameter[name[epoch], name[iteration], name[summary]]] variable[output] assign[=] call[name[output]][<ast.Slice object at 0x7da1b21efb50>] variable[output] assign[=] call[name[self].loader.dataset.unsort, parameter[name[output]]] if compare[name[rank] equal[==] constant[0]] begin[:] with call[name[open], parameter[name[eval_path], constant[a]]] begin[:] call[name[eval_file].writelines, parameter[name[output]]] if name[calc_bleu] begin[:] call[name[self].run_detokenizer, parameter[name[eval_path]]] 
call[name[test_bleu]][constant[0]] assign[=] call[name[self].run_sacrebleu, parameter[name[detok_eval_path], name[reference_path]]] if name[summary] begin[:] call[name[logging].info, parameter[<ast.JoinedStr object at 0x7da1b21a0250>]] if <ast.BoolOp object at 0x7da1b21a18a0> begin[:] call[name[logging].info, parameter[<ast.JoinedStr object at 0x7da1b21a04f0>]] call[name[break_training]][constant[0]] assign[=] constant[1] call[name[barrier], parameter[]] call[name[torch].cuda.empty_cache, parameter[]] call[name[logging].info, parameter[<ast.JoinedStr object at 0x7da1b21a2dd0>]] if name[self].distributed begin[:] call[name[dist].broadcast, parameter[name[break_training], constant[0]]] call[name[dist].broadcast, parameter[name[test_bleu], constant[0]]] return[tuple[[<ast.Call object at 0x7da1b21a2ce0>, <ast.Call object at 0x7da1b21a06d0>]]]
keyword[def] identifier[run] ( identifier[self] , identifier[calc_bleu] = keyword[True] , identifier[epoch] = keyword[None] , identifier[iteration] = keyword[None] , identifier[eval_path] = keyword[None] , identifier[summary] = keyword[False] , identifier[reference_path] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[cuda] : identifier[test_bleu] = identifier[torch] . identifier[cuda] . identifier[FloatTensor] ([ literal[int] ]) identifier[break_training] = identifier[torch] . identifier[cuda] . identifier[LongTensor] ([ literal[int] ]) keyword[else] : identifier[test_bleu] = identifier[torch] . identifier[FloatTensor] ([ literal[int] ]) identifier[break_training] = identifier[torch] . identifier[LongTensor] ([ literal[int] ]) keyword[if] identifier[eval_path] keyword[is] keyword[None] : identifier[eval_path] = identifier[self] . identifier[build_eval_path] ( identifier[epoch] , identifier[iteration] ) identifier[detok_eval_path] = identifier[eval_path] + literal[string] keyword[with] identifier[contextlib] . identifier[suppress] ( identifier[FileNotFoundError] ): identifier[os] . identifier[remove] ( identifier[eval_path] ) identifier[os] . identifier[remove] ( identifier[detok_eval_path] ) identifier[rank] = identifier[get_rank] () identifier[logging] . identifier[info] ( literal[string] ) identifier[self] . identifier[model] . identifier[eval] () identifier[torch] . identifier[cuda] . identifier[empty_cache] () identifier[output] = identifier[self] . identifier[evaluate] ( identifier[epoch] , identifier[iteration] , identifier[summary] ) identifier[output] = identifier[output] [: identifier[len] ( identifier[self] . identifier[loader] . identifier[dataset] )] identifier[output] = identifier[self] . identifier[loader] . identifier[dataset] . 
identifier[unsort] ( identifier[output] ) keyword[if] identifier[rank] == literal[int] : keyword[with] identifier[open] ( identifier[eval_path] , literal[string] ) keyword[as] identifier[eval_file] : identifier[eval_file] . identifier[writelines] ( identifier[output] ) keyword[if] identifier[calc_bleu] : identifier[self] . identifier[run_detokenizer] ( identifier[eval_path] ) identifier[test_bleu] [ literal[int] ]= identifier[self] . identifier[run_sacrebleu] ( identifier[detok_eval_path] , identifier[reference_path] ) keyword[if] identifier[summary] : identifier[logging] . identifier[info] ( literal[string] ) keyword[if] identifier[self] . identifier[target_bleu] keyword[and] identifier[test_bleu] [ literal[int] ]>= identifier[self] . identifier[target_bleu] : identifier[logging] . identifier[info] ( literal[string] ) identifier[break_training] [ literal[int] ]= literal[int] identifier[barrier] () identifier[torch] . identifier[cuda] . identifier[empty_cache] () identifier[logging] . identifier[info] ( literal[string] ) keyword[if] identifier[self] . identifier[distributed] : identifier[dist] . identifier[broadcast] ( identifier[break_training] , literal[int] ) identifier[dist] . identifier[broadcast] ( identifier[test_bleu] , literal[int] ) keyword[return] identifier[test_bleu] [ literal[int] ]. identifier[item] (), identifier[break_training] [ literal[int] ]. identifier[item] ()
def run(self, calc_bleu=True, epoch=None, iteration=None, eval_path=None, summary=False, reference_path=None): """ Runs translation on test dataset. :param calc_bleu: if True compares results with reference and computes BLEU score :param epoch: index of the current epoch :param iteration: index of the current iteration :param eval_path: path to the file for saving results :param summary: if True prints summary :param reference_path: path to the file with reference translation """ if self.cuda: test_bleu = torch.cuda.FloatTensor([0]) break_training = torch.cuda.LongTensor([0]) # depends on [control=['if'], data=[]] else: test_bleu = torch.FloatTensor([0]) break_training = torch.LongTensor([0]) if eval_path is None: eval_path = self.build_eval_path(epoch, iteration) # depends on [control=['if'], data=['eval_path']] detok_eval_path = eval_path + '.detok' with contextlib.suppress(FileNotFoundError): os.remove(eval_path) os.remove(detok_eval_path) # depends on [control=['with'], data=[]] rank = get_rank() logging.info(f'Running evaluation on test set') self.model.eval() torch.cuda.empty_cache() output = self.evaluate(epoch, iteration, summary) output = output[:len(self.loader.dataset)] output = self.loader.dataset.unsort(output) if rank == 0: with open(eval_path, 'a') as eval_file: eval_file.writelines(output) # depends on [control=['with'], data=['eval_file']] if calc_bleu: self.run_detokenizer(eval_path) test_bleu[0] = self.run_sacrebleu(detok_eval_path, reference_path) if summary: logging.info(f'BLEU on test dataset: {test_bleu[0]:.2f}') # depends on [control=['if'], data=[]] if self.target_bleu and test_bleu[0] >= self.target_bleu: logging.info(f'Target accuracy reached') break_training[0] = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] barrier() torch.cuda.empty_cache() logging.info(f'Finished evaluation on test set') if self.distributed: dist.broadcast(break_training, 0) 
dist.broadcast(test_bleu, 0) # depends on [control=['if'], data=[]] return (test_bleu[0].item(), break_training[0].item())
def set_state(self, state, *, index=0): """Set state of a light.""" return self.set_values({ ATTR_DEVICE_STATE: int(state) }, index=index)
def function[set_state, parameter[self, state]]: constant[Set state of a light.] return[call[name[self].set_values, parameter[dictionary[[<ast.Name object at 0x7da18ede5090>], [<ast.Call object at 0x7da18ede6290>]]]]]
keyword[def] identifier[set_state] ( identifier[self] , identifier[state] ,*, identifier[index] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[set_values] ({ identifier[ATTR_DEVICE_STATE] : identifier[int] ( identifier[state] ) }, identifier[index] = identifier[index] )
def set_state(self, state, *, index=0): """Set state of a light.""" return self.set_values({ATTR_DEVICE_STATE: int(state)}, index=index)
def find_creation_date(path): """ Try to get the date that a file was created, falling back to when it was last modified if that's not possible. Parameters ---------- path : str File's path. Returns ---------- creation_date : str Time of file creation. Example ---------- >>> import neurokit as nk >>> import datetime >>> >>> creation_date = nk.find_creation_date(file) >>> creation_date = datetime.datetime.fromtimestamp(creation_date) Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ - Mark Amery *Dependencies* - platform - os *See Also* - http://stackoverflow.com/a/39501288/1709587 """ if platform.system() == 'Windows': return(os.path.getctime(path)) else: stat = os.stat(path) try: return(stat.st_birthtime) except AttributeError: print("Neuropsydia error: get_creation_date(): We're probably on Linux. No easy way to get creation dates here, so we'll settle for when its content was last modified.") return(stat.st_mtime)
def function[find_creation_date, parameter[path]]: constant[ Try to get the date that a file was created, falling back to when it was last modified if that's not possible. Parameters ---------- path : str File's path. Returns ---------- creation_date : str Time of file creation. Example ---------- >>> import neurokit as nk >>> import datetime >>> >>> creation_date = nk.find_creation_date(file) >>> creation_date = datetime.datetime.fromtimestamp(creation_date) Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ - Mark Amery *Dependencies* - platform - os *See Also* - http://stackoverflow.com/a/39501288/1709587 ] if compare[call[name[platform].system, parameter[]] equal[==] constant[Windows]] begin[:] return[call[name[os].path.getctime, parameter[name[path]]]]
keyword[def] identifier[find_creation_date] ( identifier[path] ): literal[string] keyword[if] identifier[platform] . identifier[system] ()== literal[string] : keyword[return] ( identifier[os] . identifier[path] . identifier[getctime] ( identifier[path] )) keyword[else] : identifier[stat] = identifier[os] . identifier[stat] ( identifier[path] ) keyword[try] : keyword[return] ( identifier[stat] . identifier[st_birthtime] ) keyword[except] identifier[AttributeError] : identifier[print] ( literal[string] ) keyword[return] ( identifier[stat] . identifier[st_mtime] )
def find_creation_date(path): """ Try to get the date that a file was created, falling back to when it was last modified if that's not possible. Parameters ---------- path : str File's path. Returns ---------- creation_date : str Time of file creation. Example ---------- >>> import neurokit as nk >>> import datetime >>> >>> creation_date = nk.find_creation_date(file) >>> creation_date = datetime.datetime.fromtimestamp(creation_date) Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ - Mark Amery *Dependencies* - platform - os *See Also* - http://stackoverflow.com/a/39501288/1709587 """ if platform.system() == 'Windows': return os.path.getctime(path) # depends on [control=['if'], data=[]] else: stat = os.stat(path) try: return stat.st_birthtime # depends on [control=['try'], data=[]] except AttributeError: print("Neuropsydia error: get_creation_date(): We're probably on Linux. No easy way to get creation dates here, so we'll settle for when its content was last modified.") return stat.st_mtime # depends on [control=['except'], data=[]]
def report(self, simulation, state): """Generate a report. Parameters ---------- simulation : Simulation The Simulation to generate a report for state : State The current state of the simulation """ if not self._initialized: self._initial_clock_time = datetime.now() self._initial_simulation_time = state.getTime() self._initial_steps = simulation.currentStep self._initialized = True steps = simulation.currentStep time = datetime.now() - self._initial_clock_time days = time.total_seconds()/86400.0 ns = (state.getTime()-self._initial_simulation_time).value_in_unit(u.nanosecond) margin = ' ' * self.margin ns_day = ns/days delta = ((self.total_steps-steps)*time.total_seconds())/steps # remove microseconds to have cleaner output remaining = timedelta(seconds=int(delta)) percentage = 100.0*steps/self.total_steps if ns_day: template = '{}{}/{} steps ({:.1f}%) - {} left @ {:.1f} ns/day \r' else: template = '{}{}/{} steps ({:.1f}%) \r' report = template.format(margin, steps, self.total_steps, percentage, remaining, ns_day) self._out.write(report) if hasattr(self._out, 'flush'): self._out.flush()
def function[report, parameter[self, simulation, state]]: constant[Generate a report. Parameters ---------- simulation : Simulation The Simulation to generate a report for state : State The current state of the simulation ] if <ast.UnaryOp object at 0x7da18f00ee30> begin[:] name[self]._initial_clock_time assign[=] call[name[datetime].now, parameter[]] name[self]._initial_simulation_time assign[=] call[name[state].getTime, parameter[]] name[self]._initial_steps assign[=] name[simulation].currentStep name[self]._initialized assign[=] constant[True] variable[steps] assign[=] name[simulation].currentStep variable[time] assign[=] binary_operation[call[name[datetime].now, parameter[]] - name[self]._initial_clock_time] variable[days] assign[=] binary_operation[call[name[time].total_seconds, parameter[]] / constant[86400.0]] variable[ns] assign[=] call[binary_operation[call[name[state].getTime, parameter[]] - name[self]._initial_simulation_time].value_in_unit, parameter[name[u].nanosecond]] variable[margin] assign[=] binary_operation[constant[ ] * name[self].margin] variable[ns_day] assign[=] binary_operation[name[ns] / name[days]] variable[delta] assign[=] binary_operation[binary_operation[binary_operation[name[self].total_steps - name[steps]] * call[name[time].total_seconds, parameter[]]] / name[steps]] variable[remaining] assign[=] call[name[timedelta], parameter[]] variable[percentage] assign[=] binary_operation[binary_operation[constant[100.0] * name[steps]] / name[self].total_steps] if name[ns_day] begin[:] variable[template] assign[=] constant[{}{}/{} steps ({:.1f}%) - {} left @ {:.1f} ns/day ] variable[report] assign[=] call[name[template].format, parameter[name[margin], name[steps], name[self].total_steps, name[percentage], name[remaining], name[ns_day]]] call[name[self]._out.write, parameter[name[report]]] if call[name[hasattr], parameter[name[self]._out, constant[flush]]] begin[:] call[name[self]._out.flush, parameter[]]
keyword[def] identifier[report] ( identifier[self] , identifier[simulation] , identifier[state] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_initialized] : identifier[self] . identifier[_initial_clock_time] = identifier[datetime] . identifier[now] () identifier[self] . identifier[_initial_simulation_time] = identifier[state] . identifier[getTime] () identifier[self] . identifier[_initial_steps] = identifier[simulation] . identifier[currentStep] identifier[self] . identifier[_initialized] = keyword[True] identifier[steps] = identifier[simulation] . identifier[currentStep] identifier[time] = identifier[datetime] . identifier[now] ()- identifier[self] . identifier[_initial_clock_time] identifier[days] = identifier[time] . identifier[total_seconds] ()/ literal[int] identifier[ns] =( identifier[state] . identifier[getTime] ()- identifier[self] . identifier[_initial_simulation_time] ). identifier[value_in_unit] ( identifier[u] . identifier[nanosecond] ) identifier[margin] = literal[string] * identifier[self] . identifier[margin] identifier[ns_day] = identifier[ns] / identifier[days] identifier[delta] =(( identifier[self] . identifier[total_steps] - identifier[steps] )* identifier[time] . identifier[total_seconds] ())/ identifier[steps] identifier[remaining] = identifier[timedelta] ( identifier[seconds] = identifier[int] ( identifier[delta] )) identifier[percentage] = literal[int] * identifier[steps] / identifier[self] . identifier[total_steps] keyword[if] identifier[ns_day] : identifier[template] = literal[string] keyword[else] : identifier[template] = literal[string] identifier[report] = identifier[template] . identifier[format] ( identifier[margin] , identifier[steps] , identifier[self] . identifier[total_steps] , identifier[percentage] , identifier[remaining] , identifier[ns_day] ) identifier[self] . identifier[_out] . identifier[write] ( identifier[report] ) keyword[if] identifier[hasattr] ( identifier[self] . 
identifier[_out] , literal[string] ): identifier[self] . identifier[_out] . identifier[flush] ()
def report(self, simulation, state): """Generate a report. Parameters ---------- simulation : Simulation The Simulation to generate a report for state : State The current state of the simulation """ if not self._initialized: self._initial_clock_time = datetime.now() self._initial_simulation_time = state.getTime() self._initial_steps = simulation.currentStep self._initialized = True # depends on [control=['if'], data=[]] steps = simulation.currentStep time = datetime.now() - self._initial_clock_time days = time.total_seconds() / 86400.0 ns = (state.getTime() - self._initial_simulation_time).value_in_unit(u.nanosecond) margin = ' ' * self.margin ns_day = ns / days delta = (self.total_steps - steps) * time.total_seconds() / steps # remove microseconds to have cleaner output remaining = timedelta(seconds=int(delta)) percentage = 100.0 * steps / self.total_steps if ns_day: template = '{}{}/{} steps ({:.1f}%) - {} left @ {:.1f} ns/day \r' # depends on [control=['if'], data=[]] else: template = '{}{}/{} steps ({:.1f}%) \r' report = template.format(margin, steps, self.total_steps, percentage, remaining, ns_day) self._out.write(report) if hasattr(self._out, 'flush'): self._out.flush() # depends on [control=['if'], data=[]]
def generate(self, x, **kwargs): """ Return a tensor that constructs adversarial examples for the given input. Generate uses tf.py_func in order to operate over tensors. :param x: A tensor with the inputs. :param kwargs: See `parse_params` """ self.parse_params(**kwargs) shape = [int(i) for i in x.get_shape().as_list()[1:]] assert self.sess is not None, \ 'Cannot use `generate` when no `sess` was provided' _check_first_dimension(x, 'input') if self.y_target is not None: _check_first_dimension(self.y_target, 'y_target') assert self.image_target is not None, \ 'Require a target image for targeted attack.' _check_first_dimension(self.image_target, 'image_target') # Set shape and d. self.shape = shape self.d = int(np.prod(shape)) # Set binary search threshold. if self.constraint == 'l2': self.theta = self.gamma / np.sqrt(self.d) else: self.theta = self.gamma / self.d # Construct input placeholder and output for decision function. self.input_ph = tf.placeholder( tf_dtype, [None] + list(self.shape), name='input_image') self.logits = self.model.get_logits(self.input_ph) def bapp_wrap(x, target_label, target_image): """ Wrapper to use tensors as input and output. """ return np.array(self._bapp(x, target_label, target_image), dtype=self.np_dtype) if self.y_target is not None: # targeted attack that requires target label and image. wrap = tf.py_func(bapp_wrap, [x[0], self.y_target[0], self.image_target[0]], self.tf_dtype) else: if self.image_target is not None: # untargeted attack with an initialized image. wrap = tf.py_func(lambda x, target_image: bapp_wrap(x, None, target_image), [x[0], self.image_target[0]], self.tf_dtype) else: # untargeted attack without an initialized image. wrap = tf.py_func(lambda x: bapp_wrap(x, None, None), [x[0]], self.tf_dtype) wrap.set_shape(x.get_shape()) return wrap
def function[generate, parameter[self, x]]: constant[ Return a tensor that constructs adversarial examples for the given input. Generate uses tf.py_func in order to operate over tensors. :param x: A tensor with the inputs. :param kwargs: See `parse_params` ] call[name[self].parse_params, parameter[]] variable[shape] assign[=] <ast.ListComp object at 0x7da204345810> assert[compare[name[self].sess is_not constant[None]]] call[name[_check_first_dimension], parameter[name[x], constant[input]]] if compare[name[self].y_target is_not constant[None]] begin[:] call[name[_check_first_dimension], parameter[name[self].y_target, constant[y_target]]] assert[compare[name[self].image_target is_not constant[None]]] call[name[_check_first_dimension], parameter[name[self].image_target, constant[image_target]]] name[self].shape assign[=] name[shape] name[self].d assign[=] call[name[int], parameter[call[name[np].prod, parameter[name[shape]]]]] if compare[name[self].constraint equal[==] constant[l2]] begin[:] name[self].theta assign[=] binary_operation[name[self].gamma / call[name[np].sqrt, parameter[name[self].d]]] name[self].input_ph assign[=] call[name[tf].placeholder, parameter[name[tf_dtype], binary_operation[list[[<ast.Constant object at 0x7da20c990040>]] + call[name[list], parameter[name[self].shape]]]]] name[self].logits assign[=] call[name[self].model.get_logits, parameter[name[self].input_ph]] def function[bapp_wrap, parameter[x, target_label, target_image]]: constant[ Wrapper to use tensors as input and output. 
] return[call[name[np].array, parameter[call[name[self]._bapp, parameter[name[x], name[target_label], name[target_image]]]]]] if compare[name[self].y_target is_not constant[None]] begin[:] variable[wrap] assign[=] call[name[tf].py_func, parameter[name[bapp_wrap], list[[<ast.Subscript object at 0x7da1b1fd4e50>, <ast.Subscript object at 0x7da1b1fd4dc0>, <ast.Subscript object at 0x7da1b1fd48b0>]], name[self].tf_dtype]] call[name[wrap].set_shape, parameter[call[name[x].get_shape, parameter[]]]] return[name[wrap]]
keyword[def] identifier[generate] ( identifier[self] , identifier[x] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[parse_params] (** identifier[kwargs] ) identifier[shape] =[ identifier[int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[x] . identifier[get_shape] (). identifier[as_list] ()[ literal[int] :]] keyword[assert] identifier[self] . identifier[sess] keyword[is] keyword[not] keyword[None] , literal[string] identifier[_check_first_dimension] ( identifier[x] , literal[string] ) keyword[if] identifier[self] . identifier[y_target] keyword[is] keyword[not] keyword[None] : identifier[_check_first_dimension] ( identifier[self] . identifier[y_target] , literal[string] ) keyword[assert] identifier[self] . identifier[image_target] keyword[is] keyword[not] keyword[None] , literal[string] identifier[_check_first_dimension] ( identifier[self] . identifier[image_target] , literal[string] ) identifier[self] . identifier[shape] = identifier[shape] identifier[self] . identifier[d] = identifier[int] ( identifier[np] . identifier[prod] ( identifier[shape] )) keyword[if] identifier[self] . identifier[constraint] == literal[string] : identifier[self] . identifier[theta] = identifier[self] . identifier[gamma] / identifier[np] . identifier[sqrt] ( identifier[self] . identifier[d] ) keyword[else] : identifier[self] . identifier[theta] = identifier[self] . identifier[gamma] / identifier[self] . identifier[d] identifier[self] . identifier[input_ph] = identifier[tf] . identifier[placeholder] ( identifier[tf_dtype] ,[ keyword[None] ]+ identifier[list] ( identifier[self] . identifier[shape] ), identifier[name] = literal[string] ) identifier[self] . identifier[logits] = identifier[self] . identifier[model] . identifier[get_logits] ( identifier[self] . identifier[input_ph] ) keyword[def] identifier[bapp_wrap] ( identifier[x] , identifier[target_label] , identifier[target_image] ): literal[string] keyword[return] identifier[np] . 
identifier[array] ( identifier[self] . identifier[_bapp] ( identifier[x] , identifier[target_label] , identifier[target_image] ), identifier[dtype] = identifier[self] . identifier[np_dtype] ) keyword[if] identifier[self] . identifier[y_target] keyword[is] keyword[not] keyword[None] : identifier[wrap] = identifier[tf] . identifier[py_func] ( identifier[bapp_wrap] , [ identifier[x] [ literal[int] ], identifier[self] . identifier[y_target] [ literal[int] ], identifier[self] . identifier[image_target] [ literal[int] ]], identifier[self] . identifier[tf_dtype] ) keyword[else] : keyword[if] identifier[self] . identifier[image_target] keyword[is] keyword[not] keyword[None] : identifier[wrap] = identifier[tf] . identifier[py_func] ( keyword[lambda] identifier[x] , identifier[target_image] : identifier[bapp_wrap] ( identifier[x] , keyword[None] , identifier[target_image] ), [ identifier[x] [ literal[int] ], identifier[self] . identifier[image_target] [ literal[int] ]], identifier[self] . identifier[tf_dtype] ) keyword[else] : identifier[wrap] = identifier[tf] . identifier[py_func] ( keyword[lambda] identifier[x] : identifier[bapp_wrap] ( identifier[x] , keyword[None] , keyword[None] ), [ identifier[x] [ literal[int] ]], identifier[self] . identifier[tf_dtype] ) identifier[wrap] . identifier[set_shape] ( identifier[x] . identifier[get_shape] ()) keyword[return] identifier[wrap]
def generate(self, x, **kwargs): """ Return a tensor that constructs adversarial examples for the given input. Generate uses tf.py_func in order to operate over tensors. :param x: A tensor with the inputs. :param kwargs: See `parse_params` """ self.parse_params(**kwargs) shape = [int(i) for i in x.get_shape().as_list()[1:]] assert self.sess is not None, 'Cannot use `generate` when no `sess` was provided' _check_first_dimension(x, 'input') if self.y_target is not None: _check_first_dimension(self.y_target, 'y_target') assert self.image_target is not None, 'Require a target image for targeted attack.' _check_first_dimension(self.image_target, 'image_target') # depends on [control=['if'], data=[]] # Set shape and d. self.shape = shape self.d = int(np.prod(shape)) # Set binary search threshold. if self.constraint == 'l2': self.theta = self.gamma / np.sqrt(self.d) # depends on [control=['if'], data=[]] else: self.theta = self.gamma / self.d # Construct input placeholder and output for decision function. self.input_ph = tf.placeholder(tf_dtype, [None] + list(self.shape), name='input_image') self.logits = self.model.get_logits(self.input_ph) def bapp_wrap(x, target_label, target_image): """ Wrapper to use tensors as input and output. """ return np.array(self._bapp(x, target_label, target_image), dtype=self.np_dtype) if self.y_target is not None: # targeted attack that requires target label and image. wrap = tf.py_func(bapp_wrap, [x[0], self.y_target[0], self.image_target[0]], self.tf_dtype) # depends on [control=['if'], data=[]] elif self.image_target is not None: # untargeted attack with an initialized image. wrap = tf.py_func(lambda x, target_image: bapp_wrap(x, None, target_image), [x[0], self.image_target[0]], self.tf_dtype) # depends on [control=['if'], data=[]] else: # untargeted attack without an initialized image. wrap = tf.py_func(lambda x: bapp_wrap(x, None, None), [x[0]], self.tf_dtype) wrap.set_shape(x.get_shape()) return wrap
def set(self, column, value, useMethod=True, **context): """ Sets the value for this record at the inputted column name. If the columnName provided doesn't exist within the schema, then the ColumnNotFound error will be raised. :param columnName | <str> value | <variant> :return <bool> changed """ col = self.schema().column(column, raise_=False) if col is None: # allow setting of collections as well collector = self.schema().collector(column) if collector: my_context = self.context() for k, v in my_context.raw_values.items(): if k not in orb.Context.QueryFields: context.setdefault(k, v) sub_context = orb.Context(**context) method = collector.settermethod() if method and useMethod: return method(self, value, context=sub_context) else: records = self.get(collector.name(), context=sub_context) records.update(value, useMethod=useMethod, context=sub_context) # remove any preloaded values from the collector self.__preload.pop(collector.name(), None) return records else: raise errors.ColumnNotFound(schema=self.schema(), column=column) elif col.testFlag(col.Flags.ReadOnly): raise errors.ColumnReadOnly(schema=self.schema(), column=column) context = self.context(**context) if useMethod: method = col.settermethod() if method: keywords = list(funcutil.extract_keywords(method)) if 'locale' in keywords: return method(self, value, locale=context.locale) else: return method(self, value) if self.isRecord() and self.__delayed: self.__delayed = False self.read() with WriteLocker(self.__dataLock): orig, curr = self.__values.get(col.name(), (None, None)) value = col.store(value, context) # update the context based on the locale value if col.testFlag(col.Flags.I18n) and isinstance(curr, dict) and isinstance(value, dict): new_value = curr.copy() new_value.update(value) value = new_value try: change = curr != value except TypeError: change = True if change: self.__values[col.name()] = (orig, value) # broadcast the change event if change: if col.testFlag(col.Flags.I18n) and context.locale 
!= 'all': old_value = curr.get(context.locale) if isinstance(curr, dict) else curr new_value = value.get(context.locale) if isinstance(value, dict) else value else: old_value = curr new_value = value event = orb.events.ChangeEvent(record=self, column=col, old=old_value, value=new_value) if self.processEvent(event): self.onChange(event) if event.preventDefault: with WriteLocker(self.__dataLock): orig, _ = self.__values.get(col.name(), (None, None)) self.__values[col.name()] = (orig, curr) return False else: return change else: return False
def function[set, parameter[self, column, value, useMethod]]: constant[ Sets the value for this record at the inputted column name. If the columnName provided doesn't exist within the schema, then the ColumnNotFound error will be raised. :param columnName | <str> value | <variant> :return <bool> changed ] variable[col] assign[=] call[call[name[self].schema, parameter[]].column, parameter[name[column]]] if compare[name[col] is constant[None]] begin[:] variable[collector] assign[=] call[call[name[self].schema, parameter[]].collector, parameter[name[column]]] if name[collector] begin[:] variable[my_context] assign[=] call[name[self].context, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c6aa710>, <ast.Name object at 0x7da20c6ab0d0>]]] in starred[call[name[my_context].raw_values.items, parameter[]]] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[orb].Context.QueryFields] begin[:] call[name[context].setdefault, parameter[name[k], name[v]]] variable[sub_context] assign[=] call[name[orb].Context, parameter[]] variable[method] assign[=] call[name[collector].settermethod, parameter[]] if <ast.BoolOp object at 0x7da20c6a98d0> begin[:] return[call[name[method], parameter[name[self], name[value]]]] variable[context] assign[=] call[name[self].context, parameter[]] if name[useMethod] begin[:] variable[method] assign[=] call[name[col].settermethod, parameter[]] if name[method] begin[:] variable[keywords] assign[=] call[name[list], parameter[call[name[funcutil].extract_keywords, parameter[name[method]]]]] if compare[constant[locale] in name[keywords]] begin[:] return[call[name[method], parameter[name[self], name[value]]]] if <ast.BoolOp object at 0x7da20c6a9ba0> begin[:] name[self].__delayed assign[=] constant[False] call[name[self].read, parameter[]] with call[name[WriteLocker], parameter[name[self].__dataLock]] begin[:] <ast.Tuple object at 0x7da20c6a9db0> assign[=] call[name[self].__values.get, parameter[call[name[col].name, parameter[]], 
tuple[[<ast.Constant object at 0x7da20c6a8220>, <ast.Constant object at 0x7da20c6a89d0>]]]] variable[value] assign[=] call[name[col].store, parameter[name[value], name[context]]] if <ast.BoolOp object at 0x7da20c6ab640> begin[:] variable[new_value] assign[=] call[name[curr].copy, parameter[]] call[name[new_value].update, parameter[name[value]]] variable[value] assign[=] name[new_value] <ast.Try object at 0x7da20c6a8bb0> if name[change] begin[:] call[name[self].__values][call[name[col].name, parameter[]]] assign[=] tuple[[<ast.Name object at 0x7da20c6a9c60>, <ast.Name object at 0x7da20c6aa680>]] if name[change] begin[:] if <ast.BoolOp object at 0x7da20c6a8550> begin[:] variable[old_value] assign[=] <ast.IfExp object at 0x7da20c6aa4a0> variable[new_value] assign[=] <ast.IfExp object at 0x7da20c6a8d00> variable[event] assign[=] call[name[orb].events.ChangeEvent, parameter[]] if call[name[self].processEvent, parameter[name[event]]] begin[:] call[name[self].onChange, parameter[name[event]]] if name[event].preventDefault begin[:] with call[name[WriteLocker], parameter[name[self].__dataLock]] begin[:] <ast.Tuple object at 0x7da2054a63e0> assign[=] call[name[self].__values.get, parameter[call[name[col].name, parameter[]], tuple[[<ast.Constant object at 0x7da2054a5bd0>, <ast.Constant object at 0x7da2054a71f0>]]]] call[name[self].__values][call[name[col].name, parameter[]]] assign[=] tuple[[<ast.Name object at 0x7da2054a5ff0>, <ast.Name object at 0x7da2054a6560>]] return[constant[False]]
keyword[def] identifier[set] ( identifier[self] , identifier[column] , identifier[value] , identifier[useMethod] = keyword[True] ,** identifier[context] ): literal[string] identifier[col] = identifier[self] . identifier[schema] (). identifier[column] ( identifier[column] , identifier[raise_] = keyword[False] ) keyword[if] identifier[col] keyword[is] keyword[None] : identifier[collector] = identifier[self] . identifier[schema] (). identifier[collector] ( identifier[column] ) keyword[if] identifier[collector] : identifier[my_context] = identifier[self] . identifier[context] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[my_context] . identifier[raw_values] . identifier[items] (): keyword[if] identifier[k] keyword[not] keyword[in] identifier[orb] . identifier[Context] . identifier[QueryFields] : identifier[context] . identifier[setdefault] ( identifier[k] , identifier[v] ) identifier[sub_context] = identifier[orb] . identifier[Context] (** identifier[context] ) identifier[method] = identifier[collector] . identifier[settermethod] () keyword[if] identifier[method] keyword[and] identifier[useMethod] : keyword[return] identifier[method] ( identifier[self] , identifier[value] , identifier[context] = identifier[sub_context] ) keyword[else] : identifier[records] = identifier[self] . identifier[get] ( identifier[collector] . identifier[name] (), identifier[context] = identifier[sub_context] ) identifier[records] . identifier[update] ( identifier[value] , identifier[useMethod] = identifier[useMethod] , identifier[context] = identifier[sub_context] ) identifier[self] . identifier[__preload] . identifier[pop] ( identifier[collector] . identifier[name] (), keyword[None] ) keyword[return] identifier[records] keyword[else] : keyword[raise] identifier[errors] . identifier[ColumnNotFound] ( identifier[schema] = identifier[self] . identifier[schema] (), identifier[column] = identifier[column] ) keyword[elif] identifier[col] . 
identifier[testFlag] ( identifier[col] . identifier[Flags] . identifier[ReadOnly] ): keyword[raise] identifier[errors] . identifier[ColumnReadOnly] ( identifier[schema] = identifier[self] . identifier[schema] (), identifier[column] = identifier[column] ) identifier[context] = identifier[self] . identifier[context] (** identifier[context] ) keyword[if] identifier[useMethod] : identifier[method] = identifier[col] . identifier[settermethod] () keyword[if] identifier[method] : identifier[keywords] = identifier[list] ( identifier[funcutil] . identifier[extract_keywords] ( identifier[method] )) keyword[if] literal[string] keyword[in] identifier[keywords] : keyword[return] identifier[method] ( identifier[self] , identifier[value] , identifier[locale] = identifier[context] . identifier[locale] ) keyword[else] : keyword[return] identifier[method] ( identifier[self] , identifier[value] ) keyword[if] identifier[self] . identifier[isRecord] () keyword[and] identifier[self] . identifier[__delayed] : identifier[self] . identifier[__delayed] = keyword[False] identifier[self] . identifier[read] () keyword[with] identifier[WriteLocker] ( identifier[self] . identifier[__dataLock] ): identifier[orig] , identifier[curr] = identifier[self] . identifier[__values] . identifier[get] ( identifier[col] . identifier[name] (),( keyword[None] , keyword[None] )) identifier[value] = identifier[col] . identifier[store] ( identifier[value] , identifier[context] ) keyword[if] identifier[col] . identifier[testFlag] ( identifier[col] . identifier[Flags] . identifier[I18n] ) keyword[and] identifier[isinstance] ( identifier[curr] , identifier[dict] ) keyword[and] identifier[isinstance] ( identifier[value] , identifier[dict] ): identifier[new_value] = identifier[curr] . identifier[copy] () identifier[new_value] . 
identifier[update] ( identifier[value] ) identifier[value] = identifier[new_value] keyword[try] : identifier[change] = identifier[curr] != identifier[value] keyword[except] identifier[TypeError] : identifier[change] = keyword[True] keyword[if] identifier[change] : identifier[self] . identifier[__values] [ identifier[col] . identifier[name] ()]=( identifier[orig] , identifier[value] ) keyword[if] identifier[change] : keyword[if] identifier[col] . identifier[testFlag] ( identifier[col] . identifier[Flags] . identifier[I18n] ) keyword[and] identifier[context] . identifier[locale] != literal[string] : identifier[old_value] = identifier[curr] . identifier[get] ( identifier[context] . identifier[locale] ) keyword[if] identifier[isinstance] ( identifier[curr] , identifier[dict] ) keyword[else] identifier[curr] identifier[new_value] = identifier[value] . identifier[get] ( identifier[context] . identifier[locale] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[else] identifier[value] keyword[else] : identifier[old_value] = identifier[curr] identifier[new_value] = identifier[value] identifier[event] = identifier[orb] . identifier[events] . identifier[ChangeEvent] ( identifier[record] = identifier[self] , identifier[column] = identifier[col] , identifier[old] = identifier[old_value] , identifier[value] = identifier[new_value] ) keyword[if] identifier[self] . identifier[processEvent] ( identifier[event] ): identifier[self] . identifier[onChange] ( identifier[event] ) keyword[if] identifier[event] . identifier[preventDefault] : keyword[with] identifier[WriteLocker] ( identifier[self] . identifier[__dataLock] ): identifier[orig] , identifier[_] = identifier[self] . identifier[__values] . identifier[get] ( identifier[col] . identifier[name] (),( keyword[None] , keyword[None] )) identifier[self] . identifier[__values] [ identifier[col] . 
identifier[name] ()]=( identifier[orig] , identifier[curr] ) keyword[return] keyword[False] keyword[else] : keyword[return] identifier[change] keyword[else] : keyword[return] keyword[False]
def set(self, column, value, useMethod=True, **context): """ Sets the value for this record at the inputted column name. If the columnName provided doesn't exist within the schema, then the ColumnNotFound error will be raised. :param columnName | <str> value | <variant> :return <bool> changed """ col = self.schema().column(column, raise_=False) if col is None: # allow setting of collections as well collector = self.schema().collector(column) if collector: my_context = self.context() for (k, v) in my_context.raw_values.items(): if k not in orb.Context.QueryFields: context.setdefault(k, v) # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]] sub_context = orb.Context(**context) method = collector.settermethod() if method and useMethod: return method(self, value, context=sub_context) # depends on [control=['if'], data=[]] else: records = self.get(collector.name(), context=sub_context) records.update(value, useMethod=useMethod, context=sub_context) # remove any preloaded values from the collector self.__preload.pop(collector.name(), None) return records # depends on [control=['if'], data=[]] else: raise errors.ColumnNotFound(schema=self.schema(), column=column) # depends on [control=['if'], data=[]] elif col.testFlag(col.Flags.ReadOnly): raise errors.ColumnReadOnly(schema=self.schema(), column=column) # depends on [control=['if'], data=[]] context = self.context(**context) if useMethod: method = col.settermethod() if method: keywords = list(funcutil.extract_keywords(method)) if 'locale' in keywords: return method(self, value, locale=context.locale) # depends on [control=['if'], data=[]] else: return method(self, value) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.isRecord() and self.__delayed: self.__delayed = False self.read() # depends on [control=['if'], data=[]] with WriteLocker(self.__dataLock): (orig, curr) = self.__values.get(col.name(), (None, None)) value = col.store(value, context) # update 
the context based on the locale value if col.testFlag(col.Flags.I18n) and isinstance(curr, dict) and isinstance(value, dict): new_value = curr.copy() new_value.update(value) value = new_value # depends on [control=['if'], data=[]] try: change = curr != value # depends on [control=['try'], data=[]] except TypeError: change = True # depends on [control=['except'], data=[]] if change: self.__values[col.name()] = (orig, value) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] # broadcast the change event if change: if col.testFlag(col.Flags.I18n) and context.locale != 'all': old_value = curr.get(context.locale) if isinstance(curr, dict) else curr new_value = value.get(context.locale) if isinstance(value, dict) else value # depends on [control=['if'], data=[]] else: old_value = curr new_value = value event = orb.events.ChangeEvent(record=self, column=col, old=old_value, value=new_value) if self.processEvent(event): self.onChange(event) # depends on [control=['if'], data=[]] if event.preventDefault: with WriteLocker(self.__dataLock): (orig, _) = self.__values.get(col.name(), (None, None)) self.__values[col.name()] = (orig, curr) # depends on [control=['with'], data=[]] return False # depends on [control=['if'], data=[]] else: return change # depends on [control=['if'], data=[]] else: return False
def to_numpy_matrix(self, variable_order=None): """Convert a binary quadratic model to NumPy 2D array. Args: variable_order (list, optional): If provided, indexes the rows/columns of the NumPy array. If `variable_order` includes any variables not in the binary quadratic model, these are added to the NumPy array. Returns: :class:`numpy.ndarray`: The binary quadratic model as a NumPy 2D array. Note that the binary quadratic model is converted to :class:`~.Vartype.BINARY` vartype. Notes: The matrix representation of a binary quadratic model only makes sense for binary models. For a binary sample x, the energy of the model is given by: .. math:: E(x) = x^T Q x The offset is dropped when converting to a NumPy array. Examples: This example converts a binary quadratic model to NumPy array format while ordering variables and adding one ('d'). >>> import dimod >>> import numpy as np ... >>> model = dimod.BinaryQuadraticModel({'a': 1, 'b': -1, 'c': .5}, ... {('a', 'b'): .5, ('b', 'c'): 1.5}, ... 1.4, ... dimod.BINARY) >>> model.to_numpy_matrix(variable_order=['d', 'c', 'b', 'a']) array([[ 0. , 0. , 0. , 0. ], [ 0. , 0.5, 1.5, 0. ], [ 0. , 0. , -1. , 0.5], [ 0. , 0. , 0. , 1. 
]]) """ import numpy as np if variable_order is None: # just use the existing variable labels, assuming that they are [0, N) num_variables = len(self) mat = np.zeros((num_variables, num_variables), dtype=float) try: for v, bias in iteritems(self.binary.linear): mat[v, v] = bias except IndexError: raise ValueError(("if 'variable_order' is not provided, binary quadratic model must be " "index labeled [0, ..., N-1]")) for (u, v), bias in iteritems(self.binary.quadratic): if u < v: mat[u, v] = bias else: mat[v, u] = bias else: num_variables = len(variable_order) idx = {v: i for i, v in enumerate(variable_order)} mat = np.zeros((num_variables, num_variables), dtype=float) try: for v, bias in iteritems(self.binary.linear): mat[idx[v], idx[v]] = bias except KeyError as e: raise ValueError(("variable {} is missing from variable_order".format(e))) for (u, v), bias in iteritems(self.binary.quadratic): iu, iv = idx[u], idx[v] if iu < iv: mat[iu, iv] = bias else: mat[iv, iu] = bias return mat
def function[to_numpy_matrix, parameter[self, variable_order]]: constant[Convert a binary quadratic model to NumPy 2D array. Args: variable_order (list, optional): If provided, indexes the rows/columns of the NumPy array. If `variable_order` includes any variables not in the binary quadratic model, these are added to the NumPy array. Returns: :class:`numpy.ndarray`: The binary quadratic model as a NumPy 2D array. Note that the binary quadratic model is converted to :class:`~.Vartype.BINARY` vartype. Notes: The matrix representation of a binary quadratic model only makes sense for binary models. For a binary sample x, the energy of the model is given by: .. math:: E(x) = x^T Q x The offset is dropped when converting to a NumPy array. Examples: This example converts a binary quadratic model to NumPy array format while ordering variables and adding one ('d'). >>> import dimod >>> import numpy as np ... >>> model = dimod.BinaryQuadraticModel({'a': 1, 'b': -1, 'c': .5}, ... {('a', 'b'): .5, ('b', 'c'): 1.5}, ... 1.4, ... dimod.BINARY) >>> model.to_numpy_matrix(variable_order=['d', 'c', 'b', 'a']) array([[ 0. , 0. , 0. , 0. ], [ 0. , 0.5, 1.5, 0. ], [ 0. , 0. , -1. , 0.5], [ 0. , 0. , 0. , 1. ]]) ] import module[numpy] as alias[np] if compare[name[variable_order] is constant[None]] begin[:] variable[num_variables] assign[=] call[name[len], parameter[name[self]]] variable[mat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b07af580>, <ast.Name object at 0x7da1b07adf00>]]]] <ast.Try object at 0x7da1b07acee0> for taget[tuple[[<ast.Tuple object at 0x7da1b07ad720>, <ast.Name object at 0x7da1b07ac3d0>]]] in starred[call[name[iteritems], parameter[name[self].binary.quadratic]]] begin[:] if compare[name[u] less[<] name[v]] begin[:] call[name[mat]][tuple[[<ast.Name object at 0x7da1b072d150>, <ast.Name object at 0x7da1b072e1a0>]]] assign[=] name[bias] return[name[mat]]
keyword[def] identifier[to_numpy_matrix] ( identifier[self] , identifier[variable_order] = keyword[None] ): literal[string] keyword[import] identifier[numpy] keyword[as] identifier[np] keyword[if] identifier[variable_order] keyword[is] keyword[None] : identifier[num_variables] = identifier[len] ( identifier[self] ) identifier[mat] = identifier[np] . identifier[zeros] (( identifier[num_variables] , identifier[num_variables] ), identifier[dtype] = identifier[float] ) keyword[try] : keyword[for] identifier[v] , identifier[bias] keyword[in] identifier[iteritems] ( identifier[self] . identifier[binary] . identifier[linear] ): identifier[mat] [ identifier[v] , identifier[v] ]= identifier[bias] keyword[except] identifier[IndexError] : keyword[raise] identifier[ValueError] (( literal[string] literal[string] )) keyword[for] ( identifier[u] , identifier[v] ), identifier[bias] keyword[in] identifier[iteritems] ( identifier[self] . identifier[binary] . identifier[quadratic] ): keyword[if] identifier[u] < identifier[v] : identifier[mat] [ identifier[u] , identifier[v] ]= identifier[bias] keyword[else] : identifier[mat] [ identifier[v] , identifier[u] ]= identifier[bias] keyword[else] : identifier[num_variables] = identifier[len] ( identifier[variable_order] ) identifier[idx] ={ identifier[v] : identifier[i] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[variable_order] )} identifier[mat] = identifier[np] . identifier[zeros] (( identifier[num_variables] , identifier[num_variables] ), identifier[dtype] = identifier[float] ) keyword[try] : keyword[for] identifier[v] , identifier[bias] keyword[in] identifier[iteritems] ( identifier[self] . identifier[binary] . identifier[linear] ): identifier[mat] [ identifier[idx] [ identifier[v] ], identifier[idx] [ identifier[v] ]]= identifier[bias] keyword[except] identifier[KeyError] keyword[as] identifier[e] : keyword[raise] identifier[ValueError] (( literal[string] . 
identifier[format] ( identifier[e] ))) keyword[for] ( identifier[u] , identifier[v] ), identifier[bias] keyword[in] identifier[iteritems] ( identifier[self] . identifier[binary] . identifier[quadratic] ): identifier[iu] , identifier[iv] = identifier[idx] [ identifier[u] ], identifier[idx] [ identifier[v] ] keyword[if] identifier[iu] < identifier[iv] : identifier[mat] [ identifier[iu] , identifier[iv] ]= identifier[bias] keyword[else] : identifier[mat] [ identifier[iv] , identifier[iu] ]= identifier[bias] keyword[return] identifier[mat]
def to_numpy_matrix(self, variable_order=None): """Convert a binary quadratic model to NumPy 2D array. Args: variable_order (list, optional): If provided, indexes the rows/columns of the NumPy array. If `variable_order` includes any variables not in the binary quadratic model, these are added to the NumPy array. Returns: :class:`numpy.ndarray`: The binary quadratic model as a NumPy 2D array. Note that the binary quadratic model is converted to :class:`~.Vartype.BINARY` vartype. Notes: The matrix representation of a binary quadratic model only makes sense for binary models. For a binary sample x, the energy of the model is given by: .. math:: E(x) = x^T Q x The offset is dropped when converting to a NumPy array. Examples: This example converts a binary quadratic model to NumPy array format while ordering variables and adding one ('d'). >>> import dimod >>> import numpy as np ... >>> model = dimod.BinaryQuadraticModel({'a': 1, 'b': -1, 'c': .5}, ... {('a', 'b'): .5, ('b', 'c'): 1.5}, ... 1.4, ... dimod.BINARY) >>> model.to_numpy_matrix(variable_order=['d', 'c', 'b', 'a']) array([[ 0. , 0. , 0. , 0. ], [ 0. , 0.5, 1.5, 0. ], [ 0. , 0. , -1. , 0.5], [ 0. , 0. , 0. , 1. 
]]) """ import numpy as np if variable_order is None: # just use the existing variable labels, assuming that they are [0, N) num_variables = len(self) mat = np.zeros((num_variables, num_variables), dtype=float) try: for (v, bias) in iteritems(self.binary.linear): mat[v, v] = bias # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] except IndexError: raise ValueError("if 'variable_order' is not provided, binary quadratic model must be index labeled [0, ..., N-1]") # depends on [control=['except'], data=[]] for ((u, v), bias) in iteritems(self.binary.quadratic): if u < v: mat[u, v] = bias # depends on [control=['if'], data=['u', 'v']] else: mat[v, u] = bias # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: num_variables = len(variable_order) idx = {v: i for (i, v) in enumerate(variable_order)} mat = np.zeros((num_variables, num_variables), dtype=float) try: for (v, bias) in iteritems(self.binary.linear): mat[idx[v], idx[v]] = bias # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] except KeyError as e: raise ValueError('variable {} is missing from variable_order'.format(e)) # depends on [control=['except'], data=['e']] for ((u, v), bias) in iteritems(self.binary.quadratic): (iu, iv) = (idx[u], idx[v]) if iu < iv: mat[iu, iv] = bias # depends on [control=['if'], data=['iu', 'iv']] else: mat[iv, iu] = bias # depends on [control=['for'], data=[]] return mat
def start_output (self): """Write start of checking info.""" super(HtmlLogger, self).start_output() header = { "encoding": self.get_charset_encoding(), "title": configuration.App, "body": self.colorbackground, "link": self.colorlink, "vlink": self.colorlink, "alink": self.colorlink, "url": self.colorurl, "error": self.colorerror, "valid": self.colorok, "warning": self.colorwarning, } self.write(HTML_HEADER % header) self.comment("Generated by %s" % configuration.App) if self.has_part('intro'): self.write(u"<h2>"+configuration.App+ "</h2><br/><blockquote>"+ configuration.Freeware+"<br/><br/>"+ (_("Start checking at %s") % strformat.strtime(self.starttime))+ os.linesep+"<br/>") self.check_date() self.flush()
def function[start_output, parameter[self]]: constant[Write start of checking info.] call[call[name[super], parameter[name[HtmlLogger], name[self]]].start_output, parameter[]] variable[header] assign[=] dictionary[[<ast.Constant object at 0x7da18c4ce320>, <ast.Constant object at 0x7da18c4cf820>, <ast.Constant object at 0x7da18c4ccc70>, <ast.Constant object at 0x7da18c4cdf90>, <ast.Constant object at 0x7da18c4cf8b0>, <ast.Constant object at 0x7da18c4cceb0>, <ast.Constant object at 0x7da18c4cd120>, <ast.Constant object at 0x7da18c4cc2b0>, <ast.Constant object at 0x7da18c4cf640>, <ast.Constant object at 0x7da18c4ccf10>], [<ast.Call object at 0x7da18c4cde70>, <ast.Attribute object at 0x7da18c4cca00>, <ast.Attribute object at 0x7da18c4cded0>, <ast.Attribute object at 0x7da18c4cc8b0>, <ast.Attribute object at 0x7da18c4cc190>, <ast.Attribute object at 0x7da18c4ce470>, <ast.Attribute object at 0x7da18c4cd210>, <ast.Attribute object at 0x7da18c4cf6d0>, <ast.Attribute object at 0x7da18c4cffa0>, <ast.Attribute object at 0x7da18c4cd0c0>]] call[name[self].write, parameter[binary_operation[name[HTML_HEADER] <ast.Mod object at 0x7da2590d6920> name[header]]]] call[name[self].comment, parameter[binary_operation[constant[Generated by %s] <ast.Mod object at 0x7da2590d6920> name[configuration].App]]] if call[name[self].has_part, parameter[constant[intro]]] begin[:] call[name[self].write, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[<h2>] + name[configuration].App] + constant[</h2><br/><blockquote>]] + name[configuration].Freeware] + constant[<br/><br/>]] + binary_operation[call[name[_], parameter[constant[Start checking at %s]]] <ast.Mod object at 0x7da2590d6920> call[name[strformat].strtime, parameter[name[self].starttime]]]] + name[os].linesep] + constant[<br/>]]]] call[name[self].check_date, parameter[]] call[name[self].flush, parameter[]]
keyword[def] identifier[start_output] ( identifier[self] ): literal[string] identifier[super] ( identifier[HtmlLogger] , identifier[self] ). identifier[start_output] () identifier[header] ={ literal[string] : identifier[self] . identifier[get_charset_encoding] (), literal[string] : identifier[configuration] . identifier[App] , literal[string] : identifier[self] . identifier[colorbackground] , literal[string] : identifier[self] . identifier[colorlink] , literal[string] : identifier[self] . identifier[colorlink] , literal[string] : identifier[self] . identifier[colorlink] , literal[string] : identifier[self] . identifier[colorurl] , literal[string] : identifier[self] . identifier[colorerror] , literal[string] : identifier[self] . identifier[colorok] , literal[string] : identifier[self] . identifier[colorwarning] , } identifier[self] . identifier[write] ( identifier[HTML_HEADER] % identifier[header] ) identifier[self] . identifier[comment] ( literal[string] % identifier[configuration] . identifier[App] ) keyword[if] identifier[self] . identifier[has_part] ( literal[string] ): identifier[self] . identifier[write] ( literal[string] + identifier[configuration] . identifier[App] + literal[string] + identifier[configuration] . identifier[Freeware] + literal[string] + ( identifier[_] ( literal[string] )% identifier[strformat] . identifier[strtime] ( identifier[self] . identifier[starttime] ))+ identifier[os] . identifier[linesep] + literal[string] ) identifier[self] . identifier[check_date] () identifier[self] . identifier[flush] ()
def start_output(self): """Write start of checking info.""" super(HtmlLogger, self).start_output() header = {'encoding': self.get_charset_encoding(), 'title': configuration.App, 'body': self.colorbackground, 'link': self.colorlink, 'vlink': self.colorlink, 'alink': self.colorlink, 'url': self.colorurl, 'error': self.colorerror, 'valid': self.colorok, 'warning': self.colorwarning} self.write(HTML_HEADER % header) self.comment('Generated by %s' % configuration.App) if self.has_part('intro'): self.write(u'<h2>' + configuration.App + '</h2><br/><blockquote>' + configuration.Freeware + '<br/><br/>' + _('Start checking at %s') % strformat.strtime(self.starttime) + os.linesep + '<br/>') self.check_date() # depends on [control=['if'], data=[]] self.flush()
def requestFields(self, field_names, required=False, strict=False): """Add the given list of fields to the request @param field_names: The simple registration data fields to request @type field_names: [str] @param required: Whether these values should be presented to the user as required @param strict: whether to raise an exception when a field is added to a request more than once @raise ValueError: when a field requested is not a simple registration field or strict is set and a field was requested more than once """ if isinstance(field_names, basestring): raise TypeError('Fields should be passed as a list of ' 'strings (not %r)' % (type(field_names),)) for field_name in field_names: self.requestField(field_name, required, strict=strict)
def function[requestFields, parameter[self, field_names, required, strict]]: constant[Add the given list of fields to the request @param field_names: The simple registration data fields to request @type field_names: [str] @param required: Whether these values should be presented to the user as required @param strict: whether to raise an exception when a field is added to a request more than once @raise ValueError: when a field requested is not a simple registration field or strict is set and a field was requested more than once ] if call[name[isinstance], parameter[name[field_names], name[basestring]]] begin[:] <ast.Raise object at 0x7da18dc04d90> for taget[name[field_name]] in starred[name[field_names]] begin[:] call[name[self].requestField, parameter[name[field_name], name[required]]]
keyword[def] identifier[requestFields] ( identifier[self] , identifier[field_names] , identifier[required] = keyword[False] , identifier[strict] = keyword[False] ): literal[string] keyword[if] identifier[isinstance] ( identifier[field_names] , identifier[basestring] ): keyword[raise] identifier[TypeError] ( literal[string] literal[string] %( identifier[type] ( identifier[field_names] ),)) keyword[for] identifier[field_name] keyword[in] identifier[field_names] : identifier[self] . identifier[requestField] ( identifier[field_name] , identifier[required] , identifier[strict] = identifier[strict] )
def requestFields(self, field_names, required=False, strict=False): """Add the given list of fields to the request @param field_names: The simple registration data fields to request @type field_names: [str] @param required: Whether these values should be presented to the user as required @param strict: whether to raise an exception when a field is added to a request more than once @raise ValueError: when a field requested is not a simple registration field or strict is set and a field was requested more than once """ if isinstance(field_names, basestring): raise TypeError('Fields should be passed as a list of strings (not %r)' % (type(field_names),)) # depends on [control=['if'], data=[]] for field_name in field_names: self.requestField(field_name, required, strict=strict) # depends on [control=['for'], data=['field_name']]
def next_child(self, child_pid): """Get the next child PID in the PID relation.""" relation = self._get_child_relation(child_pid) if relation.index is not None: return self.children.filter( PIDRelation.index > relation.index ).ordered(ord='asc').first() else: return None
def function[next_child, parameter[self, child_pid]]: constant[Get the next child PID in the PID relation.] variable[relation] assign[=] call[name[self]._get_child_relation, parameter[name[child_pid]]] if compare[name[relation].index is_not constant[None]] begin[:] return[call[call[call[name[self].children.filter, parameter[compare[name[PIDRelation].index greater[>] name[relation].index]]].ordered, parameter[]].first, parameter[]]]
keyword[def] identifier[next_child] ( identifier[self] , identifier[child_pid] ): literal[string] identifier[relation] = identifier[self] . identifier[_get_child_relation] ( identifier[child_pid] ) keyword[if] identifier[relation] . identifier[index] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[children] . identifier[filter] ( identifier[PIDRelation] . identifier[index] > identifier[relation] . identifier[index] ). identifier[ordered] ( identifier[ord] = literal[string] ). identifier[first] () keyword[else] : keyword[return] keyword[None]
def next_child(self, child_pid): """Get the next child PID in the PID relation.""" relation = self._get_child_relation(child_pid) if relation.index is not None: return self.children.filter(PIDRelation.index > relation.index).ordered(ord='asc').first() # depends on [control=['if'], data=[]] else: return None
def flush(self, file=str()): """ Flushes the updated file content to the given *file*. .. note:: Overwrites an existing file. :param str file: name and location of the file. Default is the original file. """ if file: Path(file).write_bytes(self._cache) else: self.path.write_bytes(self._cache)
def function[flush, parameter[self, file]]: constant[ Flushes the updated file content to the given *file*. .. note:: Overwrites an existing file. :param str file: name and location of the file. Default is the original file. ] if name[file] begin[:] call[call[name[Path], parameter[name[file]]].write_bytes, parameter[name[self]._cache]]
keyword[def] identifier[flush] ( identifier[self] , identifier[file] = identifier[str] ()): literal[string] keyword[if] identifier[file] : identifier[Path] ( identifier[file] ). identifier[write_bytes] ( identifier[self] . identifier[_cache] ) keyword[else] : identifier[self] . identifier[path] . identifier[write_bytes] ( identifier[self] . identifier[_cache] )
def flush(self, file=str()): """ Flushes the updated file content to the given *file*. .. note:: Overwrites an existing file. :param str file: name and location of the file. Default is the original file. """ if file: Path(file).write_bytes(self._cache) # depends on [control=['if'], data=[]] else: self.path.write_bytes(self._cache)
def mktempfile(self, data=None, prefix='ipython_edit_'): """Make a new tempfile and return its filename. This makes a call to tempfile.mktemp, but it registers the created filename internally so ipython cleans it up at exit time. Optional inputs: - data(None): if data is given, it gets written out to the temp file immediately, and the file is closed again.""" filename = tempfile.mktemp('.py', prefix) self.tempfiles.append(filename) if data: tmp_file = open(filename,'w') tmp_file.write(data) tmp_file.close() return filename
def function[mktempfile, parameter[self, data, prefix]]: constant[Make a new tempfile and return its filename. This makes a call to tempfile.mktemp, but it registers the created filename internally so ipython cleans it up at exit time. Optional inputs: - data(None): if data is given, it gets written out to the temp file immediately, and the file is closed again.] variable[filename] assign[=] call[name[tempfile].mktemp, parameter[constant[.py], name[prefix]]] call[name[self].tempfiles.append, parameter[name[filename]]] if name[data] begin[:] variable[tmp_file] assign[=] call[name[open], parameter[name[filename], constant[w]]] call[name[tmp_file].write, parameter[name[data]]] call[name[tmp_file].close, parameter[]] return[name[filename]]
keyword[def] identifier[mktempfile] ( identifier[self] , identifier[data] = keyword[None] , identifier[prefix] = literal[string] ): literal[string] identifier[filename] = identifier[tempfile] . identifier[mktemp] ( literal[string] , identifier[prefix] ) identifier[self] . identifier[tempfiles] . identifier[append] ( identifier[filename] ) keyword[if] identifier[data] : identifier[tmp_file] = identifier[open] ( identifier[filename] , literal[string] ) identifier[tmp_file] . identifier[write] ( identifier[data] ) identifier[tmp_file] . identifier[close] () keyword[return] identifier[filename]
def mktempfile(self, data=None, prefix='ipython_edit_'): """Make a new tempfile and return its filename. This makes a call to tempfile.mktemp, but it registers the created filename internally so ipython cleans it up at exit time. Optional inputs: - data(None): if data is given, it gets written out to the temp file immediately, and the file is closed again.""" filename = tempfile.mktemp('.py', prefix) self.tempfiles.append(filename) if data: tmp_file = open(filename, 'w') tmp_file.write(data) tmp_file.close() # depends on [control=['if'], data=[]] return filename
def ext_pillar(minion_id, # pylint: disable=W0613 pillar, # pylint: disable=W0613 config_file): ''' Execute LDAP searches and return the aggregated data ''' config_template = None try: config_template = _render_template(config_file) except jinja2.exceptions.TemplateNotFound: log.debug('pillar_ldap: missing configuration file %s', config_file) except Exception: log.debug('pillar_ldap: failed to render template for %s', config_file, exc_info=True) if not config_template: # We don't have a config file return {} import salt.utils.yaml try: opts = salt.utils.yaml.safe_load(config_template) or {} opts['conf_file'] = config_file except Exception as err: import salt.log msg = 'pillar_ldap: error parsing configuration file: {0} - {1}'.format( config_file, err ) if salt.log.is_console_configured(): log.warning(msg) else: print(msg) return {} else: if not isinstance(opts, dict): log.warning( 'pillar_ldap: %s is invalidly formatted, must be a YAML ' 'dictionary. See the documentation for more information.', config_file ) return {} if 'search_order' not in opts: log.warning( 'pillar_ldap: search_order missing from configuration. See the ' 'documentation for more information.' ) return {} data = {} for source in opts['search_order']: config = opts[source] result = _do_search(config) log.debug('source %s got result %s', source, result) if result: data = _result_to_dict(data, result, config, source) return data
def function[ext_pillar, parameter[minion_id, pillar, config_file]]: constant[ Execute LDAP searches and return the aggregated data ] variable[config_template] assign[=] constant[None] <ast.Try object at 0x7da1b2139510> if <ast.UnaryOp object at 0x7da1b21398a0> begin[:] return[dictionary[[], []]] import module[salt.utils.yaml] <ast.Try object at 0x7da1b2139a80> if compare[constant[search_order] <ast.NotIn object at 0x7da2590d7190> name[opts]] begin[:] call[name[log].warning, parameter[constant[pillar_ldap: search_order missing from configuration. See the documentation for more information.]]] return[dictionary[[], []]] variable[data] assign[=] dictionary[[], []] for taget[name[source]] in starred[call[name[opts]][constant[search_order]]] begin[:] variable[config] assign[=] call[name[opts]][name[source]] variable[result] assign[=] call[name[_do_search], parameter[name[config]]] call[name[log].debug, parameter[constant[source %s got result %s], name[source], name[result]]] if name[result] begin[:] variable[data] assign[=] call[name[_result_to_dict], parameter[name[data], name[result], name[config], name[source]]] return[name[data]]
keyword[def] identifier[ext_pillar] ( identifier[minion_id] , identifier[pillar] , identifier[config_file] ): literal[string] identifier[config_template] = keyword[None] keyword[try] : identifier[config_template] = identifier[_render_template] ( identifier[config_file] ) keyword[except] identifier[jinja2] . identifier[exceptions] . identifier[TemplateNotFound] : identifier[log] . identifier[debug] ( literal[string] , identifier[config_file] ) keyword[except] identifier[Exception] : identifier[log] . identifier[debug] ( literal[string] , identifier[config_file] , identifier[exc_info] = keyword[True] ) keyword[if] keyword[not] identifier[config_template] : keyword[return] {} keyword[import] identifier[salt] . identifier[utils] . identifier[yaml] keyword[try] : identifier[opts] = identifier[salt] . identifier[utils] . identifier[yaml] . identifier[safe_load] ( identifier[config_template] ) keyword[or] {} identifier[opts] [ literal[string] ]= identifier[config_file] keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[import] identifier[salt] . identifier[log] identifier[msg] = literal[string] . identifier[format] ( identifier[config_file] , identifier[err] ) keyword[if] identifier[salt] . identifier[log] . identifier[is_console_configured] (): identifier[log] . identifier[warning] ( identifier[msg] ) keyword[else] : identifier[print] ( identifier[msg] ) keyword[return] {} keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[opts] , identifier[dict] ): identifier[log] . identifier[warning] ( literal[string] literal[string] , identifier[config_file] ) keyword[return] {} keyword[if] literal[string] keyword[not] keyword[in] identifier[opts] : identifier[log] . 
identifier[warning] ( literal[string] literal[string] ) keyword[return] {} identifier[data] ={} keyword[for] identifier[source] keyword[in] identifier[opts] [ literal[string] ]: identifier[config] = identifier[opts] [ identifier[source] ] identifier[result] = identifier[_do_search] ( identifier[config] ) identifier[log] . identifier[debug] ( literal[string] , identifier[source] , identifier[result] ) keyword[if] identifier[result] : identifier[data] = identifier[_result_to_dict] ( identifier[data] , identifier[result] , identifier[config] , identifier[source] ) keyword[return] identifier[data]
def ext_pillar(minion_id, pillar, config_file): # pylint: disable=W0613 # pylint: disable=W0613 '\n Execute LDAP searches and return the aggregated data\n ' config_template = None try: config_template = _render_template(config_file) # depends on [control=['try'], data=[]] except jinja2.exceptions.TemplateNotFound: log.debug('pillar_ldap: missing configuration file %s', config_file) # depends on [control=['except'], data=[]] except Exception: log.debug('pillar_ldap: failed to render template for %s', config_file, exc_info=True) # depends on [control=['except'], data=[]] if not config_template: # We don't have a config file return {} # depends on [control=['if'], data=[]] import salt.utils.yaml try: opts = salt.utils.yaml.safe_load(config_template) or {} opts['conf_file'] = config_file # depends on [control=['try'], data=[]] except Exception as err: import salt.log msg = 'pillar_ldap: error parsing configuration file: {0} - {1}'.format(config_file, err) if salt.log.is_console_configured(): log.warning(msg) # depends on [control=['if'], data=[]] else: print(msg) return {} # depends on [control=['except'], data=['err']] else: if not isinstance(opts, dict): log.warning('pillar_ldap: %s is invalidly formatted, must be a YAML dictionary. See the documentation for more information.', config_file) return {} # depends on [control=['if'], data=[]] if 'search_order' not in opts: log.warning('pillar_ldap: search_order missing from configuration. See the documentation for more information.') return {} # depends on [control=['if'], data=[]] data = {} for source in opts['search_order']: config = opts[source] result = _do_search(config) log.debug('source %s got result %s', source, result) if result: data = _result_to_dict(data, result, config, source) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['source']] return data
def fetch_organization_courses(organization): """ Retrieves the set of courses currently linked to the specified organization """ organization_obj = serializers.deserialize_organization(organization) queryset = internal.OrganizationCourse.objects.filter( organization=organization_obj, active=True ).select_related('organization') return [serializers.serialize_organization_with_course(organization) for organization in queryset]
def function[fetch_organization_courses, parameter[organization]]: constant[ Retrieves the set of courses currently linked to the specified organization ] variable[organization_obj] assign[=] call[name[serializers].deserialize_organization, parameter[name[organization]]] variable[queryset] assign[=] call[call[name[internal].OrganizationCourse.objects.filter, parameter[]].select_related, parameter[constant[organization]]] return[<ast.ListComp object at 0x7da2044c0e50>]
keyword[def] identifier[fetch_organization_courses] ( identifier[organization] ): literal[string] identifier[organization_obj] = identifier[serializers] . identifier[deserialize_organization] ( identifier[organization] ) identifier[queryset] = identifier[internal] . identifier[OrganizationCourse] . identifier[objects] . identifier[filter] ( identifier[organization] = identifier[organization_obj] , identifier[active] = keyword[True] ). identifier[select_related] ( literal[string] ) keyword[return] [ identifier[serializers] . identifier[serialize_organization_with_course] ( identifier[organization] ) keyword[for] identifier[organization] keyword[in] identifier[queryset] ]
def fetch_organization_courses(organization): """ Retrieves the set of courses currently linked to the specified organization """ organization_obj = serializers.deserialize_organization(organization) queryset = internal.OrganizationCourse.objects.filter(organization=organization_obj, active=True).select_related('organization') return [serializers.serialize_organization_with_course(organization) for organization in queryset]
def getSizes(self): """ Get all the available sizes of the current image, and all available data about them. Returns: A list of dicts with the size data. """ method = 'flickr.photos.getSizes' data = _doget(method, photo_id=self.id) ret = [] # The given props are those that we return and the according types, since # return width and height as string would make "75">"100" be True, which # is just error prone. props = {'url':str,'width':int,'height':int,'label':str,'source':str,'text':str} for psize in data.rsp.sizes.size: d = {} for prop,convert_to_type in props.items(): d[prop] = convert_to_type(getattr(psize, prop)) ret.append(d) return ret
def function[getSizes, parameter[self]]: constant[ Get all the available sizes of the current image, and all available data about them. Returns: A list of dicts with the size data. ] variable[method] assign[=] constant[flickr.photos.getSizes] variable[data] assign[=] call[name[_doget], parameter[name[method]]] variable[ret] assign[=] list[[]] variable[props] assign[=] dictionary[[<ast.Constant object at 0x7da1b26773a0>, <ast.Constant object at 0x7da1b2677460>, <ast.Constant object at 0x7da1b26773d0>, <ast.Constant object at 0x7da1b2677850>, <ast.Constant object at 0x7da1b2676f50>, <ast.Constant object at 0x7da20e9565c0>], [<ast.Name object at 0x7da20e956740>, <ast.Name object at 0x7da20e9576d0>, <ast.Name object at 0x7da20e956290>, <ast.Name object at 0x7da20e9551b0>, <ast.Name object at 0x7da20e957340>, <ast.Name object at 0x7da20e9575e0>]] for taget[name[psize]] in starred[name[data].rsp.sizes.size] begin[:] variable[d] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b2662410>, <ast.Name object at 0x7da1b2661300>]]] in starred[call[name[props].items, parameter[]]] begin[:] call[name[d]][name[prop]] assign[=] call[name[convert_to_type], parameter[call[name[getattr], parameter[name[psize], name[prop]]]]] call[name[ret].append, parameter[name[d]]] return[name[ret]]
keyword[def] identifier[getSizes] ( identifier[self] ): literal[string] identifier[method] = literal[string] identifier[data] = identifier[_doget] ( identifier[method] , identifier[photo_id] = identifier[self] . identifier[id] ) identifier[ret] =[] identifier[props] ={ literal[string] : identifier[str] , literal[string] : identifier[int] , literal[string] : identifier[int] , literal[string] : identifier[str] , literal[string] : identifier[str] , literal[string] : identifier[str] } keyword[for] identifier[psize] keyword[in] identifier[data] . identifier[rsp] . identifier[sizes] . identifier[size] : identifier[d] ={} keyword[for] identifier[prop] , identifier[convert_to_type] keyword[in] identifier[props] . identifier[items] (): identifier[d] [ identifier[prop] ]= identifier[convert_to_type] ( identifier[getattr] ( identifier[psize] , identifier[prop] )) identifier[ret] . identifier[append] ( identifier[d] ) keyword[return] identifier[ret]
def getSizes(self): """ Get all the available sizes of the current image, and all available data about them. Returns: A list of dicts with the size data. """ method = 'flickr.photos.getSizes' data = _doget(method, photo_id=self.id) ret = [] # The given props are those that we return and the according types, since # return width and height as string would make "75">"100" be True, which # is just error prone. props = {'url': str, 'width': int, 'height': int, 'label': str, 'source': str, 'text': str} for psize in data.rsp.sizes.size: d = {} for (prop, convert_to_type) in props.items(): d[prop] = convert_to_type(getattr(psize, prop)) # depends on [control=['for'], data=[]] ret.append(d) # depends on [control=['for'], data=['psize']] return ret
def p_action_blocks(p): """ action_blocks : action_blocks action_block """ if isinstance(p[1], list): if isinstance(p[1][0], list): p[0] = p[1][0] + [p[2]] else: p[0] = p[1] + p[2] else: p[0] = [p[1], p[2]]
def function[p_action_blocks, parameter[p]]: constant[ action_blocks : action_blocks action_block ] if call[name[isinstance], parameter[call[name[p]][constant[1]], name[list]]] begin[:] if call[name[isinstance], parameter[call[call[name[p]][constant[1]]][constant[0]], name[list]]] begin[:] call[name[p]][constant[0]] assign[=] binary_operation[call[call[name[p]][constant[1]]][constant[0]] + list[[<ast.Subscript object at 0x7da1b021df00>]]]
keyword[def] identifier[p_action_blocks] ( identifier[p] ): literal[string] keyword[if] identifier[isinstance] ( identifier[p] [ literal[int] ], identifier[list] ): keyword[if] identifier[isinstance] ( identifier[p] [ literal[int] ][ literal[int] ], identifier[list] ): identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ][ literal[int] ]+[ identifier[p] [ literal[int] ]] keyword[else] : identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]+ identifier[p] [ literal[int] ] keyword[else] : identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]]
def p_action_blocks(p): """ action_blocks : action_blocks action_block """ if isinstance(p[1], list): if isinstance(p[1][0], list): p[0] = p[1][0] + [p[2]] # depends on [control=['if'], data=[]] else: p[0] = p[1] + p[2] # depends on [control=['if'], data=[]] else: p[0] = [p[1], p[2]]
def getmlsthelper(referencefilepath, start, organism, update): """Prepares to run the getmlst.py script provided in SRST2""" from accessoryFunctions.accessoryFunctions import GenObject # Initialise a set to for the organism(s) for which new alleles and profiles are desired organismset = set() # Allow for Shigella to use the Escherichia MLST profile/alleles organism = organism if organism != 'Shigella' else 'Escherichia' # As there are multiple profiles for certain organisms, this dictionary has the schemes I use as values organismdictionary = {'Escherichia': 'Escherichia coli#1', 'Shigella': 'Escherichia coli#1', 'Vibrio': 'Vibrio parahaemolyticus', 'Campylobacter': 'Campylobacter jejuni', 'Listeria': 'Listeria monocytogenes', 'Bacillus': 'Bacillus cereus', 'Klebsiella': 'Klebsiella pneumoniae'} # Allow for a genus not in the dictionary being specified try: organismset.add(organismdictionary[organism]) except KeyError: # Add the organism to the set organismset.add(organism) for scheme in organismset: organismpath = os.path.join(referencefilepath, 'MLST', organism) # Find all folders (with the trailing / in the glob search) and remove the trailing / try: lastfolder = sorted(glob('{}/*/'.format(organismpath)))[-1].rstrip('/') except IndexError: lastfolder = [] # Run the method to determine the most recent folder, and how recently it was updated delta, foldersize, d1 = schemedate(lastfolder) # Set the path/name of the folder to contain the new alleles and profile newfolder = '{}/{}'.format(organismpath, d1) if update: if delta.days > 7 or foldersize < 100: printtime('Downloading {} MLST scheme from pubmlst.org'.format(organism), start) # Create the object to store the argument attributes to feed to getmlst getmlstargs = GenObject() getmlstargs.species = scheme getmlstargs.repository_url = 'http://pubmlst.org/data/dbases.xml' getmlstargs.force_scheme_name = False getmlstargs.path = newfolder # Create the path to store the downloaded make_path(getmlstargs.path) 
getmlst.main(getmlstargs) # Even if there is an issue contacting the database, files are created, however, they are populated # with XML strings indicating that the download failed # Read the first character in the file try: profilestart = open(glob('{}/*.txt'.format(newfolder))[0]).readline() except IndexError: profilestart = [] # If it is a <, then the download failed if not profilestart or profilestart[0] == '<': # Delete the folder, and use the previous definitions instead shutil.rmtree(newfolder) newfolder = lastfolder # If the profile and alleles are up-to-date, set :newfolder to :lastfolder else: newfolder = lastfolder # If update isn't specified, don't update else: newfolder = lastfolder # Ensure that the profile/alleles updated successfully # Calculate the size of the folder by adding the sizes of all the files within the folder together try: newfoldersize = sum(os.path.getsize('{}/{}'.format(newfolder, f)) for f in os.listdir(newfolder) if os.path.isfile('{}/{}'.format(newfolder, f))) except (OSError, TypeError): newfoldersize = 100 # If the profile/allele failed, remove the folder, and use the most recent update if newfoldersize < 100: shutil.rmtree(newfolder) try: newfolder = sorted(glob('{}/*/'.format(organismpath)))[-1].rstrip('/') except IndexError: newfolder = organismpath # Return the name/path of the allele-containing folder return newfolder
def function[getmlsthelper, parameter[referencefilepath, start, organism, update]]: constant[Prepares to run the getmlst.py script provided in SRST2] from relative_module[accessoryFunctions.accessoryFunctions] import module[GenObject] variable[organismset] assign[=] call[name[set], parameter[]] variable[organism] assign[=] <ast.IfExp object at 0x7da18f09d870> variable[organismdictionary] assign[=] dictionary[[<ast.Constant object at 0x7da18f09d5d0>, <ast.Constant object at 0x7da18f09cca0>, <ast.Constant object at 0x7da18f09cac0>, <ast.Constant object at 0x7da18f09e980>, <ast.Constant object at 0x7da18f09f220>, <ast.Constant object at 0x7da18f09d0f0>, <ast.Constant object at 0x7da18f09d5a0>], [<ast.Constant object at 0x7da18f09e710>, <ast.Constant object at 0x7da18f09f010>, <ast.Constant object at 0x7da18f09ee60>, <ast.Constant object at 0x7da18f09eb60>, <ast.Constant object at 0x7da18f09f700>, <ast.Constant object at 0x7da18f09c070>, <ast.Constant object at 0x7da18f09d150>]] <ast.Try object at 0x7da18f09db70> for taget[name[scheme]] in starred[name[organismset]] begin[:] variable[organismpath] assign[=] call[name[os].path.join, parameter[name[referencefilepath], constant[MLST], name[organism]]] <ast.Try object at 0x7da18f09dde0> <ast.Tuple object at 0x7da18f09cfd0> assign[=] call[name[schemedate], parameter[name[lastfolder]]] variable[newfolder] assign[=] call[constant[{}/{}].format, parameter[name[organismpath], name[d1]]] if name[update] begin[:] if <ast.BoolOp object at 0x7da18f09ebc0> begin[:] call[name[printtime], parameter[call[constant[Downloading {} MLST scheme from pubmlst.org].format, parameter[name[organism]]], name[start]]] variable[getmlstargs] assign[=] call[name[GenObject], parameter[]] name[getmlstargs].species assign[=] name[scheme] name[getmlstargs].repository_url assign[=] constant[http://pubmlst.org/data/dbases.xml] name[getmlstargs].force_scheme_name assign[=] constant[False] name[getmlstargs].path assign[=] name[newfolder] 
call[name[make_path], parameter[name[getmlstargs].path]] call[name[getmlst].main, parameter[name[getmlstargs]]] <ast.Try object at 0x7da18f09c520> if <ast.BoolOp object at 0x7da18f09ded0> begin[:] call[name[shutil].rmtree, parameter[name[newfolder]]] variable[newfolder] assign[=] name[lastfolder] <ast.Try object at 0x7da18f09f940> if compare[name[newfoldersize] less[<] constant[100]] begin[:] call[name[shutil].rmtree, parameter[name[newfolder]]] <ast.Try object at 0x7da18f09dc00> return[name[newfolder]]
keyword[def] identifier[getmlsthelper] ( identifier[referencefilepath] , identifier[start] , identifier[organism] , identifier[update] ): literal[string] keyword[from] identifier[accessoryFunctions] . identifier[accessoryFunctions] keyword[import] identifier[GenObject] identifier[organismset] = identifier[set] () identifier[organism] = identifier[organism] keyword[if] identifier[organism] != literal[string] keyword[else] literal[string] identifier[organismdictionary] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[try] : identifier[organismset] . identifier[add] ( identifier[organismdictionary] [ identifier[organism] ]) keyword[except] identifier[KeyError] : identifier[organismset] . identifier[add] ( identifier[organism] ) keyword[for] identifier[scheme] keyword[in] identifier[organismset] : identifier[organismpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[referencefilepath] , literal[string] , identifier[organism] ) keyword[try] : identifier[lastfolder] = identifier[sorted] ( identifier[glob] ( literal[string] . identifier[format] ( identifier[organismpath] )))[- literal[int] ]. identifier[rstrip] ( literal[string] ) keyword[except] identifier[IndexError] : identifier[lastfolder] =[] identifier[delta] , identifier[foldersize] , identifier[d1] = identifier[schemedate] ( identifier[lastfolder] ) identifier[newfolder] = literal[string] . identifier[format] ( identifier[organismpath] , identifier[d1] ) keyword[if] identifier[update] : keyword[if] identifier[delta] . identifier[days] > literal[int] keyword[or] identifier[foldersize] < literal[int] : identifier[printtime] ( literal[string] . identifier[format] ( identifier[organism] ), identifier[start] ) identifier[getmlstargs] = identifier[GenObject] () identifier[getmlstargs] . 
identifier[species] = identifier[scheme] identifier[getmlstargs] . identifier[repository_url] = literal[string] identifier[getmlstargs] . identifier[force_scheme_name] = keyword[False] identifier[getmlstargs] . identifier[path] = identifier[newfolder] identifier[make_path] ( identifier[getmlstargs] . identifier[path] ) identifier[getmlst] . identifier[main] ( identifier[getmlstargs] ) keyword[try] : identifier[profilestart] = identifier[open] ( identifier[glob] ( literal[string] . identifier[format] ( identifier[newfolder] ))[ literal[int] ]). identifier[readline] () keyword[except] identifier[IndexError] : identifier[profilestart] =[] keyword[if] keyword[not] identifier[profilestart] keyword[or] identifier[profilestart] [ literal[int] ]== literal[string] : identifier[shutil] . identifier[rmtree] ( identifier[newfolder] ) identifier[newfolder] = identifier[lastfolder] keyword[else] : identifier[newfolder] = identifier[lastfolder] keyword[else] : identifier[newfolder] = identifier[lastfolder] keyword[try] : identifier[newfoldersize] = identifier[sum] ( identifier[os] . identifier[path] . identifier[getsize] ( literal[string] . identifier[format] ( identifier[newfolder] , identifier[f] )) keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[newfolder] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( literal[string] . identifier[format] ( identifier[newfolder] , identifier[f] ))) keyword[except] ( identifier[OSError] , identifier[TypeError] ): identifier[newfoldersize] = literal[int] keyword[if] identifier[newfoldersize] < literal[int] : identifier[shutil] . identifier[rmtree] ( identifier[newfolder] ) keyword[try] : identifier[newfolder] = identifier[sorted] ( identifier[glob] ( literal[string] . identifier[format] ( identifier[organismpath] )))[- literal[int] ]. 
identifier[rstrip] ( literal[string] ) keyword[except] identifier[IndexError] : identifier[newfolder] = identifier[organismpath] keyword[return] identifier[newfolder]
def getmlsthelper(referencefilepath, start, organism, update): """Prepares to run the getmlst.py script provided in SRST2""" from accessoryFunctions.accessoryFunctions import GenObject # Initialise a set to for the organism(s) for which new alleles and profiles are desired organismset = set() # Allow for Shigella to use the Escherichia MLST profile/alleles organism = organism if organism != 'Shigella' else 'Escherichia' # As there are multiple profiles for certain organisms, this dictionary has the schemes I use as values organismdictionary = {'Escherichia': 'Escherichia coli#1', 'Shigella': 'Escherichia coli#1', 'Vibrio': 'Vibrio parahaemolyticus', 'Campylobacter': 'Campylobacter jejuni', 'Listeria': 'Listeria monocytogenes', 'Bacillus': 'Bacillus cereus', 'Klebsiella': 'Klebsiella pneumoniae'} # Allow for a genus not in the dictionary being specified try: organismset.add(organismdictionary[organism]) # depends on [control=['try'], data=[]] except KeyError: # Add the organism to the set organismset.add(organism) # depends on [control=['except'], data=[]] for scheme in organismset: organismpath = os.path.join(referencefilepath, 'MLST', organism) # Find all folders (with the trailing / in the glob search) and remove the trailing / try: lastfolder = sorted(glob('{}/*/'.format(organismpath)))[-1].rstrip('/') # depends on [control=['try'], data=[]] except IndexError: lastfolder = [] # depends on [control=['except'], data=[]] # Run the method to determine the most recent folder, and how recently it was updated (delta, foldersize, d1) = schemedate(lastfolder) # Set the path/name of the folder to contain the new alleles and profile newfolder = '{}/{}'.format(organismpath, d1) if update: if delta.days > 7 or foldersize < 100: printtime('Downloading {} MLST scheme from pubmlst.org'.format(organism), start) # Create the object to store the argument attributes to feed to getmlst getmlstargs = GenObject() getmlstargs.species = scheme getmlstargs.repository_url = 
'http://pubmlst.org/data/dbases.xml' getmlstargs.force_scheme_name = False getmlstargs.path = newfolder # Create the path to store the downloaded make_path(getmlstargs.path) getmlst.main(getmlstargs) # Even if there is an issue contacting the database, files are created, however, they are populated # with XML strings indicating that the download failed # Read the first character in the file try: profilestart = open(glob('{}/*.txt'.format(newfolder))[0]).readline() # depends on [control=['try'], data=[]] except IndexError: profilestart = [] # depends on [control=['except'], data=[]] # If it is a <, then the download failed if not profilestart or profilestart[0] == '<': # Delete the folder, and use the previous definitions instead shutil.rmtree(newfolder) newfolder = lastfolder # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # If the profile and alleles are up-to-date, set :newfolder to :lastfolder newfolder = lastfolder # depends on [control=['if'], data=[]] else: # If update isn't specified, don't update newfolder = lastfolder # Ensure that the profile/alleles updated successfully # Calculate the size of the folder by adding the sizes of all the files within the folder together try: newfoldersize = sum((os.path.getsize('{}/{}'.format(newfolder, f)) for f in os.listdir(newfolder) if os.path.isfile('{}/{}'.format(newfolder, f)))) # depends on [control=['try'], data=[]] except (OSError, TypeError): newfoldersize = 100 # depends on [control=['except'], data=[]] # If the profile/allele failed, remove the folder, and use the most recent update if newfoldersize < 100: shutil.rmtree(newfolder) try: newfolder = sorted(glob('{}/*/'.format(organismpath)))[-1].rstrip('/') # depends on [control=['try'], data=[]] except IndexError: newfolder = organismpath # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Return the name/path of the allele-containing folder return newfolder # depends on [control=['for'], 
data=['scheme']]
def validate_list_of_identical_dicts(self, list_of_dicts): """Check that all dicts within a list are identical.""" hashes = [] for _dict in list_of_dicts: hashes.append(hash(frozenset(_dict.items()))) self.log.debug('Hashes: {}'.format(hashes)) if len(set(hashes)) == 1: self.log.debug('Dicts within list are identical') else: return 'Dicts within list are not identical' return None
def function[validate_list_of_identical_dicts, parameter[self, list_of_dicts]]: constant[Check that all dicts within a list are identical.] variable[hashes] assign[=] list[[]] for taget[name[_dict]] in starred[name[list_of_dicts]] begin[:] call[name[hashes].append, parameter[call[name[hash], parameter[call[name[frozenset], parameter[call[name[_dict].items, parameter[]]]]]]]] call[name[self].log.debug, parameter[call[constant[Hashes: {}].format, parameter[name[hashes]]]]] if compare[call[name[len], parameter[call[name[set], parameter[name[hashes]]]]] equal[==] constant[1]] begin[:] call[name[self].log.debug, parameter[constant[Dicts within list are identical]]] return[constant[None]]
keyword[def] identifier[validate_list_of_identical_dicts] ( identifier[self] , identifier[list_of_dicts] ): literal[string] identifier[hashes] =[] keyword[for] identifier[_dict] keyword[in] identifier[list_of_dicts] : identifier[hashes] . identifier[append] ( identifier[hash] ( identifier[frozenset] ( identifier[_dict] . identifier[items] ()))) identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[hashes] )) keyword[if] identifier[len] ( identifier[set] ( identifier[hashes] ))== literal[int] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) keyword[else] : keyword[return] literal[string] keyword[return] keyword[None]
def validate_list_of_identical_dicts(self, list_of_dicts): """Check that all dicts within a list are identical.""" hashes = [] for _dict in list_of_dicts: hashes.append(hash(frozenset(_dict.items()))) # depends on [control=['for'], data=['_dict']] self.log.debug('Hashes: {}'.format(hashes)) if len(set(hashes)) == 1: self.log.debug('Dicts within list are identical') # depends on [control=['if'], data=[]] else: return 'Dicts within list are not identical' return None
def renew_close_to_expiration(self, margin_in_seconds=A_DAY): """Automatically renew subscriptions that are close to expiring, or have already expired. margin_in_seconds determines if a subscription is in fact close to expiring. By default, said margin is set to be a single day (24 hours). This is a long-running method for any non-trivial usage of the subscriber module, as renewal requires several http requests, and subscriptions are processed serially. Because of that, it is recommended to run this method in a celery task. """ subscriptions = self.storage.close_to_expiration(margin_in_seconds) for subscription in subscriptions: try: self.subscribe_impl(**subscription) except SubscriberError as e: warn(RENEW_FAILURE % (subscription['topic_url'], subscription['callback_id']), e)
def function[renew_close_to_expiration, parameter[self, margin_in_seconds]]: constant[Automatically renew subscriptions that are close to expiring, or have already expired. margin_in_seconds determines if a subscription is in fact close to expiring. By default, said margin is set to be a single day (24 hours). This is a long-running method for any non-trivial usage of the subscriber module, as renewal requires several http requests, and subscriptions are processed serially. Because of that, it is recommended to run this method in a celery task. ] variable[subscriptions] assign[=] call[name[self].storage.close_to_expiration, parameter[name[margin_in_seconds]]] for taget[name[subscription]] in starred[name[subscriptions]] begin[:] <ast.Try object at 0x7da1b26ad450>
keyword[def] identifier[renew_close_to_expiration] ( identifier[self] , identifier[margin_in_seconds] = identifier[A_DAY] ): literal[string] identifier[subscriptions] = identifier[self] . identifier[storage] . identifier[close_to_expiration] ( identifier[margin_in_seconds] ) keyword[for] identifier[subscription] keyword[in] identifier[subscriptions] : keyword[try] : identifier[self] . identifier[subscribe_impl] (** identifier[subscription] ) keyword[except] identifier[SubscriberError] keyword[as] identifier[e] : identifier[warn] ( identifier[RENEW_FAILURE] %( identifier[subscription] [ literal[string] ], identifier[subscription] [ literal[string] ]), identifier[e] )
def renew_close_to_expiration(self, margin_in_seconds=A_DAY): """Automatically renew subscriptions that are close to expiring, or have already expired. margin_in_seconds determines if a subscription is in fact close to expiring. By default, said margin is set to be a single day (24 hours). This is a long-running method for any non-trivial usage of the subscriber module, as renewal requires several http requests, and subscriptions are processed serially. Because of that, it is recommended to run this method in a celery task. """ subscriptions = self.storage.close_to_expiration(margin_in_seconds) for subscription in subscriptions: try: self.subscribe_impl(**subscription) # depends on [control=['try'], data=[]] except SubscriberError as e: warn(RENEW_FAILURE % (subscription['topic_url'], subscription['callback_id']), e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['subscription']]
def from_(self, pct_pts): """Reverse of :meth:`to_`.""" pct_pts = np.asarray(pct_pts, dtype=np.float) has_z = (pct_pts.shape[-1] > 2) max_pt = list(self.viewer.get_window_size()) if has_z: max_pt.append(0.0) win_pts = np.multiply(pct_pts, max_pt) # round to pixel units, if asked if self.as_int: win_pts = np.rint(win_pts).astype(np.int, copy=False) return win_pts
def function[from_, parameter[self, pct_pts]]: constant[Reverse of :meth:`to_`.] variable[pct_pts] assign[=] call[name[np].asarray, parameter[name[pct_pts]]] variable[has_z] assign[=] compare[call[name[pct_pts].shape][<ast.UnaryOp object at 0x7da1b0dbd030>] greater[>] constant[2]] variable[max_pt] assign[=] call[name[list], parameter[call[name[self].viewer.get_window_size, parameter[]]]] if name[has_z] begin[:] call[name[max_pt].append, parameter[constant[0.0]]] variable[win_pts] assign[=] call[name[np].multiply, parameter[name[pct_pts], name[max_pt]]] if name[self].as_int begin[:] variable[win_pts] assign[=] call[call[name[np].rint, parameter[name[win_pts]]].astype, parameter[name[np].int]] return[name[win_pts]]
keyword[def] identifier[from_] ( identifier[self] , identifier[pct_pts] ): literal[string] identifier[pct_pts] = identifier[np] . identifier[asarray] ( identifier[pct_pts] , identifier[dtype] = identifier[np] . identifier[float] ) identifier[has_z] =( identifier[pct_pts] . identifier[shape] [- literal[int] ]> literal[int] ) identifier[max_pt] = identifier[list] ( identifier[self] . identifier[viewer] . identifier[get_window_size] ()) keyword[if] identifier[has_z] : identifier[max_pt] . identifier[append] ( literal[int] ) identifier[win_pts] = identifier[np] . identifier[multiply] ( identifier[pct_pts] , identifier[max_pt] ) keyword[if] identifier[self] . identifier[as_int] : identifier[win_pts] = identifier[np] . identifier[rint] ( identifier[win_pts] ). identifier[astype] ( identifier[np] . identifier[int] , identifier[copy] = keyword[False] ) keyword[return] identifier[win_pts]
def from_(self, pct_pts): """Reverse of :meth:`to_`.""" pct_pts = np.asarray(pct_pts, dtype=np.float) has_z = pct_pts.shape[-1] > 2 max_pt = list(self.viewer.get_window_size()) if has_z: max_pt.append(0.0) # depends on [control=['if'], data=[]] win_pts = np.multiply(pct_pts, max_pt) # round to pixel units, if asked if self.as_int: win_pts = np.rint(win_pts).astype(np.int, copy=False) # depends on [control=['if'], data=[]] return win_pts
def add_result(self, key, result): """Add result object to cache with given key. The request is ignored when the cache is already full or the key is None. """ if len(self.cache) > self.max_size: return if key is not None: self.cache[key] = result
def function[add_result, parameter[self, key, result]]: constant[Add result object to cache with given key. The request is ignored when the cache is already full or the key is None. ] if compare[call[name[len], parameter[name[self].cache]] greater[>] name[self].max_size] begin[:] return[None] if compare[name[key] is_not constant[None]] begin[:] call[name[self].cache][name[key]] assign[=] name[result]
keyword[def] identifier[add_result] ( identifier[self] , identifier[key] , identifier[result] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[cache] )> identifier[self] . identifier[max_size] : keyword[return] keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[cache] [ identifier[key] ]= identifier[result]
def add_result(self, key, result): """Add result object to cache with given key. The request is ignored when the cache is already full or the key is None. """ if len(self.cache) > self.max_size: return # depends on [control=['if'], data=[]] if key is not None: self.cache[key] = result # depends on [control=['if'], data=['key']]
def query_bitmap_info(self): """Information about the screen bitmap. out address of type str out width of type int out height of type int out bits_per_pixel of type int out bytes_per_line of type int out bitmap_format of type :class:`BitmapFormat` """ (address, width, height, bits_per_pixel, bytes_per_line, bitmap_format) = self._call("queryBitmapInfo") bitmap_format = BitmapFormat(bitmap_format) return (address, width, height, bits_per_pixel, bytes_per_line, bitmap_format)
def function[query_bitmap_info, parameter[self]]: constant[Information about the screen bitmap. out address of type str out width of type int out height of type int out bits_per_pixel of type int out bytes_per_line of type int out bitmap_format of type :class:`BitmapFormat` ] <ast.Tuple object at 0x7da20e9b3f70> assign[=] call[name[self]._call, parameter[constant[queryBitmapInfo]]] variable[bitmap_format] assign[=] call[name[BitmapFormat], parameter[name[bitmap_format]]] return[tuple[[<ast.Name object at 0x7da1b26af880>, <ast.Name object at 0x7da1b26ac730>, <ast.Name object at 0x7da1b26ae0e0>, <ast.Name object at 0x7da1b26ae6e0>, <ast.Name object at 0x7da1b26ad540>, <ast.Name object at 0x7da1b26af700>]]]
keyword[def] identifier[query_bitmap_info] ( identifier[self] ): literal[string] ( identifier[address] , identifier[width] , identifier[height] , identifier[bits_per_pixel] , identifier[bytes_per_line] , identifier[bitmap_format] )= identifier[self] . identifier[_call] ( literal[string] ) identifier[bitmap_format] = identifier[BitmapFormat] ( identifier[bitmap_format] ) keyword[return] ( identifier[address] , identifier[width] , identifier[height] , identifier[bits_per_pixel] , identifier[bytes_per_line] , identifier[bitmap_format] )
def query_bitmap_info(self): """Information about the screen bitmap. out address of type str out width of type int out height of type int out bits_per_pixel of type int out bytes_per_line of type int out bitmap_format of type :class:`BitmapFormat` """ (address, width, height, bits_per_pixel, bytes_per_line, bitmap_format) = self._call('queryBitmapInfo') bitmap_format = BitmapFormat(bitmap_format) return (address, width, height, bits_per_pixel, bytes_per_line, bitmap_format)
def getSubdirectories(d): '''Returns a list of subdirectories in a directory. This function performed three times better for me than "for root, dirs, files in os.walk(d): return dirs" ''' return [f for f in os.listdir(d) if os.path.isdir(os.path.join(d, f)) ]
def function[getSubdirectories, parameter[d]]: constant[Returns a list of subdirectories in a directory. This function performed three times better for me than "for root, dirs, files in os.walk(d): return dirs" ] return[<ast.ListComp object at 0x7da18bc734c0>]
keyword[def] identifier[getSubdirectories] ( identifier[d] ): literal[string] keyword[return] [ identifier[f] keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[d] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[d] , identifier[f] ))]
def getSubdirectories(d): """Returns a list of subdirectories in a directory. This function performed three times better for me than "for root, dirs, files in os.walk(d): return dirs" """ return [f for f in os.listdir(d) if os.path.isdir(os.path.join(d, f))]
def authenticate_redirect(self, callback_uri=None, cancel_uri=None, extended_permissions=None): """Authenticates/installs this app for the current user.""" self.require_setting("facebook_api_key", "Facebook Connect") callback_uri = callback_uri or self.request.uri args = { "api_key": self.settings["facebook_api_key"], "v": "1.0", "fbconnect": "true", "display": "page", "next": urljoin(self.request.full_url(), callback_uri), "return_session": "true", } if cancel_uri: args["cancel_url"] = urljoin( self.request.full_url(), cancel_uri) if extended_permissions: if isinstance(extended_permissions, (str, bytes_type)): extended_permissions = [extended_permissions] args["req_perms"] = ",".join(extended_permissions) self.redirect("http://www.facebook.com/login.php?" + urlencode(args))
def function[authenticate_redirect, parameter[self, callback_uri, cancel_uri, extended_permissions]]: constant[Authenticates/installs this app for the current user.] call[name[self].require_setting, parameter[constant[facebook_api_key], constant[Facebook Connect]]] variable[callback_uri] assign[=] <ast.BoolOp object at 0x7da1b2334dc0> variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b2337c40>, <ast.Constant object at 0x7da1b23359c0>, <ast.Constant object at 0x7da1b2335180>, <ast.Constant object at 0x7da1b2335750>, <ast.Constant object at 0x7da1b2335300>, <ast.Constant object at 0x7da1b23378e0>], [<ast.Subscript object at 0x7da1b2334e20>, <ast.Constant object at 0x7da1b2335780>, <ast.Constant object at 0x7da1b2335c60>, <ast.Constant object at 0x7da1b2334f40>, <ast.Call object at 0x7da1b2335390>, <ast.Constant object at 0x7da1b2335090>]] if name[cancel_uri] begin[:] call[name[args]][constant[cancel_url]] assign[=] call[name[urljoin], parameter[call[name[self].request.full_url, parameter[]], name[cancel_uri]]] if name[extended_permissions] begin[:] if call[name[isinstance], parameter[name[extended_permissions], tuple[[<ast.Name object at 0x7da1b24acd30>, <ast.Name object at 0x7da1b24ac070>]]]] begin[:] variable[extended_permissions] assign[=] list[[<ast.Name object at 0x7da1b24ac610>]] call[name[args]][constant[req_perms]] assign[=] call[constant[,].join, parameter[name[extended_permissions]]] call[name[self].redirect, parameter[binary_operation[constant[http://www.facebook.com/login.php?] + call[name[urlencode], parameter[name[args]]]]]]
keyword[def] identifier[authenticate_redirect] ( identifier[self] , identifier[callback_uri] = keyword[None] , identifier[cancel_uri] = keyword[None] , identifier[extended_permissions] = keyword[None] ): literal[string] identifier[self] . identifier[require_setting] ( literal[string] , literal[string] ) identifier[callback_uri] = identifier[callback_uri] keyword[or] identifier[self] . identifier[request] . identifier[uri] identifier[args] ={ literal[string] : identifier[self] . identifier[settings] [ literal[string] ], literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[urljoin] ( identifier[self] . identifier[request] . identifier[full_url] (), identifier[callback_uri] ), literal[string] : literal[string] , } keyword[if] identifier[cancel_uri] : identifier[args] [ literal[string] ]= identifier[urljoin] ( identifier[self] . identifier[request] . identifier[full_url] (), identifier[cancel_uri] ) keyword[if] identifier[extended_permissions] : keyword[if] identifier[isinstance] ( identifier[extended_permissions] ,( identifier[str] , identifier[bytes_type] )): identifier[extended_permissions] =[ identifier[extended_permissions] ] identifier[args] [ literal[string] ]= literal[string] . identifier[join] ( identifier[extended_permissions] ) identifier[self] . identifier[redirect] ( literal[string] + identifier[urlencode] ( identifier[args] ))
def authenticate_redirect(self, callback_uri=None, cancel_uri=None, extended_permissions=None): """Authenticates/installs this app for the current user.""" self.require_setting('facebook_api_key', 'Facebook Connect') callback_uri = callback_uri or self.request.uri args = {'api_key': self.settings['facebook_api_key'], 'v': '1.0', 'fbconnect': 'true', 'display': 'page', 'next': urljoin(self.request.full_url(), callback_uri), 'return_session': 'true'} if cancel_uri: args['cancel_url'] = urljoin(self.request.full_url(), cancel_uri) # depends on [control=['if'], data=[]] if extended_permissions: if isinstance(extended_permissions, (str, bytes_type)): extended_permissions = [extended_permissions] # depends on [control=['if'], data=[]] args['req_perms'] = ','.join(extended_permissions) # depends on [control=['if'], data=[]] self.redirect('http://www.facebook.com/login.php?' + urlencode(args))
def vagalume(song): """ Returns the lyrics found in vagalume.com.br for the specified mp3 file or an empty string if not found. """ translate = { '@': 'a', URLESCAPE: '', ' ': '-' } artist = song.artist.lower() artist = normalize(artist, translate) artist = re.sub(r'\-{2,}', '-', artist) title = song.title.lower() title = normalize(title, translate) title = re.sub(r'\-{2,}', '-', title) url = 'https://www.vagalume.com.br/{}/{}.html'.format(artist, title) soup = get_url(url) body = soup.select('div#lyrics') if body == []: return '' content = body[0] for br in content.find_all('br'): br.replace_with('\n') return content.get_text().strip()
def function[vagalume, parameter[song]]: constant[ Returns the lyrics found in vagalume.com.br for the specified mp3 file or an empty string if not found. ] variable[translate] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c7280>, <ast.Name object at 0x7da20c6c5270>, <ast.Constant object at 0x7da20c6c5990>], [<ast.Constant object at 0x7da20c6c5ff0>, <ast.Constant object at 0x7da20c6c7c10>, <ast.Constant object at 0x7da20c6c5b40>]] variable[artist] assign[=] call[name[song].artist.lower, parameter[]] variable[artist] assign[=] call[name[normalize], parameter[name[artist], name[translate]]] variable[artist] assign[=] call[name[re].sub, parameter[constant[\-{2,}], constant[-], name[artist]]] variable[title] assign[=] call[name[song].title.lower, parameter[]] variable[title] assign[=] call[name[normalize], parameter[name[title], name[translate]]] variable[title] assign[=] call[name[re].sub, parameter[constant[\-{2,}], constant[-], name[title]]] variable[url] assign[=] call[constant[https://www.vagalume.com.br/{}/{}.html].format, parameter[name[artist], name[title]]] variable[soup] assign[=] call[name[get_url], parameter[name[url]]] variable[body] assign[=] call[name[soup].select, parameter[constant[div#lyrics]]] if compare[name[body] equal[==] list[[]]] begin[:] return[constant[]] variable[content] assign[=] call[name[body]][constant[0]] for taget[name[br]] in starred[call[name[content].find_all, parameter[constant[br]]]] begin[:] call[name[br].replace_with, parameter[constant[ ]]] return[call[call[name[content].get_text, parameter[]].strip, parameter[]]]
keyword[def] identifier[vagalume] ( identifier[song] ): literal[string] identifier[translate] ={ literal[string] : literal[string] , identifier[URLESCAPE] : literal[string] , literal[string] : literal[string] } identifier[artist] = identifier[song] . identifier[artist] . identifier[lower] () identifier[artist] = identifier[normalize] ( identifier[artist] , identifier[translate] ) identifier[artist] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[artist] ) identifier[title] = identifier[song] . identifier[title] . identifier[lower] () identifier[title] = identifier[normalize] ( identifier[title] , identifier[translate] ) identifier[title] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[title] ) identifier[url] = literal[string] . identifier[format] ( identifier[artist] , identifier[title] ) identifier[soup] = identifier[get_url] ( identifier[url] ) identifier[body] = identifier[soup] . identifier[select] ( literal[string] ) keyword[if] identifier[body] ==[]: keyword[return] literal[string] identifier[content] = identifier[body] [ literal[int] ] keyword[for] identifier[br] keyword[in] identifier[content] . identifier[find_all] ( literal[string] ): identifier[br] . identifier[replace_with] ( literal[string] ) keyword[return] identifier[content] . identifier[get_text] (). identifier[strip] ()
def vagalume(song): """ Returns the lyrics found in vagalume.com.br for the specified mp3 file or an empty string if not found. """ translate = {'@': 'a', URLESCAPE: '', ' ': '-'} artist = song.artist.lower() artist = normalize(artist, translate) artist = re.sub('\\-{2,}', '-', artist) title = song.title.lower() title = normalize(title, translate) title = re.sub('\\-{2,}', '-', title) url = 'https://www.vagalume.com.br/{}/{}.html'.format(artist, title) soup = get_url(url) body = soup.select('div#lyrics') if body == []: return '' # depends on [control=['if'], data=[]] content = body[0] for br in content.find_all('br'): br.replace_with('\n') # depends on [control=['for'], data=['br']] return content.get_text().strip()
def add_buffer(self, buf_header, buf_payload): ''' Associate a buffer header and payload with this message. Args: buf_header (``JSON``) : a buffer header buf_payload (``JSON`` or bytes) : a buffer payload Returns: None Raises: MessageError ''' if 'num_buffers' in self._header: self._header['num_buffers'] += 1 else: self._header['num_buffers'] = 1 self._header_json = None self._buffers.append((buf_header, buf_payload))
def function[add_buffer, parameter[self, buf_header, buf_payload]]: constant[ Associate a buffer header and payload with this message. Args: buf_header (``JSON``) : a buffer header buf_payload (``JSON`` or bytes) : a buffer payload Returns: None Raises: MessageError ] if compare[constant[num_buffers] in name[self]._header] begin[:] <ast.AugAssign object at 0x7da207f99030> name[self]._header_json assign[=] constant[None] call[name[self]._buffers.append, parameter[tuple[[<ast.Name object at 0x7da207f9a7a0>, <ast.Name object at 0x7da207f9b760>]]]]
keyword[def] identifier[add_buffer] ( identifier[self] , identifier[buf_header] , identifier[buf_payload] ): literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[_header] : identifier[self] . identifier[_header] [ literal[string] ]+= literal[int] keyword[else] : identifier[self] . identifier[_header] [ literal[string] ]= literal[int] identifier[self] . identifier[_header_json] = keyword[None] identifier[self] . identifier[_buffers] . identifier[append] (( identifier[buf_header] , identifier[buf_payload] ))
def add_buffer(self, buf_header, buf_payload): """ Associate a buffer header and payload with this message. Args: buf_header (``JSON``) : a buffer header buf_payload (``JSON`` or bytes) : a buffer payload Returns: None Raises: MessageError """ if 'num_buffers' in self._header: self._header['num_buffers'] += 1 # depends on [control=['if'], data=[]] else: self._header['num_buffers'] = 1 self._header_json = None self._buffers.append((buf_header, buf_payload))
def ok(self): """ Returns True if OK to use, else False """ try: v = int(self._value) chunk = self.mfac.value() if v < self.imin or v > self.imax or (v % chunk != 0): return False else: return True except: return False
def function[ok, parameter[self]]: constant[ Returns True if OK to use, else False ] <ast.Try object at 0x7da20c6ab7f0>
keyword[def] identifier[ok] ( identifier[self] ): literal[string] keyword[try] : identifier[v] = identifier[int] ( identifier[self] . identifier[_value] ) identifier[chunk] = identifier[self] . identifier[mfac] . identifier[value] () keyword[if] identifier[v] < identifier[self] . identifier[imin] keyword[or] identifier[v] > identifier[self] . identifier[imax] keyword[or] ( identifier[v] % identifier[chunk] != literal[int] ): keyword[return] keyword[False] keyword[else] : keyword[return] keyword[True] keyword[except] : keyword[return] keyword[False]
def ok(self): """ Returns True if OK to use, else False """ try: v = int(self._value) chunk = self.mfac.value() if v < self.imin or v > self.imax or v % chunk != 0: return False # depends on [control=['if'], data=[]] else: return True # depends on [control=['try'], data=[]] except: return False # depends on [control=['except'], data=[]]
def _to_args(x): """Convert to args representation""" if not isinstance(x, (list, tuple, np.ndarray)): x = [x] return x
def function[_to_args, parameter[x]]: constant[Convert to args representation] if <ast.UnaryOp object at 0x7da1b0e92a10> begin[:] variable[x] assign[=] list[[<ast.Name object at 0x7da1b0e91150>]] return[name[x]]
keyword[def] identifier[_to_args] ( identifier[x] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[x] ,( identifier[list] , identifier[tuple] , identifier[np] . identifier[ndarray] )): identifier[x] =[ identifier[x] ] keyword[return] identifier[x]
def _to_args(x): """Convert to args representation""" if not isinstance(x, (list, tuple, np.ndarray)): x = [x] # depends on [control=['if'], data=[]] return x
def activities_list(self, since=None, **kwargs): "https://developer.zendesk.com/rest_api/docs/core/activity_stream#list-activities" api_path = "/api/v2/activities.json" api_query = {} if "query" in kwargs.keys(): api_query.update(kwargs["query"]) del kwargs["query"] if since: api_query.update({ "since": since, }) return self.call(api_path, query=api_query, **kwargs)
def function[activities_list, parameter[self, since]]: constant[https://developer.zendesk.com/rest_api/docs/core/activity_stream#list-activities] variable[api_path] assign[=] constant[/api/v2/activities.json] variable[api_query] assign[=] dictionary[[], []] if compare[constant[query] in call[name[kwargs].keys, parameter[]]] begin[:] call[name[api_query].update, parameter[call[name[kwargs]][constant[query]]]] <ast.Delete object at 0x7da1b0e26860> if name[since] begin[:] call[name[api_query].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0d4bd00>], [<ast.Name object at 0x7da1b0d4bb80>]]]] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[activities_list] ( identifier[self] , identifier[since] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] identifier[api_query] ={} keyword[if] literal[string] keyword[in] identifier[kwargs] . identifier[keys] (): identifier[api_query] . identifier[update] ( identifier[kwargs] [ literal[string] ]) keyword[del] identifier[kwargs] [ literal[string] ] keyword[if] identifier[since] : identifier[api_query] . identifier[update] ({ literal[string] : identifier[since] , }) keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[query] = identifier[api_query] ,** identifier[kwargs] )
def activities_list(self, since=None, **kwargs): """https://developer.zendesk.com/rest_api/docs/core/activity_stream#list-activities""" api_path = '/api/v2/activities.json' api_query = {} if 'query' in kwargs.keys(): api_query.update(kwargs['query']) del kwargs['query'] # depends on [control=['if'], data=[]] if since: api_query.update({'since': since}) # depends on [control=['if'], data=[]] return self.call(api_path, query=api_query, **kwargs)
def contains(this, that, axis=semantics.axis_default): """Returns bool for each element of `that`, indicating if it is contained in `this` Parameters ---------- this : indexable key sequence sequence of items to test against that : indexable key sequence sequence of items to test for Returns ------- ndarray, [that.size], bool returns a bool for each element in `that`, indicating if it is contained in `this` Notes ----- Reads as 'this contains that' Similar to 'that in this', but with different performance characteristics """ this = as_index(this, axis=axis, lex_as_struct=True, base=True) that = as_index(that, axis=axis, lex_as_struct=True) left = np.searchsorted(that._keys, this._keys, sorter=that.sorter, side='left') right = np.searchsorted(that._keys, this._keys, sorter=that.sorter, side='right') flags = np.zeros(that.size + 1, dtype=np.int) np.add.at(flags, left, 1) np.add.at(flags, right, -1) return np.cumsum(flags)[:-1].astype(np.bool)[that.rank]
def function[contains, parameter[this, that, axis]]: constant[Returns bool for each element of `that`, indicating if it is contained in `this` Parameters ---------- this : indexable key sequence sequence of items to test against that : indexable key sequence sequence of items to test for Returns ------- ndarray, [that.size], bool returns a bool for each element in `that`, indicating if it is contained in `this` Notes ----- Reads as 'this contains that' Similar to 'that in this', but with different performance characteristics ] variable[this] assign[=] call[name[as_index], parameter[name[this]]] variable[that] assign[=] call[name[as_index], parameter[name[that]]] variable[left] assign[=] call[name[np].searchsorted, parameter[name[that]._keys, name[this]._keys]] variable[right] assign[=] call[name[np].searchsorted, parameter[name[that]._keys, name[this]._keys]] variable[flags] assign[=] call[name[np].zeros, parameter[binary_operation[name[that].size + constant[1]]]] call[name[np].add.at, parameter[name[flags], name[left], constant[1]]] call[name[np].add.at, parameter[name[flags], name[right], <ast.UnaryOp object at 0x7da20cabedd0>]] return[call[call[call[call[name[np].cumsum, parameter[name[flags]]]][<ast.Slice object at 0x7da20cabda80>].astype, parameter[name[np].bool]]][name[that].rank]]
keyword[def] identifier[contains] ( identifier[this] , identifier[that] , identifier[axis] = identifier[semantics] . identifier[axis_default] ): literal[string] identifier[this] = identifier[as_index] ( identifier[this] , identifier[axis] = identifier[axis] , identifier[lex_as_struct] = keyword[True] , identifier[base] = keyword[True] ) identifier[that] = identifier[as_index] ( identifier[that] , identifier[axis] = identifier[axis] , identifier[lex_as_struct] = keyword[True] ) identifier[left] = identifier[np] . identifier[searchsorted] ( identifier[that] . identifier[_keys] , identifier[this] . identifier[_keys] , identifier[sorter] = identifier[that] . identifier[sorter] , identifier[side] = literal[string] ) identifier[right] = identifier[np] . identifier[searchsorted] ( identifier[that] . identifier[_keys] , identifier[this] . identifier[_keys] , identifier[sorter] = identifier[that] . identifier[sorter] , identifier[side] = literal[string] ) identifier[flags] = identifier[np] . identifier[zeros] ( identifier[that] . identifier[size] + literal[int] , identifier[dtype] = identifier[np] . identifier[int] ) identifier[np] . identifier[add] . identifier[at] ( identifier[flags] , identifier[left] , literal[int] ) identifier[np] . identifier[add] . identifier[at] ( identifier[flags] , identifier[right] ,- literal[int] ) keyword[return] identifier[np] . identifier[cumsum] ( identifier[flags] )[:- literal[int] ]. identifier[astype] ( identifier[np] . identifier[bool] )[ identifier[that] . identifier[rank] ]
def contains(this, that, axis=semantics.axis_default): """Returns bool for each element of `that`, indicating if it is contained in `this` Parameters ---------- this : indexable key sequence sequence of items to test against that : indexable key sequence sequence of items to test for Returns ------- ndarray, [that.size], bool returns a bool for each element in `that`, indicating if it is contained in `this` Notes ----- Reads as 'this contains that' Similar to 'that in this', but with different performance characteristics """ this = as_index(this, axis=axis, lex_as_struct=True, base=True) that = as_index(that, axis=axis, lex_as_struct=True) left = np.searchsorted(that._keys, this._keys, sorter=that.sorter, side='left') right = np.searchsorted(that._keys, this._keys, sorter=that.sorter, side='right') flags = np.zeros(that.size + 1, dtype=np.int) np.add.at(flags, left, 1) np.add.at(flags, right, -1) return np.cumsum(flags)[:-1].astype(np.bool)[that.rank]
def show(self, block=None, warn=True): """Display the current figure (if possible). If blocking, this method replicates the behaviour of :func:`matplotlib.pyplot.show()`, otherwise it just calls up to :meth:`~matplotlib.figure.Figure.show`. This method also supports repeatedly showing the same figure, even after closing the display window, which isn't supported by `pyplot.show` (AFAIK). Parameters ---------- block : `bool`, optional open the figure and block until the figure is closed, otherwise open the figure as a detached window, default: `None`. If `None`, block if using an interactive backend and _not_ inside IPython. warn : `bool`, optional print a warning if matplotlib is not running in an interactive backend and cannot display the figure, default: `True`. """ # this method tries to reproduce the functionality of pyplot.show, # mainly for user convenience. However, as of matplotlib-3.0.0, # pyplot.show() ends up calling _back_ to Plot.show(), # so we have to be careful not to end up in a recursive loop # # Developer note: if we ever make it pinning to matplotlib >=3.0.0 # this method can likely be completely removed # import inspect try: callframe = inspect.currentframe().f_back except AttributeError: pass else: if 'matplotlib' in callframe.f_code.co_filename: block = False # render super(Plot, self).show(warn=warn) # don't block on ipython with interactive backends if block is None and interactive_backend(): block = not IPYTHON # block in GUI loop (stolen from mpl.backend_bases._Backend.show) if block: backend_mod = get_backend_mod() try: backend_mod.Show().mainloop() except AttributeError: # matplotlib < 2.1.0 backend_mod.show.mainloop()
def function[show, parameter[self, block, warn]]: constant[Display the current figure (if possible). If blocking, this method replicates the behaviour of :func:`matplotlib.pyplot.show()`, otherwise it just calls up to :meth:`~matplotlib.figure.Figure.show`. This method also supports repeatedly showing the same figure, even after closing the display window, which isn't supported by `pyplot.show` (AFAIK). Parameters ---------- block : `bool`, optional open the figure and block until the figure is closed, otherwise open the figure as a detached window, default: `None`. If `None`, block if using an interactive backend and _not_ inside IPython. warn : `bool`, optional print a warning if matplotlib is not running in an interactive backend and cannot display the figure, default: `True`. ] import module[inspect] <ast.Try object at 0x7da204622830> call[call[name[super], parameter[name[Plot], name[self]]].show, parameter[]] if <ast.BoolOp object at 0x7da204622ec0> begin[:] variable[block] assign[=] <ast.UnaryOp object at 0x7da204622f80> if name[block] begin[:] variable[backend_mod] assign[=] call[name[get_backend_mod], parameter[]] <ast.Try object at 0x7da204620df0>
keyword[def] identifier[show] ( identifier[self] , identifier[block] = keyword[None] , identifier[warn] = keyword[True] ): literal[string] keyword[import] identifier[inspect] keyword[try] : identifier[callframe] = identifier[inspect] . identifier[currentframe] (). identifier[f_back] keyword[except] identifier[AttributeError] : keyword[pass] keyword[else] : keyword[if] literal[string] keyword[in] identifier[callframe] . identifier[f_code] . identifier[co_filename] : identifier[block] = keyword[False] identifier[super] ( identifier[Plot] , identifier[self] ). identifier[show] ( identifier[warn] = identifier[warn] ) keyword[if] identifier[block] keyword[is] keyword[None] keyword[and] identifier[interactive_backend] (): identifier[block] = keyword[not] identifier[IPYTHON] keyword[if] identifier[block] : identifier[backend_mod] = identifier[get_backend_mod] () keyword[try] : identifier[backend_mod] . identifier[Show] (). identifier[mainloop] () keyword[except] identifier[AttributeError] : identifier[backend_mod] . identifier[show] . identifier[mainloop] ()
def show(self, block=None, warn=True): """Display the current figure (if possible). If blocking, this method replicates the behaviour of :func:`matplotlib.pyplot.show()`, otherwise it just calls up to :meth:`~matplotlib.figure.Figure.show`. This method also supports repeatedly showing the same figure, even after closing the display window, which isn't supported by `pyplot.show` (AFAIK). Parameters ---------- block : `bool`, optional open the figure and block until the figure is closed, otherwise open the figure as a detached window, default: `None`. If `None`, block if using an interactive backend and _not_ inside IPython. warn : `bool`, optional print a warning if matplotlib is not running in an interactive backend and cannot display the figure, default: `True`. """ # this method tries to reproduce the functionality of pyplot.show, # mainly for user convenience. However, as of matplotlib-3.0.0, # pyplot.show() ends up calling _back_ to Plot.show(), # so we have to be careful not to end up in a recursive loop # # Developer note: if we ever make it pinning to matplotlib >=3.0.0 # this method can likely be completely removed # import inspect try: callframe = inspect.currentframe().f_back # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] else: if 'matplotlib' in callframe.f_code.co_filename: block = False # depends on [control=['if'], data=[]] # render super(Plot, self).show(warn=warn) # don't block on ipython with interactive backends if block is None and interactive_backend(): block = not IPYTHON # depends on [control=['if'], data=[]] # block in GUI loop (stolen from mpl.backend_bases._Backend.show) if block: backend_mod = get_backend_mod() try: backend_mod.Show().mainloop() # depends on [control=['try'], data=[]] except AttributeError: # matplotlib < 2.1.0 backend_mod.show.mainloop() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def grab_java_message(node_list, curr_testname): """scan through the java output text and extract the java messages related to running test specified in curr_testname. Parameters ---------- :param node_list: list of H2O nodes List of H2o nodes associated with an H2OCloud (cluster) that are performing the test specified in curr_testname. :param curr_testname: str Store the unit test name (can be R unit or Py unit) that has been completed and failed. :return: a string object that is either empty or the java messages that associated with the test in curr_testname. The java messages can usually be found in one of the java_*_0.out.txt """ global g_java_start_text # contains text that describe the start of a unit test. java_messages = "" start_test = False # denote when the current test was found in the java_*_0.out.txt file # grab each java file and try to grab the java messages associated with curr_testname for each_node in node_list: java_filename = each_node.output_file_name # find the java_*_0.out.txt file if os.path.isfile(java_filename): java_file = open(java_filename, 'r') for each_line in java_file: if g_java_start_text in each_line: start_str, found, end_str = each_line.partition(g_java_start_text) if len(found) > 0: # a new test is being started. current_testname = end_str.strip() # grab the test name and check if it is curr_testname if current_testname == curr_testname: # found the line starting with current test. Grab everything now start_test = True # found text in java_*_0.out.txt that describe curr_testname # add header to make JAVA messages visible. 
java_messages += "\n\n**********************************************************\n" java_messages += "**********************************************************\n" java_messages += "JAVA Messages\n" java_messages += "**********************************************************\n" java_messages += "**********************************************************\n\n" else: # found a differnt test than our curr_testname. We are done! if start_test: # in the middle of curr_testname but found a new test starting, can quit now. break # store java message associated with curr_testname into java_messages if start_test: java_messages += each_line java_file.close() # finished finding java messages if start_test: # found java message associate with our test already. No need to continue the loop. break return java_messages
def function[grab_java_message, parameter[node_list, curr_testname]]: constant[scan through the java output text and extract the java messages related to running test specified in curr_testname. Parameters ---------- :param node_list: list of H2O nodes List of H2o nodes associated with an H2OCloud (cluster) that are performing the test specified in curr_testname. :param curr_testname: str Store the unit test name (can be R unit or Py unit) that has been completed and failed. :return: a string object that is either empty or the java messages that associated with the test in curr_testname. The java messages can usually be found in one of the java_*_0.out.txt ] <ast.Global object at 0x7da20e9b35b0> variable[java_messages] assign[=] constant[] variable[start_test] assign[=] constant[False] for taget[name[each_node]] in starred[name[node_list]] begin[:] variable[java_filename] assign[=] name[each_node].output_file_name if call[name[os].path.isfile, parameter[name[java_filename]]] begin[:] variable[java_file] assign[=] call[name[open], parameter[name[java_filename], constant[r]]] for taget[name[each_line]] in starred[name[java_file]] begin[:] if compare[name[g_java_start_text] in name[each_line]] begin[:] <ast.Tuple object at 0x7da20e9b3b50> assign[=] call[name[each_line].partition, parameter[name[g_java_start_text]]] if compare[call[name[len], parameter[name[found]]] greater[>] constant[0]] begin[:] variable[current_testname] assign[=] call[name[end_str].strip, parameter[]] if compare[name[current_testname] equal[==] name[curr_testname]] begin[:] variable[start_test] assign[=] constant[True] <ast.AugAssign object at 0x7da20e9b28c0> <ast.AugAssign object at 0x7da20e9b0520> <ast.AugAssign object at 0x7da20e9b3a60> <ast.AugAssign object at 0x7da20e9b26e0> <ast.AugAssign object at 0x7da20e9b3e20> if name[start_test] begin[:] <ast.AugAssign object at 0x7da20e9b1480> call[name[java_file].close, parameter[]] if name[start_test] begin[:] break return[name[java_messages]]
keyword[def] identifier[grab_java_message] ( identifier[node_list] , identifier[curr_testname] ): literal[string] keyword[global] identifier[g_java_start_text] identifier[java_messages] = literal[string] identifier[start_test] = keyword[False] keyword[for] identifier[each_node] keyword[in] identifier[node_list] : identifier[java_filename] = identifier[each_node] . identifier[output_file_name] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[java_filename] ): identifier[java_file] = identifier[open] ( identifier[java_filename] , literal[string] ) keyword[for] identifier[each_line] keyword[in] identifier[java_file] : keyword[if] identifier[g_java_start_text] keyword[in] identifier[each_line] : identifier[start_str] , identifier[found] , identifier[end_str] = identifier[each_line] . identifier[partition] ( identifier[g_java_start_text] ) keyword[if] identifier[len] ( identifier[found] )> literal[int] : identifier[current_testname] = identifier[end_str] . identifier[strip] () keyword[if] identifier[current_testname] == identifier[curr_testname] : identifier[start_test] = keyword[True] identifier[java_messages] += literal[string] identifier[java_messages] += literal[string] identifier[java_messages] += literal[string] identifier[java_messages] += literal[string] identifier[java_messages] += literal[string] keyword[else] : keyword[if] identifier[start_test] : keyword[break] keyword[if] identifier[start_test] : identifier[java_messages] += identifier[each_line] identifier[java_file] . identifier[close] () keyword[if] identifier[start_test] : keyword[break] keyword[return] identifier[java_messages]
def grab_java_message(node_list, curr_testname): """scan through the java output text and extract the java messages related to running test specified in curr_testname. Parameters ---------- :param node_list: list of H2O nodes List of H2o nodes associated with an H2OCloud (cluster) that are performing the test specified in curr_testname. :param curr_testname: str Store the unit test name (can be R unit or Py unit) that has been completed and failed. :return: a string object that is either empty or the java messages that associated with the test in curr_testname. The java messages can usually be found in one of the java_*_0.out.txt """ global g_java_start_text # contains text that describe the start of a unit test. java_messages = '' start_test = False # denote when the current test was found in the java_*_0.out.txt file # grab each java file and try to grab the java messages associated with curr_testname for each_node in node_list: java_filename = each_node.output_file_name # find the java_*_0.out.txt file if os.path.isfile(java_filename): java_file = open(java_filename, 'r') for each_line in java_file: if g_java_start_text in each_line: (start_str, found, end_str) = each_line.partition(g_java_start_text) if len(found) > 0: # a new test is being started. current_testname = end_str.strip() # grab the test name and check if it is curr_testname if current_testname == curr_testname: # found the line starting with current test. Grab everything now start_test = True # found text in java_*_0.out.txt that describe curr_testname # add header to make JAVA messages visible. 
java_messages += '\n\n**********************************************************\n' java_messages += '**********************************************************\n' java_messages += 'JAVA Messages\n' java_messages += '**********************************************************\n' java_messages += '**********************************************************\n\n' # depends on [control=['if'], data=[]] # found a differnt test than our curr_testname. We are done! elif start_test: # in the middle of curr_testname but found a new test starting, can quit now. break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['g_java_start_text', 'each_line']] # store java message associated with curr_testname into java_messages if start_test: java_messages += each_line # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['each_line']] java_file.close() # finished finding java messages # depends on [control=['if'], data=[]] if start_test: # found java message associate with our test already. No need to continue the loop. break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['each_node']] return java_messages
def resize(self, image, size): """ Resizes the image :param image: The image object :param size: size is PIL tuple (width, heigth, force) ex: (200,100,True) """ (width, height, force) = size if image.size[0] > width or image.size[1] > height: if force: return ImageOps.fit(self.image, (width, height), Image.ANTIALIAS) else: thumb = self.image.copy() thumb.thumbnail((width, height), Image.ANTIALIAS) return thumb return image
def function[resize, parameter[self, image, size]]: constant[ Resizes the image :param image: The image object :param size: size is PIL tuple (width, heigth, force) ex: (200,100,True) ] <ast.Tuple object at 0x7da20e955d50> assign[=] name[size] if <ast.BoolOp object at 0x7da20e954490> begin[:] if name[force] begin[:] return[call[name[ImageOps].fit, parameter[name[self].image, tuple[[<ast.Name object at 0x7da20e955270>, <ast.Name object at 0x7da20e954af0>]], name[Image].ANTIALIAS]]] return[name[image]]
keyword[def] identifier[resize] ( identifier[self] , identifier[image] , identifier[size] ): literal[string] ( identifier[width] , identifier[height] , identifier[force] )= identifier[size] keyword[if] identifier[image] . identifier[size] [ literal[int] ]> identifier[width] keyword[or] identifier[image] . identifier[size] [ literal[int] ]> identifier[height] : keyword[if] identifier[force] : keyword[return] identifier[ImageOps] . identifier[fit] ( identifier[self] . identifier[image] ,( identifier[width] , identifier[height] ), identifier[Image] . identifier[ANTIALIAS] ) keyword[else] : identifier[thumb] = identifier[self] . identifier[image] . identifier[copy] () identifier[thumb] . identifier[thumbnail] (( identifier[width] , identifier[height] ), identifier[Image] . identifier[ANTIALIAS] ) keyword[return] identifier[thumb] keyword[return] identifier[image]
def resize(self, image, size): """ Resizes the image :param image: The image object :param size: size is PIL tuple (width, heigth, force) ex: (200,100,True) """ (width, height, force) = size if image.size[0] > width or image.size[1] > height: if force: return ImageOps.fit(self.image, (width, height), Image.ANTIALIAS) # depends on [control=['if'], data=[]] else: thumb = self.image.copy() thumb.thumbnail((width, height), Image.ANTIALIAS) return thumb # depends on [control=['if'], data=[]] return image
def limits(self, clip_negative=True): """Return intensity limits, i.e. (min, max) tuple, of the dtype. Args: clip_negative : bool, optional If True, clip the negative range (i.e. return 0 for min intensity) even if the image dtype allows negative values. Returns min, max : tuple Lower and upper intensity limits. """ min, max = dtype_range[self.as_numpy_dtype] # pylint: disable=redefined-builtin if clip_negative: min = 0 # pylint: disable=redefined-builtin return min, max
def function[limits, parameter[self, clip_negative]]: constant[Return intensity limits, i.e. (min, max) tuple, of the dtype. Args: clip_negative : bool, optional If True, clip the negative range (i.e. return 0 for min intensity) even if the image dtype allows negative values. Returns min, max : tuple Lower and upper intensity limits. ] <ast.Tuple object at 0x7da1b1f44700> assign[=] call[name[dtype_range]][name[self].as_numpy_dtype] if name[clip_negative] begin[:] variable[min] assign[=] constant[0] return[tuple[[<ast.Name object at 0x7da1b21a4250>, <ast.Name object at 0x7da1b21a69b0>]]]
keyword[def] identifier[limits] ( identifier[self] , identifier[clip_negative] = keyword[True] ): literal[string] identifier[min] , identifier[max] = identifier[dtype_range] [ identifier[self] . identifier[as_numpy_dtype] ] keyword[if] identifier[clip_negative] : identifier[min] = literal[int] keyword[return] identifier[min] , identifier[max]
def limits(self, clip_negative=True): """Return intensity limits, i.e. (min, max) tuple, of the dtype. Args: clip_negative : bool, optional If True, clip the negative range (i.e. return 0 for min intensity) even if the image dtype allows negative values. Returns min, max : tuple Lower and upper intensity limits. """ (min, max) = dtype_range[self.as_numpy_dtype] # pylint: disable=redefined-builtin if clip_negative: min = 0 # pylint: disable=redefined-builtin # depends on [control=['if'], data=[]] return (min, max)
def add_env(self, key, value): """ Add an environemnt variable For Ubuntu, the best place is /etc/environment. Values placed here do not need to be exported. """ boto.log.info('Adding env variable: %s=%s' % (key, value)) if not os.path.exists("/etc/environment.orig"): self.run('cp /etc/environment /etc/environment.orig', notify=False, exit_on_error=False) fp = open('/etc/environment', 'a') fp.write('\n%s="%s"' % (key, value)) fp.close() os.environ[key] = value
def function[add_env, parameter[self, key, value]]: constant[ Add an environemnt variable For Ubuntu, the best place is /etc/environment. Values placed here do not need to be exported. ] call[name[boto].log.info, parameter[binary_operation[constant[Adding env variable: %s=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26158a0>, <ast.Name object at 0x7da1b26158d0>]]]]] if <ast.UnaryOp object at 0x7da1b26176a0> begin[:] call[name[self].run, parameter[constant[cp /etc/environment /etc/environment.orig]]] variable[fp] assign[=] call[name[open], parameter[constant[/etc/environment], constant[a]]] call[name[fp].write, parameter[binary_operation[constant[ %s="%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2615f00>, <ast.Name object at 0x7da1b2617df0>]]]]] call[name[fp].close, parameter[]] call[name[os].environ][name[key]] assign[=] name[value]
keyword[def] identifier[add_env] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] identifier[boto] . identifier[log] . identifier[info] ( literal[string] %( identifier[key] , identifier[value] )) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( literal[string] ): identifier[self] . identifier[run] ( literal[string] , identifier[notify] = keyword[False] , identifier[exit_on_error] = keyword[False] ) identifier[fp] = identifier[open] ( literal[string] , literal[string] ) identifier[fp] . identifier[write] ( literal[string] %( identifier[key] , identifier[value] )) identifier[fp] . identifier[close] () identifier[os] . identifier[environ] [ identifier[key] ]= identifier[value]
def add_env(self, key, value): """ Add an environemnt variable For Ubuntu, the best place is /etc/environment. Values placed here do not need to be exported. """ boto.log.info('Adding env variable: %s=%s' % (key, value)) if not os.path.exists('/etc/environment.orig'): self.run('cp /etc/environment /etc/environment.orig', notify=False, exit_on_error=False) # depends on [control=['if'], data=[]] fp = open('/etc/environment', 'a') fp.write('\n%s="%s"' % (key, value)) fp.close() os.environ[key] = value
def iterqueue(self, limit=None, infinite=False): """Infinite iterator yielding pending messages, by using synchronous direct access to the queue (``basic_get``). :meth:`iterqueue` is used where synchronous functionality is more important than performance. If you can, use :meth:`iterconsume` instead. :keyword limit: If set, the iterator stops when it has processed this number of messages in total. :keyword infinite: Don't raise :exc:`StopIteration` if there is no messages waiting, but return ``None`` instead. If infinite you obviously shouldn't consume the whole iterator at once without using a ``limit``. :raises StopIteration: If there is no messages waiting, and the iterator is not infinite. """ for items_since_start in count(): item = self.fetch() if (not infinite and item is None) or \ (limit and items_since_start >= limit): raise StopIteration yield item
def function[iterqueue, parameter[self, limit, infinite]]: constant[Infinite iterator yielding pending messages, by using synchronous direct access to the queue (``basic_get``). :meth:`iterqueue` is used where synchronous functionality is more important than performance. If you can, use :meth:`iterconsume` instead. :keyword limit: If set, the iterator stops when it has processed this number of messages in total. :keyword infinite: Don't raise :exc:`StopIteration` if there is no messages waiting, but return ``None`` instead. If infinite you obviously shouldn't consume the whole iterator at once without using a ``limit``. :raises StopIteration: If there is no messages waiting, and the iterator is not infinite. ] for taget[name[items_since_start]] in starred[call[name[count], parameter[]]] begin[:] variable[item] assign[=] call[name[self].fetch, parameter[]] if <ast.BoolOp object at 0x7da1b0f5a650> begin[:] <ast.Raise object at 0x7da1b0f5a140> <ast.Yield object at 0x7da1b0f5a620>
keyword[def] identifier[iterqueue] ( identifier[self] , identifier[limit] = keyword[None] , identifier[infinite] = keyword[False] ): literal[string] keyword[for] identifier[items_since_start] keyword[in] identifier[count] (): identifier[item] = identifier[self] . identifier[fetch] () keyword[if] ( keyword[not] identifier[infinite] keyword[and] identifier[item] keyword[is] keyword[None] ) keyword[or] ( identifier[limit] keyword[and] identifier[items_since_start] >= identifier[limit] ): keyword[raise] identifier[StopIteration] keyword[yield] identifier[item]
def iterqueue(self, limit=None, infinite=False): """Infinite iterator yielding pending messages, by using synchronous direct access to the queue (``basic_get``). :meth:`iterqueue` is used where synchronous functionality is more important than performance. If you can, use :meth:`iterconsume` instead. :keyword limit: If set, the iterator stops when it has processed this number of messages in total. :keyword infinite: Don't raise :exc:`StopIteration` if there is no messages waiting, but return ``None`` instead. If infinite you obviously shouldn't consume the whole iterator at once without using a ``limit``. :raises StopIteration: If there is no messages waiting, and the iterator is not infinite. """ for items_since_start in count(): item = self.fetch() if not infinite and item is None or (limit and items_since_start >= limit): raise StopIteration # depends on [control=['if'], data=[]] yield item # depends on [control=['for'], data=['items_since_start']]
def raise_302(instance, location): """Abort the current request with a 302 (Found) response code. Sets the Location header correctly. If the location does not start with a slash, the path of the current request is prepended. :param instance: Resource instance (used to access the response) :type instance: :class:`webob.resource.Resource` :raises: :class:`webob.exceptions.ResponseException` of status 302 """ _set_location(instance, location) instance.response.status = 302 raise ResponseException(instance.response)
def function[raise_302, parameter[instance, location]]: constant[Abort the current request with a 302 (Found) response code. Sets the Location header correctly. If the location does not start with a slash, the path of the current request is prepended. :param instance: Resource instance (used to access the response) :type instance: :class:`webob.resource.Resource` :raises: :class:`webob.exceptions.ResponseException` of status 302 ] call[name[_set_location], parameter[name[instance], name[location]]] name[instance].response.status assign[=] constant[302] <ast.Raise object at 0x7da18bccb6a0>
keyword[def] identifier[raise_302] ( identifier[instance] , identifier[location] ): literal[string] identifier[_set_location] ( identifier[instance] , identifier[location] ) identifier[instance] . identifier[response] . identifier[status] = literal[int] keyword[raise] identifier[ResponseException] ( identifier[instance] . identifier[response] )
def raise_302(instance, location): """Abort the current request with a 302 (Found) response code. Sets the Location header correctly. If the location does not start with a slash, the path of the current request is prepended. :param instance: Resource instance (used to access the response) :type instance: :class:`webob.resource.Resource` :raises: :class:`webob.exceptions.ResponseException` of status 302 """ _set_location(instance, location) instance.response.status = 302 raise ResponseException(instance.response)
def determine_file_type(filename): """ :param filename: str :rtype: FileType """ if filename.endswith('.cls'): return FileType.CLS elif filename.endswith('.go'): return FileType.GO elif filename.endswith('.java'): return FileType.JAVA elif filename.endswith('.js'): return FileType.JAVASCRIPT elif filename.endswith('.php'): return FileType.PHP elif filename.endswith('.py'): return FileType.PYTHON elif ( filename.endswith( ('.yaml', '.yml'), ) ): return FileType.YAML return FileType.OTHER
def function[determine_file_type, parameter[filename]]: constant[ :param filename: str :rtype: FileType ] if call[name[filename].endswith, parameter[constant[.cls]]] begin[:] return[name[FileType].CLS] return[name[FileType].OTHER]
keyword[def] identifier[determine_file_type] ( identifier[filename] ): literal[string] keyword[if] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[return] identifier[FileType] . identifier[CLS] keyword[elif] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[return] identifier[FileType] . identifier[GO] keyword[elif] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[return] identifier[FileType] . identifier[JAVA] keyword[elif] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[return] identifier[FileType] . identifier[JAVASCRIPT] keyword[elif] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[return] identifier[FileType] . identifier[PHP] keyword[elif] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[return] identifier[FileType] . identifier[PYTHON] keyword[elif] ( identifier[filename] . identifier[endswith] ( ( literal[string] , literal[string] ), ) ): keyword[return] identifier[FileType] . identifier[YAML] keyword[return] identifier[FileType] . identifier[OTHER]
def determine_file_type(filename): """ :param filename: str :rtype: FileType """ if filename.endswith('.cls'): return FileType.CLS # depends on [control=['if'], data=[]] elif filename.endswith('.go'): return FileType.GO # depends on [control=['if'], data=[]] elif filename.endswith('.java'): return FileType.JAVA # depends on [control=['if'], data=[]] elif filename.endswith('.js'): return FileType.JAVASCRIPT # depends on [control=['if'], data=[]] elif filename.endswith('.php'): return FileType.PHP # depends on [control=['if'], data=[]] elif filename.endswith('.py'): return FileType.PYTHON # depends on [control=['if'], data=[]] elif filename.endswith(('.yaml', '.yml')): return FileType.YAML # depends on [control=['if'], data=[]] return FileType.OTHER
def complete_opt_format(self, text, *_): """ Autocomplete for format option """ return [t + " " for t in FORMATTERS if t.startswith(text)]
def function[complete_opt_format, parameter[self, text]]: constant[ Autocomplete for format option ] return[<ast.ListComp object at 0x7da1b0ccba30>]
keyword[def] identifier[complete_opt_format] ( identifier[self] , identifier[text] ,* identifier[_] ): literal[string] keyword[return] [ identifier[t] + literal[string] keyword[for] identifier[t] keyword[in] identifier[FORMATTERS] keyword[if] identifier[t] . identifier[startswith] ( identifier[text] )]
def complete_opt_format(self, text, *_): """ Autocomplete for format option """ return [t + ' ' for t in FORMATTERS if t.startswith(text)]
def getCorner(index,top,left,expand=0,y=206): ''' Return part of the XML string that defines the requested corner''' x = str(-(expand+old_div(ARENA_WIDTH,2))) if left else str(expand+old_div(ARENA_WIDTH,2)) z = str(-(expand+old_div(ARENA_BREADTH,2))) if top else str(expand+old_div(ARENA_BREADTH,2)) return 'x'+index+'="'+x+'" y'+index+'="' +str(y)+'" z'+index+'="'+z+'"'
def function[getCorner, parameter[index, top, left, expand, y]]: constant[ Return part of the XML string that defines the requested corner] variable[x] assign[=] <ast.IfExp object at 0x7da1b1b13940> variable[z] assign[=] <ast.IfExp object at 0x7da1b1a436d0> return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[x] + name[index]] + constant[="]] + name[x]] + constant[" y]] + name[index]] + constant[="]] + call[name[str], parameter[name[y]]]] + constant[" z]] + name[index]] + constant[="]] + name[z]] + constant["]]]
keyword[def] identifier[getCorner] ( identifier[index] , identifier[top] , identifier[left] , identifier[expand] = literal[int] , identifier[y] = literal[int] ): literal[string] identifier[x] = identifier[str] (-( identifier[expand] + identifier[old_div] ( identifier[ARENA_WIDTH] , literal[int] ))) keyword[if] identifier[left] keyword[else] identifier[str] ( identifier[expand] + identifier[old_div] ( identifier[ARENA_WIDTH] , literal[int] )) identifier[z] = identifier[str] (-( identifier[expand] + identifier[old_div] ( identifier[ARENA_BREADTH] , literal[int] ))) keyword[if] identifier[top] keyword[else] identifier[str] ( identifier[expand] + identifier[old_div] ( identifier[ARENA_BREADTH] , literal[int] )) keyword[return] literal[string] + identifier[index] + literal[string] + identifier[x] + literal[string] + identifier[index] + literal[string] + identifier[str] ( identifier[y] )+ literal[string] + identifier[index] + literal[string] + identifier[z] + literal[string]
def getCorner(index, top, left, expand=0, y=206): """ Return part of the XML string that defines the requested corner""" x = str(-(expand + old_div(ARENA_WIDTH, 2))) if left else str(expand + old_div(ARENA_WIDTH, 2)) z = str(-(expand + old_div(ARENA_BREADTH, 2))) if top else str(expand + old_div(ARENA_BREADTH, 2)) return 'x' + index + '="' + x + '" y' + index + '="' + str(y) + '" z' + index + '="' + z + '"'
def remove_positive_resample(X, y, model_generator, method_name, num_fcounts=11): """ Remove Positive (resample) xlabel = "Max fraction of features removed" ylabel = "Negative mean model output" transform = "negate" sort_order = 13 """ return __run_measure(measures.remove_resample, X, y, model_generator, method_name, 1, num_fcounts, __mean_pred)
def function[remove_positive_resample, parameter[X, y, model_generator, method_name, num_fcounts]]: constant[ Remove Positive (resample) xlabel = "Max fraction of features removed" ylabel = "Negative mean model output" transform = "negate" sort_order = 13 ] return[call[name[__run_measure], parameter[name[measures].remove_resample, name[X], name[y], name[model_generator], name[method_name], constant[1], name[num_fcounts], name[__mean_pred]]]]
keyword[def] identifier[remove_positive_resample] ( identifier[X] , identifier[y] , identifier[model_generator] , identifier[method_name] , identifier[num_fcounts] = literal[int] ): literal[string] keyword[return] identifier[__run_measure] ( identifier[measures] . identifier[remove_resample] , identifier[X] , identifier[y] , identifier[model_generator] , identifier[method_name] , literal[int] , identifier[num_fcounts] , identifier[__mean_pred] )
def remove_positive_resample(X, y, model_generator, method_name, num_fcounts=11): """ Remove Positive (resample) xlabel = "Max fraction of features removed" ylabel = "Negative mean model output" transform = "negate" sort_order = 13 """ return __run_measure(measures.remove_resample, X, y, model_generator, method_name, 1, num_fcounts, __mean_pred)
def get_node_type(dgtree): """Returns the type of the root node of a DGParentedTree.""" if is_leaf(dgtree): return TreeNodeTypes.leaf_node root_label = dgtree.label() if root_label == '': assert dgtree == DGParentedTree('', []), \ "The tree has no root label, but isn't empty: {}".format(dgtree) return TreeNodeTypes.empty_tree elif root_label in NUCLEARITY_LABELS: return TreeNodeTypes.nuclearity_node else: assert isinstance(dgtree, (RSTTree, DGParentedTree)), type(dgtree) return TreeNodeTypes.relation_node
def function[get_node_type, parameter[dgtree]]: constant[Returns the type of the root node of a DGParentedTree.] if call[name[is_leaf], parameter[name[dgtree]]] begin[:] return[name[TreeNodeTypes].leaf_node] variable[root_label] assign[=] call[name[dgtree].label, parameter[]] if compare[name[root_label] equal[==] constant[]] begin[:] assert[compare[name[dgtree] equal[==] call[name[DGParentedTree], parameter[constant[], list[[]]]]]] return[name[TreeNodeTypes].empty_tree]
keyword[def] identifier[get_node_type] ( identifier[dgtree] ): literal[string] keyword[if] identifier[is_leaf] ( identifier[dgtree] ): keyword[return] identifier[TreeNodeTypes] . identifier[leaf_node] identifier[root_label] = identifier[dgtree] . identifier[label] () keyword[if] identifier[root_label] == literal[string] : keyword[assert] identifier[dgtree] == identifier[DGParentedTree] ( literal[string] ,[]), literal[string] . identifier[format] ( identifier[dgtree] ) keyword[return] identifier[TreeNodeTypes] . identifier[empty_tree] keyword[elif] identifier[root_label] keyword[in] identifier[NUCLEARITY_LABELS] : keyword[return] identifier[TreeNodeTypes] . identifier[nuclearity_node] keyword[else] : keyword[assert] identifier[isinstance] ( identifier[dgtree] ,( identifier[RSTTree] , identifier[DGParentedTree] )), identifier[type] ( identifier[dgtree] ) keyword[return] identifier[TreeNodeTypes] . identifier[relation_node]
def get_node_type(dgtree): """Returns the type of the root node of a DGParentedTree.""" if is_leaf(dgtree): return TreeNodeTypes.leaf_node # depends on [control=['if'], data=[]] root_label = dgtree.label() if root_label == '': assert dgtree == DGParentedTree('', []), "The tree has no root label, but isn't empty: {}".format(dgtree) return TreeNodeTypes.empty_tree # depends on [control=['if'], data=[]] elif root_label in NUCLEARITY_LABELS: return TreeNodeTypes.nuclearity_node # depends on [control=['if'], data=[]] else: assert isinstance(dgtree, (RSTTree, DGParentedTree)), type(dgtree) return TreeNodeTypes.relation_node
def _chunks(iterable, n): """ Splits an iterable into chunks of size n. """ iterable = iter(iterable) while True: # store one line in memory, # chain it to an iterator on the rest of the chunk yield chain([next(iterable)], islice(iterable, n-1))
def function[_chunks, parameter[iterable, n]]: constant[ Splits an iterable into chunks of size n. ] variable[iterable] assign[=] call[name[iter], parameter[name[iterable]]] while constant[True] begin[:] <ast.Yield object at 0x7da204623e50>
keyword[def] identifier[_chunks] ( identifier[iterable] , identifier[n] ): literal[string] identifier[iterable] = identifier[iter] ( identifier[iterable] ) keyword[while] keyword[True] : keyword[yield] identifier[chain] ([ identifier[next] ( identifier[iterable] )], identifier[islice] ( identifier[iterable] , identifier[n] - literal[int] ))
def _chunks(iterable, n): """ Splits an iterable into chunks of size n. """ iterable = iter(iterable) while True: # store one line in memory, # chain it to an iterator on the rest of the chunk yield chain([next(iterable)], islice(iterable, n - 1)) # depends on [control=['while'], data=[]]
def trigger_show_by_trigger_name(self, trigger_name, **kwargs): "https://developer.zendesk.com/rest_api/docs/chat/triggers#get-a-trigger" api_path = "/api/v2/triggers/{trigger_name}" api_path = api_path.format(trigger_name=trigger_name) return self.call(api_path, **kwargs)
def function[trigger_show_by_trigger_name, parameter[self, trigger_name]]: constant[https://developer.zendesk.com/rest_api/docs/chat/triggers#get-a-trigger] variable[api_path] assign[=] constant[/api/v2/triggers/{trigger_name}] variable[api_path] assign[=] call[name[api_path].format, parameter[]] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[trigger_show_by_trigger_name] ( identifier[self] , identifier[trigger_name] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[trigger_name] = identifier[trigger_name] ) keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] )
def trigger_show_by_trigger_name(self, trigger_name, **kwargs): """https://developer.zendesk.com/rest_api/docs/chat/triggers#get-a-trigger""" api_path = '/api/v2/triggers/{trigger_name}' api_path = api_path.format(trigger_name=trigger_name) return self.call(api_path, **kwargs)
def compile(pattern, flags=0, sep=None, split_prefix=False): ''' Converts a glob-matching pattern (using Apache Cocoon style rules) to a regular expression, which basically means that the following characters have special meanings: * ``?``: matches any single character excluding the separator character * ``*``: matches zero or more characters excluding the separator character * ``**``: matches zero or more characters including the separator character * ``\``: escape character used to precede any of the others for a literal * ``[...]``: matches any character in the specified regex-style range * ``{...}``: inlines a regex expression :Parameters: sep : str; default: "/" The `sep` parameter specifies the hierarchical path component separator to use. By default, it uses the unix-style forward-slash separator (``"/"``), but can be overriden to be a sequence of alternative valid hierarchical path component separator characters. Note that although `sep` *could* be set to both forward- and back- slashes (i.e. ``"/\\"``) to, theoretically, support either unix- and windows-style path components, this has the significant flaw that then *both* characters can be used within the same path as separators. flags : int; default: 0 The `flags` bit mask can contain all the standard `re` flags, in addition to the ``globre.EXACT`` flag. If EXACT is set, then the returned regex will include a leading '^' and trailing '$', meaning that the regex must match the entire string, from beginning to end. split_prefix : bool; default: false If `split_prefix` is truthy, the return value becomes a tuple with the first element set to any initial non-wildcarded string found in the pattern. The second element remains the regex object as before. For example, the pattern ``foo/**.ini`` would result in a tuple equivalent to ``('foo/', re.compile('foo/.*\\.ini'))``. 
''' prefix = None expr = '' if sep is None: sep = '/' if not sep: TypeError('invalid parameter "sep" value: %r' % (sep,)) if set(sep) & set(SPECIAL_CHARS): TypeError('parameter "sep" cannot contain any of %r' % (SPECIAL_CHARS,)) if len(sep) == 1: literal = re.escape else: def make_literal(sep): sep = '[' + re.escape(sep) + ']' sepcre = re.compile(sep) def _literal(text): return sep.join(sepcre.split(text)) return _literal literal = make_literal(sep) if sep != '/': sep = re.escape(sep) for token in Tokenizer(pattern).tokens(): if split_prefix and expr == '': prefix = token[1] if token[0] == Tokenizer.LITERAL else '' if token[0] == Tokenizer.LITERAL: expr += literal(token[1]) elif token[0] == Tokenizer.SINGLE: expr += '[^' + sep + ']' elif token[0] == Tokenizer.MULTIPLE: expr += '[^' + sep + ']*?' elif token[0] == Tokenizer.ANY: expr += '.*?' elif token[0] == Tokenizer.RANGE: expr += '[' + token[1] + ']' elif token[0] == Tokenizer.REGEX: expr += token[1] else: ValueError('unexpected token %r from globre.Tokenizer for glob: %s' % (token, pattern)) if flags & EXACT: if not expr.startswith('^'): expr = '^' + expr # todo: technically, the last "$" *could* be escaped and therefore # an extra "$" would need to be added... but that is very unlikely. if not expr.endswith('$'): expr += '$' expr = re.compile(expr, flags=flags & ~ EXACT) if prefix is not None: return (prefix, expr) return expr
def function[compile, parameter[pattern, flags, sep, split_prefix]]: constant[ Converts a glob-matching pattern (using Apache Cocoon style rules) to a regular expression, which basically means that the following characters have special meanings: * ``?``: matches any single character excluding the separator character * ``*``: matches zero or more characters excluding the separator character * ``**``: matches zero or more characters including the separator character * ``\``: escape character used to precede any of the others for a literal * ``[...]``: matches any character in the specified regex-style range * ``{...}``: inlines a regex expression :Parameters: sep : str; default: "/" The `sep` parameter specifies the hierarchical path component separator to use. By default, it uses the unix-style forward-slash separator (``"/"``), but can be overriden to be a sequence of alternative valid hierarchical path component separator characters. Note that although `sep` *could* be set to both forward- and back- slashes (i.e. ``"/\"``) to, theoretically, support either unix- and windows-style path components, this has the significant flaw that then *both* characters can be used within the same path as separators. flags : int; default: 0 The `flags` bit mask can contain all the standard `re` flags, in addition to the ``globre.EXACT`` flag. If EXACT is set, then the returned regex will include a leading '^' and trailing '$', meaning that the regex must match the entire string, from beginning to end. split_prefix : bool; default: false If `split_prefix` is truthy, the return value becomes a tuple with the first element set to any initial non-wildcarded string found in the pattern. The second element remains the regex object as before. For example, the pattern ``foo/**.ini`` would result in a tuple equivalent to ``('foo/', re.compile('foo/.*\.ini'))``. 
] variable[prefix] assign[=] constant[None] variable[expr] assign[=] constant[] if compare[name[sep] is constant[None]] begin[:] variable[sep] assign[=] constant[/] if <ast.UnaryOp object at 0x7da1b0a73070> begin[:] call[name[TypeError], parameter[binary_operation[constant[invalid parameter "sep" value: %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a721a0>]]]]] if binary_operation[call[name[set], parameter[name[sep]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[name[SPECIAL_CHARS]]]] begin[:] call[name[TypeError], parameter[binary_operation[constant[parameter "sep" cannot contain any of %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a713f0>]]]]] if compare[call[name[len], parameter[name[sep]]] equal[==] constant[1]] begin[:] variable[literal] assign[=] name[re].escape if compare[name[sep] not_equal[!=] constant[/]] begin[:] variable[sep] assign[=] call[name[re].escape, parameter[name[sep]]] for taget[name[token]] in starred[call[call[name[Tokenizer], parameter[name[pattern]]].tokens, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b0a71db0> begin[:] variable[prefix] assign[=] <ast.IfExp object at 0x7da1b0a72cb0> if compare[call[name[token]][constant[0]] equal[==] name[Tokenizer].LITERAL] begin[:] <ast.AugAssign object at 0x7da1b0a71bd0> if binary_operation[name[flags] <ast.BitAnd object at 0x7da2590d6b60> name[EXACT]] begin[:] if <ast.UnaryOp object at 0x7da1b0aed270> begin[:] variable[expr] assign[=] binary_operation[constant[^] + name[expr]] if <ast.UnaryOp object at 0x7da1b0aee260> begin[:] <ast.AugAssign object at 0x7da1b0aef790> variable[expr] assign[=] call[name[re].compile, parameter[name[expr]]] if compare[name[prefix] is_not constant[None]] begin[:] return[tuple[[<ast.Name object at 0x7da1b0aedde0>, <ast.Name object at 0x7da1b0aee8f0>]]] return[name[expr]]
keyword[def] identifier[compile] ( identifier[pattern] , identifier[flags] = literal[int] , identifier[sep] = keyword[None] , identifier[split_prefix] = keyword[False] ): literal[string] identifier[prefix] = keyword[None] identifier[expr] = literal[string] keyword[if] identifier[sep] keyword[is] keyword[None] : identifier[sep] = literal[string] keyword[if] keyword[not] identifier[sep] : identifier[TypeError] ( literal[string] %( identifier[sep] ,)) keyword[if] identifier[set] ( identifier[sep] )& identifier[set] ( identifier[SPECIAL_CHARS] ): identifier[TypeError] ( literal[string] %( identifier[SPECIAL_CHARS] ,)) keyword[if] identifier[len] ( identifier[sep] )== literal[int] : identifier[literal] = identifier[re] . identifier[escape] keyword[else] : keyword[def] identifier[make_literal] ( identifier[sep] ): identifier[sep] = literal[string] + identifier[re] . identifier[escape] ( identifier[sep] )+ literal[string] identifier[sepcre] = identifier[re] . identifier[compile] ( identifier[sep] ) keyword[def] identifier[_literal] ( identifier[text] ): keyword[return] identifier[sep] . identifier[join] ( identifier[sepcre] . identifier[split] ( identifier[text] )) keyword[return] identifier[_literal] identifier[literal] = identifier[make_literal] ( identifier[sep] ) keyword[if] identifier[sep] != literal[string] : identifier[sep] = identifier[re] . identifier[escape] ( identifier[sep] ) keyword[for] identifier[token] keyword[in] identifier[Tokenizer] ( identifier[pattern] ). identifier[tokens] (): keyword[if] identifier[split_prefix] keyword[and] identifier[expr] == literal[string] : identifier[prefix] = identifier[token] [ literal[int] ] keyword[if] identifier[token] [ literal[int] ]== identifier[Tokenizer] . identifier[LITERAL] keyword[else] literal[string] keyword[if] identifier[token] [ literal[int] ]== identifier[Tokenizer] . 
identifier[LITERAL] : identifier[expr] += identifier[literal] ( identifier[token] [ literal[int] ]) keyword[elif] identifier[token] [ literal[int] ]== identifier[Tokenizer] . identifier[SINGLE] : identifier[expr] += literal[string] + identifier[sep] + literal[string] keyword[elif] identifier[token] [ literal[int] ]== identifier[Tokenizer] . identifier[MULTIPLE] : identifier[expr] += literal[string] + identifier[sep] + literal[string] keyword[elif] identifier[token] [ literal[int] ]== identifier[Tokenizer] . identifier[ANY] : identifier[expr] += literal[string] keyword[elif] identifier[token] [ literal[int] ]== identifier[Tokenizer] . identifier[RANGE] : identifier[expr] += literal[string] + identifier[token] [ literal[int] ]+ literal[string] keyword[elif] identifier[token] [ literal[int] ]== identifier[Tokenizer] . identifier[REGEX] : identifier[expr] += identifier[token] [ literal[int] ] keyword[else] : identifier[ValueError] ( literal[string] %( identifier[token] , identifier[pattern] )) keyword[if] identifier[flags] & identifier[EXACT] : keyword[if] keyword[not] identifier[expr] . identifier[startswith] ( literal[string] ): identifier[expr] = literal[string] + identifier[expr] keyword[if] keyword[not] identifier[expr] . identifier[endswith] ( literal[string] ): identifier[expr] += literal[string] identifier[expr] = identifier[re] . identifier[compile] ( identifier[expr] , identifier[flags] = identifier[flags] &~ identifier[EXACT] ) keyword[if] identifier[prefix] keyword[is] keyword[not] keyword[None] : keyword[return] ( identifier[prefix] , identifier[expr] ) keyword[return] identifier[expr]
def compile(pattern, flags=0, sep=None, split_prefix=False): """ Converts a glob-matching pattern (using Apache Cocoon style rules) to a regular expression, which basically means that the following characters have special meanings: * ``?``: matches any single character excluding the separator character * ``*``: matches zero or more characters excluding the separator character * ``**``: matches zero or more characters including the separator character * ``\\``: escape character used to precede any of the others for a literal * ``[...]``: matches any character in the specified regex-style range * ``{...}``: inlines a regex expression :Parameters: sep : str; default: "/" The `sep` parameter specifies the hierarchical path component separator to use. By default, it uses the unix-style forward-slash separator (``"/"``), but can be overriden to be a sequence of alternative valid hierarchical path component separator characters. Note that although `sep` *could* be set to both forward- and back- slashes (i.e. ``"/\\"``) to, theoretically, support either unix- and windows-style path components, this has the significant flaw that then *both* characters can be used within the same path as separators. flags : int; default: 0 The `flags` bit mask can contain all the standard `re` flags, in addition to the ``globre.EXACT`` flag. If EXACT is set, then the returned regex will include a leading '^' and trailing '$', meaning that the regex must match the entire string, from beginning to end. split_prefix : bool; default: false If `split_prefix` is truthy, the return value becomes a tuple with the first element set to any initial non-wildcarded string found in the pattern. The second element remains the regex object as before. For example, the pattern ``foo/**.ini`` would result in a tuple equivalent to ``('foo/', re.compile('foo/.*\\.ini'))``. 
""" prefix = None expr = '' if sep is None: sep = '/' # depends on [control=['if'], data=['sep']] if not sep: TypeError('invalid parameter "sep" value: %r' % (sep,)) # depends on [control=['if'], data=[]] if set(sep) & set(SPECIAL_CHARS): TypeError('parameter "sep" cannot contain any of %r' % (SPECIAL_CHARS,)) # depends on [control=['if'], data=[]] if len(sep) == 1: literal = re.escape # depends on [control=['if'], data=[]] else: def make_literal(sep): sep = '[' + re.escape(sep) + ']' sepcre = re.compile(sep) def _literal(text): return sep.join(sepcre.split(text)) return _literal literal = make_literal(sep) if sep != '/': sep = re.escape(sep) # depends on [control=['if'], data=['sep']] for token in Tokenizer(pattern).tokens(): if split_prefix and expr == '': prefix = token[1] if token[0] == Tokenizer.LITERAL else '' # depends on [control=['if'], data=[]] if token[0] == Tokenizer.LITERAL: expr += literal(token[1]) # depends on [control=['if'], data=[]] elif token[0] == Tokenizer.SINGLE: expr += '[^' + sep + ']' # depends on [control=['if'], data=[]] elif token[0] == Tokenizer.MULTIPLE: expr += '[^' + sep + ']*?' # depends on [control=['if'], data=[]] elif token[0] == Tokenizer.ANY: expr += '.*?' # depends on [control=['if'], data=[]] elif token[0] == Tokenizer.RANGE: expr += '[' + token[1] + ']' # depends on [control=['if'], data=[]] elif token[0] == Tokenizer.REGEX: expr += token[1] # depends on [control=['if'], data=[]] else: ValueError('unexpected token %r from globre.Tokenizer for glob: %s' % (token, pattern)) # depends on [control=['for'], data=['token']] if flags & EXACT: if not expr.startswith('^'): expr = '^' + expr # depends on [control=['if'], data=[]] # todo: technically, the last "$" *could* be escaped and therefore # an extra "$" would need to be added... but that is very unlikely. 
if not expr.endswith('$'): expr += '$' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] expr = re.compile(expr, flags=flags & ~EXACT) if prefix is not None: return (prefix, expr) # depends on [control=['if'], data=['prefix']] return expr
def search_users(self): """ :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-users-search :allowed_param:'q', 'count', 'page' """ return bind_api( api=self, path='/users/search.json', payload_type='user', payload_list=True, require_auth=True, allowed_param=['q', 'count', 'page'] )
def function[search_users, parameter[self]]: constant[ :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-users-search :allowed_param:'q', 'count', 'page' ] return[call[name[bind_api], parameter[]]]
keyword[def] identifier[search_users] ( identifier[self] ): literal[string] keyword[return] identifier[bind_api] ( identifier[api] = identifier[self] , identifier[path] = literal[string] , identifier[payload_type] = literal[string] , identifier[payload_list] = keyword[True] , identifier[require_auth] = keyword[True] , identifier[allowed_param] =[ literal[string] , literal[string] , literal[string] ] )
def search_users(self): """ :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-users-search :allowed_param:'q', 'count', 'page' """ return bind_api(api=self, path='/users/search.json', payload_type='user', payload_list=True, require_auth=True, allowed_param=['q', 'count', 'page'])
def get_tunnel_info_output_tunnel_oper_state(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_tunnel_info = ET.Element("get_tunnel_info") config = get_tunnel_info output = ET.SubElement(get_tunnel_info, "output") tunnel = ET.SubElement(output, "tunnel") oper_state = ET.SubElement(tunnel, "oper-state") oper_state.text = kwargs.pop('oper_state') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[get_tunnel_info_output_tunnel_oper_state, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[get_tunnel_info] assign[=] call[name[ET].Element, parameter[constant[get_tunnel_info]]] variable[config] assign[=] name[get_tunnel_info] variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_tunnel_info], constant[output]]] variable[tunnel] assign[=] call[name[ET].SubElement, parameter[name[output], constant[tunnel]]] variable[oper_state] assign[=] call[name[ET].SubElement, parameter[name[tunnel], constant[oper-state]]] name[oper_state].text assign[=] call[name[kwargs].pop, parameter[constant[oper_state]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[get_tunnel_info_output_tunnel_oper_state] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[get_tunnel_info] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[get_tunnel_info] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_tunnel_info] , literal[string] ) identifier[tunnel] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[oper_state] = identifier[ET] . identifier[SubElement] ( identifier[tunnel] , literal[string] ) identifier[oper_state] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def get_tunnel_info_output_tunnel_oper_state(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') get_tunnel_info = ET.Element('get_tunnel_info') config = get_tunnel_info output = ET.SubElement(get_tunnel_info, 'output') tunnel = ET.SubElement(output, 'tunnel') oper_state = ET.SubElement(tunnel, 'oper-state') oper_state.text = kwargs.pop('oper_state') callback = kwargs.pop('callback', self._callback) return callback(config)
def get_accessibility(self, plugin_override=True): """ Return the AccessibleTime object associated with the accessibility of this course """ vals = self._hook_manager.call_hook('course_accessibility', course=self, default=self._accessible) return vals[0] if len(vals) and plugin_override else self._accessible
def function[get_accessibility, parameter[self, plugin_override]]: constant[ Return the AccessibleTime object associated with the accessibility of this course ] variable[vals] assign[=] call[name[self]._hook_manager.call_hook, parameter[constant[course_accessibility]]] return[<ast.IfExp object at 0x7da18dc07250>]
keyword[def] identifier[get_accessibility] ( identifier[self] , identifier[plugin_override] = keyword[True] ): literal[string] identifier[vals] = identifier[self] . identifier[_hook_manager] . identifier[call_hook] ( literal[string] , identifier[course] = identifier[self] , identifier[default] = identifier[self] . identifier[_accessible] ) keyword[return] identifier[vals] [ literal[int] ] keyword[if] identifier[len] ( identifier[vals] ) keyword[and] identifier[plugin_override] keyword[else] identifier[self] . identifier[_accessible]
def get_accessibility(self, plugin_override=True): """ Return the AccessibleTime object associated with the accessibility of this course """ vals = self._hook_manager.call_hook('course_accessibility', course=self, default=self._accessible) return vals[0] if len(vals) and plugin_override else self._accessible
def _get_tables(self, base_dir): """Load the contents of meta_file and the corresponding data. If fields containing Personally Identifiable Information are detected in the metadata they are anonymized before asign them into `table_dict`. Args: base_dir(str): Root folder of the dataset files. Returns: dict: Mapping str -> tuple(pandas.DataFrame, dict) """ table_dict = {} for table in self.metadata['tables']: if table['use']: relative_path = os.path.join(base_dir, self.metadata['path'], table['path']) data_table = pd.read_csv(relative_path) pii_fields = self._get_pii_fields(table) data_table = self._anonymize_table(data_table, pii_fields) table_dict[table['name']] = (data_table, table) return table_dict
def function[_get_tables, parameter[self, base_dir]]: constant[Load the contents of meta_file and the corresponding data. If fields containing Personally Identifiable Information are detected in the metadata they are anonymized before asign them into `table_dict`. Args: base_dir(str): Root folder of the dataset files. Returns: dict: Mapping str -> tuple(pandas.DataFrame, dict) ] variable[table_dict] assign[=] dictionary[[], []] for taget[name[table]] in starred[call[name[self].metadata][constant[tables]]] begin[:] if call[name[table]][constant[use]] begin[:] variable[relative_path] assign[=] call[name[os].path.join, parameter[name[base_dir], call[name[self].metadata][constant[path]], call[name[table]][constant[path]]]] variable[data_table] assign[=] call[name[pd].read_csv, parameter[name[relative_path]]] variable[pii_fields] assign[=] call[name[self]._get_pii_fields, parameter[name[table]]] variable[data_table] assign[=] call[name[self]._anonymize_table, parameter[name[data_table], name[pii_fields]]] call[name[table_dict]][call[name[table]][constant[name]]] assign[=] tuple[[<ast.Name object at 0x7da207f98880>, <ast.Name object at 0x7da207f9a140>]] return[name[table_dict]]
keyword[def] identifier[_get_tables] ( identifier[self] , identifier[base_dir] ): literal[string] identifier[table_dict] ={} keyword[for] identifier[table] keyword[in] identifier[self] . identifier[metadata] [ literal[string] ]: keyword[if] identifier[table] [ literal[string] ]: identifier[relative_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[self] . identifier[metadata] [ literal[string] ], identifier[table] [ literal[string] ]) identifier[data_table] = identifier[pd] . identifier[read_csv] ( identifier[relative_path] ) identifier[pii_fields] = identifier[self] . identifier[_get_pii_fields] ( identifier[table] ) identifier[data_table] = identifier[self] . identifier[_anonymize_table] ( identifier[data_table] , identifier[pii_fields] ) identifier[table_dict] [ identifier[table] [ literal[string] ]]=( identifier[data_table] , identifier[table] ) keyword[return] identifier[table_dict]
def _get_tables(self, base_dir): """Load the contents of meta_file and the corresponding data. If fields containing Personally Identifiable Information are detected in the metadata they are anonymized before asign them into `table_dict`. Args: base_dir(str): Root folder of the dataset files. Returns: dict: Mapping str -> tuple(pandas.DataFrame, dict) """ table_dict = {} for table in self.metadata['tables']: if table['use']: relative_path = os.path.join(base_dir, self.metadata['path'], table['path']) data_table = pd.read_csv(relative_path) pii_fields = self._get_pii_fields(table) data_table = self._anonymize_table(data_table, pii_fields) table_dict[table['name']] = (data_table, table) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['table']] return table_dict
def quaternion_inverse(quaternion): """Return inverse of quaternion. >>> q0 = random_quaternion() >>> q1 = quaternion_inverse(q0) >>> np.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0]) True """ q = np.array(quaternion, dtype=np.float64, copy=True) np.negative(q[1:], q[1:]) return q / np.dot(q, q)
def function[quaternion_inverse, parameter[quaternion]]: constant[Return inverse of quaternion. >>> q0 = random_quaternion() >>> q1 = quaternion_inverse(q0) >>> np.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0]) True ] variable[q] assign[=] call[name[np].array, parameter[name[quaternion]]] call[name[np].negative, parameter[call[name[q]][<ast.Slice object at 0x7da1b23c5060>], call[name[q]][<ast.Slice object at 0x7da1b23c62f0>]]] return[binary_operation[name[q] / call[name[np].dot, parameter[name[q], name[q]]]]]
keyword[def] identifier[quaternion_inverse] ( identifier[quaternion] ): literal[string] identifier[q] = identifier[np] . identifier[array] ( identifier[quaternion] , identifier[dtype] = identifier[np] . identifier[float64] , identifier[copy] = keyword[True] ) identifier[np] . identifier[negative] ( identifier[q] [ literal[int] :], identifier[q] [ literal[int] :]) keyword[return] identifier[q] / identifier[np] . identifier[dot] ( identifier[q] , identifier[q] )
def quaternion_inverse(quaternion): """Return inverse of quaternion. >>> q0 = random_quaternion() >>> q1 = quaternion_inverse(q0) >>> np.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0]) True """ q = np.array(quaternion, dtype=np.float64, copy=True) np.negative(q[1:], q[1:]) return q / np.dot(q, q)
def load_patt(filename): """Loads a file that was saved with the save_patt routine.""" with open(filename) as f: lines = f.readlines() lst = lines[0].split(',') patt = np.zeros([int(lst[0]), int(lst[1])], dtype=np.complex128) lines.pop(0) for line in lines: lst = line.split(',') n = int(lst[0]) m = int(lst[1]) re = float(lst[2]) im = float(lst[3]) patt[n, m] = re + 1j * im return sp.ScalarPatternUniform(patt, doublesphere=False)
def function[load_patt, parameter[filename]]: constant[Loads a file that was saved with the save_patt routine.] with call[name[open], parameter[name[filename]]] begin[:] variable[lines] assign[=] call[name[f].readlines, parameter[]] variable[lst] assign[=] call[call[name[lines]][constant[0]].split, parameter[constant[,]]] variable[patt] assign[=] call[name[np].zeros, parameter[list[[<ast.Call object at 0x7da1b0bb9450>, <ast.Call object at 0x7da1b0bb96f0>]]]] call[name[lines].pop, parameter[constant[0]]] for taget[name[line]] in starred[name[lines]] begin[:] variable[lst] assign[=] call[name[line].split, parameter[constant[,]]] variable[n] assign[=] call[name[int], parameter[call[name[lst]][constant[0]]]] variable[m] assign[=] call[name[int], parameter[call[name[lst]][constant[1]]]] variable[re] assign[=] call[name[float], parameter[call[name[lst]][constant[2]]]] variable[im] assign[=] call[name[float], parameter[call[name[lst]][constant[3]]]] call[name[patt]][tuple[[<ast.Name object at 0x7da1b0a2fb80>, <ast.Name object at 0x7da1b0a2fbb0>]]] assign[=] binary_operation[name[re] + binary_operation[constant[1j] * name[im]]] return[call[name[sp].ScalarPatternUniform, parameter[name[patt]]]]
keyword[def] identifier[load_patt] ( identifier[filename] ): literal[string] keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[f] : identifier[lines] = identifier[f] . identifier[readlines] () identifier[lst] = identifier[lines] [ literal[int] ]. identifier[split] ( literal[string] ) identifier[patt] = identifier[np] . identifier[zeros] ([ identifier[int] ( identifier[lst] [ literal[int] ]), identifier[int] ( identifier[lst] [ literal[int] ])], identifier[dtype] = identifier[np] . identifier[complex128] ) identifier[lines] . identifier[pop] ( literal[int] ) keyword[for] identifier[line] keyword[in] identifier[lines] : identifier[lst] = identifier[line] . identifier[split] ( literal[string] ) identifier[n] = identifier[int] ( identifier[lst] [ literal[int] ]) identifier[m] = identifier[int] ( identifier[lst] [ literal[int] ]) identifier[re] = identifier[float] ( identifier[lst] [ literal[int] ]) identifier[im] = identifier[float] ( identifier[lst] [ literal[int] ]) identifier[patt] [ identifier[n] , identifier[m] ]= identifier[re] + literal[int] * identifier[im] keyword[return] identifier[sp] . identifier[ScalarPatternUniform] ( identifier[patt] , identifier[doublesphere] = keyword[False] )
def load_patt(filename): """Loads a file that was saved with the save_patt routine.""" with open(filename) as f: lines = f.readlines() lst = lines[0].split(',') patt = np.zeros([int(lst[0]), int(lst[1])], dtype=np.complex128) lines.pop(0) for line in lines: lst = line.split(',') n = int(lst[0]) m = int(lst[1]) re = float(lst[2]) im = float(lst[3]) patt[n, m] = re + 1j * im # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']] return sp.ScalarPatternUniform(patt, doublesphere=False)
def get_occurrences(self, start, end, *, limit=None, query=None, order_by=None, batch=None): """ Returns all the occurrences of a seriesMaster event for a specified time range. :type start: datetime :param start: the start of the time range :type end: datetime :param end: the end of the time range :param int limit: ax no. of events to get. Over 999 uses batch. :type query: Query or str :param query: optional. extra filters or ordes to apply to this query :type order_by: str :param order_by: orders the result set based on this condition :param int batch: batch size, retrieves items in batches allowing to retrieve more items than the limit. :return: a list of events :rtype: list[Event] or Pagination """ if self.event_type != EventType.SeriesMaster: # you can only get occurrences if its a seriesMaster return [] url = self.build_url( self._endpoints.get('occurrences').format(id=self.object_id)) if limit is None or limit > self.protocol.max_top_value: batch = self.protocol.max_top_value params = {'$top': batch if batch else limit} if order_by: params['$orderby'] = order_by if query: if isinstance(query, str): params['$filter'] = query else: params.update(query.as_params()) if start.tzinfo is None: # if it's a naive datetime, localize the datetime. start = self.protocol.timezone.localize(start) # localize datetime into local tz if start.tzinfo != pytz.utc: start = start.astimezone(pytz.utc) # transform local datetime to utc if end.tzinfo is None: # if it's a naive datetime, localize the datetime. 
end = self.protocol.timezone.localize(end) # localize datetime into local tz if end.tzinfo != pytz.utc: end = end.astimezone(pytz.utc) # transform local datetime to utc params[self._cc('startDateTime')] = start.isoformat() params[self._cc('endDateTime')] = end.isoformat() response = self.con.get(url, params=params, headers={'Prefer': 'outlook.timezone="UTC"'}) if not response: return iter(()) data = response.json() # Everything received from cloud must be passed as self._cloud_data_key events = (self.__class__(parent=self, **{self._cloud_data_key: event}) for event in data.get('value', [])) next_link = data.get(NEXT_LINK_KEYWORD, None) if batch and next_link: return Pagination(parent=self, data=events, constructor=self.__class__, next_link=next_link, limit=limit) else: return events
def function[get_occurrences, parameter[self, start, end]]: constant[ Returns all the occurrences of a seriesMaster event for a specified time range. :type start: datetime :param start: the start of the time range :type end: datetime :param end: the end of the time range :param int limit: ax no. of events to get. Over 999 uses batch. :type query: Query or str :param query: optional. extra filters or ordes to apply to this query :type order_by: str :param order_by: orders the result set based on this condition :param int batch: batch size, retrieves items in batches allowing to retrieve more items than the limit. :return: a list of events :rtype: list[Event] or Pagination ] if compare[name[self].event_type not_equal[!=] name[EventType].SeriesMaster] begin[:] return[list[[]]] variable[url] assign[=] call[name[self].build_url, parameter[call[call[name[self]._endpoints.get, parameter[constant[occurrences]]].format, parameter[]]]] if <ast.BoolOp object at 0x7da1b1b7a0e0> begin[:] variable[batch] assign[=] name[self].protocol.max_top_value variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b793c0>], [<ast.IfExp object at 0x7da1b1b7a9e0>]] if name[order_by] begin[:] call[name[params]][constant[$orderby]] assign[=] name[order_by] if name[query] begin[:] if call[name[isinstance], parameter[name[query], name[str]]] begin[:] call[name[params]][constant[$filter]] assign[=] name[query] if compare[name[start].tzinfo is constant[None]] begin[:] variable[start] assign[=] call[name[self].protocol.timezone.localize, parameter[name[start]]] if compare[name[start].tzinfo not_equal[!=] name[pytz].utc] begin[:] variable[start] assign[=] call[name[start].astimezone, parameter[name[pytz].utc]] if compare[name[end].tzinfo is constant[None]] begin[:] variable[end] assign[=] call[name[self].protocol.timezone.localize, parameter[name[end]]] if compare[name[end].tzinfo not_equal[!=] name[pytz].utc] begin[:] variable[end] assign[=] call[name[end].astimezone, 
parameter[name[pytz].utc]] call[name[params]][call[name[self]._cc, parameter[constant[startDateTime]]]] assign[=] call[name[start].isoformat, parameter[]] call[name[params]][call[name[self]._cc, parameter[constant[endDateTime]]]] assign[=] call[name[end].isoformat, parameter[]] variable[response] assign[=] call[name[self].con.get, parameter[name[url]]] if <ast.UnaryOp object at 0x7da1b1b78970> begin[:] return[call[name[iter], parameter[tuple[[]]]]] variable[data] assign[=] call[name[response].json, parameter[]] variable[events] assign[=] <ast.GeneratorExp object at 0x7da1b1b78400> variable[next_link] assign[=] call[name[data].get, parameter[name[NEXT_LINK_KEYWORD], constant[None]]] if <ast.BoolOp object at 0x7da1b1b79e40> begin[:] return[call[name[Pagination], parameter[]]]
keyword[def] identifier[get_occurrences] ( identifier[self] , identifier[start] , identifier[end] ,*, identifier[limit] = keyword[None] , identifier[query] = keyword[None] , identifier[order_by] = keyword[None] , identifier[batch] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[event_type] != identifier[EventType] . identifier[SeriesMaster] : keyword[return] [] identifier[url] = identifier[self] . identifier[build_url] ( identifier[self] . identifier[_endpoints] . identifier[get] ( literal[string] ). identifier[format] ( identifier[id] = identifier[self] . identifier[object_id] )) keyword[if] identifier[limit] keyword[is] keyword[None] keyword[or] identifier[limit] > identifier[self] . identifier[protocol] . identifier[max_top_value] : identifier[batch] = identifier[self] . identifier[protocol] . identifier[max_top_value] identifier[params] ={ literal[string] : identifier[batch] keyword[if] identifier[batch] keyword[else] identifier[limit] } keyword[if] identifier[order_by] : identifier[params] [ literal[string] ]= identifier[order_by] keyword[if] identifier[query] : keyword[if] identifier[isinstance] ( identifier[query] , identifier[str] ): identifier[params] [ literal[string] ]= identifier[query] keyword[else] : identifier[params] . identifier[update] ( identifier[query] . identifier[as_params] ()) keyword[if] identifier[start] . identifier[tzinfo] keyword[is] keyword[None] : identifier[start] = identifier[self] . identifier[protocol] . identifier[timezone] . identifier[localize] ( identifier[start] ) keyword[if] identifier[start] . identifier[tzinfo] != identifier[pytz] . identifier[utc] : identifier[start] = identifier[start] . identifier[astimezone] ( identifier[pytz] . identifier[utc] ) keyword[if] identifier[end] . identifier[tzinfo] keyword[is] keyword[None] : identifier[end] = identifier[self] . identifier[protocol] . identifier[timezone] . identifier[localize] ( identifier[end] ) keyword[if] identifier[end] . 
identifier[tzinfo] != identifier[pytz] . identifier[utc] : identifier[end] = identifier[end] . identifier[astimezone] ( identifier[pytz] . identifier[utc] ) identifier[params] [ identifier[self] . identifier[_cc] ( literal[string] )]= identifier[start] . identifier[isoformat] () identifier[params] [ identifier[self] . identifier[_cc] ( literal[string] )]= identifier[end] . identifier[isoformat] () identifier[response] = identifier[self] . identifier[con] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[headers] ={ literal[string] : literal[string] }) keyword[if] keyword[not] identifier[response] : keyword[return] identifier[iter] (()) identifier[data] = identifier[response] . identifier[json] () identifier[events] =( identifier[self] . identifier[__class__] ( identifier[parent] = identifier[self] ,**{ identifier[self] . identifier[_cloud_data_key] : identifier[event] }) keyword[for] identifier[event] keyword[in] identifier[data] . identifier[get] ( literal[string] ,[])) identifier[next_link] = identifier[data] . identifier[get] ( identifier[NEXT_LINK_KEYWORD] , keyword[None] ) keyword[if] identifier[batch] keyword[and] identifier[next_link] : keyword[return] identifier[Pagination] ( identifier[parent] = identifier[self] , identifier[data] = identifier[events] , identifier[constructor] = identifier[self] . identifier[__class__] , identifier[next_link] = identifier[next_link] , identifier[limit] = identifier[limit] ) keyword[else] : keyword[return] identifier[events]
def get_occurrences(self, start, end, *, limit=None, query=None, order_by=None, batch=None): """ Returns all the occurrences of a seriesMaster event for a specified time range. :type start: datetime :param start: the start of the time range :type end: datetime :param end: the end of the time range :param int limit: ax no. of events to get. Over 999 uses batch. :type query: Query or str :param query: optional. extra filters or ordes to apply to this query :type order_by: str :param order_by: orders the result set based on this condition :param int batch: batch size, retrieves items in batches allowing to retrieve more items than the limit. :return: a list of events :rtype: list[Event] or Pagination """ if self.event_type != EventType.SeriesMaster: # you can only get occurrences if its a seriesMaster return [] # depends on [control=['if'], data=[]] url = self.build_url(self._endpoints.get('occurrences').format(id=self.object_id)) if limit is None or limit > self.protocol.max_top_value: batch = self.protocol.max_top_value # depends on [control=['if'], data=[]] params = {'$top': batch if batch else limit} if order_by: params['$orderby'] = order_by # depends on [control=['if'], data=[]] if query: if isinstance(query, str): params['$filter'] = query # depends on [control=['if'], data=[]] else: params.update(query.as_params()) # depends on [control=['if'], data=[]] if start.tzinfo is None: # if it's a naive datetime, localize the datetime. start = self.protocol.timezone.localize(start) # localize datetime into local tz # depends on [control=['if'], data=[]] if start.tzinfo != pytz.utc: start = start.astimezone(pytz.utc) # transform local datetime to utc # depends on [control=['if'], data=[]] if end.tzinfo is None: # if it's a naive datetime, localize the datetime. 
end = self.protocol.timezone.localize(end) # localize datetime into local tz # depends on [control=['if'], data=[]] if end.tzinfo != pytz.utc: end = end.astimezone(pytz.utc) # transform local datetime to utc # depends on [control=['if'], data=[]] params[self._cc('startDateTime')] = start.isoformat() params[self._cc('endDateTime')] = end.isoformat() response = self.con.get(url, params=params, headers={'Prefer': 'outlook.timezone="UTC"'}) if not response: return iter(()) # depends on [control=['if'], data=[]] data = response.json() # Everything received from cloud must be passed as self._cloud_data_key events = (self.__class__(parent=self, **{self._cloud_data_key: event}) for event in data.get('value', [])) next_link = data.get(NEXT_LINK_KEYWORD, None) if batch and next_link: return Pagination(parent=self, data=events, constructor=self.__class__, next_link=next_link, limit=limit) # depends on [control=['if'], data=[]] else: return events
def tt(self, key, locale=None, locale2=None, default=I18n.DFT): """ |tt| means text transform. key: tt key. locale: main locale key into |self.tt_dd|. Default to |self.locale| locale2: fallback locale key into |self.tt_dd|. Default to |self.locale2| default: a default value in case tt value is not found. Default to raise KeyError. """ #/ locale = locale or self.locale locale2 = locale2 or self.locale2 #/ get tt dict of the locale tt_d = self.tt_dd.get(locale, None) if tt_d is not None: #/ val = tt_d.get(key, I18n.DFT) #/ if tt value is found if val is not I18n.DFT: return val #/ tt value is not found #/ if has locale2 ## Y if locale2 and locale2 != locale: #/ fall back to locale2 return self.tt(key, locale=locale2, default=default) ## N else: #/ if default is specified ## N if default is I18n.DFT: raise KeyError(key) ## Y else: return default
def function[tt, parameter[self, key, locale, locale2, default]]: constant[ |tt| means text transform. key: tt key. locale: main locale key into |self.tt_dd|. Default to |self.locale| locale2: fallback locale key into |self.tt_dd|. Default to |self.locale2| default: a default value in case tt value is not found. Default to raise KeyError. ] variable[locale] assign[=] <ast.BoolOp object at 0x7da1b14a86d0> variable[locale2] assign[=] <ast.BoolOp object at 0x7da1b14a9c00> variable[tt_d] assign[=] call[name[self].tt_dd.get, parameter[name[locale], constant[None]]] if compare[name[tt_d] is_not constant[None]] begin[:] variable[val] assign[=] call[name[tt_d].get, parameter[name[key], name[I18n].DFT]] if compare[name[val] is_not name[I18n].DFT] begin[:] return[name[val]] if <ast.BoolOp object at 0x7da1b14ab3a0> begin[:] return[call[name[self].tt, parameter[name[key]]]]
keyword[def] identifier[tt] ( identifier[self] , identifier[key] , identifier[locale] = keyword[None] , identifier[locale2] = keyword[None] , identifier[default] = identifier[I18n] . identifier[DFT] ): literal[string] identifier[locale] = identifier[locale] keyword[or] identifier[self] . identifier[locale] identifier[locale2] = identifier[locale2] keyword[or] identifier[self] . identifier[locale2] identifier[tt_d] = identifier[self] . identifier[tt_dd] . identifier[get] ( identifier[locale] , keyword[None] ) keyword[if] identifier[tt_d] keyword[is] keyword[not] keyword[None] : identifier[val] = identifier[tt_d] . identifier[get] ( identifier[key] , identifier[I18n] . identifier[DFT] ) keyword[if] identifier[val] keyword[is] keyword[not] identifier[I18n] . identifier[DFT] : keyword[return] identifier[val] keyword[if] identifier[locale2] keyword[and] identifier[locale2] != identifier[locale] : keyword[return] identifier[self] . identifier[tt] ( identifier[key] , identifier[locale] = identifier[locale2] , identifier[default] = identifier[default] ) keyword[else] : keyword[if] identifier[default] keyword[is] identifier[I18n] . identifier[DFT] : keyword[raise] identifier[KeyError] ( identifier[key] ) keyword[else] : keyword[return] identifier[default]
def tt(self, key, locale=None, locale2=None, default=I18n.DFT): """ |tt| means text transform. key: tt key. locale: main locale key into |self.tt_dd|. Default to |self.locale| locale2: fallback locale key into |self.tt_dd|. Default to |self.locale2| default: a default value in case tt value is not found. Default to raise KeyError. """ #/ locale = locale or self.locale locale2 = locale2 or self.locale2 #/ get tt dict of the locale tt_d = self.tt_dd.get(locale, None) if tt_d is not None: #/ val = tt_d.get(key, I18n.DFT) #/ if tt value is found if val is not I18n.DFT: return val # depends on [control=['if'], data=['val']] # depends on [control=['if'], data=['tt_d']] #/ tt value is not found #/ if has locale2 ## Y if locale2 and locale2 != locale: #/ fall back to locale2 return self.tt(key, locale=locale2, default=default) # depends on [control=['if'], data=[]] ## N #/ if default is specified ## N elif default is I18n.DFT: raise KeyError(key) # depends on [control=['if'], data=[]] else: ## Y return default
def read_next_line(self): """Read another line from the file.""" next_line = self.file.readline() if not next_line or next_line[-1:] != '\n': # no newline on last line of file self.file = None else: # trim newline characters next_line = next_line[:-1] expanded = next_line.expandtabs() edit = urwid.Edit("", expanded, allow_tab=True) edit.set_edit_pos(0) edit.original_text = next_line self.lines.append(edit) return next_line
def function[read_next_line, parameter[self]]: constant[Read another line from the file.] variable[next_line] assign[=] call[name[self].file.readline, parameter[]] if <ast.BoolOp object at 0x7da18f09fac0> begin[:] name[self].file assign[=] constant[None] variable[expanded] assign[=] call[name[next_line].expandtabs, parameter[]] variable[edit] assign[=] call[name[urwid].Edit, parameter[constant[], name[expanded]]] call[name[edit].set_edit_pos, parameter[constant[0]]] name[edit].original_text assign[=] name[next_line] call[name[self].lines.append, parameter[name[edit]]] return[name[next_line]]
keyword[def] identifier[read_next_line] ( identifier[self] ): literal[string] identifier[next_line] = identifier[self] . identifier[file] . identifier[readline] () keyword[if] keyword[not] identifier[next_line] keyword[or] identifier[next_line] [- literal[int] :]!= literal[string] : identifier[self] . identifier[file] = keyword[None] keyword[else] : identifier[next_line] = identifier[next_line] [:- literal[int] ] identifier[expanded] = identifier[next_line] . identifier[expandtabs] () identifier[edit] = identifier[urwid] . identifier[Edit] ( literal[string] , identifier[expanded] , identifier[allow_tab] = keyword[True] ) identifier[edit] . identifier[set_edit_pos] ( literal[int] ) identifier[edit] . identifier[original_text] = identifier[next_line] identifier[self] . identifier[lines] . identifier[append] ( identifier[edit] ) keyword[return] identifier[next_line]
def read_next_line(self): """Read another line from the file.""" next_line = self.file.readline() if not next_line or next_line[-1:] != '\n': # no newline on last line of file self.file = None # depends on [control=['if'], data=[]] else: # trim newline characters next_line = next_line[:-1] expanded = next_line.expandtabs() edit = urwid.Edit('', expanded, allow_tab=True) edit.set_edit_pos(0) edit.original_text = next_line self.lines.append(edit) return next_line
def difference(self, other): """ Compute the difference between this and a given range. >>> intrange(1, 10).difference(intrange(10, 15)) intrange([1,10)) >>> intrange(1, 10).difference(intrange(5, 10)) intrange([1,5)) >>> intrange(1, 5).difference(intrange(5, 10)) intrange([1,5)) >>> intrange(1, 5).difference(intrange(1, 10)) intrange(empty) The difference can not be computed if the resulting range would be split in two separate ranges. This happens when the given range is completely within this range and does not start or end at the same value. >>> intrange(1, 15).difference(intrange(5, 10)) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: Other range must not be within this range This does not modify the range in place. This is the same as the ``-`` operator for two ranges in PostgreSQL. :param other: Range to difference against. :return: A new range that is the difference between this and `other`. :raises ValueError: If difference bethween this and `other` can not be computed. """ if not self.is_valid_range(other): msg = "Unsupported type to test for difference '{.__class__.__name__}'" raise TypeError(msg.format(other)) # Consider empty ranges or no overlap if not self or not other or not self.overlap(other): return self # If self is contained within other, the result is empty elif self in other: return self.empty() elif other in self and not (self.startswith(other) or self.endswith(other)): raise ValueError("Other range must not be within this range") elif self.endsbefore(other): return self.replace(upper=other.lower, upper_inc=not other.lower_inc) elif self.startsafter(other): return self.replace(lower=other.upper, lower_inc=not other.upper_inc) else: return self.empty()
def function[difference, parameter[self, other]]: constant[ Compute the difference between this and a given range. >>> intrange(1, 10).difference(intrange(10, 15)) intrange([1,10)) >>> intrange(1, 10).difference(intrange(5, 10)) intrange([1,5)) >>> intrange(1, 5).difference(intrange(5, 10)) intrange([1,5)) >>> intrange(1, 5).difference(intrange(1, 10)) intrange(empty) The difference can not be computed if the resulting range would be split in two separate ranges. This happens when the given range is completely within this range and does not start or end at the same value. >>> intrange(1, 15).difference(intrange(5, 10)) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: Other range must not be within this range This does not modify the range in place. This is the same as the ``-`` operator for two ranges in PostgreSQL. :param other: Range to difference against. :return: A new range that is the difference between this and `other`. :raises ValueError: If difference bethween this and `other` can not be computed. ] if <ast.UnaryOp object at 0x7da20c794940> begin[:] variable[msg] assign[=] constant[Unsupported type to test for difference '{.__class__.__name__}'] <ast.Raise object at 0x7da20c7943d0> if <ast.BoolOp object at 0x7da20c796560> begin[:] return[name[self]]
keyword[def] identifier[difference] ( identifier[self] , identifier[other] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[is_valid_range] ( identifier[other] ): identifier[msg] = literal[string] keyword[raise] identifier[TypeError] ( identifier[msg] . identifier[format] ( identifier[other] )) keyword[if] keyword[not] identifier[self] keyword[or] keyword[not] identifier[other] keyword[or] keyword[not] identifier[self] . identifier[overlap] ( identifier[other] ): keyword[return] identifier[self] keyword[elif] identifier[self] keyword[in] identifier[other] : keyword[return] identifier[self] . identifier[empty] () keyword[elif] identifier[other] keyword[in] identifier[self] keyword[and] keyword[not] ( identifier[self] . identifier[startswith] ( identifier[other] ) keyword[or] identifier[self] . identifier[endswith] ( identifier[other] )): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[elif] identifier[self] . identifier[endsbefore] ( identifier[other] ): keyword[return] identifier[self] . identifier[replace] ( identifier[upper] = identifier[other] . identifier[lower] , identifier[upper_inc] = keyword[not] identifier[other] . identifier[lower_inc] ) keyword[elif] identifier[self] . identifier[startsafter] ( identifier[other] ): keyword[return] identifier[self] . identifier[replace] ( identifier[lower] = identifier[other] . identifier[upper] , identifier[lower_inc] = keyword[not] identifier[other] . identifier[upper_inc] ) keyword[else] : keyword[return] identifier[self] . identifier[empty] ()
def difference(self, other): """ Compute the difference between this and a given range. >>> intrange(1, 10).difference(intrange(10, 15)) intrange([1,10)) >>> intrange(1, 10).difference(intrange(5, 10)) intrange([1,5)) >>> intrange(1, 5).difference(intrange(5, 10)) intrange([1,5)) >>> intrange(1, 5).difference(intrange(1, 10)) intrange(empty) The difference can not be computed if the resulting range would be split in two separate ranges. This happens when the given range is completely within this range and does not start or end at the same value. >>> intrange(1, 15).difference(intrange(5, 10)) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: Other range must not be within this range This does not modify the range in place. This is the same as the ``-`` operator for two ranges in PostgreSQL. :param other: Range to difference against. :return: A new range that is the difference between this and `other`. :raises ValueError: If difference bethween this and `other` can not be computed. """ if not self.is_valid_range(other): msg = "Unsupported type to test for difference '{.__class__.__name__}'" raise TypeError(msg.format(other)) # depends on [control=['if'], data=[]] # Consider empty ranges or no overlap if not self or not other or (not self.overlap(other)): return self # depends on [control=['if'], data=[]] # If self is contained within other, the result is empty elif self in other: return self.empty() # depends on [control=['if'], data=['self']] elif other in self and (not (self.startswith(other) or self.endswith(other))): raise ValueError('Other range must not be within this range') # depends on [control=['if'], data=[]] elif self.endsbefore(other): return self.replace(upper=other.lower, upper_inc=not other.lower_inc) # depends on [control=['if'], data=[]] elif self.startsafter(other): return self.replace(lower=other.upper, lower_inc=not other.upper_inc) # depends on [control=['if'], data=[]] else: return self.empty()
def reply(self, status=200, new_response=False, **kw): """ Defines the mock response. Arguments: status (int, optional): response status code. Defaults to ``200``. **kw (dict): optional keyword arguments passed to ``pook.Response`` constructor. Returns: pook.Response: mock response definition instance. """ # Use or create a Response mock instance res = Response(**kw) if new_response else self._response # Define HTTP mandatory response status res.status(status or res._status) # Expose current mock instance in response for self-reference res.mock = self # Define mock response self._response = res # Return response return res
def function[reply, parameter[self, status, new_response]]: constant[ Defines the mock response. Arguments: status (int, optional): response status code. Defaults to ``200``. **kw (dict): optional keyword arguments passed to ``pook.Response`` constructor. Returns: pook.Response: mock response definition instance. ] variable[res] assign[=] <ast.IfExp object at 0x7da1b02a5cc0> call[name[res].status, parameter[<ast.BoolOp object at 0x7da1b02a64a0>]] name[res].mock assign[=] name[self] name[self]._response assign[=] name[res] return[name[res]]
keyword[def] identifier[reply] ( identifier[self] , identifier[status] = literal[int] , identifier[new_response] = keyword[False] ,** identifier[kw] ): literal[string] identifier[res] = identifier[Response] (** identifier[kw] ) keyword[if] identifier[new_response] keyword[else] identifier[self] . identifier[_response] identifier[res] . identifier[status] ( identifier[status] keyword[or] identifier[res] . identifier[_status] ) identifier[res] . identifier[mock] = identifier[self] identifier[self] . identifier[_response] = identifier[res] keyword[return] identifier[res]
def reply(self, status=200, new_response=False, **kw): """ Defines the mock response. Arguments: status (int, optional): response status code. Defaults to ``200``. **kw (dict): optional keyword arguments passed to ``pook.Response`` constructor. Returns: pook.Response: mock response definition instance. """ # Use or create a Response mock instance res = Response(**kw) if new_response else self._response # Define HTTP mandatory response status res.status(status or res._status) # Expose current mock instance in response for self-reference res.mock = self # Define mock response self._response = res # Return response return res
def is_resource_protected(self, request, **kwargs): """ Determines if a resource should be protected. Returns true if and only if the resource's access_state matches an entry in the return value of get_protected_states(). """ access_state = self._get_resource_access_state(request) protected_states = self.get_protected_states() return access_state in protected_states
def function[is_resource_protected, parameter[self, request]]: constant[ Determines if a resource should be protected. Returns true if and only if the resource's access_state matches an entry in the return value of get_protected_states(). ] variable[access_state] assign[=] call[name[self]._get_resource_access_state, parameter[name[request]]] variable[protected_states] assign[=] call[name[self].get_protected_states, parameter[]] return[compare[name[access_state] in name[protected_states]]]
keyword[def] identifier[is_resource_protected] ( identifier[self] , identifier[request] ,** identifier[kwargs] ): literal[string] identifier[access_state] = identifier[self] . identifier[_get_resource_access_state] ( identifier[request] ) identifier[protected_states] = identifier[self] . identifier[get_protected_states] () keyword[return] identifier[access_state] keyword[in] identifier[protected_states]
def is_resource_protected(self, request, **kwargs): """ Determines if a resource should be protected. Returns true if and only if the resource's access_state matches an entry in the return value of get_protected_states(). """ access_state = self._get_resource_access_state(request) protected_states = self.get_protected_states() return access_state in protected_states
def set_image(self, image, filename=None, resize=False): """ Set the poster or thumbnail of a this Vidoe. """ if self.id: data = self.connection.post('add_image', filename, video_id=self.id, image=image.to_dict(), resize=resize) if data: self.image = Image(data=data)
def function[set_image, parameter[self, image, filename, resize]]: constant[ Set the poster or thumbnail of a this Vidoe. ] if name[self].id begin[:] variable[data] assign[=] call[name[self].connection.post, parameter[constant[add_image], name[filename]]] if name[data] begin[:] name[self].image assign[=] call[name[Image], parameter[]]
keyword[def] identifier[set_image] ( identifier[self] , identifier[image] , identifier[filename] = keyword[None] , identifier[resize] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[id] : identifier[data] = identifier[self] . identifier[connection] . identifier[post] ( literal[string] , identifier[filename] , identifier[video_id] = identifier[self] . identifier[id] , identifier[image] = identifier[image] . identifier[to_dict] (), identifier[resize] = identifier[resize] ) keyword[if] identifier[data] : identifier[self] . identifier[image] = identifier[Image] ( identifier[data] = identifier[data] )
def set_image(self, image, filename=None, resize=False): """ Set the poster or thumbnail of a this Vidoe. """ if self.id: data = self.connection.post('add_image', filename, video_id=self.id, image=image.to_dict(), resize=resize) if data: self.image = Image(data=data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def iter(self, columnnames, order='', sort=True): """Return a tableiter object. :class:`tableiter` lets one iterate over a table by returning in each iteration step a reference table containing equal values for the given columns. By default a sort is done on the given columns to get the correct iteration order. `order` | 'ascending' is iterate in ascending order (is the default). | 'descending' is iterate in descending order. `sort=False` do not sort (because table is already in correct order). For example, iterate by time through a measurementset table:: t = table('3c343.MS') for ts in t.iter('TIME'): print ts.nrows() """ from .tableiter import tableiter return tableiter(self, columnnames, order, sort)
def function[iter, parameter[self, columnnames, order, sort]]: constant[Return a tableiter object. :class:`tableiter` lets one iterate over a table by returning in each iteration step a reference table containing equal values for the given columns. By default a sort is done on the given columns to get the correct iteration order. `order` | 'ascending' is iterate in ascending order (is the default). | 'descending' is iterate in descending order. `sort=False` do not sort (because table is already in correct order). For example, iterate by time through a measurementset table:: t = table('3c343.MS') for ts in t.iter('TIME'): print ts.nrows() ] from relative_module[tableiter] import module[tableiter] return[call[name[tableiter], parameter[name[self], name[columnnames], name[order], name[sort]]]]
keyword[def] identifier[iter] ( identifier[self] , identifier[columnnames] , identifier[order] = literal[string] , identifier[sort] = keyword[True] ): literal[string] keyword[from] . identifier[tableiter] keyword[import] identifier[tableiter] keyword[return] identifier[tableiter] ( identifier[self] , identifier[columnnames] , identifier[order] , identifier[sort] )
def iter(self, columnnames, order='', sort=True): """Return a tableiter object. :class:`tableiter` lets one iterate over a table by returning in each iteration step a reference table containing equal values for the given columns. By default a sort is done on the given columns to get the correct iteration order. `order` | 'ascending' is iterate in ascending order (is the default). | 'descending' is iterate in descending order. `sort=False` do not sort (because table is already in correct order). For example, iterate by time through a measurementset table:: t = table('3c343.MS') for ts in t.iter('TIME'): print ts.nrows() """ from .tableiter import tableiter return tableiter(self, columnnames, order, sort)