code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def create_session(self): """Create a session. First we look in self.key_file for a path to a json file with the credentials. The key file should have 'AWSAccessKeyId' and 'AWSSecretKey'. Next we look at self.profile for a profile name and try to use the Session call to automatically pick up the keys for the profile from the user default keys file ~/.aws/config. Finally, boto3 will look for the keys in environment variables: AWS_ACCESS_KEY_ID: The access key for your AWS account. AWS_SECRET_ACCESS_KEY: The secret key for your AWS account. AWS_SESSION_TOKEN: The session key for your AWS account. This is only needed when you are using temporary credentials. The AWS_SECURITY_TOKEN environment variable can also be used, but is only supported for backwards compatibility purposes. AWS_SESSION_TOKEN is supported by multiple AWS SDKs besides python. """ session = None if self.key_file is not None: credfile = os.path.expandvars(os.path.expanduser(self.key_file)) try: with open(credfile, 'r') as f: creds = json.load(f) except json.JSONDecodeError as e: logger.error( "EC2Provider '{}': json decode error in credential file {}".format(self.label, credfile) ) raise e except Exception as e: logger.debug( "EC2Provider '{0}' caught exception while reading credential file: {1}".format( self.label, credfile ) ) raise e logger.debug("EC2Provider '{}': Using credential file to create session".format(self.label)) session = boto3.session.Session(region_name=self.region, **creds) elif self.profile is not None: logger.debug("EC2Provider '{}': Using profile name to create session".format(self.label)) session = boto3.session.Session( profile_name=self.profile, region_name=self.region ) else: logger.debug("EC2Provider '{}': Using environment variables to create session".format(self.label)) session = boto3.session.Session(region_name=self.region) return session
def function[create_session, parameter[self]]: constant[Create a session. First we look in self.key_file for a path to a json file with the credentials. The key file should have 'AWSAccessKeyId' and 'AWSSecretKey'. Next we look at self.profile for a profile name and try to use the Session call to automatically pick up the keys for the profile from the user default keys file ~/.aws/config. Finally, boto3 will look for the keys in environment variables: AWS_ACCESS_KEY_ID: The access key for your AWS account. AWS_SECRET_ACCESS_KEY: The secret key for your AWS account. AWS_SESSION_TOKEN: The session key for your AWS account. This is only needed when you are using temporary credentials. The AWS_SECURITY_TOKEN environment variable can also be used, but is only supported for backwards compatibility purposes. AWS_SESSION_TOKEN is supported by multiple AWS SDKs besides python. ] variable[session] assign[=] constant[None] if compare[name[self].key_file is_not constant[None]] begin[:] variable[credfile] assign[=] call[name[os].path.expandvars, parameter[call[name[os].path.expanduser, parameter[name[self].key_file]]]] <ast.Try object at 0x7da1b01d6440> call[name[logger].debug, parameter[call[constant[EC2Provider '{}': Using credential file to create session].format, parameter[name[self].label]]]] variable[session] assign[=] call[name[boto3].session.Session, parameter[]] return[name[session]]
keyword[def] identifier[create_session] ( identifier[self] ): literal[string] identifier[session] = keyword[None] keyword[if] identifier[self] . identifier[key_file] keyword[is] keyword[not] keyword[None] : identifier[credfile] = identifier[os] . identifier[path] . identifier[expandvars] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[self] . identifier[key_file] )) keyword[try] : keyword[with] identifier[open] ( identifier[credfile] , literal[string] ) keyword[as] identifier[f] : identifier[creds] = identifier[json] . identifier[load] ( identifier[f] ) keyword[except] identifier[json] . identifier[JSONDecodeError] keyword[as] identifier[e] : identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[self] . identifier[label] , identifier[credfile] ) ) keyword[raise] identifier[e] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[label] , identifier[credfile] ) ) keyword[raise] identifier[e] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[label] )) identifier[session] = identifier[boto3] . identifier[session] . identifier[Session] ( identifier[region_name] = identifier[self] . identifier[region] ,** identifier[creds] ) keyword[elif] identifier[self] . identifier[profile] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[label] )) identifier[session] = identifier[boto3] . identifier[session] . identifier[Session] ( identifier[profile_name] = identifier[self] . identifier[profile] , identifier[region_name] = identifier[self] . identifier[region] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[label] )) identifier[session] = identifier[boto3] . 
identifier[session] . identifier[Session] ( identifier[region_name] = identifier[self] . identifier[region] ) keyword[return] identifier[session]
def create_session(self): """Create a session. First we look in self.key_file for a path to a json file with the credentials. The key file should have 'AWSAccessKeyId' and 'AWSSecretKey'. Next we look at self.profile for a profile name and try to use the Session call to automatically pick up the keys for the profile from the user default keys file ~/.aws/config. Finally, boto3 will look for the keys in environment variables: AWS_ACCESS_KEY_ID: The access key for your AWS account. AWS_SECRET_ACCESS_KEY: The secret key for your AWS account. AWS_SESSION_TOKEN: The session key for your AWS account. This is only needed when you are using temporary credentials. The AWS_SECURITY_TOKEN environment variable can also be used, but is only supported for backwards compatibility purposes. AWS_SESSION_TOKEN is supported by multiple AWS SDKs besides python. """ session = None if self.key_file is not None: credfile = os.path.expandvars(os.path.expanduser(self.key_file)) try: with open(credfile, 'r') as f: creds = json.load(f) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except json.JSONDecodeError as e: logger.error("EC2Provider '{}': json decode error in credential file {}".format(self.label, credfile)) raise e # depends on [control=['except'], data=['e']] except Exception as e: logger.debug("EC2Provider '{0}' caught exception while reading credential file: {1}".format(self.label, credfile)) raise e # depends on [control=['except'], data=['e']] logger.debug("EC2Provider '{}': Using credential file to create session".format(self.label)) session = boto3.session.Session(region_name=self.region, **creds) # depends on [control=['if'], data=[]] elif self.profile is not None: logger.debug("EC2Provider '{}': Using profile name to create session".format(self.label)) session = boto3.session.Session(profile_name=self.profile, region_name=self.region) # depends on [control=['if'], data=[]] else: logger.debug("EC2Provider '{}': Using environment variables 
to create session".format(self.label)) session = boto3.session.Session(region_name=self.region) return session
def connect_inputs(self, datas): """ Connects input ``Pipers`` to "datas" input data in the correct order determined, by the ``Piper.ornament`` attribute and the ``Dagger._cmp`` function. It is assumed that the input data is in the form of an iterator and that all inputs have the same number of input items. A pipeline will **deadlock** otherwise. Arguments: - datas (sequence of sequences) An ordered sequence of inputs for all input ``Pipers``. """ start_pipers = self.get_inputs() self.log.debug('%s trying to connect inputs in the order %s' % \ (repr(self), repr(start_pipers))) for piper, data in izip(start_pipers, datas): piper.connect([data]) self.log.debug('%s succesfuly connected inputs' % repr(self))
def function[connect_inputs, parameter[self, datas]]: constant[ Connects input ``Pipers`` to "datas" input data in the correct order determined, by the ``Piper.ornament`` attribute and the ``Dagger._cmp`` function. It is assumed that the input data is in the form of an iterator and that all inputs have the same number of input items. A pipeline will **deadlock** otherwise. Arguments: - datas (sequence of sequences) An ordered sequence of inputs for all input ``Pipers``. ] variable[start_pipers] assign[=] call[name[self].get_inputs, parameter[]] call[name[self].log.debug, parameter[binary_operation[constant[%s trying to connect inputs in the order %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18fe93af0>, <ast.Call object at 0x7da18fe92920>]]]]] for taget[tuple[[<ast.Name object at 0x7da18fe935b0>, <ast.Name object at 0x7da18fe93610>]]] in starred[call[name[izip], parameter[name[start_pipers], name[datas]]]] begin[:] call[name[piper].connect, parameter[list[[<ast.Name object at 0x7da20c6a9900>]]]] call[name[self].log.debug, parameter[binary_operation[constant[%s succesfuly connected inputs] <ast.Mod object at 0x7da2590d6920> call[name[repr], parameter[name[self]]]]]]
keyword[def] identifier[connect_inputs] ( identifier[self] , identifier[datas] ): literal[string] identifier[start_pipers] = identifier[self] . identifier[get_inputs] () identifier[self] . identifier[log] . identifier[debug] ( literal[string] %( identifier[repr] ( identifier[self] ), identifier[repr] ( identifier[start_pipers] ))) keyword[for] identifier[piper] , identifier[data] keyword[in] identifier[izip] ( identifier[start_pipers] , identifier[datas] ): identifier[piper] . identifier[connect] ([ identifier[data] ]) identifier[self] . identifier[log] . identifier[debug] ( literal[string] % identifier[repr] ( identifier[self] ))
def connect_inputs(self, datas): """ Connects input ``Pipers`` to "datas" input data in the correct order determined, by the ``Piper.ornament`` attribute and the ``Dagger._cmp`` function. It is assumed that the input data is in the form of an iterator and that all inputs have the same number of input items. A pipeline will **deadlock** otherwise. Arguments: - datas (sequence of sequences) An ordered sequence of inputs for all input ``Pipers``. """ start_pipers = self.get_inputs() self.log.debug('%s trying to connect inputs in the order %s' % (repr(self), repr(start_pipers))) for (piper, data) in izip(start_pipers, datas): piper.connect([data]) # depends on [control=['for'], data=[]] self.log.debug('%s succesfuly connected inputs' % repr(self))
def btop2cigar(btopString, concise=False, aa=False): """ Convert a BTOP string to a CIGAR string. @param btopString: A C{str} BTOP sequence. @param concise: If C{True}, use 'M' for matches and mismatches instead of the more specific 'X' and '='. @param aa: If C{True}, C{btopString} will be interpreted as though it refers to amino acids (as in the BTOP string produced by DIAMOND). In that case, it is not possible to use the 'precise' CIGAR characters because amino acids have multiple codons so we cannot know whether an amino acid match is due to an exact nucleotide matches or not. Also, the numbers in the BTOP string will be multiplied by 3 since they refer to a number of amino acids matching. @raise ValueError: If L{parseBtop} finds an error in C{btopString} or if C{aa} and C{concise} are both C{True}. @return: A C{str} CIGAR string. """ if aa and concise: raise ValueError('aa and concise cannot both be True') result = [] thisLength = thisOperation = currentLength = currentOperation = None for item in parseBtop(btopString): if isinstance(item, int): thisLength = item thisOperation = CEQUAL if concise else CMATCH else: thisLength = 1 query, reference = item if query == '-': # The query has a gap. That means that in matching the # query to the reference a deletion is needed in the # reference. assert reference != '-' thisOperation = CDEL elif reference == '-': # The reference has a gap. That means that in matching the # query to the reference an insertion is needed in the # reference. thisOperation = CINS else: # A substitution was needed. assert query != reference thisOperation = CDIFF if concise else CMATCH if thisOperation == currentOperation: currentLength += thisLength else: if currentOperation: result.append( '%d%s' % ((3 * currentLength) if aa else currentLength, currentOperation)) currentLength, currentOperation = thisLength, thisOperation # We reached the end of the BTOP string. If there was an operation # underway, emit it. 
The 'if' here should only be needed to catch the # case where btopString was empty. assert currentOperation or btopString == '' if currentOperation: result.append( '%d%s' % ((3 * currentLength) if aa else currentLength, currentOperation)) return ''.join(result)
def function[btop2cigar, parameter[btopString, concise, aa]]: constant[ Convert a BTOP string to a CIGAR string. @param btopString: A C{str} BTOP sequence. @param concise: If C{True}, use 'M' for matches and mismatches instead of the more specific 'X' and '='. @param aa: If C{True}, C{btopString} will be interpreted as though it refers to amino acids (as in the BTOP string produced by DIAMOND). In that case, it is not possible to use the 'precise' CIGAR characters because amino acids have multiple codons so we cannot know whether an amino acid match is due to an exact nucleotide matches or not. Also, the numbers in the BTOP string will be multiplied by 3 since they refer to a number of amino acids matching. @raise ValueError: If L{parseBtop} finds an error in C{btopString} or if C{aa} and C{concise} are both C{True}. @return: A C{str} CIGAR string. ] if <ast.BoolOp object at 0x7da18eb55cc0> begin[:] <ast.Raise object at 0x7da18eb55ea0> variable[result] assign[=] list[[]] variable[thisLength] assign[=] constant[None] for taget[name[item]] in starred[call[name[parseBtop], parameter[name[btopString]]]] begin[:] if call[name[isinstance], parameter[name[item], name[int]]] begin[:] variable[thisLength] assign[=] name[item] variable[thisOperation] assign[=] <ast.IfExp object at 0x7da18eb57190> if compare[name[thisOperation] equal[==] name[currentOperation]] begin[:] <ast.AugAssign object at 0x7da18eb54c70> assert[<ast.BoolOp object at 0x7da2041da140>] if name[currentOperation] begin[:] call[name[result].append, parameter[binary_operation[constant[%d%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.IfExp object at 0x7da2041db610>, <ast.Name object at 0x7da2041da440>]]]]] return[call[constant[].join, parameter[name[result]]]]
keyword[def] identifier[btop2cigar] ( identifier[btopString] , identifier[concise] = keyword[False] , identifier[aa] = keyword[False] ): literal[string] keyword[if] identifier[aa] keyword[and] identifier[concise] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[result] =[] identifier[thisLength] = identifier[thisOperation] = identifier[currentLength] = identifier[currentOperation] = keyword[None] keyword[for] identifier[item] keyword[in] identifier[parseBtop] ( identifier[btopString] ): keyword[if] identifier[isinstance] ( identifier[item] , identifier[int] ): identifier[thisLength] = identifier[item] identifier[thisOperation] = identifier[CEQUAL] keyword[if] identifier[concise] keyword[else] identifier[CMATCH] keyword[else] : identifier[thisLength] = literal[int] identifier[query] , identifier[reference] = identifier[item] keyword[if] identifier[query] == literal[string] : keyword[assert] identifier[reference] != literal[string] identifier[thisOperation] = identifier[CDEL] keyword[elif] identifier[reference] == literal[string] : identifier[thisOperation] = identifier[CINS] keyword[else] : keyword[assert] identifier[query] != identifier[reference] identifier[thisOperation] = identifier[CDIFF] keyword[if] identifier[concise] keyword[else] identifier[CMATCH] keyword[if] identifier[thisOperation] == identifier[currentOperation] : identifier[currentLength] += identifier[thisLength] keyword[else] : keyword[if] identifier[currentOperation] : identifier[result] . identifier[append] ( literal[string] % (( literal[int] * identifier[currentLength] ) keyword[if] identifier[aa] keyword[else] identifier[currentLength] , identifier[currentOperation] )) identifier[currentLength] , identifier[currentOperation] = identifier[thisLength] , identifier[thisOperation] keyword[assert] identifier[currentOperation] keyword[or] identifier[btopString] == literal[string] keyword[if] identifier[currentOperation] : identifier[result] . 
identifier[append] ( literal[string] % (( literal[int] * identifier[currentLength] ) keyword[if] identifier[aa] keyword[else] identifier[currentLength] , identifier[currentOperation] )) keyword[return] literal[string] . identifier[join] ( identifier[result] )
def btop2cigar(btopString, concise=False, aa=False): """ Convert a BTOP string to a CIGAR string. @param btopString: A C{str} BTOP sequence. @param concise: If C{True}, use 'M' for matches and mismatches instead of the more specific 'X' and '='. @param aa: If C{True}, C{btopString} will be interpreted as though it refers to amino acids (as in the BTOP string produced by DIAMOND). In that case, it is not possible to use the 'precise' CIGAR characters because amino acids have multiple codons so we cannot know whether an amino acid match is due to an exact nucleotide matches or not. Also, the numbers in the BTOP string will be multiplied by 3 since they refer to a number of amino acids matching. @raise ValueError: If L{parseBtop} finds an error in C{btopString} or if C{aa} and C{concise} are both C{True}. @return: A C{str} CIGAR string. """ if aa and concise: raise ValueError('aa and concise cannot both be True') # depends on [control=['if'], data=[]] result = [] thisLength = thisOperation = currentLength = currentOperation = None for item in parseBtop(btopString): if isinstance(item, int): thisLength = item thisOperation = CEQUAL if concise else CMATCH # depends on [control=['if'], data=[]] else: thisLength = 1 (query, reference) = item if query == '-': # The query has a gap. That means that in matching the # query to the reference a deletion is needed in the # reference. assert reference != '-' thisOperation = CDEL # depends on [control=['if'], data=[]] elif reference == '-': # The reference has a gap. That means that in matching the # query to the reference an insertion is needed in the # reference. thisOperation = CINS # depends on [control=['if'], data=[]] else: # A substitution was needed. 
assert query != reference thisOperation = CDIFF if concise else CMATCH if thisOperation == currentOperation: currentLength += thisLength # depends on [control=['if'], data=[]] else: if currentOperation: result.append('%d%s' % (3 * currentLength if aa else currentLength, currentOperation)) # depends on [control=['if'], data=[]] (currentLength, currentOperation) = (thisLength, thisOperation) # depends on [control=['for'], data=['item']] # We reached the end of the BTOP string. If there was an operation # underway, emit it. The 'if' here should only be needed to catch the # case where btopString was empty. assert currentOperation or btopString == '' if currentOperation: result.append('%d%s' % (3 * currentLength if aa else currentLength, currentOperation)) # depends on [control=['if'], data=[]] return ''.join(result)
def ents(self,cls="Word",flattenList=True): """Returns a list of entities of the classname specified the second argument. For instance. to call a Word-object's ents('Phoneme') would return a list of the word's Phoneme objects sequentially. This method recursively searches the self-object's children for the type of object specified.""" ents = [] """ print 'getting entities',self.classname() if self.classname() == cls: return [self] else: for child in self.children: if type(child)==type([]): if flattenList: ents+=child[0].ents(cls=cls,flattenList=flattenList) else: ents_list2ndlevel=[] for chld in child: if chld: ents_list2ndlevel+=chld.ents(cls=cls,flattenList=flattenList) ents+=[ents_list2ndlevel] else: if child: ents += child.ents(cls=cls,flattenList=flattenList) """ #print 'getting entities',self.classname() if self.classname() == cls: return [self] else: for child in self.children: #print child,child.classname() if child.classname()=='WordToken': if cls=='WordToken': ents+=[child] elif not child.children: pass elif cls=='Word': if flattenList: ents+=[child.children[0]] else: ents+=[child.children] else: if child: ents += child.children[0].ents(cls=cls,flattenList=flattenList) else: if child: ents += child.ents(cls=cls,flattenList=flattenList) return ents
def function[ents, parameter[self, cls, flattenList]]: constant[Returns a list of entities of the classname specified the second argument. For instance. to call a Word-object's ents('Phoneme') would return a list of the word's Phoneme objects sequentially. This method recursively searches the self-object's children for the type of object specified.] variable[ents] assign[=] list[[]] constant[ print 'getting entities',self.classname() if self.classname() == cls: return [self] else: for child in self.children: if type(child)==type([]): if flattenList: ents+=child[0].ents(cls=cls,flattenList=flattenList) else: ents_list2ndlevel=[] for chld in child: if chld: ents_list2ndlevel+=chld.ents(cls=cls,flattenList=flattenList) ents+=[ents_list2ndlevel] else: if child: ents += child.ents(cls=cls,flattenList=flattenList) ] if compare[call[name[self].classname, parameter[]] equal[==] name[cls]] begin[:] return[list[[<ast.Name object at 0x7da204962890>]]] return[name[ents]]
keyword[def] identifier[ents] ( identifier[self] , identifier[cls] = literal[string] , identifier[flattenList] = keyword[True] ): literal[string] identifier[ents] =[] literal[string] keyword[if] identifier[self] . identifier[classname] ()== identifier[cls] : keyword[return] [ identifier[self] ] keyword[else] : keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] : keyword[if] identifier[child] . identifier[classname] ()== literal[string] : keyword[if] identifier[cls] == literal[string] : identifier[ents] +=[ identifier[child] ] keyword[elif] keyword[not] identifier[child] . identifier[children] : keyword[pass] keyword[elif] identifier[cls] == literal[string] : keyword[if] identifier[flattenList] : identifier[ents] +=[ identifier[child] . identifier[children] [ literal[int] ]] keyword[else] : identifier[ents] +=[ identifier[child] . identifier[children] ] keyword[else] : keyword[if] identifier[child] : identifier[ents] += identifier[child] . identifier[children] [ literal[int] ]. identifier[ents] ( identifier[cls] = identifier[cls] , identifier[flattenList] = identifier[flattenList] ) keyword[else] : keyword[if] identifier[child] : identifier[ents] += identifier[child] . identifier[ents] ( identifier[cls] = identifier[cls] , identifier[flattenList] = identifier[flattenList] ) keyword[return] identifier[ents]
def ents(self, cls='Word', flattenList=True): """Returns a list of entities of the classname specified the second argument. For instance. to call a Word-object's ents('Phoneme') would return a list of the word's Phoneme objects sequentially. This method recursively searches the self-object's children for the type of object specified.""" ents = [] "\n\t\tprint 'getting entities',self.classname()\n\t\tif self.classname() == cls:\n\t\t\treturn [self]\n\t\telse:\n\t\t\tfor child in self.children:\n\t\t\t\tif type(child)==type([]):\n\t\t\t\t\tif flattenList:\n\t\t\t\t\t\tents+=child[0].ents(cls=cls,flattenList=flattenList)\n\t\t\t\t\telse:\n\t\t\t\t\t\tents_list2ndlevel=[]\n\t\t\t\t\t\tfor chld in child:\n\t\t\t\t\t\t\tif chld:\n\t\t\t\t\t\t\t\tents_list2ndlevel+=chld.ents(cls=cls,flattenList=flattenList)\n\t\t\t\t\t\tents+=[ents_list2ndlevel]\n\n\t\t\t\telse:\n\t\t\t\t\tif child:\n\t\t\t\t\t\tents += child.ents(cls=cls,flattenList=flattenList)\n\t\t" #print 'getting entities',self.classname() if self.classname() == cls: return [self] # depends on [control=['if'], data=[]] else: for child in self.children: #print child,child.classname() if child.classname() == 'WordToken': if cls == 'WordToken': ents += [child] # depends on [control=['if'], data=[]] elif not child.children: pass # depends on [control=['if'], data=[]] elif cls == 'Word': if flattenList: ents += [child.children[0]] # depends on [control=['if'], data=[]] else: ents += [child.children] # depends on [control=['if'], data=[]] elif child: ents += child.children[0].ents(cls=cls, flattenList=flattenList) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif child: ents += child.ents(cls=cls, flattenList=flattenList) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']] return ents
def hr_dp996(self, value=None): """ Corresponds to IDD Field `hr_dp996` humidity ratio, calculated at standard atmospheric pressure at elevation of station, corresponding to Dew-point temperature corresponding to 99.6% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `hr_dp996` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float ' 'for field `hr_dp996`'.format(value)) self._hr_dp996 = value
def function[hr_dp996, parameter[self, value]]: constant[ Corresponds to IDD Field `hr_dp996` humidity ratio, calculated at standard atmospheric pressure at elevation of station, corresponding to Dew-point temperature corresponding to 99.6% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `hr_dp996` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value ] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da18c4cda50> name[self]._hr_dp996 assign[=] name[value]
keyword[def] identifier[hr_dp996] ( identifier[self] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[float] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) identifier[self] . identifier[_hr_dp996] = identifier[value]
def hr_dp996(self, value=None): """ Corresponds to IDD Field `hr_dp996` humidity ratio, calculated at standard atmospheric pressure at elevation of station, corresponding to Dew-point temperature corresponding to 99.6% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `hr_dp996` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('value {} need to be of type float for field `hr_dp996`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']] self._hr_dp996 = value
def GetArtifactParserDependencies(rdf_artifact): """Return the set of knowledgebase path dependencies required by the parser. Args: rdf_artifact: RDF artifact object. Returns: A set of strings for the required kb objects e.g. ["users.appdata", "systemroot"] """ deps = set() processors = parser.Parser.GetClassesByArtifact(rdf_artifact.name) for p in processors: deps.update(p.knowledgebase_dependencies) return deps
def function[GetArtifactParserDependencies, parameter[rdf_artifact]]: constant[Return the set of knowledgebase path dependencies required by the parser. Args: rdf_artifact: RDF artifact object. Returns: A set of strings for the required kb objects e.g. ["users.appdata", "systemroot"] ] variable[deps] assign[=] call[name[set], parameter[]] variable[processors] assign[=] call[name[parser].Parser.GetClassesByArtifact, parameter[name[rdf_artifact].name]] for taget[name[p]] in starred[name[processors]] begin[:] call[name[deps].update, parameter[name[p].knowledgebase_dependencies]] return[name[deps]]
keyword[def] identifier[GetArtifactParserDependencies] ( identifier[rdf_artifact] ): literal[string] identifier[deps] = identifier[set] () identifier[processors] = identifier[parser] . identifier[Parser] . identifier[GetClassesByArtifact] ( identifier[rdf_artifact] . identifier[name] ) keyword[for] identifier[p] keyword[in] identifier[processors] : identifier[deps] . identifier[update] ( identifier[p] . identifier[knowledgebase_dependencies] ) keyword[return] identifier[deps]
def GetArtifactParserDependencies(rdf_artifact): """Return the set of knowledgebase path dependencies required by the parser. Args: rdf_artifact: RDF artifact object. Returns: A set of strings for the required kb objects e.g. ["users.appdata", "systemroot"] """ deps = set() processors = parser.Parser.GetClassesByArtifact(rdf_artifact.name) for p in processors: deps.update(p.knowledgebase_dependencies) # depends on [control=['for'], data=['p']] return deps
def encryptstring(text, password): """ Encrypt a string according to a specific password. :type text: string :param text: The text to encrypt. :type pass: string :param pass: The password to encrypt the text with. """ enc = [] for i in enumerate(text): key_c = password[i[0] % len(password)] enc_c = chr((ord(i[1]) + ord(key_c)) % 256) enc.append(enc_c) return base64.urlsafe_b64encode("".join(enc).encode()).decode()
def function[encryptstring, parameter[text, password]]: constant[ Encrypt a string according to a specific password. :type text: string :param text: The text to encrypt. :type pass: string :param pass: The password to encrypt the text with. ] variable[enc] assign[=] list[[]] for taget[name[i]] in starred[call[name[enumerate], parameter[name[text]]]] begin[:] variable[key_c] assign[=] call[name[password]][binary_operation[call[name[i]][constant[0]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[password]]]]] variable[enc_c] assign[=] call[name[chr], parameter[binary_operation[binary_operation[call[name[ord], parameter[call[name[i]][constant[1]]]] + call[name[ord], parameter[name[key_c]]]] <ast.Mod object at 0x7da2590d6920> constant[256]]]] call[name[enc].append, parameter[name[enc_c]]] return[call[call[name[base64].urlsafe_b64encode, parameter[call[call[constant[].join, parameter[name[enc]]].encode, parameter[]]]].decode, parameter[]]]
keyword[def] identifier[encryptstring] ( identifier[text] , identifier[password] ): literal[string] identifier[enc] =[] keyword[for] identifier[i] keyword[in] identifier[enumerate] ( identifier[text] ): identifier[key_c] = identifier[password] [ identifier[i] [ literal[int] ]% identifier[len] ( identifier[password] )] identifier[enc_c] = identifier[chr] (( identifier[ord] ( identifier[i] [ literal[int] ])+ identifier[ord] ( identifier[key_c] ))% literal[int] ) identifier[enc] . identifier[append] ( identifier[enc_c] ) keyword[return] identifier[base64] . identifier[urlsafe_b64encode] ( literal[string] . identifier[join] ( identifier[enc] ). identifier[encode] ()). identifier[decode] ()
def encryptstring(text, password): """ Encrypt a string according to a specific password. :type text: string :param text: The text to encrypt. :type pass: string :param pass: The password to encrypt the text with. """ enc = [] for i in enumerate(text): key_c = password[i[0] % len(password)] enc_c = chr((ord(i[1]) + ord(key_c)) % 256) enc.append(enc_c) # depends on [control=['for'], data=['i']] return base64.urlsafe_b64encode(''.join(enc).encode()).decode()
def recordSet( self ): """ Returns the record set that is associated with this widget. :return <orb.RecordSet> || None """ if ( not self.table() ): return None recordSet = RecordSet(self.table()) recordSet.setQuery(self.query()) # set the grouping options grouping = nativestring(self.uiGroupingTXT.text()).split(',') while ( '' in grouping ): grouping.remove('') recordSet.setGroupBy( grouping ) # set the sorting options sorting = nativestring(self.uiSortingTXT.text()).split(',') while ( '' in sorting ): sorting.remove('') recordSet.setOrder([i.split('|') for i in sorting]) # set the paged options recordSet.setPaged(self.uiPagedCHK.isChecked()) recordSet.setPageSize(self.uiPagedSPN.value()) return recordSet
def function[recordSet, parameter[self]]: constant[ Returns the record set that is associated with this widget. :return <orb.RecordSet> || None ] if <ast.UnaryOp object at 0x7da18f09f5e0> begin[:] return[constant[None]] variable[recordSet] assign[=] call[name[RecordSet], parameter[call[name[self].table, parameter[]]]] call[name[recordSet].setQuery, parameter[call[name[self].query, parameter[]]]] variable[grouping] assign[=] call[call[name[nativestring], parameter[call[name[self].uiGroupingTXT.text, parameter[]]]].split, parameter[constant[,]]] while compare[constant[] in name[grouping]] begin[:] call[name[grouping].remove, parameter[constant[]]] call[name[recordSet].setGroupBy, parameter[name[grouping]]] variable[sorting] assign[=] call[call[name[nativestring], parameter[call[name[self].uiSortingTXT.text, parameter[]]]].split, parameter[constant[,]]] while compare[constant[] in name[sorting]] begin[:] call[name[sorting].remove, parameter[constant[]]] call[name[recordSet].setOrder, parameter[<ast.ListComp object at 0x7da18fe92860>]] call[name[recordSet].setPaged, parameter[call[name[self].uiPagedCHK.isChecked, parameter[]]]] call[name[recordSet].setPageSize, parameter[call[name[self].uiPagedSPN.value, parameter[]]]] return[name[recordSet]]
keyword[def] identifier[recordSet] ( identifier[self] ): literal[string] keyword[if] ( keyword[not] identifier[self] . identifier[table] ()): keyword[return] keyword[None] identifier[recordSet] = identifier[RecordSet] ( identifier[self] . identifier[table] ()) identifier[recordSet] . identifier[setQuery] ( identifier[self] . identifier[query] ()) identifier[grouping] = identifier[nativestring] ( identifier[self] . identifier[uiGroupingTXT] . identifier[text] ()). identifier[split] ( literal[string] ) keyword[while] ( literal[string] keyword[in] identifier[grouping] ): identifier[grouping] . identifier[remove] ( literal[string] ) identifier[recordSet] . identifier[setGroupBy] ( identifier[grouping] ) identifier[sorting] = identifier[nativestring] ( identifier[self] . identifier[uiSortingTXT] . identifier[text] ()). identifier[split] ( literal[string] ) keyword[while] ( literal[string] keyword[in] identifier[sorting] ): identifier[sorting] . identifier[remove] ( literal[string] ) identifier[recordSet] . identifier[setOrder] ([ identifier[i] . identifier[split] ( literal[string] ) keyword[for] identifier[i] keyword[in] identifier[sorting] ]) identifier[recordSet] . identifier[setPaged] ( identifier[self] . identifier[uiPagedCHK] . identifier[isChecked] ()) identifier[recordSet] . identifier[setPageSize] ( identifier[self] . identifier[uiPagedSPN] . identifier[value] ()) keyword[return] identifier[recordSet]
def recordSet(self): """ Returns the record set that is associated with this widget. :return <orb.RecordSet> || None """ if not self.table(): return None # depends on [control=['if'], data=[]] recordSet = RecordSet(self.table()) recordSet.setQuery(self.query()) # set the grouping options grouping = nativestring(self.uiGroupingTXT.text()).split(',') while '' in grouping: grouping.remove('') # depends on [control=['while'], data=['grouping']] recordSet.setGroupBy(grouping) # set the sorting options sorting = nativestring(self.uiSortingTXT.text()).split(',') while '' in sorting: sorting.remove('') # depends on [control=['while'], data=['sorting']] recordSet.setOrder([i.split('|') for i in sorting]) # set the paged options recordSet.setPaged(self.uiPagedCHK.isChecked()) recordSet.setPageSize(self.uiPagedSPN.value()) return recordSet
def add_forbidden_path(self, forbidden): """Specify a path, or list of paths, to not copy, even if it's part of a copy_specs[] entry. """ if isinstance(forbidden, six.string_types): forbidden = [forbidden] if self.use_sysroot(): forbidden = [self.join_sysroot(f) for f in forbidden] for forbid in forbidden: self._log_info("adding forbidden path '%s'" % forbid) for path in glob.glob(forbid): self.forbidden_paths.append(path)
def function[add_forbidden_path, parameter[self, forbidden]]: constant[Specify a path, or list of paths, to not copy, even if it's part of a copy_specs[] entry. ] if call[name[isinstance], parameter[name[forbidden], name[six].string_types]] begin[:] variable[forbidden] assign[=] list[[<ast.Name object at 0x7da204621600>]] if call[name[self].use_sysroot, parameter[]] begin[:] variable[forbidden] assign[=] <ast.ListComp object at 0x7da204623d60> for taget[name[forbid]] in starred[name[forbidden]] begin[:] call[name[self]._log_info, parameter[binary_operation[constant[adding forbidden path '%s'] <ast.Mod object at 0x7da2590d6920> name[forbid]]]] for taget[name[path]] in starred[call[name[glob].glob, parameter[name[forbid]]]] begin[:] call[name[self].forbidden_paths.append, parameter[name[path]]]
keyword[def] identifier[add_forbidden_path] ( identifier[self] , identifier[forbidden] ): literal[string] keyword[if] identifier[isinstance] ( identifier[forbidden] , identifier[six] . identifier[string_types] ): identifier[forbidden] =[ identifier[forbidden] ] keyword[if] identifier[self] . identifier[use_sysroot] (): identifier[forbidden] =[ identifier[self] . identifier[join_sysroot] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[forbidden] ] keyword[for] identifier[forbid] keyword[in] identifier[forbidden] : identifier[self] . identifier[_log_info] ( literal[string] % identifier[forbid] ) keyword[for] identifier[path] keyword[in] identifier[glob] . identifier[glob] ( identifier[forbid] ): identifier[self] . identifier[forbidden_paths] . identifier[append] ( identifier[path] )
def add_forbidden_path(self, forbidden): """Specify a path, or list of paths, to not copy, even if it's part of a copy_specs[] entry. """ if isinstance(forbidden, six.string_types): forbidden = [forbidden] # depends on [control=['if'], data=[]] if self.use_sysroot(): forbidden = [self.join_sysroot(f) for f in forbidden] # depends on [control=['if'], data=[]] for forbid in forbidden: self._log_info("adding forbidden path '%s'" % forbid) for path in glob.glob(forbid): self.forbidden_paths.append(path) # depends on [control=['for'], data=['path']] # depends on [control=['for'], data=['forbid']]
def synchelp(f): ''' The synchelp decorator allows the transparent execution of a coroutine using the global loop from a thread other than the event loop. In both use cases, teh actual work is done by the global event loop. Examples: Use as a decorator:: @s_glob.synchelp async def stuff(x, y): await dostuff() Calling the stuff function as regular async code using the standard await syntax:: valu = await stuff(x, y) Calling the stuff function as regular sync code outside of the event loop thread:: valu = stuff(x, y) ''' def wrap(*args, **kwargs): coro = f(*args, **kwargs) if not iAmLoop(): return sync(coro) return coro return wrap
def function[synchelp, parameter[f]]: constant[ The synchelp decorator allows the transparent execution of a coroutine using the global loop from a thread other than the event loop. In both use cases, teh actual work is done by the global event loop. Examples: Use as a decorator:: @s_glob.synchelp async def stuff(x, y): await dostuff() Calling the stuff function as regular async code using the standard await syntax:: valu = await stuff(x, y) Calling the stuff function as regular sync code outside of the event loop thread:: valu = stuff(x, y) ] def function[wrap, parameter[]]: variable[coro] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da1b230a560>]] if <ast.UnaryOp object at 0x7da1b230baf0> begin[:] return[call[name[sync], parameter[name[coro]]]] return[name[coro]] return[name[wrap]]
keyword[def] identifier[synchelp] ( identifier[f] ): literal[string] keyword[def] identifier[wrap] (* identifier[args] ,** identifier[kwargs] ): identifier[coro] = identifier[f] (* identifier[args] ,** identifier[kwargs] ) keyword[if] keyword[not] identifier[iAmLoop] (): keyword[return] identifier[sync] ( identifier[coro] ) keyword[return] identifier[coro] keyword[return] identifier[wrap]
def synchelp(f): """ The synchelp decorator allows the transparent execution of a coroutine using the global loop from a thread other than the event loop. In both use cases, teh actual work is done by the global event loop. Examples: Use as a decorator:: @s_glob.synchelp async def stuff(x, y): await dostuff() Calling the stuff function as regular async code using the standard await syntax:: valu = await stuff(x, y) Calling the stuff function as regular sync code outside of the event loop thread:: valu = stuff(x, y) """ def wrap(*args, **kwargs): coro = f(*args, **kwargs) if not iAmLoop(): return sync(coro) # depends on [control=['if'], data=[]] return coro return wrap
def get_all_tags_of_supplier(self, supplier_id): """ Get all supplier properties This will iterate over all pages until it gets all elements. So if the rate limit exceeded it will throw an Exception and you will get nothing :param supplier_id: the supplier id :return: list """ return self._iterate_through_pages( get_function=self.get_tags_of_supplier_per_page, resource=SUPPLIER_TAGS, **{'supplier_id': supplier_id} )
def function[get_all_tags_of_supplier, parameter[self, supplier_id]]: constant[ Get all supplier properties This will iterate over all pages until it gets all elements. So if the rate limit exceeded it will throw an Exception and you will get nothing :param supplier_id: the supplier id :return: list ] return[call[name[self]._iterate_through_pages, parameter[]]]
keyword[def] identifier[get_all_tags_of_supplier] ( identifier[self] , identifier[supplier_id] ): literal[string] keyword[return] identifier[self] . identifier[_iterate_through_pages] ( identifier[get_function] = identifier[self] . identifier[get_tags_of_supplier_per_page] , identifier[resource] = identifier[SUPPLIER_TAGS] , **{ literal[string] : identifier[supplier_id] } )
def get_all_tags_of_supplier(self, supplier_id): """ Get all supplier properties This will iterate over all pages until it gets all elements. So if the rate limit exceeded it will throw an Exception and you will get nothing :param supplier_id: the supplier id :return: list """ return self._iterate_through_pages(get_function=self.get_tags_of_supplier_per_page, resource=SUPPLIER_TAGS, **{'supplier_id': supplier_id})
def remover(self, id_rack): """Remove Rack by the identifier. :param id_rack: Identifier of the Rack. Integer value and greater than zero. :return: None :raise InvalidParameterError: The identifier of Rack is null and invalid. :raise RackNaoExisteError: Rack not registered. :raise RackError: Rack is associated with a script. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. """ if not is_valid_int_param(id_rack): raise InvalidParameterError( u'The identifier of Rack is invalid or was not informed.') url = 'rack/' + str(id_rack) + '/' code, xml = self.submit(None, 'DELETE', url) return self.response(code, xml)
def function[remover, parameter[self, id_rack]]: constant[Remove Rack by the identifier. :param id_rack: Identifier of the Rack. Integer value and greater than zero. :return: None :raise InvalidParameterError: The identifier of Rack is null and invalid. :raise RackNaoExisteError: Rack not registered. :raise RackError: Rack is associated with a script. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. ] if <ast.UnaryOp object at 0x7da1b2345d80> begin[:] <ast.Raise object at 0x7da1b23441f0> variable[url] assign[=] binary_operation[binary_operation[constant[rack/] + call[name[str], parameter[name[id_rack]]]] + constant[/]] <ast.Tuple object at 0x7da1b23442b0> assign[=] call[name[self].submit, parameter[constant[None], constant[DELETE], name[url]]] return[call[name[self].response, parameter[name[code], name[xml]]]]
keyword[def] identifier[remover] ( identifier[self] , identifier[id_rack] ): literal[string] keyword[if] keyword[not] identifier[is_valid_int_param] ( identifier[id_rack] ): keyword[raise] identifier[InvalidParameterError] ( literal[string] ) identifier[url] = literal[string] + identifier[str] ( identifier[id_rack] )+ literal[string] identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] ) keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] )
def remover(self, id_rack): """Remove Rack by the identifier. :param id_rack: Identifier of the Rack. Integer value and greater than zero. :return: None :raise InvalidParameterError: The identifier of Rack is null and invalid. :raise RackNaoExisteError: Rack not registered. :raise RackError: Rack is associated with a script. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. """ if not is_valid_int_param(id_rack): raise InvalidParameterError(u'The identifier of Rack is invalid or was not informed.') # depends on [control=['if'], data=[]] url = 'rack/' + str(id_rack) + '/' (code, xml) = self.submit(None, 'DELETE', url) return self.response(code, xml)
def handle_subscribed_event(self, event_obj, event_name): """Execute the registered handler of an event. Retrieve the handler and its arguments, and execute the handler in a new thread. Args: event_obj: Json object of the event. event_name: Name of the event to call handler for. """ handler, args = self.handlers[event_name] self.executor.submit(handler, event_obj, *args)
def function[handle_subscribed_event, parameter[self, event_obj, event_name]]: constant[Execute the registered handler of an event. Retrieve the handler and its arguments, and execute the handler in a new thread. Args: event_obj: Json object of the event. event_name: Name of the event to call handler for. ] <ast.Tuple object at 0x7da1b08c9ba0> assign[=] call[name[self].handlers][name[event_name]] call[name[self].executor.submit, parameter[name[handler], name[event_obj], <ast.Starred object at 0x7da1b08c8250>]]
keyword[def] identifier[handle_subscribed_event] ( identifier[self] , identifier[event_obj] , identifier[event_name] ): literal[string] identifier[handler] , identifier[args] = identifier[self] . identifier[handlers] [ identifier[event_name] ] identifier[self] . identifier[executor] . identifier[submit] ( identifier[handler] , identifier[event_obj] ,* identifier[args] )
def handle_subscribed_event(self, event_obj, event_name): """Execute the registered handler of an event. Retrieve the handler and its arguments, and execute the handler in a new thread. Args: event_obj: Json object of the event. event_name: Name of the event to call handler for. """ (handler, args) = self.handlers[event_name] self.executor.submit(handler, event_obj, *args)
def _default_tokenizer_func(content, keyword_weight_pair): """ Default tokenizer function that uses jieba tokenizer. :param keyword_weight_pair: maximum pair number of the keyword-weight list. :return: return keyword-weight list. Example: [('Example',0.4511233019962264),('Hello',0.25548051420382073),...]. """ seg_list = jieba.lcut_for_search(content) # Extract keyword-weight list by TF-IDF algorithms and by sorted maximum weight return jieba.analyse.extract_tags("".join(seg_list), topK=keyword_weight_pair, withWeight=True)
def function[_default_tokenizer_func, parameter[content, keyword_weight_pair]]: constant[ Default tokenizer function that uses jieba tokenizer. :param keyword_weight_pair: maximum pair number of the keyword-weight list. :return: return keyword-weight list. Example: [('Example',0.4511233019962264),('Hello',0.25548051420382073),...]. ] variable[seg_list] assign[=] call[name[jieba].lcut_for_search, parameter[name[content]]] return[call[name[jieba].analyse.extract_tags, parameter[call[constant[].join, parameter[name[seg_list]]]]]]
keyword[def] identifier[_default_tokenizer_func] ( identifier[content] , identifier[keyword_weight_pair] ): literal[string] identifier[seg_list] = identifier[jieba] . identifier[lcut_for_search] ( identifier[content] ) keyword[return] identifier[jieba] . identifier[analyse] . identifier[extract_tags] ( literal[string] . identifier[join] ( identifier[seg_list] ), identifier[topK] = identifier[keyword_weight_pair] , identifier[withWeight] = keyword[True] )
def _default_tokenizer_func(content, keyword_weight_pair): """ Default tokenizer function that uses jieba tokenizer. :param keyword_weight_pair: maximum pair number of the keyword-weight list. :return: return keyword-weight list. Example: [('Example',0.4511233019962264),('Hello',0.25548051420382073),...]. """ seg_list = jieba.lcut_for_search(content) # Extract keyword-weight list by TF-IDF algorithms and by sorted maximum weight return jieba.analyse.extract_tags(''.join(seg_list), topK=keyword_weight_pair, withWeight=True)
def CreateControlFromElement(element) -> 'Control': """ Create a concreate `Control` from a com type `IUIAutomationElement`. element: `ctypes.POINTER(IUIAutomationElement)`. Return a subclass of `Control`, an instance of the control's real type. """ if element: controlType = element.CurrentControlType if controlType in ControlConstructors: return ControlConstructors[controlType](element=element) else: Logger.WriteLine("element.CurrentControlType returns {}, invalid ControlType!".format(controlType), ConsoleColor.Red)
def function[CreateControlFromElement, parameter[element]]: constant[ Create a concreate `Control` from a com type `IUIAutomationElement`. element: `ctypes.POINTER(IUIAutomationElement)`. Return a subclass of `Control`, an instance of the control's real type. ] if name[element] begin[:] variable[controlType] assign[=] name[element].CurrentControlType if compare[name[controlType] in name[ControlConstructors]] begin[:] return[call[call[name[ControlConstructors]][name[controlType]], parameter[]]]
keyword[def] identifier[CreateControlFromElement] ( identifier[element] )-> literal[string] : literal[string] keyword[if] identifier[element] : identifier[controlType] = identifier[element] . identifier[CurrentControlType] keyword[if] identifier[controlType] keyword[in] identifier[ControlConstructors] : keyword[return] identifier[ControlConstructors] [ identifier[controlType] ]( identifier[element] = identifier[element] ) keyword[else] : identifier[Logger] . identifier[WriteLine] ( literal[string] . identifier[format] ( identifier[controlType] ), identifier[ConsoleColor] . identifier[Red] )
def CreateControlFromElement(element) -> 'Control': """ Create a concreate `Control` from a com type `IUIAutomationElement`. element: `ctypes.POINTER(IUIAutomationElement)`. Return a subclass of `Control`, an instance of the control's real type. """ if element: controlType = element.CurrentControlType if controlType in ControlConstructors: return ControlConstructors[controlType](element=element) # depends on [control=['if'], data=['controlType', 'ControlConstructors']] else: Logger.WriteLine('element.CurrentControlType returns {}, invalid ControlType!'.format(controlType), ConsoleColor.Red) # depends on [control=['if'], data=[]]
def pyflakes(): '''passive check of python programs by pyflakes. requirements: - pyflakes_ should be installed. ``easy_install pyflakes`` options.paved.pycheck.pyflakes.param .. _pyflakes: http://pypi.python.org/pypi/pyflakes ''' # filter out subpackages packages = [x for x in options.setup.packages if '.' not in x] sh('pyflakes {param} {files}'.format(param=options.paved.pycheck.pyflakes.param, files=' '.join(packages)))
def function[pyflakes, parameter[]]: constant[passive check of python programs by pyflakes. requirements: - pyflakes_ should be installed. ``easy_install pyflakes`` options.paved.pycheck.pyflakes.param .. _pyflakes: http://pypi.python.org/pypi/pyflakes ] variable[packages] assign[=] <ast.ListComp object at 0x7da1affedde0> call[name[sh], parameter[call[constant[pyflakes {param} {files}].format, parameter[]]]]
keyword[def] identifier[pyflakes] (): literal[string] identifier[packages] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[options] . identifier[setup] . identifier[packages] keyword[if] literal[string] keyword[not] keyword[in] identifier[x] ] identifier[sh] ( literal[string] . identifier[format] ( identifier[param] = identifier[options] . identifier[paved] . identifier[pycheck] . identifier[pyflakes] . identifier[param] , identifier[files] = literal[string] . identifier[join] ( identifier[packages] )))
def pyflakes(): """passive check of python programs by pyflakes. requirements: - pyflakes_ should be installed. ``easy_install pyflakes`` options.paved.pycheck.pyflakes.param .. _pyflakes: http://pypi.python.org/pypi/pyflakes """ # filter out subpackages packages = [x for x in options.setup.packages if '.' not in x] sh('pyflakes {param} {files}'.format(param=options.paved.pycheck.pyflakes.param, files=' '.join(packages)))
def date_from_string(string, format_string=None): """Runs through a few common string formats for datetimes, and attempts to coerce them into a datetime. Alternatively, format_string can provide either a single string to attempt or an iterable of strings to attempt.""" if isinstance(format_string, str): return datetime.datetime.strptime(string, format_string).date() elif format_string is None: format_string = [ "%Y-%m-%d", "%m-%d-%Y", "%m/%d/%Y", "%d/%m/%Y", ] for format in format_string: try: return datetime.datetime.strptime(string, format).date() except ValueError: continue raise ValueError("Could not produce date from string: {}".format(string))
def function[date_from_string, parameter[string, format_string]]: constant[Runs through a few common string formats for datetimes, and attempts to coerce them into a datetime. Alternatively, format_string can provide either a single string to attempt or an iterable of strings to attempt.] if call[name[isinstance], parameter[name[format_string], name[str]]] begin[:] return[call[call[name[datetime].datetime.strptime, parameter[name[string], name[format_string]]].date, parameter[]]] for taget[name[format]] in starred[name[format_string]] begin[:] <ast.Try object at 0x7da1b28af010> <ast.Raise object at 0x7da1b28b8c70>
keyword[def] identifier[date_from_string] ( identifier[string] , identifier[format_string] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[format_string] , identifier[str] ): keyword[return] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[string] , identifier[format_string] ). identifier[date] () keyword[elif] identifier[format_string] keyword[is] keyword[None] : identifier[format_string] =[ literal[string] , literal[string] , literal[string] , literal[string] , ] keyword[for] identifier[format] keyword[in] identifier[format_string] : keyword[try] : keyword[return] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[string] , identifier[format] ). identifier[date] () keyword[except] identifier[ValueError] : keyword[continue] keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[string] ))
def date_from_string(string, format_string=None): """Runs through a few common string formats for datetimes, and attempts to coerce them into a datetime. Alternatively, format_string can provide either a single string to attempt or an iterable of strings to attempt.""" if isinstance(format_string, str): return datetime.datetime.strptime(string, format_string).date() # depends on [control=['if'], data=[]] elif format_string is None: format_string = ['%Y-%m-%d', '%m-%d-%Y', '%m/%d/%Y', '%d/%m/%Y'] # depends on [control=['if'], data=['format_string']] for format in format_string: try: return datetime.datetime.strptime(string, format).date() # depends on [control=['try'], data=[]] except ValueError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['format']] raise ValueError('Could not produce date from string: {}'.format(string))
def get(tree, name): """ Return a float value attribute NAME from TREE. """ if name in tree: value = tree[name] else: return float("nan") try: a = float(value) except ValueError: a = float("nan") return a
def function[get, parameter[tree, name]]: constant[ Return a float value attribute NAME from TREE. ] if compare[name[name] in name[tree]] begin[:] variable[value] assign[=] call[name[tree]][name[name]] <ast.Try object at 0x7da2044c3fa0> return[name[a]]
keyword[def] identifier[get] ( identifier[tree] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[tree] : identifier[value] = identifier[tree] [ identifier[name] ] keyword[else] : keyword[return] identifier[float] ( literal[string] ) keyword[try] : identifier[a] = identifier[float] ( identifier[value] ) keyword[except] identifier[ValueError] : identifier[a] = identifier[float] ( literal[string] ) keyword[return] identifier[a]
def get(tree, name): """ Return a float value attribute NAME from TREE. """ if name in tree: value = tree[name] # depends on [control=['if'], data=['name', 'tree']] else: return float('nan') try: a = float(value) # depends on [control=['try'], data=[]] except ValueError: a = float('nan') # depends on [control=['except'], data=[]] return a
def _is_surrounded(self, b): """ Perform a wrapped LTE comparison only considering the SI bounds :param a: The first operand :param b: The second operand :return: True if a <= b, False otherwise """ a = self if a.is_empty: return True if a.is_top and b.is_top: return True elif a.is_top: return False elif b.is_top: return True if b._surrounds_member(a.lower_bound) and b._surrounds_member(a.upper_bound): if ((b.lower_bound == a.lower_bound and b.upper_bound == a.upper_bound) or not a._surrounds_member(b.lower_bound) or not a._surrounds_member(b.upper_bound)): return True return False
def function[_is_surrounded, parameter[self, b]]: constant[ Perform a wrapped LTE comparison only considering the SI bounds :param a: The first operand :param b: The second operand :return: True if a <= b, False otherwise ] variable[a] assign[=] name[self] if name[a].is_empty begin[:] return[constant[True]] if <ast.BoolOp object at 0x7da20c7cb610> begin[:] return[constant[True]] if <ast.BoolOp object at 0x7da20c7cb880> begin[:] if <ast.BoolOp object at 0x7da20c7c9de0> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[_is_surrounded] ( identifier[self] , identifier[b] ): literal[string] identifier[a] = identifier[self] keyword[if] identifier[a] . identifier[is_empty] : keyword[return] keyword[True] keyword[if] identifier[a] . identifier[is_top] keyword[and] identifier[b] . identifier[is_top] : keyword[return] keyword[True] keyword[elif] identifier[a] . identifier[is_top] : keyword[return] keyword[False] keyword[elif] identifier[b] . identifier[is_top] : keyword[return] keyword[True] keyword[if] identifier[b] . identifier[_surrounds_member] ( identifier[a] . identifier[lower_bound] ) keyword[and] identifier[b] . identifier[_surrounds_member] ( identifier[a] . identifier[upper_bound] ): keyword[if] (( identifier[b] . identifier[lower_bound] == identifier[a] . identifier[lower_bound] keyword[and] identifier[b] . identifier[upper_bound] == identifier[a] . identifier[upper_bound] ) keyword[or] keyword[not] identifier[a] . identifier[_surrounds_member] ( identifier[b] . identifier[lower_bound] ) keyword[or] keyword[not] identifier[a] . identifier[_surrounds_member] ( identifier[b] . identifier[upper_bound] )): keyword[return] keyword[True] keyword[return] keyword[False]
def _is_surrounded(self, b): """ Perform a wrapped LTE comparison only considering the SI bounds :param a: The first operand :param b: The second operand :return: True if a <= b, False otherwise """ a = self if a.is_empty: return True # depends on [control=['if'], data=[]] if a.is_top and b.is_top: return True # depends on [control=['if'], data=[]] elif a.is_top: return False # depends on [control=['if'], data=[]] elif b.is_top: return True # depends on [control=['if'], data=[]] if b._surrounds_member(a.lower_bound) and b._surrounds_member(a.upper_bound): if b.lower_bound == a.lower_bound and b.upper_bound == a.upper_bound or not a._surrounds_member(b.lower_bound) or (not a._surrounds_member(b.upper_bound)): return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return False
def end_tag(self, alt=None): """Return XML end tag for the receiver.""" if alt: name = alt else: name = self.name return "</" + name + ">"
def function[end_tag, parameter[self, alt]]: constant[Return XML end tag for the receiver.] if name[alt] begin[:] variable[name] assign[=] name[alt] return[binary_operation[binary_operation[constant[</] + name[name]] + constant[>]]]
keyword[def] identifier[end_tag] ( identifier[self] , identifier[alt] = keyword[None] ): literal[string] keyword[if] identifier[alt] : identifier[name] = identifier[alt] keyword[else] : identifier[name] = identifier[self] . identifier[name] keyword[return] literal[string] + identifier[name] + literal[string]
def end_tag(self, alt=None): """Return XML end tag for the receiver.""" if alt: name = alt # depends on [control=['if'], data=[]] else: name = self.name return '</' + name + '>'
def go(fn, *args, **kwargs): """Launch an operation on a thread and get a handle to its future result. >>> from time import sleep >>> def print_sleep_print(duration): ... sleep(duration) ... print('hello from background thread') ... sleep(duration) ... print('goodbye from background thread') ... return 'return value' ... >>> future = go(print_sleep_print, 0.1) >>> sleep(0.15) hello from background thread >>> print('main thread') main thread >>> result = future() goodbye from background thread >>> result 'return value' """ if not callable(fn): raise TypeError('go() requires a function, not %r' % (fn,)) result = [None] error = [] def target(): try: result[0] = fn(*args, **kwargs) except Exception: # Are we in interpreter shutdown? if sys: error.extend(sys.exc_info()) t = threading.Thread(target=target) t.daemon = True t.start() def get_result(timeout=10): t.join(timeout) if t.is_alive(): raise AssertionError('timed out waiting for %r' % fn) if error: reraise(*error) return result[0] return get_result
def function[go, parameter[fn]]: constant[Launch an operation on a thread and get a handle to its future result. >>> from time import sleep >>> def print_sleep_print(duration): ... sleep(duration) ... print('hello from background thread') ... sleep(duration) ... print('goodbye from background thread') ... return 'return value' ... >>> future = go(print_sleep_print, 0.1) >>> sleep(0.15) hello from background thread >>> print('main thread') main thread >>> result = future() goodbye from background thread >>> result 'return value' ] if <ast.UnaryOp object at 0x7da1b2766320> begin[:] <ast.Raise object at 0x7da1b27651e0> variable[result] assign[=] list[[<ast.Constant object at 0x7da1b2766d40>]] variable[error] assign[=] list[[]] def function[target, parameter[]]: <ast.Try object at 0x7da1b27662c0> variable[t] assign[=] call[name[threading].Thread, parameter[]] name[t].daemon assign[=] constant[True] call[name[t].start, parameter[]] def function[get_result, parameter[timeout]]: call[name[t].join, parameter[name[timeout]]] if call[name[t].is_alive, parameter[]] begin[:] <ast.Raise object at 0x7da1b2767a90> if name[error] begin[:] call[name[reraise], parameter[<ast.Starred object at 0x7da1b2764e20>]] return[call[name[result]][constant[0]]] return[name[get_result]]
keyword[def] identifier[go] ( identifier[fn] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[callable] ( identifier[fn] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[fn] ,)) identifier[result] =[ keyword[None] ] identifier[error] =[] keyword[def] identifier[target] (): keyword[try] : identifier[result] [ literal[int] ]= identifier[fn] (* identifier[args] ,** identifier[kwargs] ) keyword[except] identifier[Exception] : keyword[if] identifier[sys] : identifier[error] . identifier[extend] ( identifier[sys] . identifier[exc_info] ()) identifier[t] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[target] ) identifier[t] . identifier[daemon] = keyword[True] identifier[t] . identifier[start] () keyword[def] identifier[get_result] ( identifier[timeout] = literal[int] ): identifier[t] . identifier[join] ( identifier[timeout] ) keyword[if] identifier[t] . identifier[is_alive] (): keyword[raise] identifier[AssertionError] ( literal[string] % identifier[fn] ) keyword[if] identifier[error] : identifier[reraise] (* identifier[error] ) keyword[return] identifier[result] [ literal[int] ] keyword[return] identifier[get_result]
def go(fn, *args, **kwargs): """Launch an operation on a thread and get a handle to its future result. >>> from time import sleep >>> def print_sleep_print(duration): ... sleep(duration) ... print('hello from background thread') ... sleep(duration) ... print('goodbye from background thread') ... return 'return value' ... >>> future = go(print_sleep_print, 0.1) >>> sleep(0.15) hello from background thread >>> print('main thread') main thread >>> result = future() goodbye from background thread >>> result 'return value' """ if not callable(fn): raise TypeError('go() requires a function, not %r' % (fn,)) # depends on [control=['if'], data=[]] result = [None] error = [] def target(): try: result[0] = fn(*args, **kwargs) # depends on [control=['try'], data=[]] except Exception: # Are we in interpreter shutdown? if sys: error.extend(sys.exc_info()) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] t = threading.Thread(target=target) t.daemon = True t.start() def get_result(timeout=10): t.join(timeout) if t.is_alive(): raise AssertionError('timed out waiting for %r' % fn) # depends on [control=['if'], data=[]] if error: reraise(*error) # depends on [control=['if'], data=[]] return result[0] return get_result
def event_return(events): ''' Return event data to remote carbon server Provide a list of events to be stored in carbon ''' opts = _get_options({}) # Pass in empty ret, since this is a list of events opts['skip'] = True for event in events: log.trace('Carbon returner received event: %s', event) metric_base = event['tag'] saltdata = event['data'].get('data') _send(saltdata, metric_base, opts)
def function[event_return, parameter[events]]: constant[ Return event data to remote carbon server Provide a list of events to be stored in carbon ] variable[opts] assign[=] call[name[_get_options], parameter[dictionary[[], []]]] call[name[opts]][constant[skip]] assign[=] constant[True] for taget[name[event]] in starred[name[events]] begin[:] call[name[log].trace, parameter[constant[Carbon returner received event: %s], name[event]]] variable[metric_base] assign[=] call[name[event]][constant[tag]] variable[saltdata] assign[=] call[call[name[event]][constant[data]].get, parameter[constant[data]]] call[name[_send], parameter[name[saltdata], name[metric_base], name[opts]]]
keyword[def] identifier[event_return] ( identifier[events] ): literal[string] identifier[opts] = identifier[_get_options] ({}) identifier[opts] [ literal[string] ]= keyword[True] keyword[for] identifier[event] keyword[in] identifier[events] : identifier[log] . identifier[trace] ( literal[string] , identifier[event] ) identifier[metric_base] = identifier[event] [ literal[string] ] identifier[saltdata] = identifier[event] [ literal[string] ]. identifier[get] ( literal[string] ) identifier[_send] ( identifier[saltdata] , identifier[metric_base] , identifier[opts] )
def event_return(events): """ Return event data to remote carbon server Provide a list of events to be stored in carbon """ opts = _get_options({}) # Pass in empty ret, since this is a list of events opts['skip'] = True for event in events: log.trace('Carbon returner received event: %s', event) metric_base = event['tag'] saltdata = event['data'].get('data') _send(saltdata, metric_base, opts) # depends on [control=['for'], data=['event']]
def newcursor(self, dictcursor=False): ''' This creates a DB cursor for the current DB connection using a randomly generated handle. Returns a tuple with cursor and handle. Parameters ---------- dictcursor : bool If True, returns a cursor where each returned row can be addressed as a dictionary by column name. Returns ------- tuple The tuple is of the form (handle, psycopg2.Cursor instance). ''' handle = hashlib.sha256(os.urandom(12)).hexdigest() if dictcursor: self.cursors[handle] = self.connection.cursor( cursor_factory=psycopg2.extras.DictCursor ) else: self.cursors[handle] = self.connection.cursor() return (self.cursors[handle], handle)
def function[newcursor, parameter[self, dictcursor]]: constant[ This creates a DB cursor for the current DB connection using a randomly generated handle. Returns a tuple with cursor and handle. Parameters ---------- dictcursor : bool If True, returns a cursor where each returned row can be addressed as a dictionary by column name. Returns ------- tuple The tuple is of the form (handle, psycopg2.Cursor instance). ] variable[handle] assign[=] call[call[name[hashlib].sha256, parameter[call[name[os].urandom, parameter[constant[12]]]]].hexdigest, parameter[]] if name[dictcursor] begin[:] call[name[self].cursors][name[handle]] assign[=] call[name[self].connection.cursor, parameter[]]
keyword[def] identifier[newcursor] ( identifier[self] , identifier[dictcursor] = keyword[False] ): literal[string] identifier[handle] = identifier[hashlib] . identifier[sha256] ( identifier[os] . identifier[urandom] ( literal[int] )). identifier[hexdigest] () keyword[if] identifier[dictcursor] : identifier[self] . identifier[cursors] [ identifier[handle] ]= identifier[self] . identifier[connection] . identifier[cursor] ( identifier[cursor_factory] = identifier[psycopg2] . identifier[extras] . identifier[DictCursor] ) keyword[else] : identifier[self] . identifier[cursors] [ identifier[handle] ]= identifier[self] . identifier[connection] . identifier[cursor] () keyword[return] ( identifier[self] . identifier[cursors] [ identifier[handle] ], identifier[handle] )
def newcursor(self, dictcursor=False): """ This creates a DB cursor for the current DB connection using a randomly generated handle. Returns a tuple with cursor and handle. Parameters ---------- dictcursor : bool If True, returns a cursor where each returned row can be addressed as a dictionary by column name. Returns ------- tuple The tuple is of the form (handle, psycopg2.Cursor instance). """ handle = hashlib.sha256(os.urandom(12)).hexdigest() if dictcursor: self.cursors[handle] = self.connection.cursor(cursor_factory=psycopg2.extras.DictCursor) # depends on [control=['if'], data=[]] else: self.cursors[handle] = self.connection.cursor() return (self.cursors[handle], handle)
def copy_contents(self, dst, src, size, condition=None, src_memory=None, dst_memory=None, inspect=True, disable_actions=False): """ Copies data within a memory. :param dst: A claripy expression representing the address of the destination :param src: A claripy expression representing the address of the source The following parameters are optional. :param src_memory: Copy data from this SimMemory instead of self :param src_memory: Copy data to this SimMemory instead of self :param size: A claripy expression representing the size of the copy :param condition: A claripy expression representing a condition, if the write should be conditional. If this is determined to be false, the size of the copy will be 0. """ dst = _raw_ast(dst) src = _raw_ast(src) size = _raw_ast(size) condition = _raw_ast(condition) return self._copy_contents(dst, src, size, condition=condition, src_memory=src_memory, dst_memory=dst_memory, inspect=inspect, disable_actions=disable_actions)
def function[copy_contents, parameter[self, dst, src, size, condition, src_memory, dst_memory, inspect, disable_actions]]: constant[ Copies data within a memory. :param dst: A claripy expression representing the address of the destination :param src: A claripy expression representing the address of the source The following parameters are optional. :param src_memory: Copy data from this SimMemory instead of self :param src_memory: Copy data to this SimMemory instead of self :param size: A claripy expression representing the size of the copy :param condition: A claripy expression representing a condition, if the write should be conditional. If this is determined to be false, the size of the copy will be 0. ] variable[dst] assign[=] call[name[_raw_ast], parameter[name[dst]]] variable[src] assign[=] call[name[_raw_ast], parameter[name[src]]] variable[size] assign[=] call[name[_raw_ast], parameter[name[size]]] variable[condition] assign[=] call[name[_raw_ast], parameter[name[condition]]] return[call[name[self]._copy_contents, parameter[name[dst], name[src], name[size]]]]
keyword[def] identifier[copy_contents] ( identifier[self] , identifier[dst] , identifier[src] , identifier[size] , identifier[condition] = keyword[None] , identifier[src_memory] = keyword[None] , identifier[dst_memory] = keyword[None] , identifier[inspect] = keyword[True] , identifier[disable_actions] = keyword[False] ): literal[string] identifier[dst] = identifier[_raw_ast] ( identifier[dst] ) identifier[src] = identifier[_raw_ast] ( identifier[src] ) identifier[size] = identifier[_raw_ast] ( identifier[size] ) identifier[condition] = identifier[_raw_ast] ( identifier[condition] ) keyword[return] identifier[self] . identifier[_copy_contents] ( identifier[dst] , identifier[src] , identifier[size] , identifier[condition] = identifier[condition] , identifier[src_memory] = identifier[src_memory] , identifier[dst_memory] = identifier[dst_memory] , identifier[inspect] = identifier[inspect] , identifier[disable_actions] = identifier[disable_actions] )
def copy_contents(self, dst, src, size, condition=None, src_memory=None, dst_memory=None, inspect=True, disable_actions=False): """ Copies data within a memory. :param dst: A claripy expression representing the address of the destination :param src: A claripy expression representing the address of the source The following parameters are optional. :param src_memory: Copy data from this SimMemory instead of self :param src_memory: Copy data to this SimMemory instead of self :param size: A claripy expression representing the size of the copy :param condition: A claripy expression representing a condition, if the write should be conditional. If this is determined to be false, the size of the copy will be 0. """ dst = _raw_ast(dst) src = _raw_ast(src) size = _raw_ast(size) condition = _raw_ast(condition) return self._copy_contents(dst, src, size, condition=condition, src_memory=src_memory, dst_memory=dst_memory, inspect=inspect, disable_actions=disable_actions)
def osfn(filename): """Convert IRAF virtual path name to OS pathname.""" # Try to emulate the CL version closely: # # - expands IRAF virtual file names # - strips blanks around path components # - if no slashes or relative paths, return relative pathname # - otherwise return absolute pathname if filename is None: return filename ename = Expand(filename) dlist = [part.strip() for part in ename.split(os.sep)] if len(dlist) == 1 and dlist[0] not in [os.curdir, os.pardir]: return dlist[0] # I use str.join instead of os.path.join here because # os.path.join("","") returns "" instead of "/" epath = os.sep.join(dlist) fname = os.path.abspath(epath) # append '/' if relative directory was at end or filename ends with '/' if fname[-1] != os.sep and dlist[-1] in ['', os.curdir, os.pardir]: fname = fname + os.sep return fname
def function[osfn, parameter[filename]]: constant[Convert IRAF virtual path name to OS pathname.] if compare[name[filename] is constant[None]] begin[:] return[name[filename]] variable[ename] assign[=] call[name[Expand], parameter[name[filename]]] variable[dlist] assign[=] <ast.ListComp object at 0x7da2043449d0> if <ast.BoolOp object at 0x7da204347010> begin[:] return[call[name[dlist]][constant[0]]] variable[epath] assign[=] call[name[os].sep.join, parameter[name[dlist]]] variable[fname] assign[=] call[name[os].path.abspath, parameter[name[epath]]] if <ast.BoolOp object at 0x7da204346860> begin[:] variable[fname] assign[=] binary_operation[name[fname] + name[os].sep] return[name[fname]]
keyword[def] identifier[osfn] ( identifier[filename] ): literal[string] keyword[if] identifier[filename] keyword[is] keyword[None] : keyword[return] identifier[filename] identifier[ename] = identifier[Expand] ( identifier[filename] ) identifier[dlist] =[ identifier[part] . identifier[strip] () keyword[for] identifier[part] keyword[in] identifier[ename] . identifier[split] ( identifier[os] . identifier[sep] )] keyword[if] identifier[len] ( identifier[dlist] )== literal[int] keyword[and] identifier[dlist] [ literal[int] ] keyword[not] keyword[in] [ identifier[os] . identifier[curdir] , identifier[os] . identifier[pardir] ]: keyword[return] identifier[dlist] [ literal[int] ] identifier[epath] = identifier[os] . identifier[sep] . identifier[join] ( identifier[dlist] ) identifier[fname] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[epath] ) keyword[if] identifier[fname] [- literal[int] ]!= identifier[os] . identifier[sep] keyword[and] identifier[dlist] [- literal[int] ] keyword[in] [ literal[string] , identifier[os] . identifier[curdir] , identifier[os] . identifier[pardir] ]: identifier[fname] = identifier[fname] + identifier[os] . identifier[sep] keyword[return] identifier[fname]
def osfn(filename): """Convert IRAF virtual path name to OS pathname.""" # Try to emulate the CL version closely: # # - expands IRAF virtual file names # - strips blanks around path components # - if no slashes or relative paths, return relative pathname # - otherwise return absolute pathname if filename is None: return filename # depends on [control=['if'], data=['filename']] ename = Expand(filename) dlist = [part.strip() for part in ename.split(os.sep)] if len(dlist) == 1 and dlist[0] not in [os.curdir, os.pardir]: return dlist[0] # depends on [control=['if'], data=[]] # I use str.join instead of os.path.join here because # os.path.join("","") returns "" instead of "/" epath = os.sep.join(dlist) fname = os.path.abspath(epath) # append '/' if relative directory was at end or filename ends with '/' if fname[-1] != os.sep and dlist[-1] in ['', os.curdir, os.pardir]: fname = fname + os.sep # depends on [control=['if'], data=[]] return fname
def filter_transcription_factor(stmts_in, **kwargs): """Filter out RegulateAmounts where subject is not a transcription factor. Parameters ---------- stmts_in : list[indra.statements.Statement] A list of statements to filter. save : Optional[str] The name of a pickle file to save the results (stmts_out) into. Returns ------- stmts_out : list[indra.statements.Statement] A list of filtered statements. """ logger.info('Filtering %d statements to remove ' % len(stmts_in) + 'amount regulations by non-transcription-factors...') path = os.path.dirname(os.path.abspath(__file__)) tf_table = \ read_unicode_csv(path + '/../resources/transcription_factors.csv') gene_names = [lin[1] for lin in list(tf_table)[1:]] stmts_out = [] for st in stmts_in: if isinstance(st, RegulateAmount): if st.subj is not None: if st.subj.name in gene_names: stmts_out.append(st) else: stmts_out.append(st) logger.info('%d statements after filter...' % len(stmts_out)) dump_pkl = kwargs.get('save') if dump_pkl: dump_statements(stmts_out, dump_pkl) return stmts_out
def function[filter_transcription_factor, parameter[stmts_in]]: constant[Filter out RegulateAmounts where subject is not a transcription factor. Parameters ---------- stmts_in : list[indra.statements.Statement] A list of statements to filter. save : Optional[str] The name of a pickle file to save the results (stmts_out) into. Returns ------- stmts_out : list[indra.statements.Statement] A list of filtered statements. ] call[name[logger].info, parameter[binary_operation[binary_operation[constant[Filtering %d statements to remove ] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[stmts_in]]]] + constant[amount regulations by non-transcription-factors...]]]] variable[path] assign[=] call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]] variable[tf_table] assign[=] call[name[read_unicode_csv], parameter[binary_operation[name[path] + constant[/../resources/transcription_factors.csv]]]] variable[gene_names] assign[=] <ast.ListComp object at 0x7da18c4cdff0> variable[stmts_out] assign[=] list[[]] for taget[name[st]] in starred[name[stmts_in]] begin[:] if call[name[isinstance], parameter[name[st], name[RegulateAmount]]] begin[:] if compare[name[st].subj is_not constant[None]] begin[:] if compare[name[st].subj.name in name[gene_names]] begin[:] call[name[stmts_out].append, parameter[name[st]]] call[name[logger].info, parameter[binary_operation[constant[%d statements after filter...] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[stmts_out]]]]]] variable[dump_pkl] assign[=] call[name[kwargs].get, parameter[constant[save]]] if name[dump_pkl] begin[:] call[name[dump_statements], parameter[name[stmts_out], name[dump_pkl]]] return[name[stmts_out]]
keyword[def] identifier[filter_transcription_factor] ( identifier[stmts_in] ,** identifier[kwargs] ): literal[string] identifier[logger] . identifier[info] ( literal[string] % identifier[len] ( identifier[stmts_in] )+ literal[string] ) identifier[path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )) identifier[tf_table] = identifier[read_unicode_csv] ( identifier[path] + literal[string] ) identifier[gene_names] =[ identifier[lin] [ literal[int] ] keyword[for] identifier[lin] keyword[in] identifier[list] ( identifier[tf_table] )[ literal[int] :]] identifier[stmts_out] =[] keyword[for] identifier[st] keyword[in] identifier[stmts_in] : keyword[if] identifier[isinstance] ( identifier[st] , identifier[RegulateAmount] ): keyword[if] identifier[st] . identifier[subj] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[st] . identifier[subj] . identifier[name] keyword[in] identifier[gene_names] : identifier[stmts_out] . identifier[append] ( identifier[st] ) keyword[else] : identifier[stmts_out] . identifier[append] ( identifier[st] ) identifier[logger] . identifier[info] ( literal[string] % identifier[len] ( identifier[stmts_out] )) identifier[dump_pkl] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[dump_pkl] : identifier[dump_statements] ( identifier[stmts_out] , identifier[dump_pkl] ) keyword[return] identifier[stmts_out]
def filter_transcription_factor(stmts_in, **kwargs): """Filter out RegulateAmounts where subject is not a transcription factor. Parameters ---------- stmts_in : list[indra.statements.Statement] A list of statements to filter. save : Optional[str] The name of a pickle file to save the results (stmts_out) into. Returns ------- stmts_out : list[indra.statements.Statement] A list of filtered statements. """ logger.info('Filtering %d statements to remove ' % len(stmts_in) + 'amount regulations by non-transcription-factors...') path = os.path.dirname(os.path.abspath(__file__)) tf_table = read_unicode_csv(path + '/../resources/transcription_factors.csv') gene_names = [lin[1] for lin in list(tf_table)[1:]] stmts_out = [] for st in stmts_in: if isinstance(st, RegulateAmount): if st.subj is not None: if st.subj.name in gene_names: stmts_out.append(st) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: stmts_out.append(st) # depends on [control=['for'], data=['st']] logger.info('%d statements after filter...' % len(stmts_out)) dump_pkl = kwargs.get('save') if dump_pkl: dump_statements(stmts_out, dump_pkl) # depends on [control=['if'], data=[]] return stmts_out
def remove_from_list(self, key: str, value, count: int = 0, pipeline: bool = False): """Remove specified value(s) from the list stored at key. Args: key (str): Key where the list is stored. value: value to remove count (int): Number of entries to remove, default 0 == all pipeline(bool): If True, start a transaction block. Default False. """ if pipeline: if redis.__version__ == '2.10.6': self._pipeline.lrem(name=key, value=value, num=count) else: self._pipeline.lrem(key, count, value) else: if self._db.exists(key): if redis.__version__ == '2.10.6': self._db.lrem(name=key, value=value, num=count) else: self._db.lrem(key, count, value)
def function[remove_from_list, parameter[self, key, value, count, pipeline]]: constant[Remove specified value(s) from the list stored at key. Args: key (str): Key where the list is stored. value: value to remove count (int): Number of entries to remove, default 0 == all pipeline(bool): If True, start a transaction block. Default False. ] if name[pipeline] begin[:] if compare[name[redis].__version__ equal[==] constant[2.10.6]] begin[:] call[name[self]._pipeline.lrem, parameter[]]
keyword[def] identifier[remove_from_list] ( identifier[self] , identifier[key] : identifier[str] , identifier[value] , identifier[count] : identifier[int] = literal[int] , identifier[pipeline] : identifier[bool] = keyword[False] ): literal[string] keyword[if] identifier[pipeline] : keyword[if] identifier[redis] . identifier[__version__] == literal[string] : identifier[self] . identifier[_pipeline] . identifier[lrem] ( identifier[name] = identifier[key] , identifier[value] = identifier[value] , identifier[num] = identifier[count] ) keyword[else] : identifier[self] . identifier[_pipeline] . identifier[lrem] ( identifier[key] , identifier[count] , identifier[value] ) keyword[else] : keyword[if] identifier[self] . identifier[_db] . identifier[exists] ( identifier[key] ): keyword[if] identifier[redis] . identifier[__version__] == literal[string] : identifier[self] . identifier[_db] . identifier[lrem] ( identifier[name] = identifier[key] , identifier[value] = identifier[value] , identifier[num] = identifier[count] ) keyword[else] : identifier[self] . identifier[_db] . identifier[lrem] ( identifier[key] , identifier[count] , identifier[value] )
def remove_from_list(self, key: str, value, count: int=0, pipeline: bool=False): """Remove specified value(s) from the list stored at key. Args: key (str): Key where the list is stored. value: value to remove count (int): Number of entries to remove, default 0 == all pipeline(bool): If True, start a transaction block. Default False. """ if pipeline: if redis.__version__ == '2.10.6': self._pipeline.lrem(name=key, value=value, num=count) # depends on [control=['if'], data=[]] else: self._pipeline.lrem(key, count, value) # depends on [control=['if'], data=[]] elif self._db.exists(key): if redis.__version__ == '2.10.6': self._db.lrem(name=key, value=value, num=count) # depends on [control=['if'], data=[]] else: self._db.lrem(key, count, value) # depends on [control=['if'], data=[]]
def assets(ctx): "List Assets" MAX_ASSET = 100000 assets = [] for i in range(0, MAX_ASSET): try: assets.append(Asset("1.3.{}".format(i))) except AssetDoesNotExistsException: break assetTable = PrettyTable() assetTable.field_names = ["ID", "Symbol", "Precision", "Description", "Max Supply"] for i in range (0, len(assets)): try: description = assets[i].description if description == "": description = "--" except AttributeError: description = "--" assetTable.add_row([assets[i].id, assets[i].symbol, assets[i].precision, description, assets[i].max_supply["amount"]]) click.echo(assetTable)
def function[assets, parameter[ctx]]: constant[List Assets] variable[MAX_ASSET] assign[=] constant[100000] variable[assets] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[MAX_ASSET]]]] begin[:] <ast.Try object at 0x7da1b100e380> variable[assetTable] assign[=] call[name[PrettyTable], parameter[]] name[assetTable].field_names assign[=] list[[<ast.Constant object at 0x7da1b100e410>, <ast.Constant object at 0x7da1b100f820>, <ast.Constant object at 0x7da1b100fbb0>, <ast.Constant object at 0x7da1b100eef0>, <ast.Constant object at 0x7da1b100d420>]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[assets]]]]]] begin[:] <ast.Try object at 0x7da1b100d120> call[name[assetTable].add_row, parameter[list[[<ast.Attribute object at 0x7da1b100e7d0>, <ast.Attribute object at 0x7da1b100e0b0>, <ast.Attribute object at 0x7da1b100c610>, <ast.Name object at 0x7da1b100f3d0>, <ast.Subscript object at 0x7da1b100f3a0>]]]] call[name[click].echo, parameter[name[assetTable]]]
keyword[def] identifier[assets] ( identifier[ctx] ): literal[string] identifier[MAX_ASSET] = literal[int] identifier[assets] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[MAX_ASSET] ): keyword[try] : identifier[assets] . identifier[append] ( identifier[Asset] ( literal[string] . identifier[format] ( identifier[i] ))) keyword[except] identifier[AssetDoesNotExistsException] : keyword[break] identifier[assetTable] = identifier[PrettyTable] () identifier[assetTable] . identifier[field_names] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[assets] )): keyword[try] : identifier[description] = identifier[assets] [ identifier[i] ]. identifier[description] keyword[if] identifier[description] == literal[string] : identifier[description] = literal[string] keyword[except] identifier[AttributeError] : identifier[description] = literal[string] identifier[assetTable] . identifier[add_row] ([ identifier[assets] [ identifier[i] ]. identifier[id] , identifier[assets] [ identifier[i] ]. identifier[symbol] , identifier[assets] [ identifier[i] ]. identifier[precision] , identifier[description] , identifier[assets] [ identifier[i] ]. identifier[max_supply] [ literal[string] ]]) identifier[click] . identifier[echo] ( identifier[assetTable] )
def assets(ctx): """List Assets""" MAX_ASSET = 100000 assets = [] for i in range(0, MAX_ASSET): try: assets.append(Asset('1.3.{}'.format(i))) # depends on [control=['try'], data=[]] except AssetDoesNotExistsException: break # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']] assetTable = PrettyTable() assetTable.field_names = ['ID', 'Symbol', 'Precision', 'Description', 'Max Supply'] for i in range(0, len(assets)): try: description = assets[i].description if description == '': description = '--' # depends on [control=['if'], data=['description']] # depends on [control=['try'], data=[]] except AttributeError: description = '--' # depends on [control=['except'], data=[]] assetTable.add_row([assets[i].id, assets[i].symbol, assets[i].precision, description, assets[i].max_supply['amount']]) # depends on [control=['for'], data=['i']] click.echo(assetTable)
def _read_settings_file(settings_file: 'Path') -> SettingsData: """ Read the settings file, which is a json object with settings IDs as keys and boolean values. For each key, look up the `Settings` object with that key. If the key is one of the old IDs (kebab case), replace it with the new ID and rewrite the settings file :param settings_file: the path to the settings file :return: a dict with all new settings IDs as the keys, and boolean values (the values stored in the settings file, or `False` if the key was not found). """ # Read settings from persistent file data = _read_json_file(settings_file) settings, version = _migrate(data) if (data.get('_version') != version): _write_settings_file(settings, version, settings_file) return settings, version
def function[_read_settings_file, parameter[settings_file]]: constant[ Read the settings file, which is a json object with settings IDs as keys and boolean values. For each key, look up the `Settings` object with that key. If the key is one of the old IDs (kebab case), replace it with the new ID and rewrite the settings file :param settings_file: the path to the settings file :return: a dict with all new settings IDs as the keys, and boolean values (the values stored in the settings file, or `False` if the key was not found). ] variable[data] assign[=] call[name[_read_json_file], parameter[name[settings_file]]] <ast.Tuple object at 0x7da1b086c8e0> assign[=] call[name[_migrate], parameter[name[data]]] if compare[call[name[data].get, parameter[constant[_version]]] not_equal[!=] name[version]] begin[:] call[name[_write_settings_file], parameter[name[settings], name[version], name[settings_file]]] return[tuple[[<ast.Name object at 0x7da204344fd0>, <ast.Name object at 0x7da204346e00>]]]
keyword[def] identifier[_read_settings_file] ( identifier[settings_file] : literal[string] )-> identifier[SettingsData] : literal[string] identifier[data] = identifier[_read_json_file] ( identifier[settings_file] ) identifier[settings] , identifier[version] = identifier[_migrate] ( identifier[data] ) keyword[if] ( identifier[data] . identifier[get] ( literal[string] )!= identifier[version] ): identifier[_write_settings_file] ( identifier[settings] , identifier[version] , identifier[settings_file] ) keyword[return] identifier[settings] , identifier[version]
def _read_settings_file(settings_file: 'Path') -> SettingsData: """ Read the settings file, which is a json object with settings IDs as keys and boolean values. For each key, look up the `Settings` object with that key. If the key is one of the old IDs (kebab case), replace it with the new ID and rewrite the settings file :param settings_file: the path to the settings file :return: a dict with all new settings IDs as the keys, and boolean values (the values stored in the settings file, or `False` if the key was not found). """ # Read settings from persistent file data = _read_json_file(settings_file) (settings, version) = _migrate(data) if data.get('_version') != version: _write_settings_file(settings, version, settings_file) # depends on [control=['if'], data=['version']] return (settings, version)
def set_snapshots(self,snapshots): """ Set the snapshots and reindex all time-dependent data. This will reindex all pandas.Panels of time-dependent data; NaNs are filled with the default value for that quantity. Parameters ---------- snapshots : list or pandas.Index All time steps. Returns ------- None """ self.snapshots = pd.Index(snapshots) self.snapshot_weightings = self.snapshot_weightings.reindex(self.snapshots,fill_value=1.) if isinstance(snapshots, pd.DatetimeIndex) and _pd_version < '0.18.0': snapshots = pd.Index(snapshots.values) for component in self.all_components: pnl = self.pnl(component) attrs = self.components[component]["attrs"] for k,default in attrs.default[attrs.varying].iteritems(): pnl[k] = pnl[k].reindex(self.snapshots).fillna(default)
def function[set_snapshots, parameter[self, snapshots]]: constant[ Set the snapshots and reindex all time-dependent data. This will reindex all pandas.Panels of time-dependent data; NaNs are filled with the default value for that quantity. Parameters ---------- snapshots : list or pandas.Index All time steps. Returns ------- None ] name[self].snapshots assign[=] call[name[pd].Index, parameter[name[snapshots]]] name[self].snapshot_weightings assign[=] call[name[self].snapshot_weightings.reindex, parameter[name[self].snapshots]] if <ast.BoolOp object at 0x7da18f58eda0> begin[:] variable[snapshots] assign[=] call[name[pd].Index, parameter[name[snapshots].values]] for taget[name[component]] in starred[name[self].all_components] begin[:] variable[pnl] assign[=] call[name[self].pnl, parameter[name[component]]] variable[attrs] assign[=] call[call[name[self].components][name[component]]][constant[attrs]] for taget[tuple[[<ast.Name object at 0x7da18f58ee90>, <ast.Name object at 0x7da18f58fe80>]]] in starred[call[call[name[attrs].default][name[attrs].varying].iteritems, parameter[]]] begin[:] call[name[pnl]][name[k]] assign[=] call[call[call[name[pnl]][name[k]].reindex, parameter[name[self].snapshots]].fillna, parameter[name[default]]]
keyword[def] identifier[set_snapshots] ( identifier[self] , identifier[snapshots] ): literal[string] identifier[self] . identifier[snapshots] = identifier[pd] . identifier[Index] ( identifier[snapshots] ) identifier[self] . identifier[snapshot_weightings] = identifier[self] . identifier[snapshot_weightings] . identifier[reindex] ( identifier[self] . identifier[snapshots] , identifier[fill_value] = literal[int] ) keyword[if] identifier[isinstance] ( identifier[snapshots] , identifier[pd] . identifier[DatetimeIndex] ) keyword[and] identifier[_pd_version] < literal[string] : identifier[snapshots] = identifier[pd] . identifier[Index] ( identifier[snapshots] . identifier[values] ) keyword[for] identifier[component] keyword[in] identifier[self] . identifier[all_components] : identifier[pnl] = identifier[self] . identifier[pnl] ( identifier[component] ) identifier[attrs] = identifier[self] . identifier[components] [ identifier[component] ][ literal[string] ] keyword[for] identifier[k] , identifier[default] keyword[in] identifier[attrs] . identifier[default] [ identifier[attrs] . identifier[varying] ]. identifier[iteritems] (): identifier[pnl] [ identifier[k] ]= identifier[pnl] [ identifier[k] ]. identifier[reindex] ( identifier[self] . identifier[snapshots] ). identifier[fillna] ( identifier[default] )
def set_snapshots(self, snapshots): """ Set the snapshots and reindex all time-dependent data. This will reindex all pandas.Panels of time-dependent data; NaNs are filled with the default value for that quantity. Parameters ---------- snapshots : list or pandas.Index All time steps. Returns ------- None """ self.snapshots = pd.Index(snapshots) self.snapshot_weightings = self.snapshot_weightings.reindex(self.snapshots, fill_value=1.0) if isinstance(snapshots, pd.DatetimeIndex) and _pd_version < '0.18.0': snapshots = pd.Index(snapshots.values) # depends on [control=['if'], data=[]] for component in self.all_components: pnl = self.pnl(component) attrs = self.components[component]['attrs'] for (k, default) in attrs.default[attrs.varying].iteritems(): pnl[k] = pnl[k].reindex(self.snapshots).fillna(default) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['component']]
def best_policy(mdp, U): """Given an MDP and a utility function U, determine the best policy, as a mapping from state to action. (Equation 17.4)""" pi = {} for s in mdp.states: pi[s] = argmax(mdp.actions(s), lambda a:expected_utility(a, s, U, mdp)) return pi
def function[best_policy, parameter[mdp, U]]: constant[Given an MDP and a utility function U, determine the best policy, as a mapping from state to action. (Equation 17.4)] variable[pi] assign[=] dictionary[[], []] for taget[name[s]] in starred[name[mdp].states] begin[:] call[name[pi]][name[s]] assign[=] call[name[argmax], parameter[call[name[mdp].actions, parameter[name[s]]], <ast.Lambda object at 0x7da207f02f50>]] return[name[pi]]
keyword[def] identifier[best_policy] ( identifier[mdp] , identifier[U] ): literal[string] identifier[pi] ={} keyword[for] identifier[s] keyword[in] identifier[mdp] . identifier[states] : identifier[pi] [ identifier[s] ]= identifier[argmax] ( identifier[mdp] . identifier[actions] ( identifier[s] ), keyword[lambda] identifier[a] : identifier[expected_utility] ( identifier[a] , identifier[s] , identifier[U] , identifier[mdp] )) keyword[return] identifier[pi]
def best_policy(mdp, U): """Given an MDP and a utility function U, determine the best policy, as a mapping from state to action. (Equation 17.4)""" pi = {} for s in mdp.states: pi[s] = argmax(mdp.actions(s), lambda a: expected_utility(a, s, U, mdp)) # depends on [control=['for'], data=['s']] return pi
def get_event_action(cls) -> Optional[str]: """Return the second part of the event_type e.g. >>> Event.event_type = 'experiment.deleted' >>> Event.get_event_action() == 'deleted' """ if not cls.actor: return None return event_context.get_event_action(cls.event_type)
def function[get_event_action, parameter[cls]]: constant[Return the second part of the event_type e.g. >>> Event.event_type = 'experiment.deleted' >>> Event.get_event_action() == 'deleted' ] if <ast.UnaryOp object at 0x7da20c9917b0> begin[:] return[constant[None]] return[call[name[event_context].get_event_action, parameter[name[cls].event_type]]]
keyword[def] identifier[get_event_action] ( identifier[cls] )-> identifier[Optional] [ identifier[str] ]: literal[string] keyword[if] keyword[not] identifier[cls] . identifier[actor] : keyword[return] keyword[None] keyword[return] identifier[event_context] . identifier[get_event_action] ( identifier[cls] . identifier[event_type] )
def get_event_action(cls) -> Optional[str]: """Return the second part of the event_type e.g. >>> Event.event_type = 'experiment.deleted' >>> Event.get_event_action() == 'deleted' """ if not cls.actor: return None # depends on [control=['if'], data=[]] return event_context.get_event_action(cls.event_type)
def fill_subparser(subparser): """Sets up a subparser to convert the MNIST dataset files. Parameters ---------- subparser : :class:`argparse.ArgumentParser` Subparser handling the `mnist` command. """ subparser.add_argument( "--dtype", help="dtype to save to; by default, images will be " + "returned in their original unsigned byte format", choices=('float32', 'float64', 'bool'), type=str, default=None) return convert_mnist
def function[fill_subparser, parameter[subparser]]: constant[Sets up a subparser to convert the MNIST dataset files. Parameters ---------- subparser : :class:`argparse.ArgumentParser` Subparser handling the `mnist` command. ] call[name[subparser].add_argument, parameter[constant[--dtype]]] return[name[convert_mnist]]
keyword[def] identifier[fill_subparser] ( identifier[subparser] ): literal[string] identifier[subparser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] + literal[string] , identifier[choices] =( literal[string] , literal[string] , literal[string] ), identifier[type] = identifier[str] , identifier[default] = keyword[None] ) keyword[return] identifier[convert_mnist]
def fill_subparser(subparser): """Sets up a subparser to convert the MNIST dataset files. Parameters ---------- subparser : :class:`argparse.ArgumentParser` Subparser handling the `mnist` command. """ subparser.add_argument('--dtype', help='dtype to save to; by default, images will be ' + 'returned in their original unsigned byte format', choices=('float32', 'float64', 'bool'), type=str, default=None) return convert_mnist
def _custom_context_menu_requested(self, pos): """ Shows a context menu at the given QPoint (in widget coordinates). """ menu = self._context_menu_make(pos) menu.exec_(self._control.mapToGlobal(pos))
def function[_custom_context_menu_requested, parameter[self, pos]]: constant[ Shows a context menu at the given QPoint (in widget coordinates). ] variable[menu] assign[=] call[name[self]._context_menu_make, parameter[name[pos]]] call[name[menu].exec_, parameter[call[name[self]._control.mapToGlobal, parameter[name[pos]]]]]
keyword[def] identifier[_custom_context_menu_requested] ( identifier[self] , identifier[pos] ): literal[string] identifier[menu] = identifier[self] . identifier[_context_menu_make] ( identifier[pos] ) identifier[menu] . identifier[exec_] ( identifier[self] . identifier[_control] . identifier[mapToGlobal] ( identifier[pos] ))
def _custom_context_menu_requested(self, pos): """ Shows a context menu at the given QPoint (in widget coordinates). """ menu = self._context_menu_make(pos) menu.exec_(self._control.mapToGlobal(pos))
def fitness(self, width, height): """ In guillotine algorithm case, returns the min of the fitness of all free sections, for the given dimension, both normal and rotated (if rotation enabled.) """ assert(width > 0 and height > 0) # Get best fitness section. section, rotated = self._select_fittest_section(width, height) if not section: return None # Return fitness of returned section, with correct dimmensions if the # the rectangle was rotated. if rotated: return self._section_fitness(section, height, width) else: return self._section_fitness(section, width, height)
def function[fitness, parameter[self, width, height]]: constant[ In guillotine algorithm case, returns the min of the fitness of all free sections, for the given dimension, both normal and rotated (if rotation enabled.) ] assert[<ast.BoolOp object at 0x7da20e9b1840>] <ast.Tuple object at 0x7da20e9b23b0> assign[=] call[name[self]._select_fittest_section, parameter[name[width], name[height]]] if <ast.UnaryOp object at 0x7da20e9b2590> begin[:] return[constant[None]] if name[rotated] begin[:] return[call[name[self]._section_fitness, parameter[name[section], name[height], name[width]]]]
keyword[def] identifier[fitness] ( identifier[self] , identifier[width] , identifier[height] ): literal[string] keyword[assert] ( identifier[width] > literal[int] keyword[and] identifier[height] > literal[int] ) identifier[section] , identifier[rotated] = identifier[self] . identifier[_select_fittest_section] ( identifier[width] , identifier[height] ) keyword[if] keyword[not] identifier[section] : keyword[return] keyword[None] keyword[if] identifier[rotated] : keyword[return] identifier[self] . identifier[_section_fitness] ( identifier[section] , identifier[height] , identifier[width] ) keyword[else] : keyword[return] identifier[self] . identifier[_section_fitness] ( identifier[section] , identifier[width] , identifier[height] )
def fitness(self, width, height): """ In guillotine algorithm case, returns the min of the fitness of all free sections, for the given dimension, both normal and rotated (if rotation enabled.) """ assert width > 0 and height > 0 # Get best fitness section. (section, rotated) = self._select_fittest_section(width, height) if not section: return None # depends on [control=['if'], data=[]] # Return fitness of returned section, with correct dimmensions if the # the rectangle was rotated. if rotated: return self._section_fitness(section, height, width) # depends on [control=['if'], data=[]] else: return self._section_fitness(section, width, height)
def _process_cell(i, state, finite=False): """Process 3 cells and return a value from 0 to 7. """ op_1 = state[i - 1] op_2 = state[i] if i == len(state) - 1: if finite: op_3 = state[0] else: op_3 = 0 else: op_3 = state[i + 1] result = 0 for i, val in enumerate([op_3, op_2, op_1]): if val: result += 2**i return result
def function[_process_cell, parameter[i, state, finite]]: constant[Process 3 cells and return a value from 0 to 7. ] variable[op_1] assign[=] call[name[state]][binary_operation[name[i] - constant[1]]] variable[op_2] assign[=] call[name[state]][name[i]] if compare[name[i] equal[==] binary_operation[call[name[len], parameter[name[state]]] - constant[1]]] begin[:] if name[finite] begin[:] variable[op_3] assign[=] call[name[state]][constant[0]] variable[result] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da18f09ead0>, <ast.Name object at 0x7da18f09dc30>]]] in starred[call[name[enumerate], parameter[list[[<ast.Name object at 0x7da18f09d630>, <ast.Name object at 0x7da18f09e2f0>, <ast.Name object at 0x7da18f09ee00>]]]]] begin[:] if name[val] begin[:] <ast.AugAssign object at 0x7da18f09e0b0> return[name[result]]
keyword[def] identifier[_process_cell] ( identifier[i] , identifier[state] , identifier[finite] = keyword[False] ): literal[string] identifier[op_1] = identifier[state] [ identifier[i] - literal[int] ] identifier[op_2] = identifier[state] [ identifier[i] ] keyword[if] identifier[i] == identifier[len] ( identifier[state] )- literal[int] : keyword[if] identifier[finite] : identifier[op_3] = identifier[state] [ literal[int] ] keyword[else] : identifier[op_3] = literal[int] keyword[else] : identifier[op_3] = identifier[state] [ identifier[i] + literal[int] ] identifier[result] = literal[int] keyword[for] identifier[i] , identifier[val] keyword[in] identifier[enumerate] ([ identifier[op_3] , identifier[op_2] , identifier[op_1] ]): keyword[if] identifier[val] : identifier[result] += literal[int] ** identifier[i] keyword[return] identifier[result]
def _process_cell(i, state, finite=False): """Process 3 cells and return a value from 0 to 7. """ op_1 = state[i - 1] op_2 = state[i] if i == len(state) - 1: if finite: op_3 = state[0] # depends on [control=['if'], data=[]] else: op_3 = 0 # depends on [control=['if'], data=[]] else: op_3 = state[i + 1] result = 0 for (i, val) in enumerate([op_3, op_2, op_1]): if val: result += 2 ** i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return result
def GetMessages(self, files): # TODO(amauryfa): Fix the differences with MessageFactory. """Gets all registered messages from a specified file. Only messages already created and registered will be returned; (this is the case for imported _pb2 modules) But unlike MessageFactory, this version also returns already defined nested messages, but does not register any message extensions. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. Raises: KeyError: if a file could not be found. """ def _GetAllMessageNames(desc): """Walk a message Descriptor and recursively yields all message names.""" yield desc.full_name for msg_desc in desc.nested_types: for full_name in _GetAllMessageNames(msg_desc): yield full_name result = {} for file_name in files: file_desc = self.pool.FindFileByName(file_name) for msg_desc in file_desc.message_types_by_name.values(): for full_name in _GetAllMessageNames(msg_desc): try: result[full_name] = self._classes[full_name] except KeyError: # This descriptor has no registered class, skip it. pass return result
def function[GetMessages, parameter[self, files]]: constant[Gets all registered messages from a specified file. Only messages already created and registered will be returned; (this is the case for imported _pb2 modules) But unlike MessageFactory, this version also returns already defined nested messages, but does not register any message extensions. Args: files: The file names to extract messages from. Returns: A dictionary mapping proto names to the message classes. Raises: KeyError: if a file could not be found. ] def function[_GetAllMessageNames, parameter[desc]]: constant[Walk a message Descriptor and recursively yields all message names.] <ast.Yield object at 0x7da1b1ef3fd0> for taget[name[msg_desc]] in starred[name[desc].nested_types] begin[:] for taget[name[full_name]] in starred[call[name[_GetAllMessageNames], parameter[name[msg_desc]]]] begin[:] <ast.Yield object at 0x7da1b1ef25c0> variable[result] assign[=] dictionary[[], []] for taget[name[file_name]] in starred[name[files]] begin[:] variable[file_desc] assign[=] call[name[self].pool.FindFileByName, parameter[name[file_name]]] for taget[name[msg_desc]] in starred[call[name[file_desc].message_types_by_name.values, parameter[]]] begin[:] for taget[name[full_name]] in starred[call[name[_GetAllMessageNames], parameter[name[msg_desc]]]] begin[:] <ast.Try object at 0x7da1b1ef20b0> return[name[result]]
keyword[def] identifier[GetMessages] ( identifier[self] , identifier[files] ): literal[string] keyword[def] identifier[_GetAllMessageNames] ( identifier[desc] ): literal[string] keyword[yield] identifier[desc] . identifier[full_name] keyword[for] identifier[msg_desc] keyword[in] identifier[desc] . identifier[nested_types] : keyword[for] identifier[full_name] keyword[in] identifier[_GetAllMessageNames] ( identifier[msg_desc] ): keyword[yield] identifier[full_name] identifier[result] ={} keyword[for] identifier[file_name] keyword[in] identifier[files] : identifier[file_desc] = identifier[self] . identifier[pool] . identifier[FindFileByName] ( identifier[file_name] ) keyword[for] identifier[msg_desc] keyword[in] identifier[file_desc] . identifier[message_types_by_name] . identifier[values] (): keyword[for] identifier[full_name] keyword[in] identifier[_GetAllMessageNames] ( identifier[msg_desc] ): keyword[try] : identifier[result] [ identifier[full_name] ]= identifier[self] . identifier[_classes] [ identifier[full_name] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[result]
def GetMessages(self, files): # TODO(amauryfa): Fix the differences with MessageFactory. 'Gets all registered messages from a specified file.\n\n Only messages already created and registered will be returned; (this is the\n case for imported _pb2 modules)\n But unlike MessageFactory, this version also returns already defined nested\n messages, but does not register any message extensions.\n\n Args:\n files: The file names to extract messages from.\n\n Returns:\n A dictionary mapping proto names to the message classes.\n\n Raises:\n KeyError: if a file could not be found.\n ' def _GetAllMessageNames(desc): """Walk a message Descriptor and recursively yields all message names.""" yield desc.full_name for msg_desc in desc.nested_types: for full_name in _GetAllMessageNames(msg_desc): yield full_name # depends on [control=['for'], data=['full_name']] # depends on [control=['for'], data=['msg_desc']] result = {} for file_name in files: file_desc = self.pool.FindFileByName(file_name) for msg_desc in file_desc.message_types_by_name.values(): for full_name in _GetAllMessageNames(msg_desc): try: result[full_name] = self._classes[full_name] # depends on [control=['try'], data=[]] except KeyError: # This descriptor has no registered class, skip it. pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['full_name']] # depends on [control=['for'], data=['msg_desc']] # depends on [control=['for'], data=['file_name']] return result
def _check_groups(s, groups): """Ensures that all particles are included in exactly 1 group""" ans = [] for g in groups: ans.extend(g) if np.unique(ans).size != np.size(ans): return False elif np.unique(ans).size != s.obj_get_positions().shape[0]: return False else: return (np.arange(s.obj_get_radii().size) == np.sort(ans)).all()
def function[_check_groups, parameter[s, groups]]: constant[Ensures that all particles are included in exactly 1 group] variable[ans] assign[=] list[[]] for taget[name[g]] in starred[name[groups]] begin[:] call[name[ans].extend, parameter[name[g]]] if compare[call[name[np].unique, parameter[name[ans]]].size not_equal[!=] call[name[np].size, parameter[name[ans]]]] begin[:] return[constant[False]]
keyword[def] identifier[_check_groups] ( identifier[s] , identifier[groups] ): literal[string] identifier[ans] =[] keyword[for] identifier[g] keyword[in] identifier[groups] : identifier[ans] . identifier[extend] ( identifier[g] ) keyword[if] identifier[np] . identifier[unique] ( identifier[ans] ). identifier[size] != identifier[np] . identifier[size] ( identifier[ans] ): keyword[return] keyword[False] keyword[elif] identifier[np] . identifier[unique] ( identifier[ans] ). identifier[size] != identifier[s] . identifier[obj_get_positions] (). identifier[shape] [ literal[int] ]: keyword[return] keyword[False] keyword[else] : keyword[return] ( identifier[np] . identifier[arange] ( identifier[s] . identifier[obj_get_radii] (). identifier[size] )== identifier[np] . identifier[sort] ( identifier[ans] )). identifier[all] ()
def _check_groups(s, groups): """Ensures that all particles are included in exactly 1 group""" ans = [] for g in groups: ans.extend(g) # depends on [control=['for'], data=['g']] if np.unique(ans).size != np.size(ans): return False # depends on [control=['if'], data=[]] elif np.unique(ans).size != s.obj_get_positions().shape[0]: return False # depends on [control=['if'], data=[]] else: return (np.arange(s.obj_get_radii().size) == np.sort(ans)).all()
def _transmogrophy(self, angle, percent, scaleFromCenter, flipH, flipV): ''' Internal method to scale and rotate ''' self.angle = angle % 360 self.percent = percent self.scaleFromCenter = scaleFromCenter previousRect = self.rect previousCenter = previousRect.center previousX = previousRect.x previousY = previousRect.y # Rotate - pygame rotates in the opposite direction pygameAngle = -self.angle rotatedImage = pygame.transform.rotate(self.originalImage, pygameAngle) rotatedRect = rotatedImage.get_rect() rotatedWidth = rotatedRect.width rotatedHeight = rotatedRect.height # Scale newWidth = int(rotatedWidth * .01 * self.percent) newHeight = int(rotatedHeight * .01 * self.percent) self.image = pygame.transform.scale(rotatedImage, (newWidth, newHeight)) # Flip if flipH: self.image = pygame.transform.flip(self.image, True, False) if flipV: self.image = pygame.transform.flip(self.image, False, True) # Placement self.rect = self.image.get_rect() if self.scaleFromCenter: self.rect.center = previousCenter else: # use previous X, Y self.rect.x = previousX self.rect.y = previousY self.setLoc((self.rect.left, self.rect.top))
def function[_transmogrophy, parameter[self, angle, percent, scaleFromCenter, flipH, flipV]]: constant[ Internal method to scale and rotate ] name[self].angle assign[=] binary_operation[name[angle] <ast.Mod object at 0x7da2590d6920> constant[360]] name[self].percent assign[=] name[percent] name[self].scaleFromCenter assign[=] name[scaleFromCenter] variable[previousRect] assign[=] name[self].rect variable[previousCenter] assign[=] name[previousRect].center variable[previousX] assign[=] name[previousRect].x variable[previousY] assign[=] name[previousRect].y variable[pygameAngle] assign[=] <ast.UnaryOp object at 0x7da207f9a560> variable[rotatedImage] assign[=] call[name[pygame].transform.rotate, parameter[name[self].originalImage, name[pygameAngle]]] variable[rotatedRect] assign[=] call[name[rotatedImage].get_rect, parameter[]] variable[rotatedWidth] assign[=] name[rotatedRect].width variable[rotatedHeight] assign[=] name[rotatedRect].height variable[newWidth] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[rotatedWidth] * constant[0.01]] * name[self].percent]]] variable[newHeight] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[rotatedHeight] * constant[0.01]] * name[self].percent]]] name[self].image assign[=] call[name[pygame].transform.scale, parameter[name[rotatedImage], tuple[[<ast.Name object at 0x7da207f997e0>, <ast.Name object at 0x7da207f99270>]]]] if name[flipH] begin[:] name[self].image assign[=] call[name[pygame].transform.flip, parameter[name[self].image, constant[True], constant[False]]] if name[flipV] begin[:] name[self].image assign[=] call[name[pygame].transform.flip, parameter[name[self].image, constant[False], constant[True]]] name[self].rect assign[=] call[name[self].image.get_rect, parameter[]] if name[self].scaleFromCenter begin[:] name[self].rect.center assign[=] name[previousCenter] call[name[self].setLoc, parameter[tuple[[<ast.Attribute object at 0x7da2044c3a30>, <ast.Attribute object at 
0x7da2044c2dd0>]]]]
keyword[def] identifier[_transmogrophy] ( identifier[self] , identifier[angle] , identifier[percent] , identifier[scaleFromCenter] , identifier[flipH] , identifier[flipV] ): literal[string] identifier[self] . identifier[angle] = identifier[angle] % literal[int] identifier[self] . identifier[percent] = identifier[percent] identifier[self] . identifier[scaleFromCenter] = identifier[scaleFromCenter] identifier[previousRect] = identifier[self] . identifier[rect] identifier[previousCenter] = identifier[previousRect] . identifier[center] identifier[previousX] = identifier[previousRect] . identifier[x] identifier[previousY] = identifier[previousRect] . identifier[y] identifier[pygameAngle] =- identifier[self] . identifier[angle] identifier[rotatedImage] = identifier[pygame] . identifier[transform] . identifier[rotate] ( identifier[self] . identifier[originalImage] , identifier[pygameAngle] ) identifier[rotatedRect] = identifier[rotatedImage] . identifier[get_rect] () identifier[rotatedWidth] = identifier[rotatedRect] . identifier[width] identifier[rotatedHeight] = identifier[rotatedRect] . identifier[height] identifier[newWidth] = identifier[int] ( identifier[rotatedWidth] * literal[int] * identifier[self] . identifier[percent] ) identifier[newHeight] = identifier[int] ( identifier[rotatedHeight] * literal[int] * identifier[self] . identifier[percent] ) identifier[self] . identifier[image] = identifier[pygame] . identifier[transform] . identifier[scale] ( identifier[rotatedImage] ,( identifier[newWidth] , identifier[newHeight] )) keyword[if] identifier[flipH] : identifier[self] . identifier[image] = identifier[pygame] . identifier[transform] . identifier[flip] ( identifier[self] . identifier[image] , keyword[True] , keyword[False] ) keyword[if] identifier[flipV] : identifier[self] . identifier[image] = identifier[pygame] . identifier[transform] . identifier[flip] ( identifier[self] . identifier[image] , keyword[False] , keyword[True] ) identifier[self] . 
identifier[rect] = identifier[self] . identifier[image] . identifier[get_rect] () keyword[if] identifier[self] . identifier[scaleFromCenter] : identifier[self] . identifier[rect] . identifier[center] = identifier[previousCenter] keyword[else] : identifier[self] . identifier[rect] . identifier[x] = identifier[previousX] identifier[self] . identifier[rect] . identifier[y] = identifier[previousY] identifier[self] . identifier[setLoc] (( identifier[self] . identifier[rect] . identifier[left] , identifier[self] . identifier[rect] . identifier[top] ))
def _transmogrophy(self, angle, percent, scaleFromCenter, flipH, flipV): """ Internal method to scale and rotate """ self.angle = angle % 360 self.percent = percent self.scaleFromCenter = scaleFromCenter previousRect = self.rect previousCenter = previousRect.center previousX = previousRect.x previousY = previousRect.y # Rotate - pygame rotates in the opposite direction pygameAngle = -self.angle rotatedImage = pygame.transform.rotate(self.originalImage, pygameAngle) rotatedRect = rotatedImage.get_rect() rotatedWidth = rotatedRect.width rotatedHeight = rotatedRect.height # Scale newWidth = int(rotatedWidth * 0.01 * self.percent) newHeight = int(rotatedHeight * 0.01 * self.percent) self.image = pygame.transform.scale(rotatedImage, (newWidth, newHeight)) # Flip if flipH: self.image = pygame.transform.flip(self.image, True, False) # depends on [control=['if'], data=[]] if flipV: self.image = pygame.transform.flip(self.image, False, True) # depends on [control=['if'], data=[]] # Placement self.rect = self.image.get_rect() if self.scaleFromCenter: self.rect.center = previousCenter # depends on [control=['if'], data=[]] else: # use previous X, Y self.rect.x = previousX self.rect.y = previousY self.setLoc((self.rect.left, self.rect.top))
def _update(self, attrname, value=None): """ Updates a field in an existing tuple. This is not a datajoyous operation and should not be used routinely. Relational database maintain referential integrity on the level of a tuple. Therefore, the UPDATE operator can violate referential integrity. The datajoyous way to update information is to delete the entire tuple and insert the entire update tuple. Safety constraints: 1. self must be restricted to exactly one tuple 2. the update attribute must not be in primary key Example >>> (v2p.Mice() & key).update('mouse_dob', '2011-01-01') >>> (v2p.Mice() & key).update( 'lens') # set the value to NULL """ if len(self) != 1: raise DataJointError('Update is only allowed on one tuple at a time') if attrname not in self.heading: raise DataJointError('Invalid attribute name') if attrname in self.heading.primary_key: raise DataJointError('Cannot update a key value.') attr = self.heading[attrname] if attr.is_blob: value = pack(value) placeholder = '%s' elif attr.numeric: if value is None or np.isnan(np.float(value)): # nans are turned into NULLs placeholder = 'NULL' value = None else: placeholder = '%s' value = str(int(value) if isinstance(value, bool) else value) else: placeholder = '%s' command = "UPDATE {full_table_name} SET `{attrname}`={placeholder} {where_clause}".format( full_table_name=self.from_clause, attrname=attrname, placeholder=placeholder, where_clause=self.where_clause) self.connection.query(command, args=(value, ) if value is not None else ())
def function[_update, parameter[self, attrname, value]]: constant[ Updates a field in an existing tuple. This is not a datajoyous operation and should not be used routinely. Relational database maintain referential integrity on the level of a tuple. Therefore, the UPDATE operator can violate referential integrity. The datajoyous way to update information is to delete the entire tuple and insert the entire update tuple. Safety constraints: 1. self must be restricted to exactly one tuple 2. the update attribute must not be in primary key Example >>> (v2p.Mice() & key).update('mouse_dob', '2011-01-01') >>> (v2p.Mice() & key).update( 'lens') # set the value to NULL ] if compare[call[name[len], parameter[name[self]]] not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b113e800> if compare[name[attrname] <ast.NotIn object at 0x7da2590d7190> name[self].heading] begin[:] <ast.Raise object at 0x7da1b113f160> if compare[name[attrname] in name[self].heading.primary_key] begin[:] <ast.Raise object at 0x7da1b113f880> variable[attr] assign[=] call[name[self].heading][name[attrname]] if name[attr].is_blob begin[:] variable[value] assign[=] call[name[pack], parameter[name[value]]] variable[placeholder] assign[=] constant[%s] variable[command] assign[=] call[constant[UPDATE {full_table_name} SET `{attrname}`={placeholder} {where_clause}].format, parameter[]] call[name[self].connection.query, parameter[name[command]]]
keyword[def] identifier[_update] ( identifier[self] , identifier[attrname] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[len] ( identifier[self] )!= literal[int] : keyword[raise] identifier[DataJointError] ( literal[string] ) keyword[if] identifier[attrname] keyword[not] keyword[in] identifier[self] . identifier[heading] : keyword[raise] identifier[DataJointError] ( literal[string] ) keyword[if] identifier[attrname] keyword[in] identifier[self] . identifier[heading] . identifier[primary_key] : keyword[raise] identifier[DataJointError] ( literal[string] ) identifier[attr] = identifier[self] . identifier[heading] [ identifier[attrname] ] keyword[if] identifier[attr] . identifier[is_blob] : identifier[value] = identifier[pack] ( identifier[value] ) identifier[placeholder] = literal[string] keyword[elif] identifier[attr] . identifier[numeric] : keyword[if] identifier[value] keyword[is] keyword[None] keyword[or] identifier[np] . identifier[isnan] ( identifier[np] . identifier[float] ( identifier[value] )): identifier[placeholder] = literal[string] identifier[value] = keyword[None] keyword[else] : identifier[placeholder] = literal[string] identifier[value] = identifier[str] ( identifier[int] ( identifier[value] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[bool] ) keyword[else] identifier[value] ) keyword[else] : identifier[placeholder] = literal[string] identifier[command] = literal[string] . identifier[format] ( identifier[full_table_name] = identifier[self] . identifier[from_clause] , identifier[attrname] = identifier[attrname] , identifier[placeholder] = identifier[placeholder] , identifier[where_clause] = identifier[self] . identifier[where_clause] ) identifier[self] . identifier[connection] . identifier[query] ( identifier[command] , identifier[args] =( identifier[value] ,) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] keyword[else] ())
def _update(self, attrname, value=None): """ Updates a field in an existing tuple. This is not a datajoyous operation and should not be used routinely. Relational database maintain referential integrity on the level of a tuple. Therefore, the UPDATE operator can violate referential integrity. The datajoyous way to update information is to delete the entire tuple and insert the entire update tuple. Safety constraints: 1. self must be restricted to exactly one tuple 2. the update attribute must not be in primary key Example >>> (v2p.Mice() & key).update('mouse_dob', '2011-01-01') >>> (v2p.Mice() & key).update( 'lens') # set the value to NULL """ if len(self) != 1: raise DataJointError('Update is only allowed on one tuple at a time') # depends on [control=['if'], data=[]] if attrname not in self.heading: raise DataJointError('Invalid attribute name') # depends on [control=['if'], data=[]] if attrname in self.heading.primary_key: raise DataJointError('Cannot update a key value.') # depends on [control=['if'], data=[]] attr = self.heading[attrname] if attr.is_blob: value = pack(value) placeholder = '%s' # depends on [control=['if'], data=[]] elif attr.numeric: if value is None or np.isnan(np.float(value)): # nans are turned into NULLs placeholder = 'NULL' value = None # depends on [control=['if'], data=[]] else: placeholder = '%s' value = str(int(value) if isinstance(value, bool) else value) # depends on [control=['if'], data=[]] else: placeholder = '%s' command = 'UPDATE {full_table_name} SET `{attrname}`={placeholder} {where_clause}'.format(full_table_name=self.from_clause, attrname=attrname, placeholder=placeholder, where_clause=self.where_clause) self.connection.query(command, args=(value,) if value is not None else ())
def items(self, raw = False): """Like `items` for dicts but with a `raw` option # Parameters _raw_ : `optional [bool]` > Default `False`, if `True` the `KeysView` contains the raw values as the values # Returns `KeysView` > The key-value pairs of the record """ if raw: return self._fieldDict.items() else: return collections.abc.Mapping.items(self)
def function[items, parameter[self, raw]]: constant[Like `items` for dicts but with a `raw` option # Parameters _raw_ : `optional [bool]` > Default `False`, if `True` the `KeysView` contains the raw values as the values # Returns `KeysView` > The key-value pairs of the record ] if name[raw] begin[:] return[call[name[self]._fieldDict.items, parameter[]]]
keyword[def] identifier[items] ( identifier[self] , identifier[raw] = keyword[False] ): literal[string] keyword[if] identifier[raw] : keyword[return] identifier[self] . identifier[_fieldDict] . identifier[items] () keyword[else] : keyword[return] identifier[collections] . identifier[abc] . identifier[Mapping] . identifier[items] ( identifier[self] )
def items(self, raw=False): """Like `items` for dicts but with a `raw` option # Parameters _raw_ : `optional [bool]` > Default `False`, if `True` the `KeysView` contains the raw values as the values # Returns `KeysView` > The key-value pairs of the record """ if raw: return self._fieldDict.items() # depends on [control=['if'], data=[]] else: return collections.abc.Mapping.items(self)
def universal_exception(coro): """ Decorator. Reraising any exception (except `CancelledError` and `NotImplementedError`) with universal exception :py:class:`aioftp.PathIOError` """ @functools.wraps(coro) async def wrapper(*args, **kwargs): try: return await coro(*args, **kwargs) except (asyncio.CancelledError, NotImplementedError, StopAsyncIteration): raise except Exception: raise errors.PathIOError(reason=sys.exc_info()) return wrapper
def function[universal_exception, parameter[coro]]: constant[ Decorator. Reraising any exception (except `CancelledError` and `NotImplementedError`) with universal exception :py:class:`aioftp.PathIOError` ] <ast.AsyncFunctionDef object at 0x7da1b00b4e20> return[name[wrapper]]
keyword[def] identifier[universal_exception] ( identifier[coro] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[coro] ) keyword[async] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): keyword[try] : keyword[return] keyword[await] identifier[coro] (* identifier[args] ,** identifier[kwargs] ) keyword[except] ( identifier[asyncio] . identifier[CancelledError] , identifier[NotImplementedError] , identifier[StopAsyncIteration] ): keyword[raise] keyword[except] identifier[Exception] : keyword[raise] identifier[errors] . identifier[PathIOError] ( identifier[reason] = identifier[sys] . identifier[exc_info] ()) keyword[return] identifier[wrapper]
def universal_exception(coro): """ Decorator. Reraising any exception (except `CancelledError` and `NotImplementedError`) with universal exception :py:class:`aioftp.PathIOError` """ @functools.wraps(coro) async def wrapper(*args, **kwargs): try: return await coro(*args, **kwargs) # depends on [control=['try'], data=[]] except (asyncio.CancelledError, NotImplementedError, StopAsyncIteration): raise # depends on [control=['except'], data=[]] except Exception: raise errors.PathIOError(reason=sys.exc_info()) # depends on [control=['except'], data=[]] return wrapper
def transform(self, X): """Transform block or partition-wise for dask inputs. For dask inputs, a dask array or dataframe is returned. For other inputs (NumPy array, pandas dataframe, scipy sparse matrix), the regular return value is returned. If the underlying estimator does not have a ``transform`` method, then an ``AttributeError`` is raised. Parameters ---------- X : array-like Returns ------- transformed : array-like """ self._check_method("transform") X = self._check_array(X) if isinstance(X, da.Array): return X.map_blocks(_transform, estimator=self._postfit_estimator) elif isinstance(X, dd._Frame): return X.map_partitions(_transform, estimator=self._postfit_estimator) else: return _transform(X, estimator=self._postfit_estimator)
def function[transform, parameter[self, X]]: constant[Transform block or partition-wise for dask inputs. For dask inputs, a dask array or dataframe is returned. For other inputs (NumPy array, pandas dataframe, scipy sparse matrix), the regular return value is returned. If the underlying estimator does not have a ``transform`` method, then an ``AttributeError`` is raised. Parameters ---------- X : array-like Returns ------- transformed : array-like ] call[name[self]._check_method, parameter[constant[transform]]] variable[X] assign[=] call[name[self]._check_array, parameter[name[X]]] if call[name[isinstance], parameter[name[X], name[da].Array]] begin[:] return[call[name[X].map_blocks, parameter[name[_transform]]]]
keyword[def] identifier[transform] ( identifier[self] , identifier[X] ): literal[string] identifier[self] . identifier[_check_method] ( literal[string] ) identifier[X] = identifier[self] . identifier[_check_array] ( identifier[X] ) keyword[if] identifier[isinstance] ( identifier[X] , identifier[da] . identifier[Array] ): keyword[return] identifier[X] . identifier[map_blocks] ( identifier[_transform] , identifier[estimator] = identifier[self] . identifier[_postfit_estimator] ) keyword[elif] identifier[isinstance] ( identifier[X] , identifier[dd] . identifier[_Frame] ): keyword[return] identifier[X] . identifier[map_partitions] ( identifier[_transform] , identifier[estimator] = identifier[self] . identifier[_postfit_estimator] ) keyword[else] : keyword[return] identifier[_transform] ( identifier[X] , identifier[estimator] = identifier[self] . identifier[_postfit_estimator] )
def transform(self, X): """Transform block or partition-wise for dask inputs. For dask inputs, a dask array or dataframe is returned. For other inputs (NumPy array, pandas dataframe, scipy sparse matrix), the regular return value is returned. If the underlying estimator does not have a ``transform`` method, then an ``AttributeError`` is raised. Parameters ---------- X : array-like Returns ------- transformed : array-like """ self._check_method('transform') X = self._check_array(X) if isinstance(X, da.Array): return X.map_blocks(_transform, estimator=self._postfit_estimator) # depends on [control=['if'], data=[]] elif isinstance(X, dd._Frame): return X.map_partitions(_transform, estimator=self._postfit_estimator) # depends on [control=['if'], data=[]] else: return _transform(X, estimator=self._postfit_estimator)
def format_exc(limit=None): """Like print_exc() but return a string. Backport for Python 2.3.""" try: etype, value, tb = sys.exc_info() return ''.join(traceback.format_exception(etype, value, tb, limit)) finally: etype = value = tb = None
def function[format_exc, parameter[limit]]: constant[Like print_exc() but return a string. Backport for Python 2.3.] <ast.Try object at 0x7da18f813af0>
keyword[def] identifier[format_exc] ( identifier[limit] = keyword[None] ): literal[string] keyword[try] : identifier[etype] , identifier[value] , identifier[tb] = identifier[sys] . identifier[exc_info] () keyword[return] literal[string] . identifier[join] ( identifier[traceback] . identifier[format_exception] ( identifier[etype] , identifier[value] , identifier[tb] , identifier[limit] )) keyword[finally] : identifier[etype] = identifier[value] = identifier[tb] = keyword[None]
def format_exc(limit=None): """Like print_exc() but return a string. Backport for Python 2.3.""" try: (etype, value, tb) = sys.exc_info() return ''.join(traceback.format_exception(etype, value, tb, limit)) # depends on [control=['try'], data=[]] finally: etype = value = tb = None
def outputs_of(self, idx, create=False): """ Get a set of the outputs for a given node index. """ if create and not idx in self.edges: self.edges[idx] = set() return self.edges[idx]
def function[outputs_of, parameter[self, idx, create]]: constant[ Get a set of the outputs for a given node index. ] if <ast.BoolOp object at 0x7da18dc9a050> begin[:] call[name[self].edges][name[idx]] assign[=] call[name[set], parameter[]] return[call[name[self].edges][name[idx]]]
keyword[def] identifier[outputs_of] ( identifier[self] , identifier[idx] , identifier[create] = keyword[False] ): literal[string] keyword[if] identifier[create] keyword[and] keyword[not] identifier[idx] keyword[in] identifier[self] . identifier[edges] : identifier[self] . identifier[edges] [ identifier[idx] ]= identifier[set] () keyword[return] identifier[self] . identifier[edges] [ identifier[idx] ]
def outputs_of(self, idx, create=False): """ Get a set of the outputs for a given node index. """ if create and (not idx in self.edges): self.edges[idx] = set() # depends on [control=['if'], data=[]] return self.edges[idx]
def actualize (self): """ Generates actual build instructions. """ if self.actualized_: return self.actualized_ = True ps = self.properties () properties = self.adjust_properties (ps) actual_targets = [] for i in self.targets (): actual_targets.append (i.actualize ()) self.actualize_sources (self.sources (), properties) self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_) # FIXME: check the comment below. Was self.action_name_ [1] # Action name can include additional rule arguments, which should not # be passed to 'set-target-variables'. # FIXME: breaking circular dependency import toolset toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties) engine = self.manager_.engine () # FIXME: this is supposed to help --out-xml option, but we don't # implement that now, and anyway, we should handle it in Python, # not but putting variables on bjam-level targets. bjam.call("set-target-variable", actual_targets, ".action", repr(self)) self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_, properties) # Since we set up creating action here, we also set up # action for cleaning up self.manager_.engine ().set_update_action ('common.Clean', 'clean-all', actual_targets) return actual_targets
def function[actualize, parameter[self]]: constant[ Generates actual build instructions. ] if name[self].actualized_ begin[:] return[None] name[self].actualized_ assign[=] constant[True] variable[ps] assign[=] call[name[self].properties, parameter[]] variable[properties] assign[=] call[name[self].adjust_properties, parameter[name[ps]]] variable[actual_targets] assign[=] list[[]] for taget[name[i]] in starred[call[name[self].targets, parameter[]]] begin[:] call[name[actual_targets].append, parameter[call[name[i].actualize, parameter[]]]] call[name[self].actualize_sources, parameter[call[name[self].sources, parameter[]], name[properties]]] call[name[self].engine_.add_dependency, parameter[name[actual_targets], binary_operation[name[self].actual_sources_ + name[self].dependency_only_sources_]]] import module[toolset] call[name[toolset].set_target_variables, parameter[name[self].manager_, name[self].action_name_, name[actual_targets], name[properties]]] variable[engine] assign[=] call[name[self].manager_.engine, parameter[]] call[name[bjam].call, parameter[constant[set-target-variable], name[actual_targets], constant[.action], call[name[repr], parameter[name[self]]]]] call[call[name[self].manager_.engine, parameter[]].set_update_action, parameter[name[self].action_name_, name[actual_targets], name[self].actual_sources_, name[properties]]] call[call[name[self].manager_.engine, parameter[]].set_update_action, parameter[constant[common.Clean], constant[clean-all], name[actual_targets]]] return[name[actual_targets]]
keyword[def] identifier[actualize] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[actualized_] : keyword[return] identifier[self] . identifier[actualized_] = keyword[True] identifier[ps] = identifier[self] . identifier[properties] () identifier[properties] = identifier[self] . identifier[adjust_properties] ( identifier[ps] ) identifier[actual_targets] =[] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[targets] (): identifier[actual_targets] . identifier[append] ( identifier[i] . identifier[actualize] ()) identifier[self] . identifier[actualize_sources] ( identifier[self] . identifier[sources] (), identifier[properties] ) identifier[self] . identifier[engine_] . identifier[add_dependency] ( identifier[actual_targets] , identifier[self] . identifier[actual_sources_] + identifier[self] . identifier[dependency_only_sources_] ) keyword[import] identifier[toolset] identifier[toolset] . identifier[set_target_variables] ( identifier[self] . identifier[manager_] , identifier[self] . identifier[action_name_] , identifier[actual_targets] , identifier[properties] ) identifier[engine] = identifier[self] . identifier[manager_] . identifier[engine] () identifier[bjam] . identifier[call] ( literal[string] , identifier[actual_targets] , literal[string] , identifier[repr] ( identifier[self] )) identifier[self] . identifier[manager_] . identifier[engine] (). identifier[set_update_action] ( identifier[self] . identifier[action_name_] , identifier[actual_targets] , identifier[self] . identifier[actual_sources_] , identifier[properties] ) identifier[self] . identifier[manager_] . identifier[engine] (). identifier[set_update_action] ( literal[string] , literal[string] , identifier[actual_targets] ) keyword[return] identifier[actual_targets]
def actualize(self): """ Generates actual build instructions. """ if self.actualized_: return # depends on [control=['if'], data=[]] self.actualized_ = True ps = self.properties() properties = self.adjust_properties(ps) actual_targets = [] for i in self.targets(): actual_targets.append(i.actualize()) # depends on [control=['for'], data=['i']] self.actualize_sources(self.sources(), properties) self.engine_.add_dependency(actual_targets, self.actual_sources_ + self.dependency_only_sources_) # FIXME: check the comment below. Was self.action_name_ [1] # Action name can include additional rule arguments, which should not # be passed to 'set-target-variables'. # FIXME: breaking circular dependency import toolset toolset.set_target_variables(self.manager_, self.action_name_, actual_targets, properties) engine = self.manager_.engine() # FIXME: this is supposed to help --out-xml option, but we don't # implement that now, and anyway, we should handle it in Python, # not but putting variables on bjam-level targets. bjam.call('set-target-variable', actual_targets, '.action', repr(self)) self.manager_.engine().set_update_action(self.action_name_, actual_targets, self.actual_sources_, properties) # Since we set up creating action here, we also set up # action for cleaning up self.manager_.engine().set_update_action('common.Clean', 'clean-all', actual_targets) return actual_targets
def plotwrapper(f): """ This decorator allows for PyMC arguments of various types to be passed to the plotting functions. It identifies the type of object and locates its trace(s), then passes the data to the wrapped plotting function. """ def wrapper(pymc_obj, *args, **kwargs): start = 0 if 'start' in kwargs: start = kwargs.pop('start') # Figure out what type of object it is try: # First try Model type for variable in pymc_obj._variables_to_tally: # Plot object if variable._plot is not False: data = pymc_obj.trace(variable.__name__)[start:] if size(data[-1]) >= 10 and variable._plot != True: continue elif variable.dtype is dtype('object'): continue name = variable.__name__ if args: name = '%s_%s' % (args[0], variable.__name__) f(data, name, *args, **kwargs) return except AttributeError: pass try: # Then try Trace type data = pymc_obj()[:] name = pymc_obj.name f(data, name, *args, **kwargs) return except (AttributeError, TypeError): pass try: # Then try Node type if pymc_obj._plot is not False: data = pymc_obj.trace()[start:] # This is deprecated. DH name = pymc_obj.__name__ f(data, name, *args, **kwargs) return except AttributeError: pass if isinstance(pymc_obj, dict): # Then try dictionary for i in pymc_obj: data = pymc_obj[i][start:] if args: i = '%s_%s' % (args[0], i) elif 'name' in kwargs: i = '%s_%s' % (kwargs.pop('name'), i) f(data, i, *args, **kwargs) return # If others fail, assume that raw data is passed f(pymc_obj, *args, **kwargs) wrapper.__doc__ = f.__doc__ wrapper.__name__ = f.__name__ return wrapper
def function[plotwrapper, parameter[f]]: constant[ This decorator allows for PyMC arguments of various types to be passed to the plotting functions. It identifies the type of object and locates its trace(s), then passes the data to the wrapped plotting function. ] def function[wrapper, parameter[pymc_obj]]: variable[start] assign[=] constant[0] if compare[constant[start] in name[kwargs]] begin[:] variable[start] assign[=] call[name[kwargs].pop, parameter[constant[start]]] <ast.Try object at 0x7da2041db4c0> <ast.Try object at 0x7da2041db970> <ast.Try object at 0x7da2041da4a0> if call[name[isinstance], parameter[name[pymc_obj], name[dict]]] begin[:] for taget[name[i]] in starred[name[pymc_obj]] begin[:] variable[data] assign[=] call[call[name[pymc_obj]][name[i]]][<ast.Slice object at 0x7da2041d90f0>] if name[args] begin[:] variable[i] assign[=] binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da2041d9db0>, <ast.Name object at 0x7da18c4ce050>]]] call[name[f], parameter[name[data], name[i], <ast.Starred object at 0x7da18c4ce650>]] return[None] call[name[f], parameter[name[pymc_obj], <ast.Starred object at 0x7da18c4cc550>]] name[wrapper].__doc__ assign[=] name[f].__doc__ name[wrapper].__name__ assign[=] name[f].__name__ return[name[wrapper]]
keyword[def] identifier[plotwrapper] ( identifier[f] ): literal[string] keyword[def] identifier[wrapper] ( identifier[pymc_obj] ,* identifier[args] ,** identifier[kwargs] ): identifier[start] = literal[int] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[start] = identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[try] : keyword[for] identifier[variable] keyword[in] identifier[pymc_obj] . identifier[_variables_to_tally] : keyword[if] identifier[variable] . identifier[_plot] keyword[is] keyword[not] keyword[False] : identifier[data] = identifier[pymc_obj] . identifier[trace] ( identifier[variable] . identifier[__name__] )[ identifier[start] :] keyword[if] identifier[size] ( identifier[data] [- literal[int] ])>= literal[int] keyword[and] identifier[variable] . identifier[_plot] != keyword[True] : keyword[continue] keyword[elif] identifier[variable] . identifier[dtype] keyword[is] identifier[dtype] ( literal[string] ): keyword[continue] identifier[name] = identifier[variable] . identifier[__name__] keyword[if] identifier[args] : identifier[name] = literal[string] %( identifier[args] [ literal[int] ], identifier[variable] . identifier[__name__] ) identifier[f] ( identifier[data] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] keyword[except] identifier[AttributeError] : keyword[pass] keyword[try] : identifier[data] = identifier[pymc_obj] ()[:] identifier[name] = identifier[pymc_obj] . identifier[name] identifier[f] ( identifier[data] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[pass] keyword[try] : keyword[if] identifier[pymc_obj] . identifier[_plot] keyword[is] keyword[not] keyword[False] : identifier[data] = identifier[pymc_obj] . identifier[trace] ()[ identifier[start] :] identifier[name] = identifier[pymc_obj] . 
identifier[__name__] identifier[f] ( identifier[data] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] keyword[except] identifier[AttributeError] : keyword[pass] keyword[if] identifier[isinstance] ( identifier[pymc_obj] , identifier[dict] ): keyword[for] identifier[i] keyword[in] identifier[pymc_obj] : identifier[data] = identifier[pymc_obj] [ identifier[i] ][ identifier[start] :] keyword[if] identifier[args] : identifier[i] = literal[string] %( identifier[args] [ literal[int] ], identifier[i] ) keyword[elif] literal[string] keyword[in] identifier[kwargs] : identifier[i] = literal[string] %( identifier[kwargs] . identifier[pop] ( literal[string] ), identifier[i] ) identifier[f] ( identifier[data] , identifier[i] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[f] ( identifier[pymc_obj] ,* identifier[args] ,** identifier[kwargs] ) identifier[wrapper] . identifier[__doc__] = identifier[f] . identifier[__doc__] identifier[wrapper] . identifier[__name__] = identifier[f] . identifier[__name__] keyword[return] identifier[wrapper]
def plotwrapper(f): """ This decorator allows for PyMC arguments of various types to be passed to the plotting functions. It identifies the type of object and locates its trace(s), then passes the data to the wrapped plotting function. """ def wrapper(pymc_obj, *args, **kwargs): start = 0 if 'start' in kwargs: start = kwargs.pop('start') # depends on [control=['if'], data=['kwargs']] # Figure out what type of object it is try: # First try Model type for variable in pymc_obj._variables_to_tally: # Plot object if variable._plot is not False: data = pymc_obj.trace(variable.__name__)[start:] if size(data[-1]) >= 10 and variable._plot != True: continue # depends on [control=['if'], data=[]] elif variable.dtype is dtype('object'): continue # depends on [control=['if'], data=[]] name = variable.__name__ if args: name = '%s_%s' % (args[0], variable.__name__) # depends on [control=['if'], data=[]] f(data, name, *args, **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['variable']] return # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] try: # Then try Trace type data = pymc_obj()[:] name = pymc_obj.name f(data, name, *args, **kwargs) return # depends on [control=['try'], data=[]] except (AttributeError, TypeError): pass # depends on [control=['except'], data=[]] try: # Then try Node type if pymc_obj._plot is not False: data = pymc_obj.trace()[start:] # This is deprecated. 
DH name = pymc_obj.__name__ f(data, name, *args, **kwargs) # depends on [control=['if'], data=[]] return # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] if isinstance(pymc_obj, dict): # Then try dictionary for i in pymc_obj: data = pymc_obj[i][start:] if args: i = '%s_%s' % (args[0], i) # depends on [control=['if'], data=[]] elif 'name' in kwargs: i = '%s_%s' % (kwargs.pop('name'), i) # depends on [control=['if'], data=['kwargs']] f(data, i, *args, **kwargs) # depends on [control=['for'], data=['i']] return # depends on [control=['if'], data=[]] # If others fail, assume that raw data is passed f(pymc_obj, *args, **kwargs) wrapper.__doc__ = f.__doc__ wrapper.__name__ = f.__name__ return wrapper
def clean_colnames(df): """ Cleans the column names on a DataFrame Parameters: df - DataFrame The DataFrame to clean """ col_list = [] for index in range(_dutils.cols(df)): col_list.append(df.columns[index].strip().lower().replace(' ','_')) df.columns = col_list
def function[clean_colnames, parameter[df]]: constant[ Cleans the column names on a DataFrame Parameters: df - DataFrame The DataFrame to clean ] variable[col_list] assign[=] list[[]] for taget[name[index]] in starred[call[name[range], parameter[call[name[_dutils].cols, parameter[name[df]]]]]] begin[:] call[name[col_list].append, parameter[call[call[call[call[name[df].columns][name[index]].strip, parameter[]].lower, parameter[]].replace, parameter[constant[ ], constant[_]]]]] name[df].columns assign[=] name[col_list]
keyword[def] identifier[clean_colnames] ( identifier[df] ): literal[string] identifier[col_list] =[] keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[_dutils] . identifier[cols] ( identifier[df] )): identifier[col_list] . identifier[append] ( identifier[df] . identifier[columns] [ identifier[index] ]. identifier[strip] (). identifier[lower] (). identifier[replace] ( literal[string] , literal[string] )) identifier[df] . identifier[columns] = identifier[col_list]
def clean_colnames(df): """ Cleans the column names on a DataFrame Parameters: df - DataFrame The DataFrame to clean """ col_list = [] for index in range(_dutils.cols(df)): col_list.append(df.columns[index].strip().lower().replace(' ', '_')) # depends on [control=['for'], data=['index']] df.columns = col_list
def authenticate_external(self, auth_params): """Verify credentials using the external auth library. in auth_params of type str The auth parameters, credentials, etc. out result of type str The authentification result. """ if not isinstance(auth_params, list): raise TypeError("auth_params can only be an instance of type list") for a in auth_params[:10]: if not isinstance(a, basestring): raise TypeError( "array can only contain objects of type basestring") result = self._call("authenticateExternal", in_p=[auth_params]) return result
def function[authenticate_external, parameter[self, auth_params]]: constant[Verify credentials using the external auth library. in auth_params of type str The auth parameters, credentials, etc. out result of type str The authentification result. ] if <ast.UnaryOp object at 0x7da20e9b04c0> begin[:] <ast.Raise object at 0x7da20e9b15d0> for taget[name[a]] in starred[call[name[auth_params]][<ast.Slice object at 0x7da20e9b2b60>]] begin[:] if <ast.UnaryOp object at 0x7da20e9b3b20> begin[:] <ast.Raise object at 0x7da20e9b2ce0> variable[result] assign[=] call[name[self]._call, parameter[constant[authenticateExternal]]] return[name[result]]
keyword[def] identifier[authenticate_external] ( identifier[self] , identifier[auth_params] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[auth_params] , identifier[list] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[for] identifier[a] keyword[in] identifier[auth_params] [: literal[int] ]: keyword[if] keyword[not] identifier[isinstance] ( identifier[a] , identifier[basestring] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[result] = identifier[self] . identifier[_call] ( literal[string] , identifier[in_p] =[ identifier[auth_params] ]) keyword[return] identifier[result]
def authenticate_external(self, auth_params): """Verify credentials using the external auth library. in auth_params of type str The auth parameters, credentials, etc. out result of type str The authentification result. """ if not isinstance(auth_params, list): raise TypeError('auth_params can only be an instance of type list') # depends on [control=['if'], data=[]] for a in auth_params[:10]: if not isinstance(a, basestring): raise TypeError('array can only contain objects of type basestring') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] result = self._call('authenticateExternal', in_p=[auth_params]) return result
def register_game(game_name, game_mode="NoFrameskip-v4"): """Create and register problems for the game. Args: game_name: str, one of the games in ATARI_GAMES, e.g. "bank_heist". game_mode: the frame skip and sticky keys config. Raises: ValueError: if game_name or game_mode are wrong. """ if game_name not in ATARI_GAMES: raise ValueError("Game %s not in ATARI_GAMES" % game_name) if game_mode not in ATARI_GAME_MODES: raise ValueError("Unknown ATARI game mode: %s." % game_mode) camel_game_name = misc_utils.snakecase_to_camelcase(game_name) + game_mode # Create and register the Problem cls = type("Gym%sRandom" % camel_game_name, (T2TGymEnv,), {"base_env_name": camel_game_name}) registry.register_problem(cls)
def function[register_game, parameter[game_name, game_mode]]: constant[Create and register problems for the game. Args: game_name: str, one of the games in ATARI_GAMES, e.g. "bank_heist". game_mode: the frame skip and sticky keys config. Raises: ValueError: if game_name or game_mode are wrong. ] if compare[name[game_name] <ast.NotIn object at 0x7da2590d7190> name[ATARI_GAMES]] begin[:] <ast.Raise object at 0x7da20e9b1e70> if compare[name[game_mode] <ast.NotIn object at 0x7da2590d7190> name[ATARI_GAME_MODES]] begin[:] <ast.Raise object at 0x7da1b1e15360> variable[camel_game_name] assign[=] binary_operation[call[name[misc_utils].snakecase_to_camelcase, parameter[name[game_name]]] + name[game_mode]] variable[cls] assign[=] call[name[type], parameter[binary_operation[constant[Gym%sRandom] <ast.Mod object at 0x7da2590d6920> name[camel_game_name]], tuple[[<ast.Name object at 0x7da1b1e179d0>]], dictionary[[<ast.Constant object at 0x7da1b1e14d00>], [<ast.Name object at 0x7da1b1e150c0>]]]] call[name[registry].register_problem, parameter[name[cls]]]
keyword[def] identifier[register_game] ( identifier[game_name] , identifier[game_mode] = literal[string] ): literal[string] keyword[if] identifier[game_name] keyword[not] keyword[in] identifier[ATARI_GAMES] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[game_name] ) keyword[if] identifier[game_mode] keyword[not] keyword[in] identifier[ATARI_GAME_MODES] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[game_mode] ) identifier[camel_game_name] = identifier[misc_utils] . identifier[snakecase_to_camelcase] ( identifier[game_name] )+ identifier[game_mode] identifier[cls] = identifier[type] ( literal[string] % identifier[camel_game_name] , ( identifier[T2TGymEnv] ,),{ literal[string] : identifier[camel_game_name] }) identifier[registry] . identifier[register_problem] ( identifier[cls] )
def register_game(game_name, game_mode='NoFrameskip-v4'): """Create and register problems for the game. Args: game_name: str, one of the games in ATARI_GAMES, e.g. "bank_heist". game_mode: the frame skip and sticky keys config. Raises: ValueError: if game_name or game_mode are wrong. """ if game_name not in ATARI_GAMES: raise ValueError('Game %s not in ATARI_GAMES' % game_name) # depends on [control=['if'], data=['game_name']] if game_mode not in ATARI_GAME_MODES: raise ValueError('Unknown ATARI game mode: %s.' % game_mode) # depends on [control=['if'], data=['game_mode']] camel_game_name = misc_utils.snakecase_to_camelcase(game_name) + game_mode # Create and register the Problem cls = type('Gym%sRandom' % camel_game_name, (T2TGymEnv,), {'base_env_name': camel_game_name}) registry.register_problem(cls)
def _set(name, value, function=None): """Internally set a config parameter. If you call it with no function, it sets the global parameter. If you call it with a function argument, it sets the value for the specified function. Normally, this should only be called with a function argument for internal code. This should not be called by code outside of the paranoid module. """ if name not in Settings.__global_setting_values.keys(): raise NameError("Invalid setting value") if name in Settings.__validate_settings.keys(): if not Settings.__validate_settings[name](value): raise ValueError("Invalid setting: %s = %s" % (name, value)) # Set the setting either globally (if no function is passed) # or else locally to the function (if a function is passed). if function: if not hasattr(function, Settings.FUNCTION_SETTINGS_NAME): setattr(function, Settings.FUNCTION_SETTINGS_NAME, {}) # Test if this wraps something. TODO this will fail # for nested decorators. This also assumes that, if # there is a wrapped function (super wraps sub), that # if super doesn't have settings, then sup doesn't # either. (This assumption is valid for paranoid # decorators since it properly uses update_wrapper, # but may not be valid for other decorators.) if hasattr(function, "__wrapped__"): setattr(function.__wrapped__, Settings.FUNCTION_SETTINGS_NAME, getattr(function, Settings.FUNCTION_SETTINGS_NAME)) getattr(function, Settings.FUNCTION_SETTINGS_NAME)[name] = value else: Settings.__global_setting_values[name] = value
def function[_set, parameter[name, value, function]]: constant[Internally set a config parameter. If you call it with no function, it sets the global parameter. If you call it with a function argument, it sets the value for the specified function. Normally, this should only be called with a function argument for internal code. This should not be called by code outside of the paranoid module. ] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> call[name[Settings].__global_setting_values.keys, parameter[]]] begin[:] <ast.Raise object at 0x7da1b049bc10> if compare[name[name] in call[name[Settings].__validate_settings.keys, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da1b0499900> begin[:] <ast.Raise object at 0x7da1b0498700> if name[function] begin[:] if <ast.UnaryOp object at 0x7da1b0498f10> begin[:] call[name[setattr], parameter[name[function], name[Settings].FUNCTION_SETTINGS_NAME, dictionary[[], []]]] if call[name[hasattr], parameter[name[function], constant[__wrapped__]]] begin[:] call[name[setattr], parameter[name[function].__wrapped__, name[Settings].FUNCTION_SETTINGS_NAME, call[name[getattr], parameter[name[function], name[Settings].FUNCTION_SETTINGS_NAME]]]] call[call[name[getattr], parameter[name[function], name[Settings].FUNCTION_SETTINGS_NAME]]][name[name]] assign[=] name[value]
keyword[def] identifier[_set] ( identifier[name] , identifier[value] , identifier[function] = keyword[None] ): literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[Settings] . identifier[__global_setting_values] . identifier[keys] (): keyword[raise] identifier[NameError] ( literal[string] ) keyword[if] identifier[name] keyword[in] identifier[Settings] . identifier[__validate_settings] . identifier[keys] (): keyword[if] keyword[not] identifier[Settings] . identifier[__validate_settings] [ identifier[name] ]( identifier[value] ): keyword[raise] identifier[ValueError] ( literal[string] % ( identifier[name] , identifier[value] )) keyword[if] identifier[function] : keyword[if] keyword[not] identifier[hasattr] ( identifier[function] , identifier[Settings] . identifier[FUNCTION_SETTINGS_NAME] ): identifier[setattr] ( identifier[function] , identifier[Settings] . identifier[FUNCTION_SETTINGS_NAME] ,{}) keyword[if] identifier[hasattr] ( identifier[function] , literal[string] ): identifier[setattr] ( identifier[function] . identifier[__wrapped__] , identifier[Settings] . identifier[FUNCTION_SETTINGS_NAME] , identifier[getattr] ( identifier[function] , identifier[Settings] . identifier[FUNCTION_SETTINGS_NAME] )) identifier[getattr] ( identifier[function] , identifier[Settings] . identifier[FUNCTION_SETTINGS_NAME] )[ identifier[name] ]= identifier[value] keyword[else] : identifier[Settings] . identifier[__global_setting_values] [ identifier[name] ]= identifier[value]
def _set(name, value, function=None): """Internally set a config parameter. If you call it with no function, it sets the global parameter. If you call it with a function argument, it sets the value for the specified function. Normally, this should only be called with a function argument for internal code. This should not be called by code outside of the paranoid module. """ if name not in Settings.__global_setting_values.keys(): raise NameError('Invalid setting value') # depends on [control=['if'], data=[]] if name in Settings.__validate_settings.keys(): if not Settings.__validate_settings[name](value): raise ValueError('Invalid setting: %s = %s' % (name, value)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['name']] # Set the setting either globally (if no function is passed) # or else locally to the function (if a function is passed). if function: if not hasattr(function, Settings.FUNCTION_SETTINGS_NAME): setattr(function, Settings.FUNCTION_SETTINGS_NAME, {}) # Test if this wraps something. TODO this will fail # for nested decorators. This also assumes that, if # there is a wrapped function (super wraps sub), that # if super doesn't have settings, then sup doesn't # either. (This assumption is valid for paranoid # decorators since it properly uses update_wrapper, # but may not be valid for other decorators.) if hasattr(function, '__wrapped__'): setattr(function.__wrapped__, Settings.FUNCTION_SETTINGS_NAME, getattr(function, Settings.FUNCTION_SETTINGS_NAME)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] getattr(function, Settings.FUNCTION_SETTINGS_NAME)[name] = value # depends on [control=['if'], data=[]] else: Settings.__global_setting_values[name] = value
def get_relationship_form_for_create(self, source_id=None, destination_id=None, relationship_record_types=None): """Gets the relationship form for creating new relationships. A new form should be requested for each create transaction. arg: source_id (osid.id.Id): ``Id`` of a peer arg: destination_id (osid.id.Id): ``Id`` of the related peer arg: relationship_record_types (osid.type.Type[]): array of relationship record types return: (osid.relationship.RelationshipForm) - the relationship form raise: NotFound - ``source_id`` or ``destination_id`` is not found raise: NullArgument - ``source_id`` or ``destination_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - unable to get form for requested recod types *compliance: mandatory -- This method must be implemented.* """ if source_id is None or destination_id is None: raise NullArgument() if relationship_record_types is None: pass # Still need to deal with the record_types argument relationship_form = objects.RelationshipForm(osid_object_map=None, source_id=source_id, destination_id=destination_id) self._forms[relationship_form.get_id().get_identifier()] = not CREATED return relationship_form
def function[get_relationship_form_for_create, parameter[self, source_id, destination_id, relationship_record_types]]: constant[Gets the relationship form for creating new relationships. A new form should be requested for each create transaction. arg: source_id (osid.id.Id): ``Id`` of a peer arg: destination_id (osid.id.Id): ``Id`` of the related peer arg: relationship_record_types (osid.type.Type[]): array of relationship record types return: (osid.relationship.RelationshipForm) - the relationship form raise: NotFound - ``source_id`` or ``destination_id`` is not found raise: NullArgument - ``source_id`` or ``destination_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - unable to get form for requested recod types *compliance: mandatory -- This method must be implemented.* ] if <ast.BoolOp object at 0x7da18f58dcc0> begin[:] <ast.Raise object at 0x7da20c6e74f0> if compare[name[relationship_record_types] is constant[None]] begin[:] pass variable[relationship_form] assign[=] call[name[objects].RelationshipForm, parameter[]] call[name[self]._forms][call[call[name[relationship_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] <ast.UnaryOp object at 0x7da20c6e6ce0> return[name[relationship_form]]
keyword[def] identifier[get_relationship_form_for_create] ( identifier[self] , identifier[source_id] = keyword[None] , identifier[destination_id] = keyword[None] , identifier[relationship_record_types] = keyword[None] ): literal[string] keyword[if] identifier[source_id] keyword[is] keyword[None] keyword[or] identifier[destination_id] keyword[is] keyword[None] : keyword[raise] identifier[NullArgument] () keyword[if] identifier[relationship_record_types] keyword[is] keyword[None] : keyword[pass] identifier[relationship_form] = identifier[objects] . identifier[RelationshipForm] ( identifier[osid_object_map] = keyword[None] , identifier[source_id] = identifier[source_id] , identifier[destination_id] = identifier[destination_id] ) identifier[self] . identifier[_forms] [ identifier[relationship_form] . identifier[get_id] (). identifier[get_identifier] ()]= keyword[not] identifier[CREATED] keyword[return] identifier[relationship_form]
def get_relationship_form_for_create(self, source_id=None, destination_id=None, relationship_record_types=None): """Gets the relationship form for creating new relationships. A new form should be requested for each create transaction. arg: source_id (osid.id.Id): ``Id`` of a peer arg: destination_id (osid.id.Id): ``Id`` of the related peer arg: relationship_record_types (osid.type.Type[]): array of relationship record types return: (osid.relationship.RelationshipForm) - the relationship form raise: NotFound - ``source_id`` or ``destination_id`` is not found raise: NullArgument - ``source_id`` or ``destination_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - unable to get form for requested recod types *compliance: mandatory -- This method must be implemented.* """ if source_id is None or destination_id is None: raise NullArgument() # depends on [control=['if'], data=[]] if relationship_record_types is None: pass # Still need to deal with the record_types argument # depends on [control=['if'], data=[]] relationship_form = objects.RelationshipForm(osid_object_map=None, source_id=source_id, destination_id=destination_id) self._forms[relationship_form.get_id().get_identifier()] = not CREATED return relationship_form
def walk_files(args, root, directory, action): """ Recusively go do the subdirectories of the directory, calling the action on each file """ for entry in os.listdir(directory): if is_hidden(args, entry): continue if is_excluded_directory(args, entry): continue if is_in_default_excludes(entry): continue if not is_included(args, entry): continue if is_excluded(args, entry, directory): continue entry = os.path.join(directory, entry) if os.path.isdir(entry): walk_files(args, root, entry, action) if os.path.isfile(entry): if is_binary(entry): continue action(entry)
def function[walk_files, parameter[args, root, directory, action]]: constant[ Recusively go do the subdirectories of the directory, calling the action on each file ] for taget[name[entry]] in starred[call[name[os].listdir, parameter[name[directory]]]] begin[:] if call[name[is_hidden], parameter[name[args], name[entry]]] begin[:] continue if call[name[is_excluded_directory], parameter[name[args], name[entry]]] begin[:] continue if call[name[is_in_default_excludes], parameter[name[entry]]] begin[:] continue if <ast.UnaryOp object at 0x7da20c795a20> begin[:] continue if call[name[is_excluded], parameter[name[args], name[entry], name[directory]]] begin[:] continue variable[entry] assign[=] call[name[os].path.join, parameter[name[directory], name[entry]]] if call[name[os].path.isdir, parameter[name[entry]]] begin[:] call[name[walk_files], parameter[name[args], name[root], name[entry], name[action]]] if call[name[os].path.isfile, parameter[name[entry]]] begin[:] if call[name[is_binary], parameter[name[entry]]] begin[:] continue call[name[action], parameter[name[entry]]]
keyword[def] identifier[walk_files] ( identifier[args] , identifier[root] , identifier[directory] , identifier[action] ): literal[string] keyword[for] identifier[entry] keyword[in] identifier[os] . identifier[listdir] ( identifier[directory] ): keyword[if] identifier[is_hidden] ( identifier[args] , identifier[entry] ): keyword[continue] keyword[if] identifier[is_excluded_directory] ( identifier[args] , identifier[entry] ): keyword[continue] keyword[if] identifier[is_in_default_excludes] ( identifier[entry] ): keyword[continue] keyword[if] keyword[not] identifier[is_included] ( identifier[args] , identifier[entry] ): keyword[continue] keyword[if] identifier[is_excluded] ( identifier[args] , identifier[entry] , identifier[directory] ): keyword[continue] identifier[entry] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , identifier[entry] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[entry] ): identifier[walk_files] ( identifier[args] , identifier[root] , identifier[entry] , identifier[action] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[entry] ): keyword[if] identifier[is_binary] ( identifier[entry] ): keyword[continue] identifier[action] ( identifier[entry] )
def walk_files(args, root, directory, action): """ Recusively go do the subdirectories of the directory, calling the action on each file """ for entry in os.listdir(directory): if is_hidden(args, entry): continue # depends on [control=['if'], data=[]] if is_excluded_directory(args, entry): continue # depends on [control=['if'], data=[]] if is_in_default_excludes(entry): continue # depends on [control=['if'], data=[]] if not is_included(args, entry): continue # depends on [control=['if'], data=[]] if is_excluded(args, entry, directory): continue # depends on [control=['if'], data=[]] entry = os.path.join(directory, entry) if os.path.isdir(entry): walk_files(args, root, entry, action) # depends on [control=['if'], data=[]] if os.path.isfile(entry): if is_binary(entry): continue # depends on [control=['if'], data=[]] action(entry) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']]
def go_to_column(self, column): """ Moves the text cursor to given column. :param column: Column to go to. :type column: int :return: Method success. :rtype: bool """ cursor = self.textCursor() cursor.setPosition(cursor.block().position() + column) self.setTextCursor(cursor) return True
def function[go_to_column, parameter[self, column]]: constant[ Moves the text cursor to given column. :param column: Column to go to. :type column: int :return: Method success. :rtype: bool ] variable[cursor] assign[=] call[name[self].textCursor, parameter[]] call[name[cursor].setPosition, parameter[binary_operation[call[call[name[cursor].block, parameter[]].position, parameter[]] + name[column]]]] call[name[self].setTextCursor, parameter[name[cursor]]] return[constant[True]]
keyword[def] identifier[go_to_column] ( identifier[self] , identifier[column] ): literal[string] identifier[cursor] = identifier[self] . identifier[textCursor] () identifier[cursor] . identifier[setPosition] ( identifier[cursor] . identifier[block] (). identifier[position] ()+ identifier[column] ) identifier[self] . identifier[setTextCursor] ( identifier[cursor] ) keyword[return] keyword[True]
def go_to_column(self, column): """ Moves the text cursor to given column. :param column: Column to go to. :type column: int :return: Method success. :rtype: bool """ cursor = self.textCursor() cursor.setPosition(cursor.block().position() + column) self.setTextCursor(cursor) return True
def constraint_df(self): """ A DataFrame representing all constraints, hidden or not """ df = pd.DataFrame() for name,c in self.constraints.items(): df[name] = c.ok for name,c in self.hidden_constraints.items(): df[name] = c.ok return df
def function[constraint_df, parameter[self]]: constant[ A DataFrame representing all constraints, hidden or not ] variable[df] assign[=] call[name[pd].DataFrame, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b28d4580>, <ast.Name object at 0x7da1b28d6f50>]]] in starred[call[name[self].constraints.items, parameter[]]] begin[:] call[name[df]][name[name]] assign[=] name[c].ok for taget[tuple[[<ast.Name object at 0x7da1b28d4a30>, <ast.Name object at 0x7da1b28d5cf0>]]] in starred[call[name[self].hidden_constraints.items, parameter[]]] begin[:] call[name[df]][name[name]] assign[=] name[c].ok return[name[df]]
keyword[def] identifier[constraint_df] ( identifier[self] ): literal[string] identifier[df] = identifier[pd] . identifier[DataFrame] () keyword[for] identifier[name] , identifier[c] keyword[in] identifier[self] . identifier[constraints] . identifier[items] (): identifier[df] [ identifier[name] ]= identifier[c] . identifier[ok] keyword[for] identifier[name] , identifier[c] keyword[in] identifier[self] . identifier[hidden_constraints] . identifier[items] (): identifier[df] [ identifier[name] ]= identifier[c] . identifier[ok] keyword[return] identifier[df]
def constraint_df(self): """ A DataFrame representing all constraints, hidden or not """ df = pd.DataFrame() for (name, c) in self.constraints.items(): df[name] = c.ok # depends on [control=['for'], data=[]] for (name, c) in self.hidden_constraints.items(): df[name] = c.ok # depends on [control=['for'], data=[]] return df
def return_markers(self, state='MicromedCode'): """Return all the markers (also called triggers or events). Returns ------- list of dict where each dict contains 'name' as str, 'start' and 'end' as float in seconds from the start of the recordings, and 'chan' as list of str with the channels involved (if not of relevance, it's None). Raises ------ FileNotFoundError when it cannot read the events for some reason (don't use other exceptions). """ markers = [] try: all_states = self._read_states() except ValueError: # cryptic error when reading states return markers try: x = all_states[state] except KeyError: return markers markers = [] i_mrk = hstack((0, where(diff(x))[0] + 1, len(x))) for i0, i1 in zip(i_mrk[:-1], i_mrk[1:]): marker = {'name': str(x[i0]), 'start': (i0) / self.s_freq, 'end': i1 / self.s_freq, } markers.append(marker) return markers
def function[return_markers, parameter[self, state]]: constant[Return all the markers (also called triggers or events). Returns ------- list of dict where each dict contains 'name' as str, 'start' and 'end' as float in seconds from the start of the recordings, and 'chan' as list of str with the channels involved (if not of relevance, it's None). Raises ------ FileNotFoundError when it cannot read the events for some reason (don't use other exceptions). ] variable[markers] assign[=] list[[]] <ast.Try object at 0x7da1b0dee590> <ast.Try object at 0x7da20c7951e0> variable[markers] assign[=] list[[]] variable[i_mrk] assign[=] call[name[hstack], parameter[tuple[[<ast.Constant object at 0x7da1b0e05de0>, <ast.BinOp object at 0x7da1b0e056c0>, <ast.Call object at 0x7da1b0e06f50>]]]] for taget[tuple[[<ast.Name object at 0x7da1b0e072e0>, <ast.Name object at 0x7da1b0e06e60>]]] in starred[call[name[zip], parameter[call[name[i_mrk]][<ast.Slice object at 0x7da1b0e07a60>], call[name[i_mrk]][<ast.Slice object at 0x7da1b0e05a20>]]]] begin[:] variable[marker] assign[=] dictionary[[<ast.Constant object at 0x7da1b0e070d0>, <ast.Constant object at 0x7da1b0e044c0>, <ast.Constant object at 0x7da1b0e07c40>], [<ast.Call object at 0x7da1b0e04cd0>, <ast.BinOp object at 0x7da1b0e06ef0>, <ast.BinOp object at 0x7da1b0e07760>]] call[name[markers].append, parameter[name[marker]]] return[name[markers]]
keyword[def] identifier[return_markers] ( identifier[self] , identifier[state] = literal[string] ): literal[string] identifier[markers] =[] keyword[try] : identifier[all_states] = identifier[self] . identifier[_read_states] () keyword[except] identifier[ValueError] : keyword[return] identifier[markers] keyword[try] : identifier[x] = identifier[all_states] [ identifier[state] ] keyword[except] identifier[KeyError] : keyword[return] identifier[markers] identifier[markers] =[] identifier[i_mrk] = identifier[hstack] (( literal[int] , identifier[where] ( identifier[diff] ( identifier[x] ))[ literal[int] ]+ literal[int] , identifier[len] ( identifier[x] ))) keyword[for] identifier[i0] , identifier[i1] keyword[in] identifier[zip] ( identifier[i_mrk] [:- literal[int] ], identifier[i_mrk] [ literal[int] :]): identifier[marker] ={ literal[string] : identifier[str] ( identifier[x] [ identifier[i0] ]), literal[string] :( identifier[i0] )/ identifier[self] . identifier[s_freq] , literal[string] : identifier[i1] / identifier[self] . identifier[s_freq] , } identifier[markers] . identifier[append] ( identifier[marker] ) keyword[return] identifier[markers]
def return_markers(self, state='MicromedCode'): """Return all the markers (also called triggers or events). Returns ------- list of dict where each dict contains 'name' as str, 'start' and 'end' as float in seconds from the start of the recordings, and 'chan' as list of str with the channels involved (if not of relevance, it's None). Raises ------ FileNotFoundError when it cannot read the events for some reason (don't use other exceptions). """ markers = [] try: all_states = self._read_states() # depends on [control=['try'], data=[]] except ValueError: # cryptic error when reading states return markers # depends on [control=['except'], data=[]] try: x = all_states[state] # depends on [control=['try'], data=[]] except KeyError: return markers # depends on [control=['except'], data=[]] markers = [] i_mrk = hstack((0, where(diff(x))[0] + 1, len(x))) for (i0, i1) in zip(i_mrk[:-1], i_mrk[1:]): marker = {'name': str(x[i0]), 'start': i0 / self.s_freq, 'end': i1 / self.s_freq} markers.append(marker) # depends on [control=['for'], data=[]] return markers
def isValidUnit(self, w): """Checks if a string represents a valid quantities unit. Args: w (str): A string to be tested against the set of valid quantities units. Returns: True if the string can be used as a unit in the quantities module. """ bad = set(['point', 'a']) if w in bad: return False try: pq.Quantity(0.0, w) return True except: return w == '/'
def function[isValidUnit, parameter[self, w]]: constant[Checks if a string represents a valid quantities unit. Args: w (str): A string to be tested against the set of valid quantities units. Returns: True if the string can be used as a unit in the quantities module. ] variable[bad] assign[=] call[name[set], parameter[list[[<ast.Constant object at 0x7da1b2455ba0>, <ast.Constant object at 0x7da1b2455c60>]]]] if compare[name[w] in name[bad]] begin[:] return[constant[False]] <ast.Try object at 0x7da1b2456170>
keyword[def] identifier[isValidUnit] ( identifier[self] , identifier[w] ): literal[string] identifier[bad] = identifier[set] ([ literal[string] , literal[string] ]) keyword[if] identifier[w] keyword[in] identifier[bad] : keyword[return] keyword[False] keyword[try] : identifier[pq] . identifier[Quantity] ( literal[int] , identifier[w] ) keyword[return] keyword[True] keyword[except] : keyword[return] identifier[w] == literal[string]
def isValidUnit(self, w): """Checks if a string represents a valid quantities unit. Args: w (str): A string to be tested against the set of valid quantities units. Returns: True if the string can be used as a unit in the quantities module. """ bad = set(['point', 'a']) if w in bad: return False # depends on [control=['if'], data=[]] try: pq.Quantity(0.0, w) return True # depends on [control=['try'], data=[]] except: return w == '/' # depends on [control=['except'], data=[]]
def update_gradients_diag(self, dL_dKdiag, X): """derivative of the diagonal of the covariance matrix with respect to the parameters.""" self.variance.gradient = np.sum(dL_dKdiag) self.period.gradient = 0 self.lengthscale.gradient = 0
def function[update_gradients_diag, parameter[self, dL_dKdiag, X]]: constant[derivative of the diagonal of the covariance matrix with respect to the parameters.] name[self].variance.gradient assign[=] call[name[np].sum, parameter[name[dL_dKdiag]]] name[self].period.gradient assign[=] constant[0] name[self].lengthscale.gradient assign[=] constant[0]
keyword[def] identifier[update_gradients_diag] ( identifier[self] , identifier[dL_dKdiag] , identifier[X] ): literal[string] identifier[self] . identifier[variance] . identifier[gradient] = identifier[np] . identifier[sum] ( identifier[dL_dKdiag] ) identifier[self] . identifier[period] . identifier[gradient] = literal[int] identifier[self] . identifier[lengthscale] . identifier[gradient] = literal[int]
def update_gradients_diag(self, dL_dKdiag, X): """derivative of the diagonal of the covariance matrix with respect to the parameters.""" self.variance.gradient = np.sum(dL_dKdiag) self.period.gradient = 0 self.lengthscale.gradient = 0
def distinct(self, field=None): """ If field is None, then it means that it'll create: select distinct * and if field is not None, for example: 'name', it'll create: select distinc(name), """ if field is None: self.funcs.append(('distinct', (), {})) else: self.distinct_field = field return self
def function[distinct, parameter[self, field]]: constant[ If field is None, then it means that it'll create: select distinct * and if field is not None, for example: 'name', it'll create: select distinc(name), ] if compare[name[field] is constant[None]] begin[:] call[name[self].funcs.append, parameter[tuple[[<ast.Constant object at 0x7da20c6ab8b0>, <ast.Tuple object at 0x7da20c6aa260>, <ast.Dict object at 0x7da20c6aaec0>]]]] return[name[self]]
keyword[def] identifier[distinct] ( identifier[self] , identifier[field] = keyword[None] ): literal[string] keyword[if] identifier[field] keyword[is] keyword[None] : identifier[self] . identifier[funcs] . identifier[append] (( literal[string] ,(),{})) keyword[else] : identifier[self] . identifier[distinct_field] = identifier[field] keyword[return] identifier[self]
def distinct(self, field=None): """ If field is None, then it means that it'll create: select distinct * and if field is not None, for example: 'name', it'll create: select distinc(name), """ if field is None: self.funcs.append(('distinct', (), {})) # depends on [control=['if'], data=[]] else: self.distinct_field = field return self
def _parse_transpile_args(circuits, backend, basis_gates, coupling_map, backend_properties, initial_layout, seed_transpiler, optimization_level, pass_manager): """Resolve the various types of args allowed to the transpile() function through duck typing, overriding args, etc. Refer to the transpile() docstring for details on what types of inputs are allowed. Here the args are resolved by converting them to standard instances, and prioritizing them in case a transpile option is passed through multiple args (explicitly setting an arg has more priority than the arg set by backend) Returns: list[TranspileConfig]: a transpile config for each circuit, which is a standardized object that configures the transpiler and determines the pass manager to use. """ # Each arg could be single or a list. If list, it must be the same size as # number of circuits. If single, duplicate to create a list of that size. num_circuits = len(circuits) basis_gates = _parse_basis_gates(basis_gates, backend, circuits) coupling_map = _parse_coupling_map(coupling_map, backend, num_circuits) backend_properties = _parse_backend_properties(backend_properties, backend, num_circuits) initial_layout = _parse_initial_layout(initial_layout, circuits) seed_transpiler = _parse_seed_transpiler(seed_transpiler, num_circuits) optimization_level = _parse_optimization_level(optimization_level, num_circuits) pass_manager = _parse_pass_manager(pass_manager, num_circuits) transpile_configs = [] for args in zip(basis_gates, coupling_map, backend_properties, initial_layout, seed_transpiler, optimization_level, pass_manager): transpile_config = TranspileConfig(basis_gates=args[0], coupling_map=args[1], backend_properties=args[2], initial_layout=args[3], seed_transpiler=args[4], optimization_level=args[5], pass_manager=args[6]) transpile_configs.append(transpile_config) return transpile_configs
def function[_parse_transpile_args, parameter[circuits, backend, basis_gates, coupling_map, backend_properties, initial_layout, seed_transpiler, optimization_level, pass_manager]]: constant[Resolve the various types of args allowed to the transpile() function through duck typing, overriding args, etc. Refer to the transpile() docstring for details on what types of inputs are allowed. Here the args are resolved by converting them to standard instances, and prioritizing them in case a transpile option is passed through multiple args (explicitly setting an arg has more priority than the arg set by backend) Returns: list[TranspileConfig]: a transpile config for each circuit, which is a standardized object that configures the transpiler and determines the pass manager to use. ] variable[num_circuits] assign[=] call[name[len], parameter[name[circuits]]] variable[basis_gates] assign[=] call[name[_parse_basis_gates], parameter[name[basis_gates], name[backend], name[circuits]]] variable[coupling_map] assign[=] call[name[_parse_coupling_map], parameter[name[coupling_map], name[backend], name[num_circuits]]] variable[backend_properties] assign[=] call[name[_parse_backend_properties], parameter[name[backend_properties], name[backend], name[num_circuits]]] variable[initial_layout] assign[=] call[name[_parse_initial_layout], parameter[name[initial_layout], name[circuits]]] variable[seed_transpiler] assign[=] call[name[_parse_seed_transpiler], parameter[name[seed_transpiler], name[num_circuits]]] variable[optimization_level] assign[=] call[name[_parse_optimization_level], parameter[name[optimization_level], name[num_circuits]]] variable[pass_manager] assign[=] call[name[_parse_pass_manager], parameter[name[pass_manager], name[num_circuits]]] variable[transpile_configs] assign[=] list[[]] for taget[name[args]] in starred[call[name[zip], parameter[name[basis_gates], name[coupling_map], name[backend_properties], name[initial_layout], name[seed_transpiler], name[optimization_level], 
name[pass_manager]]]] begin[:] variable[transpile_config] assign[=] call[name[TranspileConfig], parameter[]] call[name[transpile_configs].append, parameter[name[transpile_config]]] return[name[transpile_configs]]
keyword[def] identifier[_parse_transpile_args] ( identifier[circuits] , identifier[backend] , identifier[basis_gates] , identifier[coupling_map] , identifier[backend_properties] , identifier[initial_layout] , identifier[seed_transpiler] , identifier[optimization_level] , identifier[pass_manager] ): literal[string] identifier[num_circuits] = identifier[len] ( identifier[circuits] ) identifier[basis_gates] = identifier[_parse_basis_gates] ( identifier[basis_gates] , identifier[backend] , identifier[circuits] ) identifier[coupling_map] = identifier[_parse_coupling_map] ( identifier[coupling_map] , identifier[backend] , identifier[num_circuits] ) identifier[backend_properties] = identifier[_parse_backend_properties] ( identifier[backend_properties] , identifier[backend] , identifier[num_circuits] ) identifier[initial_layout] = identifier[_parse_initial_layout] ( identifier[initial_layout] , identifier[circuits] ) identifier[seed_transpiler] = identifier[_parse_seed_transpiler] ( identifier[seed_transpiler] , identifier[num_circuits] ) identifier[optimization_level] = identifier[_parse_optimization_level] ( identifier[optimization_level] , identifier[num_circuits] ) identifier[pass_manager] = identifier[_parse_pass_manager] ( identifier[pass_manager] , identifier[num_circuits] ) identifier[transpile_configs] =[] keyword[for] identifier[args] keyword[in] identifier[zip] ( identifier[basis_gates] , identifier[coupling_map] , identifier[backend_properties] , identifier[initial_layout] , identifier[seed_transpiler] , identifier[optimization_level] , identifier[pass_manager] ): identifier[transpile_config] = identifier[TranspileConfig] ( identifier[basis_gates] = identifier[args] [ literal[int] ], identifier[coupling_map] = identifier[args] [ literal[int] ], identifier[backend_properties] = identifier[args] [ literal[int] ], identifier[initial_layout] = identifier[args] [ literal[int] ], identifier[seed_transpiler] = identifier[args] [ literal[int] ], 
identifier[optimization_level] = identifier[args] [ literal[int] ], identifier[pass_manager] = identifier[args] [ literal[int] ]) identifier[transpile_configs] . identifier[append] ( identifier[transpile_config] ) keyword[return] identifier[transpile_configs]
def _parse_transpile_args(circuits, backend, basis_gates, coupling_map, backend_properties, initial_layout, seed_transpiler, optimization_level, pass_manager): """Resolve the various types of args allowed to the transpile() function through duck typing, overriding args, etc. Refer to the transpile() docstring for details on what types of inputs are allowed. Here the args are resolved by converting them to standard instances, and prioritizing them in case a transpile option is passed through multiple args (explicitly setting an arg has more priority than the arg set by backend) Returns: list[TranspileConfig]: a transpile config for each circuit, which is a standardized object that configures the transpiler and determines the pass manager to use. """ # Each arg could be single or a list. If list, it must be the same size as # number of circuits. If single, duplicate to create a list of that size. num_circuits = len(circuits) basis_gates = _parse_basis_gates(basis_gates, backend, circuits) coupling_map = _parse_coupling_map(coupling_map, backend, num_circuits) backend_properties = _parse_backend_properties(backend_properties, backend, num_circuits) initial_layout = _parse_initial_layout(initial_layout, circuits) seed_transpiler = _parse_seed_transpiler(seed_transpiler, num_circuits) optimization_level = _parse_optimization_level(optimization_level, num_circuits) pass_manager = _parse_pass_manager(pass_manager, num_circuits) transpile_configs = [] for args in zip(basis_gates, coupling_map, backend_properties, initial_layout, seed_transpiler, optimization_level, pass_manager): transpile_config = TranspileConfig(basis_gates=args[0], coupling_map=args[1], backend_properties=args[2], initial_layout=args[3], seed_transpiler=args[4], optimization_level=args[5], pass_manager=args[6]) transpile_configs.append(transpile_config) # depends on [control=['for'], data=['args']] return transpile_configs
def list_licenses(service_instance=None): ''' Lists all licenses on a vCenter. service_instance Service instance (vim.ServiceInstance) of the vCenter/ESXi host. Default is None. .. code-block:: bash salt '*' vsphere.list_licenses ''' log.trace('Retrieving all licenses') licenses = salt.utils.vmware.get_licenses(service_instance) ret_dict = [{'key': l.licenseKey, 'name': l.name, 'description': l.labels[0].value if l.labels else None, # VMware handles unlimited capacity as 0 'capacity': l.total if l.total > 0 else sys.maxsize, 'used': l.used if l.used else 0} for l in licenses] return ret_dict
def function[list_licenses, parameter[service_instance]]: constant[ Lists all licenses on a vCenter. service_instance Service instance (vim.ServiceInstance) of the vCenter/ESXi host. Default is None. .. code-block:: bash salt '*' vsphere.list_licenses ] call[name[log].trace, parameter[constant[Retrieving all licenses]]] variable[licenses] assign[=] call[name[salt].utils.vmware.get_licenses, parameter[name[service_instance]]] variable[ret_dict] assign[=] <ast.ListComp object at 0x7da207f02d70> return[name[ret_dict]]
keyword[def] identifier[list_licenses] ( identifier[service_instance] = keyword[None] ): literal[string] identifier[log] . identifier[trace] ( literal[string] ) identifier[licenses] = identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_licenses] ( identifier[service_instance] ) identifier[ret_dict] =[{ literal[string] : identifier[l] . identifier[licenseKey] , literal[string] : identifier[l] . identifier[name] , literal[string] : identifier[l] . identifier[labels] [ literal[int] ]. identifier[value] keyword[if] identifier[l] . identifier[labels] keyword[else] keyword[None] , literal[string] : identifier[l] . identifier[total] keyword[if] identifier[l] . identifier[total] > literal[int] keyword[else] identifier[sys] . identifier[maxsize] , literal[string] : identifier[l] . identifier[used] keyword[if] identifier[l] . identifier[used] keyword[else] literal[int] } keyword[for] identifier[l] keyword[in] identifier[licenses] ] keyword[return] identifier[ret_dict]
def list_licenses(service_instance=None): """ Lists all licenses on a vCenter. service_instance Service instance (vim.ServiceInstance) of the vCenter/ESXi host. Default is None. .. code-block:: bash salt '*' vsphere.list_licenses """ log.trace('Retrieving all licenses') licenses = salt.utils.vmware.get_licenses(service_instance) # VMware handles unlimited capacity as 0 ret_dict = [{'key': l.licenseKey, 'name': l.name, 'description': l.labels[0].value if l.labels else None, 'capacity': l.total if l.total > 0 else sys.maxsize, 'used': l.used if l.used else 0} for l in licenses] return ret_dict
def construct_arguments(self, args, kwargs, options, bound=False): """ Construct args list and kwargs dictionary for this signature. They are created such that: - the original explicit call arguments (args, kwargs) are preserved - missing arguments are filled in by name using options (if possible) - default arguments are overridden by options - TypeError is thrown if: * kwargs contains one or more unexpected keyword arguments * conflicting values for a parameter in both args and kwargs * there is an unfilled parameter at the end of this process """ expected_args = self._get_expected_args(bound) self._assert_no_unexpected_args(expected_args, args) self._assert_no_unexpected_kwargs(expected_args, kwargs) self._assert_no_duplicate_args(expected_args, args, kwargs) args, kwargs = self._fill_in_options(args, kwargs, options, bound) self._assert_no_missing_args(args, kwargs, bound) return args, kwargs
def function[construct_arguments, parameter[self, args, kwargs, options, bound]]: constant[ Construct args list and kwargs dictionary for this signature. They are created such that: - the original explicit call arguments (args, kwargs) are preserved - missing arguments are filled in by name using options (if possible) - default arguments are overridden by options - TypeError is thrown if: * kwargs contains one or more unexpected keyword arguments * conflicting values for a parameter in both args and kwargs * there is an unfilled parameter at the end of this process ] variable[expected_args] assign[=] call[name[self]._get_expected_args, parameter[name[bound]]] call[name[self]._assert_no_unexpected_args, parameter[name[expected_args], name[args]]] call[name[self]._assert_no_unexpected_kwargs, parameter[name[expected_args], name[kwargs]]] call[name[self]._assert_no_duplicate_args, parameter[name[expected_args], name[args], name[kwargs]]] <ast.Tuple object at 0x7da1b17d4f70> assign[=] call[name[self]._fill_in_options, parameter[name[args], name[kwargs], name[options], name[bound]]] call[name[self]._assert_no_missing_args, parameter[name[args], name[kwargs], name[bound]]] return[tuple[[<ast.Name object at 0x7da1b17d6530>, <ast.Name object at 0x7da1b17d64a0>]]]
keyword[def] identifier[construct_arguments] ( identifier[self] , identifier[args] , identifier[kwargs] , identifier[options] , identifier[bound] = keyword[False] ): literal[string] identifier[expected_args] = identifier[self] . identifier[_get_expected_args] ( identifier[bound] ) identifier[self] . identifier[_assert_no_unexpected_args] ( identifier[expected_args] , identifier[args] ) identifier[self] . identifier[_assert_no_unexpected_kwargs] ( identifier[expected_args] , identifier[kwargs] ) identifier[self] . identifier[_assert_no_duplicate_args] ( identifier[expected_args] , identifier[args] , identifier[kwargs] ) identifier[args] , identifier[kwargs] = identifier[self] . identifier[_fill_in_options] ( identifier[args] , identifier[kwargs] , identifier[options] , identifier[bound] ) identifier[self] . identifier[_assert_no_missing_args] ( identifier[args] , identifier[kwargs] , identifier[bound] ) keyword[return] identifier[args] , identifier[kwargs]
def construct_arguments(self, args, kwargs, options, bound=False): """ Construct args list and kwargs dictionary for this signature. They are created such that: - the original explicit call arguments (args, kwargs) are preserved - missing arguments are filled in by name using options (if possible) - default arguments are overridden by options - TypeError is thrown if: * kwargs contains one or more unexpected keyword arguments * conflicting values for a parameter in both args and kwargs * there is an unfilled parameter at the end of this process """ expected_args = self._get_expected_args(bound) self._assert_no_unexpected_args(expected_args, args) self._assert_no_unexpected_kwargs(expected_args, kwargs) self._assert_no_duplicate_args(expected_args, args, kwargs) (args, kwargs) = self._fill_in_options(args, kwargs, options, bound) self._assert_no_missing_args(args, kwargs, bound) return (args, kwargs)
def cleanlines(self): """Cleaned code lines. Implemented cleanups: * eventually remove method version * remove docstrings * remove comments * remove empty lines * remove line brackes within brackets * replace `modelutils` with nothing * remove complete lines containing `fastaccess` * replace shortcuts with complete references """ code = inspect.getsource(self.func) code = '\n'.join(code.split('"""')[::2]) code = code.replace('modelutils.', '') for (name, shortcut) in zip(self.collectornames, self.collectorshortcuts): code = code.replace('%s.' % shortcut, 'self.%s.' % name) code = self.remove_linebreaks_within_equations(code) lines = code.splitlines() self.remove_imath_operators(lines) lines[0] = 'def %s(self):' % self.funcname lines = [l.split('#')[0] for l in lines] lines = [l for l in lines if 'fastaccess' not in l] lines = [l.rstrip() for l in lines if l.rstrip()] return Lines(*lines)
def function[cleanlines, parameter[self]]: constant[Cleaned code lines. Implemented cleanups: * eventually remove method version * remove docstrings * remove comments * remove empty lines * remove line brackes within brackets * replace `modelutils` with nothing * remove complete lines containing `fastaccess` * replace shortcuts with complete references ] variable[code] assign[=] call[name[inspect].getsource, parameter[name[self].func]] variable[code] assign[=] call[constant[ ].join, parameter[call[call[name[code].split, parameter[constant["""]]]][<ast.Slice object at 0x7da18f7209d0>]]] variable[code] assign[=] call[name[code].replace, parameter[constant[modelutils.], constant[]]] for taget[tuple[[<ast.Name object at 0x7da18f7224d0>, <ast.Name object at 0x7da18f720850>]]] in starred[call[name[zip], parameter[name[self].collectornames, name[self].collectorshortcuts]]] begin[:] variable[code] assign[=] call[name[code].replace, parameter[binary_operation[constant[%s.] <ast.Mod object at 0x7da2590d6920> name[shortcut]], binary_operation[constant[self.%s.] <ast.Mod object at 0x7da2590d6920> name[name]]]] variable[code] assign[=] call[name[self].remove_linebreaks_within_equations, parameter[name[code]]] variable[lines] assign[=] call[name[code].splitlines, parameter[]] call[name[self].remove_imath_operators, parameter[name[lines]]] call[name[lines]][constant[0]] assign[=] binary_operation[constant[def %s(self):] <ast.Mod object at 0x7da2590d6920> name[self].funcname] variable[lines] assign[=] <ast.ListComp object at 0x7da18f720ee0> variable[lines] assign[=] <ast.ListComp object at 0x7da18f722f20> variable[lines] assign[=] <ast.ListComp object at 0x7da18f721510> return[call[name[Lines], parameter[<ast.Starred object at 0x7da18f7229e0>]]]
keyword[def] identifier[cleanlines] ( identifier[self] ): literal[string] identifier[code] = identifier[inspect] . identifier[getsource] ( identifier[self] . identifier[func] ) identifier[code] = literal[string] . identifier[join] ( identifier[code] . identifier[split] ( literal[string] )[:: literal[int] ]) identifier[code] = identifier[code] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] ( identifier[name] , identifier[shortcut] ) keyword[in] identifier[zip] ( identifier[self] . identifier[collectornames] , identifier[self] . identifier[collectorshortcuts] ): identifier[code] = identifier[code] . identifier[replace] ( literal[string] % identifier[shortcut] , literal[string] % identifier[name] ) identifier[code] = identifier[self] . identifier[remove_linebreaks_within_equations] ( identifier[code] ) identifier[lines] = identifier[code] . identifier[splitlines] () identifier[self] . identifier[remove_imath_operators] ( identifier[lines] ) identifier[lines] [ literal[int] ]= literal[string] % identifier[self] . identifier[funcname] identifier[lines] =[ identifier[l] . identifier[split] ( literal[string] )[ literal[int] ] keyword[for] identifier[l] keyword[in] identifier[lines] ] identifier[lines] =[ identifier[l] keyword[for] identifier[l] keyword[in] identifier[lines] keyword[if] literal[string] keyword[not] keyword[in] identifier[l] ] identifier[lines] =[ identifier[l] . identifier[rstrip] () keyword[for] identifier[l] keyword[in] identifier[lines] keyword[if] identifier[l] . identifier[rstrip] ()] keyword[return] identifier[Lines] (* identifier[lines] )
def cleanlines(self): """Cleaned code lines. Implemented cleanups: * eventually remove method version * remove docstrings * remove comments * remove empty lines * remove line brackes within brackets * replace `modelutils` with nothing * remove complete lines containing `fastaccess` * replace shortcuts with complete references """ code = inspect.getsource(self.func) code = '\n'.join(code.split('"""')[::2]) code = code.replace('modelutils.', '') for (name, shortcut) in zip(self.collectornames, self.collectorshortcuts): code = code.replace('%s.' % shortcut, 'self.%s.' % name) # depends on [control=['for'], data=[]] code = self.remove_linebreaks_within_equations(code) lines = code.splitlines() self.remove_imath_operators(lines) lines[0] = 'def %s(self):' % self.funcname lines = [l.split('#')[0] for l in lines] lines = [l for l in lines if 'fastaccess' not in l] lines = [l.rstrip() for l in lines if l.rstrip()] return Lines(*lines)
def get_alias(self, alias): """ Given a mnemonic, get the alias name(s) it falls under. If there aren't any, you get an empty list. """ alias = alias or {} return [k for k, v in alias.items() if self.mnemonic in v]
def function[get_alias, parameter[self, alias]]: constant[ Given a mnemonic, get the alias name(s) it falls under. If there aren't any, you get an empty list. ] variable[alias] assign[=] <ast.BoolOp object at 0x7da1b22bfaf0> return[<ast.ListComp object at 0x7da1b22bf8e0>]
keyword[def] identifier[get_alias] ( identifier[self] , identifier[alias] ): literal[string] identifier[alias] = identifier[alias] keyword[or] {} keyword[return] [ identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[alias] . identifier[items] () keyword[if] identifier[self] . identifier[mnemonic] keyword[in] identifier[v] ]
def get_alias(self, alias): """ Given a mnemonic, get the alias name(s) it falls under. If there aren't any, you get an empty list. """ alias = alias or {} return [k for (k, v) in alias.items() if self.mnemonic in v]
def parallel_periodicfeatures(pfpkl_list, lcbasedir, outdir, starfeaturesdir=None, fourierorder=5, # these are depth, duration, ingress duration transitparams=(-0.01,0.1,0.1), # these are depth, duration, depth ratio, secphase ebparams=(-0.2,0.3,0.7,0.5), pdiff_threshold=1.0e-4, sidereal_threshold=1.0e-4, sampling_peak_multiplier=5.0, sampling_startp=None, sampling_endp=None, timecols=None, magcols=None, errcols=None, lcformat='hat-sql', lcformatdir=None, sigclip=10.0, verbose=False, maxobjects=None, nworkers=NCPUS): '''This runs periodic feature generation in parallel for all periodfinding pickles in the input list. Parameters ---------- pfpkl_list : list of str The list of period-finding pickles to use. lcbasedir : str The base directory where the associated light curves are located. outdir : str The directory where the results will be written. starfeaturesdir : str or None The directory containing the `starfeatures-<objectid>.pkl` files for each object to use calculate neighbor proximity light curve features. fourierorder : int The Fourier order to use to generate sinusoidal function and fit that to the phased light curve. transitparams : list of floats The transit depth, duration, and ingress duration to use to generate a trapezoid planet transit model fit to the phased light curve. The period used is the one provided in `period`, while the epoch is automatically obtained from a spline fit to the phased light curve. ebparams : list of floats The primary eclipse depth, eclipse duration, the primary-secondary depth ratio, and the phase of the secondary eclipse to use to generate an eclipsing binary model fit to the phased light curve. The period used is the one provided in `period`, while the epoch is automatically obtained from a spline fit to the phased light curve. pdiff_threshold : float This is the max difference between periods to consider them the same. 
sidereal_threshold : float This is the max difference between any of the 'best' periods and the sidereal day periods to consider them the same. sampling_peak_multiplier : float This is the minimum multiplicative factor of a 'best' period's normalized periodogram peak over the sampling periodogram peak at the same period required to accept the 'best' period as possibly real. sampling_startp, sampling_endp : float If the `pgramlist` doesn't have a time-sampling Lomb-Scargle periodogram, it will be obtained automatically. Use these kwargs to control the minimum and maximum period interval to be searched when generating this periodogram. timecols : list of str or None The timecol keys to use from the lcdict in calculating the features. magcols : list of str or None The magcol keys to use from the lcdict in calculating the features. errcols : list of str or None The errcol keys to use from the lcdict in calculating the features. lcformat : str This is the `formatkey` associated with your light curve format, which you previously passed in to the `lcproc.register_lcformat` function. This will be used to look up how to find and read the light curves specified in `basedir` or `use_list_of_filenames`. lcformatdir : str or None If this is provided, gives the path to a directory when you've stored your lcformat description JSONs, other than the usual directories lcproc knows to search for them in. Use this along with `lcformat` to specify an LC format JSON file that's not currently registered with lcproc. sigclip : float or int or sequence of two floats/ints or None If a single float or int, a symmetric sigma-clip will be performed using the number provided as the sigma-multiplier to cut out from the input time-series. If a list of two ints/floats is provided, the function will perform an 'asymmetric' sigma-clip. 
The first element in this list is the sigma value to use for fainter flux/mag values; the second element in this list is the sigma value to use for brighter flux/mag values. For example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma dimmings and greater than 3-sigma brightenings. Here the meaning of "dimming" and "brightening" is set by *physics* (not the magnitude system), which is why the `magsarefluxes` kwarg must be correctly set. If `sigclip` is None, no sigma-clipping will be performed, and the time-series (with non-finite elems removed) will be passed through to the output. verbose : bool If True, will indicate progress while working. maxobjects : int The total number of objects to process from `pfpkl_list`. nworkers : int The number of parallel workers to launch to process the input. Returns ------- dict A dict containing key: val pairs of the input period-finder result and the output periodic feature result pickles for each input pickle is returned. ''' # make sure to make the output directory if it doesn't exist if not os.path.exists(outdir): os.makedirs(outdir) if maxobjects: pfpkl_list = pfpkl_list[:maxobjects] LOGINFO('%s periodfinding pickles to process' % len(pfpkl_list)) # if the starfeaturedir is provided, try to find a starfeatures pickle for # each periodfinding pickle in pfpkl_list if starfeaturesdir and os.path.exists(starfeaturesdir): starfeatures_list = [] LOGINFO('collecting starfeatures pickles...') for pfpkl in pfpkl_list: sfpkl1 = os.path.basename(pfpkl).replace('periodfinding', 'starfeatures') sfpkl2 = sfpkl1.replace('.gz','') sfpath1 = os.path.join(starfeaturesdir, sfpkl1) sfpath2 = os.path.join(starfeaturesdir, sfpkl2) if os.path.exists(sfpath1): starfeatures_list.append(sfpkl1) elif os.path.exists(sfpath2): starfeatures_list.append(sfpkl2) else: starfeatures_list.append(None) else: starfeatures_list = [None for x in pfpkl_list] # generate the task list kwargs = {'fourierorder':fourierorder, 'transitparams':transitparams, 
'ebparams':ebparams, 'pdiff_threshold':pdiff_threshold, 'sidereal_threshold':sidereal_threshold, 'sampling_peak_multiplier':sampling_peak_multiplier, 'sampling_startp':sampling_startp, 'sampling_endp':sampling_endp, 'timecols':timecols, 'magcols':magcols, 'errcols':errcols, 'lcformat':lcformat, 'lcformatdir':lcformat, 'sigclip':sigclip, 'verbose':verbose} tasks = [(x, lcbasedir, outdir, y, kwargs) for (x,y) in zip(pfpkl_list, starfeatures_list)] LOGINFO('processing periodfinding pickles...') with ProcessPoolExecutor(max_workers=nworkers) as executor: resultfutures = executor.map(_periodicfeatures_worker, tasks) results = [x for x in resultfutures] resdict = {os.path.basename(x):y for (x,y) in zip(pfpkl_list, results)} return resdict
def function[parallel_periodicfeatures, parameter[pfpkl_list, lcbasedir, outdir, starfeaturesdir, fourierorder, transitparams, ebparams, pdiff_threshold, sidereal_threshold, sampling_peak_multiplier, sampling_startp, sampling_endp, timecols, magcols, errcols, lcformat, lcformatdir, sigclip, verbose, maxobjects, nworkers]]: constant[This runs periodic feature generation in parallel for all periodfinding pickles in the input list. Parameters ---------- pfpkl_list : list of str The list of period-finding pickles to use. lcbasedir : str The base directory where the associated light curves are located. outdir : str The directory where the results will be written. starfeaturesdir : str or None The directory containing the `starfeatures-<objectid>.pkl` files for each object to use calculate neighbor proximity light curve features. fourierorder : int The Fourier order to use to generate sinusoidal function and fit that to the phased light curve. transitparams : list of floats The transit depth, duration, and ingress duration to use to generate a trapezoid planet transit model fit to the phased light curve. The period used is the one provided in `period`, while the epoch is automatically obtained from a spline fit to the phased light curve. ebparams : list of floats The primary eclipse depth, eclipse duration, the primary-secondary depth ratio, and the phase of the secondary eclipse to use to generate an eclipsing binary model fit to the phased light curve. The period used is the one provided in `period`, while the epoch is automatically obtained from a spline fit to the phased light curve. pdiff_threshold : float This is the max difference between periods to consider them the same. sidereal_threshold : float This is the max difference between any of the 'best' periods and the sidereal day periods to consider them the same. 
sampling_peak_multiplier : float This is the minimum multiplicative factor of a 'best' period's normalized periodogram peak over the sampling periodogram peak at the same period required to accept the 'best' period as possibly real. sampling_startp, sampling_endp : float If the `pgramlist` doesn't have a time-sampling Lomb-Scargle periodogram, it will be obtained automatically. Use these kwargs to control the minimum and maximum period interval to be searched when generating this periodogram. timecols : list of str or None The timecol keys to use from the lcdict in calculating the features. magcols : list of str or None The magcol keys to use from the lcdict in calculating the features. errcols : list of str or None The errcol keys to use from the lcdict in calculating the features. lcformat : str This is the `formatkey` associated with your light curve format, which you previously passed in to the `lcproc.register_lcformat` function. This will be used to look up how to find and read the light curves specified in `basedir` or `use_list_of_filenames`. lcformatdir : str or None If this is provided, gives the path to a directory when you've stored your lcformat description JSONs, other than the usual directories lcproc knows to search for them in. Use this along with `lcformat` to specify an LC format JSON file that's not currently registered with lcproc. sigclip : float or int or sequence of two floats/ints or None If a single float or int, a symmetric sigma-clip will be performed using the number provided as the sigma-multiplier to cut out from the input time-series. If a list of two ints/floats is provided, the function will perform an 'asymmetric' sigma-clip. The first element in this list is the sigma value to use for fainter flux/mag values; the second element in this list is the sigma value to use for brighter flux/mag values. For example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma dimmings and greater than 3-sigma brightenings. 
Here the meaning of "dimming" and "brightening" is set by *physics* (not the magnitude system), which is why the `magsarefluxes` kwarg must be correctly set. If `sigclip` is None, no sigma-clipping will be performed, and the time-series (with non-finite elems removed) will be passed through to the output. verbose : bool If True, will indicate progress while working. maxobjects : int The total number of objects to process from `pfpkl_list`. nworkers : int The number of parallel workers to launch to process the input. Returns ------- dict A dict containing key: val pairs of the input period-finder result and the output periodic feature result pickles for each input pickle is returned. ] if <ast.UnaryOp object at 0x7da2041daaa0> begin[:] call[name[os].makedirs, parameter[name[outdir]]] if name[maxobjects] begin[:] variable[pfpkl_list] assign[=] call[name[pfpkl_list]][<ast.Slice object at 0x7da2041da9b0>] call[name[LOGINFO], parameter[binary_operation[constant[%s periodfinding pickles to process] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[pfpkl_list]]]]]] if <ast.BoolOp object at 0x7da2041da740> begin[:] variable[starfeatures_list] assign[=] list[[]] call[name[LOGINFO], parameter[constant[collecting starfeatures pickles...]]] for taget[name[pfpkl]] in starred[name[pfpkl_list]] begin[:] variable[sfpkl1] assign[=] call[call[name[os].path.basename, parameter[name[pfpkl]]].replace, parameter[constant[periodfinding], constant[starfeatures]]] variable[sfpkl2] assign[=] call[name[sfpkl1].replace, parameter[constant[.gz], constant[]]] variable[sfpath1] assign[=] call[name[os].path.join, parameter[name[starfeaturesdir], name[sfpkl1]]] variable[sfpath2] assign[=] call[name[os].path.join, parameter[name[starfeaturesdir], name[sfpkl2]]] if call[name[os].path.exists, parameter[name[sfpath1]]] begin[:] call[name[starfeatures_list].append, parameter[name[sfpkl1]]] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da2041db190>, <ast.Constant 
object at 0x7da2041dac20>, <ast.Constant object at 0x7da2041da500>, <ast.Constant object at 0x7da2041d9060>, <ast.Constant object at 0x7da2041d88e0>, <ast.Constant object at 0x7da2041d8610>, <ast.Constant object at 0x7da2041db340>, <ast.Constant object at 0x7da2041d8af0>, <ast.Constant object at 0x7da2041d9cf0>, <ast.Constant object at 0x7da2041dae90>, <ast.Constant object at 0x7da2041d9db0>, <ast.Constant object at 0x7da2041d92a0>, <ast.Constant object at 0x7da2041d93c0>, <ast.Constant object at 0x7da2041db6d0>, <ast.Constant object at 0x7da2041da980>], [<ast.Name object at 0x7da2041da050>, <ast.Name object at 0x7da2041dab30>, <ast.Name object at 0x7da2041dab90>, <ast.Name object at 0x7da2041da2f0>, <ast.Name object at 0x7da2041d9240>, <ast.Name object at 0x7da2041d8070>, <ast.Name object at 0x7da2041d8160>, <ast.Name object at 0x7da2041d9d80>, <ast.Name object at 0x7da2041db7c0>, <ast.Name object at 0x7da2041db010>, <ast.Name object at 0x7da2041db130>, <ast.Name object at 0x7da2041daa10>, <ast.Name object at 0x7da2041d9900>, <ast.Name object at 0x7da2041da800>, <ast.Name object at 0x7da2041da380>]] variable[tasks] assign[=] <ast.ListComp object at 0x7da2041d8910> call[name[LOGINFO], parameter[constant[processing periodfinding pickles...]]] with call[name[ProcessPoolExecutor], parameter[]] begin[:] variable[resultfutures] assign[=] call[name[executor].map, parameter[name[_periodicfeatures_worker], name[tasks]]] variable[results] assign[=] <ast.ListComp object at 0x7da18ede5000> variable[resdict] assign[=] <ast.DictComp object at 0x7da18ede4580> return[name[resdict]]
keyword[def] identifier[parallel_periodicfeatures] ( identifier[pfpkl_list] , identifier[lcbasedir] , identifier[outdir] , identifier[starfeaturesdir] = keyword[None] , identifier[fourierorder] = literal[int] , identifier[transitparams] =(- literal[int] , literal[int] , literal[int] ), identifier[ebparams] =(- literal[int] , literal[int] , literal[int] , literal[int] ), identifier[pdiff_threshold] = literal[int] , identifier[sidereal_threshold] = literal[int] , identifier[sampling_peak_multiplier] = literal[int] , identifier[sampling_startp] = keyword[None] , identifier[sampling_endp] = keyword[None] , identifier[timecols] = keyword[None] , identifier[magcols] = keyword[None] , identifier[errcols] = keyword[None] , identifier[lcformat] = literal[string] , identifier[lcformatdir] = keyword[None] , identifier[sigclip] = literal[int] , identifier[verbose] = keyword[False] , identifier[maxobjects] = keyword[None] , identifier[nworkers] = identifier[NCPUS] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[outdir] ): identifier[os] . identifier[makedirs] ( identifier[outdir] ) keyword[if] identifier[maxobjects] : identifier[pfpkl_list] = identifier[pfpkl_list] [: identifier[maxobjects] ] identifier[LOGINFO] ( literal[string] % identifier[len] ( identifier[pfpkl_list] )) keyword[if] identifier[starfeaturesdir] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[starfeaturesdir] ): identifier[starfeatures_list] =[] identifier[LOGINFO] ( literal[string] ) keyword[for] identifier[pfpkl] keyword[in] identifier[pfpkl_list] : identifier[sfpkl1] = identifier[os] . identifier[path] . identifier[basename] ( identifier[pfpkl] ). identifier[replace] ( literal[string] , literal[string] ) identifier[sfpkl2] = identifier[sfpkl1] . identifier[replace] ( literal[string] , literal[string] ) identifier[sfpath1] = identifier[os] . identifier[path] . 
identifier[join] ( identifier[starfeaturesdir] , identifier[sfpkl1] ) identifier[sfpath2] = identifier[os] . identifier[path] . identifier[join] ( identifier[starfeaturesdir] , identifier[sfpkl2] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[sfpath1] ): identifier[starfeatures_list] . identifier[append] ( identifier[sfpkl1] ) keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( identifier[sfpath2] ): identifier[starfeatures_list] . identifier[append] ( identifier[sfpkl2] ) keyword[else] : identifier[starfeatures_list] . identifier[append] ( keyword[None] ) keyword[else] : identifier[starfeatures_list] =[ keyword[None] keyword[for] identifier[x] keyword[in] identifier[pfpkl_list] ] identifier[kwargs] ={ literal[string] : identifier[fourierorder] , literal[string] : identifier[transitparams] , literal[string] : identifier[ebparams] , literal[string] : identifier[pdiff_threshold] , literal[string] : identifier[sidereal_threshold] , literal[string] : identifier[sampling_peak_multiplier] , literal[string] : identifier[sampling_startp] , literal[string] : identifier[sampling_endp] , literal[string] : identifier[timecols] , literal[string] : identifier[magcols] , literal[string] : identifier[errcols] , literal[string] : identifier[lcformat] , literal[string] : identifier[lcformat] , literal[string] : identifier[sigclip] , literal[string] : identifier[verbose] } identifier[tasks] =[( identifier[x] , identifier[lcbasedir] , identifier[outdir] , identifier[y] , identifier[kwargs] ) keyword[for] ( identifier[x] , identifier[y] ) keyword[in] identifier[zip] ( identifier[pfpkl_list] , identifier[starfeatures_list] )] identifier[LOGINFO] ( literal[string] ) keyword[with] identifier[ProcessPoolExecutor] ( identifier[max_workers] = identifier[nworkers] ) keyword[as] identifier[executor] : identifier[resultfutures] = identifier[executor] . 
identifier[map] ( identifier[_periodicfeatures_worker] , identifier[tasks] ) identifier[results] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[resultfutures] ] identifier[resdict] ={ identifier[os] . identifier[path] . identifier[basename] ( identifier[x] ): identifier[y] keyword[for] ( identifier[x] , identifier[y] ) keyword[in] identifier[zip] ( identifier[pfpkl_list] , identifier[results] )} keyword[return] identifier[resdict]
def parallel_periodicfeatures(pfpkl_list, lcbasedir, outdir, starfeaturesdir=None, fourierorder=5, transitparams=(-0.01, 0.1, 0.1), ebparams=(-0.2, 0.3, 0.7, 0.5), pdiff_threshold=0.0001, sidereal_threshold=0.0001, sampling_peak_multiplier=5.0, sampling_startp=None, sampling_endp=None, timecols=None, magcols=None, errcols=None, lcformat='hat-sql', lcformatdir=None, sigclip=10.0, verbose=False, maxobjects=None, nworkers=NCPUS): # these are depth, duration, ingress duration # these are depth, duration, depth ratio, secphase 'This runs periodic feature generation in parallel for all periodfinding\n pickles in the input list.\n\n Parameters\n ----------\n\n pfpkl_list : list of str\n The list of period-finding pickles to use.\n\n lcbasedir : str\n The base directory where the associated light curves are located.\n\n outdir : str\n The directory where the results will be written.\n\n starfeaturesdir : str or None\n The directory containing the `starfeatures-<objectid>.pkl` files for\n each object to use calculate neighbor proximity light curve features.\n\n fourierorder : int\n The Fourier order to use to generate sinusoidal function and fit that to\n the phased light curve.\n\n transitparams : list of floats\n The transit depth, duration, and ingress duration to use to generate a\n trapezoid planet transit model fit to the phased light curve. The period\n used is the one provided in `period`, while the epoch is automatically\n obtained from a spline fit to the phased light curve.\n\n ebparams : list of floats\n The primary eclipse depth, eclipse duration, the primary-secondary depth\n ratio, and the phase of the secondary eclipse to use to generate an\n eclipsing binary model fit to the phased light curve. 
The period used is\n the one provided in `period`, while the epoch is automatically obtained\n from a spline fit to the phased light curve.\n\n pdiff_threshold : float\n This is the max difference between periods to consider them the same.\n\n sidereal_threshold : float\n This is the max difference between any of the \'best\' periods and the\n sidereal day periods to consider them the same.\n\n sampling_peak_multiplier : float\n This is the minimum multiplicative factor of a \'best\' period\'s\n normalized periodogram peak over the sampling periodogram peak at the\n same period required to accept the \'best\' period as possibly real.\n\n sampling_startp, sampling_endp : float\n If the `pgramlist` doesn\'t have a time-sampling Lomb-Scargle\n periodogram, it will be obtained automatically. Use these kwargs to\n control the minimum and maximum period interval to be searched when\n generating this periodogram.\n\n timecols : list of str or None\n The timecol keys to use from the lcdict in calculating the features.\n\n magcols : list of str or None\n The magcol keys to use from the lcdict in calculating the features.\n\n errcols : list of str or None\n The errcol keys to use from the lcdict in calculating the features.\n\n lcformat : str\n This is the `formatkey` associated with your light curve format, which\n you previously passed in to the `lcproc.register_lcformat`\n function. This will be used to look up how to find and read the light\n curves specified in `basedir` or `use_list_of_filenames`.\n\n lcformatdir : str or None\n If this is provided, gives the path to a directory when you\'ve stored\n your lcformat description JSONs, other than the usual directories lcproc\n knows to search for them in. 
Use this along with `lcformat` to specify\n an LC format JSON file that\'s not currently registered with lcproc.\n\n sigclip : float or int or sequence of two floats/ints or None\n If a single float or int, a symmetric sigma-clip will be performed using\n the number provided as the sigma-multiplier to cut out from the input\n time-series.\n\n If a list of two ints/floats is provided, the function will perform an\n \'asymmetric\' sigma-clip. The first element in this list is the sigma\n value to use for fainter flux/mag values; the second element in this\n list is the sigma value to use for brighter flux/mag values. For\n example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma\n dimmings and greater than 3-sigma brightenings. Here the meaning of\n "dimming" and "brightening" is set by *physics* (not the magnitude\n system), which is why the `magsarefluxes` kwarg must be correctly set.\n\n If `sigclip` is None, no sigma-clipping will be performed, and the\n time-series (with non-finite elems removed) will be passed through to\n the output.\n\n verbose : bool\n If True, will indicate progress while working.\n\n maxobjects : int\n The total number of objects to process from `pfpkl_list`.\n\n nworkers : int\n The number of parallel workers to launch to process the input.\n\n Returns\n -------\n\n dict\n A dict containing key: val pairs of the input period-finder result and\n the output periodic feature result pickles for each input pickle is\n returned.\n\n ' # make sure to make the output directory if it doesn't exist if not os.path.exists(outdir): os.makedirs(outdir) # depends on [control=['if'], data=[]] if maxobjects: pfpkl_list = pfpkl_list[:maxobjects] # depends on [control=['if'], data=[]] LOGINFO('%s periodfinding pickles to process' % len(pfpkl_list)) # if the starfeaturedir is provided, try to find a starfeatures pickle for # each periodfinding pickle in pfpkl_list if starfeaturesdir and os.path.exists(starfeaturesdir): starfeatures_list = [] 
LOGINFO('collecting starfeatures pickles...') for pfpkl in pfpkl_list: sfpkl1 = os.path.basename(pfpkl).replace('periodfinding', 'starfeatures') sfpkl2 = sfpkl1.replace('.gz', '') sfpath1 = os.path.join(starfeaturesdir, sfpkl1) sfpath2 = os.path.join(starfeaturesdir, sfpkl2) if os.path.exists(sfpath1): starfeatures_list.append(sfpkl1) # depends on [control=['if'], data=[]] elif os.path.exists(sfpath2): starfeatures_list.append(sfpkl2) # depends on [control=['if'], data=[]] else: starfeatures_list.append(None) # depends on [control=['for'], data=['pfpkl']] # depends on [control=['if'], data=[]] else: starfeatures_list = [None for x in pfpkl_list] # generate the task list kwargs = {'fourierorder': fourierorder, 'transitparams': transitparams, 'ebparams': ebparams, 'pdiff_threshold': pdiff_threshold, 'sidereal_threshold': sidereal_threshold, 'sampling_peak_multiplier': sampling_peak_multiplier, 'sampling_startp': sampling_startp, 'sampling_endp': sampling_endp, 'timecols': timecols, 'magcols': magcols, 'errcols': errcols, 'lcformat': lcformat, 'lcformatdir': lcformat, 'sigclip': sigclip, 'verbose': verbose} tasks = [(x, lcbasedir, outdir, y, kwargs) for (x, y) in zip(pfpkl_list, starfeatures_list)] LOGINFO('processing periodfinding pickles...') with ProcessPoolExecutor(max_workers=nworkers) as executor: resultfutures = executor.map(_periodicfeatures_worker, tasks) # depends on [control=['with'], data=['executor']] results = [x for x in resultfutures] resdict = {os.path.basename(x): y for (x, y) in zip(pfpkl_list, results)} return resdict
def make_definition_classes(mod): """Dynamically create the definition classes from module 'mod'.""" rootpath = "oandapyV20" PTH = "{}.definitions.{}".format(rootpath, mod) M = import_module(PTH) __ALL__ = [] # construct the __all__ variable for cls, cldef in M.definitions.items(): orig, fiV = next(six.iteritems(cldef)) fiK = orig.replace('-', '_') # create the docstring dynamically clsdoc = dyndoc.format(cls=cls, PTH=PTH, mod=mod, firstItem=fiK, orig=orig, firstItemVal=fiV) # Since we can't change the docstring afterwards (it's readonly) # figure this out before and not during ... for K, V in cldef.items(): attrName = K if "-" in K: attrName = K.replace('-', '_') adoc = _doc.format(K, attrName, K) clsdoc += adoc # the class dyncls = type(cls, (object,), {'__doc__': clsdoc}) definitions = dict() for K, V in cldef.items(): attrName = K if "-" in K: attrName = K.replace('-', '_') setattr(dyncls, attrName, K) # set as class attributes definitions.update({K: V}) # for mapping by __getitem__ def mkgi(): def __getitem__(self, definitionID): """return description for definitionID.""" return self._definitions[definitionID] return __getitem__ def mkinit(definitions): def __init__(self): self._definitions = definitions return __init__ def mkPropDefinitions(): def definitions(self): """readonly property holding definition dict.""" return self._definitions return property(definitions) setattr(dyncls, "__getitem__", mkgi()) setattr(dyncls, "__init__", mkinit(definitions)) setattr(dyncls, "definitions", mkPropDefinitions()) setattr(sys.modules["{}.definitions.{}".format(rootpath, mod)], cls, dyncls) __ALL__.append(cls) setattr(sys.modules["{}.definitions.{}".format(rootpath, mod)], "__all__", tuple(__ALL__))
def function[make_definition_classes, parameter[mod]]: constant[Dynamically create the definition classes from module 'mod'.] variable[rootpath] assign[=] constant[oandapyV20] variable[PTH] assign[=] call[constant[{}.definitions.{}].format, parameter[name[rootpath], name[mod]]] variable[M] assign[=] call[name[import_module], parameter[name[PTH]]] variable[__ALL__] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da204566830>, <ast.Name object at 0x7da204564f10>]]] in starred[call[name[M].definitions.items, parameter[]]] begin[:] <ast.Tuple object at 0x7da204566620> assign[=] call[name[next], parameter[call[name[six].iteritems, parameter[name[cldef]]]]] variable[fiK] assign[=] call[name[orig].replace, parameter[constant[-], constant[_]]] variable[clsdoc] assign[=] call[name[dyndoc].format, parameter[]] for taget[tuple[[<ast.Name object at 0x7da204565ea0>, <ast.Name object at 0x7da204566aa0>]]] in starred[call[name[cldef].items, parameter[]]] begin[:] variable[attrName] assign[=] name[K] if compare[constant[-] in name[K]] begin[:] variable[attrName] assign[=] call[name[K].replace, parameter[constant[-], constant[_]]] variable[adoc] assign[=] call[name[_doc].format, parameter[name[K], name[attrName], name[K]]] <ast.AugAssign object at 0x7da2045677c0> variable[dyncls] assign[=] call[name[type], parameter[name[cls], tuple[[<ast.Name object at 0x7da204567a90>]], dictionary[[<ast.Constant object at 0x7da2045678b0>], [<ast.Name object at 0x7da2045664d0>]]]] variable[definitions] assign[=] call[name[dict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c76feb0>, <ast.Name object at 0x7da20c76eb60>]]] in starred[call[name[cldef].items, parameter[]]] begin[:] variable[attrName] assign[=] name[K] if compare[constant[-] in name[K]] begin[:] variable[attrName] assign[=] call[name[K].replace, parameter[constant[-], constant[_]]] call[name[setattr], parameter[name[dyncls], name[attrName], name[K]]] call[name[definitions].update, parameter[dictionary[[<ast.Name 
object at 0x7da20c76ec50>], [<ast.Name object at 0x7da20c76ec20>]]]] def function[mkgi, parameter[]]: def function[__getitem__, parameter[self, definitionID]]: constant[return description for definitionID.] return[call[name[self]._definitions][name[definitionID]]] return[name[__getitem__]] def function[mkinit, parameter[definitions]]: def function[__init__, parameter[self]]: name[self]._definitions assign[=] name[definitions] return[name[__init__]] def function[mkPropDefinitions, parameter[]]: def function[definitions, parameter[self]]: constant[readonly property holding definition dict.] return[name[self]._definitions] return[call[name[property], parameter[name[definitions]]]] call[name[setattr], parameter[name[dyncls], constant[__getitem__], call[name[mkgi], parameter[]]]] call[name[setattr], parameter[name[dyncls], constant[__init__], call[name[mkinit], parameter[name[definitions]]]]] call[name[setattr], parameter[name[dyncls], constant[definitions], call[name[mkPropDefinitions], parameter[]]]] call[name[setattr], parameter[call[name[sys].modules][call[constant[{}.definitions.{}].format, parameter[name[rootpath], name[mod]]]], name[cls], name[dyncls]]] call[name[__ALL__].append, parameter[name[cls]]] call[name[setattr], parameter[call[name[sys].modules][call[constant[{}.definitions.{}].format, parameter[name[rootpath], name[mod]]]], constant[__all__], call[name[tuple], parameter[name[__ALL__]]]]]
keyword[def] identifier[make_definition_classes] ( identifier[mod] ): literal[string] identifier[rootpath] = literal[string] identifier[PTH] = literal[string] . identifier[format] ( identifier[rootpath] , identifier[mod] ) identifier[M] = identifier[import_module] ( identifier[PTH] ) identifier[__ALL__] =[] keyword[for] identifier[cls] , identifier[cldef] keyword[in] identifier[M] . identifier[definitions] . identifier[items] (): identifier[orig] , identifier[fiV] = identifier[next] ( identifier[six] . identifier[iteritems] ( identifier[cldef] )) identifier[fiK] = identifier[orig] . identifier[replace] ( literal[string] , literal[string] ) identifier[clsdoc] = identifier[dyndoc] . identifier[format] ( identifier[cls] = identifier[cls] , identifier[PTH] = identifier[PTH] , identifier[mod] = identifier[mod] , identifier[firstItem] = identifier[fiK] , identifier[orig] = identifier[orig] , identifier[firstItemVal] = identifier[fiV] ) keyword[for] identifier[K] , identifier[V] keyword[in] identifier[cldef] . identifier[items] (): identifier[attrName] = identifier[K] keyword[if] literal[string] keyword[in] identifier[K] : identifier[attrName] = identifier[K] . identifier[replace] ( literal[string] , literal[string] ) identifier[adoc] = identifier[_doc] . identifier[format] ( identifier[K] , identifier[attrName] , identifier[K] ) identifier[clsdoc] += identifier[adoc] identifier[dyncls] = identifier[type] ( identifier[cls] ,( identifier[object] ,),{ literal[string] : identifier[clsdoc] }) identifier[definitions] = identifier[dict] () keyword[for] identifier[K] , identifier[V] keyword[in] identifier[cldef] . identifier[items] (): identifier[attrName] = identifier[K] keyword[if] literal[string] keyword[in] identifier[K] : identifier[attrName] = identifier[K] . identifier[replace] ( literal[string] , literal[string] ) identifier[setattr] ( identifier[dyncls] , identifier[attrName] , identifier[K] ) identifier[definitions] . 
identifier[update] ({ identifier[K] : identifier[V] }) keyword[def] identifier[mkgi] (): keyword[def] identifier[__getitem__] ( identifier[self] , identifier[definitionID] ): literal[string] keyword[return] identifier[self] . identifier[_definitions] [ identifier[definitionID] ] keyword[return] identifier[__getitem__] keyword[def] identifier[mkinit] ( identifier[definitions] ): keyword[def] identifier[__init__] ( identifier[self] ): identifier[self] . identifier[_definitions] = identifier[definitions] keyword[return] identifier[__init__] keyword[def] identifier[mkPropDefinitions] (): keyword[def] identifier[definitions] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[_definitions] keyword[return] identifier[property] ( identifier[definitions] ) identifier[setattr] ( identifier[dyncls] , literal[string] , identifier[mkgi] ()) identifier[setattr] ( identifier[dyncls] , literal[string] , identifier[mkinit] ( identifier[definitions] )) identifier[setattr] ( identifier[dyncls] , literal[string] , identifier[mkPropDefinitions] ()) identifier[setattr] ( identifier[sys] . identifier[modules] [ literal[string] . identifier[format] ( identifier[rootpath] , identifier[mod] )], identifier[cls] , identifier[dyncls] ) identifier[__ALL__] . identifier[append] ( identifier[cls] ) identifier[setattr] ( identifier[sys] . identifier[modules] [ literal[string] . identifier[format] ( identifier[rootpath] , identifier[mod] )], literal[string] , identifier[tuple] ( identifier[__ALL__] ))
def make_definition_classes(mod): """Dynamically create the definition classes from module 'mod'.""" rootpath = 'oandapyV20' PTH = '{}.definitions.{}'.format(rootpath, mod) M = import_module(PTH) __ALL__ = [] # construct the __all__ variable for (cls, cldef) in M.definitions.items(): (orig, fiV) = next(six.iteritems(cldef)) fiK = orig.replace('-', '_') # create the docstring dynamically clsdoc = dyndoc.format(cls=cls, PTH=PTH, mod=mod, firstItem=fiK, orig=orig, firstItemVal=fiV) # Since we can't change the docstring afterwards (it's readonly) # figure this out before and not during ... for (K, V) in cldef.items(): attrName = K if '-' in K: attrName = K.replace('-', '_') adoc = _doc.format(K, attrName, K) clsdoc += adoc # depends on [control=['if'], data=['K']] # depends on [control=['for'], data=[]] # the class dyncls = type(cls, (object,), {'__doc__': clsdoc}) definitions = dict() for (K, V) in cldef.items(): attrName = K if '-' in K: attrName = K.replace('-', '_') # depends on [control=['if'], data=['K']] setattr(dyncls, attrName, K) # set as class attributes definitions.update({K: V}) # for mapping by __getitem__ # depends on [control=['for'], data=[]] def mkgi(): def __getitem__(self, definitionID): """return description for definitionID.""" return self._definitions[definitionID] return __getitem__ def mkinit(definitions): def __init__(self): self._definitions = definitions return __init__ def mkPropDefinitions(): def definitions(self): """readonly property holding definition dict.""" return self._definitions return property(definitions) setattr(dyncls, '__getitem__', mkgi()) setattr(dyncls, '__init__', mkinit(definitions)) setattr(dyncls, 'definitions', mkPropDefinitions()) setattr(sys.modules['{}.definitions.{}'.format(rootpath, mod)], cls, dyncls) __ALL__.append(cls) # depends on [control=['for'], data=[]] setattr(sys.modules['{}.definitions.{}'.format(rootpath, mod)], '__all__', tuple(__ALL__))
def dump_database_object(obj: T, fieldlist: Iterable[str]) -> None: """Prints key/value pairs for an object's dictionary.""" log.info(_LINE_EQUALS) log.info("DUMP OF: {}", obj) for f in fieldlist: log.info(u"{f}: {v}", f=f, v=getattr(obj, f)) log.info(_LINE_EQUALS)
def function[dump_database_object, parameter[obj, fieldlist]]: constant[Prints key/value pairs for an object's dictionary.] call[name[log].info, parameter[name[_LINE_EQUALS]]] call[name[log].info, parameter[constant[DUMP OF: {}], name[obj]]] for taget[name[f]] in starred[name[fieldlist]] begin[:] call[name[log].info, parameter[constant[{f}: {v}]]] call[name[log].info, parameter[name[_LINE_EQUALS]]]
keyword[def] identifier[dump_database_object] ( identifier[obj] : identifier[T] , identifier[fieldlist] : identifier[Iterable] [ identifier[str] ])-> keyword[None] : literal[string] identifier[log] . identifier[info] ( identifier[_LINE_EQUALS] ) identifier[log] . identifier[info] ( literal[string] , identifier[obj] ) keyword[for] identifier[f] keyword[in] identifier[fieldlist] : identifier[log] . identifier[info] ( literal[string] , identifier[f] = identifier[f] , identifier[v] = identifier[getattr] ( identifier[obj] , identifier[f] )) identifier[log] . identifier[info] ( identifier[_LINE_EQUALS] )
def dump_database_object(obj: T, fieldlist: Iterable[str]) -> None: """Prints key/value pairs for an object's dictionary.""" log.info(_LINE_EQUALS) log.info('DUMP OF: {}', obj) for f in fieldlist: log.info(u'{f}: {v}', f=f, v=getattr(obj, f)) # depends on [control=['for'], data=['f']] log.info(_LINE_EQUALS)
def output_to_terminal(sources): """Print statistics to the terminal""" results = OrderedDict() for source in sources: if source.get_is_available(): source.update() results.update(source.get_summary()) for key, value in results.items(): sys.stdout.write(str(key) + ": " + str(value) + ", ") sys.stdout.write("\n") sys.exit()
def function[output_to_terminal, parameter[sources]]: constant[Print statistics to the terminal] variable[results] assign[=] call[name[OrderedDict], parameter[]] for taget[name[source]] in starred[name[sources]] begin[:] if call[name[source].get_is_available, parameter[]] begin[:] call[name[source].update, parameter[]] call[name[results].update, parameter[call[name[source].get_summary, parameter[]]]] for taget[tuple[[<ast.Name object at 0x7da204346470>, <ast.Name object at 0x7da204345810>]]] in starred[call[name[results].items, parameter[]]] begin[:] call[name[sys].stdout.write, parameter[binary_operation[binary_operation[binary_operation[call[name[str], parameter[name[key]]] + constant[: ]] + call[name[str], parameter[name[value]]]] + constant[, ]]]] call[name[sys].stdout.write, parameter[constant[ ]]] call[name[sys].exit, parameter[]]
keyword[def] identifier[output_to_terminal] ( identifier[sources] ): literal[string] identifier[results] = identifier[OrderedDict] () keyword[for] identifier[source] keyword[in] identifier[sources] : keyword[if] identifier[source] . identifier[get_is_available] (): identifier[source] . identifier[update] () identifier[results] . identifier[update] ( identifier[source] . identifier[get_summary] ()) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[results] . identifier[items] (): identifier[sys] . identifier[stdout] . identifier[write] ( identifier[str] ( identifier[key] )+ literal[string] + identifier[str] ( identifier[value] )+ literal[string] ) identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] ) identifier[sys] . identifier[exit] ()
def output_to_terminal(sources): """Print statistics to the terminal""" results = OrderedDict() for source in sources: if source.get_is_available(): source.update() results.update(source.get_summary()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['source']] for (key, value) in results.items(): sys.stdout.write(str(key) + ': ' + str(value) + ', ') # depends on [control=['for'], data=[]] sys.stdout.write('\n') sys.exit()
def elements_equal(first, *others): """ Check elements for equality """ f = first lf = list(f) for e in others: le = list(e) if (len(lf) != len(le) or f.tag != e.tag or f.text != e.text or f.tail != e.tail or f.attrib != e.attrib or (not all(map(elements_equal, lf, le))) ): return False return True
def function[elements_equal, parameter[first]]: constant[ Check elements for equality ] variable[f] assign[=] name[first] variable[lf] assign[=] call[name[list], parameter[name[f]]] for taget[name[e]] in starred[name[others]] begin[:] variable[le] assign[=] call[name[list], parameter[name[e]]] if <ast.BoolOp object at 0x7da1b146f9a0> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[elements_equal] ( identifier[first] ,* identifier[others] ): literal[string] identifier[f] = identifier[first] identifier[lf] = identifier[list] ( identifier[f] ) keyword[for] identifier[e] keyword[in] identifier[others] : identifier[le] = identifier[list] ( identifier[e] ) keyword[if] ( identifier[len] ( identifier[lf] )!= identifier[len] ( identifier[le] ) keyword[or] identifier[f] . identifier[tag] != identifier[e] . identifier[tag] keyword[or] identifier[f] . identifier[text] != identifier[e] . identifier[text] keyword[or] identifier[f] . identifier[tail] != identifier[e] . identifier[tail] keyword[or] identifier[f] . identifier[attrib] != identifier[e] . identifier[attrib] keyword[or] ( keyword[not] identifier[all] ( identifier[map] ( identifier[elements_equal] , identifier[lf] , identifier[le] ))) ): keyword[return] keyword[False] keyword[return] keyword[True]
def elements_equal(first, *others): """ Check elements for equality """ f = first lf = list(f) for e in others: le = list(e) if len(lf) != len(le) or f.tag != e.tag or f.text != e.text or (f.tail != e.tail) or (f.attrib != e.attrib) or (not all(map(elements_equal, lf, le))): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']] return True
def _get_method(name, func): "Turns a callable object (like a mock) into a real function" def method(self, *args, **kw): return func(self, *args, **kw) method.__name__ = name return method
def function[_get_method, parameter[name, func]]: constant[Turns a callable object (like a mock) into a real function] def function[method, parameter[self]]: return[call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b08fae00>]]] name[method].__name__ assign[=] name[name] return[name[method]]
keyword[def] identifier[_get_method] ( identifier[name] , identifier[func] ): literal[string] keyword[def] identifier[method] ( identifier[self] ,* identifier[args] ,** identifier[kw] ): keyword[return] identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kw] ) identifier[method] . identifier[__name__] = identifier[name] keyword[return] identifier[method]
def _get_method(name, func): """Turns a callable object (like a mock) into a real function""" def method(self, *args, **kw): return func(self, *args, **kw) method.__name__ = name return method
def logger(self): """ Returns the logger object. :return: the logger :rtype: logger """ if self._logger is None: self._logger = self.new_logger() return self._logger
def function[logger, parameter[self]]: constant[ Returns the logger object. :return: the logger :rtype: logger ] if compare[name[self]._logger is constant[None]] begin[:] name[self]._logger assign[=] call[name[self].new_logger, parameter[]] return[name[self]._logger]
keyword[def] identifier[logger] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_logger] keyword[is] keyword[None] : identifier[self] . identifier[_logger] = identifier[self] . identifier[new_logger] () keyword[return] identifier[self] . identifier[_logger]
def logger(self): """ Returns the logger object. :return: the logger :rtype: logger """ if self._logger is None: self._logger = self.new_logger() # depends on [control=['if'], data=[]] return self._logger
async def close_async(self, exception=None): """ Close down the handler. If the handler has already closed, this will be a no op. An optional exception can be passed in to indicate that the handler was shutdown due to error. :param exception: An optional exception if the handler is closing due to an error. :type exception: Exception """ self.running = False if self.error: return if isinstance(exception, errors.LinkRedirect): self.redirected = exception elif isinstance(exception, EventHubError): self.error = exception elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): self.error = EventHubError(str(exception), exception) elif exception: self.error = EventHubError(str(exception)) else: self.error = EventHubError("This send handler is now closed.") await self._handler.close_async()
<ast.AsyncFunctionDef object at 0x7da1b1c18f70>
keyword[async] keyword[def] identifier[close_async] ( identifier[self] , identifier[exception] = keyword[None] ): literal[string] identifier[self] . identifier[running] = keyword[False] keyword[if] identifier[self] . identifier[error] : keyword[return] keyword[if] identifier[isinstance] ( identifier[exception] , identifier[errors] . identifier[LinkRedirect] ): identifier[self] . identifier[redirected] = identifier[exception] keyword[elif] identifier[isinstance] ( identifier[exception] , identifier[EventHubError] ): identifier[self] . identifier[error] = identifier[exception] keyword[elif] identifier[isinstance] ( identifier[exception] ,( identifier[errors] . identifier[LinkDetach] , identifier[errors] . identifier[ConnectionClose] )): identifier[self] . identifier[error] = identifier[EventHubError] ( identifier[str] ( identifier[exception] ), identifier[exception] ) keyword[elif] identifier[exception] : identifier[self] . identifier[error] = identifier[EventHubError] ( identifier[str] ( identifier[exception] )) keyword[else] : identifier[self] . identifier[error] = identifier[EventHubError] ( literal[string] ) keyword[await] identifier[self] . identifier[_handler] . identifier[close_async] ()
async def close_async(self, exception=None): """ Close down the handler. If the handler has already closed, this will be a no op. An optional exception can be passed in to indicate that the handler was shutdown due to error. :param exception: An optional exception if the handler is closing due to an error. :type exception: Exception """ self.running = False if self.error: return # depends on [control=['if'], data=[]] if isinstance(exception, errors.LinkRedirect): self.redirected = exception # depends on [control=['if'], data=[]] elif isinstance(exception, EventHubError): self.error = exception # depends on [control=['if'], data=[]] elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): self.error = EventHubError(str(exception), exception) # depends on [control=['if'], data=[]] elif exception: self.error = EventHubError(str(exception)) # depends on [control=['if'], data=[]] else: self.error = EventHubError('This send handler is now closed.') await self._handler.close_async()
def _fit_orbit(orb,vxvv,vxvv_err,pot,radec=False,lb=False, customsky=False,lb_to_customsky=None, pmllpmbb_to_customsky=None, tintJ=100,ntintJ=1000,integrate_method='dopr54_c', ro=None,vo=None,obs=None,disp=False): """Fit an orbit to data in a given potential""" # Need to turn this off for speed coords._APY_COORDS_ORIG= coords._APY_COORDS coords._APY_COORDS= False #Import here, because otherwise there is an infinite loop of imports from galpy.actionAngle import actionAngleIsochroneApprox, actionAngle #Mock this up, bc we want to use its orbit-integration routines class mockActionAngleIsochroneApprox(actionAngleIsochroneApprox): def __init__(self,tintJ,ntintJ,pot,integrate_method='dopr54_c'): actionAngle.__init__(self) self._tintJ= tintJ self._ntintJ=ntintJ self._tsJ= nu.linspace(0.,self._tintJ,self._ntintJ) self._integrate_dt= None self._pot= pot self._integrate_method= integrate_method return None tmockAA= mockActionAngleIsochroneApprox(tintJ,ntintJ,pot, integrate_method=integrate_method) opt_vxvv= optimize.fmin_powell(_fit_orbit_mlogl,orb.vxvv, args=(vxvv,vxvv_err,pot,radec,lb, customsky,lb_to_customsky, pmllpmbb_to_customsky, tmockAA, ro,vo,obs), disp=disp) maxLogL= -_fit_orbit_mlogl(opt_vxvv,vxvv,vxvv_err,pot,radec,lb, customsky,lb_to_customsky,pmllpmbb_to_customsky, tmockAA, ro,vo,obs) coords._APY_COORDS= coords._APY_COORDS_ORIG return (opt_vxvv,maxLogL)
def function[_fit_orbit, parameter[orb, vxvv, vxvv_err, pot, radec, lb, customsky, lb_to_customsky, pmllpmbb_to_customsky, tintJ, ntintJ, integrate_method, ro, vo, obs, disp]]: constant[Fit an orbit to data in a given potential] name[coords]._APY_COORDS_ORIG assign[=] name[coords]._APY_COORDS name[coords]._APY_COORDS assign[=] constant[False] from relative_module[galpy.actionAngle] import module[actionAngleIsochroneApprox], module[actionAngle] class class[mockActionAngleIsochroneApprox, parameter[]] begin[:] def function[__init__, parameter[self, tintJ, ntintJ, pot, integrate_method]]: call[name[actionAngle].__init__, parameter[name[self]]] name[self]._tintJ assign[=] name[tintJ] name[self]._ntintJ assign[=] name[ntintJ] name[self]._tsJ assign[=] call[name[nu].linspace, parameter[constant[0.0], name[self]._tintJ, name[self]._ntintJ]] name[self]._integrate_dt assign[=] constant[None] name[self]._pot assign[=] name[pot] name[self]._integrate_method assign[=] name[integrate_method] return[constant[None]] variable[tmockAA] assign[=] call[name[mockActionAngleIsochroneApprox], parameter[name[tintJ], name[ntintJ], name[pot]]] variable[opt_vxvv] assign[=] call[name[optimize].fmin_powell, parameter[name[_fit_orbit_mlogl], name[orb].vxvv]] variable[maxLogL] assign[=] <ast.UnaryOp object at 0x7da1b0da2f50> name[coords]._APY_COORDS assign[=] name[coords]._APY_COORDS_ORIG return[tuple[[<ast.Name object at 0x7da1b0da2500>, <ast.Name object at 0x7da1b0da2380>]]]
keyword[def] identifier[_fit_orbit] ( identifier[orb] , identifier[vxvv] , identifier[vxvv_err] , identifier[pot] , identifier[radec] = keyword[False] , identifier[lb] = keyword[False] , identifier[customsky] = keyword[False] , identifier[lb_to_customsky] = keyword[None] , identifier[pmllpmbb_to_customsky] = keyword[None] , identifier[tintJ] = literal[int] , identifier[ntintJ] = literal[int] , identifier[integrate_method] = literal[string] , identifier[ro] = keyword[None] , identifier[vo] = keyword[None] , identifier[obs] = keyword[None] , identifier[disp] = keyword[False] ): literal[string] identifier[coords] . identifier[_APY_COORDS_ORIG] = identifier[coords] . identifier[_APY_COORDS] identifier[coords] . identifier[_APY_COORDS] = keyword[False] keyword[from] identifier[galpy] . identifier[actionAngle] keyword[import] identifier[actionAngleIsochroneApprox] , identifier[actionAngle] keyword[class] identifier[mockActionAngleIsochroneApprox] ( identifier[actionAngleIsochroneApprox] ): keyword[def] identifier[__init__] ( identifier[self] , identifier[tintJ] , identifier[ntintJ] , identifier[pot] , identifier[integrate_method] = literal[string] ): identifier[actionAngle] . identifier[__init__] ( identifier[self] ) identifier[self] . identifier[_tintJ] = identifier[tintJ] identifier[self] . identifier[_ntintJ] = identifier[ntintJ] identifier[self] . identifier[_tsJ] = identifier[nu] . identifier[linspace] ( literal[int] , identifier[self] . identifier[_tintJ] , identifier[self] . identifier[_ntintJ] ) identifier[self] . identifier[_integrate_dt] = keyword[None] identifier[self] . identifier[_pot] = identifier[pot] identifier[self] . identifier[_integrate_method] = identifier[integrate_method] keyword[return] keyword[None] identifier[tmockAA] = identifier[mockActionAngleIsochroneApprox] ( identifier[tintJ] , identifier[ntintJ] , identifier[pot] , identifier[integrate_method] = identifier[integrate_method] ) identifier[opt_vxvv] = identifier[optimize] . 
identifier[fmin_powell] ( identifier[_fit_orbit_mlogl] , identifier[orb] . identifier[vxvv] , identifier[args] =( identifier[vxvv] , identifier[vxvv_err] , identifier[pot] , identifier[radec] , identifier[lb] , identifier[customsky] , identifier[lb_to_customsky] , identifier[pmllpmbb_to_customsky] , identifier[tmockAA] , identifier[ro] , identifier[vo] , identifier[obs] ), identifier[disp] = identifier[disp] ) identifier[maxLogL] =- identifier[_fit_orbit_mlogl] ( identifier[opt_vxvv] , identifier[vxvv] , identifier[vxvv_err] , identifier[pot] , identifier[radec] , identifier[lb] , identifier[customsky] , identifier[lb_to_customsky] , identifier[pmllpmbb_to_customsky] , identifier[tmockAA] , identifier[ro] , identifier[vo] , identifier[obs] ) identifier[coords] . identifier[_APY_COORDS] = identifier[coords] . identifier[_APY_COORDS_ORIG] keyword[return] ( identifier[opt_vxvv] , identifier[maxLogL] )
def _fit_orbit(orb, vxvv, vxvv_err, pot, radec=False, lb=False, customsky=False, lb_to_customsky=None, pmllpmbb_to_customsky=None, tintJ=100, ntintJ=1000, integrate_method='dopr54_c', ro=None, vo=None, obs=None, disp=False): """Fit an orbit to data in a given potential""" # Need to turn this off for speed coords._APY_COORDS_ORIG = coords._APY_COORDS coords._APY_COORDS = False #Import here, because otherwise there is an infinite loop of imports from galpy.actionAngle import actionAngleIsochroneApprox, actionAngle #Mock this up, bc we want to use its orbit-integration routines class mockActionAngleIsochroneApprox(actionAngleIsochroneApprox): def __init__(self, tintJ, ntintJ, pot, integrate_method='dopr54_c'): actionAngle.__init__(self) self._tintJ = tintJ self._ntintJ = ntintJ self._tsJ = nu.linspace(0.0, self._tintJ, self._ntintJ) self._integrate_dt = None self._pot = pot self._integrate_method = integrate_method return None tmockAA = mockActionAngleIsochroneApprox(tintJ, ntintJ, pot, integrate_method=integrate_method) opt_vxvv = optimize.fmin_powell(_fit_orbit_mlogl, orb.vxvv, args=(vxvv, vxvv_err, pot, radec, lb, customsky, lb_to_customsky, pmllpmbb_to_customsky, tmockAA, ro, vo, obs), disp=disp) maxLogL = -_fit_orbit_mlogl(opt_vxvv, vxvv, vxvv_err, pot, radec, lb, customsky, lb_to_customsky, pmllpmbb_to_customsky, tmockAA, ro, vo, obs) coords._APY_COORDS = coords._APY_COORDS_ORIG return (opt_vxvv, maxLogL)
def pop(self, key):
    """Remove @key from the instance and return the value it held."""
    value = self[key]
    self.remove(key)
    return value
def function[pop, parameter[self, key]]: constant[ Removes @key from the instance, returns its value ] variable[r] assign[=] call[name[self]][name[key]] call[name[self].remove, parameter[name[key]]] return[name[r]]
keyword[def] identifier[pop] ( identifier[self] , identifier[key] ): literal[string] identifier[r] = identifier[self] [ identifier[key] ] identifier[self] . identifier[remove] ( identifier[key] ) keyword[return] identifier[r]
def pop(self, key): """ Removes @key from the instance, returns its value """ r = self[key] self.remove(key) return r
def error_handler(response, **kwargs):
    """Error Handler to surface 4XX and 5XX errors.

    Attached as a callback hook on the Request object.

    Parameters
        response (requests.Response)
            The HTTP response from an API request.
        **kwargs
            Arbitrary keyword arguments.

    Raises
        ClientError (ApiError)
            Raised if response contains a 4XX status code.
        ServerError (ApiError)
            Raised if response contains a 5XX status code.

    Returns
        response (requests.Response)
            The original HTTP response from the API request.
    """
    if 400 <= response.status_code <= 499:
        # Parse the body exactly once: requests.Response.json() re-parses
        # the payload on every call, and the original code called it up to
        # three times for a single error.
        payload = response.json()
        message = payload['error_description'] \
            if 'error_description' in payload \
            else payload['error_detail']
        raise ClientError(response, message)
    elif 500 <= response.status_code <= 599:
        raise ServerError(response)
    return response
def function[error_handler, parameter[response]]: constant[Error Handler to surface 4XX and 5XX errors. Attached as a callback hook on the Request object. Parameters response (requests.Response) The HTTP response from an API request. **kwargs Arbitrary keyword arguments. Raises ClientError (ApiError) Raised if response contains a 4XX status code. ServerError (ApiError) Raised if response contains a 5XX status code. Returns response (requests.Response) The original HTTP response from the API request. ] if compare[constant[400] less_or_equal[<=] name[response].status_code] begin[:] variable[message] assign[=] <ast.IfExp object at 0x7da20c7c9690> <ast.Raise object at 0x7da18dc04580> return[name[response]]
keyword[def] identifier[error_handler] ( identifier[response] ,** identifier[kwargs] ): literal[string] keyword[if] literal[int] <= identifier[response] . identifier[status_code] <= literal[int] : identifier[message] = identifier[response] . identifier[json] ()[ literal[string] ] keyword[if] literal[string] keyword[in] identifier[response] . identifier[json] () keyword[else] identifier[response] . identifier[json] ()[ literal[string] ] keyword[raise] identifier[ClientError] ( identifier[response] , identifier[message] ) keyword[elif] literal[int] <= identifier[response] . identifier[status_code] <= literal[int] : keyword[raise] identifier[ServerError] ( identifier[response] ) keyword[return] identifier[response]
def error_handler(response, **kwargs): """Error Handler to surface 4XX and 5XX errors. Attached as a callback hook on the Request object. Parameters response (requests.Response) The HTTP response from an API request. **kwargs Arbitrary keyword arguments. Raises ClientError (ApiError) Raised if response contains a 4XX status code. ServerError (ApiError) Raised if response contains a 5XX status code. Returns response (requests.Response) The original HTTP response from the API request. """ if 400 <= response.status_code <= 499: message = response.json()['error_description'] if 'error_description' in response.json() else response.json()['error_detail'] raise ClientError(response, message) # depends on [control=['if'], data=[]] elif 500 <= response.status_code <= 599: raise ServerError(response) # depends on [control=['if'], data=[]] return response
def shellcode(executables, use_defaults=True, shell='bash', complete_arguments=None):
    '''
    Provide the shell code required to register a python executable for use
    with the argcomplete module.

    :param str executables: Executables to be completed (when invoked exactly with this name
    :param bool use_defaults: Whether to fallback to readline's default completion when no matches are generated.
    :param str shell: Name of the shell to output code for (bash or tcsh)
    :param complete_arguments: Arguments to call complete with
    :type complete_arguments: list(str) or None
    '''
    # Explicit arguments win; otherwise pick the canned option string.
    if complete_arguments is not None:
        complete_options = " ".join(complete_arguments)
    elif use_defaults:
        complete_options = '-o nospace -o default'
    else:
        complete_options = '-o nospace'

    if shell == 'bash':
        # bash takes a single registration snippet covering all executables.
        executables_list = " ".join(quote(name) for name in executables)
        return bashcode % dict(complete_opts=complete_options,
                               executables=executables_list)

    # tcsh needs one registration stanza per executable.
    return "".join(tcshcode % dict(executable=name) for name in executables)
def function[shellcode, parameter[executables, use_defaults, shell, complete_arguments]]: constant[ Provide the shell code required to register a python executable for use with the argcomplete module. :param str executables: Executables to be completed (when invoked exactly with this name :param bool use_defaults: Whether to fallback to readline's default completion when no matches are generated. :param str shell: Name of the shell to output code for (bash or tcsh) :param complete_arguments: Arguments to call complete with :type complete_arguments: list(str) or None ] if compare[name[complete_arguments] is constant[None]] begin[:] variable[complete_options] assign[=] <ast.IfExp object at 0x7da18eb57880> if compare[name[shell] equal[==] constant[bash]] begin[:] variable[quoted_executables] assign[=] <ast.ListComp object at 0x7da18eb55d20> variable[executables_list] assign[=] call[constant[ ].join, parameter[name[quoted_executables]]] variable[code] assign[=] binary_operation[name[bashcode] <ast.Mod object at 0x7da2590d6920> call[name[dict], parameter[]]] return[name[code]]
keyword[def] identifier[shellcode] ( identifier[executables] , identifier[use_defaults] = keyword[True] , identifier[shell] = literal[string] , identifier[complete_arguments] = keyword[None] ): literal[string] keyword[if] identifier[complete_arguments] keyword[is] keyword[None] : identifier[complete_options] = literal[string] keyword[if] identifier[use_defaults] keyword[else] literal[string] keyword[else] : identifier[complete_options] = literal[string] . identifier[join] ( identifier[complete_arguments] ) keyword[if] identifier[shell] == literal[string] : identifier[quoted_executables] =[ identifier[quote] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[executables] ] identifier[executables_list] = literal[string] . identifier[join] ( identifier[quoted_executables] ) identifier[code] = identifier[bashcode] % identifier[dict] ( identifier[complete_opts] = identifier[complete_options] , identifier[executables] = identifier[executables_list] ) keyword[else] : identifier[code] = literal[string] keyword[for] identifier[executable] keyword[in] identifier[executables] : identifier[code] += identifier[tcshcode] % identifier[dict] ( identifier[executable] = identifier[executable] ) keyword[return] identifier[code]
def shellcode(executables, use_defaults=True, shell='bash', complete_arguments=None): """ Provide the shell code required to register a python executable for use with the argcomplete module. :param str executables: Executables to be completed (when invoked exactly with this name :param bool use_defaults: Whether to fallback to readline's default completion when no matches are generated. :param str shell: Name of the shell to output code for (bash or tcsh) :param complete_arguments: Arguments to call complete with :type complete_arguments: list(str) or None """ if complete_arguments is None: complete_options = '-o nospace -o default' if use_defaults else '-o nospace' # depends on [control=['if'], data=[]] else: complete_options = ' '.join(complete_arguments) if shell == 'bash': quoted_executables = [quote(i) for i in executables] executables_list = ' '.join(quoted_executables) code = bashcode % dict(complete_opts=complete_options, executables=executables_list) # depends on [control=['if'], data=[]] else: code = '' for executable in executables: code += tcshcode % dict(executable=executable) # depends on [control=['for'], data=['executable']] return code
def _MStep(self, l, K, x, z_h1, pi_h1, p_h1, p_h, delta_i_j_s):
    """Compute the M-Step of the EMM algorithm, which is itself an MM algorithm.

    Updates ``pi_h1`` and ``p_h1`` in place; nothing is returned.

    Args:
        l: Index of the current MM iteration; mixing proportions are only
            recomputed on the first iteration (``l == 0``).
        K: Number of mixture components.
        x: Ranking data; ``x[i]`` is voter i's (possibly partial) ranking.
        z_h1: Membership weights from the E-step; ``z_h1[i][k]`` is voter i's
            responsibility for component k.
        pi_h1: Output array of updated mixing proportions (one per component).
        p_h1: Output array of updated per-component item parameters;
            ``p_h1[k][j]`` is renormalized to sum to 1 over j at the end.
        p_h: Item parameters from the previous iteration (read-only here).
        delta_i_j_s: Precomputed indicator/weight terms indexed as
            ``delta_i_j_s[i][j][s]`` — presumably from the MM surrogate
            derivation; see EMMMixPLAggregator — TODO confirm.
    """
    for k in range(K):
        normconst = 0
        if l == 0: # only need to compute pi at first MM iteration
            # Mixing proportion = mean responsibility for component k.
            pi_h1[k] = np.sum(z_h1.T[k]) / len(z_h1)
        for j in range(self.m):
            omega_k_j = EMMMixPLAggregator.omega(k, j, z_h1, x) # numerator
            denom_sum = 0
            for i in range(self.n):
                sum1 = 0
                for t in range(len(x[i])):
                    sum2 = 0
                    sum3 = 0
                    # sum2: total parameter mass of alternatives still ranked
                    # at position t or later in voter i's ranking.
                    for s in range(t, self.m):
                        sum2 += p_h[k][EMMMixPLAggregator.c(x[i], s)]
                    # sum3: accumulated surrogate weights for (i, j) from
                    # position t onward (note the inclusive upper bound m+1).
                    for s in range(t, self.m + 1):
                        sum3 += delta_i_j_s[i][j][s]
                    # Weight by responsibility; sum2 ** -1 inverts the mass.
                    sum1 += z_h1[i][k] * (sum2 ** -1) * sum3
                denom_sum += sum1
            p_h1[k][j] = omega_k_j / denom_sum
            normconst += p_h1[k][j]
        # Renormalize component k's parameters to a probability vector.
        for j in range(self.m):
            p_h1[k][j] /= normconst
def function[_MStep, parameter[self, l, K, x, z_h1, pi_h1, p_h1, p_h, delta_i_j_s]]: constant[ Description: Internal function for computing the M-Step of the EMM algorithm, which is itself an MM algorithm. ] for taget[name[k]] in starred[call[name[range], parameter[name[K]]]] begin[:] variable[normconst] assign[=] constant[0] if compare[name[l] equal[==] constant[0]] begin[:] call[name[pi_h1]][name[k]] assign[=] binary_operation[call[name[np].sum, parameter[call[name[z_h1].T][name[k]]]] / call[name[len], parameter[name[z_h1]]]] for taget[name[j]] in starred[call[name[range], parameter[name[self].m]]] begin[:] variable[omega_k_j] assign[=] call[name[EMMMixPLAggregator].omega, parameter[name[k], name[j], name[z_h1], name[x]]] variable[denom_sum] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[name[self].n]]] begin[:] variable[sum1] assign[=] constant[0] for taget[name[t]] in starred[call[name[range], parameter[call[name[len], parameter[call[name[x]][name[i]]]]]]] begin[:] variable[sum2] assign[=] constant[0] variable[sum3] assign[=] constant[0] for taget[name[s]] in starred[call[name[range], parameter[name[t], name[self].m]]] begin[:] <ast.AugAssign object at 0x7da1b2352290> for taget[name[s]] in starred[call[name[range], parameter[name[t], binary_operation[name[self].m + constant[1]]]]] begin[:] <ast.AugAssign object at 0x7da1b2352590> <ast.AugAssign object at 0x7da1b2352f50> <ast.AugAssign object at 0x7da1b2352080> call[call[name[p_h1]][name[k]]][name[j]] assign[=] binary_operation[name[omega_k_j] / name[denom_sum]] <ast.AugAssign object at 0x7da1b23513c0> for taget[name[j]] in starred[call[name[range], parameter[name[self].m]]] begin[:] <ast.AugAssign object at 0x7da1b2353ee0>
keyword[def] identifier[_MStep] ( identifier[self] , identifier[l] , identifier[K] , identifier[x] , identifier[z_h1] , identifier[pi_h1] , identifier[p_h1] , identifier[p_h] , identifier[delta_i_j_s] ): literal[string] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ): identifier[normconst] = literal[int] keyword[if] identifier[l] == literal[int] : identifier[pi_h1] [ identifier[k] ]= identifier[np] . identifier[sum] ( identifier[z_h1] . identifier[T] [ identifier[k] ])/ identifier[len] ( identifier[z_h1] ) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[m] ): identifier[omega_k_j] = identifier[EMMMixPLAggregator] . identifier[omega] ( identifier[k] , identifier[j] , identifier[z_h1] , identifier[x] ) identifier[denom_sum] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[n] ): identifier[sum1] = literal[int] keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[len] ( identifier[x] [ identifier[i] ])): identifier[sum2] = literal[int] identifier[sum3] = literal[int] keyword[for] identifier[s] keyword[in] identifier[range] ( identifier[t] , identifier[self] . identifier[m] ): identifier[sum2] += identifier[p_h] [ identifier[k] ][ identifier[EMMMixPLAggregator] . identifier[c] ( identifier[x] [ identifier[i] ], identifier[s] )] keyword[for] identifier[s] keyword[in] identifier[range] ( identifier[t] , identifier[self] . 
identifier[m] + literal[int] ): identifier[sum3] += identifier[delta_i_j_s] [ identifier[i] ][ identifier[j] ][ identifier[s] ] identifier[sum1] += identifier[z_h1] [ identifier[i] ][ identifier[k] ]*( identifier[sum2] **- literal[int] )* identifier[sum3] identifier[denom_sum] += identifier[sum1] identifier[p_h1] [ identifier[k] ][ identifier[j] ]= identifier[omega_k_j] / identifier[denom_sum] identifier[normconst] += identifier[p_h1] [ identifier[k] ][ identifier[j] ] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[m] ): identifier[p_h1] [ identifier[k] ][ identifier[j] ]/= identifier[normconst]
def _MStep(self, l, K, x, z_h1, pi_h1, p_h1, p_h, delta_i_j_s): """ Description: Internal function for computing the M-Step of the EMM algorithm, which is itself an MM algorithm. """ for k in range(K): normconst = 0 if l == 0: # only need to compute pi at first MM iteration pi_h1[k] = np.sum(z_h1.T[k]) / len(z_h1) # depends on [control=['if'], data=[]] for j in range(self.m): omega_k_j = EMMMixPLAggregator.omega(k, j, z_h1, x) # numerator denom_sum = 0 for i in range(self.n): sum1 = 0 for t in range(len(x[i])): sum2 = 0 sum3 = 0 for s in range(t, self.m): sum2 += p_h[k][EMMMixPLAggregator.c(x[i], s)] # depends on [control=['for'], data=['s']] for s in range(t, self.m + 1): sum3 += delta_i_j_s[i][j][s] # depends on [control=['for'], data=['s']] sum1 += z_h1[i][k] * sum2 ** (-1) * sum3 # depends on [control=['for'], data=['t']] denom_sum += sum1 # depends on [control=['for'], data=['i']] p_h1[k][j] = omega_k_j / denom_sum normconst += p_h1[k][j] # depends on [control=['for'], data=['j']] for j in range(self.m): p_h1[k][j] /= normconst # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['k']]
def declared(self, annotationtype, set):
    """Checks if the annotation type is present (i.e. declared) in the document.

    Arguments:
        annotationtype: The type of annotation, conveyed either as the
            corresponding annotation class (e.g. :class:`PosAnnotation`) or
            as a member of :class:`AnnotationType` (e.g. ``AnnotationType.POS``).
        set (str): the set, formally a URL pointing to the set definition
            (aliases are also supported)

    Example::

        if doc.declared(folia.PosAnnotation, 'http://some/path/brown-tag-set'):
            ..

    Returns:
        bool
    """
    # Normalize a class argument down to its annotation-type constant.
    if inspect.isclass(annotationtype):
        annotationtype = annotationtype.ANNOTATIONTYPE
    # Direct declaration?
    if (annotationtype, set) in self.annotations:
        return True
    # Otherwise try resolving `set` as an alias (same truthiness chain as
    # the original single-expression form).
    return set in self.alias_set and self.alias_set[set] \
        and (annotationtype, self.alias_set[set]) in self.annotations
def function[declared, parameter[self, annotationtype, set]]: constant[Checks if the annotation type is present (i.e. declared) in the document. Arguments: annotationtype: The type of annotation, this is conveyed by passing the corresponding annototion class (such as :class:`PosAnnotation` for example), or a member of :class:`AnnotationType`, such as ``AnnotationType.POS``. set (str): the set, should formally be a URL pointing to the set definition (aliases are also supported) Example:: if doc.declared(folia.PosAnnotation, 'http://some/path/brown-tag-set'): .. Returns: bool ] if call[name[inspect].isclass, parameter[name[annotationtype]]] begin[:] variable[annotationtype] assign[=] name[annotationtype].ANNOTATIONTYPE return[<ast.BoolOp object at 0x7da18eb57490>]
keyword[def] identifier[declared] ( identifier[self] , identifier[annotationtype] , identifier[set] ): literal[string] keyword[if] identifier[inspect] . identifier[isclass] ( identifier[annotationtype] ): identifier[annotationtype] = identifier[annotationtype] . identifier[ANNOTATIONTYPE] keyword[return] (( identifier[annotationtype] , identifier[set] ) keyword[in] identifier[self] . identifier[annotations] ) keyword[or] ( identifier[set] keyword[in] identifier[self] . identifier[alias_set] keyword[and] identifier[self] . identifier[alias_set] [ identifier[set] ] keyword[and] ( identifier[annotationtype] , identifier[self] . identifier[alias_set] [ identifier[set] ]) keyword[in] identifier[self] . identifier[annotations] )
def declared(self, annotationtype, set): """Checks if the annotation type is present (i.e. declared) in the document. Arguments: annotationtype: The type of annotation, this is conveyed by passing the corresponding annototion class (such as :class:`PosAnnotation` for example), or a member of :class:`AnnotationType`, such as ``AnnotationType.POS``. set (str): the set, should formally be a URL pointing to the set definition (aliases are also supported) Example:: if doc.declared(folia.PosAnnotation, 'http://some/path/brown-tag-set'): .. Returns: bool """ if inspect.isclass(annotationtype): annotationtype = annotationtype.ANNOTATIONTYPE # depends on [control=['if'], data=[]] return (annotationtype, set) in self.annotations or (set in self.alias_set and self.alias_set[set] and ((annotationtype, self.alias_set[set]) in self.annotations))
def update_from_tuple(self, the_tuple):
    """Refresh this task's state and data from a result tuple.

    Raises Queue.ZeroTupleException when the tuple is empty and
    Queue.BadTupleException when the tuple belongs to another task.
    """
    if not the_tuple.rowcount:
        raise Queue.ZeroTupleException("Error updating task")
    first_row = the_tuple[0]
    if first_row[0] != self.task_id:
        raise Queue.BadTupleException("Wrong task: id's are not match")
    self.state = first_row[1]
    self.data = first_row[2]
def function[update_from_tuple, parameter[self, the_tuple]]: constant[ Update task from tuple. ] if <ast.UnaryOp object at 0x7da204347160> begin[:] <ast.Raise object at 0x7da204347f70> variable[row] assign[=] call[name[the_tuple]][constant[0]] if compare[name[self].task_id not_equal[!=] call[name[row]][constant[0]]] begin[:] <ast.Raise object at 0x7da2043459c0> name[self].state assign[=] call[name[row]][constant[1]] name[self].data assign[=] call[name[row]][constant[2]]
keyword[def] identifier[update_from_tuple] ( identifier[self] , identifier[the_tuple] ): literal[string] keyword[if] keyword[not] identifier[the_tuple] . identifier[rowcount] : keyword[raise] identifier[Queue] . identifier[ZeroTupleException] ( literal[string] ) identifier[row] = identifier[the_tuple] [ literal[int] ] keyword[if] identifier[self] . identifier[task_id] != identifier[row] [ literal[int] ]: keyword[raise] identifier[Queue] . identifier[BadTupleException] ( literal[string] ) identifier[self] . identifier[state] = identifier[row] [ literal[int] ] identifier[self] . identifier[data] = identifier[row] [ literal[int] ]
def update_from_tuple(self, the_tuple): """ Update task from tuple. """ if not the_tuple.rowcount: raise Queue.ZeroTupleException('Error updating task') # depends on [control=['if'], data=[]] row = the_tuple[0] if self.task_id != row[0]: raise Queue.BadTupleException("Wrong task: id's are not match") # depends on [control=['if'], data=[]] self.state = row[1] self.data = row[2]
def opt(self, x_init, f_fp=None, f=None, fp=None):
    """
    The simplex optimizer does not require gradients.
    """
    # fmin's warnflag indexes into this list to produce a readable status.
    statuses = ['Converged',
                'Maximum number of function evaluations made',
                'Maximum number of iterations reached']

    # Forward only the tolerances the caller actually configured.
    extra_kwargs = {}
    if self.xtol is not None:
        extra_kwargs['xtol'] = self.xtol
    if self.ftol is not None:
        extra_kwargs['ftol'] = self.ftol
    if self.gtol is not None:
        print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it")

    result = optimize.fmin(f, x_init, (),
                           disp=self.messages,
                           maxfun=self.max_f_eval,
                           full_output=True,
                           **extra_kwargs)
    self.x_opt = result[0]
    self.f_opt = result[1]
    self.funct_eval = result[3]
    self.status = statuses[result[4]]
    self.trace = None
def function[opt, parameter[self, x_init, f_fp, f, fp]]: constant[ The simplex optimizer does not require gradients. ] variable[statuses] assign[=] list[[<ast.Constant object at 0x7da1b0d317b0>, <ast.Constant object at 0x7da1b0d304f0>, <ast.Constant object at 0x7da1b0d32fb0>]] variable[opt_dict] assign[=] dictionary[[], []] if compare[name[self].xtol is_not constant[None]] begin[:] call[name[opt_dict]][constant[xtol]] assign[=] name[self].xtol if compare[name[self].ftol is_not constant[None]] begin[:] call[name[opt_dict]][constant[ftol]] assign[=] name[self].ftol if compare[name[self].gtol is_not constant[None]] begin[:] call[name[print], parameter[constant[WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it]]] variable[opt_result] assign[=] call[name[optimize].fmin, parameter[name[f], name[x_init], tuple[[]]]] name[self].x_opt assign[=] call[name[opt_result]][constant[0]] name[self].f_opt assign[=] call[name[opt_result]][constant[1]] name[self].funct_eval assign[=] call[name[opt_result]][constant[3]] name[self].status assign[=] call[name[statuses]][call[name[opt_result]][constant[4]]] name[self].trace assign[=] constant[None]
keyword[def] identifier[opt] ( identifier[self] , identifier[x_init] , identifier[f_fp] = keyword[None] , identifier[f] = keyword[None] , identifier[fp] = keyword[None] ): literal[string] identifier[statuses] =[ literal[string] , literal[string] , literal[string] ] identifier[opt_dict] ={} keyword[if] identifier[self] . identifier[xtol] keyword[is] keyword[not] keyword[None] : identifier[opt_dict] [ literal[string] ]= identifier[self] . identifier[xtol] keyword[if] identifier[self] . identifier[ftol] keyword[is] keyword[not] keyword[None] : identifier[opt_dict] [ literal[string] ]= identifier[self] . identifier[ftol] keyword[if] identifier[self] . identifier[gtol] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] ) identifier[opt_result] = identifier[optimize] . identifier[fmin] ( identifier[f] , identifier[x_init] ,(), identifier[disp] = identifier[self] . identifier[messages] , identifier[maxfun] = identifier[self] . identifier[max_f_eval] , identifier[full_output] = keyword[True] ,** identifier[opt_dict] ) identifier[self] . identifier[x_opt] = identifier[opt_result] [ literal[int] ] identifier[self] . identifier[f_opt] = identifier[opt_result] [ literal[int] ] identifier[self] . identifier[funct_eval] = identifier[opt_result] [ literal[int] ] identifier[self] . identifier[status] = identifier[statuses] [ identifier[opt_result] [ literal[int] ]] identifier[self] . identifier[trace] = keyword[None]
def opt(self, x_init, f_fp=None, f=None, fp=None): """ The simplex optimizer does not require gradients. """ statuses = ['Converged', 'Maximum number of function evaluations made', 'Maximum number of iterations reached'] opt_dict = {} if self.xtol is not None: opt_dict['xtol'] = self.xtol # depends on [control=['if'], data=[]] if self.ftol is not None: opt_dict['ftol'] = self.ftol # depends on [control=['if'], data=[]] if self.gtol is not None: print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it") # depends on [control=['if'], data=[]] opt_result = optimize.fmin(f, x_init, (), disp=self.messages, maxfun=self.max_f_eval, full_output=True, **opt_dict) self.x_opt = opt_result[0] self.f_opt = opt_result[1] self.funct_eval = opt_result[3] self.status = statuses[opt_result[4]] self.trace = None
def list_versions(self, symbol=None, snapshot=None, latest_only=False):
    """
    Return a list of versions filtered by the passed in parameters.

    Parameters
    ----------
    symbol : `str`
        Symbol to return versions for.  If None returns versions across
        all symbols in the library.
    snapshot : `str`
        Return the versions contained in the named snapshot
    latest_only : `bool`
        Only include the latest version for a specific symbol

    Returns
    -------
    List of dictionaries describing the discovered versions in the library
    """
    if symbol is None:
        symbols = self.list_symbols(snapshot=snapshot)
    else:
        symbols = [symbol]
    query = {}
    if snapshot is not None:
        try:
            # find_one returns None for an unknown snapshot; subscripting
            # None raises TypeError, which we surface as NoDataFoundException.
            query['parent'] = self._snapshots.find_one({'name': snapshot})['_id']
        except TypeError:
            raise NoDataFoundException('No snapshot %s in library %s' % (snapshot, self._arctic_lib.get_name()))
    versions = []
    # Map snapshot _id -> name once, so each version row can resolve its
    # parent snapshots without extra queries.
    snapshots = {ss.get('_id'): ss.get('name') for ss in self._snapshots.find()}
    for symbol in symbols:
        # `query` is reused across symbols; only the 'symbol' key changes.
        query['symbol'] = symbol
        seen_symbols = set()
        # Sorted by descending version, so the first row per symbol is the
        # latest — that is what makes the latest_only skip below correct.
        for version in self._versions.find(query, projection=['symbol', 'version', 'parent', 'metadata.deleted'], sort=[('version', -1)]):
            if latest_only and version['symbol'] in seen_symbols:
                continue
            seen_symbols.add(version['symbol'])
            meta = version.get('metadata')
            versions.append({'symbol': version['symbol'], 'version': version['version'],
                             'deleted': meta.get('deleted', False) if meta else False,
                             # We return offset-aware datetimes in Local Time.
                             'date': ms_to_datetime(datetime_to_ms(version['_id'].generation_time)),
                             # Only snapshots that still exist are reported.
                             'snapshots': [snapshots[s] for s in version.get('parent', []) if s in snapshots]})
    return versions
def function[list_versions, parameter[self, symbol, snapshot, latest_only]]: constant[ Return a list of versions filtered by the passed in parameters. Parameters ---------- symbol : `str` Symbol to return versions for. If None returns versions across all symbols in the library. snapshot : `str` Return the versions contained in the named snapshot latest_only : `bool` Only include the latest version for a specific symbol Returns ------- List of dictionaries describing the discovered versions in the library ] if compare[name[symbol] is constant[None]] begin[:] variable[symbols] assign[=] call[name[self].list_symbols, parameter[]] variable[query] assign[=] dictionary[[], []] if compare[name[snapshot] is_not constant[None]] begin[:] <ast.Try object at 0x7da20c76dab0> variable[versions] assign[=] list[[]] variable[snapshots] assign[=] <ast.DictComp object at 0x7da20c76eef0> for taget[name[symbol]] in starred[name[symbols]] begin[:] call[name[query]][constant[symbol]] assign[=] name[symbol] variable[seen_symbols] assign[=] call[name[set], parameter[]] for taget[name[version]] in starred[call[name[self]._versions.find, parameter[name[query]]]] begin[:] if <ast.BoolOp object at 0x7da20c76c2e0> begin[:] continue call[name[seen_symbols].add, parameter[call[name[version]][constant[symbol]]]] variable[meta] assign[=] call[name[version].get, parameter[constant[metadata]]] call[name[versions].append, parameter[dictionary[[<ast.Constant object at 0x7da20c76e530>, <ast.Constant object at 0x7da20c76ce50>, <ast.Constant object at 0x7da20c76d780>, <ast.Constant object at 0x7da20c76c0d0>, <ast.Constant object at 0x7da20c76d3f0>], [<ast.Subscript object at 0x7da20c76d090>, <ast.Subscript object at 0x7da20c76ff70>, <ast.IfExp object at 0x7da20c76da50>, <ast.Call object at 0x7da1b1b441c0>, <ast.ListComp object at 0x7da20c76f700>]]]] return[name[versions]]
keyword[def] identifier[list_versions] ( identifier[self] , identifier[symbol] = keyword[None] , identifier[snapshot] = keyword[None] , identifier[latest_only] = keyword[False] ): literal[string] keyword[if] identifier[symbol] keyword[is] keyword[None] : identifier[symbols] = identifier[self] . identifier[list_symbols] ( identifier[snapshot] = identifier[snapshot] ) keyword[else] : identifier[symbols] =[ identifier[symbol] ] identifier[query] ={} keyword[if] identifier[snapshot] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[query] [ literal[string] ]= identifier[self] . identifier[_snapshots] . identifier[find_one] ({ literal[string] : identifier[snapshot] })[ literal[string] ] keyword[except] identifier[TypeError] : keyword[raise] identifier[NoDataFoundException] ( literal[string] %( identifier[snapshot] , identifier[self] . identifier[_arctic_lib] . identifier[get_name] ())) identifier[versions] =[] identifier[snapshots] ={ identifier[ss] . identifier[get] ( literal[string] ): identifier[ss] . identifier[get] ( literal[string] ) keyword[for] identifier[ss] keyword[in] identifier[self] . identifier[_snapshots] . identifier[find] ()} keyword[for] identifier[symbol] keyword[in] identifier[symbols] : identifier[query] [ literal[string] ]= identifier[symbol] identifier[seen_symbols] = identifier[set] () keyword[for] identifier[version] keyword[in] identifier[self] . identifier[_versions] . identifier[find] ( identifier[query] , identifier[projection] =[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[sort] =[( literal[string] ,- literal[int] )]): keyword[if] identifier[latest_only] keyword[and] identifier[version] [ literal[string] ] keyword[in] identifier[seen_symbols] : keyword[continue] identifier[seen_symbols] . identifier[add] ( identifier[version] [ literal[string] ]) identifier[meta] = identifier[version] . identifier[get] ( literal[string] ) identifier[versions] . 
identifier[append] ({ literal[string] : identifier[version] [ literal[string] ], literal[string] : identifier[version] [ literal[string] ], literal[string] : identifier[meta] . identifier[get] ( literal[string] , keyword[False] ) keyword[if] identifier[meta] keyword[else] keyword[False] , literal[string] : identifier[ms_to_datetime] ( identifier[datetime_to_ms] ( identifier[version] [ literal[string] ]. identifier[generation_time] )), literal[string] :[ identifier[snapshots] [ identifier[s] ] keyword[for] identifier[s] keyword[in] identifier[version] . identifier[get] ( literal[string] ,[]) keyword[if] identifier[s] keyword[in] identifier[snapshots] ]}) keyword[return] identifier[versions]
def list_versions(self, symbol=None, snapshot=None, latest_only=False): """ Return a list of versions filtered by the passed in parameters. Parameters ---------- symbol : `str` Symbol to return versions for. If None returns versions across all symbols in the library. snapshot : `str` Return the versions contained in the named snapshot latest_only : `bool` Only include the latest version for a specific symbol Returns ------- List of dictionaries describing the discovered versions in the library """ if symbol is None: symbols = self.list_symbols(snapshot=snapshot) # depends on [control=['if'], data=[]] else: symbols = [symbol] query = {} if snapshot is not None: try: query['parent'] = self._snapshots.find_one({'name': snapshot})['_id'] # depends on [control=['try'], data=[]] except TypeError: raise NoDataFoundException('No snapshot %s in library %s' % (snapshot, self._arctic_lib.get_name())) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['snapshot']] versions = [] snapshots = {ss.get('_id'): ss.get('name') for ss in self._snapshots.find()} for symbol in symbols: query['symbol'] = symbol seen_symbols = set() for version in self._versions.find(query, projection=['symbol', 'version', 'parent', 'metadata.deleted'], sort=[('version', -1)]): if latest_only and version['symbol'] in seen_symbols: continue # depends on [control=['if'], data=[]] seen_symbols.add(version['symbol']) meta = version.get('metadata') # We return offset-aware datetimes in Local Time. versions.append({'symbol': version['symbol'], 'version': version['version'], 'deleted': meta.get('deleted', False) if meta else False, 'date': ms_to_datetime(datetime_to_ms(version['_id'].generation_time)), 'snapshots': [snapshots[s] for s in version.get('parent', []) if s in snapshots]}) # depends on [control=['for'], data=['version']] # depends on [control=['for'], data=['symbol']] return versions
def forwards(self, orm): "Write your forwards methods here." for document in orm['document_library.Document'].objects.all(): self.migrate_placeholder( orm, document, 'content', 'document_library_content', 'content')
def function[forwards, parameter[self, orm]]: constant[Write your forwards methods here.] for taget[name[document]] in starred[call[call[name[orm]][constant[document_library.Document]].objects.all, parameter[]]] begin[:] call[name[self].migrate_placeholder, parameter[name[orm], name[document], constant[content], constant[document_library_content], constant[content]]]
keyword[def] identifier[forwards] ( identifier[self] , identifier[orm] ): literal[string] keyword[for] identifier[document] keyword[in] identifier[orm] [ literal[string] ]. identifier[objects] . identifier[all] (): identifier[self] . identifier[migrate_placeholder] ( identifier[orm] , identifier[document] , literal[string] , literal[string] , literal[string] )
def forwards(self, orm): """Write your forwards methods here.""" for document in orm['document_library.Document'].objects.all(): self.migrate_placeholder(orm, document, 'content', 'document_library_content', 'content') # depends on [control=['for'], data=['document']]
def confirm_login_allowed(self, user): """ Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None. """ if not user.is_active: raise forms.ValidationError( self.error_messages['inactive'], code='inactive', )
def function[confirm_login_allowed, parameter[self, user]]: constant[ Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None. ] if <ast.UnaryOp object at 0x7da1b0f61510> begin[:] <ast.Raise object at 0x7da1b0f61660>
keyword[def] identifier[confirm_login_allowed] ( identifier[self] , identifier[user] ): literal[string] keyword[if] keyword[not] identifier[user] . identifier[is_active] : keyword[raise] identifier[forms] . identifier[ValidationError] ( identifier[self] . identifier[error_messages] [ literal[string] ], identifier[code] = literal[string] , )
def confirm_login_allowed(self, user): """ Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None. """ if not user.is_active: raise forms.ValidationError(self.error_messages['inactive'], code='inactive') # depends on [control=['if'], data=[]]
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, use_decimal=False, **kw): """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. *encoding* determines the encoding used to interpret any :class:`str` objects decoded by this instance (``'utf-8'`` by default). It has no effect when decoding :class:`unicode` objects. Note that currently only encodings that are a superset of ASCII work, strings of other encodings should be passed in as :class:`unicode`. *object_hook*, if specified, will be called with the result of every JSON object decoded and its return value will be used in place of the given :class:`dict`. This can be used to provide custom deserializations (e.g. to support JSON-RPC class hinting). *object_pairs_hook* is an optional function that will be called with the result of any object literal decode with an ordered list of pairs. The return value of *object_pairs_hook* will be used instead of the :class:`dict`. This feature can be used to implement custom decoders that rely on the order that the key and value pairs are decoded (for example, :func:`collections.OrderedDict` will remember the order of insertion). If *object_hook* is also defined, the *object_pairs_hook* takes priority. *parse_float*, if specified, will be called with the string of every JSON float to be decoded. By default, this is equivalent to ``float(num_str)``. This can be used to use another datatype or parser for JSON floats (e.g. :class:`decimal.Decimal`). *parse_int*, if specified, will be called with the string of every JSON int to be decoded. By default, this is equivalent to ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). *parse_constant*, if specified, will be called with one of the following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. 
This can be used to raise an exception if invalid JSON numbers are encountered. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. """ if (cls is None and encoding is None and object_hook is None and parse_int is None and parse_float is None and parse_constant is None and object_pairs_hook is None and not use_decimal and not kw): return _default_decoder.decode(s) if cls is None: cls = JSONDecoder if object_hook is not None: kw['object_hook'] = object_hook if object_pairs_hook is not None: kw['object_pairs_hook'] = object_pairs_hook if parse_float is not None: kw['parse_float'] = parse_float if parse_int is not None: kw['parse_int'] = parse_int if parse_constant is not None: kw['parse_constant'] = parse_constant if use_decimal: if parse_float is not None: raise TypeError("use_decimal=True implies parse_float=Decimal") kw['parse_float'] = Decimal return cls(encoding=encoding, **kw).decode(s)
def function[loads, parameter[s, encoding, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, use_decimal]]: constant[Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. *encoding* determines the encoding used to interpret any :class:`str` objects decoded by this instance (``'utf-8'`` by default). It has no effect when decoding :class:`unicode` objects. Note that currently only encodings that are a superset of ASCII work, strings of other encodings should be passed in as :class:`unicode`. *object_hook*, if specified, will be called with the result of every JSON object decoded and its return value will be used in place of the given :class:`dict`. This can be used to provide custom deserializations (e.g. to support JSON-RPC class hinting). *object_pairs_hook* is an optional function that will be called with the result of any object literal decode with an ordered list of pairs. The return value of *object_pairs_hook* will be used instead of the :class:`dict`. This feature can be used to implement custom decoders that rely on the order that the key and value pairs are decoded (for example, :func:`collections.OrderedDict` will remember the order of insertion). If *object_hook* is also defined, the *object_pairs_hook* takes priority. *parse_float*, if specified, will be called with the string of every JSON float to be decoded. By default, this is equivalent to ``float(num_str)``. This can be used to use another datatype or parser for JSON floats (e.g. :class:`decimal.Decimal`). *parse_int*, if specified, will be called with the string of every JSON int to be decoded. By default, this is equivalent to ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). *parse_constant*, if specified, will be called with one of the following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. 
This can be used to raise an exception if invalid JSON numbers are encountered. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. ] if <ast.BoolOp object at 0x7da20e960640> begin[:] return[call[name[_default_decoder].decode, parameter[name[s]]]] if compare[name[cls] is constant[None]] begin[:] variable[cls] assign[=] name[JSONDecoder] if compare[name[object_hook] is_not constant[None]] begin[:] call[name[kw]][constant[object_hook]] assign[=] name[object_hook] if compare[name[object_pairs_hook] is_not constant[None]] begin[:] call[name[kw]][constant[object_pairs_hook]] assign[=] name[object_pairs_hook] if compare[name[parse_float] is_not constant[None]] begin[:] call[name[kw]][constant[parse_float]] assign[=] name[parse_float] if compare[name[parse_int] is_not constant[None]] begin[:] call[name[kw]][constant[parse_int]] assign[=] name[parse_int] if compare[name[parse_constant] is_not constant[None]] begin[:] call[name[kw]][constant[parse_constant]] assign[=] name[parse_constant] if name[use_decimal] begin[:] if compare[name[parse_float] is_not constant[None]] begin[:] <ast.Raise object at 0x7da1b12c9c00> call[name[kw]][constant[parse_float]] assign[=] name[Decimal] return[call[call[name[cls], parameter[]].decode, parameter[name[s]]]]
keyword[def] identifier[loads] ( identifier[s] , identifier[encoding] = keyword[None] , identifier[cls] = keyword[None] , identifier[object_hook] = keyword[None] , identifier[parse_float] = keyword[None] , identifier[parse_int] = keyword[None] , identifier[parse_constant] = keyword[None] , identifier[object_pairs_hook] = keyword[None] , identifier[use_decimal] = keyword[False] ,** identifier[kw] ): literal[string] keyword[if] ( identifier[cls] keyword[is] keyword[None] keyword[and] identifier[encoding] keyword[is] keyword[None] keyword[and] identifier[object_hook] keyword[is] keyword[None] keyword[and] identifier[parse_int] keyword[is] keyword[None] keyword[and] identifier[parse_float] keyword[is] keyword[None] keyword[and] identifier[parse_constant] keyword[is] keyword[None] keyword[and] identifier[object_pairs_hook] keyword[is] keyword[None] keyword[and] keyword[not] identifier[use_decimal] keyword[and] keyword[not] identifier[kw] ): keyword[return] identifier[_default_decoder] . identifier[decode] ( identifier[s] ) keyword[if] identifier[cls] keyword[is] keyword[None] : identifier[cls] = identifier[JSONDecoder] keyword[if] identifier[object_hook] keyword[is] keyword[not] keyword[None] : identifier[kw] [ literal[string] ]= identifier[object_hook] keyword[if] identifier[object_pairs_hook] keyword[is] keyword[not] keyword[None] : identifier[kw] [ literal[string] ]= identifier[object_pairs_hook] keyword[if] identifier[parse_float] keyword[is] keyword[not] keyword[None] : identifier[kw] [ literal[string] ]= identifier[parse_float] keyword[if] identifier[parse_int] keyword[is] keyword[not] keyword[None] : identifier[kw] [ literal[string] ]= identifier[parse_int] keyword[if] identifier[parse_constant] keyword[is] keyword[not] keyword[None] : identifier[kw] [ literal[string] ]= identifier[parse_constant] keyword[if] identifier[use_decimal] : keyword[if] identifier[parse_float] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[TypeError] ( 
literal[string] ) identifier[kw] [ literal[string] ]= identifier[Decimal] keyword[return] identifier[cls] ( identifier[encoding] = identifier[encoding] ,** identifier[kw] ). identifier[decode] ( identifier[s] )
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, use_decimal=False, **kw): """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. *encoding* determines the encoding used to interpret any :class:`str` objects decoded by this instance (``'utf-8'`` by default). It has no effect when decoding :class:`unicode` objects. Note that currently only encodings that are a superset of ASCII work, strings of other encodings should be passed in as :class:`unicode`. *object_hook*, if specified, will be called with the result of every JSON object decoded and its return value will be used in place of the given :class:`dict`. This can be used to provide custom deserializations (e.g. to support JSON-RPC class hinting). *object_pairs_hook* is an optional function that will be called with the result of any object literal decode with an ordered list of pairs. The return value of *object_pairs_hook* will be used instead of the :class:`dict`. This feature can be used to implement custom decoders that rely on the order that the key and value pairs are decoded (for example, :func:`collections.OrderedDict` will remember the order of insertion). If *object_hook* is also defined, the *object_pairs_hook* takes priority. *parse_float*, if specified, will be called with the string of every JSON float to be decoded. By default, this is equivalent to ``float(num_str)``. This can be used to use another datatype or parser for JSON floats (e.g. :class:`decimal.Decimal`). *parse_int*, if specified, will be called with the string of every JSON int to be decoded. By default, this is equivalent to ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). *parse_constant*, if specified, will be called with one of the following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. 
This can be used to raise an exception if invalid JSON numbers are encountered. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. """ if cls is None and encoding is None and (object_hook is None) and (parse_int is None) and (parse_float is None) and (parse_constant is None) and (object_pairs_hook is None) and (not use_decimal) and (not kw): return _default_decoder.decode(s) # depends on [control=['if'], data=[]] if cls is None: cls = JSONDecoder # depends on [control=['if'], data=['cls']] if object_hook is not None: kw['object_hook'] = object_hook # depends on [control=['if'], data=['object_hook']] if object_pairs_hook is not None: kw['object_pairs_hook'] = object_pairs_hook # depends on [control=['if'], data=['object_pairs_hook']] if parse_float is not None: kw['parse_float'] = parse_float # depends on [control=['if'], data=['parse_float']] if parse_int is not None: kw['parse_int'] = parse_int # depends on [control=['if'], data=['parse_int']] if parse_constant is not None: kw['parse_constant'] = parse_constant # depends on [control=['if'], data=['parse_constant']] if use_decimal: if parse_float is not None: raise TypeError('use_decimal=True implies parse_float=Decimal') # depends on [control=['if'], data=[]] kw['parse_float'] = Decimal # depends on [control=['if'], data=[]] return cls(encoding=encoding, **kw).decode(s)
def trackVehicle(self, viewID, vehID): """trackVehicle(string, string) -> None Start visually tracking the given vehicle on the given view. """ self._connection._sendStringCmd( tc.CMD_SET_GUI_VARIABLE, tc.VAR_TRACK_VEHICLE, viewID, vehID)
def function[trackVehicle, parameter[self, viewID, vehID]]: constant[trackVehicle(string, string) -> None Start visually tracking the given vehicle on the given view. ] call[name[self]._connection._sendStringCmd, parameter[name[tc].CMD_SET_GUI_VARIABLE, name[tc].VAR_TRACK_VEHICLE, name[viewID], name[vehID]]]
keyword[def] identifier[trackVehicle] ( identifier[self] , identifier[viewID] , identifier[vehID] ): literal[string] identifier[self] . identifier[_connection] . identifier[_sendStringCmd] ( identifier[tc] . identifier[CMD_SET_GUI_VARIABLE] , identifier[tc] . identifier[VAR_TRACK_VEHICLE] , identifier[viewID] , identifier[vehID] )
def trackVehicle(self, viewID, vehID): """trackVehicle(string, string) -> None Start visually tracking the given vehicle on the given view. """ self._connection._sendStringCmd(tc.CMD_SET_GUI_VARIABLE, tc.VAR_TRACK_VEHICLE, viewID, vehID)
def symlink_list(saltenv='base', backend=None): ''' Return a list of symlinked files and dirs saltenv : base The salt fileserver environment to be listed backend Narrow fileserver backends to a subset of the enabled ones. If all passed backends start with a minus sign (``-``), then these backends will be excluded from the enabled backends. However, if there is a mix of backends with and without a minus sign (ex: ``backend=-roots,git``) then the ones starting with a minus sign will be disregarded. .. versionadded:: 2015.5.0 .. note: Keep in mind that executing this function spawns a new process, separate from the master. This means that if the fileserver configuration has been changed in some way since the master has been restarted (e.g. if :conf_master:`fileserver_backend`, :conf_master:`gitfs_remotes`, :conf_master:`hgfs_remotes`, etc. have been updated), then the results of this runner will not accurately reflect what symlinks are available to minions. When in doubt, use :py:func:`cp.list_master_symlinks <salt.modules.cp.list_master_symlinks>` to see what symlinks the minion can see, and always remember to restart the salt-master daemon when updating the fileserver configuration. CLI Example: .. code-block:: bash salt-run fileserver.symlink_list salt-run fileserver.symlink_list saltenv=prod salt-run fileserver.symlink_list saltenv=dev backend=git salt-run fileserver.symlink_list base hg,roots salt-run fileserver.symlink_list -git ''' fileserver = salt.fileserver.Fileserver(__opts__) load = {'saltenv': saltenv, 'fsbackend': backend} return fileserver.symlink_list(load=load)
def function[symlink_list, parameter[saltenv, backend]]: constant[ Return a list of symlinked files and dirs saltenv : base The salt fileserver environment to be listed backend Narrow fileserver backends to a subset of the enabled ones. If all passed backends start with a minus sign (``-``), then these backends will be excluded from the enabled backends. However, if there is a mix of backends with and without a minus sign (ex: ``backend=-roots,git``) then the ones starting with a minus sign will be disregarded. .. versionadded:: 2015.5.0 .. note: Keep in mind that executing this function spawns a new process, separate from the master. This means that if the fileserver configuration has been changed in some way since the master has been restarted (e.g. if :conf_master:`fileserver_backend`, :conf_master:`gitfs_remotes`, :conf_master:`hgfs_remotes`, etc. have been updated), then the results of this runner will not accurately reflect what symlinks are available to minions. When in doubt, use :py:func:`cp.list_master_symlinks <salt.modules.cp.list_master_symlinks>` to see what symlinks the minion can see, and always remember to restart the salt-master daemon when updating the fileserver configuration. CLI Example: .. code-block:: bash salt-run fileserver.symlink_list salt-run fileserver.symlink_list saltenv=prod salt-run fileserver.symlink_list saltenv=dev backend=git salt-run fileserver.symlink_list base hg,roots salt-run fileserver.symlink_list -git ] variable[fileserver] assign[=] call[name[salt].fileserver.Fileserver, parameter[name[__opts__]]] variable[load] assign[=] dictionary[[<ast.Constant object at 0x7da18dc988b0>, <ast.Constant object at 0x7da18dc99780>], [<ast.Name object at 0x7da18dc98220>, <ast.Name object at 0x7da18dc9b160>]] return[call[name[fileserver].symlink_list, parameter[]]]
keyword[def] identifier[symlink_list] ( identifier[saltenv] = literal[string] , identifier[backend] = keyword[None] ): literal[string] identifier[fileserver] = identifier[salt] . identifier[fileserver] . identifier[Fileserver] ( identifier[__opts__] ) identifier[load] ={ literal[string] : identifier[saltenv] , literal[string] : identifier[backend] } keyword[return] identifier[fileserver] . identifier[symlink_list] ( identifier[load] = identifier[load] )
def symlink_list(saltenv='base', backend=None): """ Return a list of symlinked files and dirs saltenv : base The salt fileserver environment to be listed backend Narrow fileserver backends to a subset of the enabled ones. If all passed backends start with a minus sign (``-``), then these backends will be excluded from the enabled backends. However, if there is a mix of backends with and without a minus sign (ex: ``backend=-roots,git``) then the ones starting with a minus sign will be disregarded. .. versionadded:: 2015.5.0 .. note: Keep in mind that executing this function spawns a new process, separate from the master. This means that if the fileserver configuration has been changed in some way since the master has been restarted (e.g. if :conf_master:`fileserver_backend`, :conf_master:`gitfs_remotes`, :conf_master:`hgfs_remotes`, etc. have been updated), then the results of this runner will not accurately reflect what symlinks are available to minions. When in doubt, use :py:func:`cp.list_master_symlinks <salt.modules.cp.list_master_symlinks>` to see what symlinks the minion can see, and always remember to restart the salt-master daemon when updating the fileserver configuration. CLI Example: .. code-block:: bash salt-run fileserver.symlink_list salt-run fileserver.symlink_list saltenv=prod salt-run fileserver.symlink_list saltenv=dev backend=git salt-run fileserver.symlink_list base hg,roots salt-run fileserver.symlink_list -git """ fileserver = salt.fileserver.Fileserver(__opts__) load = {'saltenv': saltenv, 'fsbackend': backend} return fileserver.symlink_list(load=load)
def group_padding(blocks): """Consolidate FLAC padding metadata blocks. The overall size of the rendered blocks does not change, so this adds several bytes of padding for each merged block. """ paddings = [b for b in blocks if isinstance(b, Padding)] for p in paddings: blocks.remove(p) # total padding size is the sum of padding sizes plus 4 bytes # per removed header. size = sum(padding.length for padding in paddings) padding = Padding() padding.length = size + 4 * (len(paddings) - 1) blocks.append(padding)
def function[group_padding, parameter[blocks]]: constant[Consolidate FLAC padding metadata blocks. The overall size of the rendered blocks does not change, so this adds several bytes of padding for each merged block. ] variable[paddings] assign[=] <ast.ListComp object at 0x7da1b023fa30> for taget[name[p]] in starred[name[paddings]] begin[:] call[name[blocks].remove, parameter[name[p]]] variable[size] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b023d2d0>]] variable[padding] assign[=] call[name[Padding], parameter[]] name[padding].length assign[=] binary_operation[name[size] + binary_operation[constant[4] * binary_operation[call[name[len], parameter[name[paddings]]] - constant[1]]]] call[name[blocks].append, parameter[name[padding]]]
keyword[def] identifier[group_padding] ( identifier[blocks] ): literal[string] identifier[paddings] =[ identifier[b] keyword[for] identifier[b] keyword[in] identifier[blocks] keyword[if] identifier[isinstance] ( identifier[b] , identifier[Padding] )] keyword[for] identifier[p] keyword[in] identifier[paddings] : identifier[blocks] . identifier[remove] ( identifier[p] ) identifier[size] = identifier[sum] ( identifier[padding] . identifier[length] keyword[for] identifier[padding] keyword[in] identifier[paddings] ) identifier[padding] = identifier[Padding] () identifier[padding] . identifier[length] = identifier[size] + literal[int] *( identifier[len] ( identifier[paddings] )- literal[int] ) identifier[blocks] . identifier[append] ( identifier[padding] )
def group_padding(blocks): """Consolidate FLAC padding metadata blocks. The overall size of the rendered blocks does not change, so this adds several bytes of padding for each merged block. """ paddings = [b for b in blocks if isinstance(b, Padding)] for p in paddings: blocks.remove(p) # depends on [control=['for'], data=['p']] # total padding size is the sum of padding sizes plus 4 bytes # per removed header. size = sum((padding.length for padding in paddings)) padding = Padding() padding.length = size + 4 * (len(paddings) - 1) blocks.append(padding)
def _calc_covar_matrix(self, profile): """Calculate the covariance matrix. Parameters ---------- profile : site.Profile Input site profile Yields ------ covar : `class`:numpy.array Covariance matrix """ corr = self._calc_corr(profile) std = self._calc_ln_std(profile) # Modify the standard deviation by the truncated norm scale std *= randnorm.scale var = std ** 2 covar = corr * std[:-1] * std[1:] # Main diagonal is the variance mat = diags([covar, var, covar], [-1, 0, 1]).toarray() return mat
def function[_calc_covar_matrix, parameter[self, profile]]: constant[Calculate the covariance matrix. Parameters ---------- profile : site.Profile Input site profile Yields ------ covar : `class`:numpy.array Covariance matrix ] variable[corr] assign[=] call[name[self]._calc_corr, parameter[name[profile]]] variable[std] assign[=] call[name[self]._calc_ln_std, parameter[name[profile]]] <ast.AugAssign object at 0x7da20c76dbd0> variable[var] assign[=] binary_operation[name[std] ** constant[2]] variable[covar] assign[=] binary_operation[binary_operation[name[corr] * call[name[std]][<ast.Slice object at 0x7da20c76ffa0>]] * call[name[std]][<ast.Slice object at 0x7da20c76ee60>]] variable[mat] assign[=] call[call[name[diags], parameter[list[[<ast.Name object at 0x7da20c76efe0>, <ast.Name object at 0x7da20c76df60>, <ast.Name object at 0x7da20c76c250>]], list[[<ast.UnaryOp object at 0x7da20c76eb30>, <ast.Constant object at 0x7da20c76c490>, <ast.Constant object at 0x7da20c76d870>]]]].toarray, parameter[]] return[name[mat]]
keyword[def] identifier[_calc_covar_matrix] ( identifier[self] , identifier[profile] ): literal[string] identifier[corr] = identifier[self] . identifier[_calc_corr] ( identifier[profile] ) identifier[std] = identifier[self] . identifier[_calc_ln_std] ( identifier[profile] ) identifier[std] *= identifier[randnorm] . identifier[scale] identifier[var] = identifier[std] ** literal[int] identifier[covar] = identifier[corr] * identifier[std] [:- literal[int] ]* identifier[std] [ literal[int] :] identifier[mat] = identifier[diags] ([ identifier[covar] , identifier[var] , identifier[covar] ],[- literal[int] , literal[int] , literal[int] ]). identifier[toarray] () keyword[return] identifier[mat]
def _calc_covar_matrix(self, profile): """Calculate the covariance matrix. Parameters ---------- profile : site.Profile Input site profile Yields ------ covar : `class`:numpy.array Covariance matrix """ corr = self._calc_corr(profile) std = self._calc_ln_std(profile) # Modify the standard deviation by the truncated norm scale std *= randnorm.scale var = std ** 2 covar = corr * std[:-1] * std[1:] # Main diagonal is the variance mat = diags([covar, var, covar], [-1, 0, 1]).toarray() return mat
def read_set_from_file(filename: str) -> Set[str]: """ Extract a de-duped collection (set) of text from a file. Expected file format is one item per line. """ collection = set() with open(filename, 'r') as file_: for line in file_: collection.add(line.rstrip()) return collection
def function[read_set_from_file, parameter[filename]]: constant[ Extract a de-duped collection (set) of text from a file. Expected file format is one item per line. ] variable[collection] assign[=] call[name[set], parameter[]] with call[name[open], parameter[name[filename], constant[r]]] begin[:] for taget[name[line]] in starred[name[file_]] begin[:] call[name[collection].add, parameter[call[name[line].rstrip, parameter[]]]] return[name[collection]]
keyword[def] identifier[read_set_from_file] ( identifier[filename] : identifier[str] )-> identifier[Set] [ identifier[str] ]: literal[string] identifier[collection] = identifier[set] () keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[file_] : keyword[for] identifier[line] keyword[in] identifier[file_] : identifier[collection] . identifier[add] ( identifier[line] . identifier[rstrip] ()) keyword[return] identifier[collection]
def read_set_from_file(filename: str) -> Set[str]: """ Extract a de-duped collection (set) of text from a file. Expected file format is one item per line. """ collection = set() with open(filename, 'r') as file_: for line in file_: collection.add(line.rstrip()) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['file_']] return collection
def list_by_group(self, id_egroup): """Search Group Equipment from by the identifier. :param id_egroup: Identifier of the Group Equipment. Integer value and greater than zero. :return: Dictionary with the following structure: :: {'equipaments': [{'nome': < name_equipament >, 'grupos': < id_group >, 'mark': {'id': < id_mark >, 'nome': < name_mark >},'modelo': < id_model >, 'tipo_equipamento': < id_type >, 'model': {'nome': , 'id': < id_model >, 'marca': < id_mark >}, 'type': {id': < id_type >, 'tipo_equipamento': < name_type >}, 'id': < id_equipment >}, ... ]} :raise InvalidParameterError: Group Equipment is null and invalid. :raise GrupoEquipamentoNaoExisteError: Group Equipment not registered. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. """ if id_egroup is None: raise InvalidParameterError( u'The identifier of Group Equipament is invalid or was not informed.') url = 'equipment/group/' + str(id_egroup) + '/' code, xml = self.submit(None, 'GET', url) return self.response(code, xml)
def function[list_by_group, parameter[self, id_egroup]]: constant[Search Group Equipment from by the identifier. :param id_egroup: Identifier of the Group Equipment. Integer value and greater than zero. :return: Dictionary with the following structure: :: {'equipaments': [{'nome': < name_equipament >, 'grupos': < id_group >, 'mark': {'id': < id_mark >, 'nome': < name_mark >},'modelo': < id_model >, 'tipo_equipamento': < id_type >, 'model': {'nome': , 'id': < id_model >, 'marca': < id_mark >}, 'type': {id': < id_type >, 'tipo_equipamento': < name_type >}, 'id': < id_equipment >}, ... ]} :raise InvalidParameterError: Group Equipment is null and invalid. :raise GrupoEquipamentoNaoExisteError: Group Equipment not registered. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. ] if compare[name[id_egroup] is constant[None]] begin[:] <ast.Raise object at 0x7da2041dae30> variable[url] assign[=] binary_operation[binary_operation[constant[equipment/group/] + call[name[str], parameter[name[id_egroup]]]] + constant[/]] <ast.Tuple object at 0x7da2041d84f0> assign[=] call[name[self].submit, parameter[constant[None], constant[GET], name[url]]] return[call[name[self].response, parameter[name[code], name[xml]]]]
keyword[def] identifier[list_by_group] ( identifier[self] , identifier[id_egroup] ): literal[string] keyword[if] identifier[id_egroup] keyword[is] keyword[None] : keyword[raise] identifier[InvalidParameterError] ( literal[string] ) identifier[url] = literal[string] + identifier[str] ( identifier[id_egroup] )+ literal[string] identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] ) keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] )
def list_by_group(self, id_egroup): """Search Group Equipment from by the identifier. :param id_egroup: Identifier of the Group Equipment. Integer value and greater than zero. :return: Dictionary with the following structure: :: {'equipaments': [{'nome': < name_equipament >, 'grupos': < id_group >, 'mark': {'id': < id_mark >, 'nome': < name_mark >},'modelo': < id_model >, 'tipo_equipamento': < id_type >, 'model': {'nome': , 'id': < id_model >, 'marca': < id_mark >}, 'type': {id': < id_type >, 'tipo_equipamento': < name_type >}, 'id': < id_equipment >}, ... ]} :raise InvalidParameterError: Group Equipment is null and invalid. :raise GrupoEquipamentoNaoExisteError: Group Equipment not registered. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. """ if id_egroup is None: raise InvalidParameterError(u'The identifier of Group Equipament is invalid or was not informed.') # depends on [control=['if'], data=[]] url = 'equipment/group/' + str(id_egroup) + '/' (code, xml) = self.submit(None, 'GET', url) return self.response(code, xml)
def load_profiles_definitions(filename): """ Load the registered profiles defined in the file filename. This is a yml file that defines the basic characteristics of each profile with the following variables: It produces a dictionary that can be accessed with the a string that defines the profile organization and name in the form <org>:<profile name> }, """ with open(filename, 'r') as fp: profile_definitions = yaml.load(fp) # assume profile definitions are case insensitive profiles_dict = NocaseDict() for profile in profile_definitions: value = ProfileDef(profile["central_class"], profile["scoping_class"], profile["scoping_path"], profile['type'], profile['doc']) key = "%s:%s" % (profile["registered_org"], profile["registered_name"]) profiles_dict[key] = value return profiles_dict
def function[load_profiles_definitions, parameter[filename]]: constant[ Load the registered profiles defined in the file filename. This is a yml file that defines the basic characteristics of each profile with the following variables: It produces a dictionary that can be accessed with the a string that defines the profile organization and name in the form <org>:<profile name> }, ] with call[name[open], parameter[name[filename], constant[r]]] begin[:] variable[profile_definitions] assign[=] call[name[yaml].load, parameter[name[fp]]] variable[profiles_dict] assign[=] call[name[NocaseDict], parameter[]] for taget[name[profile]] in starred[name[profile_definitions]] begin[:] variable[value] assign[=] call[name[ProfileDef], parameter[call[name[profile]][constant[central_class]], call[name[profile]][constant[scoping_class]], call[name[profile]][constant[scoping_path]], call[name[profile]][constant[type]], call[name[profile]][constant[doc]]]] variable[key] assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da204565b10>, <ast.Subscript object at 0x7da204564370>]]] call[name[profiles_dict]][name[key]] assign[=] name[value] return[name[profiles_dict]]
keyword[def] identifier[load_profiles_definitions] ( identifier[filename] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fp] : identifier[profile_definitions] = identifier[yaml] . identifier[load] ( identifier[fp] ) identifier[profiles_dict] = identifier[NocaseDict] () keyword[for] identifier[profile] keyword[in] identifier[profile_definitions] : identifier[value] = identifier[ProfileDef] ( identifier[profile] [ literal[string] ], identifier[profile] [ literal[string] ], identifier[profile] [ literal[string] ], identifier[profile] [ literal[string] ], identifier[profile] [ literal[string] ]) identifier[key] = literal[string] %( identifier[profile] [ literal[string] ], identifier[profile] [ literal[string] ]) identifier[profiles_dict] [ identifier[key] ]= identifier[value] keyword[return] identifier[profiles_dict]
def load_profiles_definitions(filename): """ Load the registered profiles defined in the file filename. This is a yml file that defines the basic characteristics of each profile with the following variables: It produces a dictionary that can be accessed with the a string that defines the profile organization and name in the form <org>:<profile name> }, """ with open(filename, 'r') as fp: profile_definitions = yaml.load(fp) # depends on [control=['with'], data=['fp']] # assume profile definitions are case insensitive profiles_dict = NocaseDict() for profile in profile_definitions: value = ProfileDef(profile['central_class'], profile['scoping_class'], profile['scoping_path'], profile['type'], profile['doc']) key = '%s:%s' % (profile['registered_org'], profile['registered_name']) profiles_dict[key] = value # depends on [control=['for'], data=['profile']] return profiles_dict
def get_recover_position(gzfile, last_good_position): # type: (gzip.GzipFile, int) -> int """ Return position of a next gzip stream in a GzipFile, or -1 if it is not found. XXX: caller must ensure that the same last_good_position is not used multiple times for the same gzfile. """ with closing(mmap.mmap(gzfile.fileno(), 0, access=mmap.ACCESS_READ)) as m: return m.find(GZIP_SIGNATURE, last_good_position + 1)
def function[get_recover_position, parameter[gzfile, last_good_position]]: constant[ Return position of a next gzip stream in a GzipFile, or -1 if it is not found. XXX: caller must ensure that the same last_good_position is not used multiple times for the same gzfile. ] with call[name[closing], parameter[call[name[mmap].mmap, parameter[call[name[gzfile].fileno, parameter[]], constant[0]]]]] begin[:] return[call[name[m].find, parameter[name[GZIP_SIGNATURE], binary_operation[name[last_good_position] + constant[1]]]]]
keyword[def] identifier[get_recover_position] ( identifier[gzfile] , identifier[last_good_position] ): literal[string] keyword[with] identifier[closing] ( identifier[mmap] . identifier[mmap] ( identifier[gzfile] . identifier[fileno] (), literal[int] , identifier[access] = identifier[mmap] . identifier[ACCESS_READ] )) keyword[as] identifier[m] : keyword[return] identifier[m] . identifier[find] ( identifier[GZIP_SIGNATURE] , identifier[last_good_position] + literal[int] )
def get_recover_position(gzfile, last_good_position): # type: (gzip.GzipFile, int) -> int '\n Return position of a next gzip stream in a GzipFile, \n or -1 if it is not found.\n \n XXX: caller must ensure that the same last_good_position\n is not used multiple times for the same gzfile.\n ' with closing(mmap.mmap(gzfile.fileno(), 0, access=mmap.ACCESS_READ)) as m: return m.find(GZIP_SIGNATURE, last_good_position + 1) # depends on [control=['with'], data=['m']]
def get_server_capabilities(self): """Get hardware properties which can be used for scheduling :return: a dictionary of server capabilities. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. """ capabilities = self._call_method('get_server_capabilities') # TODO(nisha): Assumption is that Redfish always see the pci_device # member name field populated similarly to IPMI. # If redfish is not able to get nic_capacity, we can fall back to # IPMI way of retrieving nic_capacity in the future. As of now # the IPMI is not tested on Gen10, hence assuming that # Redfish will always be able to give the data. if ('Gen10' not in self.model): major_minor = ( self._call_method('get_ilo_firmware_version_as_major_minor')) # NOTE(vmud213): Even if it is None, pass it on to get_nic_capacity # as we still want to try getting nic capacity through ipmitool # irrespective of what firmware we are using. nic_capacity = ipmi.get_nic_capacity(self.ipmi_host_info, major_minor) if nic_capacity: capabilities.update({'nic_capacity': nic_capacity}) if capabilities: return capabilities
def function[get_server_capabilities, parameter[self]]: constant[Get hardware properties which can be used for scheduling :return: a dictionary of server capabilities. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. ] variable[capabilities] assign[=] call[name[self]._call_method, parameter[constant[get_server_capabilities]]] if compare[constant[Gen10] <ast.NotIn object at 0x7da2590d7190> name[self].model] begin[:] variable[major_minor] assign[=] call[name[self]._call_method, parameter[constant[get_ilo_firmware_version_as_major_minor]]] variable[nic_capacity] assign[=] call[name[ipmi].get_nic_capacity, parameter[name[self].ipmi_host_info, name[major_minor]]] if name[nic_capacity] begin[:] call[name[capabilities].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1a6d450>], [<ast.Name object at 0x7da1b1a6d690>]]]] if name[capabilities] begin[:] return[name[capabilities]]
keyword[def] identifier[get_server_capabilities] ( identifier[self] ): literal[string] identifier[capabilities] = identifier[self] . identifier[_call_method] ( literal[string] ) keyword[if] ( literal[string] keyword[not] keyword[in] identifier[self] . identifier[model] ): identifier[major_minor] =( identifier[self] . identifier[_call_method] ( literal[string] )) identifier[nic_capacity] = identifier[ipmi] . identifier[get_nic_capacity] ( identifier[self] . identifier[ipmi_host_info] , identifier[major_minor] ) keyword[if] identifier[nic_capacity] : identifier[capabilities] . identifier[update] ({ literal[string] : identifier[nic_capacity] }) keyword[if] identifier[capabilities] : keyword[return] identifier[capabilities]
def get_server_capabilities(self): """Get hardware properties which can be used for scheduling :return: a dictionary of server capabilities. :raises: IloError, on an error from iLO. :raises: IloCommandNotSupportedError, if the command is not supported on the server. """ capabilities = self._call_method('get_server_capabilities') # TODO(nisha): Assumption is that Redfish always see the pci_device # member name field populated similarly to IPMI. # If redfish is not able to get nic_capacity, we can fall back to # IPMI way of retrieving nic_capacity in the future. As of now # the IPMI is not tested on Gen10, hence assuming that # Redfish will always be able to give the data. if 'Gen10' not in self.model: major_minor = self._call_method('get_ilo_firmware_version_as_major_minor') # NOTE(vmud213): Even if it is None, pass it on to get_nic_capacity # as we still want to try getting nic capacity through ipmitool # irrespective of what firmware we are using. nic_capacity = ipmi.get_nic_capacity(self.ipmi_host_info, major_minor) if nic_capacity: capabilities.update({'nic_capacity': nic_capacity}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if capabilities: return capabilities # depends on [control=['if'], data=[]]
async def accept( self, headers: Optional[Union[dict, CIMultiDict, Headers]] = None, subprotocol: Optional[str] = None, ) -> None: """Manually chose to accept the websocket connection. Arguments: headers: Additional headers to send with the acceptance response. subprotocol: The chosen subprotocol, optional. """ if headers is None: headers_ = Headers() else: headers_ = Headers(headers) await self._accept(headers_, subprotocol)
<ast.AsyncFunctionDef object at 0x7da20e9b35e0>
keyword[async] keyword[def] identifier[accept] ( identifier[self] , identifier[headers] : identifier[Optional] [ identifier[Union] [ identifier[dict] , identifier[CIMultiDict] , identifier[Headers] ]]= keyword[None] , identifier[subprotocol] : identifier[Optional] [ identifier[str] ]= keyword[None] , )-> keyword[None] : literal[string] keyword[if] identifier[headers] keyword[is] keyword[None] : identifier[headers_] = identifier[Headers] () keyword[else] : identifier[headers_] = identifier[Headers] ( identifier[headers] ) keyword[await] identifier[self] . identifier[_accept] ( identifier[headers_] , identifier[subprotocol] )
async def accept(self, headers: Optional[Union[dict, CIMultiDict, Headers]]=None, subprotocol: Optional[str]=None) -> None: """Manually chose to accept the websocket connection. Arguments: headers: Additional headers to send with the acceptance response. subprotocol: The chosen subprotocol, optional. """ if headers is None: headers_ = Headers() # depends on [control=['if'], data=[]] else: headers_ = Headers(headers) await self._accept(headers_, subprotocol)
def empty(shape, dtype=None, **kwargs): """Create an array of given shape and type, without initializing entries. Args: shape (sequence of ints): 2D shape of the array. dtype (data-type, optional): Desired data-type for the array. kwargs (optional): Other arguments of the array (*coords, attrs, and name). Returns: array (decode.array): Decode array without initializing entries. """ data = np.empty(shape, dtype) return dc.array(data, **kwargs)
def function[empty, parameter[shape, dtype]]: constant[Create an array of given shape and type, without initializing entries. Args: shape (sequence of ints): 2D shape of the array. dtype (data-type, optional): Desired data-type for the array. kwargs (optional): Other arguments of the array (*coords, attrs, and name). Returns: array (decode.array): Decode array without initializing entries. ] variable[data] assign[=] call[name[np].empty, parameter[name[shape], name[dtype]]] return[call[name[dc].array, parameter[name[data]]]]
keyword[def] identifier[empty] ( identifier[shape] , identifier[dtype] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[data] = identifier[np] . identifier[empty] ( identifier[shape] , identifier[dtype] ) keyword[return] identifier[dc] . identifier[array] ( identifier[data] ,** identifier[kwargs] )
def empty(shape, dtype=None, **kwargs): """Create an array of given shape and type, without initializing entries. Args: shape (sequence of ints): 2D shape of the array. dtype (data-type, optional): Desired data-type for the array. kwargs (optional): Other arguments of the array (*coords, attrs, and name). Returns: array (decode.array): Decode array without initializing entries. """ data = np.empty(shape, dtype) return dc.array(data, **kwargs)
def byte_len(int_type): """ Get the number of byte needed to encode the int passed. :param int_type: the int to be converted :return: the number of bits needed to encode the int passed. """ length = 0 while int_type: int_type >>= 1 length += 1 if length > 0: if length % 8 != 0: length = int(length / 8) + 1 else: length = int(length / 8) return length
def function[byte_len, parameter[int_type]]: constant[ Get the number of byte needed to encode the int passed. :param int_type: the int to be converted :return: the number of bits needed to encode the int passed. ] variable[length] assign[=] constant[0] while name[int_type] begin[:] <ast.AugAssign object at 0x7da20e955e40> <ast.AugAssign object at 0x7da20e9553f0> if compare[name[length] greater[>] constant[0]] begin[:] if compare[binary_operation[name[length] <ast.Mod object at 0x7da2590d6920> constant[8]] not_equal[!=] constant[0]] begin[:] variable[length] assign[=] binary_operation[call[name[int], parameter[binary_operation[name[length] / constant[8]]]] + constant[1]] return[name[length]]
keyword[def] identifier[byte_len] ( identifier[int_type] ): literal[string] identifier[length] = literal[int] keyword[while] identifier[int_type] : identifier[int_type] >>= literal[int] identifier[length] += literal[int] keyword[if] identifier[length] > literal[int] : keyword[if] identifier[length] % literal[int] != literal[int] : identifier[length] = identifier[int] ( identifier[length] / literal[int] )+ literal[int] keyword[else] : identifier[length] = identifier[int] ( identifier[length] / literal[int] ) keyword[return] identifier[length]
def byte_len(int_type): """ Get the number of byte needed to encode the int passed. :param int_type: the int to be converted :return: the number of bits needed to encode the int passed. """ length = 0 while int_type: int_type >>= 1 length += 1 # depends on [control=['while'], data=[]] if length > 0: if length % 8 != 0: length = int(length / 8) + 1 # depends on [control=['if'], data=[]] else: length = int(length / 8) # depends on [control=['if'], data=['length']] return length
def segment_midpoints(neurites, neurite_type=NeuriteType.all): '''Return a list of segment mid-points in a collection of neurites''' def _seg_midpoint(sec): '''Return the mid-points of segments in a section''' pts = sec.points[:, COLS.XYZ] return np.divide(np.add(pts[:-1], pts[1:]), 2.0) return map_segments(_seg_midpoint, neurites, neurite_type)
def function[segment_midpoints, parameter[neurites, neurite_type]]: constant[Return a list of segment mid-points in a collection of neurites] def function[_seg_midpoint, parameter[sec]]: constant[Return the mid-points of segments in a section] variable[pts] assign[=] call[name[sec].points][tuple[[<ast.Slice object at 0x7da20e960bb0>, <ast.Attribute object at 0x7da20e9608e0>]]] return[call[name[np].divide, parameter[call[name[np].add, parameter[call[name[pts]][<ast.Slice object at 0x7da20e961630>], call[name[pts]][<ast.Slice object at 0x7da20e962da0>]]], constant[2.0]]]] return[call[name[map_segments], parameter[name[_seg_midpoint], name[neurites], name[neurite_type]]]]
keyword[def] identifier[segment_midpoints] ( identifier[neurites] , identifier[neurite_type] = identifier[NeuriteType] . identifier[all] ): literal[string] keyword[def] identifier[_seg_midpoint] ( identifier[sec] ): literal[string] identifier[pts] = identifier[sec] . identifier[points] [:, identifier[COLS] . identifier[XYZ] ] keyword[return] identifier[np] . identifier[divide] ( identifier[np] . identifier[add] ( identifier[pts] [:- literal[int] ], identifier[pts] [ literal[int] :]), literal[int] ) keyword[return] identifier[map_segments] ( identifier[_seg_midpoint] , identifier[neurites] , identifier[neurite_type] )
def segment_midpoints(neurites, neurite_type=NeuriteType.all): """Return a list of segment mid-points in a collection of neurites""" def _seg_midpoint(sec): """Return the mid-points of segments in a section""" pts = sec.points[:, COLS.XYZ] return np.divide(np.add(pts[:-1], pts[1:]), 2.0) return map_segments(_seg_midpoint, neurites, neurite_type)
def get_by_index(self, index): """ Returns the entry specified by index Note that the table is 1-based ie an index of 0 is invalid. This is due to the fact that a zero value index signals that a completely unindexed header follows. The entry will either be from the static table or the dynamic table depending on the value of index. """ index -= 1 if 0 <= index < len(CocaineHeaders.STATIC_TABLE): return CocaineHeaders.STATIC_TABLE[index] index -= len(CocaineHeaders.STATIC_TABLE) if 0 <= index < len(self.dynamic_entries): return self.dynamic_entries[index] raise InvalidTableIndex("Invalid table index %d" % index)
def function[get_by_index, parameter[self, index]]: constant[ Returns the entry specified by index Note that the table is 1-based ie an index of 0 is invalid. This is due to the fact that a zero value index signals that a completely unindexed header follows. The entry will either be from the static table or the dynamic table depending on the value of index. ] <ast.AugAssign object at 0x7da20c795330> if compare[constant[0] less_or_equal[<=] name[index]] begin[:] return[call[name[CocaineHeaders].STATIC_TABLE][name[index]]] <ast.AugAssign object at 0x7da20c796a70> if compare[constant[0] less_or_equal[<=] name[index]] begin[:] return[call[name[self].dynamic_entries][name[index]]] <ast.Raise object at 0x7da20c796920>
keyword[def] identifier[get_by_index] ( identifier[self] , identifier[index] ): literal[string] identifier[index] -= literal[int] keyword[if] literal[int] <= identifier[index] < identifier[len] ( identifier[CocaineHeaders] . identifier[STATIC_TABLE] ): keyword[return] identifier[CocaineHeaders] . identifier[STATIC_TABLE] [ identifier[index] ] identifier[index] -= identifier[len] ( identifier[CocaineHeaders] . identifier[STATIC_TABLE] ) keyword[if] literal[int] <= identifier[index] < identifier[len] ( identifier[self] . identifier[dynamic_entries] ): keyword[return] identifier[self] . identifier[dynamic_entries] [ identifier[index] ] keyword[raise] identifier[InvalidTableIndex] ( literal[string] % identifier[index] )
def get_by_index(self, index): """ Returns the entry specified by index Note that the table is 1-based ie an index of 0 is invalid. This is due to the fact that a zero value index signals that a completely unindexed header follows. The entry will either be from the static table or the dynamic table depending on the value of index. """ index -= 1 if 0 <= index < len(CocaineHeaders.STATIC_TABLE): return CocaineHeaders.STATIC_TABLE[index] # depends on [control=['if'], data=['index']] index -= len(CocaineHeaders.STATIC_TABLE) if 0 <= index < len(self.dynamic_entries): return self.dynamic_entries[index] # depends on [control=['if'], data=['index']] raise InvalidTableIndex('Invalid table index %d' % index)
def __validInputs(self): """Validates the inputs of the constructor.""" #if not isinstance(self.__column, Column): # raise Sitools2Exception("column must be an instance of Column") try: float(self.__minVal) except ValueError as ex: raise Sitools2Exception(ex) try: float(self.__maxVal) except ValueError as ex: raise Sitools2Exception(ex) if float(self.__minVal) >= float(self.__maxVal): raise Sitools2Exception("maxVal must be superior to minVal")
def function[__validInputs, parameter[self]]: constant[Validates the inputs of the constructor.] <ast.Try object at 0x7da1b09bf160> <ast.Try object at 0x7da1b09bd2d0> if compare[call[name[float], parameter[name[self].__minVal]] greater_or_equal[>=] call[name[float], parameter[name[self].__maxVal]]] begin[:] <ast.Raise object at 0x7da1b0913640>
keyword[def] identifier[__validInputs] ( identifier[self] ): literal[string] keyword[try] : identifier[float] ( identifier[self] . identifier[__minVal] ) keyword[except] identifier[ValueError] keyword[as] identifier[ex] : keyword[raise] identifier[Sitools2Exception] ( identifier[ex] ) keyword[try] : identifier[float] ( identifier[self] . identifier[__maxVal] ) keyword[except] identifier[ValueError] keyword[as] identifier[ex] : keyword[raise] identifier[Sitools2Exception] ( identifier[ex] ) keyword[if] identifier[float] ( identifier[self] . identifier[__minVal] )>= identifier[float] ( identifier[self] . identifier[__maxVal] ): keyword[raise] identifier[Sitools2Exception] ( literal[string] )
def __validInputs(self): """Validates the inputs of the constructor.""" #if not isinstance(self.__column, Column): # raise Sitools2Exception("column must be an instance of Column") try: float(self.__minVal) # depends on [control=['try'], data=[]] except ValueError as ex: raise Sitools2Exception(ex) # depends on [control=['except'], data=['ex']] try: float(self.__maxVal) # depends on [control=['try'], data=[]] except ValueError as ex: raise Sitools2Exception(ex) # depends on [control=['except'], data=['ex']] if float(self.__minVal) >= float(self.__maxVal): raise Sitools2Exception('maxVal must be superior to minVal') # depends on [control=['if'], data=[]]
def store_user_documents(user_document_gen, client, mongo_database_name, mongo_collection_name): """ Stores Twitter list objects that a Twitter user is a member of in different mongo collections. Inputs: - user_document_gen: A python generator that yields a Twitter user id and an associated document list. - client: A pymongo MongoClient object. - mongo_database_name: The name of a Mongo database as a string. - mongo_collection_name: The name of the mongo collection as a string. """ mongo_database = client[mongo_database_name] mongo_collection = mongo_database[mongo_collection_name] # Iterate over all users to be annotated and store the Twitter lists in mongo. for user_twitter_id, user_document_list in user_document_gen: document = user_document_list document["_id"] = int(user_twitter_id) mongo_collection.update({"_id": user_twitter_id}, document, upsert=True)
def function[store_user_documents, parameter[user_document_gen, client, mongo_database_name, mongo_collection_name]]: constant[ Stores Twitter list objects that a Twitter user is a member of in different mongo collections. Inputs: - user_document_gen: A python generator that yields a Twitter user id and an associated document list. - client: A pymongo MongoClient object. - mongo_database_name: The name of a Mongo database as a string. - mongo_collection_name: The name of the mongo collection as a string. ] variable[mongo_database] assign[=] call[name[client]][name[mongo_database_name]] variable[mongo_collection] assign[=] call[name[mongo_database]][name[mongo_collection_name]] for taget[tuple[[<ast.Name object at 0x7da207f01ed0>, <ast.Name object at 0x7da207f00b80>]]] in starred[name[user_document_gen]] begin[:] variable[document] assign[=] name[user_document_list] call[name[document]][constant[_id]] assign[=] call[name[int], parameter[name[user_twitter_id]]] call[name[mongo_collection].update, parameter[dictionary[[<ast.Constant object at 0x7da207f00f70>], [<ast.Name object at 0x7da207f02110>]], name[document]]]
keyword[def] identifier[store_user_documents] ( identifier[user_document_gen] , identifier[client] , identifier[mongo_database_name] , identifier[mongo_collection_name] ): literal[string] identifier[mongo_database] = identifier[client] [ identifier[mongo_database_name] ] identifier[mongo_collection] = identifier[mongo_database] [ identifier[mongo_collection_name] ] keyword[for] identifier[user_twitter_id] , identifier[user_document_list] keyword[in] identifier[user_document_gen] : identifier[document] = identifier[user_document_list] identifier[document] [ literal[string] ]= identifier[int] ( identifier[user_twitter_id] ) identifier[mongo_collection] . identifier[update] ({ literal[string] : identifier[user_twitter_id] }, identifier[document] , identifier[upsert] = keyword[True] )
def store_user_documents(user_document_gen, client, mongo_database_name, mongo_collection_name): """ Stores Twitter list objects that a Twitter user is a member of in different mongo collections. Inputs: - user_document_gen: A python generator that yields a Twitter user id and an associated document list. - client: A pymongo MongoClient object. - mongo_database_name: The name of a Mongo database as a string. - mongo_collection_name: The name of the mongo collection as a string. """ mongo_database = client[mongo_database_name] mongo_collection = mongo_database[mongo_collection_name] # Iterate over all users to be annotated and store the Twitter lists in mongo. for (user_twitter_id, user_document_list) in user_document_gen: document = user_document_list document['_id'] = int(user_twitter_id) mongo_collection.update({'_id': user_twitter_id}, document, upsert=True) # depends on [control=['for'], data=[]]