code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def phi(n):
    """Return the Euler totient function of n.

    Computed multiplicatively from the prime factorization:
    phi(n) = prod(p**(e-1) * (p-1)) over all prime factors p with
    exponent e. For n < 3 the answer is 1 (phi(1) == phi(2) == 1).
    """
    assert isinstance(n, integer_types)
    if n < 3:
        return 1
    total = 1
    # ``factorization`` yields (prime, exponent) pairs; note that for
    # exponent == 1 the term p**0 * (p-1) reduces to (p-1), so a single
    # expression covers both cases exactly (integer arithmetic).
    for factor in factorization(n):
        prime, exponent = factor[0], factor[1]
        total *= prime ** (exponent - 1) * (prime - 1)
    return total
def function[phi, parameter[n]]: constant[Return the Euler totient function of n.] assert[call[name[isinstance], parameter[name[n], name[integer_types]]]] if compare[name[n] less[<] constant[3]] begin[:] return[constant[1]] variable[result] assign[=] constant[1] variable[ff] assign[=] call[name[factorization], parameter[name[n]]] for taget[name[f]] in starred[name[ff]] begin[:] variable[e] assign[=] call[name[f]][constant[1]] if compare[name[e] greater[>] constant[1]] begin[:] variable[result] assign[=] binary_operation[binary_operation[name[result] * binary_operation[call[name[f]][constant[0]] ** binary_operation[name[e] - constant[1]]]] * binary_operation[call[name[f]][constant[0]] - constant[1]]] return[name[result]]
keyword[def] identifier[phi] ( identifier[n] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[n] , identifier[integer_types] ) keyword[if] identifier[n] < literal[int] : keyword[return] literal[int] identifier[result] = literal[int] identifier[ff] = identifier[factorization] ( identifier[n] ) keyword[for] identifier[f] keyword[in] identifier[ff] : identifier[e] = identifier[f] [ literal[int] ] keyword[if] identifier[e] > literal[int] : identifier[result] = identifier[result] * identifier[f] [ literal[int] ]**( identifier[e] - literal[int] )*( identifier[f] [ literal[int] ]- literal[int] ) keyword[else] : identifier[result] = identifier[result] *( identifier[f] [ literal[int] ]- literal[int] ) keyword[return] identifier[result]
def phi(n): """Return the Euler totient function of n.""" assert isinstance(n, integer_types) if n < 3: return 1 # depends on [control=['if'], data=[]] result = 1 ff = factorization(n) for f in ff: e = f[1] if e > 1: result = result * f[0] ** (e - 1) * (f[0] - 1) # depends on [control=['if'], data=['e']] else: result = result * (f[0] - 1) # depends on [control=['for'], data=['f']] return result
def remove_bucket_list_item(self, id, collection, item):
    """
    Removes an item from the bucket list

    Args:
        id: the CRITs object id of the TLO
        collection: The db collection. See main class documentation.
        item: the bucket list item to remove

    Returns:
        The mongodb result
    """
    # Coerce string ids into ObjectId; exact type() check deliberately
    # mirrors the strict (non-isinstance) comparison.
    if type(id) is not ObjectId:
        id = ObjectId(id)
    target = getattr(self.db, collection)
    return target.update(
        {'_id': id},
        {'$pull': {'bucket_list': item}},
    )
def function[remove_bucket_list_item, parameter[self, id, collection, item]]: constant[ Removes an item from the bucket list Args: id: the CRITs object id of the TLO collection: The db collection. See main class documentation. item: the bucket list item to remove Returns: The mongodb result ] if compare[call[name[type], parameter[name[id]]] is_not name[ObjectId]] begin[:] variable[id] assign[=] call[name[ObjectId], parameter[name[id]]] variable[obj] assign[=] call[name[getattr], parameter[name[self].db, name[collection]]] variable[result] assign[=] call[name[obj].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1594a30>], [<ast.Name object at 0x7da1b1595840>]], dictionary[[<ast.Constant object at 0x7da1b1596c50>], [<ast.Dict object at 0x7da1b1594280>]]]] return[name[result]]
keyword[def] identifier[remove_bucket_list_item] ( identifier[self] , identifier[id] , identifier[collection] , identifier[item] ): literal[string] keyword[if] identifier[type] ( identifier[id] ) keyword[is] keyword[not] identifier[ObjectId] : identifier[id] = identifier[ObjectId] ( identifier[id] ) identifier[obj] = identifier[getattr] ( identifier[self] . identifier[db] , identifier[collection] ) identifier[result] = identifier[obj] . identifier[update] ( { literal[string] : identifier[id] }, { literal[string] :{ literal[string] : identifier[item] }} ) keyword[return] identifier[result]
def remove_bucket_list_item(self, id, collection, item): """ Removes an item from the bucket list Args: id: the CRITs object id of the TLO collection: The db collection. See main class documentation. item: the bucket list item to remove Returns: The mongodb result """ if type(id) is not ObjectId: id = ObjectId(id) # depends on [control=['if'], data=['ObjectId']] obj = getattr(self.db, collection) result = obj.update({'_id': id}, {'$pull': {'bucket_list': item}}) return result
def network_info(name=None, **kwargs):
    '''
    Return informations on a virtual network provided its name.

    :param name: virtual network name
    :param connection: libvirt connection URI, overriding defaults
    :param username: username to connect with, overriding defaults
    :param password: password to connect with, overriding defaults

    If no name is provided, return the infos for all defined virtual networks.

    .. versionadded:: 2019.2.0

    CLI Example:

    .. code-block:: bash

        salt '*' virt.network_info default
    '''
    result = {}
    conn = __get_conn(**kwargs)

    def _net_get_leases(net):
        '''
        Get all DHCP leases for a network
        '''
        # Translate libvirt lease-type constants into readable strings.
        type_names = {
            libvirt.VIR_IP_ADDR_TYPE_IPV4: 'ipv4',
            libvirt.VIR_IP_ADDR_TYPE_IPV6: 'ipv6',
        }
        leases = net.DHCPLeases()
        for lease in leases:
            lease['type'] = type_names.get(lease['type'], 'unknown')
        return leases

    try:
        # Build the full mapping in one shot so that a libvirt error leaves
        # ``result`` empty rather than partially populated.
        matching = [net for net in conn.listAllNetworks()
                    if name is None or net.name() == name]
        result = {net.name(): {'uuid': net.UUIDString(),
                               'bridge': net.bridgeName(),
                               'autostart': net.autostart(),
                               'active': net.isActive(),
                               'persistent': net.isPersistent(),
                               'leases': _net_get_leases(net)}
                  for net in matching}
    except libvirt.libvirtError as err:
        log.debug('Silenced libvirt error: %s', str(err))
    finally:
        conn.close()
    return result
def function[network_info, parameter[name]]: constant[ Return informations on a virtual network provided its name. :param name: virtual network name :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding defaults If no name is provided, return the infos for all defined virtual networks. .. versionadded:: 2019.2.0 CLI Example: .. code-block:: bash salt '*' virt.network_info default ] variable[result] assign[=] dictionary[[], []] variable[conn] assign[=] call[name[__get_conn], parameter[]] def function[_net_get_leases, parameter[net]]: constant[ Get all DHCP leases for a network ] variable[leases] assign[=] call[name[net].DHCPLeases, parameter[]] for taget[name[lease]] in starred[name[leases]] begin[:] if compare[call[name[lease]][constant[type]] equal[==] name[libvirt].VIR_IP_ADDR_TYPE_IPV4] begin[:] call[name[lease]][constant[type]] assign[=] constant[ipv4] return[name[leases]] <ast.Try object at 0x7da207f03c10> return[name[result]]
keyword[def] identifier[network_info] ( identifier[name] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[result] ={} identifier[conn] = identifier[__get_conn] (** identifier[kwargs] ) keyword[def] identifier[_net_get_leases] ( identifier[net] ): literal[string] identifier[leases] = identifier[net] . identifier[DHCPLeases] () keyword[for] identifier[lease] keyword[in] identifier[leases] : keyword[if] identifier[lease] [ literal[string] ]== identifier[libvirt] . identifier[VIR_IP_ADDR_TYPE_IPV4] : identifier[lease] [ literal[string] ]= literal[string] keyword[elif] identifier[lease] [ literal[string] ]== identifier[libvirt] . identifier[VIR_IP_ADDR_TYPE_IPV6] : identifier[lease] [ literal[string] ]= literal[string] keyword[else] : identifier[lease] [ literal[string] ]= literal[string] keyword[return] identifier[leases] keyword[try] : identifier[nets] =[ identifier[net] keyword[for] identifier[net] keyword[in] identifier[conn] . identifier[listAllNetworks] () keyword[if] identifier[name] keyword[is] keyword[None] keyword[or] identifier[net] . identifier[name] ()== identifier[name] ] identifier[result] ={ identifier[net] . identifier[name] ():{ literal[string] : identifier[net] . identifier[UUIDString] (), literal[string] : identifier[net] . identifier[bridgeName] (), literal[string] : identifier[net] . identifier[autostart] (), literal[string] : identifier[net] . identifier[isActive] (), literal[string] : identifier[net] . identifier[isPersistent] (), literal[string] : identifier[_net_get_leases] ( identifier[net] )} keyword[for] identifier[net] keyword[in] identifier[nets] } keyword[except] identifier[libvirt] . identifier[libvirtError] keyword[as] identifier[err] : identifier[log] . identifier[debug] ( literal[string] , identifier[str] ( identifier[err] )) keyword[finally] : identifier[conn] . identifier[close] () keyword[return] identifier[result]
def network_info(name=None, **kwargs): """ Return informations on a virtual network provided its name. :param name: virtual network name :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding defaults If no name is provided, return the infos for all defined virtual networks. .. versionadded:: 2019.2.0 CLI Example: .. code-block:: bash salt '*' virt.network_info default """ result = {} conn = __get_conn(**kwargs) def _net_get_leases(net): """ Get all DHCP leases for a network """ leases = net.DHCPLeases() for lease in leases: if lease['type'] == libvirt.VIR_IP_ADDR_TYPE_IPV4: lease['type'] = 'ipv4' # depends on [control=['if'], data=[]] elif lease['type'] == libvirt.VIR_IP_ADDR_TYPE_IPV6: lease['type'] = 'ipv6' # depends on [control=['if'], data=[]] else: lease['type'] = 'unknown' # depends on [control=['for'], data=['lease']] return leases try: nets = [net for net in conn.listAllNetworks() if name is None or net.name() == name] result = {net.name(): {'uuid': net.UUIDString(), 'bridge': net.bridgeName(), 'autostart': net.autostart(), 'active': net.isActive(), 'persistent': net.isPersistent(), 'leases': _net_get_leases(net)} for net in nets} # depends on [control=['try'], data=[]] except libvirt.libvirtError as err: log.debug('Silenced libvirt error: %s', str(err)) # depends on [control=['except'], data=['err']] finally: conn.close() return result
def _get_line_no_from_comments(py_line):
    """Return the line number parsed from the comment or 0."""
    match = LINECOL_COMMENT_RE.match(py_line)
    # Group 1 of the pattern holds the line number; 0 means "no comment".
    return int(match.group(1)) if match else 0
def function[_get_line_no_from_comments, parameter[py_line]]: constant[Return the line number parsed from the comment or 0.] variable[matched] assign[=] call[name[LINECOL_COMMENT_RE].match, parameter[name[py_line]]] if name[matched] begin[:] return[call[name[int], parameter[call[name[matched].group, parameter[constant[1]]]]]]
keyword[def] identifier[_get_line_no_from_comments] ( identifier[py_line] ): literal[string] identifier[matched] = identifier[LINECOL_COMMENT_RE] . identifier[match] ( identifier[py_line] ) keyword[if] identifier[matched] : keyword[return] identifier[int] ( identifier[matched] . identifier[group] ( literal[int] )) keyword[else] : keyword[return] literal[int]
def _get_line_no_from_comments(py_line): """Return the line number parsed from the comment or 0.""" matched = LINECOL_COMMENT_RE.match(py_line) if matched: return int(matched.group(1)) # depends on [control=['if'], data=[]] else: return 0
def compute(chart):
    """ Computes the behavior. """
    factors = []

    # Planets in House1 or conjunct the Ascendant (orb list [0] == conjunction).
    house1 = chart.getHouse(const.HOUSE1)
    in_house1 = chart.objects.getObjectsInHouse(house1)
    asc = chart.getAngle(const.ASC)
    conj_asc = chart.objects.getObjectsAspecting(asc, [0])
    factors.append(['Planets in House1 or Conj Asc',
                    _merge(in_house1, conj_asc)])

    # Planets conjunct the Moon or Mercury.
    moon = chart.get(const.MOON)
    mercury = chart.get(const.MERCURY)
    conj_moon = chart.objects.getObjectsAspecting(moon, [0])
    conj_mercury = chart.objects.getObjectsAspecting(mercury, [0])
    factors.append(['Planets Conj Moon or Mercury',
                    _merge(conj_moon, conj_mercury)])

    # Ascendant ruler, counted only when aspected by its own disposer.
    asc_ruler = chart.getObject(essential.ruler(asc.sign))
    disposer = chart.getObject(essential.ruler(asc_ruler.sign))
    ruler_factor = []
    if aspects.isAspecting(disposer, asc_ruler, const.MAJOR_ASPECTS):
        ruler_factor = [asc_ruler.id]
    factors.append(['Asc Ruler if aspected by its disposer', ruler_factor])

    # Planets making non-conjunction major aspects to the Moon or Mercury.
    asp_moon = chart.objects.getObjectsAspecting(moon, [60, 90, 120, 180])
    asp_mercury = chart.objects.getObjectsAspecting(mercury, [60, 90, 120, 180])
    factors.append(['Planets Asp Moon or Mercury',
                    _merge(asp_moon, asp_mercury)])

    return factors
def function[compute, parameter[chart]]: constant[ Computes the behavior. ] variable[factors] assign[=] list[[]] variable[house1] assign[=] call[name[chart].getHouse, parameter[name[const].HOUSE1]] variable[planetsHouse1] assign[=] call[name[chart].objects.getObjectsInHouse, parameter[name[house1]]] variable[asc] assign[=] call[name[chart].getAngle, parameter[name[const].ASC]] variable[planetsConjAsc] assign[=] call[name[chart].objects.getObjectsAspecting, parameter[name[asc], list[[<ast.Constant object at 0x7da18f58c280>]]]] variable[_set] assign[=] call[name[_merge], parameter[name[planetsHouse1], name[planetsConjAsc]]] call[name[factors].append, parameter[list[[<ast.Constant object at 0x7da18f58f1f0>, <ast.Name object at 0x7da18f58e440>]]]] variable[moon] assign[=] call[name[chart].get, parameter[name[const].MOON]] variable[mercury] assign[=] call[name[chart].get, parameter[name[const].MERCURY]] variable[planetsConjMoon] assign[=] call[name[chart].objects.getObjectsAspecting, parameter[name[moon], list[[<ast.Constant object at 0x7da18f58ee30>]]]] variable[planetsConjMercury] assign[=] call[name[chart].objects.getObjectsAspecting, parameter[name[mercury], list[[<ast.Constant object at 0x7da18f58dc90>]]]] variable[_set] assign[=] call[name[_merge], parameter[name[planetsConjMoon], name[planetsConjMercury]]] call[name[factors].append, parameter[list[[<ast.Constant object at 0x7da18f58d0f0>, <ast.Name object at 0x7da18f58dab0>]]]] variable[ascRulerID] assign[=] call[name[essential].ruler, parameter[name[asc].sign]] variable[ascRuler] assign[=] call[name[chart].getObject, parameter[name[ascRulerID]]] variable[disposerID] assign[=] call[name[essential].ruler, parameter[name[ascRuler].sign]] variable[disposer] assign[=] call[name[chart].getObject, parameter[name[disposerID]]] variable[_set] assign[=] list[[]] if call[name[aspects].isAspecting, parameter[name[disposer], name[ascRuler], name[const].MAJOR_ASPECTS]] begin[:] variable[_set] assign[=] list[[<ast.Attribute 
object at 0x7da18f58f280>]] call[name[factors].append, parameter[list[[<ast.Constant object at 0x7da18f58e9e0>, <ast.Name object at 0x7da18f58c7c0>]]]] variable[aspMoon] assign[=] call[name[chart].objects.getObjectsAspecting, parameter[name[moon], list[[<ast.Constant object at 0x7da1b11a52a0>, <ast.Constant object at 0x7da1b11a53c0>, <ast.Constant object at 0x7da1b11a6590>, <ast.Constant object at 0x7da1b11a5840>]]]] variable[aspMercury] assign[=] call[name[chart].objects.getObjectsAspecting, parameter[name[mercury], list[[<ast.Constant object at 0x7da1b11a70a0>, <ast.Constant object at 0x7da1b11a4a00>, <ast.Constant object at 0x7da1b11a7580>, <ast.Constant object at 0x7da1b11a6230>]]]] variable[_set] assign[=] call[name[_merge], parameter[name[aspMoon], name[aspMercury]]] call[name[factors].append, parameter[list[[<ast.Constant object at 0x7da1b11a73d0>, <ast.Name object at 0x7da1b11a64d0>]]]] return[name[factors]]
keyword[def] identifier[compute] ( identifier[chart] ): literal[string] identifier[factors] =[] identifier[house1] = identifier[chart] . identifier[getHouse] ( identifier[const] . identifier[HOUSE1] ) identifier[planetsHouse1] = identifier[chart] . identifier[objects] . identifier[getObjectsInHouse] ( identifier[house1] ) identifier[asc] = identifier[chart] . identifier[getAngle] ( identifier[const] . identifier[ASC] ) identifier[planetsConjAsc] = identifier[chart] . identifier[objects] . identifier[getObjectsAspecting] ( identifier[asc] ,[ literal[int] ]) identifier[_set] = identifier[_merge] ( identifier[planetsHouse1] , identifier[planetsConjAsc] ) identifier[factors] . identifier[append] ([ literal[string] , identifier[_set] ]) identifier[moon] = identifier[chart] . identifier[get] ( identifier[const] . identifier[MOON] ) identifier[mercury] = identifier[chart] . identifier[get] ( identifier[const] . identifier[MERCURY] ) identifier[planetsConjMoon] = identifier[chart] . identifier[objects] . identifier[getObjectsAspecting] ( identifier[moon] ,[ literal[int] ]) identifier[planetsConjMercury] = identifier[chart] . identifier[objects] . identifier[getObjectsAspecting] ( identifier[mercury] ,[ literal[int] ]) identifier[_set] = identifier[_merge] ( identifier[planetsConjMoon] , identifier[planetsConjMercury] ) identifier[factors] . identifier[append] ([ literal[string] , identifier[_set] ]) identifier[ascRulerID] = identifier[essential] . identifier[ruler] ( identifier[asc] . identifier[sign] ) identifier[ascRuler] = identifier[chart] . identifier[getObject] ( identifier[ascRulerID] ) identifier[disposerID] = identifier[essential] . identifier[ruler] ( identifier[ascRuler] . identifier[sign] ) identifier[disposer] = identifier[chart] . identifier[getObject] ( identifier[disposerID] ) identifier[_set] =[] keyword[if] identifier[aspects] . identifier[isAspecting] ( identifier[disposer] , identifier[ascRuler] , identifier[const] . 
identifier[MAJOR_ASPECTS] ): identifier[_set] =[ identifier[ascRuler] . identifier[id] ] identifier[factors] . identifier[append] ([ literal[string] , identifier[_set] ]); identifier[aspMoon] = identifier[chart] . identifier[objects] . identifier[getObjectsAspecting] ( identifier[moon] ,[ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[aspMercury] = identifier[chart] . identifier[objects] . identifier[getObjectsAspecting] ( identifier[mercury] ,[ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[_set] = identifier[_merge] ( identifier[aspMoon] , identifier[aspMercury] ) identifier[factors] . identifier[append] ([ literal[string] , identifier[_set] ]) keyword[return] identifier[factors]
def compute(chart): """ Computes the behavior. """ factors = [] # Planets in House1 or Conjunct Asc house1 = chart.getHouse(const.HOUSE1) planetsHouse1 = chart.objects.getObjectsInHouse(house1) asc = chart.getAngle(const.ASC) planetsConjAsc = chart.objects.getObjectsAspecting(asc, [0]) _set = _merge(planetsHouse1, planetsConjAsc) factors.append(['Planets in House1 or Conj Asc', _set]) # Planets conjunct Moon or Mercury moon = chart.get(const.MOON) mercury = chart.get(const.MERCURY) planetsConjMoon = chart.objects.getObjectsAspecting(moon, [0]) planetsConjMercury = chart.objects.getObjectsAspecting(mercury, [0]) _set = _merge(planetsConjMoon, planetsConjMercury) factors.append(['Planets Conj Moon or Mercury', _set]) # Asc ruler if aspected by disposer ascRulerID = essential.ruler(asc.sign) ascRuler = chart.getObject(ascRulerID) disposerID = essential.ruler(ascRuler.sign) disposer = chart.getObject(disposerID) _set = [] if aspects.isAspecting(disposer, ascRuler, const.MAJOR_ASPECTS): _set = [ascRuler.id] # depends on [control=['if'], data=[]] factors.append(['Asc Ruler if aspected by its disposer', _set]) # Planets aspecting Moon or Mercury aspMoon = chart.objects.getObjectsAspecting(moon, [60, 90, 120, 180]) aspMercury = chart.objects.getObjectsAspecting(mercury, [60, 90, 120, 180]) _set = _merge(aspMoon, aspMercury) factors.append(['Planets Asp Moon or Mercury', _set]) return factors
def line_content_counts_as_uncovered_manual(content: str) -> bool:
    """
    Args:
        content: A line with indentation and tail comments/space removed.

    Returns:
        Whether the line could be included in the coverage report.
    """
    # Omit empty lines.
    if not content:
        return False

    # Omit declarations. Require the keyword to be a whole word followed by
    # a space so that identifiers merely starting with 'def'/'class'
    # (e.g. "defaults:" or "classify(x):") are not mistaken for declarations.
    for keyword in ['def ', 'class ']:
        if content.startswith(keyword) and content.endswith(':'):
            return False

    # TODO: multiline comments, multiline strings, etc, etc.
    return True
def function[line_content_counts_as_uncovered_manual, parameter[content]]: constant[ Args: content: A line with indentation and tail comments/space removed. Returns: Whether the line could be included in the coverage report. ] if <ast.UnaryOp object at 0x7da1b1c1b190> begin[:] return[constant[False]] for taget[name[keyword]] in starred[list[[<ast.Constant object at 0x7da1b1c1a6b0>, <ast.Constant object at 0x7da1b1c1aa10>]]] begin[:] if <ast.BoolOp object at 0x7da1b1c1b3a0> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[line_content_counts_as_uncovered_manual] ( identifier[content] : identifier[str] )-> identifier[bool] : literal[string] keyword[if] keyword[not] identifier[content] : keyword[return] keyword[False] keyword[for] identifier[keyword] keyword[in] [ literal[string] , literal[string] ]: keyword[if] identifier[content] . identifier[startswith] ( identifier[keyword] ) keyword[and] identifier[content] . identifier[endswith] ( literal[string] ): keyword[return] keyword[False] keyword[return] keyword[True]
def line_content_counts_as_uncovered_manual(content: str) -> bool: """ Args: content: A line with indentation and tail comments/space removed. Returns: Whether the line could be included in the coverage report. """ # Omit empty lines. if not content: return False # depends on [control=['if'], data=[]] # Omit declarations. for keyword in ['def', 'class']: if content.startswith(keyword) and content.endswith(':'): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['keyword']] # TODO: multiline comments, multiline strings, etc, etc. return True
def RetryUpload(self, job, job_id, error):
    """Retry the BigQuery upload job.

    Using the same job id protects us from duplicating data on the server. If we
    fail all of our retries we raise.

    Args:
      job: BigQuery job object
      job_id: ID string for this upload job
      error: errors.HttpError object from the first error

    Returns:
      API response object on success, None on failure

    Raises:
      BigQueryJobUploadError: if we can't get the bigquery job started after
        retry_max_attempts
    """
    # Guard clause: a non-retryable initial error means we give up immediately.
    if not self.IsErrorRetryable(error):
        raise BigQueryJobUploadError("Can't retry error code %s. Giving up"
                                     " on job: %s." % (error.resp.status,
                                                       job_id))

    retry_count = 0
    sleep_interval = config.CONFIG["BigQuery.retry_interval"]
    while retry_count < config.CONFIG["BigQuery.retry_max_attempts"]:
        # Back off before every attempt; the interval grows multiplicatively
        # on each retryable failure.
        time.sleep(sleep_interval.seconds)
        logging.info("Retrying job_id: %s", job_id)
        retry_count += 1

        try:
            return job.execute()
        except errors.HttpError as e:
            if not self.IsErrorRetryable(e):
                raise BigQueryJobUploadError("Can't retry error code %s."
                                             " Giving up on job: %s." %
                                             (e.resp.status, job_id))
            sleep_interval *= config.CONFIG["BigQuery.retry_multiplier"]
            logging.exception("Error with job: %s, will retry in %s", job_id,
                              sleep_interval)

    raise BigQueryJobUploadError(
        "Giving up on job:%s after %s retries." % (job_id, retry_count))
def function[RetryUpload, parameter[self, job, job_id, error]]: constant[Retry the BigQuery upload job. Using the same job id protects us from duplicating data on the server. If we fail all of our retries we raise. Args: job: BigQuery job object job_id: ID string for this upload job error: errors.HttpError object from the first error Returns: API response object on success, None on failure Raises: BigQueryJobUploadError: if we can't get the bigquery job started after retry_max_attempts ] if call[name[self].IsErrorRetryable, parameter[name[error]]] begin[:] variable[retry_count] assign[=] constant[0] variable[sleep_interval] assign[=] call[name[config].CONFIG][constant[BigQuery.retry_interval]] while compare[name[retry_count] less[<] call[name[config].CONFIG][constant[BigQuery.retry_max_attempts]]] begin[:] call[name[time].sleep, parameter[name[sleep_interval].seconds]] call[name[logging].info, parameter[constant[Retrying job_id: %s], name[job_id]]] <ast.AugAssign object at 0x7da1b1b4b880> <ast.Try object at 0x7da1b1b294e0> <ast.Raise object at 0x7da1b1b4a170>
keyword[def] identifier[RetryUpload] ( identifier[self] , identifier[job] , identifier[job_id] , identifier[error] ): literal[string] keyword[if] identifier[self] . identifier[IsErrorRetryable] ( identifier[error] ): identifier[retry_count] = literal[int] identifier[sleep_interval] = identifier[config] . identifier[CONFIG] [ literal[string] ] keyword[while] identifier[retry_count] < identifier[config] . identifier[CONFIG] [ literal[string] ]: identifier[time] . identifier[sleep] ( identifier[sleep_interval] . identifier[seconds] ) identifier[logging] . identifier[info] ( literal[string] , identifier[job_id] ) identifier[retry_count] += literal[int] keyword[try] : identifier[response] = identifier[job] . identifier[execute] () keyword[return] identifier[response] keyword[except] identifier[errors] . identifier[HttpError] keyword[as] identifier[e] : keyword[if] identifier[self] . identifier[IsErrorRetryable] ( identifier[e] ): identifier[sleep_interval] *= identifier[config] . identifier[CONFIG] [ literal[string] ] identifier[logging] . identifier[exception] ( literal[string] , identifier[job_id] , identifier[sleep_interval] ) keyword[else] : keyword[raise] identifier[BigQueryJobUploadError] ( literal[string] literal[string] %( identifier[e] . identifier[resp] . identifier[status] , identifier[job_id] )) keyword[else] : keyword[raise] identifier[BigQueryJobUploadError] ( literal[string] literal[string] %( identifier[error] . identifier[resp] . identifier[status] , identifier[job_id] )) keyword[raise] identifier[BigQueryJobUploadError] ( literal[string] %( identifier[job_id] , identifier[retry_count] ))
def RetryUpload(self, job, job_id, error): """Retry the BigQuery upload job. Using the same job id protects us from duplicating data on the server. If we fail all of our retries we raise. Args: job: BigQuery job object job_id: ID string for this upload job error: errors.HttpError object from the first error Returns: API response object on success, None on failure Raises: BigQueryJobUploadError: if we can't get the bigquery job started after retry_max_attempts """ if self.IsErrorRetryable(error): retry_count = 0 sleep_interval = config.CONFIG['BigQuery.retry_interval'] while retry_count < config.CONFIG['BigQuery.retry_max_attempts']: time.sleep(sleep_interval.seconds) logging.info('Retrying job_id: %s', job_id) retry_count += 1 try: response = job.execute() return response # depends on [control=['try'], data=[]] except errors.HttpError as e: if self.IsErrorRetryable(e): sleep_interval *= config.CONFIG['BigQuery.retry_multiplier'] logging.exception('Error with job: %s, will retry in %s', job_id, sleep_interval) # depends on [control=['if'], data=[]] else: raise BigQueryJobUploadError("Can't retry error code %s. Giving up on job: %s." % (e.resp.status, job_id)) # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=['retry_count']] # depends on [control=['if'], data=[]] else: raise BigQueryJobUploadError("Can't retry error code %s. Giving up on job: %s." % (error.resp.status, job_id)) raise BigQueryJobUploadError('Giving up on job:%s after %s retries.' % (job_id, retry_count))
def Ravipudi_Godbold(m, x, D, rhol, rhog, Cpl, kl, mug, mu_b, mu_w=None):
    r'''Calculates the two-phase non-boiling heat transfer coefficient of
    a liquid and gas flowing inside a tube of any inclination, as in [1]_
    and reviewed in [2]_.

    .. math::
        Nu = \frac{h_{TP} D}{k_l} = 0.56 \left(\frac{V_{gs}}{V_{ls}}
        \right)^{0.3}\left(\frac{\mu_g}{\mu_l}\right)^{0.2} Re_{ls}^{0.6}
        Pr_l^{1/3}\left(\frac{\mu_b}{\mu_w}\right)^{0.14}

    Parameters
    ----------
    m : float
        Mass flow rate [kg/s]
    x : float
        Quality at the specific tube interval [-]
    D : float
        Diameter of the tube [m]
    rhol : float
        Density of the liquid [kg/m^3]
    rhog : float
        Density of the gas [kg/m^3]
    Cpl : float
        Constant-pressure heat capacity of liquid [J/kg/K]
    kl : float
        Thermal conductivity of liquid [W/m/K]
    mug : float
        Viscosity of gas [Pa*s]
    mu_b : float
        Viscosity of liquid at bulk conditions (average of inlet/outlet
        temperature) [Pa*s]
    mu_w : float, optional
        Viscosity of liquid at wall temperature [Pa*s]

    Returns
    -------
    h : float
        Heat transfer coefficient [W/m^2/K]

    Notes
    -----
    If the viscosity at the wall temperature is not given, the liquid viscosity
    correction is not applied.

    Developed with a vertical pipe, superficial gas/liquid velocity ratios of
    1-90, in the froth regime, and for fluid mixtures of air and water,
    toluene, benzene, and methanol.

    Examples
    --------
    >>> Ravipudi_Godbold(m=1, x=.9, D=.3, rhol=1000, rhog=2.5, Cpl=2300, kl=.6, mug=1E-5, mu_b=1E-3, mu_w=1.2E-3)
    299.3796286459285

    References
    ----------
    .. [1] Ravipudi, S., and Godbold, T., The Effect of Mass Transfer on Heat
       Transfer Rates for Two-Phase Flow in a Vertical Pipe, Proceedings 6th
       International Heat Transfer Conference, Toronto, V. 1, p. 505-510, 1978.
    .. [2] Dongwoo Kim, Venkata K. Ryali, Afshin J. Ghajar, Ronald L.
       Dougherty. "Comparison of 20 Two-Phase Heat Transfer Correlations with
       Seven Sets of Experimental Data, Including Flow Pattern and Tube
       Inclination Effects." Heat Transfer Engineering 20, no. 1 (February 1,
       1999): 15-40. doi:10.1080/014576399271691.
    '''
    # Superficial velocities of each phase over the full tube cross-section.
    Vgs = m*x/(rhog*pi/4*D**2)
    Vls = m*(1 - x)/(rhol*pi/4*D**2)
    # Liquid-only Prandtl and superficial-liquid Reynolds numbers.
    Prl = Prandtl(Cp=Cpl, mu=mu_b, k=kl)
    Rels = D*Vls*rhol/mu_b
    Nu = 0.56*(Vgs/Vls)**0.3*(mug/mu_b)**0.2*Rels**0.6*Prl**(1/3.)
    # Optional Sieder-Tate-style wall viscosity correction.
    if mu_w:
        Nu *= (mu_b/mu_w)**0.14
    # Convert Nusselt number to a heat transfer coefficient.
    return Nu*kl/D
def function[Ravipudi_Godbold, parameter[m, x, D, rhol, rhog, Cpl, kl, mug, mu_b, mu_w]]: constant[Calculates the two-phase non-boiling heat transfer coefficient of a liquid and gas flowing inside a tube of any inclination, as in [1]_ and reviewed in [2]_. .. math:: Nu = \frac{h_{TP} D}{k_l} = 0.56 \left(\frac{V_{gs}}{V_{ls}} \right)^{0.3}\left(\frac{\mu_g}{\mu_l}\right)^{0.2} Re_{ls}^{0.6} Pr_l^{1/3}\left(\frac{\mu_b}{\mu_w}\right)^{0.14} Parameters ---------- m : float Mass flow rate [kg/s] x : float Quality at the specific tube interval [-] D : float Diameter of the tube [m] rhol : float Density of the liquid [kg/m^3] rhog : float Density of the gas [kg/m^3] Cpl : float Constant-pressure heat capacity of liquid [J/kg/K] kl : float Thermal conductivity of liquid [W/m/K] mug : float Viscosity of gas [Pa*s] mu_b : float Viscosity of liquid at bulk conditions (average of inlet/outlet temperature) [Pa*s] mu_w : float, optional Viscosity of liquid at wall temperature [Pa*s] Returns ------- h : float Heat transfer coefficient [W/m^2/K] Notes ----- If the viscosity at the wall temperature is not given, the liquid viscosity correction is not applied. Developed with a vertical pipe, superficial gas/liquid velocity ratios of 1-90, in the froth regime, and for fluid mixtures of air and water, toluene, benzene, and methanol. Examples -------- >>> Ravipudi_Godbold(m=1, x=.9, D=.3, rhol=1000, rhog=2.5, Cpl=2300, kl=.6, mug=1E-5, mu_b=1E-3, mu_w=1.2E-3) 299.3796286459285 References ---------- .. [1] Ravipudi, S., and Godbold, T., The Effect of Mass Transfer on Heat Transfer Rates for Two-Phase Flow in a Vertical Pipe, Proceedings 6th International Heat Transfer Conference, Toronto, V. 1, p. 505-510, 1978. .. [2] Dongwoo Kim, Venkata K. Ryali, Afshin J. Ghajar, Ronald L. Dougherty. "Comparison of 20 Two-Phase Heat Transfer Correlations with Seven Sets of Experimental Data, Including Flow Pattern and Tube Inclination Effects." Heat Transfer Engineering 20, no. 
1 (February 1, 1999): 15-40. doi:10.1080/014576399271691. ] variable[Vgs] assign[=] binary_operation[binary_operation[name[m] * name[x]] / binary_operation[binary_operation[binary_operation[name[rhog] * name[pi]] / constant[4]] * binary_operation[name[D] ** constant[2]]]] variable[Vls] assign[=] binary_operation[binary_operation[name[m] * binary_operation[constant[1] - name[x]]] / binary_operation[binary_operation[binary_operation[name[rhol] * name[pi]] / constant[4]] * binary_operation[name[D] ** constant[2]]]] variable[Prl] assign[=] call[name[Prandtl], parameter[]] variable[Rels] assign[=] binary_operation[binary_operation[binary_operation[name[D] * name[Vls]] * name[rhol]] / name[mu_b]] variable[Nu] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[0.56] * binary_operation[binary_operation[name[Vgs] / name[Vls]] ** constant[0.3]]] * binary_operation[binary_operation[name[mug] / name[mu_b]] ** constant[0.2]]] * binary_operation[name[Rels] ** constant[0.6]]] * binary_operation[name[Prl] ** binary_operation[constant[1] / constant[3.0]]]] if name[mu_w] begin[:] <ast.AugAssign object at 0x7da2047e8070> return[binary_operation[binary_operation[name[Nu] * name[kl]] / name[D]]]
keyword[def] identifier[Ravipudi_Godbold] ( identifier[m] , identifier[x] , identifier[D] , identifier[rhol] , identifier[rhog] , identifier[Cpl] , identifier[kl] , identifier[mug] , identifier[mu_b] , identifier[mu_w] = keyword[None] ): literal[string] identifier[Vgs] = identifier[m] * identifier[x] /( identifier[rhog] * identifier[pi] / literal[int] * identifier[D] ** literal[int] ) identifier[Vls] = identifier[m] *( literal[int] - identifier[x] )/( identifier[rhol] * identifier[pi] / literal[int] * identifier[D] ** literal[int] ) identifier[Prl] = identifier[Prandtl] ( identifier[Cp] = identifier[Cpl] , identifier[mu] = identifier[mu_b] , identifier[k] = identifier[kl] ) identifier[Rels] = identifier[D] * identifier[Vls] * identifier[rhol] / identifier[mu_b] identifier[Nu] = literal[int] *( identifier[Vgs] / identifier[Vls] )** literal[int] *( identifier[mug] / identifier[mu_b] )** literal[int] * identifier[Rels] ** literal[int] * identifier[Prl] **( literal[int] / literal[int] ) keyword[if] identifier[mu_w] : identifier[Nu] *=( identifier[mu_b] / identifier[mu_w] )** literal[int] keyword[return] identifier[Nu] * identifier[kl] / identifier[D]
def Ravipudi_Godbold(m, x, D, rhol, rhog, Cpl, kl, mug, mu_b, mu_w=None): """Calculates the two-phase non-boiling heat transfer coefficient of a liquid and gas flowing inside a tube of any inclination, as in [1]_ and reviewed in [2]_. .. math:: Nu = \\frac{h_{TP} D}{k_l} = 0.56 \\left(\\frac{V_{gs}}{V_{ls}} \\right)^{0.3}\\left(\\frac{\\mu_g}{\\mu_l}\\right)^{0.2} Re_{ls}^{0.6} Pr_l^{1/3}\\left(\\frac{\\mu_b}{\\mu_w}\\right)^{0.14} Parameters ---------- m : float Mass flow rate [kg/s] x : float Quality at the specific tube interval [-] D : float Diameter of the tube [m] rhol : float Density of the liquid [kg/m^3] rhog : float Density of the gas [kg/m^3] Cpl : float Constant-pressure heat capacity of liquid [J/kg/K] kl : float Thermal conductivity of liquid [W/m/K] mug : float Viscosity of gas [Pa*s] mu_b : float Viscosity of liquid at bulk conditions (average of inlet/outlet temperature) [Pa*s] mu_w : float, optional Viscosity of liquid at wall temperature [Pa*s] Returns ------- h : float Heat transfer coefficient [W/m^2/K] Notes ----- If the viscosity at the wall temperature is not given, the liquid viscosity correction is not applied. Developed with a vertical pipe, superficial gas/liquid velocity ratios of 1-90, in the froth regime, and for fluid mixtures of air and water, toluene, benzene, and methanol. Examples -------- >>> Ravipudi_Godbold(m=1, x=.9, D=.3, rhol=1000, rhog=2.5, Cpl=2300, kl=.6, mug=1E-5, mu_b=1E-3, mu_w=1.2E-3) 299.3796286459285 References ---------- .. [1] Ravipudi, S., and Godbold, T., The Effect of Mass Transfer on Heat Transfer Rates for Two-Phase Flow in a Vertical Pipe, Proceedings 6th International Heat Transfer Conference, Toronto, V. 1, p. 505-510, 1978. .. [2] Dongwoo Kim, Venkata K. Ryali, Afshin J. Ghajar, Ronald L. Dougherty. "Comparison of 20 Two-Phase Heat Transfer Correlations with Seven Sets of Experimental Data, Including Flow Pattern and Tube Inclination Effects." Heat Transfer Engineering 20, no. 
1 (February 1, 1999): 15-40. doi:10.1080/014576399271691. """ Vgs = m * x / (rhog * pi / 4 * D ** 2) Vls = m * (1 - x) / (rhol * pi / 4 * D ** 2) Prl = Prandtl(Cp=Cpl, mu=mu_b, k=kl) Rels = D * Vls * rhol / mu_b Nu = 0.56 * (Vgs / Vls) ** 0.3 * (mug / mu_b) ** 0.2 * Rels ** 0.6 * Prl ** (1 / 3.0) if mu_w: Nu *= (mu_b / mu_w) ** 0.14 # depends on [control=['if'], data=[]] return Nu * kl / D
def _dispatch_handler(args, cell, parser, handler, cell_required=False, cell_prohibited=False): """ Makes sure cell magics include cell and line magics don't, before dispatching to handler. Args: args: the parsed arguments from the magic line. cell: the contents of the cell, if any. parser: the argument parser for <cmd>; used for error message. handler: the handler to call if the cell present/absent check passes. cell_required: True for cell magics, False for line magics that can't be cell magics. cell_prohibited: True for line magics, False for cell magics that can't be line magics. Returns: The result of calling the handler. Raises: Exception if the invocation is not valid. """ if cell_prohibited: if cell and len(cell.strip()): parser.print_help() raise Exception( 'Additional data is not supported with the %s command.' % parser.prog) return handler(args) if cell_required and not cell: parser.print_help() raise Exception('The %s command requires additional data' % parser.prog) return handler(args, cell)
def function[_dispatch_handler, parameter[args, cell, parser, handler, cell_required, cell_prohibited]]: constant[ Makes sure cell magics include cell and line magics don't, before dispatching to handler. Args: args: the parsed arguments from the magic line. cell: the contents of the cell, if any. parser: the argument parser for <cmd>; used for error message. handler: the handler to call if the cell present/absent check passes. cell_required: True for cell magics, False for line magics that can't be cell magics. cell_prohibited: True for line magics, False for cell magics that can't be line magics. Returns: The result of calling the handler. Raises: Exception if the invocation is not valid. ] if name[cell_prohibited] begin[:] if <ast.BoolOp object at 0x7da1b23461a0> begin[:] call[name[parser].print_help, parameter[]] <ast.Raise object at 0x7da1b2345240> return[call[name[handler], parameter[name[args]]]] if <ast.BoolOp object at 0x7da1b2347d90> begin[:] call[name[parser].print_help, parameter[]] <ast.Raise object at 0x7da2044c0700> return[call[name[handler], parameter[name[args], name[cell]]]]
keyword[def] identifier[_dispatch_handler] ( identifier[args] , identifier[cell] , identifier[parser] , identifier[handler] , identifier[cell_required] = keyword[False] , identifier[cell_prohibited] = keyword[False] ): literal[string] keyword[if] identifier[cell_prohibited] : keyword[if] identifier[cell] keyword[and] identifier[len] ( identifier[cell] . identifier[strip] ()): identifier[parser] . identifier[print_help] () keyword[raise] identifier[Exception] ( literal[string] % identifier[parser] . identifier[prog] ) keyword[return] identifier[handler] ( identifier[args] ) keyword[if] identifier[cell_required] keyword[and] keyword[not] identifier[cell] : identifier[parser] . identifier[print_help] () keyword[raise] identifier[Exception] ( literal[string] % identifier[parser] . identifier[prog] ) keyword[return] identifier[handler] ( identifier[args] , identifier[cell] )
def _dispatch_handler(args, cell, parser, handler, cell_required=False, cell_prohibited=False): """ Makes sure cell magics include cell and line magics don't, before dispatching to handler. Args: args: the parsed arguments from the magic line. cell: the contents of the cell, if any. parser: the argument parser for <cmd>; used for error message. handler: the handler to call if the cell present/absent check passes. cell_required: True for cell magics, False for line magics that can't be cell magics. cell_prohibited: True for line magics, False for cell magics that can't be line magics. Returns: The result of calling the handler. Raises: Exception if the invocation is not valid. """ if cell_prohibited: if cell and len(cell.strip()): parser.print_help() raise Exception('Additional data is not supported with the %s command.' % parser.prog) # depends on [control=['if'], data=[]] return handler(args) # depends on [control=['if'], data=[]] if cell_required and (not cell): parser.print_help() raise Exception('The %s command requires additional data' % parser.prog) # depends on [control=['if'], data=[]] return handler(args, cell)
def jupytext_cli(args=None): """Entry point for the jupytext script""" try: jupytext(args) except (ValueError, TypeError, IOError) as err: sys.stderr.write('[jupytext] Error: ' + str(err) + '\n') exit(1)
def function[jupytext_cli, parameter[args]]: constant[Entry point for the jupytext script] <ast.Try object at 0x7da18f58d4b0>
keyword[def] identifier[jupytext_cli] ( identifier[args] = keyword[None] ): literal[string] keyword[try] : identifier[jupytext] ( identifier[args] ) keyword[except] ( identifier[ValueError] , identifier[TypeError] , identifier[IOError] ) keyword[as] identifier[err] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] + identifier[str] ( identifier[err] )+ literal[string] ) identifier[exit] ( literal[int] )
def jupytext_cli(args=None): """Entry point for the jupytext script""" try: jupytext(args) # depends on [control=['try'], data=[]] except (ValueError, TypeError, IOError) as err: sys.stderr.write('[jupytext] Error: ' + str(err) + '\n') exit(1) # depends on [control=['except'], data=['err']]
def install_labels(cls, quiet=True, stdout=None): """ Setup labels with indexes and constraints for a given class :param cls: StructuredNode class :type: class :param quiet: (default true) enable standard output :param stdout: stdout stream :type: bool :return: None """ if not hasattr(cls, '__label__'): if not quiet: stdout.write(' ! Skipping class {0}.{1} is abstract\n'.format(cls.__module__, cls.__name__)) return for name, property in cls.defined_properties(aliases=False, rels=False).items(): db_property = property.db_property or name if property.index: if not quiet: stdout.write(' + Creating index {0} on label {1} for class {2}.{3}\n'.format( name, cls.__label__, cls.__module__, cls.__name__)) db.cypher_query("CREATE INDEX on :{0}({1}); ".format( cls.__label__, db_property)) elif property.unique_index: if not quiet: stdout.write(' + Creating unique constraint for {0} on label {1} for class {2}.{3}\n'.format( name, cls.__label__, cls.__module__, cls.__name__)) db.cypher_query("CREATE CONSTRAINT " "on (n:{0}) ASSERT n.{1} IS UNIQUE; ".format( cls.__label__, db_property))
def function[install_labels, parameter[cls, quiet, stdout]]: constant[ Setup labels with indexes and constraints for a given class :param cls: StructuredNode class :type: class :param quiet: (default true) enable standard output :param stdout: stdout stream :type: bool :return: None ] if <ast.UnaryOp object at 0x7da18eb57f70> begin[:] if <ast.UnaryOp object at 0x7da18eb54fd0> begin[:] call[name[stdout].write, parameter[call[constant[ ! Skipping class {0}.{1} is abstract ].format, parameter[name[cls].__module__, name[cls].__name__]]]] return[None] for taget[tuple[[<ast.Name object at 0x7da18f58c2b0>, <ast.Name object at 0x7da18f58c460>]]] in starred[call[call[name[cls].defined_properties, parameter[]].items, parameter[]]] begin[:] variable[db_property] assign[=] <ast.BoolOp object at 0x7da18f58d480> if name[property].index begin[:] if <ast.UnaryOp object at 0x7da18f58d810> begin[:] call[name[stdout].write, parameter[call[constant[ + Creating index {0} on label {1} for class {2}.{3} ].format, parameter[name[name], name[cls].__label__, name[cls].__module__, name[cls].__name__]]]] call[name[db].cypher_query, parameter[call[constant[CREATE INDEX on :{0}({1}); ].format, parameter[name[cls].__label__, name[db_property]]]]]
keyword[def] identifier[install_labels] ( identifier[cls] , identifier[quiet] = keyword[True] , identifier[stdout] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ): keyword[if] keyword[not] identifier[quiet] : identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__module__] , identifier[cls] . identifier[__name__] )) keyword[return] keyword[for] identifier[name] , identifier[property] keyword[in] identifier[cls] . identifier[defined_properties] ( identifier[aliases] = keyword[False] , identifier[rels] = keyword[False] ). identifier[items] (): identifier[db_property] = identifier[property] . identifier[db_property] keyword[or] identifier[name] keyword[if] identifier[property] . identifier[index] : keyword[if] keyword[not] identifier[quiet] : identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[name] , identifier[cls] . identifier[__label__] , identifier[cls] . identifier[__module__] , identifier[cls] . identifier[__name__] )) identifier[db] . identifier[cypher_query] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__label__] , identifier[db_property] )) keyword[elif] identifier[property] . identifier[unique_index] : keyword[if] keyword[not] identifier[quiet] : identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[name] , identifier[cls] . identifier[__label__] , identifier[cls] . identifier[__module__] , identifier[cls] . identifier[__name__] )) identifier[db] . identifier[cypher_query] ( literal[string] literal[string] . identifier[format] ( identifier[cls] . identifier[__label__] , identifier[db_property] ))
def install_labels(cls, quiet=True, stdout=None): """ Setup labels with indexes and constraints for a given class :param cls: StructuredNode class :type: class :param quiet: (default true) enable standard output :param stdout: stdout stream :type: bool :return: None """ if not hasattr(cls, '__label__'): if not quiet: stdout.write(' ! Skipping class {0}.{1} is abstract\n'.format(cls.__module__, cls.__name__)) # depends on [control=['if'], data=[]] return # depends on [control=['if'], data=[]] for (name, property) in cls.defined_properties(aliases=False, rels=False).items(): db_property = property.db_property or name if property.index: if not quiet: stdout.write(' + Creating index {0} on label {1} for class {2}.{3}\n'.format(name, cls.__label__, cls.__module__, cls.__name__)) # depends on [control=['if'], data=[]] db.cypher_query('CREATE INDEX on :{0}({1}); '.format(cls.__label__, db_property)) # depends on [control=['if'], data=[]] elif property.unique_index: if not quiet: stdout.write(' + Creating unique constraint for {0} on label {1} for class {2}.{3}\n'.format(name, cls.__label__, cls.__module__, cls.__name__)) # depends on [control=['if'], data=[]] db.cypher_query('CREATE CONSTRAINT on (n:{0}) ASSERT n.{1} IS UNIQUE; '.format(cls.__label__, db_property)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def _get_prtfmt(self, objgowr, verbose): """Get print format containing markers.""" prtfmt = objgowr.get_prtfmt('fmt') prtfmt = prtfmt.replace('# ', '') # print('PPPPPPPPPPP', prtfmt) if not verbose: prtfmt = prtfmt.replace('{hdr1usr01:2}', '') prtfmt = prtfmt.replace('{childcnt:3} L{level:02} ', '') prtfmt = prtfmt.replace('{num_usrgos:>4} uGOs ', '') prtfmt = prtfmt.replace('{D1:5} {REL} {rel}', '') prtfmt = prtfmt.replace('R{reldepth:02} ', '') # print('PPPPPPPPPPP', prtfmt) marks = ''.join(['{{{}}}'.format(nt.hdr) for nt in self.go_ntsets]) return '{MARKS} {PRTFMT}'.format(MARKS=marks, PRTFMT=prtfmt)
def function[_get_prtfmt, parameter[self, objgowr, verbose]]: constant[Get print format containing markers.] variable[prtfmt] assign[=] call[name[objgowr].get_prtfmt, parameter[constant[fmt]]] variable[prtfmt] assign[=] call[name[prtfmt].replace, parameter[constant[# ], constant[]]] if <ast.UnaryOp object at 0x7da18c4ce050> begin[:] variable[prtfmt] assign[=] call[name[prtfmt].replace, parameter[constant[{hdr1usr01:2}], constant[]]] variable[prtfmt] assign[=] call[name[prtfmt].replace, parameter[constant[{childcnt:3} L{level:02} ], constant[]]] variable[prtfmt] assign[=] call[name[prtfmt].replace, parameter[constant[{num_usrgos:>4} uGOs ], constant[]]] variable[prtfmt] assign[=] call[name[prtfmt].replace, parameter[constant[{D1:5} {REL} {rel}], constant[]]] variable[prtfmt] assign[=] call[name[prtfmt].replace, parameter[constant[R{reldepth:02} ], constant[]]] variable[marks] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da2054a6b30>]] return[call[constant[{MARKS} {PRTFMT}].format, parameter[]]]
keyword[def] identifier[_get_prtfmt] ( identifier[self] , identifier[objgowr] , identifier[verbose] ): literal[string] identifier[prtfmt] = identifier[objgowr] . identifier[get_prtfmt] ( literal[string] ) identifier[prtfmt] = identifier[prtfmt] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] keyword[not] identifier[verbose] : identifier[prtfmt] = identifier[prtfmt] . identifier[replace] ( literal[string] , literal[string] ) identifier[prtfmt] = identifier[prtfmt] . identifier[replace] ( literal[string] , literal[string] ) identifier[prtfmt] = identifier[prtfmt] . identifier[replace] ( literal[string] , literal[string] ) identifier[prtfmt] = identifier[prtfmt] . identifier[replace] ( literal[string] , literal[string] ) identifier[prtfmt] = identifier[prtfmt] . identifier[replace] ( literal[string] , literal[string] ) identifier[marks] = literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[nt] . identifier[hdr] ) keyword[for] identifier[nt] keyword[in] identifier[self] . identifier[go_ntsets] ]) keyword[return] literal[string] . identifier[format] ( identifier[MARKS] = identifier[marks] , identifier[PRTFMT] = identifier[prtfmt] )
def _get_prtfmt(self, objgowr, verbose): """Get print format containing markers.""" prtfmt = objgowr.get_prtfmt('fmt') prtfmt = prtfmt.replace('# ', '') # print('PPPPPPPPPPP', prtfmt) if not verbose: prtfmt = prtfmt.replace('{hdr1usr01:2}', '') prtfmt = prtfmt.replace('{childcnt:3} L{level:02} ', '') prtfmt = prtfmt.replace('{num_usrgos:>4} uGOs ', '') prtfmt = prtfmt.replace('{D1:5} {REL} {rel}', '') prtfmt = prtfmt.replace('R{reldepth:02} ', '') # depends on [control=['if'], data=[]] # print('PPPPPPPPPPP', prtfmt) marks = ''.join(['{{{}}}'.format(nt.hdr) for nt in self.go_ntsets]) return '{MARKS} {PRTFMT}'.format(MARKS=marks, PRTFMT=prtfmt)
def list_endpoint_groups(self, retrieve_all=True, **_params): """Fetches a list of all VPN endpoint groups for a project.""" return self.list('endpoint_groups', self.endpoint_groups_path, retrieve_all, **_params)
def function[list_endpoint_groups, parameter[self, retrieve_all]]: constant[Fetches a list of all VPN endpoint groups for a project.] return[call[name[self].list, parameter[constant[endpoint_groups], name[self].endpoint_groups_path, name[retrieve_all]]]]
keyword[def] identifier[list_endpoint_groups] ( identifier[self] , identifier[retrieve_all] = keyword[True] ,** identifier[_params] ): literal[string] keyword[return] identifier[self] . identifier[list] ( literal[string] , identifier[self] . identifier[endpoint_groups_path] , identifier[retrieve_all] ,** identifier[_params] )
def list_endpoint_groups(self, retrieve_all=True, **_params): """Fetches a list of all VPN endpoint groups for a project.""" return self.list('endpoint_groups', self.endpoint_groups_path, retrieve_all, **_params)
def load_template(self, name): """Attempts to load the relevant template from our templating system/environment. Args: name: The name of the template to load. Return: On success, a StatikTemplate object that can be used to render content. """ # hopefully speeds up loading of templates a little, especially when loaded multiple times if name in self.cached_templates: logger.debug("Using cached template: %s", name) return self.cached_templates[name] logger.debug("Attempting to find template by name: %s", name) name_with_ext, provider_name, base_path = self.find_template_details(name) full_path = None if base_path is not None: full_path = os.path.join(base_path, name_with_ext) # load it with the relevant provider template = template_exception_handler( lambda: self.get_provider(provider_name).load_template( name_with_ext, full_path=full_path ), self.error_context, filename=full_path ) # cache it for potential later use self.cached_templates[name] = template return template
def function[load_template, parameter[self, name]]: constant[Attempts to load the relevant template from our templating system/environment. Args: name: The name of the template to load. Return: On success, a StatikTemplate object that can be used to render content. ] if compare[name[name] in name[self].cached_templates] begin[:] call[name[logger].debug, parameter[constant[Using cached template: %s], name[name]]] return[call[name[self].cached_templates][name[name]]] call[name[logger].debug, parameter[constant[Attempting to find template by name: %s], name[name]]] <ast.Tuple object at 0x7da18f09cd30> assign[=] call[name[self].find_template_details, parameter[name[name]]] variable[full_path] assign[=] constant[None] if compare[name[base_path] is_not constant[None]] begin[:] variable[full_path] assign[=] call[name[os].path.join, parameter[name[base_path], name[name_with_ext]]] variable[template] assign[=] call[name[template_exception_handler], parameter[<ast.Lambda object at 0x7da18f09dba0>, name[self].error_context]] call[name[self].cached_templates][name[name]] assign[=] name[template] return[name[template]]
keyword[def] identifier[load_template] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[self] . identifier[cached_templates] : identifier[logger] . identifier[debug] ( literal[string] , identifier[name] ) keyword[return] identifier[self] . identifier[cached_templates] [ identifier[name] ] identifier[logger] . identifier[debug] ( literal[string] , identifier[name] ) identifier[name_with_ext] , identifier[provider_name] , identifier[base_path] = identifier[self] . identifier[find_template_details] ( identifier[name] ) identifier[full_path] = keyword[None] keyword[if] identifier[base_path] keyword[is] keyword[not] keyword[None] : identifier[full_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_path] , identifier[name_with_ext] ) identifier[template] = identifier[template_exception_handler] ( keyword[lambda] : identifier[self] . identifier[get_provider] ( identifier[provider_name] ). identifier[load_template] ( identifier[name_with_ext] , identifier[full_path] = identifier[full_path] ), identifier[self] . identifier[error_context] , identifier[filename] = identifier[full_path] ) identifier[self] . identifier[cached_templates] [ identifier[name] ]= identifier[template] keyword[return] identifier[template]
def load_template(self, name): """Attempts to load the relevant template from our templating system/environment. Args: name: The name of the template to load. Return: On success, a StatikTemplate object that can be used to render content. """ # hopefully speeds up loading of templates a little, especially when loaded multiple times if name in self.cached_templates: logger.debug('Using cached template: %s', name) return self.cached_templates[name] # depends on [control=['if'], data=['name']] logger.debug('Attempting to find template by name: %s', name) (name_with_ext, provider_name, base_path) = self.find_template_details(name) full_path = None if base_path is not None: full_path = os.path.join(base_path, name_with_ext) # depends on [control=['if'], data=['base_path']] # load it with the relevant provider template = template_exception_handler(lambda : self.get_provider(provider_name).load_template(name_with_ext, full_path=full_path), self.error_context, filename=full_path) # cache it for potential later use self.cached_templates[name] = template return template
def add_attr2fields(self, attr_name, attr_val, fields=[], exclude=[], include_all_if_empty=True): """ add attr to fields """ for f in self.filter_fields(fields, exclude, include_all_if_empty): f = self.fields[f.name] org_val = f.widget.attrs.get(attr_name, '') f.widget.attrs[attr_name] = '%s %s' % (org_val, attr_val) if org_val else attr_val
def function[add_attr2fields, parameter[self, attr_name, attr_val, fields, exclude, include_all_if_empty]]: constant[ add attr to fields ] for taget[name[f]] in starred[call[name[self].filter_fields, parameter[name[fields], name[exclude], name[include_all_if_empty]]]] begin[:] variable[f] assign[=] call[name[self].fields][name[f].name] variable[org_val] assign[=] call[name[f].widget.attrs.get, parameter[name[attr_name], constant[]]] call[name[f].widget.attrs][name[attr_name]] assign[=] <ast.IfExp object at 0x7da20c76d600>
keyword[def] identifier[add_attr2fields] ( identifier[self] , identifier[attr_name] , identifier[attr_val] , identifier[fields] =[], identifier[exclude] =[], identifier[include_all_if_empty] = keyword[True] ): literal[string] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[filter_fields] ( identifier[fields] , identifier[exclude] , identifier[include_all_if_empty] ): identifier[f] = identifier[self] . identifier[fields] [ identifier[f] . identifier[name] ] identifier[org_val] = identifier[f] . identifier[widget] . identifier[attrs] . identifier[get] ( identifier[attr_name] , literal[string] ) identifier[f] . identifier[widget] . identifier[attrs] [ identifier[attr_name] ]= literal[string] %( identifier[org_val] , identifier[attr_val] ) keyword[if] identifier[org_val] keyword[else] identifier[attr_val]
def add_attr2fields(self, attr_name, attr_val, fields=[], exclude=[], include_all_if_empty=True): """ add attr to fields """ for f in self.filter_fields(fields, exclude, include_all_if_empty): f = self.fields[f.name] org_val = f.widget.attrs.get(attr_name, '') f.widget.attrs[attr_name] = '%s %s' % (org_val, attr_val) if org_val else attr_val # depends on [control=['for'], data=['f']]
def _pop_claims_from_payload(self): """ Check for registered claims in the payload and move them to the registered_claims property, overwriting any extant claims. """ claims_in_payload = [k for k in self.payload.keys() if k in registered_claims.values()] for name in claims_in_payload: self.registered_claims[name] = self.payload.pop(name)
def function[_pop_claims_from_payload, parameter[self]]: constant[ Check for registered claims in the payload and move them to the registered_claims property, overwriting any extant claims. ] variable[claims_in_payload] assign[=] <ast.ListComp object at 0x7da20c6ab760> for taget[name[name]] in starred[name[claims_in_payload]] begin[:] call[name[self].registered_claims][name[name]] assign[=] call[name[self].payload.pop, parameter[name[name]]]
keyword[def] identifier[_pop_claims_from_payload] ( identifier[self] ): literal[string] identifier[claims_in_payload] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[payload] . identifier[keys] () keyword[if] identifier[k] keyword[in] identifier[registered_claims] . identifier[values] ()] keyword[for] identifier[name] keyword[in] identifier[claims_in_payload] : identifier[self] . identifier[registered_claims] [ identifier[name] ]= identifier[self] . identifier[payload] . identifier[pop] ( identifier[name] )
def _pop_claims_from_payload(self): """ Check for registered claims in the payload and move them to the registered_claims property, overwriting any extant claims. """ claims_in_payload = [k for k in self.payload.keys() if k in registered_claims.values()] for name in claims_in_payload: self.registered_claims[name] = self.payload.pop(name) # depends on [control=['for'], data=['name']]
def get(self, key, default=None, remote=False): """ Overrides dictionary get behavior to retrieve database objects with support for returning a default. If remote=True then a remote request is made to retrieve the database from the remote server, otherwise the client's locally cached database object is returned. :param str key: Database name used to retrieve the database object. :param str default: Default database name. Defaults to None. :param bool remote: Dictates whether the locally cached database is returned or a remote request is made to retrieve the database from the server. Defaults to False. :returns: Database object """ if not remote: return super(CouchDB, self).get(key, default) db = self._DATABASE_CLASS(self, key) if db.exists(): super(CouchDB, self).__setitem__(key, db) return db return default
def function[get, parameter[self, key, default, remote]]: constant[ Overrides dictionary get behavior to retrieve database objects with support for returning a default. If remote=True then a remote request is made to retrieve the database from the remote server, otherwise the client's locally cached database object is returned. :param str key: Database name used to retrieve the database object. :param str default: Default database name. Defaults to None. :param bool remote: Dictates whether the locally cached database is returned or a remote request is made to retrieve the database from the server. Defaults to False. :returns: Database object ] if <ast.UnaryOp object at 0x7da20e74baf0> begin[:] return[call[call[name[super], parameter[name[CouchDB], name[self]]].get, parameter[name[key], name[default]]]] variable[db] assign[=] call[name[self]._DATABASE_CLASS, parameter[name[self], name[key]]] if call[name[db].exists, parameter[]] begin[:] call[call[name[super], parameter[name[CouchDB], name[self]]].__setitem__, parameter[name[key], name[db]]] return[name[db]] return[name[default]]
keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[default] = keyword[None] , identifier[remote] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[remote] : keyword[return] identifier[super] ( identifier[CouchDB] , identifier[self] ). identifier[get] ( identifier[key] , identifier[default] ) identifier[db] = identifier[self] . identifier[_DATABASE_CLASS] ( identifier[self] , identifier[key] ) keyword[if] identifier[db] . identifier[exists] (): identifier[super] ( identifier[CouchDB] , identifier[self] ). identifier[__setitem__] ( identifier[key] , identifier[db] ) keyword[return] identifier[db] keyword[return] identifier[default]
def get(self, key, default=None, remote=False): """ Overrides dictionary get behavior to retrieve database objects with support for returning a default. If remote=True then a remote request is made to retrieve the database from the remote server, otherwise the client's locally cached database object is returned. :param str key: Database name used to retrieve the database object. :param str default: Default database name. Defaults to None. :param bool remote: Dictates whether the locally cached database is returned or a remote request is made to retrieve the database from the server. Defaults to False. :returns: Database object """ if not remote: return super(CouchDB, self).get(key, default) # depends on [control=['if'], data=[]] db = self._DATABASE_CLASS(self, key) if db.exists(): super(CouchDB, self).__setitem__(key, db) return db # depends on [control=['if'], data=[]] return default
def get_common_xs(entries): """Return a mask of where there are Xs in all routing table entries. For example ``01XX`` and ``XX1X`` have common Xs in the LSB only, for this input this method would return ``0b0001``:: >>> from rig.routing_table import RoutingTableEntry >>> entries = [ ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100), # 01XX ... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b0010), # XX1X ... ] >>> print("{:#06b}".format(get_common_xs(entries))) 0b0001 """ # Determine where there are never 1s in the key and mask key = 0x00000000 mask = 0x00000000 for entry in entries: key |= entry.key mask |= entry.mask # Where there are never 1s in the key or the mask there are Xs which are # common to all entries. return (~(key | mask)) & 0xffffffff
def function[get_common_xs, parameter[entries]]: constant[Return a mask of where there are Xs in all routing table entries. For example ``01XX`` and ``XX1X`` have common Xs in the LSB only, for this input this method would return ``0b0001``:: >>> from rig.routing_table import RoutingTableEntry >>> entries = [ ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100), # 01XX ... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b0010), # XX1X ... ] >>> print("{:#06b}".format(get_common_xs(entries))) 0b0001 ] variable[key] assign[=] constant[0] variable[mask] assign[=] constant[0] for taget[name[entry]] in starred[name[entries]] begin[:] <ast.AugAssign object at 0x7da1b195feb0> <ast.AugAssign object at 0x7da1b195fa30> return[binary_operation[<ast.UnaryOp object at 0x7da1b195f9d0> <ast.BitAnd object at 0x7da2590d6b60> constant[4294967295]]]
keyword[def] identifier[get_common_xs] ( identifier[entries] ): literal[string] identifier[key] = literal[int] identifier[mask] = literal[int] keyword[for] identifier[entry] keyword[in] identifier[entries] : identifier[key] |= identifier[entry] . identifier[key] identifier[mask] |= identifier[entry] . identifier[mask] keyword[return] (~( identifier[key] | identifier[mask] ))& literal[int]
def get_common_xs(entries): """Return a mask of where there are Xs in all routing table entries. For example ``01XX`` and ``XX1X`` have common Xs in the LSB only, for this input this method would return ``0b0001``:: >>> from rig.routing_table import RoutingTableEntry >>> entries = [ ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100), # 01XX ... RoutingTableEntry(set(), 0b0010, 0xfffffff0 | 0b0010), # XX1X ... ] >>> print("{:#06b}".format(get_common_xs(entries))) 0b0001 """ # Determine where there are never 1s in the key and mask key = 0 mask = 0 for entry in entries: key |= entry.key mask |= entry.mask # depends on [control=['for'], data=['entry']] # Where there are never 1s in the key or the mask there are Xs which are # common to all entries. return ~(key | mask) & 4294967295
def reset(self):
    """Ask the PCAN driver to reset the bus after an error.

    :return: ``True`` when the driver reports ``PCAN_ERROR_OK``,
        ``False`` for any other status code.
    """
    # The driver call returns a PCAN status code; success is PCAN_ERROR_OK.
    return self.m_objPCANBasic.Reset(self.m_PcanHandle) == PCAN_ERROR_OK
def function[reset, parameter[self]]: constant[ Command the PCAN driver to reset the bus after an error. ] variable[status] assign[=] call[name[self].m_objPCANBasic.Reset, parameter[name[self].m_PcanHandle]] return[compare[name[status] equal[==] name[PCAN_ERROR_OK]]]
keyword[def] identifier[reset] ( identifier[self] ): literal[string] identifier[status] = identifier[self] . identifier[m_objPCANBasic] . identifier[Reset] ( identifier[self] . identifier[m_PcanHandle] ) keyword[return] identifier[status] == identifier[PCAN_ERROR_OK]
def reset(self): """ Command the PCAN driver to reset the bus after an error. """ status = self.m_objPCANBasic.Reset(self.m_PcanHandle) return status == PCAN_ERROR_OK
def _check_valid_standard_units(self, ds, variable_name):
    '''
    Checks that the variable's units are appropriate for the standard name
    according to the CF standard name table and coordinate sections in CF 1.6

    :param netCDF4.Dataset ds: An open netCDF dataset
    :param str variable_name: Name of the variable to be checked
    :return: the accumulated check results (``TestCtx.to_result()``)
    '''
    variable = ds.variables[variable_name]
    # Both attributes are optional; a missing attribute flows through as
    # None and simply fails the convertibility checks below.
    units = getattr(variable, 'units', None)
    standard_name = getattr(variable, 'standard_name', None)
    valid_standard_units = TestCtx(BaseCheck.HIGH, self.section_titles["3.1"])
    # If the variable is supposed to be dimensionless, it automatically passes
    std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root,
                                                                         standard_name)
    # Split off any standard-name modifier (e.g. "... standard_error");
    # only the base name is looked up in the standard name table.
    standard_name, standard_name_modifier = self._split_standard_name(standard_name)
    standard_entry = self._std_names.get(standard_name, None)
    if standard_entry is not None:
        canonical_units = standard_entry.canonical_units
    else:
        # Any unit comparisons with None returns False
        canonical_units = None
    # Other standard_name modifiers have the same units as the
    # unmodified standard name or are not checked for units.
    if standard_name_modifier == 'number_of_observations':
        canonical_units = '1'
    # This section represents the different cases where simple udunits
    # comparison isn't comprehensive enough to determine if the units are
    # appropriate under CF
    # UDUnits accepts "s" as a unit of time but it should be <unit> since <epoch>
    if standard_name == 'time':
        valid_standard_units.assert_true(util.units_convertible(units, 'seconds since 1970-01-01'),
                                         'time must be in a valid units format <unit> since <epoch> '
                                         'not {}'.format(units))
    # UDunits can't tell the difference between east and north facing coordinates
    elif standard_name == 'latitude':
        # degrees is allowed if using a transformed grid
        allowed_units = cfutil.VALID_LAT_UNITS | {'degrees'}
        valid_standard_units.assert_true(units.lower() in allowed_units,
                                         'variables defining latitude ("{}") must use degrees_north '
                                         'or degrees if defining a transformed grid. Currently '
                                         '{}'.format(variable_name, units))
    # UDunits can't tell the difference between east and north facing coordinates
    elif standard_name == 'longitude':
        # degrees is allowed if using a transformed grid
        allowed_units = cfutil.VALID_LON_UNITS | {'degrees'}
        valid_standard_units.assert_true(units.lower() in allowed_units,
                                         'variables defining longitude ("{}") must use degrees_east '
                                         'or degrees if defining a transformed grid. Currently '
                                         '{}'.format(variable_name, units))
    # Standard Name table agrees the unit should be dimensionless
    elif std_name_units_dimensionless:
        valid_standard_units.assert_true(True, '')
    elif canonical_units is not None:
        # Generic fallback: the units just need to be convertible to the
        # canonical units from the standard name table.
        valid_standard_units.assert_true(util.units_convertible(canonical_units, units),
                                         'units for variable {} must be convertible to {} '
                                         'currently they are {}'.format(variable_name, canonical_units, units))
    return valid_standard_units.to_result()
def function[_check_valid_standard_units, parameter[self, ds, variable_name]]: constant[ Checks that the variable's units are appropriate for the standard name according to the CF standard name table and coordinate sections in CF 1.6 :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked ] variable[variable] assign[=] call[name[ds].variables][name[variable_name]] variable[units] assign[=] call[name[getattr], parameter[name[variable], constant[units], constant[None]]] variable[standard_name] assign[=] call[name[getattr], parameter[name[variable], constant[standard_name], constant[None]]] variable[valid_standard_units] assign[=] call[name[TestCtx], parameter[name[BaseCheck].HIGH, call[name[self].section_titles][constant[3.1]]]] variable[std_name_units_dimensionless] assign[=] call[name[cfutil].is_dimensionless_standard_name, parameter[name[self]._std_names._root, name[standard_name]]] <ast.Tuple object at 0x7da2041d9030> assign[=] call[name[self]._split_standard_name, parameter[name[standard_name]]] variable[standard_entry] assign[=] call[name[self]._std_names.get, parameter[name[standard_name], constant[None]]] if compare[name[standard_entry] is_not constant[None]] begin[:] variable[canonical_units] assign[=] name[standard_entry].canonical_units if compare[name[standard_name_modifier] equal[==] constant[number_of_observations]] begin[:] variable[canonical_units] assign[=] constant[1] if compare[name[standard_name] equal[==] constant[time]] begin[:] call[name[valid_standard_units].assert_true, parameter[call[name[util].units_convertible, parameter[name[units], constant[seconds since 1970-01-01]]], call[constant[time must be in a valid units format <unit> since <epoch> not {}].format, parameter[name[units]]]]] return[call[name[valid_standard_units].to_result, parameter[]]]
keyword[def] identifier[_check_valid_standard_units] ( identifier[self] , identifier[ds] , identifier[variable_name] ): literal[string] identifier[variable] = identifier[ds] . identifier[variables] [ identifier[variable_name] ] identifier[units] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] ) identifier[standard_name] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] ) identifier[valid_standard_units] = identifier[TestCtx] ( identifier[BaseCheck] . identifier[HIGH] , identifier[self] . identifier[section_titles] [ literal[string] ]) identifier[std_name_units_dimensionless] = identifier[cfutil] . identifier[is_dimensionless_standard_name] ( identifier[self] . identifier[_std_names] . identifier[_root] , identifier[standard_name] ) identifier[standard_name] , identifier[standard_name_modifier] = identifier[self] . identifier[_split_standard_name] ( identifier[standard_name] ) identifier[standard_entry] = identifier[self] . identifier[_std_names] . identifier[get] ( identifier[standard_name] , keyword[None] ) keyword[if] identifier[standard_entry] keyword[is] keyword[not] keyword[None] : identifier[canonical_units] = identifier[standard_entry] . identifier[canonical_units] keyword[else] : identifier[canonical_units] = keyword[None] keyword[if] identifier[standard_name_modifier] == literal[string] : identifier[canonical_units] = literal[string] keyword[if] identifier[standard_name] == literal[string] : identifier[valid_standard_units] . identifier[assert_true] ( identifier[util] . identifier[units_convertible] ( identifier[units] , literal[string] ), literal[string] literal[string] . identifier[format] ( identifier[units] )) keyword[elif] identifier[standard_name] == literal[string] : identifier[allowed_units] = identifier[cfutil] . identifier[VALID_LAT_UNITS] |{ literal[string] } identifier[valid_standard_units] . identifier[assert_true] ( identifier[units] . 
identifier[lower] () keyword[in] identifier[allowed_units] , literal[string] literal[string] literal[string] . identifier[format] ( identifier[variable_name] , identifier[units] )) keyword[elif] identifier[standard_name] == literal[string] : identifier[allowed_units] = identifier[cfutil] . identifier[VALID_LON_UNITS] |{ literal[string] } identifier[valid_standard_units] . identifier[assert_true] ( identifier[units] . identifier[lower] () keyword[in] identifier[allowed_units] , literal[string] literal[string] literal[string] . identifier[format] ( identifier[variable_name] , identifier[units] )) keyword[elif] identifier[std_name_units_dimensionless] : identifier[valid_standard_units] . identifier[assert_true] ( keyword[True] , literal[string] ) keyword[elif] identifier[canonical_units] keyword[is] keyword[not] keyword[None] : identifier[valid_standard_units] . identifier[assert_true] ( identifier[util] . identifier[units_convertible] ( identifier[canonical_units] , identifier[units] ), literal[string] literal[string] . identifier[format] ( identifier[variable_name] , identifier[canonical_units] , identifier[units] )) keyword[return] identifier[valid_standard_units] . identifier[to_result] ()
def _check_valid_standard_units(self, ds, variable_name): """ Checks that the variable's units are appropriate for the standard name according to the CF standard name table and coordinate sections in CF 1.6 :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked """ variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name = getattr(variable, 'standard_name', None) valid_standard_units = TestCtx(BaseCheck.HIGH, self.section_titles['3.1']) # If the variable is supposed to be dimensionless, it automatically passes std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) (standard_name, standard_name_modifier) = self._split_standard_name(standard_name) standard_entry = self._std_names.get(standard_name, None) if standard_entry is not None: canonical_units = standard_entry.canonical_units # depends on [control=['if'], data=['standard_entry']] else: # Any unit comparisons with None returns False canonical_units = None # Other standard_name modifiers have the same units as the # unmodified standard name or are not checked for units. 
if standard_name_modifier == 'number_of_observations': canonical_units = '1' # depends on [control=['if'], data=[]] # This section represents the different cases where simple udunits # comparison isn't comprehensive enough to determine if the units are # appropriate under CF # UDUnits accepts "s" as a unit of time but it should be <unit> since <epoch> if standard_name == 'time': valid_standard_units.assert_true(util.units_convertible(units, 'seconds since 1970-01-01'), 'time must be in a valid units format <unit> since <epoch> not {}'.format(units)) # depends on [control=['if'], data=[]] # UDunits can't tell the difference between east and north facing coordinates elif standard_name == 'latitude': # degrees is allowed if using a transformed grid allowed_units = cfutil.VALID_LAT_UNITS | {'degrees'} valid_standard_units.assert_true(units.lower() in allowed_units, 'variables defining latitude ("{}") must use degrees_north or degrees if defining a transformed grid. Currently {}'.format(variable_name, units)) # depends on [control=['if'], data=[]] # UDunits can't tell the difference between east and north facing coordinates elif standard_name == 'longitude': # degrees is allowed if using a transformed grid allowed_units = cfutil.VALID_LON_UNITS | {'degrees'} valid_standard_units.assert_true(units.lower() in allowed_units, 'variables defining longitude ("{}") must use degrees_east or degrees if defining a transformed grid. 
Currently {}'.format(variable_name, units)) # depends on [control=['if'], data=[]] # Standard Name table agrees the unit should be dimensionless elif std_name_units_dimensionless: valid_standard_units.assert_true(True, '') # depends on [control=['if'], data=[]] elif canonical_units is not None: valid_standard_units.assert_true(util.units_convertible(canonical_units, units), 'units for variable {} must be convertible to {} currently they are {}'.format(variable_name, canonical_units, units)) # depends on [control=['if'], data=['canonical_units']] return valid_standard_units.to_result()
def sort(line):
    """Normalize the endpoint order of a line segment ``(x0, y0, x1, y1)``.

    Along the dominant axis (the one with the larger coordinate span) the
    segment is flipped so that it runs in increasing coordinate order;
    otherwise the input is returned unchanged.
    """
    x0, y0, x1, y1 = line
    if abs(x1 - x0) > abs(y1 - y0):
        # Mostly horizontal: order by x.
        flip = x1 < x0
    else:
        # Mostly vertical (or diagonal tie): order by y.
        flip = y1 < y0
    return (x1, y1, x0, y0) if flip else line
def function[sort, parameter[line]]: constant[ change point position if x1,y0 < x0,y0 ] <ast.Tuple object at 0x7da1b14d23e0> assign[=] name[line] variable[turn] assign[=] constant[False] if compare[call[name[abs], parameter[binary_operation[name[x1] - name[x0]]]] greater[>] call[name[abs], parameter[binary_operation[name[y1] - name[y0]]]]] begin[:] if compare[name[x1] less[<] name[x0]] begin[:] variable[turn] assign[=] constant[True] if name[turn] begin[:] return[tuple[[<ast.Name object at 0x7da1b14d0880>, <ast.Name object at 0x7da1b14d31c0>, <ast.Name object at 0x7da1b14d2380>, <ast.Name object at 0x7da1b14d2860>]]] return[name[line]]
keyword[def] identifier[sort] ( identifier[line] ): literal[string] identifier[x0] , identifier[y0] , identifier[x1] , identifier[y1] = identifier[line] identifier[turn] = keyword[False] keyword[if] identifier[abs] ( identifier[x1] - identifier[x0] )> identifier[abs] ( identifier[y1] - identifier[y0] ): keyword[if] identifier[x1] < identifier[x0] : identifier[turn] = keyword[True] keyword[elif] identifier[y1] < identifier[y0] : identifier[turn] = keyword[True] keyword[if] identifier[turn] : keyword[return] ( identifier[x1] , identifier[y1] , identifier[x0] , identifier[y0] ) keyword[return] identifier[line]
def sort(line): """ change point position if x1,y0 < x0,y0 """ (x0, y0, x1, y1) = line # if (x0**2+y0**2)**0.5 < (x1**2+y1**2)**0.5: # return (x1,y1,x0,y0) # return line # # if x1 < x0: # return (x1,y1,x0,y0) # return line turn = False if abs(x1 - x0) > abs(y1 - y0): if x1 < x0: turn = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif y1 < y0: turn = True # depends on [control=['if'], data=[]] if turn: return (x1, y1, x0, y0) # depends on [control=['if'], data=[]] # return line[(2,3,0,1)] return line
def write(self, buffer):
    """Write a string to the socket, doubling any IAC characters.

    Can block if the connection is blocked.
    May raise socket.error if the connection is closed.

    :param buffer: an int (sent as the single character ``chr(buffer)``),
        ``bytes`` (sent as-is), or text (encoded with ``self.encoding``).
    """
    if isinstance(buffer, int):
        # isinstance() also accepts int subclasses (e.g. bool), which the
        # previous ``type(buffer) == type(0)`` comparison rejected and then
        # crashed on in the .encode() branch below.
        # NOTE(review): this branch yields str, not bytes — it assumes IAC
        # and self.sock accept str (Python 2 style); confirm on Python 3.
        buffer = chr(buffer)
    elif not isinstance(buffer, bytes):
        buffer = buffer.encode(self.encoding)
    if IAC in buffer:
        # Telnet escapes a literal IAC (0xFF) byte by doubling it.
        buffer = buffer.replace(IAC, IAC+IAC)
    self.msg("send %s", repr(buffer))
    self.sock.send(buffer)
def function[write, parameter[self, buffer]]: constant[Write a string to the socket, doubling any IAC characters. Can block if the connection is blocked. May raise socket.error if the connection is closed. ] if compare[call[name[type], parameter[name[buffer]]] equal[==] call[name[type], parameter[constant[0]]]] begin[:] variable[buffer] assign[=] call[name[chr], parameter[name[buffer]]] if compare[name[IAC] in name[buffer]] begin[:] variable[buffer] assign[=] call[name[buffer].replace, parameter[name[IAC], binary_operation[name[IAC] + name[IAC]]]] call[name[self].msg, parameter[constant[send %s], call[name[repr], parameter[name[buffer]]]]] call[name[self].sock.send, parameter[name[buffer]]]
keyword[def] identifier[write] ( identifier[self] , identifier[buffer] ): literal[string] keyword[if] identifier[type] ( identifier[buffer] )== identifier[type] ( literal[int] ): identifier[buffer] = identifier[chr] ( identifier[buffer] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[buffer] , identifier[bytes] ): identifier[buffer] = identifier[buffer] . identifier[encode] ( identifier[self] . identifier[encoding] ) keyword[if] identifier[IAC] keyword[in] identifier[buffer] : identifier[buffer] = identifier[buffer] . identifier[replace] ( identifier[IAC] , identifier[IAC] + identifier[IAC] ) identifier[self] . identifier[msg] ( literal[string] , identifier[repr] ( identifier[buffer] )) identifier[self] . identifier[sock] . identifier[send] ( identifier[buffer] )
def write(self, buffer): """Write a string to the socket, doubling any IAC characters. Can block if the connection is blocked. May raise socket.error if the connection is closed. """ if type(buffer) == type(0): buffer = chr(buffer) # depends on [control=['if'], data=[]] elif not isinstance(buffer, bytes): buffer = buffer.encode(self.encoding) # depends on [control=['if'], data=[]] if IAC in buffer: buffer = buffer.replace(IAC, IAC + IAC) # depends on [control=['if'], data=['IAC', 'buffer']] self.msg('send %s', repr(buffer)) self.sock.send(buffer)
def calc_surface_intensity(self, factor=10): """Calculate the surface intensity for each pixel in the interior region of the ROI. Pixels are adaptively subsampled around the kernel centroid out to a radius of 'factor * max_pixrad'. Parameters: ----------- factor : the radius of the oversample region in units of max_pixrad Returns: -------- surface_intensity : the surface intensity at each pixel """ # First we calculate the surface intensity at native resolution pixels = self.roi.pixels_interior nside_in = self.config['coords']['nside_pixel'] surface_intensity = self.kernel.pdf(pixels.lon,pixels.lat) # Then we recalculate the surface intensity around the kernel # centroid at higher resolution for i in np.arange(1,5): # Select pixels within the region of interest nside_out = 2**i * nside_in radius = factor*np.degrees(hp.max_pixrad(nside_out)) pix = ang2disc(nside_in,self.kernel.lon,self.kernel.lat, radius,inclusive=True) # Select pix within the interior region of the ROI idx = ugali.utils.healpix.index_pix_in_pixels(pix,pixels) pix = pix[(idx >= 0)]; idx = idx[(idx >= 0)] # Reset the surface intensity for the subsampled pixels subpix = ugali.utils.healpix.ud_grade_ipix(pix,nside_in,nside_out) pix_lon,pix_lat = pix2ang(nside_out,subpix) surface_intensity[idx]=np.mean(self.kernel.pdf(pix_lon,pix_lat),axis=1) return surface_intensity
def function[calc_surface_intensity, parameter[self, factor]]: constant[Calculate the surface intensity for each pixel in the interior region of the ROI. Pixels are adaptively subsampled around the kernel centroid out to a radius of 'factor * max_pixrad'. Parameters: ----------- factor : the radius of the oversample region in units of max_pixrad Returns: -------- surface_intensity : the surface intensity at each pixel ] variable[pixels] assign[=] name[self].roi.pixels_interior variable[nside_in] assign[=] call[call[name[self].config][constant[coords]]][constant[nside_pixel]] variable[surface_intensity] assign[=] call[name[self].kernel.pdf, parameter[name[pixels].lon, name[pixels].lat]] for taget[name[i]] in starred[call[name[np].arange, parameter[constant[1], constant[5]]]] begin[:] variable[nside_out] assign[=] binary_operation[binary_operation[constant[2] ** name[i]] * name[nside_in]] variable[radius] assign[=] binary_operation[name[factor] * call[name[np].degrees, parameter[call[name[hp].max_pixrad, parameter[name[nside_out]]]]]] variable[pix] assign[=] call[name[ang2disc], parameter[name[nside_in], name[self].kernel.lon, name[self].kernel.lat, name[radius]]] variable[idx] assign[=] call[name[ugali].utils.healpix.index_pix_in_pixels, parameter[name[pix], name[pixels]]] variable[pix] assign[=] call[name[pix]][compare[name[idx] greater_or_equal[>=] constant[0]]] variable[idx] assign[=] call[name[idx]][compare[name[idx] greater_or_equal[>=] constant[0]]] variable[subpix] assign[=] call[name[ugali].utils.healpix.ud_grade_ipix, parameter[name[pix], name[nside_in], name[nside_out]]] <ast.Tuple object at 0x7da2047e92d0> assign[=] call[name[pix2ang], parameter[name[nside_out], name[subpix]]] call[name[surface_intensity]][name[idx]] assign[=] call[name[np].mean, parameter[call[name[self].kernel.pdf, parameter[name[pix_lon], name[pix_lat]]]]] return[name[surface_intensity]]
keyword[def] identifier[calc_surface_intensity] ( identifier[self] , identifier[factor] = literal[int] ): literal[string] identifier[pixels] = identifier[self] . identifier[roi] . identifier[pixels_interior] identifier[nside_in] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ] identifier[surface_intensity] = identifier[self] . identifier[kernel] . identifier[pdf] ( identifier[pixels] . identifier[lon] , identifier[pixels] . identifier[lat] ) keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( literal[int] , literal[int] ): identifier[nside_out] = literal[int] ** identifier[i] * identifier[nside_in] identifier[radius] = identifier[factor] * identifier[np] . identifier[degrees] ( identifier[hp] . identifier[max_pixrad] ( identifier[nside_out] )) identifier[pix] = identifier[ang2disc] ( identifier[nside_in] , identifier[self] . identifier[kernel] . identifier[lon] , identifier[self] . identifier[kernel] . identifier[lat] , identifier[radius] , identifier[inclusive] = keyword[True] ) identifier[idx] = identifier[ugali] . identifier[utils] . identifier[healpix] . identifier[index_pix_in_pixels] ( identifier[pix] , identifier[pixels] ) identifier[pix] = identifier[pix] [( identifier[idx] >= literal[int] )]; identifier[idx] = identifier[idx] [( identifier[idx] >= literal[int] )] identifier[subpix] = identifier[ugali] . identifier[utils] . identifier[healpix] . identifier[ud_grade_ipix] ( identifier[pix] , identifier[nside_in] , identifier[nside_out] ) identifier[pix_lon] , identifier[pix_lat] = identifier[pix2ang] ( identifier[nside_out] , identifier[subpix] ) identifier[surface_intensity] [ identifier[idx] ]= identifier[np] . identifier[mean] ( identifier[self] . identifier[kernel] . identifier[pdf] ( identifier[pix_lon] , identifier[pix_lat] ), identifier[axis] = literal[int] ) keyword[return] identifier[surface_intensity]
def calc_surface_intensity(self, factor=10): """Calculate the surface intensity for each pixel in the interior region of the ROI. Pixels are adaptively subsampled around the kernel centroid out to a radius of 'factor * max_pixrad'. Parameters: ----------- factor : the radius of the oversample region in units of max_pixrad Returns: -------- surface_intensity : the surface intensity at each pixel """ # First we calculate the surface intensity at native resolution pixels = self.roi.pixels_interior nside_in = self.config['coords']['nside_pixel'] surface_intensity = self.kernel.pdf(pixels.lon, pixels.lat) # Then we recalculate the surface intensity around the kernel # centroid at higher resolution for i in np.arange(1, 5): # Select pixels within the region of interest nside_out = 2 ** i * nside_in radius = factor * np.degrees(hp.max_pixrad(nside_out)) pix = ang2disc(nside_in, self.kernel.lon, self.kernel.lat, radius, inclusive=True) # Select pix within the interior region of the ROI idx = ugali.utils.healpix.index_pix_in_pixels(pix, pixels) pix = pix[idx >= 0] idx = idx[idx >= 0] # Reset the surface intensity for the subsampled pixels subpix = ugali.utils.healpix.ud_grade_ipix(pix, nside_in, nside_out) (pix_lon, pix_lat) = pix2ang(nside_out, subpix) surface_intensity[idx] = np.mean(self.kernel.pdf(pix_lon, pix_lat), axis=1) # depends on [control=['for'], data=['i']] return surface_intensity
def with_timeout(timeout, d, reactor=reactor):
    """Returns a `Deferred` that is in all respects equivalent to `d`, e.g. when
    `cancel()` is called on it `Deferred`, the wrapped `Deferred` will also be cancelled;
    however, a `Timeout` will be fired after the `timeout` number of seconds if `d` has
    not fired by that time.

    When a `Timeout` is raised, `d` will be cancelled. It is up to the caller to worry
    about how `d` handles cancellation, i.e. whether it has full/true support for
    cancelling, or does cancelling it just prevent its callbacks from being fired but
    doesn't cancel the underlying operation.
    """
    # No timeout requested, or `d` is already a plain value: pass through.
    if timeout is None or not isinstance(d, Deferred):
        return d

    # Cancelling the returned Deferred cancels both the wrapped Deferred
    # and the timer. (The lambda bodies are tuples purely to sequence the
    # two side-effecting calls inside a single expression.)
    ret = Deferred(canceller=lambda _: (
        d.cancel(),
        timeout_d.cancel(),
    ))

    # Timer path: when it fires first, cancel `d` and fail `ret` with
    # Timeout — unless `ret` already fired.
    timeout_d = sleep(timeout, reactor)
    timeout_d.addCallback(lambda _: (
        d.cancel(),
        ret.errback(Failure(Timeout())) if not ret.called else None,
    ))
    # Cancelling the timer (in the success path below) raises
    # CancelledError on it; swallow exactly that.
    timeout_d.addErrback(lambda f: f.trap(CancelledError))

    # Success path: stop the timer and forward the result.
    d.addCallback(lambda result: (
        timeout_d.cancel(),
        ret.callback(result),
    ))
    # Failure path: forward real failures; a CancelledError here means we
    # cancelled `d` ourselves (timeout or caller cancel), so ignore it.
    d.addErrback(lambda f: (
        if_(not f.check(CancelledError), lambda: (
            timeout_d.cancel(),
            ret.errback(f),
        )),
    ))

    return ret
def function[with_timeout, parameter[timeout, d, reactor]]: constant[Returns a `Deferred` that is in all respects equivalent to `d`, e.g. when `cancel()` is called on it `Deferred`, the wrapped `Deferred` will also be cancelled; however, a `Timeout` will be fired after the `timeout` number of seconds if `d` has not fired by that time. When a `Timeout` is raised, `d` will be cancelled. It is up to the caller to worry about how `d` handles cancellation, i.e. whether it has full/true support for cancelling, or does cancelling it just prevent its callbacks from being fired but doesn't cancel the underlying operation. ] if <ast.BoolOp object at 0x7da1b1501780> begin[:] return[name[d]] variable[ret] assign[=] call[name[Deferred], parameter[]] variable[timeout_d] assign[=] call[name[sleep], parameter[name[timeout], name[reactor]]] call[name[timeout_d].addCallback, parameter[<ast.Lambda object at 0x7da1b15019f0>]] call[name[timeout_d].addErrback, parameter[<ast.Lambda object at 0x7da1b1501690>]] call[name[d].addCallback, parameter[<ast.Lambda object at 0x7da1b1502ec0>]] call[name[d].addErrback, parameter[<ast.Lambda object at 0x7da1b1501bd0>]] return[name[ret]]
keyword[def] identifier[with_timeout] ( identifier[timeout] , identifier[d] , identifier[reactor] = identifier[reactor] ): literal[string] keyword[if] identifier[timeout] keyword[is] keyword[None] keyword[or] keyword[not] identifier[isinstance] ( identifier[d] , identifier[Deferred] ): keyword[return] identifier[d] identifier[ret] = identifier[Deferred] ( identifier[canceller] = keyword[lambda] identifier[_] :( identifier[d] . identifier[cancel] (), identifier[timeout_d] . identifier[cancel] (), )) identifier[timeout_d] = identifier[sleep] ( identifier[timeout] , identifier[reactor] ) identifier[timeout_d] . identifier[addCallback] ( keyword[lambda] identifier[_] :( identifier[d] . identifier[cancel] (), identifier[ret] . identifier[errback] ( identifier[Failure] ( identifier[Timeout] ())) keyword[if] keyword[not] identifier[ret] . identifier[called] keyword[else] keyword[None] , )) identifier[timeout_d] . identifier[addErrback] ( keyword[lambda] identifier[f] : identifier[f] . identifier[trap] ( identifier[CancelledError] )) identifier[d] . identifier[addCallback] ( keyword[lambda] identifier[result] :( identifier[timeout_d] . identifier[cancel] (), identifier[ret] . identifier[callback] ( identifier[result] ), )) identifier[d] . identifier[addErrback] ( keyword[lambda] identifier[f] :( identifier[if_] ( keyword[not] identifier[f] . identifier[check] ( identifier[CancelledError] ), keyword[lambda] :( identifier[timeout_d] . identifier[cancel] (), identifier[ret] . identifier[errback] ( identifier[f] ), )), )) keyword[return] identifier[ret]
def with_timeout(timeout, d, reactor=reactor): """Returns a `Deferred` that is in all respects equivalent to `d`, e.g. when `cancel()` is called on it `Deferred`, the wrapped `Deferred` will also be cancelled; however, a `Timeout` will be fired after the `timeout` number of seconds if `d` has not fired by that time. When a `Timeout` is raised, `d` will be cancelled. It is up to the caller to worry about how `d` handles cancellation, i.e. whether it has full/true support for cancelling, or does cancelling it just prevent its callbacks from being fired but doesn't cancel the underlying operation. """ if timeout is None or not isinstance(d, Deferred): return d # depends on [control=['if'], data=[]] ret = Deferred(canceller=lambda _: (d.cancel(), timeout_d.cancel())) timeout_d = sleep(timeout, reactor) timeout_d.addCallback(lambda _: (d.cancel(), ret.errback(Failure(Timeout())) if not ret.called else None)) timeout_d.addErrback(lambda f: f.trap(CancelledError)) d.addCallback(lambda result: (timeout_d.cancel(), ret.callback(result))) d.addErrback(lambda f: (if_(not f.check(CancelledError), lambda : (timeout_d.cancel(), ret.errback(f))),)) return ret
def get_queryset(self, request):
    """
    Annotate each gallery in the changelist with its number of photos.
    """
    base = super(GalleryAdmin, self).get_queryset(request)
    # Exposed as ``photo_count`` for list_display / ordering.
    annotated = base.annotate(photo_count=Count('photos'))
    return annotated
def function[get_queryset, parameter[self, request]]: constant[ Add number of photos to each gallery. ] variable[qs] assign[=] call[call[name[super], parameter[name[GalleryAdmin], name[self]]].get_queryset, parameter[name[request]]] return[call[name[qs].annotate, parameter[]]]
keyword[def] identifier[get_queryset] ( identifier[self] , identifier[request] ): literal[string] identifier[qs] = identifier[super] ( identifier[GalleryAdmin] , identifier[self] ). identifier[get_queryset] ( identifier[request] ) keyword[return] identifier[qs] . identifier[annotate] ( identifier[photo_count] = identifier[Count] ( literal[string] ))
def get_queryset(self, request): """ Add number of photos to each gallery. """ qs = super(GalleryAdmin, self).get_queryset(request) return qs.annotate(photo_count=Count('photos'))
def environment(request=None):
    """
    Build template context from Django settings: ``COMPRESS_ENABLED``,
    ``SITE_NAME``, plus every name listed in
    ``ICEKIT_CONTEXT_PROCESSOR_SETTINGS`` (missing ones become ``None``).
    """
    context = {
        'COMPRESS_ENABLED': settings.COMPRESS_ENABLED,
        'SITE_NAME': settings.SITE_NAME,
    }
    # Listed settings are added afterwards so they can override the two
    # defaults above, matching the original lookup order.
    context.update(
        (name, getattr(settings, name, None))
        for name in settings.ICEKIT_CONTEXT_PROCESSOR_SETTINGS
    )
    return context
def function[environment, parameter[request]]: constant[ Return ``COMPRESS_ENABLED``, ``SITE_NAME``, and any settings listed in ``ICEKIT_CONTEXT_PROCESSOR_SETTINGS`` as context. ] variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da204564490>, <ast.Constant object at 0x7da204565450>], [<ast.Attribute object at 0x7da204564a30>, <ast.Attribute object at 0x7da204567070>]] for taget[name[key]] in starred[name[settings].ICEKIT_CONTEXT_PROCESSOR_SETTINGS] begin[:] call[name[context]][name[key]] assign[=] call[name[getattr], parameter[name[settings], name[key], constant[None]]] return[name[context]]
keyword[def] identifier[environment] ( identifier[request] = keyword[None] ): literal[string] identifier[context] ={ literal[string] : identifier[settings] . identifier[COMPRESS_ENABLED] , literal[string] : identifier[settings] . identifier[SITE_NAME] , } keyword[for] identifier[key] keyword[in] identifier[settings] . identifier[ICEKIT_CONTEXT_PROCESSOR_SETTINGS] : identifier[context] [ identifier[key] ]= identifier[getattr] ( identifier[settings] , identifier[key] , keyword[None] ) keyword[return] identifier[context]
def environment(request=None): """ Return ``COMPRESS_ENABLED``, ``SITE_NAME``, and any settings listed in ``ICEKIT_CONTEXT_PROCESSOR_SETTINGS`` as context. """ context = {'COMPRESS_ENABLED': settings.COMPRESS_ENABLED, 'SITE_NAME': settings.SITE_NAME} for key in settings.ICEKIT_CONTEXT_PROCESSOR_SETTINGS: context[key] = getattr(settings, key, None) # depends on [control=['for'], data=['key']] return context
def visit_subscript(self, node):
    """Flag subscripting of exception instances (`indexing-exception`)."""
    try:
        # infer() is a generator; InferenceError may surface mid-iteration,
        # so the whole loop stays inside the try block.
        for candidate in node.value.infer():
            is_instance = isinstance(candidate, astroid.Instance)
            if is_instance and utils.inherit_from_std_ex(candidate):
                self.add_message("indexing-exception", node=node)
    except astroid.InferenceError:
        return
def function[visit_subscript, parameter[self, node]]: constant[ Look for indexing exceptions. ] <ast.Try object at 0x7da1b0243b20>
keyword[def] identifier[visit_subscript] ( identifier[self] , identifier[node] ): literal[string] keyword[try] : keyword[for] identifier[inferred] keyword[in] identifier[node] . identifier[value] . identifier[infer] (): keyword[if] keyword[not] identifier[isinstance] ( identifier[inferred] , identifier[astroid] . identifier[Instance] ): keyword[continue] keyword[if] identifier[utils] . identifier[inherit_from_std_ex] ( identifier[inferred] ): identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] ) keyword[except] identifier[astroid] . identifier[InferenceError] : keyword[return]
def visit_subscript(self, node): """ Look for indexing exceptions. """ try: for inferred in node.value.infer(): if not isinstance(inferred, astroid.Instance): continue # depends on [control=['if'], data=[]] if utils.inherit_from_std_ex(inferred): self.add_message('indexing-exception', node=node) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inferred']] # depends on [control=['try'], data=[]] except astroid.InferenceError: return # depends on [control=['except'], data=[]]
def compute_mel_filterbank_features(
    waveforms,
    sample_rate=16000, dither=1.0 / np.iinfo(np.int16).max, preemphasis=0.97,
    frame_length=25, frame_step=10, fft_length=None,
    window_fn=functools.partial(tf.contrib.signal.hann_window, periodic=True),
    lower_edge_hertz=80.0, upper_edge_hertz=7600.0, num_mel_bins=80,
    log_noise_floor=1e-3, apply_mask=True):
  """Implement mel-filterbank extraction using tf ops.

  Args:
    waveforms: float32 tensor with shape [batch_size, max_len]
    sample_rate: sampling rate of the waveform
    dither: stddev of Gaussian noise added to waveform to prevent quantization
      artefacts
    preemphasis: waveform high-pass filtering constant
    frame_length: frame length in ms
    frame_step: frame_Step in ms
    fft_length: number of fft bins
    window_fn: windowing function
    lower_edge_hertz: lowest frequency of the filterbank
    upper_edge_hertz: highest frequency of the filterbank
    num_mel_bins: filterbank size
    log_noise_floor: clip small values to prevent numeric overflow in log
    apply_mask: When working on a batch of samples, set padding frames to zero
  Returns:
    filterbanks: a float32 tensor with shape [batch_size, len, num_bins, 1]
  """
  # `stfts` is a complex64 Tensor representing the short-time Fourier
  # Transform of each signal in `signals`. Its shape is
  # [batch_size, ?, fft_unique_bins]
  # where fft_unique_bins = fft_length // 2 + 1

  # Find the wave length: the largest index for which the value is !=0
  # note that waveforms samples that are exactly 0.0 are quite common, so
  # simply doing sum(waveforms != 0, axis=-1) will not work correctly.
  # The `* position-index` trick keeps the index of the last non-zero sample;
  # the +1 converts that index into a length.
  wav_lens = tf.reduce_max(
      tf.expand_dims(tf.range(tf.shape(waveforms)[1]), 0) *
      tf.to_int32(tf.not_equal(waveforms, 0.0)),
      axis=-1) + 1
  if dither > 0:
    # Additive Gaussian dither to break up quantization artefacts.
    waveforms += tf.random_normal(tf.shape(waveforms), stddev=dither)
  if preemphasis > 0:
    # First-difference high-pass filter; it shortens each waveform by one
    # sample, hence the matching wav_lens decrement.
    waveforms = waveforms[:, 1:] - preemphasis * waveforms[:, :-1]
    wav_lens -= 1
  # Convert frame sizes from milliseconds to sample counts.
  frame_length = int(frame_length * sample_rate / 1e3)
  frame_step = int(frame_step * sample_rate / 1e3)
  if fft_length is None:
    # Default: next power of two >= frame_length.
    fft_length = int(2**(np.ceil(np.log2(frame_length))))
  stfts = tf.contrib.signal.stft(
      waveforms,
      frame_length=frame_length,
      frame_step=frame_step,
      fft_length=fft_length,
      window_fn=window_fn,
      pad_end=True)
  # Number of valid (non-padding) STFT frames per example, i.e.
  # ceil(wav_lens / frame_step).
  stft_lens = (wav_lens + (frame_step - 1)) // frame_step
  # masks is 1.0 for frames covering real signal, 0.0 for padding frames.
  masks = tf.to_float(tf.less_equal(
      tf.expand_dims(tf.range(tf.shape(stfts)[1]), 0),
      tf.expand_dims(stft_lens, 1)))

  # An energy spectrogram is the magnitude of the complex-valued STFT.
  # A float32 Tensor of shape [batch_size, ?, 257].
  magnitude_spectrograms = tf.abs(stfts)

  # Warp the linear-scale, magnitude spectrograms into the mel-scale.
  num_spectrogram_bins = magnitude_spectrograms.shape[-1].value
  linear_to_mel_weight_matrix = (
      tf.contrib.signal.linear_to_mel_weight_matrix(
          num_mel_bins, num_spectrogram_bins, sample_rate, lower_edge_hertz,
          upper_edge_hertz))
  mel_spectrograms = tf.tensordot(
      magnitude_spectrograms, linear_to_mel_weight_matrix, 1)
  # Note: Shape inference for tensordot does not currently handle this case.
  mel_spectrograms.set_shape(magnitude_spectrograms.shape[:-1].concatenate(
      linear_to_mel_weight_matrix.shape[-1:]))

  # Clip to log_noise_floor before the log to avoid log(0) / -inf.
  log_mel_sgram = tf.log(tf.maximum(log_noise_floor, mel_spectrograms))

  if apply_mask:
    # Zero out filterbank rows that correspond to padding frames.
    log_mel_sgram *= tf.expand_dims(tf.to_float(masks), -1)

  # Trailing singleton channel dim: [batch_size, len, num_bins, 1].
  return tf.expand_dims(log_mel_sgram, -1, name="mel_sgrams")
def function[compute_mel_filterbank_features, parameter[waveforms, sample_rate, dither, preemphasis, frame_length, frame_step, fft_length, window_fn, lower_edge_hertz, upper_edge_hertz, num_mel_bins, log_noise_floor, apply_mask]]: constant[Implement mel-filterbank extraction using tf ops. Args: waveforms: float32 tensor with shape [batch_size, max_len] sample_rate: sampling rate of the waveform dither: stddev of Gaussian noise added to waveform to prevent quantization artefacts preemphasis: waveform high-pass filtering constant frame_length: frame length in ms frame_step: frame_Step in ms fft_length: number of fft bins window_fn: windowing function lower_edge_hertz: lowest frequency of the filterbank upper_edge_hertz: highest frequency of the filterbank num_mel_bins: filterbank size log_noise_floor: clip small values to prevent numeric overflow in log apply_mask: When working on a batch of samples, set padding frames to zero Returns: filterbanks: a float32 tensor with shape [batch_size, len, num_bins, 1] ] variable[wav_lens] assign[=] binary_operation[call[name[tf].reduce_max, parameter[binary_operation[call[name[tf].expand_dims, parameter[call[name[tf].range, parameter[call[call[name[tf].shape, parameter[name[waveforms]]]][constant[1]]]], constant[0]]] * call[name[tf].to_int32, parameter[call[name[tf].not_equal, parameter[name[waveforms], constant[0.0]]]]]]]] + constant[1]] if compare[name[dither] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b1ff1720> if compare[name[preemphasis] greater[>] constant[0]] begin[:] variable[waveforms] assign[=] binary_operation[call[name[waveforms]][tuple[[<ast.Slice object at 0x7da1b1ff1fc0>, <ast.Slice object at 0x7da1b1ff0e50>]]] - binary_operation[name[preemphasis] * call[name[waveforms]][tuple[[<ast.Slice object at 0x7da1b1ff11e0>, <ast.Slice object at 0x7da1b1ff1630>]]]]] <ast.AugAssign object at 0x7da1b1ff0760> variable[frame_length] assign[=] call[name[int], 
parameter[binary_operation[binary_operation[name[frame_length] * name[sample_rate]] / constant[1000.0]]]] variable[frame_step] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[frame_step] * name[sample_rate]] / constant[1000.0]]]] if compare[name[fft_length] is constant[None]] begin[:] variable[fft_length] assign[=] call[name[int], parameter[binary_operation[constant[2] ** call[name[np].ceil, parameter[call[name[np].log2, parameter[name[frame_length]]]]]]]] variable[stfts] assign[=] call[name[tf].contrib.signal.stft, parameter[name[waveforms]]] variable[stft_lens] assign[=] binary_operation[binary_operation[name[wav_lens] + binary_operation[name[frame_step] - constant[1]]] <ast.FloorDiv object at 0x7da2590d6bc0> name[frame_step]] variable[masks] assign[=] call[name[tf].to_float, parameter[call[name[tf].less_equal, parameter[call[name[tf].expand_dims, parameter[call[name[tf].range, parameter[call[call[name[tf].shape, parameter[name[stfts]]]][constant[1]]]], constant[0]]], call[name[tf].expand_dims, parameter[name[stft_lens], constant[1]]]]]]] variable[magnitude_spectrograms] assign[=] call[name[tf].abs, parameter[name[stfts]]] variable[num_spectrogram_bins] assign[=] call[name[magnitude_spectrograms].shape][<ast.UnaryOp object at 0x7da1b1ff1570>].value variable[linear_to_mel_weight_matrix] assign[=] call[name[tf].contrib.signal.linear_to_mel_weight_matrix, parameter[name[num_mel_bins], name[num_spectrogram_bins], name[sample_rate], name[lower_edge_hertz], name[upper_edge_hertz]]] variable[mel_spectrograms] assign[=] call[name[tf].tensordot, parameter[name[magnitude_spectrograms], name[linear_to_mel_weight_matrix], constant[1]]] call[name[mel_spectrograms].set_shape, parameter[call[call[name[magnitude_spectrograms].shape][<ast.Slice object at 0x7da1b1ff2380>].concatenate, parameter[call[name[linear_to_mel_weight_matrix].shape][<ast.Slice object at 0x7da1b1ff1060>]]]]] variable[log_mel_sgram] assign[=] call[name[tf].log, 
parameter[call[name[tf].maximum, parameter[name[log_noise_floor], name[mel_spectrograms]]]]] if name[apply_mask] begin[:] <ast.AugAssign object at 0x7da1b1ff2320> return[call[name[tf].expand_dims, parameter[name[log_mel_sgram], <ast.UnaryOp object at 0x7da1b1ff1150>]]]
keyword[def] identifier[compute_mel_filterbank_features] ( identifier[waveforms] , identifier[sample_rate] = literal[int] , identifier[dither] = literal[int] / identifier[np] . identifier[iinfo] ( identifier[np] . identifier[int16] ). identifier[max] , identifier[preemphasis] = literal[int] , identifier[frame_length] = literal[int] , identifier[frame_step] = literal[int] , identifier[fft_length] = keyword[None] , identifier[window_fn] = identifier[functools] . identifier[partial] ( identifier[tf] . identifier[contrib] . identifier[signal] . identifier[hann_window] , identifier[periodic] = keyword[True] ), identifier[lower_edge_hertz] = literal[int] , identifier[upper_edge_hertz] = literal[int] , identifier[num_mel_bins] = literal[int] , identifier[log_noise_floor] = literal[int] , identifier[apply_mask] = keyword[True] ): literal[string] identifier[wav_lens] = identifier[tf] . identifier[reduce_max] ( identifier[tf] . identifier[expand_dims] ( identifier[tf] . identifier[range] ( identifier[tf] . identifier[shape] ( identifier[waveforms] )[ literal[int] ]), literal[int] )* identifier[tf] . identifier[to_int32] ( identifier[tf] . identifier[not_equal] ( identifier[waveforms] , literal[int] )), identifier[axis] =- literal[int] )+ literal[int] keyword[if] identifier[dither] > literal[int] : identifier[waveforms] += identifier[tf] . identifier[random_normal] ( identifier[tf] . 
identifier[shape] ( identifier[waveforms] ), identifier[stddev] = identifier[dither] ) keyword[if] identifier[preemphasis] > literal[int] : identifier[waveforms] = identifier[waveforms] [:, literal[int] :]- identifier[preemphasis] * identifier[waveforms] [:,:- literal[int] ] identifier[wav_lens] -= literal[int] identifier[frame_length] = identifier[int] ( identifier[frame_length] * identifier[sample_rate] / literal[int] ) identifier[frame_step] = identifier[int] ( identifier[frame_step] * identifier[sample_rate] / literal[int] ) keyword[if] identifier[fft_length] keyword[is] keyword[None] : identifier[fft_length] = identifier[int] ( literal[int] **( identifier[np] . identifier[ceil] ( identifier[np] . identifier[log2] ( identifier[frame_length] )))) identifier[stfts] = identifier[tf] . identifier[contrib] . identifier[signal] . identifier[stft] ( identifier[waveforms] , identifier[frame_length] = identifier[frame_length] , identifier[frame_step] = identifier[frame_step] , identifier[fft_length] = identifier[fft_length] , identifier[window_fn] = identifier[window_fn] , identifier[pad_end] = keyword[True] ) identifier[stft_lens] =( identifier[wav_lens] +( identifier[frame_step] - literal[int] ))// identifier[frame_step] identifier[masks] = identifier[tf] . identifier[to_float] ( identifier[tf] . identifier[less_equal] ( identifier[tf] . identifier[expand_dims] ( identifier[tf] . identifier[range] ( identifier[tf] . identifier[shape] ( identifier[stfts] )[ literal[int] ]), literal[int] ), identifier[tf] . identifier[expand_dims] ( identifier[stft_lens] , literal[int] ))) identifier[magnitude_spectrograms] = identifier[tf] . identifier[abs] ( identifier[stfts] ) identifier[num_spectrogram_bins] = identifier[magnitude_spectrograms] . identifier[shape] [- literal[int] ]. identifier[value] identifier[linear_to_mel_weight_matrix] =( identifier[tf] . identifier[contrib] . identifier[signal] . 
identifier[linear_to_mel_weight_matrix] ( identifier[num_mel_bins] , identifier[num_spectrogram_bins] , identifier[sample_rate] , identifier[lower_edge_hertz] , identifier[upper_edge_hertz] )) identifier[mel_spectrograms] = identifier[tf] . identifier[tensordot] ( identifier[magnitude_spectrograms] , identifier[linear_to_mel_weight_matrix] , literal[int] ) identifier[mel_spectrograms] . identifier[set_shape] ( identifier[magnitude_spectrograms] . identifier[shape] [:- literal[int] ]. identifier[concatenate] ( identifier[linear_to_mel_weight_matrix] . identifier[shape] [- literal[int] :])) identifier[log_mel_sgram] = identifier[tf] . identifier[log] ( identifier[tf] . identifier[maximum] ( identifier[log_noise_floor] , identifier[mel_spectrograms] )) keyword[if] identifier[apply_mask] : identifier[log_mel_sgram] *= identifier[tf] . identifier[expand_dims] ( identifier[tf] . identifier[to_float] ( identifier[masks] ),- literal[int] ) keyword[return] identifier[tf] . identifier[expand_dims] ( identifier[log_mel_sgram] ,- literal[int] , identifier[name] = literal[string] )
def compute_mel_filterbank_features(waveforms, sample_rate=16000, dither=1.0 / np.iinfo(np.int16).max, preemphasis=0.97, frame_length=25, frame_step=10, fft_length=None, window_fn=functools.partial(tf.contrib.signal.hann_window, periodic=True), lower_edge_hertz=80.0, upper_edge_hertz=7600.0, num_mel_bins=80, log_noise_floor=0.001, apply_mask=True): """Implement mel-filterbank extraction using tf ops. Args: waveforms: float32 tensor with shape [batch_size, max_len] sample_rate: sampling rate of the waveform dither: stddev of Gaussian noise added to waveform to prevent quantization artefacts preemphasis: waveform high-pass filtering constant frame_length: frame length in ms frame_step: frame_Step in ms fft_length: number of fft bins window_fn: windowing function lower_edge_hertz: lowest frequency of the filterbank upper_edge_hertz: highest frequency of the filterbank num_mel_bins: filterbank size log_noise_floor: clip small values to prevent numeric overflow in log apply_mask: When working on a batch of samples, set padding frames to zero Returns: filterbanks: a float32 tensor with shape [batch_size, len, num_bins, 1] """ # `stfts` is a complex64 Tensor representing the short-time Fourier # Transform of each signal in `signals`. Its shape is # [batch_size, ?, fft_unique_bins] # where fft_unique_bins = fft_length // 2 + 1 # Find the wave length: the largest index for which the value is !=0 # note that waveforms samples that are exactly 0.0 are quite common, so # simply doing sum(waveforms != 0, axis=-1) will not work correctly. 
wav_lens = tf.reduce_max(tf.expand_dims(tf.range(tf.shape(waveforms)[1]), 0) * tf.to_int32(tf.not_equal(waveforms, 0.0)), axis=-1) + 1 if dither > 0: waveforms += tf.random_normal(tf.shape(waveforms), stddev=dither) # depends on [control=['if'], data=['dither']] if preemphasis > 0: waveforms = waveforms[:, 1:] - preemphasis * waveforms[:, :-1] wav_lens -= 1 # depends on [control=['if'], data=['preemphasis']] frame_length = int(frame_length * sample_rate / 1000.0) frame_step = int(frame_step * sample_rate / 1000.0) if fft_length is None: fft_length = int(2 ** np.ceil(np.log2(frame_length))) # depends on [control=['if'], data=['fft_length']] stfts = tf.contrib.signal.stft(waveforms, frame_length=frame_length, frame_step=frame_step, fft_length=fft_length, window_fn=window_fn, pad_end=True) stft_lens = (wav_lens + (frame_step - 1)) // frame_step masks = tf.to_float(tf.less_equal(tf.expand_dims(tf.range(tf.shape(stfts)[1]), 0), tf.expand_dims(stft_lens, 1))) # An energy spectrogram is the magnitude of the complex-valued STFT. # A float32 Tensor of shape [batch_size, ?, 257]. magnitude_spectrograms = tf.abs(stfts) # Warp the linear-scale, magnitude spectrograms into the mel-scale. num_spectrogram_bins = magnitude_spectrograms.shape[-1].value linear_to_mel_weight_matrix = tf.contrib.signal.linear_to_mel_weight_matrix(num_mel_bins, num_spectrogram_bins, sample_rate, lower_edge_hertz, upper_edge_hertz) mel_spectrograms = tf.tensordot(magnitude_spectrograms, linear_to_mel_weight_matrix, 1) # Note: Shape inference for tensordot does not currently handle this case. mel_spectrograms.set_shape(magnitude_spectrograms.shape[:-1].concatenate(linear_to_mel_weight_matrix.shape[-1:])) log_mel_sgram = tf.log(tf.maximum(log_noise_floor, mel_spectrograms)) if apply_mask: log_mel_sgram *= tf.expand_dims(tf.to_float(masks), -1) # depends on [control=['if'], data=[]] return tf.expand_dims(log_mel_sgram, -1, name='mel_sgrams')
def add_component(self, component):
    """Add a Component to an Entity.

    If an equal component is already attached, it is replaced in place
    (keeping its position in the component list); otherwise the component
    is appended.
    """
    try:
        # Single O(n) scan via index(); the original `in` check followed by
        # .index() scanned the list twice.
        self._components[self._components.index(component)] = component
    except ValueError:
        # Not present yet: attach it.
        self._components.append(component)
def function[add_component, parameter[self, component]]: constant[ Adds a Component to an Entity ] if compare[name[component] <ast.NotIn object at 0x7da2590d7190> name[self]._components] begin[:] call[name[self]._components.append, parameter[name[component]]]
keyword[def] identifier[add_component] ( identifier[self] , identifier[component] ): literal[string] keyword[if] identifier[component] keyword[not] keyword[in] identifier[self] . identifier[_components] : identifier[self] . identifier[_components] . identifier[append] ( identifier[component] ) keyword[else] : identifier[self] . identifier[_components] [ identifier[self] . identifier[_components] . identifier[index] ( identifier[component] )]= identifier[component]
def add_component(self, component): """ Adds a Component to an Entity """ if component not in self._components: self._components.append(component) # depends on [control=['if'], data=['component']] else: # Replace Component self._components[self._components.index(component)] = component
def get_suggested_saxis(magmoms):
    """
    This method returns a suggested spin axis for a set of magmoms, taking
    the largest magnetic moment as the reference. For calculations with
    collinear spins, this would give a sensible saxis for a ncl calculation.

    :param magmoms: list of magmoms (Magmoms, scalars or vectors)
    :return: np.ndarray of length 3
    """
    # Heuristic: pick the largest non-zero magmom as the reference. Useful
    # for collinear approximations of e.g. slightly canted structures; for
    # fully collinear structures this gives the expected result.
    nonzero = sorted(
        (m for m in (Magmom(raw) for raw in magmoms) if abs(m)),
        reverse=True)
    if not nonzero:
        # No magnetic moments at all: fall back to the conventional z axis.
        return np.array([0, 0, 1], dtype="d")
    return nonzero[0].get_00t_magmom_with_xyz_saxis().saxis
def function[get_suggested_saxis, parameter[magmoms]]: constant[ This method returns a suggested spin axis for a set of magmoms, taking the largest magnetic moment as the reference. For calculations with collinear spins, this would give a sensible saxis for a ncl calculation. :param magmoms: list of magmoms (Magmoms, scalars or vectors) :return: np.ndarray of length 3 ] variable[magmoms] assign[=] <ast.ListComp object at 0x7da2047ebfd0> variable[magmoms] assign[=] <ast.ListComp object at 0x7da2047eb880> call[name[magmoms].sort, parameter[]] if compare[call[name[len], parameter[name[magmoms]]] greater[>] constant[0]] begin[:] return[call[call[name[magmoms]][constant[0]].get_00t_magmom_with_xyz_saxis, parameter[]].saxis]
keyword[def] identifier[get_suggested_saxis] ( identifier[magmoms] ): literal[string] identifier[magmoms] =[ identifier[Magmom] ( identifier[magmom] ) keyword[for] identifier[magmom] keyword[in] identifier[magmoms] ] identifier[magmoms] =[ identifier[magmom] keyword[for] identifier[magmom] keyword[in] identifier[magmoms] keyword[if] identifier[abs] ( identifier[magmom] )] identifier[magmoms] . identifier[sort] ( identifier[reverse] = keyword[True] ) keyword[if] identifier[len] ( identifier[magmoms] )> literal[int] : keyword[return] identifier[magmoms] [ literal[int] ]. identifier[get_00t_magmom_with_xyz_saxis] (). identifier[saxis] keyword[else] : keyword[return] identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ], identifier[dtype] = literal[string] )
def get_suggested_saxis(magmoms): """ This method returns a suggested spin axis for a set of magmoms, taking the largest magnetic moment as the reference. For calculations with collinear spins, this would give a sensible saxis for a ncl calculation. :param magmoms: list of magmoms (Magmoms, scalars or vectors) :return: np.ndarray of length 3 """ # heuristic, will pick largest magmom as reference # useful for creating collinear approximations of # e.g. slightly canted magnetic structures # for fully collinear structures, will return expected # result magmoms = [Magmom(magmom) for magmom in magmoms] # filter only non-zero magmoms magmoms = [magmom for magmom in magmoms if abs(magmom)] magmoms.sort(reverse=True) if len(magmoms) > 0: return magmoms[0].get_00t_magmom_with_xyz_saxis().saxis # depends on [control=['if'], data=[]] else: return np.array([0, 0, 1], dtype='d')
def rouge_n(evaluated_sentences, reference_sentences, n=2):
    """
    Computes ROUGE-N of two text collections of sentences.
    Sourece: http://research.microsoft.com/en-us/um/people/cyl/download/
    papers/rouge-working-note-v1.3.1.pdf

    Args:
      evaluated_sentences: The sentences that have been picked by the
                           summarizer
      reference_sentences: The sentences from the referene set
      n: Size of ngram.  Defaults to 2.

    Returns:
      A tuple (f1, precision, recall) for ROUGE-N

    Raises:
      ValueError: raises exception if a param has len <= 0
    """
    if min(len(evaluated_sentences), len(reference_sentences)) <= 0:
        raise ValueError("Collections must contain at least 1 sentence.")

    eval_ngrams = _get_word_ngrams(n, evaluated_sentences)
    ref_ngrams = _get_word_ngrams(n, reference_sentences)

    # Ngrams shared between the candidate summary and the reference.
    overlap_count = len(eval_ngrams & ref_ngrams)

    return f_r_p_rouge_n(len(eval_ngrams), len(ref_ngrams), overlap_count)
def function[rouge_n, parameter[evaluated_sentences, reference_sentences, n]]: constant[ Computes ROUGE-N of two text collections of sentences. Sourece: http://research.microsoft.com/en-us/um/people/cyl/download/ papers/rouge-working-note-v1.3.1.pdf Args: evaluated_sentences: The sentences that have been picked by the summarizer reference_sentences: The sentences from the referene set n: Size of ngram. Defaults to 2. Returns: A tuple (f1, precision, recall) for ROUGE-N Raises: ValueError: raises exception if a param has len <= 0 ] if <ast.BoolOp object at 0x7da1b024e470> begin[:] <ast.Raise object at 0x7da1b024f280> variable[evaluated_ngrams] assign[=] call[name[_get_word_ngrams], parameter[name[n], name[evaluated_sentences]]] variable[reference_ngrams] assign[=] call[name[_get_word_ngrams], parameter[name[n], name[reference_sentences]]] variable[reference_count] assign[=] call[name[len], parameter[name[reference_ngrams]]] variable[evaluated_count] assign[=] call[name[len], parameter[name[evaluated_ngrams]]] variable[overlapping_ngrams] assign[=] call[name[evaluated_ngrams].intersection, parameter[name[reference_ngrams]]] variable[overlapping_count] assign[=] call[name[len], parameter[name[overlapping_ngrams]]] return[call[name[f_r_p_rouge_n], parameter[name[evaluated_count], name[reference_count], name[overlapping_count]]]]
keyword[def] identifier[rouge_n] ( identifier[evaluated_sentences] , identifier[reference_sentences] , identifier[n] = literal[int] ): literal[string] keyword[if] identifier[len] ( identifier[evaluated_sentences] )<= literal[int] keyword[or] identifier[len] ( identifier[reference_sentences] )<= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[evaluated_ngrams] = identifier[_get_word_ngrams] ( identifier[n] , identifier[evaluated_sentences] ) identifier[reference_ngrams] = identifier[_get_word_ngrams] ( identifier[n] , identifier[reference_sentences] ) identifier[reference_count] = identifier[len] ( identifier[reference_ngrams] ) identifier[evaluated_count] = identifier[len] ( identifier[evaluated_ngrams] ) identifier[overlapping_ngrams] = identifier[evaluated_ngrams] . identifier[intersection] ( identifier[reference_ngrams] ) identifier[overlapping_count] = identifier[len] ( identifier[overlapping_ngrams] ) keyword[return] identifier[f_r_p_rouge_n] ( identifier[evaluated_count] , identifier[reference_count] , identifier[overlapping_count] )
def rouge_n(evaluated_sentences, reference_sentences, n=2): """ Computes ROUGE-N of two text collections of sentences. Sourece: http://research.microsoft.com/en-us/um/people/cyl/download/ papers/rouge-working-note-v1.3.1.pdf Args: evaluated_sentences: The sentences that have been picked by the summarizer reference_sentences: The sentences from the referene set n: Size of ngram. Defaults to 2. Returns: A tuple (f1, precision, recall) for ROUGE-N Raises: ValueError: raises exception if a param has len <= 0 """ if len(evaluated_sentences) <= 0 or len(reference_sentences) <= 0: raise ValueError('Collections must contain at least 1 sentence.') # depends on [control=['if'], data=[]] evaluated_ngrams = _get_word_ngrams(n, evaluated_sentences) reference_ngrams = _get_word_ngrams(n, reference_sentences) reference_count = len(reference_ngrams) evaluated_count = len(evaluated_ngrams) # Gets the overlapping ngrams between evaluated and reference overlapping_ngrams = evaluated_ngrams.intersection(reference_ngrams) overlapping_count = len(overlapping_ngrams) return f_r_p_rouge_n(evaluated_count, reference_count, overlapping_count)
def _init_metadata(self):
    """stub"""
    def _timestamp_metadata(prefix):
        # Shared template for the start/end integer-seconds fields; the two
        # originals were identical except for the 'start'/'end' wording.
        return {
            'element_id': Id(self.my_osid_object_form._authority,
                             self.my_osid_object_form._namespace,
                             prefix + '_timestamp'),
            'element_label': prefix + ' timestamp',
            'instructions': ('enter an integer number of seconds for the ' +
                             prefix + ' time'),
            'required': False,
            'read_only': False,
            'linked': False,
            'array': False,
            'syntax': 'INTEGER',
            'minimum_integer': 0,
            'maximum_integer': None,
            'integer_set': [],
            'default_integer_values': [0]
        }

    self._start_timestamp_metadata = _timestamp_metadata('start')
    self._end_timestamp_metadata = _timestamp_metadata('end')
def function[_init_metadata, parameter[self]]: constant[stub] name[self]._start_timestamp_metadata assign[=] dictionary[[<ast.Constant object at 0x7da1b0a739d0>, <ast.Constant object at 0x7da1b0a71bd0>, <ast.Constant object at 0x7da1b0a71ab0>, <ast.Constant object at 0x7da1b0a73dc0>, <ast.Constant object at 0x7da1b0a70670>, <ast.Constant object at 0x7da1b0a73f40>, <ast.Constant object at 0x7da1b0a72950>, <ast.Constant object at 0x7da1b0a70040>, <ast.Constant object at 0x7da1b0a71e10>, <ast.Constant object at 0x7da1b0a73d00>, <ast.Constant object at 0x7da1b0a732e0>, <ast.Constant object at 0x7da1b0a70520>], [<ast.Call object at 0x7da1b0a73910>, <ast.Constant object at 0x7da1b0a700d0>, <ast.Constant object at 0x7da1b0a73ee0>, <ast.Constant object at 0x7da1b0a70ac0>, <ast.Constant object at 0x7da1b0a72440>, <ast.Constant object at 0x7da1b0a71630>, <ast.Constant object at 0x7da1b0a71e40>, <ast.Constant object at 0x7da1b0a72710>, <ast.Constant object at 0x7da1b0a71540>, <ast.Constant object at 0x7da1b0a73be0>, <ast.List object at 0x7da1b0a727a0>, <ast.List object at 0x7da1b0a702b0>]] name[self]._end_timestamp_metadata assign[=] dictionary[[<ast.Constant object at 0x7da1b0a72b30>, <ast.Constant object at 0x7da1b0a72f80>, <ast.Constant object at 0x7da1b0a733a0>, <ast.Constant object at 0x7da1b0a73b20>, <ast.Constant object at 0x7da1b0a73790>, <ast.Constant object at 0x7da1b0a72ec0>, <ast.Constant object at 0x7da1b0a73b80>, <ast.Constant object at 0x7da1b0a71c30>, <ast.Constant object at 0x7da1b0a73f70>, <ast.Constant object at 0x7da1b0a71d80>, <ast.Constant object at 0x7da1b0a72620>, <ast.Constant object at 0x7da1b0a73970>], [<ast.Call object at 0x7da1b0a70910>, <ast.Constant object at 0x7da1b0a70b50>, <ast.Constant object at 0x7da1b0a70dc0>, <ast.Constant object at 0x7da1b0a703d0>, <ast.Constant object at 0x7da1b09168f0>, <ast.Constant object at 0x7da1b0917bb0>, <ast.Constant object at 0x7da1b0917fa0>, <ast.Constant object at 0x7da1b0917070>, <ast.Constant object at 
0x7da1b0917160>, <ast.Constant object at 0x7da1b09148e0>, <ast.List object at 0x7da1b09158d0>, <ast.List object at 0x7da1b0916e60>]]
keyword[def] identifier[_init_metadata] ( identifier[self] ): literal[string] identifier[self] . identifier[_start_timestamp_metadata] ={ literal[string] : identifier[Id] ( identifier[self] . identifier[my_osid_object_form] . identifier[_authority] , identifier[self] . identifier[my_osid_object_form] . identifier[_namespace] , literal[string] ), literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : keyword[None] , literal[string] :[], literal[string] :[ literal[int] ] } identifier[self] . identifier[_end_timestamp_metadata] ={ literal[string] : identifier[Id] ( identifier[self] . identifier[my_osid_object_form] . identifier[_authority] , identifier[self] . identifier[my_osid_object_form] . identifier[_namespace] , literal[string] ), literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : keyword[None] , literal[string] :[], literal[string] :[ literal[int] ] }
def _init_metadata(self): """stub""" self._start_timestamp_metadata = {'element_id': Id(self.my_osid_object_form._authority, self.my_osid_object_form._namespace, 'start_timestamp'), 'element_label': 'start timestamp', 'instructions': 'enter an integer number of seconds for the start time', 'required': False, 'read_only': False, 'linked': False, 'array': False, 'syntax': 'INTEGER', 'minimum_integer': 0, 'maximum_integer': None, 'integer_set': [], 'default_integer_values': [0]} self._end_timestamp_metadata = {'element_id': Id(self.my_osid_object_form._authority, self.my_osid_object_form._namespace, 'end_timestamp'), 'element_label': 'end timestamp', 'instructions': 'enter an integer number of seconds for the end time', 'required': False, 'read_only': False, 'linked': False, 'array': False, 'syntax': 'INTEGER', 'minimum_integer': 0, 'maximum_integer': None, 'integer_set': [], 'default_integer_values': [0]}
def get_device(self, device_id, refresh=False):
    """Get a single device."""
    if self._devices is None:
        # Lazily populate the device cache; a freshly-fetched device does
        # not need an extra refresh.
        self.get_devices()
        refresh = False
    device = self._devices.get(device_id)
    if refresh and device:
        device.refresh()
    return device
def function[get_device, parameter[self, device_id, refresh]]: constant[Get a single device.] if compare[name[self]._devices is constant[None]] begin[:] call[name[self].get_devices, parameter[]] variable[refresh] assign[=] constant[False] variable[device] assign[=] call[name[self]._devices.get, parameter[name[device_id]]] if <ast.BoolOp object at 0x7da1b0cbafe0> begin[:] call[name[device].refresh, parameter[]] return[name[device]]
keyword[def] identifier[get_device] ( identifier[self] , identifier[device_id] , identifier[refresh] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[_devices] keyword[is] keyword[None] : identifier[self] . identifier[get_devices] () identifier[refresh] = keyword[False] identifier[device] = identifier[self] . identifier[_devices] . identifier[get] ( identifier[device_id] ) keyword[if] identifier[device] keyword[and] identifier[refresh] : identifier[device] . identifier[refresh] () keyword[return] identifier[device]
def get_device(self, device_id, refresh=False): """Get a single device.""" if self._devices is None: self.get_devices() refresh = False # depends on [control=['if'], data=[]] device = self._devices.get(device_id) if device and refresh: device.refresh() # depends on [control=['if'], data=[]] return device
def delete_model(self, model): """Delete a record.""" try: if model.json is None: return True record = Record(model.json, model=model) record.delete() db.session.commit() except SQLAlchemyError as e: if not self.handle_view_exception(e): flash(_('Failed to delete record. %(error)s', error=str(e)), category='error') db.session.rollback() return False return True
def function[delete_model, parameter[self, model]]: constant[Delete a record.] <ast.Try object at 0x7da1b0c8a380> return[constant[True]]
keyword[def] identifier[delete_model] ( identifier[self] , identifier[model] ): literal[string] keyword[try] : keyword[if] identifier[model] . identifier[json] keyword[is] keyword[None] : keyword[return] keyword[True] identifier[record] = identifier[Record] ( identifier[model] . identifier[json] , identifier[model] = identifier[model] ) identifier[record] . identifier[delete] () identifier[db] . identifier[session] . identifier[commit] () keyword[except] identifier[SQLAlchemyError] keyword[as] identifier[e] : keyword[if] keyword[not] identifier[self] . identifier[handle_view_exception] ( identifier[e] ): identifier[flash] ( identifier[_] ( literal[string] , identifier[error] = identifier[str] ( identifier[e] )), identifier[category] = literal[string] ) identifier[db] . identifier[session] . identifier[rollback] () keyword[return] keyword[False] keyword[return] keyword[True]
def delete_model(self, model): """Delete a record.""" try: if model.json is None: return True # depends on [control=['if'], data=[]] record = Record(model.json, model=model) record.delete() db.session.commit() # depends on [control=['try'], data=[]] except SQLAlchemyError as e: if not self.handle_view_exception(e): flash(_('Failed to delete record. %(error)s', error=str(e)), category='error') # depends on [control=['if'], data=[]] db.session.rollback() return False # depends on [control=['except'], data=['e']] return True
def points_close(a, b):
    '''
    points_close(a,b) yields True if points a and b are close to each other and
      False otherwise.
    '''
    pt_a = np.asarray(a)
    pt_b = np.asarray(b)
    if pt_a.ndim == 2 or pt_b.ndim == 2:
        # Matrix input: treat each column as one point.
        pt_a = np.reshape(pt_a, (len(pt_a), -1))
        pt_b = np.reshape(pt_b, (len(pt_b), -1))
    distances = np.sqrt(np.sum((pt_a - pt_b) ** 2, axis=0))
    return np.isclose(distances, 0)
def function[points_close, parameter[a, b]]: constant[ points_close(a,b) yields True if points a and b are close to each other and False otherwise. ] <ast.Tuple object at 0x7da204347910> assign[=] <ast.ListComp object at 0x7da204344190> if <ast.BoolOp object at 0x7da204347d90> begin[:] <ast.Tuple object at 0x7da204346e30> assign[=] <ast.ListComp object at 0x7da204347e50> return[call[name[np].isclose, parameter[call[name[np].sqrt, parameter[call[name[np].sum, parameter[binary_operation[binary_operation[name[a] - name[b]] ** constant[2]]]]]], constant[0]]]]
keyword[def] identifier[points_close] ( identifier[a] , identifier[b] ): literal[string] ( identifier[a] , identifier[b] )=[ identifier[np] . identifier[asarray] ( identifier[u] ) keyword[for] identifier[u] keyword[in] ( identifier[a] , identifier[b] )] keyword[if] identifier[len] ( identifier[a] . identifier[shape] )== literal[int] keyword[or] identifier[len] ( identifier[b] . identifier[shape] )== literal[int] :( identifier[a] , identifier[b] )=[ identifier[np] . identifier[reshape] ( identifier[u] ,( identifier[len] ( identifier[u] ),- literal[int] )) keyword[for] identifier[u] keyword[in] ( identifier[a] , identifier[b] )] keyword[return] identifier[np] . identifier[isclose] ( identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] (( identifier[a] - identifier[b] )** literal[int] , identifier[axis] = literal[int] )), literal[int] )
def points_close(a, b): """ points_close(a,b) yields True if points a and b are close to each other and False otherwise. """ (a, b) = [np.asarray(u) for u in (a, b)] if len(a.shape) == 2 or len(b.shape) == 2: (a, b) = [np.reshape(u, (len(u), -1)) for u in (a, b)] # depends on [control=['if'], data=[]] return np.isclose(np.sqrt(np.sum((a - b) ** 2, axis=0)), 0)
def load_code(self):
    """
    Returns a Python code object like xdis.unmarshal.load_code(),
    except that we first decrypt the data in self.bufstr.

    That is:
    * calculate the TEA key,
    * decrypt self.bufstr
    * create and return a Python code-object
    """
    a = self.load_int()
    b = self.load_int()

    key = get_keys(a, b)
    # Round the payload length up to a multiple of 16 bytes before
    # unpacking it into 32-bit words for the TEA decipher.
    padsize = (b + 15) & ~0xf
    # FIX: use floor division so intsize stays an int under Python 3
    # ('/' would produce a float here).
    intsize = padsize // 4
    data = self.bufstr[self.bufpos:self.bufpos+padsize]
    # print("%d: %d (%d=%d)" % (self.bufpos, b, padsize, len(data)))
    data = list(struct.unpack('<%dL' % intsize, data))
    tea_decipher(data, key)
    self.bufpos += padsize
    obj = xmarshal._FastUnmarshaller(struct.pack('<%dL' % intsize, *data))
    code = obj.load_code()
    co_code = patch(code.co_code)
    if PYTHON3:
        return Code2Compat(code.co_argcount, code.co_nlocals, code.co_stacksize,
                           code.co_flags, co_code, code.co_consts, code.co_names,
                           code.co_varnames, code.co_filename, code.co_name,
                           code.co_firstlineno, code.co_lnotab, code.co_freevars,
                           code.co_cellvars)
    else:
        return types.CodeType(code.co_argcount, code.co_nlocals, code.co_stacksize,
                              code.co_flags, co_code, code.co_consts, code.co_names,
                              code.co_varnames, code.co_filename, code.co_name,
                              code.co_firstlineno, code.co_lnotab, code.co_freevars,
                              code.co_cellvars)
def function[load_code, parameter[self]]: constant[ Returns a Python code object like xdis.unmarshal.load_code(), but in we decrypt the data in self.bufstr. That is: * calculate the TEA key, * decrypt self.bufstr * create and return a Python code-object ] variable[a] assign[=] call[name[self].load_int, parameter[]] variable[b] assign[=] call[name[self].load_int, parameter[]] variable[key] assign[=] call[name[get_keys], parameter[name[a], name[b]]] variable[padsize] assign[=] binary_operation[binary_operation[name[b] + constant[15]] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b0547a60>] variable[intsize] assign[=] binary_operation[name[padsize] / constant[4]] variable[data] assign[=] call[name[self].bufstr][<ast.Slice object at 0x7da1b0547820>] variable[data] assign[=] call[name[list], parameter[call[name[struct].unpack, parameter[binary_operation[constant[<%dL] <ast.Mod object at 0x7da2590d6920> name[intsize]], name[data]]]]] call[name[tea_decipher], parameter[name[data], name[key]]] <ast.AugAssign object at 0x7da1b0546140> variable[obj] assign[=] call[name[xmarshal]._FastUnmarshaller, parameter[call[name[struct].pack, parameter[binary_operation[constant[<%dL] <ast.Mod object at 0x7da2590d6920> name[intsize]], <ast.Starred object at 0x7da1b05441f0>]]]] variable[code] assign[=] call[name[obj].load_code, parameter[]] variable[co_code] assign[=] call[name[patch], parameter[name[code].co_code]] if name[PYTHON3] begin[:] return[call[name[Code2Compat], parameter[name[code].co_argcount, name[code].co_nlocals, name[code].co_stacksize, name[code].co_flags, name[co_code], name[code].co_consts, name[code].co_names, name[code].co_varnames, name[code].co_filename, name[code].co_name, name[code].co_firstlineno, name[code].co_lnotab, name[code].co_freevars, name[code].co_cellvars]]]
keyword[def] identifier[load_code] ( identifier[self] ): literal[string] identifier[a] = identifier[self] . identifier[load_int] () identifier[b] = identifier[self] . identifier[load_int] () identifier[key] = identifier[get_keys] ( identifier[a] , identifier[b] ) identifier[padsize] =( identifier[b] + literal[int] )&~ literal[int] identifier[intsize] = identifier[padsize] / literal[int] identifier[data] = identifier[self] . identifier[bufstr] [ identifier[self] . identifier[bufpos] : identifier[self] . identifier[bufpos] + identifier[padsize] ] identifier[data] = identifier[list] ( identifier[struct] . identifier[unpack] ( literal[string] % identifier[intsize] , identifier[data] )) identifier[tea_decipher] ( identifier[data] , identifier[key] ) identifier[self] . identifier[bufpos] += identifier[padsize] identifier[obj] = identifier[xmarshal] . identifier[_FastUnmarshaller] ( identifier[struct] . identifier[pack] ( literal[string] % identifier[intsize] ,* identifier[data] )) identifier[code] = identifier[obj] . identifier[load_code] () identifier[co_code] = identifier[patch] ( identifier[code] . identifier[co_code] ) keyword[if] identifier[PYTHON3] : keyword[return] identifier[Code2Compat] ( identifier[code] . identifier[co_argcount] , identifier[code] . identifier[co_nlocals] , identifier[code] . identifier[co_stacksize] , identifier[code] . identifier[co_flags] , identifier[co_code] , identifier[code] . identifier[co_consts] , identifier[code] . identifier[co_names] , identifier[code] . identifier[co_varnames] , identifier[code] . identifier[co_filename] , identifier[code] . identifier[co_name] , identifier[code] . identifier[co_firstlineno] , identifier[code] . identifier[co_lnotab] , identifier[code] . identifier[co_freevars] , identifier[code] . identifier[co_cellvars] ) keyword[else] : keyword[return] identifier[types] . identifier[CodeType] ( identifier[code] . identifier[co_argcount] , identifier[code] . identifier[co_nlocals] , identifier[code] . 
identifier[co_stacksize] , identifier[code] . identifier[co_flags] , identifier[co_code] , identifier[code] . identifier[co_consts] , identifier[code] . identifier[co_names] , identifier[code] . identifier[co_varnames] , identifier[code] . identifier[co_filename] , identifier[code] . identifier[co_name] , identifier[code] . identifier[co_firstlineno] , identifier[code] . identifier[co_lnotab] , identifier[code] . identifier[co_freevars] , identifier[code] . identifier[co_cellvars] )
def load_code(self): """ Returns a Python code object like xdis.unmarshal.load_code(), but in we decrypt the data in self.bufstr. That is: * calculate the TEA key, * decrypt self.bufstr * create and return a Python code-object """ a = self.load_int() b = self.load_int() key = get_keys(a, b) padsize = b + 15 & ~15 intsize = padsize / 4 data = self.bufstr[self.bufpos:self.bufpos + padsize] # print("%d: %d (%d=%d)" % (self.bufpos, b, padsize, len(data))) data = list(struct.unpack('<%dL' % intsize, data)) tea_decipher(data, key) self.bufpos += padsize obj = xmarshal._FastUnmarshaller(struct.pack('<%dL' % intsize, *data)) code = obj.load_code() co_code = patch(code.co_code) if PYTHON3: return Code2Compat(code.co_argcount, code.co_nlocals, code.co_stacksize, code.co_flags, co_code, code.co_consts, code.co_names, code.co_varnames, code.co_filename, code.co_name, code.co_firstlineno, code.co_lnotab, code.co_freevars, code.co_cellvars) # depends on [control=['if'], data=[]] else: return types.CodeType(code.co_argcount, code.co_nlocals, code.co_stacksize, code.co_flags, co_code, code.co_consts, code.co_names, code.co_varnames, code.co_filename, code.co_name, code.co_firstlineno, code.co_lnotab, code.co_freevars, code.co_cellvars)
def ut12frame(treq, ind, ut1_unix):
    """
    Given treq, output index(ces) to extract via rawDMCreader

    treq: scalar or vector of ut1_unix time (seconds since Jan 1, 1970)
    ind: zero-based frame index corresponding to ut1_unix, corresponding to input data file.
    """
    if treq is None:
        # Must bail out here: interp1 would otherwise return the last index.
        return None

    treq = atleast_1d(treq)
    if treq.size == 1:
        # Single (possibly human-specified) time.
        treq = datetime2unix(treq[0])
    elif treq.size == 2:
        # (start, end) window: keep samples strictly inside it.
        win_start = datetime2unix(treq[0])
        win_end = datetime2unix(treq[1])
        treq = ut1_unix[(ut1_unix > win_start) & (ut1_unix < win_end)]
    else:
        # Vector of explicitly requested times.
        treq = datetime2unix(treq)

    # Nearest-neighbour lookup of a frame index for each requested time.
    picker = interp1d(ut1_unix, ind, kind='nearest',
                      bounds_error=True, assume_sorted=True)
    frames = picker(treq).astype(int64)
    # Discard anything outside the valid index range.
    return frames[frames >= 0]
def function[ut12frame, parameter[treq, ind, ut1_unix]]: constant[ Given treq, output index(ces) to extract via rawDMCreader treq: scalar or vector of ut1_unix time (seconds since Jan 1, 1970) ind: zero-based frame index corresponding to ut1_unix, corresponding to input data file. ] if compare[name[treq] is constant[None]] begin[:] return[None] variable[treq] assign[=] call[name[atleast_1d], parameter[name[treq]]] if compare[name[treq].size equal[==] constant[1]] begin[:] variable[treq] assign[=] call[name[datetime2unix], parameter[call[name[treq]][constant[0]]]] constant[ We use nearest neighbor interpolation to pick a frame index for each requested time. ] variable[f] assign[=] call[name[interp1d], parameter[name[ut1_unix], name[ind]]] variable[framereq] assign[=] call[call[name[f], parameter[name[treq]]].astype, parameter[name[int64]]] variable[framereq] assign[=] call[name[framereq]][compare[name[framereq] greater_or_equal[>=] constant[0]]] return[name[framereq]]
keyword[def] identifier[ut12frame] ( identifier[treq] , identifier[ind] , identifier[ut1_unix] ): literal[string] keyword[if] identifier[treq] keyword[is] keyword[None] : keyword[return] identifier[treq] = identifier[atleast_1d] ( identifier[treq] ) keyword[if] identifier[treq] . identifier[size] == literal[int] : identifier[treq] = identifier[datetime2unix] ( identifier[treq] [ literal[int] ]) keyword[elif] identifier[treq] . identifier[size] == literal[int] : identifier[tstartreq] = identifier[datetime2unix] ( identifier[treq] [ literal[int] ]) identifier[tendreq] = identifier[datetime2unix] ( identifier[treq] [ literal[int] ]) identifier[treq] = identifier[ut1_unix] [( identifier[ut1_unix] > identifier[tstartreq] )&( identifier[ut1_unix] < identifier[tendreq] )] keyword[else] : identifier[treq] = identifier[datetime2unix] ( identifier[treq] ) literal[string] identifier[f] = identifier[interp1d] ( identifier[ut1_unix] , identifier[ind] , identifier[kind] = literal[string] , identifier[bounds_error] = keyword[True] , identifier[assume_sorted] = keyword[True] ) identifier[framereq] = identifier[f] ( identifier[treq] ). identifier[astype] ( identifier[int64] ) identifier[framereq] = identifier[framereq] [ identifier[framereq] >= literal[int] ] keyword[return] identifier[framereq]
def ut12frame(treq, ind, ut1_unix): """ Given treq, output index(ces) to extract via rawDMCreader treq: scalar or vector of ut1_unix time (seconds since Jan 1, 1970) ind: zero-based frame index corresponding to ut1_unix, corresponding to input data file. """ if treq is None: # have to do this since interp1 will return last index otherwise return # depends on [control=['if'], data=[]] treq = atleast_1d(treq) # %% handle human specified string scalar case if treq.size == 1: treq = datetime2unix(treq[0]) # depends on [control=['if'], data=[]] # %% handle time range case elif treq.size == 2: tstartreq = datetime2unix(treq[0]) tendreq = datetime2unix(treq[1]) treq = ut1_unix[(ut1_unix > tstartreq) & (ut1_unix < tendreq)] # depends on [control=['if'], data=[]] else: # otherwise, it's a vector of requested values treq = datetime2unix(treq) # %% get indices '\n We use nearest neighbor interpolation to pick a frame index for each requested time.\n ' f = interp1d(ut1_unix, ind, kind='nearest', bounds_error=True, assume_sorted=True) # it won't output nan for int case in Numpy 1.10 and other versions too framereq = f(treq).astype(int64) framereq = framereq[framereq >= 0] # discard outside time limits return framereq
def sheet_to_table(worksheet):
    """Convert an Excel worksheet into a list of row dictionaries.

    Args:
        worksheet (Workbook.worksheet): worksheet of an XLSX file as
            read by `openpyxl`.

    Returns:
        list_of_dicts: one dict per data row, keyed by the header row.
            Header cells are read until the first empty one (fixing the
            table width); reading stops at the first fully empty row;
            None values are dropped from each dict.
    """
    rows = worksheet.iter_rows()

    # Header row: collect values up to the first blank header cell.
    headers = []
    first_row = next(rows, None)
    if first_row is not None:
        for cell in first_row:
            if not cell.value:
                break
            headers.append(parse_value(cell))
    width = len(headers)

    table = []
    for row in rows:
        # Only consider as many cells as there are headers.
        cells = [parse_value(c) for pos, c in enumerate(row) if pos < width]
        if not any(cells):
            # A fully empty row marks the end of the sheet.
            break
        # Skip None values when building the record.
        table.append({key: val
                      for key, val in zip(headers, cells)
                      if val is not None})
    return table
def function[sheet_to_table, parameter[worksheet]]: constant[Transforma una hoja de libro de Excel en una lista de diccionarios. Args: worksheet (Workbook.worksheet): Hoja de cálculo de un archivo XLSX según los lee `openpyxl` Returns: list_of_dicts: Lista de diccionarios, con tantos elementos como registros incluya la hoja, y con tantas claves por diccionario como campos tenga la hoja. ] variable[headers] assign[=] list[[]] variable[value_rows] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0443dc0>, <ast.Name object at 0x7da1b0441090>]]] in starred[call[name[enumerate], parameter[call[name[worksheet].iter_rows, parameter[]]]]] begin[:] if compare[name[row_i] equal[==] constant[0]] begin[:] for taget[name[header_cell]] in starred[name[row]] begin[:] if name[header_cell].value begin[:] call[name[headers].append, parameter[call[name[parse_value], parameter[name[header_cell]]]]] continue variable[row_cells] assign[=] <ast.ListComp object at 0x7da1b0440820> if call[name[any], parameter[name[row_cells]]] begin[:] call[name[value_rows].append, parameter[name[row_cells]]] variable[table] assign[=] <ast.ListComp object at 0x7da1b04411e0> return[name[table]]
keyword[def] identifier[sheet_to_table] ( identifier[worksheet] ): literal[string] identifier[headers] =[] identifier[value_rows] =[] keyword[for] identifier[row_i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[worksheet] . identifier[iter_rows] ()): keyword[if] identifier[row_i] == literal[int] : keyword[for] identifier[header_cell] keyword[in] identifier[row] : keyword[if] identifier[header_cell] . identifier[value] : identifier[headers] . identifier[append] ( identifier[parse_value] ( identifier[header_cell] )) keyword[else] : keyword[break] keyword[continue] identifier[row_cells] =[ identifier[parse_value] ( identifier[cell] ) keyword[for] identifier[index] , identifier[cell] keyword[in] identifier[enumerate] ( identifier[row] ) keyword[if] identifier[index] < identifier[len] ( identifier[headers] )] keyword[if] identifier[any] ( identifier[row_cells] ): identifier[value_rows] . identifier[append] ( identifier[row_cells] ) keyword[else] : keyword[break] identifier[table] =[ { identifier[k] : identifier[v] keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[zip] ( identifier[headers] , identifier[row] ) keyword[if] identifier[v] keyword[is] keyword[not] keyword[None] } keyword[for] identifier[row] keyword[in] identifier[value_rows] ] keyword[return] identifier[table]
def sheet_to_table(worksheet): """Transforma una hoja de libro de Excel en una lista de diccionarios. Args: worksheet (Workbook.worksheet): Hoja de cálculo de un archivo XLSX según los lee `openpyxl` Returns: list_of_dicts: Lista de diccionarios, con tantos elementos como registros incluya la hoja, y con tantas claves por diccionario como campos tenga la hoja. """ headers = [] value_rows = [] for (row_i, row) in enumerate(worksheet.iter_rows()): # lee los headers y el tamaño máximo de la hoja en columnas en fila 1 if row_i == 0: for header_cell in row: if header_cell.value: headers.append(parse_value(header_cell)) # depends on [control=['if'], data=[]] else: break # depends on [control=['for'], data=['header_cell']] continue # depends on [control=['if'], data=[]] # limita la cantidad de celdas a considerar, por la cantidad de headers row_cells = [parse_value(cell) for (index, cell) in enumerate(row) if index < len(headers)] # agrega las filas siguientes que tengan al menos un campo no nulo if any(row_cells): value_rows.append(row_cells) # depends on [control=['if'], data=[]] else: # no se admiten filas vacías, eso determina el fin de la hoja break # depends on [control=['for'], data=[]] # convierte las filas en diccionarios con los headers como keys # Ignoro los campos con valores nulos (None) table = [{k: v for (k, v) in zip(headers, row) if v is not None} for row in value_rows] return table
def vars_(object=None):
    """Return vars(object) without the "_"-prefixed attributes.

    Each surviving attribute name is normalized via
    _normalize_arg_name before being used as a key.
    """
    return {
        _normalize_arg_name(attr): val
        for attr, val in six.iteritems(vars(object))
        if not attr.startswith("_")
    }
def function[vars_, parameter[object]]: constant[ Clean all of the property starts with "_" then return result of vars(object). ] variable[filtered_vars] assign[=] dictionary[[], []] variable[vars_dict] assign[=] call[name[vars], parameter[name[object]]] for taget[tuple[[<ast.Name object at 0x7da1b0a1d2d0>, <ast.Name object at 0x7da1b0a1f940>]]] in starred[call[name[six].iteritems, parameter[name[vars_dict]]]] begin[:] if call[name[key].startswith, parameter[constant[_]]] begin[:] continue call[name[filtered_vars]][call[name[_normalize_arg_name], parameter[name[key]]]] assign[=] name[value] return[name[filtered_vars]]
keyword[def] identifier[vars_] ( identifier[object] = keyword[None] ): literal[string] identifier[filtered_vars] ={} identifier[vars_dict] = identifier[vars] ( identifier[object] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[vars_dict] ): keyword[if] identifier[key] . identifier[startswith] ( literal[string] ): keyword[continue] identifier[filtered_vars] [ identifier[_normalize_arg_name] ( identifier[key] )]= identifier[value] keyword[return] identifier[filtered_vars]
def vars_(object=None): """ Clean all of the property starts with "_" then return result of vars(object). """ filtered_vars = {} vars_dict = vars(object) for (key, value) in six.iteritems(vars_dict): if key.startswith('_'): continue # depends on [control=['if'], data=[]] filtered_vars[_normalize_arg_name(key)] = value # depends on [control=['for'], data=[]] return filtered_vars
def save_downloaded_file(filename, save_file_at, file_stream):
    """ Save Downloaded File to Disk Helper Function
    :param save_file_at: Path of where to save the file.
    :param file_stream: File stream
    :param filename: Name to save the file.
    """
    target_path = os.path.join(save_file_at, filename)
    with open(target_path, 'wb') as out_file:
        out_file.write(file_stream)
        out_file.flush()
def function[save_downloaded_file, parameter[filename, save_file_at, file_stream]]: constant[ Save Downloaded File to Disk Helper Function :param save_file_at: Path of where to save the file. :param file_stream: File stream :param filename: Name to save the file. ] variable[filename] assign[=] call[name[os].path.join, parameter[name[save_file_at], name[filename]]] with call[name[open], parameter[name[filename], constant[wb]]] begin[:] call[name[f].write, parameter[name[file_stream]]] call[name[f].flush, parameter[]]
keyword[def] identifier[save_downloaded_file] ( identifier[filename] , identifier[save_file_at] , identifier[file_stream] ): literal[string] identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[save_file_at] , identifier[filename] ) keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[file_stream] ) identifier[f] . identifier[flush] ()
def save_downloaded_file(filename, save_file_at, file_stream): """ Save Downloaded File to Disk Helper Function :param save_file_at: Path of where to save the file. :param file_stream: File stream :param filename: Name to save the file. """ filename = os.path.join(save_file_at, filename) with open(filename, 'wb') as f: f.write(file_stream) f.flush() # depends on [control=['with'], data=['f']]
def merge_visual(self, mujoco_objects):
    """Adds visual objects to the MJCF model."""
    self.visual_obj_mjcf = []
    for name, mjcf in mujoco_objects.items():
        # Register the object's assets first, then attach its visual
        # geometry (without a site) to the world body.
        self.merge_asset(mjcf)
        visual = mjcf.get_visual(name=name, site=False)
        self.visual_obj_mjcf.append(visual)
        self.worldbody.append(visual)
def function[merge_visual, parameter[self, mujoco_objects]]: constant[Adds visual objects to the MJCF model.] name[self].visual_obj_mjcf assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f58cd90>, <ast.Name object at 0x7da18f58c8b0>]]] in starred[call[name[mujoco_objects].items, parameter[]]] begin[:] call[name[self].merge_asset, parameter[name[obj_mjcf]]] variable[obj] assign[=] call[name[obj_mjcf].get_visual, parameter[]] call[name[self].visual_obj_mjcf.append, parameter[name[obj]]] call[name[self].worldbody.append, parameter[name[obj]]]
keyword[def] identifier[merge_visual] ( identifier[self] , identifier[mujoco_objects] ): literal[string] identifier[self] . identifier[visual_obj_mjcf] =[] keyword[for] identifier[obj_name] , identifier[obj_mjcf] keyword[in] identifier[mujoco_objects] . identifier[items] (): identifier[self] . identifier[merge_asset] ( identifier[obj_mjcf] ) identifier[obj] = identifier[obj_mjcf] . identifier[get_visual] ( identifier[name] = identifier[obj_name] , identifier[site] = keyword[False] ) identifier[self] . identifier[visual_obj_mjcf] . identifier[append] ( identifier[obj] ) identifier[self] . identifier[worldbody] . identifier[append] ( identifier[obj] )
def merge_visual(self, mujoco_objects): """Adds visual objects to the MJCF model.""" self.visual_obj_mjcf = [] for (obj_name, obj_mjcf) in mujoco_objects.items(): self.merge_asset(obj_mjcf) # Load object obj = obj_mjcf.get_visual(name=obj_name, site=False) self.visual_obj_mjcf.append(obj) self.worldbody.append(obj) # depends on [control=['for'], data=[]]
def mode_group(self):
    """The mode group that the button, ring, or strip that triggered
    this event is considered in.

    The mode is a virtual grouping of functionality, usually based
    on some visual feedback like LEDs on the pad.
    See `Tablet pad modes`_ for details.

    Returns:
        ~libinput.define.TabletPadModeGroup: The mode group of
        the button, ring or strip that caused this event.
    """
    raw_group = self._libinput.libinput_event_tablet_pad_get_mode_group(
        self._handle)
    # Wrap the C handle in the high-level mode-group object.
    return TabletPadModeGroup(raw_group, self._libinput)
def function[mode_group, parameter[self]]: constant[The mode group that the button, ring, or strip that triggered this event is considered in. The mode is a virtual grouping of functionality, usually based on some visual feedback like LEDs on the pad. See `Tablet pad modes`_ for details. Returns: ~libinput.define.TabletPadModeGroup: The mode group of the button, ring or strip that caused this event. ] variable[hmodegroup] assign[=] call[name[self]._libinput.libinput_event_tablet_pad_get_mode_group, parameter[name[self]._handle]] return[call[name[TabletPadModeGroup], parameter[name[hmodegroup], name[self]._libinput]]]
keyword[def] identifier[mode_group] ( identifier[self] ): literal[string] identifier[hmodegroup] = identifier[self] . identifier[_libinput] . identifier[libinput_event_tablet_pad_get_mode_group] ( identifier[self] . identifier[_handle] ) keyword[return] identifier[TabletPadModeGroup] ( identifier[hmodegroup] , identifier[self] . identifier[_libinput] )
def mode_group(self): """The mode group that the button, ring, or strip that triggered this event is considered in. The mode is a virtual grouping of functionality, usually based on some visual feedback like LEDs on the pad. See `Tablet pad modes`_ for details. Returns: ~libinput.define.TabletPadModeGroup: The mode group of the button, ring or strip that caused this event. """ hmodegroup = self._libinput.libinput_event_tablet_pad_get_mode_group(self._handle) return TabletPadModeGroup(hmodegroup, self._libinput)
def create_venv_with_package(packages):
    """Create a venv with these packages in a temp dir and yield the env.

    packages should be an iterable of pip version instructions
    (e.g. package~=1.2.3)
    """
    with tempfile.TemporaryDirectory() as tempdir:
        env = create(tempdir, with_pip=True)
        base_cmd = [env.env_exe, "-m", "pip", "install"]
        # Bring pip itself up to date before installing anything else.
        subprocess.check_call(base_cmd + ['-U', 'pip'])
        if packages:
            subprocess.check_call(base_cmd + packages)
        yield env
def function[create_venv_with_package, parameter[packages]]: constant[Create a venv with these packages in a temp dir and yielf the env. packages should be an iterable of pip version instructio (e.g. package~=1.2.3) ] with call[name[tempfile].TemporaryDirectory, parameter[]] begin[:] variable[myenv] assign[=] call[name[create], parameter[name[tempdir]]] variable[pip_call] assign[=] list[[<ast.Attribute object at 0x7da2041d9510>, <ast.Constant object at 0x7da2041d80a0>, <ast.Constant object at 0x7da2041d9d20>, <ast.Constant object at 0x7da2041d9780>]] call[name[subprocess].check_call, parameter[binary_operation[name[pip_call] + list[[<ast.Constant object at 0x7da2041da4d0>, <ast.Constant object at 0x7da2041d8040>]]]]] if name[packages] begin[:] call[name[subprocess].check_call, parameter[binary_operation[name[pip_call] + name[packages]]]] <ast.Yield object at 0x7da2041d9cc0>
keyword[def] identifier[create_venv_with_package] ( identifier[packages] ): literal[string] keyword[with] identifier[tempfile] . identifier[TemporaryDirectory] () keyword[as] identifier[tempdir] : identifier[myenv] = identifier[create] ( identifier[tempdir] , identifier[with_pip] = keyword[True] ) identifier[pip_call] =[ identifier[myenv] . identifier[env_exe] , literal[string] , literal[string] , literal[string] , ] identifier[subprocess] . identifier[check_call] ( identifier[pip_call] +[ literal[string] , literal[string] ]) keyword[if] identifier[packages] : identifier[subprocess] . identifier[check_call] ( identifier[pip_call] + identifier[packages] ) keyword[yield] identifier[myenv]
def create_venv_with_package(packages): """Create a venv with these packages in a temp dir and yielf the env. packages should be an iterable of pip version instructio (e.g. package~=1.2.3) """ with tempfile.TemporaryDirectory() as tempdir: myenv = create(tempdir, with_pip=True) pip_call = [myenv.env_exe, '-m', 'pip', 'install'] subprocess.check_call(pip_call + ['-U', 'pip']) if packages: subprocess.check_call(pip_call + packages) # depends on [control=['if'], data=[]] yield myenv # depends on [control=['with'], data=['tempdir']]
def transpile(circuits, backend=None, basis_gates=None, coupling_map=None,
              initial_layout=None, seed_mapper=None, pass_manager=None):
    """transpile one or more circuits.

    Deprecated thin wrapper: emits a DeprecationWarning and forwards
    every argument to qiskit.compiler.transpile() (note that
    seed_mapper is forwarded as seed_transpiler).

    Args:
        circuits (QuantumCircuit or list[QuantumCircuit]): circuits to compile
        backend (BaseBackend): a backend to compile for
        basis_gates (list[str]): list of basis gate names supported by the
            target. Default: ['u1','u2','u3','cx','id']
        coupling_map (list): coupling map (perhaps custom) to target in mapping
        initial_layout (Layout or dict or list): Initial position of virtual
            qubits on physical qubits. The final layout is not guaranteed to be
            the same, as the transpiler may permute qubits through swaps or
            other means.
        seed_mapper (int): random seed for the swap_mapper
        pass_manager (PassManager): a pass_manager for the transpiler stages

    Returns:
        QuantumCircuit or list[QuantumCircuit]: transpiled circuit(s).

    Raises:
        TranspilerError: in case of bad inputs to transpiler or errors in passes
    """
    warnings.warn("qiskit.transpiler.transpile() has been deprecated and will be "
                  "removed in the 0.9 release. Use qiskit.compiler.transpile() instead.",
                  DeprecationWarning)
    return compiler.transpile(circuits=circuits,
                              backend=backend,
                              basis_gates=basis_gates,
                              coupling_map=coupling_map,
                              initial_layout=initial_layout,
                              seed_transpiler=seed_mapper,
                              pass_manager=pass_manager)
def function[transpile, parameter[circuits, backend, basis_gates, coupling_map, initial_layout, seed_mapper, pass_manager]]: constant[transpile one or more circuits. Args: circuits (QuantumCircuit or list[QuantumCircuit]): circuits to compile backend (BaseBackend): a backend to compile for basis_gates (list[str]): list of basis gate names supported by the target. Default: ['u1','u2','u3','cx','id'] coupling_map (list): coupling map (perhaps custom) to target in mapping initial_layout (Layout or dict or list): Initial position of virtual qubits on physical qubits. The final layout is not guaranteed to be the same, as the transpiler may permute qubits through swaps or other means. seed_mapper (int): random seed for the swap_mapper pass_manager (PassManager): a pass_manager for the transpiler stages Returns: QuantumCircuit or list[QuantumCircuit]: transpiled circuit(s). Raises: TranspilerError: in case of bad inputs to transpiler or errors in passes ] call[name[warnings].warn, parameter[constant[qiskit.transpiler.transpile() has been deprecated and will be removed in the 0.9 release. Use qiskit.compiler.transpile() instead.], name[DeprecationWarning]]] return[call[name[compiler].transpile, parameter[]]]
keyword[def] identifier[transpile] ( identifier[circuits] , identifier[backend] = keyword[None] , identifier[basis_gates] = keyword[None] , identifier[coupling_map] = keyword[None] , identifier[initial_layout] = keyword[None] , identifier[seed_mapper] = keyword[None] , identifier[pass_manager] = keyword[None] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] literal[string] , identifier[DeprecationWarning] ) keyword[return] identifier[compiler] . identifier[transpile] ( identifier[circuits] = identifier[circuits] , identifier[backend] = identifier[backend] , identifier[basis_gates] = identifier[basis_gates] , identifier[coupling_map] = identifier[coupling_map] , identifier[initial_layout] = identifier[initial_layout] , identifier[seed_transpiler] = identifier[seed_mapper] , identifier[pass_manager] = identifier[pass_manager] )
def transpile(circuits, backend=None, basis_gates=None, coupling_map=None, initial_layout=None, seed_mapper=None, pass_manager=None): """transpile one or more circuits. Args: circuits (QuantumCircuit or list[QuantumCircuit]): circuits to compile backend (BaseBackend): a backend to compile for basis_gates (list[str]): list of basis gate names supported by the target. Default: ['u1','u2','u3','cx','id'] coupling_map (list): coupling map (perhaps custom) to target in mapping initial_layout (Layout or dict or list): Initial position of virtual qubits on physical qubits. The final layout is not guaranteed to be the same, as the transpiler may permute qubits through swaps or other means. seed_mapper (int): random seed for the swap_mapper pass_manager (PassManager): a pass_manager for the transpiler stages Returns: QuantumCircuit or list[QuantumCircuit]: transpiled circuit(s). Raises: TranspilerError: in case of bad inputs to transpiler or errors in passes """ warnings.warn('qiskit.transpiler.transpile() has been deprecated and will be removed in the 0.9 release. Use qiskit.compiler.transpile() instead.', DeprecationWarning) return compiler.transpile(circuits=circuits, backend=backend, basis_gates=basis_gates, coupling_map=coupling_map, initial_layout=initial_layout, seed_transpiler=seed_mapper, pass_manager=pass_manager)
def get_profiles_in_svr(nickname, server, all_profiles_dict, org_vm,
                        add_error_list=False):
    """
    Test all profiles in server.profiles to determine if profile is in
    the all_profiles_dict.

    Returns list of profiles in the profile_dict and in the defined server.
    If add_error_list is True, it also adds profiles not found to
    PROFILES_WITH_NO_DEFINITIONS.
    """
    matched = []
    for inst in server.profiles:
        # Short-form profile name used as the dictionary key.
        pname = profile_name(org_vm, inst, short=True)
        if pname not in all_profiles_dict:
            if add_error_list:
                print('PROFILES_WITH_NO_DEFINITIONS svr=%s: %s' % (nickname,
                                                                   pname))
                # NOTE(review): this stores only the *last* missing profile
                # per nickname (each assignment overwrites the previous one)
                # -- preserved as-is from the original implementation.
                PROFILES_WITH_NO_DEFINITIONS[nickname] = pname
            continue
        matched.append(inst)
    return matched
def function[get_profiles_in_svr, parameter[nickname, server, all_profiles_dict, org_vm, add_error_list]]: constant[ Test all profiles in server.profiles to determine if profile is in the all_profiles_dict. Returns list of profiles in the profile_dict and in the defined server. If add_error_list is True, it also adds profiles not found to PROFILES_WITH_NO_DEFINITIONS. ] variable[profiles_in_dict] assign[=] list[[]] for taget[name[profile_inst]] in starred[name[server].profiles] begin[:] variable[pn] assign[=] call[name[profile_name], parameter[name[org_vm], name[profile_inst]]] if compare[name[pn] in name[all_profiles_dict]] begin[:] call[name[profiles_in_dict].append, parameter[name[profile_inst]]] return[name[profiles_in_dict]]
keyword[def] identifier[get_profiles_in_svr] ( identifier[nickname] , identifier[server] , identifier[all_profiles_dict] , identifier[org_vm] , identifier[add_error_list] = keyword[False] ): literal[string] identifier[profiles_in_dict] =[] keyword[for] identifier[profile_inst] keyword[in] identifier[server] . identifier[profiles] : identifier[pn] = identifier[profile_name] ( identifier[org_vm] , identifier[profile_inst] , identifier[short] = keyword[True] ) keyword[if] identifier[pn] keyword[in] identifier[all_profiles_dict] : identifier[profiles_in_dict] . identifier[append] ( identifier[profile_inst] ) keyword[else] : keyword[if] identifier[add_error_list] : identifier[print] ( literal[string] % ( identifier[nickname] , identifier[pn] )) identifier[PROFILES_WITH_NO_DEFINITIONS] [ identifier[nickname] ]= identifier[pn] keyword[return] identifier[profiles_in_dict]
def get_profiles_in_svr(nickname, server, all_profiles_dict, org_vm, add_error_list=False): """ Test all profiles in server.profiles to determine if profile is in the all_profiles_dict. Returns list of profiles in the profile_dict and in the defined server. If add_error_list is True, it also adds profiles not found to PROFILES_WITH_NO_DEFINITIONS. """ profiles_in_dict = [] for profile_inst in server.profiles: pn = profile_name(org_vm, profile_inst, short=True) if pn in all_profiles_dict: profiles_in_dict.append(profile_inst) # depends on [control=['if'], data=[]] elif add_error_list: print('PROFILES_WITH_NO_DEFINITIONS svr=%s: %s' % (nickname, pn)) PROFILES_WITH_NO_DEFINITIONS[nickname] = pn # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['profile_inst']] return profiles_in_dict
def editLogSettings(self, logLocation, logLevel="WARNING", maxLogFileAge=90):
    """
    Edits the log settings for the portal site.

    Inputs:
       logLocation - file path to where you want the log files saved
        on disk
       logLevel - level of detail saved in the log files. Levels are:
        OFF, SEVERE, WARNING, INFO, FINE, VERBOSE, and DEBUG
       maxLogFileAge - number of days to keep a single log file
    """
    # POST the new settings to the admin "settings/edit" endpoint.
    return self._post(url=self._url + "/settings/edit",
                      param_dict={
                          "f": "json",
                          "logDir": logLocation,
                          "logLevel": logLevel,
                          "maxLogFileAge": maxLogFileAge,
                      },
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
def function[editLogSettings, parameter[self, logLocation, logLevel, maxLogFileAge]]: constant[ edits the log settings for the portal site Inputs: logLocation - file path to where you want the log files saved on disk logLevel - this is the level of detail saved in the log files Levels are: OFF, SEVERE, WARNING, INFO, FINE, VERBOSE, and DEBUG maxLogFileAge - the numbers of days to keep a single log file ] variable[url] assign[=] binary_operation[name[self]._url + constant[/settings/edit]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9a200>, <ast.Constant object at 0x7da18dc9a620>, <ast.Constant object at 0x7da18dc9abf0>, <ast.Constant object at 0x7da18dc9b910>], [<ast.Constant object at 0x7da18dc99090>, <ast.Name object at 0x7da18dc9a530>, <ast.Name object at 0x7da18dc992d0>, <ast.Name object at 0x7da18dc9bfa0>]] return[call[name[self]._post, parameter[]]]
keyword[def] identifier[editLogSettings] ( identifier[self] , identifier[logLocation] , identifier[logLevel] = literal[string] , identifier[maxLogFileAge] = literal[int] ): literal[string] identifier[url] = identifier[self] . identifier[_url] + literal[string] identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[logLocation] , literal[string] : identifier[logLevel] , literal[string] : identifier[maxLogFileAge] } keyword[return] identifier[self] . identifier[_post] ( identifier[url] = identifier[url] , identifier[param_dict] = identifier[params] , identifier[securityHandler] = identifier[self] . identifier[_securityHandler] , identifier[proxy_url] = identifier[self] . identifier[_proxy_url] , identifier[proxy_port] = identifier[self] . identifier[_proxy_port] )
def editLogSettings(self, logLocation, logLevel='WARNING', maxLogFileAge=90): """ edits the log settings for the portal site Inputs: logLocation - file path to where you want the log files saved on disk logLevel - this is the level of detail saved in the log files Levels are: OFF, SEVERE, WARNING, INFO, FINE, VERBOSE, and DEBUG maxLogFileAge - the numbers of days to keep a single log file """ url = self._url + '/settings/edit' params = {'f': 'json', 'logDir': logLocation, 'logLevel': logLevel, 'maxLogFileAge': maxLogFileAge} return self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
def dicomdir_info(dirpath, *args, **kwargs):
    """ Get information about series in dir"""
    # Any extra positional/keyword arguments are forwarded verbatim
    # to DicomReader.
    reader = DicomReader(*args, dirpath=dirpath, **kwargs)
    return reader.dicomdirectory.get_stats_of_series_in_dir()
def function[dicomdir_info, parameter[dirpath]]: constant[ Get information about series in dir] variable[dr] assign[=] call[name[DicomReader], parameter[<ast.Starred object at 0x7da204347df0>]] variable[info] assign[=] call[name[dr].dicomdirectory.get_stats_of_series_in_dir, parameter[]] return[name[info]]
keyword[def] identifier[dicomdir_info] ( identifier[dirpath] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[dr] = identifier[DicomReader] ( identifier[dirpath] = identifier[dirpath] ,* identifier[args] ,** identifier[kwargs] ) identifier[info] = identifier[dr] . identifier[dicomdirectory] . identifier[get_stats_of_series_in_dir] () keyword[return] identifier[info]
def dicomdir_info(dirpath, *args, **kwargs): """ Get information about series in dir""" dr = DicomReader(*args, dirpath=dirpath, **kwargs) info = dr.dicomdirectory.get_stats_of_series_in_dir() return info
def read_data(self):
    # type: () -> ByteString
    """
    Reads all the data in the input stream

    :return: The read data
    """
    # A missing or malformed content-length header yields -1, which
    # makes read() consume the stream to EOF.
    try:
        length = int(self.get_header("content-length"))
    except (ValueError, TypeError):
        length = -1
    stream = self.get_rfile()
    return stream.read(length)
def function[read_data, parameter[self]]: constant[ Reads all the data in the input stream :return: The read data ] <ast.Try object at 0x7da1b034ab90> return[call[call[name[self].get_rfile, parameter[]].read, parameter[name[size]]]]
keyword[def] identifier[read_data] ( identifier[self] ): literal[string] keyword[try] : identifier[size] = identifier[int] ( identifier[self] . identifier[get_header] ( literal[string] )) keyword[except] ( identifier[ValueError] , identifier[TypeError] ): identifier[size] =- literal[int] keyword[return] identifier[self] . identifier[get_rfile] (). identifier[read] ( identifier[size] )
def read_data(self): # type: () -> ByteString '\n Reads all the data in the input stream\n\n :return: The read data\n ' try: size = int(self.get_header('content-length')) # depends on [control=['try'], data=[]] except (ValueError, TypeError): size = -1 # depends on [control=['except'], data=[]] return self.get_rfile().read(size)
def bandana(self, emin=-np.inf, emax=np.inf):
    """Cut out bands outside the range (emin,emax)"""
    # Per-band extrema taken over all k-points (axis 1).
    band_floor = np.min(self.ebands, axis=1)
    band_ceil = np.max(self.ebands, axis=1)
    # Last band whose minimum is below emax, and first band whose
    # maximum is above emin, delimit the retained window.
    nemax = np.nonzero(band_floor < emax)[0][-1]
    nemin = np.nonzero(band_ceil > emin)[0][0]
    # NOTE(review): the slice upper bound is exclusive, so the band at
    # index ``nemax`` itself is dropped -- preserved as-is from the
    # original implementation.
    self.ebands = self.ebands[nemin:nemax]
    if isinstance(self.proj, np.ndarray):
        self.proj = self.proj[:, nemin:nemax, :, :]
    if self.mommat is not None:
        self.mommat = self.mommat[:, nemin:nemax, :]
    # Removing bands may change the number of valence electrons.
    if self.nelect is not None:
        self.nelect -= self.dosweight * nemin
    return nemin, nemax
def function[bandana, parameter[self, emin, emax]]: constant[Cut out bands outside the range (emin,emax)] variable[bandmin] assign[=] call[name[np].min, parameter[name[self].ebands]] variable[bandmax] assign[=] call[name[np].max, parameter[name[self].ebands]] variable[ii] assign[=] call[name[np].nonzero, parameter[compare[name[bandmin] less[<] name[emax]]]] variable[nemax] assign[=] call[call[name[ii]][constant[0]]][<ast.UnaryOp object at 0x7da204345090>] variable[ii] assign[=] call[name[np].nonzero, parameter[compare[name[bandmax] greater[>] name[emin]]]] variable[nemin] assign[=] call[call[name[ii]][constant[0]]][constant[0]] name[self].ebands assign[=] call[name[self].ebands][<ast.Slice object at 0x7da204345fc0>] if call[name[isinstance], parameter[name[self].proj, name[np].ndarray]] begin[:] name[self].proj assign[=] call[name[self].proj][tuple[[<ast.Slice object at 0x7da2043448b0>, <ast.Slice object at 0x7da204345810>, <ast.Slice object at 0x7da2043465c0>, <ast.Slice object at 0x7da204344760>]]] if compare[name[self].mommat is_not constant[None]] begin[:] name[self].mommat assign[=] call[name[self].mommat][tuple[[<ast.Slice object at 0x7da1b26af5e0>, <ast.Slice object at 0x7da1b26afd00>, <ast.Slice object at 0x7da1b26aeb30>]]] if compare[name[self].nelect is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b26af9a0> return[tuple[[<ast.Name object at 0x7da1b26adc60>, <ast.Name object at 0x7da1b26adf30>]]]
keyword[def] identifier[bandana] ( identifier[self] , identifier[emin] =- identifier[np] . identifier[inf] , identifier[emax] = identifier[np] . identifier[inf] ): literal[string] identifier[bandmin] = identifier[np] . identifier[min] ( identifier[self] . identifier[ebands] , identifier[axis] = literal[int] ) identifier[bandmax] = identifier[np] . identifier[max] ( identifier[self] . identifier[ebands] , identifier[axis] = literal[int] ) identifier[ii] = identifier[np] . identifier[nonzero] ( identifier[bandmin] < identifier[emax] ) identifier[nemax] = identifier[ii] [ literal[int] ][- literal[int] ] identifier[ii] = identifier[np] . identifier[nonzero] ( identifier[bandmax] > identifier[emin] ) identifier[nemin] = identifier[ii] [ literal[int] ][ literal[int] ] identifier[self] . identifier[ebands] = identifier[self] . identifier[ebands] [ identifier[nemin] : identifier[nemax] ] keyword[if] identifier[isinstance] ( identifier[self] . identifier[proj] , identifier[np] . identifier[ndarray] ): identifier[self] . identifier[proj] = identifier[self] . identifier[proj] [:, identifier[nemin] : identifier[nemax] ,:,:] keyword[if] identifier[self] . identifier[mommat] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[mommat] = identifier[self] . identifier[mommat] [:, identifier[nemin] : identifier[nemax] ,:] keyword[if] identifier[self] . identifier[nelect] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[nelect] -= identifier[self] . identifier[dosweight] * identifier[nemin] keyword[return] identifier[nemin] , identifier[nemax]
def bandana(self, emin=-np.inf, emax=np.inf): """Cut out bands outside the range (emin,emax)""" bandmin = np.min(self.ebands, axis=1) bandmax = np.max(self.ebands, axis=1) ii = np.nonzero(bandmin < emax) nemax = ii[0][-1] ii = np.nonzero(bandmax > emin) nemin = ii[0][0] # BoltzTraP2.misc.info("BANDANA output") # for iband in range(len(self.ebands)): # BoltzTraP2.misc.info(iband, bandmin[iband], bandmax[iband], ( # (bandmin[iband] < emax) & (bandmax[iband] > emin))) self.ebands = self.ebands[nemin:nemax] if isinstance(self.proj, np.ndarray): self.proj = self.proj[:, nemin:nemax, :, :] # depends on [control=['if'], data=[]] if self.mommat is not None: self.mommat = self.mommat[:, nemin:nemax, :] # depends on [control=['if'], data=[]] # Removing bands may change the number of valence electrons if self.nelect is not None: self.nelect -= self.dosweight * nemin # depends on [control=['if'], data=[]] return (nemin, nemax)
def set_title(self, title):
    """Sets the title.

    arg:    title (string): the new title
    raise:  InvalidArgument - ``title`` is invalid
    raise:  NoAccess - ``Metadata.isReadOnly()`` is ``true``
    raise:  NullArgument - ``title`` is ``null``
    *compliance: mandatory -- This method must be implemented.*

    """
    # Validation/conversion is delegated to _get_display_text, which
    # checks the value against the title metadata before storing it.
    metadata = self.get_title_metadata()
    self._my_map['title'] = self._get_display_text(title, metadata)
def function[set_title, parameter[self, title]]: constant[Sets the title. arg: title (string): the new title raise: InvalidArgument - ``title`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` raise: NullArgument - ``title`` is ``null`` *compliance: mandatory -- This method must be implemented.* ] call[name[self]._my_map][constant[title]] assign[=] call[name[self]._get_display_text, parameter[name[title], call[name[self].get_title_metadata, parameter[]]]]
keyword[def] identifier[set_title] ( identifier[self] , identifier[title] ): literal[string] identifier[self] . identifier[_my_map] [ literal[string] ]= identifier[self] . identifier[_get_display_text] ( identifier[title] , identifier[self] . identifier[get_title_metadata] ())
def set_title(self, title): """Sets the title. arg: title (string): the new title raise: InvalidArgument - ``title`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` raise: NullArgument - ``title`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.repository.AssetForm.set_title_template self._my_map['title'] = self._get_display_text(title, self.get_title_metadata())
def coord_pyramid(coord, zoom_start, zoom_stop):
    """
    generate full pyramid for coord

    Generate the full pyramid for a single coordinate. Note that
    zoom_stop is exclusive.
    """
    # The coordinate itself is emitted first, then its descendants,
    # skipping anything shallower than zoom_start.
    if coord.zoom >= zoom_start:
        yield coord
    for descendant in coord_children_range(coord, zoom_stop):
        if descendant.zoom >= zoom_start:
            yield descendant
def function[coord_pyramid, parameter[coord, zoom_start, zoom_stop]]: constant[ generate full pyramid for coord Generate the full pyramid for a single coordinate. Note that zoom_stop is exclusive. ] if compare[name[zoom_start] less_or_equal[<=] name[coord].zoom] begin[:] <ast.Yield object at 0x7da1b04a7af0> for taget[name[child_coord]] in starred[call[name[coord_children_range], parameter[name[coord], name[zoom_stop]]]] begin[:] if compare[name[zoom_start] less_or_equal[<=] name[child_coord].zoom] begin[:] <ast.Yield object at 0x7da1b04a7d00>
keyword[def] identifier[coord_pyramid] ( identifier[coord] , identifier[zoom_start] , identifier[zoom_stop] ): literal[string] keyword[if] identifier[zoom_start] <= identifier[coord] . identifier[zoom] : keyword[yield] identifier[coord] keyword[for] identifier[child_coord] keyword[in] identifier[coord_children_range] ( identifier[coord] , identifier[zoom_stop] ): keyword[if] identifier[zoom_start] <= identifier[child_coord] . identifier[zoom] : keyword[yield] identifier[child_coord]
def coord_pyramid(coord, zoom_start, zoom_stop): """ generate full pyramid for coord Generate the full pyramid for a single coordinate. Note that zoom_stop is exclusive. """ if zoom_start <= coord.zoom: yield coord # depends on [control=['if'], data=[]] for child_coord in coord_children_range(coord, zoom_stop): if zoom_start <= child_coord.zoom: yield child_coord # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child_coord']]
def insert(self, i, tag1, tag2, cmd="prevtag", x=None, y=None):
    """ Inserts a new rule that updates words with tag1 to tag2,
        given constraints x and y, e.g., Context.append("TO < NN", "VB")
    """
    # Shorthand forms are only expanded when no explicit constraints
    # were supplied.
    if not x and not y:
        if " < " in tag1:
            # "A < B": tag1 becomes "A", constraint x becomes "B".
            tag1, x = tag1.split(" < ")
            cmd = "prevtag"
        elif " > " in tag1:
            # "A > B": constraint x becomes "A", tag1 becomes "B".
            x, tag1 = tag1.split(" > ")
            cmd = "nexttag"
    lazylist.insert(self, i, [tag1, tag2, cmd, x or "", y or ""])
def function[insert, parameter[self, i, tag1, tag2, cmd, x, y]]: constant[ Inserts a new rule that updates words with tag1 to tag2, given constraints x and y, e.g., Context.append("TO < NN", "VB") ] if <ast.BoolOp object at 0x7da20c991cc0> begin[:] <ast.Tuple object at 0x7da20c9906d0> assign[=] call[name[tag1].split, parameter[constant[ < ]]] variable[cmd] assign[=] constant[prevtag] if <ast.BoolOp object at 0x7da20c992bc0> begin[:] <ast.Tuple object at 0x7da18f58c370> assign[=] call[name[tag1].split, parameter[constant[ > ]]] variable[cmd] assign[=] constant[nexttag] call[name[lazylist].insert, parameter[name[self], name[i], list[[<ast.Name object at 0x7da1b2345060>, <ast.Name object at 0x7da1b2345000>, <ast.Name object at 0x7da1b23445e0>, <ast.BoolOp object at 0x7da1b2346bf0>, <ast.BoolOp object at 0x7da1b23460b0>]]]]
keyword[def] identifier[insert] ( identifier[self] , identifier[i] , identifier[tag1] , identifier[tag2] , identifier[cmd] = literal[string] , identifier[x] = keyword[None] , identifier[y] = keyword[None] ): literal[string] keyword[if] literal[string] keyword[in] identifier[tag1] keyword[and] keyword[not] identifier[x] keyword[and] keyword[not] identifier[y] : identifier[tag1] , identifier[x] = identifier[tag1] . identifier[split] ( literal[string] ); identifier[cmd] = literal[string] keyword[if] literal[string] keyword[in] identifier[tag1] keyword[and] keyword[not] identifier[x] keyword[and] keyword[not] identifier[y] : identifier[x] , identifier[tag1] = identifier[tag1] . identifier[split] ( literal[string] ); identifier[cmd] = literal[string] identifier[lazylist] . identifier[insert] ( identifier[self] , identifier[i] ,[ identifier[tag1] , identifier[tag2] , identifier[cmd] , identifier[x] keyword[or] literal[string] , identifier[y] keyword[or] literal[string] ])
def insert(self, i, tag1, tag2, cmd='prevtag', x=None, y=None): """ Inserts a new rule that updates words with tag1 to tag2, given constraints x and y, e.g., Context.append("TO < NN", "VB") """ if ' < ' in tag1 and (not x) and (not y): (tag1, x) = tag1.split(' < ') cmd = 'prevtag' # depends on [control=['if'], data=[]] if ' > ' in tag1 and (not x) and (not y): (x, tag1) = tag1.split(' > ') cmd = 'nexttag' # depends on [control=['if'], data=[]] lazylist.insert(self, i, [tag1, tag2, cmd, x or '', y or ''])
def _parse_track_dim(self,d1,interp=True,phys=False): """Parse the dimension to plot the stream track for""" if interp: interpStr= 'interpolated' else: interpStr= '' if d1.lower() == 'x': tx= self.__dict__['_%sObsTrackXY' % interpStr][:,0] elif d1.lower() == 'y': tx= self.__dict__['_%sObsTrackXY' % interpStr][:,1] elif d1.lower() == 'z': tx= self.__dict__['_%sObsTrackXY' % interpStr][:,2] elif d1.lower() == 'r': tx= self.__dict__['_%sObsTrack' % interpStr][:,0] elif d1.lower() == 'phi': tx= self.__dict__['_%sObsTrack' % interpStr][:,5] elif d1.lower() == 'vx': tx= self.__dict__['_%sObsTrackXY' % interpStr][:,3] elif d1.lower() == 'vy': tx= self.__dict__['_%sObsTrackXY' % interpStr][:,4] elif d1.lower() == 'vz': tx= self.__dict__['_%sObsTrackXY' % interpStr][:,5] elif d1.lower() == 'vr': tx= self.__dict__['_%sObsTrack' % interpStr][:,1] elif d1.lower() == 'vt': tx= self.__dict__['_%sObsTrack' % interpStr][:,2] elif d1.lower() == 'll': tx= self.__dict__['_%sObsTrackLB' % interpStr][:,0] elif d1.lower() == 'bb': tx= self.__dict__['_%sObsTrackLB' % interpStr][:,1] elif d1.lower() == 'dist': tx= self.__dict__['_%sObsTrackLB' % interpStr][:,2] elif d1.lower() == 'pmll': tx= self.__dict__['_%sObsTrackLB' % interpStr][:,4] elif d1.lower() == 'pmbb': tx= self.__dict__['_%sObsTrackLB' % interpStr][:,5] elif d1.lower() == 'vlos': tx= self.__dict__['_%sObsTrackLB' % interpStr][:,3] if phys and (d1.lower() == 'x' or d1.lower() == 'y' \ or d1.lower() == 'z' or d1.lower() == 'r'): tx= copy.copy(tx) tx*= self._ro if phys and (d1.lower() == 'vx' or d1.lower() == 'vy' \ or d1.lower() == 'vz' or d1.lower() == 'vr' \ or d1.lower() == 'vt'): tx= copy.copy(tx) tx*= self._vo return tx
def function[_parse_track_dim, parameter[self, d1, interp, phys]]: constant[Parse the dimension to plot the stream track for] if name[interp] begin[:] variable[interpStr] assign[=] constant[interpolated] if compare[call[name[d1].lower, parameter[]] equal[==] constant[x]] begin[:] variable[tx] assign[=] call[call[name[self].__dict__][binary_operation[constant[_%sObsTrackXY] <ast.Mod object at 0x7da2590d6920> name[interpStr]]]][tuple[[<ast.Slice object at 0x7da18bc72470>, <ast.Constant object at 0x7da18bc721d0>]]] if <ast.BoolOp object at 0x7da20c993a30> begin[:] variable[tx] assign[=] call[name[copy].copy, parameter[name[tx]]] <ast.AugAssign object at 0x7da1b0da27a0> if <ast.BoolOp object at 0x7da1b0da3eb0> begin[:] variable[tx] assign[=] call[name[copy].copy, parameter[name[tx]]] <ast.AugAssign object at 0x7da1b0da2e60> return[name[tx]]
keyword[def] identifier[_parse_track_dim] ( identifier[self] , identifier[d1] , identifier[interp] = keyword[True] , identifier[phys] = keyword[False] ): literal[string] keyword[if] identifier[interp] : identifier[interpStr] = literal[string] keyword[else] : identifier[interpStr] = literal[string] keyword[if] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . 
identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[elif] identifier[d1] . identifier[lower] ()== literal[string] : identifier[tx] = identifier[self] . identifier[__dict__] [ literal[string] % identifier[interpStr] ][:, literal[int] ] keyword[if] identifier[phys] keyword[and] ( identifier[d1] . identifier[lower] ()== literal[string] keyword[or] identifier[d1] . identifier[lower] ()== literal[string] keyword[or] identifier[d1] . identifier[lower] ()== literal[string] keyword[or] identifier[d1] . identifier[lower] ()== literal[string] ): identifier[tx] = identifier[copy] . identifier[copy] ( identifier[tx] ) identifier[tx] *= identifier[self] . identifier[_ro] keyword[if] identifier[phys] keyword[and] ( identifier[d1] . identifier[lower] ()== literal[string] keyword[or] identifier[d1] . 
identifier[lower] ()== literal[string] keyword[or] identifier[d1] . identifier[lower] ()== literal[string] keyword[or] identifier[d1] . identifier[lower] ()== literal[string] keyword[or] identifier[d1] . identifier[lower] ()== literal[string] ): identifier[tx] = identifier[copy] . identifier[copy] ( identifier[tx] ) identifier[tx] *= identifier[self] . identifier[_vo] keyword[return] identifier[tx]
def _parse_track_dim(self, d1, interp=True, phys=False): """Parse the dimension to plot the stream track for""" if interp: interpStr = 'interpolated' # depends on [control=['if'], data=[]] else: interpStr = '' if d1.lower() == 'x': tx = self.__dict__['_%sObsTrackXY' % interpStr][:, 0] # depends on [control=['if'], data=[]] elif d1.lower() == 'y': tx = self.__dict__['_%sObsTrackXY' % interpStr][:, 1] # depends on [control=['if'], data=[]] elif d1.lower() == 'z': tx = self.__dict__['_%sObsTrackXY' % interpStr][:, 2] # depends on [control=['if'], data=[]] elif d1.lower() == 'r': tx = self.__dict__['_%sObsTrack' % interpStr][:, 0] # depends on [control=['if'], data=[]] elif d1.lower() == 'phi': tx = self.__dict__['_%sObsTrack' % interpStr][:, 5] # depends on [control=['if'], data=[]] elif d1.lower() == 'vx': tx = self.__dict__['_%sObsTrackXY' % interpStr][:, 3] # depends on [control=['if'], data=[]] elif d1.lower() == 'vy': tx = self.__dict__['_%sObsTrackXY' % interpStr][:, 4] # depends on [control=['if'], data=[]] elif d1.lower() == 'vz': tx = self.__dict__['_%sObsTrackXY' % interpStr][:, 5] # depends on [control=['if'], data=[]] elif d1.lower() == 'vr': tx = self.__dict__['_%sObsTrack' % interpStr][:, 1] # depends on [control=['if'], data=[]] elif d1.lower() == 'vt': tx = self.__dict__['_%sObsTrack' % interpStr][:, 2] # depends on [control=['if'], data=[]] elif d1.lower() == 'll': tx = self.__dict__['_%sObsTrackLB' % interpStr][:, 0] # depends on [control=['if'], data=[]] elif d1.lower() == 'bb': tx = self.__dict__['_%sObsTrackLB' % interpStr][:, 1] # depends on [control=['if'], data=[]] elif d1.lower() == 'dist': tx = self.__dict__['_%sObsTrackLB' % interpStr][:, 2] # depends on [control=['if'], data=[]] elif d1.lower() == 'pmll': tx = self.__dict__['_%sObsTrackLB' % interpStr][:, 4] # depends on [control=['if'], data=[]] elif d1.lower() == 'pmbb': tx = self.__dict__['_%sObsTrackLB' % interpStr][:, 5] # depends on [control=['if'], data=[]] elif d1.lower() == 'vlos': 
tx = self.__dict__['_%sObsTrackLB' % interpStr][:, 3] # depends on [control=['if'], data=[]] if phys and (d1.lower() == 'x' or d1.lower() == 'y' or d1.lower() == 'z' or (d1.lower() == 'r')): tx = copy.copy(tx) tx *= self._ro # depends on [control=['if'], data=[]] if phys and (d1.lower() == 'vx' or d1.lower() == 'vy' or d1.lower() == 'vz' or (d1.lower() == 'vr') or (d1.lower() == 'vt')): tx = copy.copy(tx) tx *= self._vo # depends on [control=['if'], data=[]] return tx
def to_cloudformation(self, **kwargs):
    """Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds.

    :param dict kwargs: already-converted resources that may need to be modified when converting this \
        macro to pure CloudFormation
    :returns: a list of vanilla CloudFormation Resources, to which this Function expands
    :rtype: list
    """
    # Gather the optional settings into one mapping so the generator call
    # below keeps only the required positional arguments.
    optional_settings = {
        'endpoint_configuration': self.EndpointConfiguration,
        'method_settings': self.MethodSettings,
        'binary_media': self.BinaryMediaTypes,
        'minimum_compression_size': self.MinimumCompressionSize,
        'cors': self.Cors,
        'auth': self.Auth,
        'gateway_responses': self.GatewayResponses,
        'access_log_setting': self.AccessLogSetting,
        'canary_setting': self.CanarySetting,
        'tracing_enabled': self.TracingEnabled,
        'resource_attributes': self.resource_attributes,
        'passthrough_resource_attributes': self.get_passthrough_resource_attributes(),
    }
    generator = ApiGenerator(self.logical_id,
                             self.CacheClusterEnabled,
                             self.CacheClusterSize,
                             self.Variables,
                             self.depends_on,
                             self.DefinitionBody,
                             self.DefinitionUri,
                             self.Name,
                             self.StageName,
                             **optional_settings)
    rest_api, deployment, stage, permissions = generator.to_cloudformation()
    # RestApi/Deployment/Stage first, then any Lambda permissions.
    return [rest_api, deployment, stage] + list(permissions)
def function[to_cloudformation, parameter[self]]: constant[Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list ] variable[resources] assign[=] list[[]] variable[api_generator] assign[=] call[name[ApiGenerator], parameter[name[self].logical_id, name[self].CacheClusterEnabled, name[self].CacheClusterSize, name[self].Variables, name[self].depends_on, name[self].DefinitionBody, name[self].DefinitionUri, name[self].Name, name[self].StageName]] <ast.Tuple object at 0x7da20e957610> assign[=] call[name[api_generator].to_cloudformation, parameter[]] call[name[resources].extend, parameter[list[[<ast.Name object at 0x7da20e955db0>, <ast.Name object at 0x7da20e956920>, <ast.Name object at 0x7da20e955540>]]]] call[name[resources].extend, parameter[name[permissions]]] return[name[resources]]
keyword[def] identifier[to_cloudformation] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[resources] =[] identifier[api_generator] = identifier[ApiGenerator] ( identifier[self] . identifier[logical_id] , identifier[self] . identifier[CacheClusterEnabled] , identifier[self] . identifier[CacheClusterSize] , identifier[self] . identifier[Variables] , identifier[self] . identifier[depends_on] , identifier[self] . identifier[DefinitionBody] , identifier[self] . identifier[DefinitionUri] , identifier[self] . identifier[Name] , identifier[self] . identifier[StageName] , identifier[endpoint_configuration] = identifier[self] . identifier[EndpointConfiguration] , identifier[method_settings] = identifier[self] . identifier[MethodSettings] , identifier[binary_media] = identifier[self] . identifier[BinaryMediaTypes] , identifier[minimum_compression_size] = identifier[self] . identifier[MinimumCompressionSize] , identifier[cors] = identifier[self] . identifier[Cors] , identifier[auth] = identifier[self] . identifier[Auth] , identifier[gateway_responses] = identifier[self] . identifier[GatewayResponses] , identifier[access_log_setting] = identifier[self] . identifier[AccessLogSetting] , identifier[canary_setting] = identifier[self] . identifier[CanarySetting] , identifier[tracing_enabled] = identifier[self] . identifier[TracingEnabled] , identifier[resource_attributes] = identifier[self] . identifier[resource_attributes] , identifier[passthrough_resource_attributes] = identifier[self] . identifier[get_passthrough_resource_attributes] ()) identifier[rest_api] , identifier[deployment] , identifier[stage] , identifier[permissions] = identifier[api_generator] . identifier[to_cloudformation] () identifier[resources] . identifier[extend] ([ identifier[rest_api] , identifier[deployment] , identifier[stage] ]) identifier[resources] . identifier[extend] ( identifier[permissions] ) keyword[return] identifier[resources]
def to_cloudformation(self, **kwargs): """Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] api_generator = ApiGenerator(self.logical_id, self.CacheClusterEnabled, self.CacheClusterSize, self.Variables, self.depends_on, self.DefinitionBody, self.DefinitionUri, self.Name, self.StageName, endpoint_configuration=self.EndpointConfiguration, method_settings=self.MethodSettings, binary_media=self.BinaryMediaTypes, minimum_compression_size=self.MinimumCompressionSize, cors=self.Cors, auth=self.Auth, gateway_responses=self.GatewayResponses, access_log_setting=self.AccessLogSetting, canary_setting=self.CanarySetting, tracing_enabled=self.TracingEnabled, resource_attributes=self.resource_attributes, passthrough_resource_attributes=self.get_passthrough_resource_attributes()) (rest_api, deployment, stage, permissions) = api_generator.to_cloudformation() resources.extend([rest_api, deployment, stage]) resources.extend(permissions) return resources
def connect_elb(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Return a connection to Amazon's Elastic Load Balancing service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.elb.ELBConnection`
    :return: A connection to Amazon's Load Balancing Service
    """
    # Imported lazily so merely importing this module does not pull in
    # the ELB machinery.
    from boto.ec2.elb import ELBConnection
    connection = ELBConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def function[connect_elb, parameter[aws_access_key_id, aws_secret_access_key]]: constant[ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.ec2.elb.ELBConnection` :return: A connection to Amazon's Load Balancing Service ] from relative_module[boto.ec2.elb] import module[ELBConnection] return[call[name[ELBConnection], parameter[name[aws_access_key_id], name[aws_secret_access_key]]]]
keyword[def] identifier[connect_elb] ( identifier[aws_access_key_id] = keyword[None] , identifier[aws_secret_access_key] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[boto] . identifier[ec2] . identifier[elb] keyword[import] identifier[ELBConnection] keyword[return] identifier[ELBConnection] ( identifier[aws_access_key_id] , identifier[aws_secret_access_key] ,** identifier[kwargs] )
def connect_elb(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): """ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.ec2.elb.ELBConnection` :return: A connection to Amazon's Load Balancing Service """ from boto.ec2.elb import ELBConnection return ELBConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def trim(self, lower=None, upper=None):
    """Trim upper values in accordance with :math:`EQI1 \\leq EQB`.

    >>> from hydpy.models.lland import *
    >>> parameterstep('1d')
    >>> eqi1.value = 2.0
    >>> eqb(1.0)
    >>> eqb
    eqb(2.0)
    >>> eqb(2.0)
    >>> eqb
    eqb(2.0)
    >>> eqb(3.0)
    >>> eqb
    eqb(3.0)
    """
    if lower is None:
        # Default the lower bound to the sibling parameter eqi1's value;
        # getattr guards against eqi1 not having a value defined yet.
        lower = getattr(self.subpars.eqi1, 'value', None)
    # Delegate the actual clipping to the base-class implementation.
    super().trim(lower, upper)
def function[trim, parameter[self, lower, upper]]: constant[Trim upper values in accordance with :math:`EQI1 \leq EQB`. >>> from hydpy.models.lland import * >>> parameterstep('1d') >>> eqi1.value = 2.0 >>> eqb(1.0) >>> eqb eqb(2.0) >>> eqb(2.0) >>> eqb eqb(2.0) >>> eqb(3.0) >>> eqb eqb(3.0) ] if compare[name[lower] is constant[None]] begin[:] variable[lower] assign[=] call[name[getattr], parameter[name[self].subpars.eqi1, constant[value], constant[None]]] call[call[name[super], parameter[]].trim, parameter[name[lower], name[upper]]]
keyword[def] identifier[trim] ( identifier[self] , identifier[lower] = keyword[None] , identifier[upper] = keyword[None] ): literal[string] keyword[if] identifier[lower] keyword[is] keyword[None] : identifier[lower] = identifier[getattr] ( identifier[self] . identifier[subpars] . identifier[eqi1] , literal[string] , keyword[None] ) identifier[super] (). identifier[trim] ( identifier[lower] , identifier[upper] )
def trim(self, lower=None, upper=None): """Trim upper values in accordance with :math:`EQI1 \\leq EQB`. >>> from hydpy.models.lland import * >>> parameterstep('1d') >>> eqi1.value = 2.0 >>> eqb(1.0) >>> eqb eqb(2.0) >>> eqb(2.0) >>> eqb eqb(2.0) >>> eqb(3.0) >>> eqb eqb(3.0) """ if lower is None: lower = getattr(self.subpars.eqi1, 'value', None) # depends on [control=['if'], data=['lower']] super().trim(lower, upper)
def get_states(self):
    """
    Build the per-variable state names used in the BIF output.

    Returns
    -------
    dict: dict of type {variable: a list of states}

    Example
    -------
    >>> from pgmpy.readwrite import BIFReader, BIFWriter
    >>> model = BIFReader('dog-problem.bif').get_model()
    >>> writer = BIFWriter(model)
    >>> writer.get_states()
    {'bowel-problem': ['bowel-problem_0', 'bowel-problem_1'],
     'dog-out': ['dog-out_0', 'dog-out_1'],
     'family-out': ['family-out_0', 'family-out_1'],
     'hear-bark': ['hear-bark_0', 'hear-bark_1'],
     'light-on': ['light-on_0', 'light-on_1']}
    """
    states_by_variable = {}
    for cpd in self.model.get_cpds():
        var = cpd.variable
        cardinality = cpd.get_cardinality([var])[var]
        # State names are "<variable>_<index>" for each state index.
        states_by_variable[var] = [
            str(var) + '_' + str(idx) for idx in range(cardinality)
        ]
    return states_by_variable
def function[get_states, parameter[self]]: constant[ Add states to variable of BIF Returns ------- dict: dict of type {variable: a list of states} Example ------- >>> from pgmpy.readwrite import BIFReader, BIFWriter >>> model = BIFReader('dog-problem.bif').get_model() >>> writer = BIFWriter(model) >>> writer.get_states() {'bowel-problem': ['bowel-problem_0', 'bowel-problem_1'], 'dog-out': ['dog-out_0', 'dog-out_1'], 'family-out': ['family-out_0', 'family-out_1'], 'hear-bark': ['hear-bark_0', 'hear-bark_1'], 'light-on': ['light-on_0', 'light-on_1']} ] variable[variable_states] assign[=] dictionary[[], []] variable[cpds] assign[=] call[name[self].model.get_cpds, parameter[]] for taget[name[cpd]] in starred[name[cpds]] begin[:] variable[variable] assign[=] name[cpd].variable call[name[variable_states]][name[variable]] assign[=] list[[]] for taget[name[state]] in starred[call[name[range], parameter[call[call[name[cpd].get_cardinality, parameter[list[[<ast.Name object at 0x7da20c6abb80>]]]]][name[variable]]]]] begin[:] call[call[name[variable_states]][name[variable]].append, parameter[binary_operation[binary_operation[call[name[str], parameter[name[variable]]] + constant[_]] + call[name[str], parameter[name[state]]]]]] return[name[variable_states]]
keyword[def] identifier[get_states] ( identifier[self] ): literal[string] identifier[variable_states] ={} identifier[cpds] = identifier[self] . identifier[model] . identifier[get_cpds] () keyword[for] identifier[cpd] keyword[in] identifier[cpds] : identifier[variable] = identifier[cpd] . identifier[variable] identifier[variable_states] [ identifier[variable] ]=[] keyword[for] identifier[state] keyword[in] identifier[range] ( identifier[cpd] . identifier[get_cardinality] ([ identifier[variable] ])[ identifier[variable] ]): identifier[variable_states] [ identifier[variable] ]. identifier[append] ( identifier[str] ( identifier[variable] )+ literal[string] + identifier[str] ( identifier[state] )) keyword[return] identifier[variable_states]
def get_states(self): """ Add states to variable of BIF Returns ------- dict: dict of type {variable: a list of states} Example ------- >>> from pgmpy.readwrite import BIFReader, BIFWriter >>> model = BIFReader('dog-problem.bif').get_model() >>> writer = BIFWriter(model) >>> writer.get_states() {'bowel-problem': ['bowel-problem_0', 'bowel-problem_1'], 'dog-out': ['dog-out_0', 'dog-out_1'], 'family-out': ['family-out_0', 'family-out_1'], 'hear-bark': ['hear-bark_0', 'hear-bark_1'], 'light-on': ['light-on_0', 'light-on_1']} """ variable_states = {} cpds = self.model.get_cpds() for cpd in cpds: variable = cpd.variable variable_states[variable] = [] for state in range(cpd.get_cardinality([variable])[variable]): variable_states[variable].append(str(variable) + '_' + str(state)) # depends on [control=['for'], data=['state']] # depends on [control=['for'], data=['cpd']] return variable_states
def evaluate(
    references,
    estimates,
    win=1*44100,
    hop=1*44100,
    mode='v4',
    padding=True
):
    """BSS_EVAL images evaluation using metrics module

    Parameters
    ----------
    references : np.ndarray, shape=(nsrc, nsampl, nchan)
        array containing true reference sources
    estimates : np.ndarray, shape=(nsrc, nsampl, nchan)
        array containing estimated sources
    win : int, defaults to 44100
        window size in samples
    hop : int
        hop size in samples, defaults to 44100 (no overlap)
    mode : str
        BSSEval version, default to `v4`; `v3` enables framewise filters
    padding : bool
        if True (default), pad or truncate `estimates` to match the
        length of `references` before evaluation

    Returns
    -------
    SDR : np.ndarray, shape=(nsrc,)
        vector of Signal to Distortion Ratios (SDR)
    ISR : np.ndarray, shape=(nsrc,)
        vector of Source to Spatial Distortion Image (ISR)
    SIR : np.ndarray, shape=(nsrc,)
        vector of Source to Interference Ratios (SIR)
    SAR : np.ndarray, shape=(nsrc,)
        vector of Sources to Artifacts Ratios (SAR)
    """
    estimates = np.array(estimates)
    references = np.array(references)

    if padding:
        # Make both signals the same length before comparing them.
        references, estimates = pad_or_truncate(references, estimates)

    SDR, ISR, SIR, SAR, _ = metrics.bss_eval(
        references,
        estimates,
        compute_permutation=False,
        window=win,
        hop=hop,
        framewise_filters=(mode == "v3"),
        bsseval_sources_version=False
    )
    return SDR, ISR, SIR, SAR
def function[evaluate, parameter[references, estimates, win, hop, mode, padding]]: constant[BSS_EVAL images evaluation using metrics module Parameters ---------- references : np.ndarray, shape=(nsrc, nsampl, nchan) array containing true reference sources estimates : np.ndarray, shape=(nsrc, nsampl, nchan) array containing estimated sources window : int, defaults to 44100 window size in samples hop : int hop size in samples, defaults to 44100 (no overlap) mode : str BSSEval version, default to `v4` Returns ------- SDR : np.ndarray, shape=(nsrc,) vector of Signal to Distortion Ratios (SDR) ISR : np.ndarray, shape=(nsrc,) vector of Source to Spatial Distortion Image (ISR) SIR : np.ndarray, shape=(nsrc,) vector of Source to Interference Ratios (SIR) SAR : np.ndarray, shape=(nsrc,) vector of Sources to Artifacts Ratios (SAR) ] variable[estimates] assign[=] call[name[np].array, parameter[name[estimates]]] variable[references] assign[=] call[name[np].array, parameter[name[references]]] if name[padding] begin[:] <ast.Tuple object at 0x7da1b039bfa0> assign[=] call[name[pad_or_truncate], parameter[name[references], name[estimates]]] <ast.Tuple object at 0x7da1b03993c0> assign[=] call[name[metrics].bss_eval, parameter[name[references], name[estimates]]] return[tuple[[<ast.Name object at 0x7da1b0398a30>, <ast.Name object at 0x7da1b0399510>, <ast.Name object at 0x7da1b0399870>, <ast.Name object at 0x7da1b039aa70>]]]
keyword[def] identifier[evaluate] ( identifier[references] , identifier[estimates] , identifier[win] = literal[int] * literal[int] , identifier[hop] = literal[int] * literal[int] , identifier[mode] = literal[string] , identifier[padding] = keyword[True] ): literal[string] identifier[estimates] = identifier[np] . identifier[array] ( identifier[estimates] ) identifier[references] = identifier[np] . identifier[array] ( identifier[references] ) keyword[if] identifier[padding] : identifier[references] , identifier[estimates] = identifier[pad_or_truncate] ( identifier[references] , identifier[estimates] ) identifier[SDR] , identifier[ISR] , identifier[SIR] , identifier[SAR] , identifier[_] = identifier[metrics] . identifier[bss_eval] ( identifier[references] , identifier[estimates] , identifier[compute_permutation] = keyword[False] , identifier[window] = identifier[win] , identifier[hop] = identifier[hop] , identifier[framewise_filters] =( identifier[mode] == literal[string] ), identifier[bsseval_sources_version] = keyword[False] ) keyword[return] identifier[SDR] , identifier[ISR] , identifier[SIR] , identifier[SAR]
def evaluate(references, estimates, win=1 * 44100, hop=1 * 44100, mode='v4', padding=True): """BSS_EVAL images evaluation using metrics module Parameters ---------- references : np.ndarray, shape=(nsrc, nsampl, nchan) array containing true reference sources estimates : np.ndarray, shape=(nsrc, nsampl, nchan) array containing estimated sources window : int, defaults to 44100 window size in samples hop : int hop size in samples, defaults to 44100 (no overlap) mode : str BSSEval version, default to `v4` Returns ------- SDR : np.ndarray, shape=(nsrc,) vector of Signal to Distortion Ratios (SDR) ISR : np.ndarray, shape=(nsrc,) vector of Source to Spatial Distortion Image (ISR) SIR : np.ndarray, shape=(nsrc,) vector of Source to Interference Ratios (SIR) SAR : np.ndarray, shape=(nsrc,) vector of Sources to Artifacts Ratios (SAR) """ estimates = np.array(estimates) references = np.array(references) if padding: (references, estimates) = pad_or_truncate(references, estimates) # depends on [control=['if'], data=[]] (SDR, ISR, SIR, SAR, _) = metrics.bss_eval(references, estimates, compute_permutation=False, window=win, hop=hop, framewise_filters=mode == 'v3', bsseval_sources_version=False) return (SDR, ISR, SIR, SAR)
def parse_extra(self, extra):
    """Parse extra request parameters into an IIIFRequest object."""
    # Strip a single leading slash, if present, before parsing.
    url_part = extra[1:] if extra.startswith('/') else extra
    request = IIIFRequest(identifier='dummy', api_version=self.api_version)
    request.parse_url(url_part)
    if request.info:
        # Only image requests are valid here, not Image Information requests.
        raise IIIFStaticError("Attempt to specify Image Information in extras.")
    return request
def function[parse_extra, parameter[self, extra]]: constant[Parse extra request parameters to IIIFRequest object.] if call[name[extra].startswith, parameter[constant[/]]] begin[:] variable[extra] assign[=] call[name[extra]][<ast.Slice object at 0x7da18bc73a30>] variable[r] assign[=] call[name[IIIFRequest], parameter[]] call[name[r].parse_url, parameter[name[extra]]] if name[r].info begin[:] <ast.Raise object at 0x7da18bc73790> return[name[r]]
keyword[def] identifier[parse_extra] ( identifier[self] , identifier[extra] ): literal[string] keyword[if] identifier[extra] . identifier[startswith] ( literal[string] ): identifier[extra] = identifier[extra] [ literal[int] :] identifier[r] = identifier[IIIFRequest] ( identifier[identifier] = literal[string] , identifier[api_version] = identifier[self] . identifier[api_version] ) identifier[r] . identifier[parse_url] ( identifier[extra] ) keyword[if] ( identifier[r] . identifier[info] ): keyword[raise] identifier[IIIFStaticError] ( literal[string] ) keyword[return] ( identifier[r] )
def parse_extra(self, extra): """Parse extra request parameters to IIIFRequest object.""" if extra.startswith('/'): extra = extra[1:] # depends on [control=['if'], data=[]] r = IIIFRequest(identifier='dummy', api_version=self.api_version) r.parse_url(extra) if r.info: raise IIIFStaticError('Attempt to specify Image Information in extras.') # depends on [control=['if'], data=[]] return r
def create_namespaced_role(self, namespace, body, **kwargs):
    """
    create a Role
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_namespaced_role(namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1Role body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
    :return: V1Role
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and async_req paths delegate to the same
    # *_with_http_info call; it returns a thread when async_req is set
    # and the response data otherwise.
    return self.create_namespaced_role_with_http_info(namespace, body, **kwargs)
def function[create_namespaced_role, parameter[self, namespace, body]]: constant[ create a Role This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_role(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Role body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: V1Role If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].create_namespaced_role_with_http_info, parameter[name[namespace], name[body]]]]
keyword[def] identifier[create_namespaced_role] ( identifier[self] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[create_namespaced_role_with_http_info] ( identifier[namespace] , identifier[body] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[create_namespaced_role_with_http_info] ( identifier[namespace] , identifier[body] ,** identifier[kwargs] ) keyword[return] identifier[data]
def create_namespaced_role(self, namespace, body, **kwargs): """ create a Role This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_role(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Role body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: V1Role If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_namespaced_role_with_http_info(namespace, body, **kwargs) # depends on [control=['if'], data=[]] else: data = self.create_namespaced_role_with_http_info(namespace, body, **kwargs) return data
def ensure_exists(p, assume_dirs=False):
    """
    Ensures a given path *p* exists.

    If a path to a file is passed in, then the path to the file will be
    checked. This can be overridden by passing a value of ``True`` to
    ``assume_dirs``, in which case the paths will be assumed to be to
    directories, not files.

    :param p: the path (string or path-like) whose directory portion
        should exist after this call
    :param assume_dirs: treat *p* itself as a directory even if it has
        a file extension
    :return: *p*, unchanged, for convenient chaining
    """
    # Use the stdlib instead of the third-party ``path`` package; also
    # handles a bare filename ("file.txt"), whose directory part is the
    # current directory rather than the empty string.
    from pathlib import Path
    target = Path(p)
    if target.suffix and not assume_dirs:
        # Looks like a file path: create its parent directory tree only.
        target.parent.mkdir(parents=True, exist_ok=True)
    else:
        target.mkdir(parents=True, exist_ok=True)
    return p
def function[ensure_exists, parameter[p, assume_dirs]]: constant[ Ensures a given path *p* exists. If a path to a file is passed in, then the path to the file will be checked. This can be overridden by passing a value of ``True`` to ``assume_dirs``, in which case the paths will be assumed to be to directories, not files. ] if <ast.BoolOp object at 0x7da20e955750> begin[:] call[call[call[name[path], parameter[name[p]]].dirname, parameter[]].makedirs_p, parameter[]] return[name[p]]
keyword[def] identifier[ensure_exists] ( identifier[p] , identifier[assume_dirs] = keyword[False] ): literal[string] keyword[if] identifier[path] ( identifier[p] ). identifier[ext] keyword[and] keyword[not] identifier[assume_dirs] : identifier[path] ( identifier[p] ). identifier[dirname] (). identifier[makedirs_p] () keyword[else] : identifier[path] ( identifier[p] ). identifier[makedirs_p] () keyword[return] identifier[p]
def ensure_exists(p, assume_dirs=False): """ Ensures a given path *p* exists. If a path to a file is passed in, then the path to the file will be checked. This can be overridden by passing a value of ``True`` to ``assume_dirs``, in which case the paths will be assumed to be to directories, not files. """ if path(p).ext and (not assume_dirs): path(p).dirname().makedirs_p() # depends on [control=['if'], data=[]] else: path(p).makedirs_p() return p
def import_module(self, module=None, recursive=False, **params):
    """Create a child space from an module.

    Args:
        module: a module object or name of the module object.
        recursive: Not yet implemented.
        **params: arguments to pass to ``new_space``

    Returns:
        The new child space created from the module.
    """
    if module is None:
        # Accept the legacy keyword 'module_' with a deprecation warning.
        if "module_" not in params:
            raise ValueError("no module specified")
        warnings.warn(
            "Parameter 'module_' is deprecated. Use 'module' instead.")
        module = params.pop("module_")

    if "bases" in params:
        params["bases"] = get_impls(params["bases"])

    space = self._impl.new_space_from_module(
        module, recursive=recursive, **params
    )
    # Record the newly created space as the model's current space.
    self._impl.model.currentspace = space
    return get_interfaces(space)
def function[import_module, parameter[self, module, recursive]]: constant[Create a child space from an module. Args: module: a module object or name of the module object. recursive: Not yet implemented. **params: arguments to pass to ``new_space`` Returns: The new child space created from the module. ] if compare[name[module] is constant[None]] begin[:] if compare[constant[module_] in name[params]] begin[:] call[name[warnings].warn, parameter[constant[Parameter 'module_' is deprecated. Use 'module' instead.]]] variable[module] assign[=] call[name[params].pop, parameter[constant[module_]]] if compare[constant[bases] in name[params]] begin[:] call[name[params]][constant[bases]] assign[=] call[name[get_impls], parameter[call[name[params]][constant[bases]]]] variable[space] assign[=] call[name[self]._impl.new_space_from_module, parameter[name[module]]] return[call[name[get_interfaces], parameter[name[space]]]]
keyword[def] identifier[import_module] ( identifier[self] , identifier[module] = keyword[None] , identifier[recursive] = keyword[False] ,** identifier[params] ): literal[string] keyword[if] identifier[module] keyword[is] keyword[None] : keyword[if] literal[string] keyword[in] identifier[params] : identifier[warnings] . identifier[warn] ( literal[string] ) identifier[module] = identifier[params] . identifier[pop] ( literal[string] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[params] : identifier[params] [ literal[string] ]= identifier[get_impls] ( identifier[params] [ literal[string] ]) identifier[space] =( identifier[self] . identifier[_impl] . identifier[model] . identifier[currentspace] )= identifier[self] . identifier[_impl] . identifier[new_space_from_module] ( identifier[module] , identifier[recursive] = identifier[recursive] ,** identifier[params] ) keyword[return] identifier[get_interfaces] ( identifier[space] )
def import_module(self, module=None, recursive=False, **params): """Create a child space from an module. Args: module: a module object or name of the module object. recursive: Not yet implemented. **params: arguments to pass to ``new_space`` Returns: The new child space created from the module. """ if module is None: if 'module_' in params: warnings.warn("Parameter 'module_' is deprecated. Use 'module' instead.") module = params.pop('module_') # depends on [control=['if'], data=['params']] else: raise ValueError('no module specified') # depends on [control=['if'], data=['module']] if 'bases' in params: params['bases'] = get_impls(params['bases']) # depends on [control=['if'], data=['params']] space = self._impl.model.currentspace = self._impl.new_space_from_module(module, recursive=recursive, **params) return get_interfaces(space)
def list_entities_for_policy(policy_name, path_prefix=None, entity_filter=None, region=None, key=None, keyid=None, profile=None):
    '''
    List entities that a policy is attached to.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_iam.list_entities_for_policy mypolicy
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    # Up to 30 attempts when AWS throttles; see the retry handling below.
    retries = 30
    params = {}
    # Forward only the optional filters the caller actually supplied; the
    # locals() lookup maps each parameter name to its current value.
    for arg in ('path_prefix', 'entity_filter'):
        if locals()[arg] is not None:
            params[arg] = locals()[arg]
    policy_arn = _get_policy_arn(policy_name, region, key, keyid, profile)
    while retries:
        try:
            # Accumulate the attached groups/users/roles across all pages
            # returned by the paged API call.
            allret = {
                'policy_groups': [],
                'policy_users': [],
                'policy_roles': [],
            }
            for ret in __utils__['boto.paged_call'](conn.list_entities_for_policy, policy_arn=policy_arn, **params):
                for k, v in six.iteritems(allret):
                    v.extend(ret.get('list_entities_for_policy_response', {}).get('list_entities_for_policy_result', {}).get(k))
            return allret
        except boto.exception.BotoServerError as e:
            if e.error_code == 'Throttling':
                # Back off briefly and retry on AWS throttling errors.
                log.debug("Throttled by AWS API, will retry in 5 seconds...")
                time.sleep(5)
                retries -= 1
                continue
            # Any other API error is logged and reported as an empty result.
            log.error('Failed to list entities for IAM policy %s: %s',
                      policy_name, e.message)
            return {}
    # All retries exhausted by throttling.
    return {}
def function[list_entities_for_policy, parameter[policy_name, path_prefix, entity_filter, region, key, keyid, profile]]: constant[ List entities that a policy is attached to. CLI Example: .. code-block:: bash salt myminion boto_iam.list_entities_for_policy mypolicy ] variable[conn] assign[=] call[name[_get_conn], parameter[]] variable[retries] assign[=] constant[30] variable[params] assign[=] dictionary[[], []] for taget[name[arg]] in starred[tuple[[<ast.Constant object at 0x7da1b1fd5d20>, <ast.Constant object at 0x7da1b1fd5cf0>]]] begin[:] if compare[call[call[name[locals], parameter[]]][name[arg]] is_not constant[None]] begin[:] call[name[params]][name[arg]] assign[=] call[call[name[locals], parameter[]]][name[arg]] variable[policy_arn] assign[=] call[name[_get_policy_arn], parameter[name[policy_name], name[region], name[key], name[keyid], name[profile]]] while name[retries] begin[:] <ast.Try object at 0x7da1b1fd7220> return[dictionary[[], []]]
keyword[def] identifier[list_entities_for_policy] ( identifier[policy_name] , identifier[path_prefix] = keyword[None] , identifier[entity_filter] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) identifier[retries] = literal[int] identifier[params] ={} keyword[for] identifier[arg] keyword[in] ( literal[string] , literal[string] ): keyword[if] identifier[locals] ()[ identifier[arg] ] keyword[is] keyword[not] keyword[None] : identifier[params] [ identifier[arg] ]= identifier[locals] ()[ identifier[arg] ] identifier[policy_arn] = identifier[_get_policy_arn] ( identifier[policy_name] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] ) keyword[while] identifier[retries] : keyword[try] : identifier[allret] ={ literal[string] :[], literal[string] :[], literal[string] :[], } keyword[for] identifier[ret] keyword[in] identifier[__utils__] [ literal[string] ]( identifier[conn] . identifier[list_entities_for_policy] , identifier[policy_arn] = identifier[policy_arn] ,** identifier[params] ): keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[allret] ): identifier[v] . identifier[extend] ( identifier[ret] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{}). identifier[get] ( identifier[k] )) keyword[return] identifier[allret] keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[error_code] == literal[string] : identifier[log] . identifier[debug] ( literal[string] ) identifier[time] . 
identifier[sleep] ( literal[int] ) identifier[retries] -= literal[int] keyword[continue] identifier[log] . identifier[error] ( literal[string] , identifier[policy_name] , identifier[e] . identifier[message] ) keyword[return] {} keyword[return] {}
def list_entities_for_policy(policy_name, path_prefix=None, entity_filter=None, region=None, key=None, keyid=None, profile=None): """ List entities that a policy is attached to. CLI Example: .. code-block:: bash salt myminion boto_iam.list_entities_for_policy mypolicy """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) retries = 30 params = {} for arg in ('path_prefix', 'entity_filter'): if locals()[arg] is not None: params[arg] = locals()[arg] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']] policy_arn = _get_policy_arn(policy_name, region, key, keyid, profile) while retries: try: allret = {'policy_groups': [], 'policy_users': [], 'policy_roles': []} for ret in __utils__['boto.paged_call'](conn.list_entities_for_policy, policy_arn=policy_arn, **params): for (k, v) in six.iteritems(allret): v.extend(ret.get('list_entities_for_policy_response', {}).get('list_entities_for_policy_result', {}).get(k)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['ret']] return allret # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as e: if e.error_code == 'Throttling': log.debug('Throttled by AWS API, will retry in 5 seconds...') time.sleep(5) retries -= 1 continue # depends on [control=['if'], data=[]] log.error('Failed to list entities for IAM policy %s: %s', policy_name, e.message) return {} # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] return {}
def starmap(function, iterables, *args, **kwargs): """ Equivalent to: >>> return ([function(x1,x2,x3,..., args[0], args[1],...) for >>> (x1,x2,x3...) in iterable]) :param pm_parallel: Force parallelization on/off :type pm_parallel: bool :param pm_chunksize: see :py:class:`multiprocessing.pool.Pool` :type pm_chunksize: int :param pm_pool: Pass an existing pool :type pm_pool: multiprocessing.pool.Pool :param pm_processes: Number of processes to use in the pool. See :py:class:`multiprocessing.pool.Pool` :type pm_processes: int :param pm_pbar: Show progress bar :type pm_pbar: bool """ return _map_or_starmap(function, iterables, args, kwargs, "starmap")
def function[starmap, parameter[function, iterables]]: constant[ Equivalent to: >>> return ([function(x1,x2,x3,..., args[0], args[1],...) for >>> (x1,x2,x3...) in iterable]) :param pm_parallel: Force parallelization on/off :type pm_parallel: bool :param pm_chunksize: see :py:class:`multiprocessing.pool.Pool` :type pm_chunksize: int :param pm_pool: Pass an existing pool :type pm_pool: multiprocessing.pool.Pool :param pm_processes: Number of processes to use in the pool. See :py:class:`multiprocessing.pool.Pool` :type pm_processes: int :param pm_pbar: Show progress bar :type pm_pbar: bool ] return[call[name[_map_or_starmap], parameter[name[function], name[iterables], name[args], name[kwargs], constant[starmap]]]]
keyword[def] identifier[starmap] ( identifier[function] , identifier[iterables] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[_map_or_starmap] ( identifier[function] , identifier[iterables] , identifier[args] , identifier[kwargs] , literal[string] )
def starmap(function, iterables, *args, **kwargs): """ Equivalent to: >>> return ([function(x1,x2,x3,..., args[0], args[1],...) for >>> (x1,x2,x3...) in iterable]) :param pm_parallel: Force parallelization on/off :type pm_parallel: bool :param pm_chunksize: see :py:class:`multiprocessing.pool.Pool` :type pm_chunksize: int :param pm_pool: Pass an existing pool :type pm_pool: multiprocessing.pool.Pool :param pm_processes: Number of processes to use in the pool. See :py:class:`multiprocessing.pool.Pool` :type pm_processes: int :param pm_pbar: Show progress bar :type pm_pbar: bool """ return _map_or_starmap(function, iterables, args, kwargs, 'starmap')
def overview(game_id): """Gets the overview information for the game with matching id.""" output = {} # get data overview = mlbgame.data.get_overview(game_id) # parse data overview_root = etree.parse(overview).getroot() try: output = add_raw_box_score_attributes(output, game_id) except ValueError: pass # get overview attributes for x in overview_root.attrib: output[x] = overview_root.attrib[x] # Get probable starter attributes if they exist home_pitcher_tree = overview_root.find('home_probable_pitcher') if home_pitcher_tree is not None: output.update(build_namespaced_attributes( 'home_probable_pitcher', home_pitcher_tree)) else: output.update(build_probable_starter_defaults('home')) away_pitcher_tree = overview_root.find('away_probable_pitcher') if away_pitcher_tree is not None: output.update(build_namespaced_attributes( 'away_probable_pitcher', away_pitcher_tree)) else: output.update(build_probable_starter_defaults('away')) return output
def function[overview, parameter[game_id]]: constant[Gets the overview information for the game with matching id.] variable[output] assign[=] dictionary[[], []] variable[overview] assign[=] call[name[mlbgame].data.get_overview, parameter[name[game_id]]] variable[overview_root] assign[=] call[call[name[etree].parse, parameter[name[overview]]].getroot, parameter[]] <ast.Try object at 0x7da18f721ae0> for taget[name[x]] in starred[name[overview_root].attrib] begin[:] call[name[output]][name[x]] assign[=] call[name[overview_root].attrib][name[x]] variable[home_pitcher_tree] assign[=] call[name[overview_root].find, parameter[constant[home_probable_pitcher]]] if compare[name[home_pitcher_tree] is_not constant[None]] begin[:] call[name[output].update, parameter[call[name[build_namespaced_attributes], parameter[constant[home_probable_pitcher], name[home_pitcher_tree]]]]] variable[away_pitcher_tree] assign[=] call[name[overview_root].find, parameter[constant[away_probable_pitcher]]] if compare[name[away_pitcher_tree] is_not constant[None]] begin[:] call[name[output].update, parameter[call[name[build_namespaced_attributes], parameter[constant[away_probable_pitcher], name[away_pitcher_tree]]]]] return[name[output]]
keyword[def] identifier[overview] ( identifier[game_id] ): literal[string] identifier[output] ={} identifier[overview] = identifier[mlbgame] . identifier[data] . identifier[get_overview] ( identifier[game_id] ) identifier[overview_root] = identifier[etree] . identifier[parse] ( identifier[overview] ). identifier[getroot] () keyword[try] : identifier[output] = identifier[add_raw_box_score_attributes] ( identifier[output] , identifier[game_id] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[for] identifier[x] keyword[in] identifier[overview_root] . identifier[attrib] : identifier[output] [ identifier[x] ]= identifier[overview_root] . identifier[attrib] [ identifier[x] ] identifier[home_pitcher_tree] = identifier[overview_root] . identifier[find] ( literal[string] ) keyword[if] identifier[home_pitcher_tree] keyword[is] keyword[not] keyword[None] : identifier[output] . identifier[update] ( identifier[build_namespaced_attributes] ( literal[string] , identifier[home_pitcher_tree] )) keyword[else] : identifier[output] . identifier[update] ( identifier[build_probable_starter_defaults] ( literal[string] )) identifier[away_pitcher_tree] = identifier[overview_root] . identifier[find] ( literal[string] ) keyword[if] identifier[away_pitcher_tree] keyword[is] keyword[not] keyword[None] : identifier[output] . identifier[update] ( identifier[build_namespaced_attributes] ( literal[string] , identifier[away_pitcher_tree] )) keyword[else] : identifier[output] . identifier[update] ( identifier[build_probable_starter_defaults] ( literal[string] )) keyword[return] identifier[output]
def overview(game_id): """Gets the overview information for the game with matching id.""" output = {} # get data overview = mlbgame.data.get_overview(game_id) # parse data overview_root = etree.parse(overview).getroot() try: output = add_raw_box_score_attributes(output, game_id) # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # get overview attributes for x in overview_root.attrib: output[x] = overview_root.attrib[x] # depends on [control=['for'], data=['x']] # Get probable starter attributes if they exist home_pitcher_tree = overview_root.find('home_probable_pitcher') if home_pitcher_tree is not None: output.update(build_namespaced_attributes('home_probable_pitcher', home_pitcher_tree)) # depends on [control=['if'], data=['home_pitcher_tree']] else: output.update(build_probable_starter_defaults('home')) away_pitcher_tree = overview_root.find('away_probable_pitcher') if away_pitcher_tree is not None: output.update(build_namespaced_attributes('away_probable_pitcher', away_pitcher_tree)) # depends on [control=['if'], data=['away_pitcher_tree']] else: output.update(build_probable_starter_defaults('away')) return output
def _RawData(self, data): """Convert data to common format. Configuration options are normally grouped by the functional component which define it (e.g. Logging.path is the path parameter for the logging subsystem). However, sometimes it is more intuitive to write the config as a flat string (e.g. Logging.path). In this case we group all the flat strings in their respective sections and create the sections automatically. Args: data: A dict of raw data. Returns: a dict in common format. Any keys in the raw data which have a "." in them are separated into their own sections. This allows the config to be written explicitly in dot notation instead of using a section. """ if not isinstance(data, dict): return data result = collections.OrderedDict() for k, v in iteritems(data): result[k] = self._RawData(v) return result
def function[_RawData, parameter[self, data]]: constant[Convert data to common format. Configuration options are normally grouped by the functional component which define it (e.g. Logging.path is the path parameter for the logging subsystem). However, sometimes it is more intuitive to write the config as a flat string (e.g. Logging.path). In this case we group all the flat strings in their respective sections and create the sections automatically. Args: data: A dict of raw data. Returns: a dict in common format. Any keys in the raw data which have a "." in them are separated into their own sections. This allows the config to be written explicitly in dot notation instead of using a section. ] if <ast.UnaryOp object at 0x7da1b1c18e80> begin[:] return[name[data]] variable[result] assign[=] call[name[collections].OrderedDict, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b1c0c3d0>, <ast.Name object at 0x7da1b1c0ebc0>]]] in starred[call[name[iteritems], parameter[name[data]]]] begin[:] call[name[result]][name[k]] assign[=] call[name[self]._RawData, parameter[name[v]]] return[name[result]]
keyword[def] identifier[_RawData] ( identifier[self] , identifier[data] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[dict] ): keyword[return] identifier[data] identifier[result] = identifier[collections] . identifier[OrderedDict] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[data] ): identifier[result] [ identifier[k] ]= identifier[self] . identifier[_RawData] ( identifier[v] ) keyword[return] identifier[result]
def _RawData(self, data): """Convert data to common format. Configuration options are normally grouped by the functional component which define it (e.g. Logging.path is the path parameter for the logging subsystem). However, sometimes it is more intuitive to write the config as a flat string (e.g. Logging.path). In this case we group all the flat strings in their respective sections and create the sections automatically. Args: data: A dict of raw data. Returns: a dict in common format. Any keys in the raw data which have a "." in them are separated into their own sections. This allows the config to be written explicitly in dot notation instead of using a section. """ if not isinstance(data, dict): return data # depends on [control=['if'], data=[]] result = collections.OrderedDict() for (k, v) in iteritems(data): result[k] = self._RawData(v) # depends on [control=['for'], data=[]] return result
def select_best_candidate(candidate_models): """ Select and return the best candidate model based on r-squared and qualification. Parameters ---------- candidate_models : :any:`list` of :any:`eemeter.CalTRACKUsagePerDayCandidateModel` Candidate models to select from. Returns ------- (best_candidate, warnings) : :any:`tuple` of :any:`eemeter.CalTRACKUsagePerDayCandidateModel` or :any:`None` and :any:`list` of `eemeter.EEMeterWarning` Return the candidate model with highest r-squared or None if none meet the requirements, and a list of warnings about this selection (or lack of selection). """ best_r_squared_adj = -np.inf best_candidate = None # CalTrack 3.4.3.3 for candidate in candidate_models: if ( candidate.status == "QUALIFIED" and candidate.r_squared_adj > best_r_squared_adj ): best_candidate = candidate best_r_squared_adj = candidate.r_squared_adj if best_candidate is None: warnings = [ EEMeterWarning( qualified_name="eemeter.caltrack_daily.select_best_candidate.no_candidates", description="No qualified model candidates available.", data={ "status_count:{}".format(status): count for status, count in Counter( [c.status for c in candidate_models] ).items() }, ) ] return None, warnings return best_candidate, []
def function[select_best_candidate, parameter[candidate_models]]: constant[ Select and return the best candidate model based on r-squared and qualification. Parameters ---------- candidate_models : :any:`list` of :any:`eemeter.CalTRACKUsagePerDayCandidateModel` Candidate models to select from. Returns ------- (best_candidate, warnings) : :any:`tuple` of :any:`eemeter.CalTRACKUsagePerDayCandidateModel` or :any:`None` and :any:`list` of `eemeter.EEMeterWarning` Return the candidate model with highest r-squared or None if none meet the requirements, and a list of warnings about this selection (or lack of selection). ] variable[best_r_squared_adj] assign[=] <ast.UnaryOp object at 0x7da20e9b3e80> variable[best_candidate] assign[=] constant[None] for taget[name[candidate]] in starred[name[candidate_models]] begin[:] if <ast.BoolOp object at 0x7da20c6c4dc0> begin[:] variable[best_candidate] assign[=] name[candidate] variable[best_r_squared_adj] assign[=] name[candidate].r_squared_adj if compare[name[best_candidate] is constant[None]] begin[:] variable[warnings] assign[=] list[[<ast.Call object at 0x7da20c6c6e90>]] return[tuple[[<ast.Constant object at 0x7da20c6c5480>, <ast.Name object at 0x7da20c6c5f60>]]] return[tuple[[<ast.Name object at 0x7da20c6c6c20>, <ast.List object at 0x7da20c6c4670>]]]
keyword[def] identifier[select_best_candidate] ( identifier[candidate_models] ): literal[string] identifier[best_r_squared_adj] =- identifier[np] . identifier[inf] identifier[best_candidate] = keyword[None] keyword[for] identifier[candidate] keyword[in] identifier[candidate_models] : keyword[if] ( identifier[candidate] . identifier[status] == literal[string] keyword[and] identifier[candidate] . identifier[r_squared_adj] > identifier[best_r_squared_adj] ): identifier[best_candidate] = identifier[candidate] identifier[best_r_squared_adj] = identifier[candidate] . identifier[r_squared_adj] keyword[if] identifier[best_candidate] keyword[is] keyword[None] : identifier[warnings] =[ identifier[EEMeterWarning] ( identifier[qualified_name] = literal[string] , identifier[description] = literal[string] , identifier[data] ={ literal[string] . identifier[format] ( identifier[status] ): identifier[count] keyword[for] identifier[status] , identifier[count] keyword[in] identifier[Counter] ( [ identifier[c] . identifier[status] keyword[for] identifier[c] keyword[in] identifier[candidate_models] ] ). identifier[items] () }, ) ] keyword[return] keyword[None] , identifier[warnings] keyword[return] identifier[best_candidate] ,[]
def select_best_candidate(candidate_models): """ Select and return the best candidate model based on r-squared and qualification. Parameters ---------- candidate_models : :any:`list` of :any:`eemeter.CalTRACKUsagePerDayCandidateModel` Candidate models to select from. Returns ------- (best_candidate, warnings) : :any:`tuple` of :any:`eemeter.CalTRACKUsagePerDayCandidateModel` or :any:`None` and :any:`list` of `eemeter.EEMeterWarning` Return the candidate model with highest r-squared or None if none meet the requirements, and a list of warnings about this selection (or lack of selection). """ best_r_squared_adj = -np.inf best_candidate = None # CalTrack 3.4.3.3 for candidate in candidate_models: if candidate.status == 'QUALIFIED' and candidate.r_squared_adj > best_r_squared_adj: best_candidate = candidate best_r_squared_adj = candidate.r_squared_adj # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['candidate']] if best_candidate is None: warnings = [EEMeterWarning(qualified_name='eemeter.caltrack_daily.select_best_candidate.no_candidates', description='No qualified model candidates available.', data={'status_count:{}'.format(status): count for (status, count) in Counter([c.status for c in candidate_models]).items()})] return (None, warnings) # depends on [control=['if'], data=[]] return (best_candidate, [])
def update_conf(self): """Update configuration values from database. This method should be called when there is an update notification. """ parsed = self.parse_conf() if not parsed: return None # Update app config self.app.config.update(parsed)
def function[update_conf, parameter[self]]: constant[Update configuration values from database. This method should be called when there is an update notification. ] variable[parsed] assign[=] call[name[self].parse_conf, parameter[]] if <ast.UnaryOp object at 0x7da18f810f40> begin[:] return[constant[None]] call[name[self].app.config.update, parameter[name[parsed]]]
keyword[def] identifier[update_conf] ( identifier[self] ): literal[string] identifier[parsed] = identifier[self] . identifier[parse_conf] () keyword[if] keyword[not] identifier[parsed] : keyword[return] keyword[None] identifier[self] . identifier[app] . identifier[config] . identifier[update] ( identifier[parsed] )
def update_conf(self): """Update configuration values from database. This method should be called when there is an update notification. """ parsed = self.parse_conf() if not parsed: return None # depends on [control=['if'], data=[]] # Update app config self.app.config.update(parsed)
def get_start_and_end_time(self, ref=None): """Specific function to get start time and end time for MonthWeekDayDaterange :param ref: time in seconds :type ref: int | None :return: tuple with start and end time :rtype: tuple """ now = time.localtime(ref) if self.syear == 0: self.syear = now.tm_year day_start = find_day_by_weekday_offset(self.syear, self.smon, self.swday, self.swday_offset) start_time = get_start_of_day(self.syear, self.smon, day_start) if self.eyear == 0: self.eyear = now.tm_year day_end = find_day_by_weekday_offset(self.eyear, self.emon, self.ewday, self.ewday_offset) end_time = get_end_of_day(self.eyear, self.emon, day_end) now_epoch = time.mktime(now) if start_time > end_time: # the period is between years if now_epoch > end_time: # check for next year day_end = find_day_by_weekday_offset(self.eyear + 1, self.emon, self.ewday, self.ewday_offset) end_time = get_end_of_day(self.eyear + 1, self.emon, day_end) else: # it s just that the start was the last year day_start = find_day_by_weekday_offset(self.syear - 1, self.smon, self.swday, self.swday_offset) start_time = get_start_of_day(self.syear - 1, self.smon, day_start) else: if now_epoch > end_time: # just have to check for next year if necessary day_start = find_day_by_weekday_offset(self.syear + 1, self.smon, self.swday, self.swday_offset) start_time = get_start_of_day(self.syear + 1, self.smon, day_start) day_end = find_day_by_weekday_offset(self.eyear + 1, self.emon, self.ewday, self.ewday_offset) end_time = get_end_of_day(self.eyear + 1, self.emon, day_end) return (start_time, end_time)
def function[get_start_and_end_time, parameter[self, ref]]: constant[Specific function to get start time and end time for MonthWeekDayDaterange :param ref: time in seconds :type ref: int | None :return: tuple with start and end time :rtype: tuple ] variable[now] assign[=] call[name[time].localtime, parameter[name[ref]]] if compare[name[self].syear equal[==] constant[0]] begin[:] name[self].syear assign[=] name[now].tm_year variable[day_start] assign[=] call[name[find_day_by_weekday_offset], parameter[name[self].syear, name[self].smon, name[self].swday, name[self].swday_offset]] variable[start_time] assign[=] call[name[get_start_of_day], parameter[name[self].syear, name[self].smon, name[day_start]]] if compare[name[self].eyear equal[==] constant[0]] begin[:] name[self].eyear assign[=] name[now].tm_year variable[day_end] assign[=] call[name[find_day_by_weekday_offset], parameter[name[self].eyear, name[self].emon, name[self].ewday, name[self].ewday_offset]] variable[end_time] assign[=] call[name[get_end_of_day], parameter[name[self].eyear, name[self].emon, name[day_end]]] variable[now_epoch] assign[=] call[name[time].mktime, parameter[name[now]]] if compare[name[start_time] greater[>] name[end_time]] begin[:] if compare[name[now_epoch] greater[>] name[end_time]] begin[:] variable[day_end] assign[=] call[name[find_day_by_weekday_offset], parameter[binary_operation[name[self].eyear + constant[1]], name[self].emon, name[self].ewday, name[self].ewday_offset]] variable[end_time] assign[=] call[name[get_end_of_day], parameter[binary_operation[name[self].eyear + constant[1]], name[self].emon, name[day_end]]] return[tuple[[<ast.Name object at 0x7da2054a5120>, <ast.Name object at 0x7da2054a7b20>]]]
keyword[def] identifier[get_start_and_end_time] ( identifier[self] , identifier[ref] = keyword[None] ): literal[string] identifier[now] = identifier[time] . identifier[localtime] ( identifier[ref] ) keyword[if] identifier[self] . identifier[syear] == literal[int] : identifier[self] . identifier[syear] = identifier[now] . identifier[tm_year] identifier[day_start] = identifier[find_day_by_weekday_offset] ( identifier[self] . identifier[syear] , identifier[self] . identifier[smon] , identifier[self] . identifier[swday] , identifier[self] . identifier[swday_offset] ) identifier[start_time] = identifier[get_start_of_day] ( identifier[self] . identifier[syear] , identifier[self] . identifier[smon] , identifier[day_start] ) keyword[if] identifier[self] . identifier[eyear] == literal[int] : identifier[self] . identifier[eyear] = identifier[now] . identifier[tm_year] identifier[day_end] = identifier[find_day_by_weekday_offset] ( identifier[self] . identifier[eyear] , identifier[self] . identifier[emon] , identifier[self] . identifier[ewday] , identifier[self] . identifier[ewday_offset] ) identifier[end_time] = identifier[get_end_of_day] ( identifier[self] . identifier[eyear] , identifier[self] . identifier[emon] , identifier[day_end] ) identifier[now_epoch] = identifier[time] . identifier[mktime] ( identifier[now] ) keyword[if] identifier[start_time] > identifier[end_time] : keyword[if] identifier[now_epoch] > identifier[end_time] : identifier[day_end] = identifier[find_day_by_weekday_offset] ( identifier[self] . identifier[eyear] + literal[int] , identifier[self] . identifier[emon] , identifier[self] . identifier[ewday] , identifier[self] . identifier[ewday_offset] ) identifier[end_time] = identifier[get_end_of_day] ( identifier[self] . identifier[eyear] + literal[int] , identifier[self] . identifier[emon] , identifier[day_end] ) keyword[else] : identifier[day_start] = identifier[find_day_by_weekday_offset] ( identifier[self] . 
identifier[syear] - literal[int] , identifier[self] . identifier[smon] , identifier[self] . identifier[swday] , identifier[self] . identifier[swday_offset] ) identifier[start_time] = identifier[get_start_of_day] ( identifier[self] . identifier[syear] - literal[int] , identifier[self] . identifier[smon] , identifier[day_start] ) keyword[else] : keyword[if] identifier[now_epoch] > identifier[end_time] : identifier[day_start] = identifier[find_day_by_weekday_offset] ( identifier[self] . identifier[syear] + literal[int] , identifier[self] . identifier[smon] , identifier[self] . identifier[swday] , identifier[self] . identifier[swday_offset] ) identifier[start_time] = identifier[get_start_of_day] ( identifier[self] . identifier[syear] + literal[int] , identifier[self] . identifier[smon] , identifier[day_start] ) identifier[day_end] = identifier[find_day_by_weekday_offset] ( identifier[self] . identifier[eyear] + literal[int] , identifier[self] . identifier[emon] , identifier[self] . identifier[ewday] , identifier[self] . identifier[ewday_offset] ) identifier[end_time] = identifier[get_end_of_day] ( identifier[self] . identifier[eyear] + literal[int] , identifier[self] . identifier[emon] , identifier[day_end] ) keyword[return] ( identifier[start_time] , identifier[end_time] )
def get_start_and_end_time(self, ref=None): """Specific function to get start time and end time for MonthWeekDayDaterange :param ref: time in seconds :type ref: int | None :return: tuple with start and end time :rtype: tuple """ now = time.localtime(ref) if self.syear == 0: self.syear = now.tm_year # depends on [control=['if'], data=[]] day_start = find_day_by_weekday_offset(self.syear, self.smon, self.swday, self.swday_offset) start_time = get_start_of_day(self.syear, self.smon, day_start) if self.eyear == 0: self.eyear = now.tm_year # depends on [control=['if'], data=[]] day_end = find_day_by_weekday_offset(self.eyear, self.emon, self.ewday, self.ewday_offset) end_time = get_end_of_day(self.eyear, self.emon, day_end) now_epoch = time.mktime(now) if start_time > end_time: # the period is between years if now_epoch > end_time: # check for next year day_end = find_day_by_weekday_offset(self.eyear + 1, self.emon, self.ewday, self.ewday_offset) end_time = get_end_of_day(self.eyear + 1, self.emon, day_end) # depends on [control=['if'], data=['end_time']] else: # it s just that the start was the last year day_start = find_day_by_weekday_offset(self.syear - 1, self.smon, self.swday, self.swday_offset) start_time = get_start_of_day(self.syear - 1, self.smon, day_start) # depends on [control=['if'], data=['start_time', 'end_time']] elif now_epoch > end_time: # just have to check for next year if necessary day_start = find_day_by_weekday_offset(self.syear + 1, self.smon, self.swday, self.swday_offset) start_time = get_start_of_day(self.syear + 1, self.smon, day_start) day_end = find_day_by_weekday_offset(self.eyear + 1, self.emon, self.ewday, self.ewday_offset) end_time = get_end_of_day(self.eyear + 1, self.emon, day_end) # depends on [control=['if'], data=['end_time']] return (start_time, end_time)
def getEffort(self, edgeID, time): """getEffort(string, double) -> double Returns the effort value used for (re-)routing which is valid on the edge at the given time. """ self._connection._beginMessage(tc.CMD_GET_EDGE_VARIABLE, tc.VAR_EDGE_EFFORT, edgeID, 1 + 4) self._connection._string += struct.pack( "!Bi", tc.TYPE_INTEGER, time) return self._connection._checkResult(tc.CMD_GET_EDGE_VARIABLE, tc.VAR_EDGE_EFFORT, edgeID).readDouble()
def function[getEffort, parameter[self, edgeID, time]]: constant[getEffort(string, double) -> double Returns the effort value used for (re-)routing which is valid on the edge at the given time. ] call[name[self]._connection._beginMessage, parameter[name[tc].CMD_GET_EDGE_VARIABLE, name[tc].VAR_EDGE_EFFORT, name[edgeID], binary_operation[constant[1] + constant[4]]]] <ast.AugAssign object at 0x7da1b09bd840> return[call[call[name[self]._connection._checkResult, parameter[name[tc].CMD_GET_EDGE_VARIABLE, name[tc].VAR_EDGE_EFFORT, name[edgeID]]].readDouble, parameter[]]]
keyword[def] identifier[getEffort] ( identifier[self] , identifier[edgeID] , identifier[time] ): literal[string] identifier[self] . identifier[_connection] . identifier[_beginMessage] ( identifier[tc] . identifier[CMD_GET_EDGE_VARIABLE] , identifier[tc] . identifier[VAR_EDGE_EFFORT] , identifier[edgeID] , literal[int] + literal[int] ) identifier[self] . identifier[_connection] . identifier[_string] += identifier[struct] . identifier[pack] ( literal[string] , identifier[tc] . identifier[TYPE_INTEGER] , identifier[time] ) keyword[return] identifier[self] . identifier[_connection] . identifier[_checkResult] ( identifier[tc] . identifier[CMD_GET_EDGE_VARIABLE] , identifier[tc] . identifier[VAR_EDGE_EFFORT] , identifier[edgeID] ). identifier[readDouble] ()
def getEffort(self, edgeID, time): """getEffort(string, double) -> double Returns the effort value used for (re-)routing which is valid on the edge at the given time. """ self._connection._beginMessage(tc.CMD_GET_EDGE_VARIABLE, tc.VAR_EDGE_EFFORT, edgeID, 1 + 4) self._connection._string += struct.pack('!Bi', tc.TYPE_INTEGER, time) return self._connection._checkResult(tc.CMD_GET_EDGE_VARIABLE, tc.VAR_EDGE_EFFORT, edgeID).readDouble()
def datetime(self, start: int = 2000, end: int = 2035, timezone: Optional[str] = None) -> DateTime: """Generate random datetime. :param start: Minimum value of year. :param end: Maximum value of year. :param timezone: Set custom timezone (pytz required). :return: Datetime """ datetime_obj = datetime.combine( date=self.date(start, end), time=self.time(), ) if timezone: if not pytz: raise ImportError('Timezones are supported only with pytz') tz = pytz.timezone(timezone) datetime_obj = tz.localize(datetime_obj) return datetime_obj
def function[datetime, parameter[self, start, end, timezone]]: constant[Generate random datetime. :param start: Minimum value of year. :param end: Maximum value of year. :param timezone: Set custom timezone (pytz required). :return: Datetime ] variable[datetime_obj] assign[=] call[name[datetime].combine, parameter[]] if name[timezone] begin[:] if <ast.UnaryOp object at 0x7da2054a7910> begin[:] <ast.Raise object at 0x7da2054a4130> variable[tz] assign[=] call[name[pytz].timezone, parameter[name[timezone]]] variable[datetime_obj] assign[=] call[name[tz].localize, parameter[name[datetime_obj]]] return[name[datetime_obj]]
keyword[def] identifier[datetime] ( identifier[self] , identifier[start] : identifier[int] = literal[int] , identifier[end] : identifier[int] = literal[int] , identifier[timezone] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[DateTime] : literal[string] identifier[datetime_obj] = identifier[datetime] . identifier[combine] ( identifier[date] = identifier[self] . identifier[date] ( identifier[start] , identifier[end] ), identifier[time] = identifier[self] . identifier[time] (), ) keyword[if] identifier[timezone] : keyword[if] keyword[not] identifier[pytz] : keyword[raise] identifier[ImportError] ( literal[string] ) identifier[tz] = identifier[pytz] . identifier[timezone] ( identifier[timezone] ) identifier[datetime_obj] = identifier[tz] . identifier[localize] ( identifier[datetime_obj] ) keyword[return] identifier[datetime_obj]
def datetime(self, start: int=2000, end: int=2035, timezone: Optional[str]=None) -> DateTime: """Generate random datetime. :param start: Minimum value of year. :param end: Maximum value of year. :param timezone: Set custom timezone (pytz required). :return: Datetime """ datetime_obj = datetime.combine(date=self.date(start, end), time=self.time()) if timezone: if not pytz: raise ImportError('Timezones are supported only with pytz') # depends on [control=['if'], data=[]] tz = pytz.timezone(timezone) datetime_obj = tz.localize(datetime_obj) # depends on [control=['if'], data=[]] return datetime_obj
def _set_from_whole_string(rop, s, base, rnd): """ Helper function for set_str2: accept a string, set rop, and return the appropriate ternary value. Raise ValueError if ``s`` doesn't represent a valid string in the given base. """ s = s.strip() ternary, endindex = mpfr.mpfr_strtofr(rop, s, base, rnd) if len(s) != endindex: raise ValueError("not a valid numeric string") return ternary
def function[_set_from_whole_string, parameter[rop, s, base, rnd]]: constant[ Helper function for set_str2: accept a string, set rop, and return the appropriate ternary value. Raise ValueError if ``s`` doesn't represent a valid string in the given base. ] variable[s] assign[=] call[name[s].strip, parameter[]] <ast.Tuple object at 0x7da1b26f3010> assign[=] call[name[mpfr].mpfr_strtofr, parameter[name[rop], name[s], name[base], name[rnd]]] if compare[call[name[len], parameter[name[s]]] not_equal[!=] name[endindex]] begin[:] <ast.Raise object at 0x7da1b26f3a00> return[name[ternary]]
keyword[def] identifier[_set_from_whole_string] ( identifier[rop] , identifier[s] , identifier[base] , identifier[rnd] ): literal[string] identifier[s] = identifier[s] . identifier[strip] () identifier[ternary] , identifier[endindex] = identifier[mpfr] . identifier[mpfr_strtofr] ( identifier[rop] , identifier[s] , identifier[base] , identifier[rnd] ) keyword[if] identifier[len] ( identifier[s] )!= identifier[endindex] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[ternary]
def _set_from_whole_string(rop, s, base, rnd): """ Helper function for set_str2: accept a string, set rop, and return the appropriate ternary value. Raise ValueError if ``s`` doesn't represent a valid string in the given base. """ s = s.strip() (ternary, endindex) = mpfr.mpfr_strtofr(rop, s, base, rnd) if len(s) != endindex: raise ValueError('not a valid numeric string') # depends on [control=['if'], data=[]] return ternary
def get_files (self):
    """Get list of filenames in directory. Subdirectories have an
    ending slash."""
    entries = []

    def collect(line):
        """Parse one listing line and record the entry it points to."""
        log.debug(LOG_CHECK, "Directory entry %r", line)
        from ..ftpparse import ftpparse
        info = ftpparse(line)
        if info is None or not info["name"]:
            return
        name = info["name"]
        # Directories are marked with a trailing slash.
        if info["trycwd"]:
            name += "/"
        if info["trycwd"] or info["tryretr"]:
            entries.append(name)

    self.url_connection.dir(collect)
    return entries
def function[get_files, parameter[self]]: constant[Get list of filenames in directory. Subdirectories have an ending slash.] variable[files] assign[=] list[[]] def function[add_entry, parameter[line]]: constant[Parse list line and add the entry it points to to the file list.] call[name[log].debug, parameter[name[LOG_CHECK], constant[Directory entry %r], name[line]]] from relative_module[ftpparse] import module[ftpparse] variable[fpo] assign[=] call[name[ftpparse], parameter[name[line]]] if <ast.BoolOp object at 0x7da20e954130> begin[:] variable[name] assign[=] call[name[fpo]][constant[name]] if call[name[fpo]][constant[trycwd]] begin[:] <ast.AugAssign object at 0x7da20e957d60> if <ast.BoolOp object at 0x7da20e957f10> begin[:] call[name[files].append, parameter[name[name]]] call[name[self].url_connection.dir, parameter[name[add_entry]]] return[name[files]]
keyword[def] identifier[get_files] ( identifier[self] ): literal[string] identifier[files] =[] keyword[def] identifier[add_entry] ( identifier[line] ): literal[string] identifier[log] . identifier[debug] ( identifier[LOG_CHECK] , literal[string] , identifier[line] ) keyword[from] .. identifier[ftpparse] keyword[import] identifier[ftpparse] identifier[fpo] = identifier[ftpparse] ( identifier[line] ) keyword[if] identifier[fpo] keyword[is] keyword[not] keyword[None] keyword[and] identifier[fpo] [ literal[string] ]: identifier[name] = identifier[fpo] [ literal[string] ] keyword[if] identifier[fpo] [ literal[string] ]: identifier[name] += literal[string] keyword[if] identifier[fpo] [ literal[string] ] keyword[or] identifier[fpo] [ literal[string] ]: identifier[files] . identifier[append] ( identifier[name] ) identifier[self] . identifier[url_connection] . identifier[dir] ( identifier[add_entry] ) keyword[return] identifier[files]
def get_files(self): """Get list of filenames in directory. Subdirectories have an ending slash.""" files = [] def add_entry(line): """Parse list line and add the entry it points to to the file list.""" log.debug(LOG_CHECK, 'Directory entry %r', line) from ..ftpparse import ftpparse fpo = ftpparse(line) if fpo is not None and fpo['name']: name = fpo['name'] if fpo['trycwd']: name += '/' # depends on [control=['if'], data=[]] if fpo['trycwd'] or fpo['tryretr']: files.append(name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.url_connection.dir(add_entry) return files
def __patch_attr_methods(tango_device_klass, attribute):
    """
    Checks if the read and write methods have the correct signature.
    If a read/write method doesn't have a parameter (the traditional
    Attribute), then the method is wrapped into another method to make
    this work.

    :param tango_device_klass: a DeviceImpl class
    :type tango_device_klass: class
    :param attribute: the attribute data information
    :type attribute: AttrData
    """
    access = attribute.attr_write
    is_readable = access in (AttrWriteType.READ, AttrWriteType.READ_WRITE)
    is_writable = access in (AttrWriteType.WRITE, AttrWriteType.READ_WRITE)
    if is_readable:
        __patch_read_method(tango_device_klass, attribute)
    if is_writable:
        __patch_write_method(tango_device_klass, attribute)
def function[__patch_attr_methods, parameter[tango_device_klass, attribute]]: constant[ Checks if the read and write methods have the correct signature. If a read/write method doesn't have a parameter (the traditional Attribute), then the method is wrapped into another method to make this work. :param tango_device_klass: a DeviceImpl class :type tango_device_klass: class :param attribute: the attribute data information :type attribute: AttrData ] if compare[name[attribute].attr_write in tuple[[<ast.Attribute object at 0x7da18dc07130>, <ast.Attribute object at 0x7da18dc06b90>]]] begin[:] call[name[__patch_read_method], parameter[name[tango_device_klass], name[attribute]]] if compare[name[attribute].attr_write in tuple[[<ast.Attribute object at 0x7da18dc05210>, <ast.Attribute object at 0x7da18dc05030>]]] begin[:] call[name[__patch_write_method], parameter[name[tango_device_klass], name[attribute]]]
keyword[def] identifier[__patch_attr_methods] ( identifier[tango_device_klass] , identifier[attribute] ): literal[string] keyword[if] identifier[attribute] . identifier[attr_write] keyword[in] ( identifier[AttrWriteType] . identifier[READ] , identifier[AttrWriteType] . identifier[READ_WRITE] ): identifier[__patch_read_method] ( identifier[tango_device_klass] , identifier[attribute] ) keyword[if] identifier[attribute] . identifier[attr_write] keyword[in] ( identifier[AttrWriteType] . identifier[WRITE] , identifier[AttrWriteType] . identifier[READ_WRITE] ): identifier[__patch_write_method] ( identifier[tango_device_klass] , identifier[attribute] )
def __patch_attr_methods(tango_device_klass, attribute): """ Checks if the read and write methods have the correct signature. If a read/write method doesn't have a parameter (the traditional Attribute), then the method is wrapped into another method to make this work. :param tango_device_klass: a DeviceImpl class :type tango_device_klass: class :param attribute: the attribute data information :type attribute: AttrData """ if attribute.attr_write in (AttrWriteType.READ, AttrWriteType.READ_WRITE): __patch_read_method(tango_device_klass, attribute) # depends on [control=['if'], data=[]] if attribute.attr_write in (AttrWriteType.WRITE, AttrWriteType.READ_WRITE): __patch_write_method(tango_device_klass, attribute) # depends on [control=['if'], data=[]]
def classify(cls, o):
    """Break an Identity name into parts, or describe the type of other forms.

    Break a name or object number into parts and classify them.

    Returns an object that indicates which parts of the input string are
    name components, object number and version number. Does not
    completely parse the name components.

    Also can handle Name, Identity and ObjectNumbers.

    :param o: Input object to split
    :raises ValueError: if *o* is None, or if an object-number string
        cannot be parsed by ``ObjectNumber.parse``.
    """
    # Reject None before stringifying; str(None) would otherwise be
    # classified as an object-number string.
    if o is None:
        raise ValueError("Input cannot be None")

    s = str(o)

    class IdentityParts(object):
        # Result container; attributes are filled in below.
        on = None          # parsed ObjectNumber, if any
        name = None        # full name (may still include a version part)
        isa = None         # type classification of the input
        vname = None       # versioned name, when a concrete version is present
        sname = None       # simple name, without the version part
        name_parts = None  # name split on Name.NAME_PART_SEP
        version = None     # sv.Version or sv.Spec parsed from the last part
        cache_key = None   # set only for cache-key ('/'-separated) inputs

    ip = IdentityParts()

    if isinstance(o, (DatasetNumber, PartitionNumber)):
        # Already an object number; nothing to split.
        ip.on = o
        ip.name = None
        ip.isa = type(ip.on)
        ip.name_parts = None
    elif isinstance(o, Name):
        ip.on = None
        ip.isa = type(o)
        ip.name = str(o)
        ip.name_parts = ip.name.split(Name.NAME_PART_SEP)
    elif '/' in s:
        # A cache key
        ip.cache_key = s.strip()
        ip.isa = str
    elif cls.OBJECT_NUMBER_SEP in s:
        # Must be a fqname: "<name><sep><object number>"
        ip.name, on_s = s.strip().split(cls.OBJECT_NUMBER_SEP)
        ip.on = ObjectNumber.parse(on_s)
        ip.name_parts = ip.name.split(Name.NAME_PART_SEP)
        ip.isa = type(ip.on)
    elif Name.NAME_PART_SEP in s:
        # Must be an sname or vname
        ip.name = s
        ip.on = None
        ip.name_parts = ip.name.split(Name.NAME_PART_SEP)
        ip.isa = Name
    else:
        # Probably an Object Number in string form
        ip.name = None
        ip.name_parts = None
        ip.on = ObjectNumber.parse(s.strip())
        ip.isa = type(ip.on)

    if ip.name_parts:
        # The last name part may be a concrete version or a version spec.
        last = ip.name_parts[-1]
        try:
            ip.version = sv.Version(last)
            ip.vname = ip.name
        except ValueError:
            try:
                ip.version = sv.Spec(last)
                ip.vname = None  # Specs aren't vnames you can query
            except ValueError:
                pass  # Last part is an ordinary name component.

        if ip.version:
            ip.name_parts.pop()

        ip.sname = Name.NAME_PART_SEP.join(ip.name_parts)
    else:
        ip.sname = ip.name

    return ip
def function[classify, parameter[cls, o]]: constant[Break an Identity name into parts, or describe the type of other forms. Break a name or object number into parts and classify them. Returns a named tuple that indicates which parts of input string are name components, object number and version number. Does not completely parse the name components. Also can handle Name, Identity and ObjectNumbers :param o: Input object to split ] variable[s] assign[=] call[name[str], parameter[name[o]]] if compare[name[o] is constant[None]] begin[:] <ast.Raise object at 0x7da2041d8fa0> class class[IdentityParts, parameter[]] begin[:] variable[on] assign[=] constant[None] variable[name] assign[=] constant[None] variable[isa] assign[=] constant[None] variable[name] assign[=] constant[None] variable[vname] assign[=] constant[None] variable[sname] assign[=] constant[None] variable[name_parts] assign[=] constant[None] variable[version] assign[=] constant[None] variable[cache_key] assign[=] constant[None] variable[ip] assign[=] call[name[IdentityParts], parameter[]] if call[name[isinstance], parameter[name[o], tuple[[<ast.Name object at 0x7da2041d9450>, <ast.Name object at 0x7da2041dbc40>]]]] begin[:] name[ip].on assign[=] name[o] name[ip].name assign[=] constant[None] name[ip].isa assign[=] call[name[type], parameter[name[ip].on]] name[ip].name_parts assign[=] constant[None] if name[ip].name_parts begin[:] variable[last] assign[=] call[name[ip].name_parts][<ast.UnaryOp object at 0x7da204347e80>] <ast.Try object at 0x7da204345d20> if name[ip].version begin[:] call[name[ip].name_parts.pop, parameter[]] name[ip].sname assign[=] call[name[Name].NAME_PART_SEP.join, parameter[name[ip].name_parts]] return[name[ip]]
keyword[def] identifier[classify] ( identifier[cls] , identifier[o] ): literal[string] identifier[s] = identifier[str] ( identifier[o] ) keyword[if] identifier[o] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[class] identifier[IdentityParts] ( identifier[object] ): identifier[on] = keyword[None] identifier[name] = keyword[None] identifier[isa] = keyword[None] identifier[name] = keyword[None] identifier[vname] = keyword[None] identifier[sname] = keyword[None] identifier[name_parts] = keyword[None] identifier[version] = keyword[None] identifier[cache_key] = keyword[None] identifier[ip] = identifier[IdentityParts] () keyword[if] identifier[isinstance] ( identifier[o] ,( identifier[DatasetNumber] , identifier[PartitionNumber] )): identifier[ip] . identifier[on] = identifier[o] identifier[ip] . identifier[name] = keyword[None] identifier[ip] . identifier[isa] = identifier[type] ( identifier[ip] . identifier[on] ) identifier[ip] . identifier[name_parts] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[o] , identifier[Name] ): identifier[ip] . identifier[on] = keyword[None] identifier[ip] . identifier[isa] = identifier[type] ( identifier[o] ) identifier[ip] . identifier[name] = identifier[str] ( identifier[o] ) identifier[ip] . identifier[name_parts] = identifier[ip] . identifier[name] . identifier[split] ( identifier[Name] . identifier[NAME_PART_SEP] ) keyword[elif] literal[string] keyword[in] identifier[s] : identifier[ip] . identifier[cache_key] = identifier[s] . identifier[strip] () identifier[ip] . identifier[isa] = identifier[str] keyword[elif] identifier[cls] . identifier[OBJECT_NUMBER_SEP] keyword[in] identifier[s] : identifier[ip] . identifier[name] , identifier[on_s] = identifier[s] . identifier[strip] (). identifier[split] ( identifier[cls] . identifier[OBJECT_NUMBER_SEP] ) identifier[ip] . identifier[on] = identifier[ObjectNumber] . identifier[parse] ( identifier[on_s] ) identifier[ip] . 
identifier[name_parts] = identifier[ip] . identifier[name] . identifier[split] ( identifier[Name] . identifier[NAME_PART_SEP] ) identifier[ip] . identifier[isa] = identifier[type] ( identifier[ip] . identifier[on] ) keyword[elif] identifier[Name] . identifier[NAME_PART_SEP] keyword[in] identifier[s] : identifier[ip] . identifier[name] = identifier[s] identifier[ip] . identifier[on] = keyword[None] identifier[ip] . identifier[name_parts] = identifier[ip] . identifier[name] . identifier[split] ( identifier[Name] . identifier[NAME_PART_SEP] ) identifier[ip] . identifier[isa] = identifier[Name] keyword[else] : identifier[ip] . identifier[name] = keyword[None] identifier[ip] . identifier[name_parts] = keyword[None] identifier[ip] . identifier[on] = identifier[ObjectNumber] . identifier[parse] ( identifier[s] . identifier[strip] ()) identifier[ip] . identifier[isa] = identifier[type] ( identifier[ip] . identifier[on] ) keyword[if] identifier[ip] . identifier[name_parts] : identifier[last] = identifier[ip] . identifier[name_parts] [- literal[int] ] keyword[try] : identifier[ip] . identifier[version] = identifier[sv] . identifier[Version] ( identifier[last] ) identifier[ip] . identifier[vname] = identifier[ip] . identifier[name] keyword[except] identifier[ValueError] : keyword[try] : identifier[ip] . identifier[version] = identifier[sv] . identifier[Spec] ( identifier[last] ) identifier[ip] . identifier[vname] = keyword[None] keyword[except] identifier[ValueError] : keyword[pass] keyword[if] identifier[ip] . identifier[version] : identifier[ip] . identifier[name_parts] . identifier[pop] () identifier[ip] . identifier[sname] = identifier[Name] . identifier[NAME_PART_SEP] . identifier[join] ( identifier[ip] . identifier[name_parts] ) keyword[else] : identifier[ip] . identifier[sname] = identifier[ip] . identifier[name] keyword[return] identifier[ip]
def classify(cls, o): """Break an Identity name into parts, or describe the type of other forms. Break a name or object number into parts and classify them. Returns a named tuple that indicates which parts of input string are name components, object number and version number. Does not completely parse the name components. Also can handle Name, Identity and ObjectNumbers :param o: Input object to split """ # from collections import namedtuple s = str(o) if o is None: raise ValueError('Input cannot be None') # depends on [control=['if'], data=[]] class IdentityParts(object): on = None name = None isa = None name = None vname = None sname = None name_parts = None version = None cache_key = None # namedtuple('IdentityParts', ['isa', 'name', 'name_parts','on','version', 'vspec']) ip = IdentityParts() if isinstance(o, (DatasetNumber, PartitionNumber)): ip.on = o ip.name = None ip.isa = type(ip.on) ip.name_parts = None # depends on [control=['if'], data=[]] elif isinstance(o, Name): ip.on = None ip.isa = type(o) ip.name = str(o) ip.name_parts = ip.name.split(Name.NAME_PART_SEP) # depends on [control=['if'], data=[]] elif '/' in s: # A cache key ip.cache_key = s.strip() ip.isa = str # depends on [control=['if'], data=['s']] elif cls.OBJECT_NUMBER_SEP in s: # Must be a fqname (ip.name, on_s) = s.strip().split(cls.OBJECT_NUMBER_SEP) ip.on = ObjectNumber.parse(on_s) ip.name_parts = ip.name.split(Name.NAME_PART_SEP) ip.isa = type(ip.on) # depends on [control=['if'], data=['s']] elif Name.NAME_PART_SEP in s: # Must be an sname or vname ip.name = s ip.on = None ip.name_parts = ip.name.split(Name.NAME_PART_SEP) ip.isa = Name # depends on [control=['if'], data=['s']] else: # Probably an Object Number in string form ip.name = None ip.name_parts = None ip.on = ObjectNumber.parse(s.strip()) ip.isa = type(ip.on) if ip.name_parts: last = ip.name_parts[-1] try: ip.version = sv.Version(last) ip.vname = ip.name # depends on [control=['try'], data=[]] except ValueError: try: ip.version = 
sv.Spec(last) ip.vname = None # Specs aren't vnames you can query # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] if ip.version: ip.name_parts.pop() ip.sname = Name.NAME_PART_SEP.join(ip.name_parts) # depends on [control=['if'], data=[]] else: ip.sname = ip.name # depends on [control=['if'], data=[]] return ip
def emit(self, record):
    """Send log message to the listener."""
    command = send_manager_command(
        ExecutorProtocol.LOG,
        extra_fields={
            ExecutorProtocol.LOG_MESSAGE: self.format(record),
        },
        expect_reply=False,
    )
    # Keep the future so callers can await/track outstanding sends.
    self.emit_list.append(asyncio.ensure_future(command))
def function[emit, parameter[self, record]]: constant[Send log message to the listener.] variable[future] assign[=] call[name[asyncio].ensure_future, parameter[call[name[send_manager_command], parameter[name[ExecutorProtocol].LOG]]]] call[name[self].emit_list.append, parameter[name[future]]]
keyword[def] identifier[emit] ( identifier[self] , identifier[record] ): literal[string] identifier[future] = identifier[asyncio] . identifier[ensure_future] ( identifier[send_manager_command] ( identifier[ExecutorProtocol] . identifier[LOG] , identifier[extra_fields] ={ identifier[ExecutorProtocol] . identifier[LOG_MESSAGE] : identifier[self] . identifier[format] ( identifier[record] ), }, identifier[expect_reply] = keyword[False] )) identifier[self] . identifier[emit_list] . identifier[append] ( identifier[future] )
def emit(self, record): """Send log message to the listener.""" future = asyncio.ensure_future(send_manager_command(ExecutorProtocol.LOG, extra_fields={ExecutorProtocol.LOG_MESSAGE: self.format(record)}, expect_reply=False)) self.emit_list.append(future)
def get_resource(self):
    '''
    Returns a BytesResource to build the viewers JavaScript
    '''
    # Basename could be used for controlling caching
    # basename = 'viewers_%s' % settings.get_cache_string()
    packages = self.get_node_packages()
    # sort_keys is essential to ensure resulting string is
    # deterministic (and thus hashable)
    payload = json.dumps(packages, sort_keys=True).encode('utf8')
    return ForeignBytesResource(
        payload,
        extension=VIEWER_EXT,
        # basename=basename,
    )
def function[get_resource, parameter[self]]: constant[ Returns a BytesResource to build the viewers JavaScript ] variable[node_packages] assign[=] call[name[self].get_node_packages, parameter[]] variable[viewers_data_str] assign[=] call[name[json].dumps, parameter[name[node_packages]]] variable[viewers_data] assign[=] call[name[viewers_data_str].encode, parameter[constant[utf8]]] variable[viewers_resource] assign[=] call[name[ForeignBytesResource], parameter[name[viewers_data]]] return[name[viewers_resource]]
keyword[def] identifier[get_resource] ( identifier[self] ): literal[string] identifier[node_packages] = identifier[self] . identifier[get_node_packages] () identifier[viewers_data_str] = identifier[json] . identifier[dumps] ( identifier[node_packages] , identifier[sort_keys] = keyword[True] ) identifier[viewers_data] = identifier[viewers_data_str] . identifier[encode] ( literal[string] ) identifier[viewers_resource] = identifier[ForeignBytesResource] ( identifier[viewers_data] , identifier[extension] = identifier[VIEWER_EXT] , ) keyword[return] identifier[viewers_resource]
def get_resource(self): """ Returns a BytesResource to build the viewers JavaScript """ # Basename could be used for controlling caching # basename = 'viewers_%s' % settings.get_cache_string() node_packages = self.get_node_packages() # sort_keys is essential to ensure resulting string is # deterministic (and thus hashable) viewers_data_str = json.dumps(node_packages, sort_keys=True) viewers_data = viewers_data_str.encode('utf8') # basename=basename, viewers_resource = ForeignBytesResource(viewers_data, extension=VIEWER_EXT) return viewers_resource
def check_for_missing_options(config):
    """Iter over a config and raise if a required option is still not set.

    Args:
        config (confpy.core.config.Configuration): The configuration object
            to validate.

    Raises:
        MissingRequiredOption: If any required options are not set in the
            configuration object.

    Required options with default values are considered set and will not
    cause this function to raise.
    """
    for section_name, section in config:
        for option_name, option in section:
            # An option with a value (even a default) is considered set.
            if not option.required or option.value is not None:
                continue
            message = "Option {0} in namespace {1} is required.".format(
                option_name,
                section_name,
            )
            raise exc.MissingRequiredOption(message)

    return config
def function[check_for_missing_options, parameter[config]]: constant[Iter over a config and raise if a required option is still not set. Args: config (confpy.core.config.Configuration): The configuration object to validate. Raises: MissingRequiredOption: If any required options are not set in the configuration object. Required options with default values are considered set and will not cause this function to raise. ] for taget[tuple[[<ast.Name object at 0x7da204564b50>, <ast.Name object at 0x7da204566710>]]] in starred[name[config]] begin[:] for taget[tuple[[<ast.Name object at 0x7da204567130>, <ast.Name object at 0x7da204565870>]]] in starred[name[section]] begin[:] if <ast.BoolOp object at 0x7da204567430> begin[:] <ast.Raise object at 0x7da204567850> return[name[config]]
keyword[def] identifier[check_for_missing_options] ( identifier[config] ): literal[string] keyword[for] identifier[section_name] , identifier[section] keyword[in] identifier[config] : keyword[for] identifier[option_name] , identifier[option] keyword[in] identifier[section] : keyword[if] identifier[option] . identifier[required] keyword[and] identifier[option] . identifier[value] keyword[is] keyword[None] : keyword[raise] identifier[exc] . identifier[MissingRequiredOption] ( literal[string] . identifier[format] ( identifier[option_name] , identifier[section_name] , ) ) keyword[return] identifier[config]
def check_for_missing_options(config): """Iter over a config and raise if a required option is still not set. Args: config (confpy.core.config.Configuration): The configuration object to validate. Raises: MissingRequiredOption: If any required options are not set in the configuration object. Required options with default values are considered set and will not cause this function to raise. """ for (section_name, section) in config: for (option_name, option) in section: if option.required and option.value is None: raise exc.MissingRequiredOption('Option {0} in namespace {1} is required.'.format(option_name, section_name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return config
def acknowledge_time(self):
    """
    Processor time when the alarm was acknowledged.

    :type: :class:`~datetime.datetime`
    """
    # Only touch the proto when acknowledged, preserving the original
    # short-circuit behaviour.
    if self.is_acknowledged:
        info = self._proto.acknowledgeInfo
        if info.HasField('acknowledgeTime'):
            return parse_isostring(info.acknowledgeTime)
    return None
def function[acknowledge_time, parameter[self]]: constant[ Processor time when the alarm was acknowledged. :type: :class:`~datetime.datetime` ] if <ast.BoolOp object at 0x7da1b1f20550> begin[:] return[call[name[parse_isostring], parameter[name[self]._proto.acknowledgeInfo.acknowledgeTime]]] return[constant[None]]
keyword[def] identifier[acknowledge_time] ( identifier[self] ): literal[string] keyword[if] ( identifier[self] . identifier[is_acknowledged] keyword[and] identifier[self] . identifier[_proto] . identifier[acknowledgeInfo] . identifier[HasField] ( literal[string] )): keyword[return] identifier[parse_isostring] ( identifier[self] . identifier[_proto] . identifier[acknowledgeInfo] . identifier[acknowledgeTime] ) keyword[return] keyword[None]
def acknowledge_time(self): """ Processor time when the alarm was acknowledged. :type: :class:`~datetime.datetime` """ if self.is_acknowledged and self._proto.acknowledgeInfo.HasField('acknowledgeTime'): return parse_isostring(self._proto.acknowledgeInfo.acknowledgeTime) # depends on [control=['if'], data=[]] return None
def update(request, ident, stateless=False, **kwargs):
    """Generate update json response.

    :param request: incoming Django HTTP request
    :param ident: identifier of the Dash app instance
    :param stateless: if True, locate the app as a stateless app
    :return: HttpResponse carrying the dash component update payload
    """
    dash_app, app = DashApp.locate_item(ident, stateless)

    request_body = json.loads(request.body.decode('utf-8'))

    if app.use_dash_dispatch():
        # Force call through dash's own dispatcher
        view_func = app.locate_endpoint_function('dash-update-component')

        import flask
        with app.test_request_context():
            # Fudge request object so flask sees our request body
            # pylint: disable=protected-access
            flask.request._cached_json = (request_body, flask.request._cached_json[True])
            resp = view_func()
    else:
        # Use direct dispatch with extra arguments in the argMap
        app_state = request.session.get("django_plotly_dash", dict())
        arg_map = {'dash_app_id': ident,
                   'dash_app': dash_app,
                   'user': request.user,
                   'session_state': app_state}
        resp = app.dispatch_with_args(request_body, arg_map)
        request.session['django_plotly_dash'] = app_state
        dash_app.handle_current_state()

    # Special for ws-driven edge case
    if str(resp) == 'EDGECASEEXIT':
        return HttpResponse("")

    # resp is either a flask Response (dash dispatch path) or raw
    # content (direct dispatch path); only the former exposes
    # .data/.mimetype. Catch only AttributeError instead of a bare
    # except, which also swallowed KeyboardInterrupt/SystemExit.
    try:
        rdata = resp.data
        rtype = resp.mimetype
    except AttributeError:
        rdata = resp
        rtype = "application/json"

    return HttpResponse(rdata, content_type=rtype)
def function[update, parameter[request, ident, stateless]]: constant[Generate update json response] <ast.Tuple object at 0x7da18bc726b0> assign[=] call[name[DashApp].locate_item, parameter[name[ident], name[stateless]]] variable[request_body] assign[=] call[name[json].loads, parameter[call[name[request].body.decode, parameter[constant[utf-8]]]]] if call[name[app].use_dash_dispatch, parameter[]] begin[:] variable[view_func] assign[=] call[name[app].locate_endpoint_function, parameter[constant[dash-update-component]]] import module[flask] with call[name[app].test_request_context, parameter[]] begin[:] name[flask].request._cached_json assign[=] tuple[[<ast.Name object at 0x7da18bc70eb0>, <ast.Subscript object at 0x7da18bc73640>]] variable[resp] assign[=] call[name[view_func], parameter[]] if compare[call[name[str], parameter[name[resp]]] equal[==] constant[EDGECASEEXIT]] begin[:] return[call[name[HttpResponse], parameter[constant[]]]] <ast.Try object at 0x7da18bc70f40> return[call[name[HttpResponse], parameter[name[rdata]]]]
keyword[def] identifier[update] ( identifier[request] , identifier[ident] , identifier[stateless] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[dash_app] , identifier[app] = identifier[DashApp] . identifier[locate_item] ( identifier[ident] , identifier[stateless] ) identifier[request_body] = identifier[json] . identifier[loads] ( identifier[request] . identifier[body] . identifier[decode] ( literal[string] )) keyword[if] identifier[app] . identifier[use_dash_dispatch] (): identifier[view_func] = identifier[app] . identifier[locate_endpoint_function] ( literal[string] ) keyword[import] identifier[flask] keyword[with] identifier[app] . identifier[test_request_context] (): identifier[flask] . identifier[request] . identifier[_cached_json] =( identifier[request_body] , identifier[flask] . identifier[request] . identifier[_cached_json] [ keyword[True] ]) identifier[resp] = identifier[view_func] () keyword[else] : identifier[app_state] = identifier[request] . identifier[session] . identifier[get] ( literal[string] , identifier[dict] ()) identifier[arg_map] ={ literal[string] : identifier[ident] , literal[string] : identifier[dash_app] , literal[string] : identifier[request] . identifier[user] , literal[string] : identifier[app_state] } identifier[resp] = identifier[app] . identifier[dispatch_with_args] ( identifier[request_body] , identifier[arg_map] ) identifier[request] . identifier[session] [ literal[string] ]= identifier[app_state] identifier[dash_app] . identifier[handle_current_state] () keyword[if] identifier[str] ( identifier[resp] )== literal[string] : keyword[return] identifier[HttpResponse] ( literal[string] ) keyword[try] : identifier[rdata] = identifier[resp] . identifier[data] identifier[rtype] = identifier[resp] . identifier[mimetype] keyword[except] : identifier[rdata] = identifier[resp] identifier[rtype] = literal[string] keyword[return] identifier[HttpResponse] ( identifier[rdata] , identifier[content_type] = identifier[rtype] )
def update(request, ident, stateless=False, **kwargs): """Generate update json response""" (dash_app, app) = DashApp.locate_item(ident, stateless) request_body = json.loads(request.body.decode('utf-8')) if app.use_dash_dispatch(): # Force call through dash view_func = app.locate_endpoint_function('dash-update-component') import flask with app.test_request_context(): # Fudge request object # pylint: disable=protected-access flask.request._cached_json = (request_body, flask.request._cached_json[True]) resp = view_func() # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: # Use direct dispatch with extra arguments in the argMap app_state = request.session.get('django_plotly_dash', dict()) arg_map = {'dash_app_id': ident, 'dash_app': dash_app, 'user': request.user, 'session_state': app_state} resp = app.dispatch_with_args(request_body, arg_map) request.session['django_plotly_dash'] = app_state dash_app.handle_current_state() # Special for ws-driven edge case if str(resp) == 'EDGECASEEXIT': return HttpResponse('') # depends on [control=['if'], data=[]] # Change in returned value type try: rdata = resp.data rtype = resp.mimetype # depends on [control=['try'], data=[]] except: rdata = resp rtype = 'application/json' # depends on [control=['except'], data=[]] return HttpResponse(rdata, content_type=rtype)
def setMotionInt(self, motDHPF = 0x01, motTHR = 0x14, motDUR = 0x30, motDeteDec = 0x15 ): """! Set to enable Motion Detection Interrupt @param motDHPF Set the Digital High Pass Filter. Default is 0x01 (5Hz) @param motTHR Desired motion threshold. Default is 20 (0x14) @param motDUR Desired motion duration. Default is 48ms (0x30) @param motDeteDec Motion detection decrement. Default is 21 (0x15) @note <b>motDHPF</b> should be one of the following values:<br> 0x00: RESET,<br> 0x01: 5Hz,<br> 0x02: 2.5Hz,<br> 0x03: 1.25Hz,<br> 0x04: 0.63Hz,<br> 0x07: HOLD<br> """ #After power on (0x00 to register (decimal) 107), the Motion Detection Interrupt can be enabled as follows: #self._sendCmd( self.REG_PWR_MGMT_1, 0x00 ) #(optionally?) Reset all internal signal paths in the MPU-6050 by writing 0x07 to register 0x68; self._sendCmd( self.REG_SIGNAL_PATH_RESET, 0x07 ) #write register 0x37 to select how to use the interrupt pin. #For an active high, push-pull signal that stays until register #(decimal) 58 is read, write 0x20 (need read to clear INT state) or 0x00 (auto clear INT state). self._sendCmd( self.REG_INT_PIN_CFG, 0x00 ) orgAccelConf = self._readByte(self.REG_ACCEL_CONFIG) newAccelConf = ( (orgAccelConf | 0xE7) ^ 0xE7 ) | motDHPF # Write register 28 (==0x1C) to set the Digital High Pass Filter, # bits 3:0. For example set it to 0x01 for 5Hz. # (These 3 bits are grey in the data sheet, but they are used! # Leaving them 0 means the filter always outputs 0.) # # 0x00: RESET, # 0x01: 5Hz, # 0x02: 2.5Hz, # 0x03: 1.25Hz, # 0x04: 0.63Hz, # 0x07: hold # # 高通滤波器灵敏度调节 # self._sendCmd( self.REG_ACCEL_CONFIG, newAccelConf ) #Write the desired Motion threshold to register 0x1F (For example, write decimal 20). self._sendCmd( self.REG_MOTION_DET, motTHR ) #To register 0x20 (hex), write the desired motion duration, for example 40ms (0x28). 
# 灵敏度调节 self._sendCmd( self.REG_MOTION_DET_DUR, motDUR ) #to register 0x69, write the motion detection decrement and #a few other settings (for example write 0x15 to set both #free-fall and motion decrements to 1 and accelerome0x00ter #start-up delay to 5ms total by adding 1ms. ) self._sendCmd( self.REG_MOTION_DET_CTRL, motDeteDec ) #write register 0x38, bit 6 (0x40), to enable motion detection interrupt. self._sendCmd( self.REG_INT_ENABLE, self.VAL_INT_ENABLE_MOTION )
def function[setMotionInt, parameter[self, motDHPF, motTHR, motDUR, motDeteDec]]: constant[! Set to enable Motion Detection Interrupt @param motDHPF Set the Digital High Pass Filter. Default is 0x01 (5Hz) @param motTHR Desired motion threshold. Default is 20 (0x14) @param motDUR Desired motion duration. Default is 48ms (0x30) @param motDeteDec Motion detection decrement. Default is 21 (0x15) @note <b>motDHPF</b> should be one of the following values:<br> 0x00: RESET,<br> 0x01: 5Hz,<br> 0x02: 2.5Hz,<br> 0x03: 1.25Hz,<br> 0x04: 0.63Hz,<br> 0x07: HOLD<br> ] call[name[self]._sendCmd, parameter[name[self].REG_SIGNAL_PATH_RESET, constant[7]]] call[name[self]._sendCmd, parameter[name[self].REG_INT_PIN_CFG, constant[0]]] variable[orgAccelConf] assign[=] call[name[self]._readByte, parameter[name[self].REG_ACCEL_CONFIG]] variable[newAccelConf] assign[=] binary_operation[binary_operation[binary_operation[name[orgAccelConf] <ast.BitOr object at 0x7da2590d6aa0> constant[231]] <ast.BitXor object at 0x7da2590d6b00> constant[231]] <ast.BitOr object at 0x7da2590d6aa0> name[motDHPF]] call[name[self]._sendCmd, parameter[name[self].REG_ACCEL_CONFIG, name[newAccelConf]]] call[name[self]._sendCmd, parameter[name[self].REG_MOTION_DET, name[motTHR]]] call[name[self]._sendCmd, parameter[name[self].REG_MOTION_DET_DUR, name[motDUR]]] call[name[self]._sendCmd, parameter[name[self].REG_MOTION_DET_CTRL, name[motDeteDec]]] call[name[self]._sendCmd, parameter[name[self].REG_INT_ENABLE, name[self].VAL_INT_ENABLE_MOTION]]
keyword[def] identifier[setMotionInt] ( identifier[self] , identifier[motDHPF] = literal[int] , identifier[motTHR] = literal[int] , identifier[motDUR] = literal[int] , identifier[motDeteDec] = literal[int] ): literal[string] identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_SIGNAL_PATH_RESET] , literal[int] ) identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_INT_PIN_CFG] , literal[int] ) identifier[orgAccelConf] = identifier[self] . identifier[_readByte] ( identifier[self] . identifier[REG_ACCEL_CONFIG] ) identifier[newAccelConf] =(( identifier[orgAccelConf] | literal[int] )^ literal[int] )| identifier[motDHPF] identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_ACCEL_CONFIG] , identifier[newAccelConf] ) identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_MOTION_DET] , identifier[motTHR] ) identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_MOTION_DET_DUR] , identifier[motDUR] ) identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_MOTION_DET_CTRL] , identifier[motDeteDec] ) identifier[self] . identifier[_sendCmd] ( identifier[self] . identifier[REG_INT_ENABLE] , identifier[self] . identifier[VAL_INT_ENABLE_MOTION] )
def setMotionInt(self, motDHPF=1, motTHR=20, motDUR=48, motDeteDec=21): """! Set to enable Motion Detection Interrupt @param motDHPF Set the Digital High Pass Filter. Default is 0x01 (5Hz) @param motTHR Desired motion threshold. Default is 20 (0x14) @param motDUR Desired motion duration. Default is 48ms (0x30) @param motDeteDec Motion detection decrement. Default is 21 (0x15) @note <b>motDHPF</b> should be one of the following values:<br> 0x00: RESET,<br> 0x01: 5Hz,<br> 0x02: 2.5Hz,<br> 0x03: 1.25Hz,<br> 0x04: 0.63Hz,<br> 0x07: HOLD<br> """ #After power on (0x00 to register (decimal) 107), the Motion Detection Interrupt can be enabled as follows: #self._sendCmd( self.REG_PWR_MGMT_1, 0x00 ) #(optionally?) Reset all internal signal paths in the MPU-6050 by writing 0x07 to register 0x68; self._sendCmd(self.REG_SIGNAL_PATH_RESET, 7) #write register 0x37 to select how to use the interrupt pin. #For an active high, push-pull signal that stays until register #(decimal) 58 is read, write 0x20 (need read to clear INT state) or 0x00 (auto clear INT state). self._sendCmd(self.REG_INT_PIN_CFG, 0) orgAccelConf = self._readByte(self.REG_ACCEL_CONFIG) newAccelConf = (orgAccelConf | 231) ^ 231 | motDHPF # Write register 28 (==0x1C) to set the Digital High Pass Filter, # bits 3:0. For example set it to 0x01 for 5Hz. # (These 3 bits are grey in the data sheet, but they are used! # Leaving them 0 means the filter always outputs 0.) # # 0x00: RESET, # 0x01: 5Hz, # 0x02: 2.5Hz, # 0x03: 1.25Hz, # 0x04: 0.63Hz, # 0x07: hold # # 高通滤波器灵敏度调节 # self._sendCmd(self.REG_ACCEL_CONFIG, newAccelConf) #Write the desired Motion threshold to register 0x1F (For example, write decimal 20). self._sendCmd(self.REG_MOTION_DET, motTHR) #To register 0x20 (hex), write the desired motion duration, for example 40ms (0x28). 
# 灵敏度调节 self._sendCmd(self.REG_MOTION_DET_DUR, motDUR) #to register 0x69, write the motion detection decrement and #a few other settings (for example write 0x15 to set both #free-fall and motion decrements to 1 and accelerome0x00ter #start-up delay to 5ms total by adding 1ms. ) self._sendCmd(self.REG_MOTION_DET_CTRL, motDeteDec) #write register 0x38, bit 6 (0x40), to enable motion detection interrupt. self._sendCmd(self.REG_INT_ENABLE, self.VAL_INT_ENABLE_MOTION)
def encrypt_to(self, f, mac_bytes=10): """ Returns a file like object `ef'. Anything written to `ef' will be encrypted for this pubkey and written to `f'. """ ctx = EncryptionContext(f, self.p, mac_bytes) yield ctx ctx.finish()
def function[encrypt_to, parameter[self, f, mac_bytes]]: constant[ Returns a file like object `ef'. Anything written to `ef' will be encrypted for this pubkey and written to `f'. ] variable[ctx] assign[=] call[name[EncryptionContext], parameter[name[f], name[self].p, name[mac_bytes]]] <ast.Yield object at 0x7da1b040a7a0> call[name[ctx].finish, parameter[]]
keyword[def] identifier[encrypt_to] ( identifier[self] , identifier[f] , identifier[mac_bytes] = literal[int] ): literal[string] identifier[ctx] = identifier[EncryptionContext] ( identifier[f] , identifier[self] . identifier[p] , identifier[mac_bytes] ) keyword[yield] identifier[ctx] identifier[ctx] . identifier[finish] ()
def encrypt_to(self, f, mac_bytes=10): """ Returns a file like object `ef'. Anything written to `ef' will be encrypted for this pubkey and written to `f'. """ ctx = EncryptionContext(f, self.p, mac_bytes) yield ctx ctx.finish()
def get_truncation_language() -> ParserElement: """Build a parser for protein truncations.""" l1 = truncation_tag + nest(amino_acid(AMINO_ACID) + ppc.integer(TRUNCATION_POSITION)) l1.setParseAction(_handle_trunc) l2 = truncation_tag + nest(ppc.integer(TRUNCATION_POSITION)) l2.setParseAction(_handle_trunc_legacy) return l1 | l2
def function[get_truncation_language, parameter[]]: constant[Build a parser for protein truncations.] variable[l1] assign[=] binary_operation[name[truncation_tag] + call[name[nest], parameter[binary_operation[call[name[amino_acid], parameter[name[AMINO_ACID]]] + call[name[ppc].integer, parameter[name[TRUNCATION_POSITION]]]]]]] call[name[l1].setParseAction, parameter[name[_handle_trunc]]] variable[l2] assign[=] binary_operation[name[truncation_tag] + call[name[nest], parameter[call[name[ppc].integer, parameter[name[TRUNCATION_POSITION]]]]]] call[name[l2].setParseAction, parameter[name[_handle_trunc_legacy]]] return[binary_operation[name[l1] <ast.BitOr object at 0x7da2590d6aa0> name[l2]]]
keyword[def] identifier[get_truncation_language] ()-> identifier[ParserElement] : literal[string] identifier[l1] = identifier[truncation_tag] + identifier[nest] ( identifier[amino_acid] ( identifier[AMINO_ACID] )+ identifier[ppc] . identifier[integer] ( identifier[TRUNCATION_POSITION] )) identifier[l1] . identifier[setParseAction] ( identifier[_handle_trunc] ) identifier[l2] = identifier[truncation_tag] + identifier[nest] ( identifier[ppc] . identifier[integer] ( identifier[TRUNCATION_POSITION] )) identifier[l2] . identifier[setParseAction] ( identifier[_handle_trunc_legacy] ) keyword[return] identifier[l1] | identifier[l2]
def get_truncation_language() -> ParserElement: """Build a parser for protein truncations.""" l1 = truncation_tag + nest(amino_acid(AMINO_ACID) + ppc.integer(TRUNCATION_POSITION)) l1.setParseAction(_handle_trunc) l2 = truncation_tag + nest(ppc.integer(TRUNCATION_POSITION)) l2.setParseAction(_handle_trunc_legacy) return l1 | l2
def write_error(self, status_code, **kwargs): """Override to implement custom error pages. http://tornado.readthedocs.org/en/stable/_modules/tornado/web.html#RequestHandler.write_error """ super(BaseHandler, self).write_error(status_code, **kwargs)
def function[write_error, parameter[self, status_code]]: constant[Override to implement custom error pages. http://tornado.readthedocs.org/en/stable/_modules/tornado/web.html#RequestHandler.write_error ] call[call[name[super], parameter[name[BaseHandler], name[self]]].write_error, parameter[name[status_code]]]
keyword[def] identifier[write_error] ( identifier[self] , identifier[status_code] ,** identifier[kwargs] ): literal[string] identifier[super] ( identifier[BaseHandler] , identifier[self] ). identifier[write_error] ( identifier[status_code] ,** identifier[kwargs] )
def write_error(self, status_code, **kwargs): """Override to implement custom error pages. http://tornado.readthedocs.org/en/stable/_modules/tornado/web.html#RequestHandler.write_error """ super(BaseHandler, self).write_error(status_code, **kwargs)
def get_template_uuid(self): """ Retrieves the uuid of the given template name. """ response = requests.get(self.url + 'editor/scan/templates', headers=self.headers, verify=False) templates = json.loads(response.text) for template in templates['templates']: if template['name'] == self.template_name: return template['uuid']
def function[get_template_uuid, parameter[self]]: constant[ Retrieves the uuid of the given template name. ] variable[response] assign[=] call[name[requests].get, parameter[binary_operation[name[self].url + constant[editor/scan/templates]]]] variable[templates] assign[=] call[name[json].loads, parameter[name[response].text]] for taget[name[template]] in starred[call[name[templates]][constant[templates]]] begin[:] if compare[call[name[template]][constant[name]] equal[==] name[self].template_name] begin[:] return[call[name[template]][constant[uuid]]]
keyword[def] identifier[get_template_uuid] ( identifier[self] ): literal[string] identifier[response] = identifier[requests] . identifier[get] ( identifier[self] . identifier[url] + literal[string] , identifier[headers] = identifier[self] . identifier[headers] , identifier[verify] = keyword[False] ) identifier[templates] = identifier[json] . identifier[loads] ( identifier[response] . identifier[text] ) keyword[for] identifier[template] keyword[in] identifier[templates] [ literal[string] ]: keyword[if] identifier[template] [ literal[string] ]== identifier[self] . identifier[template_name] : keyword[return] identifier[template] [ literal[string] ]
def get_template_uuid(self): """ Retrieves the uuid of the given template name. """ response = requests.get(self.url + 'editor/scan/templates', headers=self.headers, verify=False) templates = json.loads(response.text) for template in templates['templates']: if template['name'] == self.template_name: return template['uuid'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['template']]
def repeat(self): """Repeat mode.""" info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode) return None if info is None else info.repeatMode
def function[repeat, parameter[self]]: constant[Repeat mode.] variable[info] assign[=] call[name[self]._get_command_info, parameter[name[CommandInfo_pb2].ChangeRepeatMode]] return[<ast.IfExp object at 0x7da18fe91120>]
keyword[def] identifier[repeat] ( identifier[self] ): literal[string] identifier[info] = identifier[self] . identifier[_get_command_info] ( identifier[CommandInfo_pb2] . identifier[ChangeRepeatMode] ) keyword[return] keyword[None] keyword[if] identifier[info] keyword[is] keyword[None] keyword[else] identifier[info] . identifier[repeatMode]
def repeat(self): """Repeat mode.""" info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode) return None if info is None else info.repeatMode
def patch_lines(x): """ Draw lines between groups """ for idx in range(len(x)-1): x[idx] = np.vstack([x[idx], x[idx+1][0,:]]) return x
def function[patch_lines, parameter[x]]: constant[ Draw lines between groups ] for taget[name[idx]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[x]]] - constant[1]]]]] begin[:] call[name[x]][name[idx]] assign[=] call[name[np].vstack, parameter[list[[<ast.Subscript object at 0x7da18f7220e0>, <ast.Subscript object at 0x7da18f723b20>]]]] return[name[x]]
keyword[def] identifier[patch_lines] ( identifier[x] ): literal[string] keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[x] )- literal[int] ): identifier[x] [ identifier[idx] ]= identifier[np] . identifier[vstack] ([ identifier[x] [ identifier[idx] ], identifier[x] [ identifier[idx] + literal[int] ][ literal[int] ,:]]) keyword[return] identifier[x]
def patch_lines(x): """ Draw lines between groups """ for idx in range(len(x) - 1): x[idx] = np.vstack([x[idx], x[idx + 1][0, :]]) # depends on [control=['for'], data=['idx']] return x
def killCellRegion(self, centerColumn, radius): """ Kill cells around a centerColumn, within radius """ self.deadCols = topology.wrappingNeighborhood(centerColumn, radius, self._columnDimensions) self.deadColumnInputSpan = self.getConnectedSpan(self.deadCols) self.removeDeadColumns()
def function[killCellRegion, parameter[self, centerColumn, radius]]: constant[ Kill cells around a centerColumn, within radius ] name[self].deadCols assign[=] call[name[topology].wrappingNeighborhood, parameter[name[centerColumn], name[radius], name[self]._columnDimensions]] name[self].deadColumnInputSpan assign[=] call[name[self].getConnectedSpan, parameter[name[self].deadCols]] call[name[self].removeDeadColumns, parameter[]]
keyword[def] identifier[killCellRegion] ( identifier[self] , identifier[centerColumn] , identifier[radius] ): literal[string] identifier[self] . identifier[deadCols] = identifier[topology] . identifier[wrappingNeighborhood] ( identifier[centerColumn] , identifier[radius] , identifier[self] . identifier[_columnDimensions] ) identifier[self] . identifier[deadColumnInputSpan] = identifier[self] . identifier[getConnectedSpan] ( identifier[self] . identifier[deadCols] ) identifier[self] . identifier[removeDeadColumns] ()
def killCellRegion(self, centerColumn, radius): """ Kill cells around a centerColumn, within radius """ self.deadCols = topology.wrappingNeighborhood(centerColumn, radius, self._columnDimensions) self.deadColumnInputSpan = self.getConnectedSpan(self.deadCols) self.removeDeadColumns()
def cmd_wp_undo(self): '''handle wp undo''' if self.undo_wp_idx == -1 or self.undo_wp is None: print("No undo information") return wp = self.undo_wp if self.undo_type == 'move': wp.target_system = self.target_system wp.target_component = self.target_component self.loading_waypoints = True self.loading_waypoint_lasttime = time.time() self.master.mav.mission_write_partial_list_send(self.target_system, self.target_component, self.undo_wp_idx, self.undo_wp_idx) self.wploader.set(wp, self.undo_wp_idx) print("Undid WP move") elif self.undo_type == 'remove': self.wploader.insert(self.undo_wp_idx, wp) self.fix_jumps(self.undo_wp_idx, 1) self.send_all_waypoints() print("Undid WP remove") else: print("bad undo type") self.undo_wp = None self.undo_wp_idx = -1
def function[cmd_wp_undo, parameter[self]]: constant[handle wp undo] if <ast.BoolOp object at 0x7da1b26aedd0> begin[:] call[name[print], parameter[constant[No undo information]]] return[None] variable[wp] assign[=] name[self].undo_wp if compare[name[self].undo_type equal[==] constant[move]] begin[:] name[wp].target_system assign[=] name[self].target_system name[wp].target_component assign[=] name[self].target_component name[self].loading_waypoints assign[=] constant[True] name[self].loading_waypoint_lasttime assign[=] call[name[time].time, parameter[]] call[name[self].master.mav.mission_write_partial_list_send, parameter[name[self].target_system, name[self].target_component, name[self].undo_wp_idx, name[self].undo_wp_idx]] call[name[self].wploader.set, parameter[name[wp], name[self].undo_wp_idx]] call[name[print], parameter[constant[Undid WP move]]] name[self].undo_wp assign[=] constant[None] name[self].undo_wp_idx assign[=] <ast.UnaryOp object at 0x7da1b26ad2a0>
keyword[def] identifier[cmd_wp_undo] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[undo_wp_idx] ==- literal[int] keyword[or] identifier[self] . identifier[undo_wp] keyword[is] keyword[None] : identifier[print] ( literal[string] ) keyword[return] identifier[wp] = identifier[self] . identifier[undo_wp] keyword[if] identifier[self] . identifier[undo_type] == literal[string] : identifier[wp] . identifier[target_system] = identifier[self] . identifier[target_system] identifier[wp] . identifier[target_component] = identifier[self] . identifier[target_component] identifier[self] . identifier[loading_waypoints] = keyword[True] identifier[self] . identifier[loading_waypoint_lasttime] = identifier[time] . identifier[time] () identifier[self] . identifier[master] . identifier[mav] . identifier[mission_write_partial_list_send] ( identifier[self] . identifier[target_system] , identifier[self] . identifier[target_component] , identifier[self] . identifier[undo_wp_idx] , identifier[self] . identifier[undo_wp_idx] ) identifier[self] . identifier[wploader] . identifier[set] ( identifier[wp] , identifier[self] . identifier[undo_wp_idx] ) identifier[print] ( literal[string] ) keyword[elif] identifier[self] . identifier[undo_type] == literal[string] : identifier[self] . identifier[wploader] . identifier[insert] ( identifier[self] . identifier[undo_wp_idx] , identifier[wp] ) identifier[self] . identifier[fix_jumps] ( identifier[self] . identifier[undo_wp_idx] , literal[int] ) identifier[self] . identifier[send_all_waypoints] () identifier[print] ( literal[string] ) keyword[else] : identifier[print] ( literal[string] ) identifier[self] . identifier[undo_wp] = keyword[None] identifier[self] . identifier[undo_wp_idx] =- literal[int]
def cmd_wp_undo(self): """handle wp undo""" if self.undo_wp_idx == -1 or self.undo_wp is None: print('No undo information') return # depends on [control=['if'], data=[]] wp = self.undo_wp if self.undo_type == 'move': wp.target_system = self.target_system wp.target_component = self.target_component self.loading_waypoints = True self.loading_waypoint_lasttime = time.time() self.master.mav.mission_write_partial_list_send(self.target_system, self.target_component, self.undo_wp_idx, self.undo_wp_idx) self.wploader.set(wp, self.undo_wp_idx) print('Undid WP move') # depends on [control=['if'], data=[]] elif self.undo_type == 'remove': self.wploader.insert(self.undo_wp_idx, wp) self.fix_jumps(self.undo_wp_idx, 1) self.send_all_waypoints() print('Undid WP remove') # depends on [control=['if'], data=[]] else: print('bad undo type') self.undo_wp = None self.undo_wp_idx = -1
def translate(self): """Compile the function call.""" varnames = set() ident = self.ident funcnames = set([ident]) arg_exprs = [] for arg in self.args: subexprs, subvars, subfuncs = arg.translate() varnames.update(subvars) funcnames.update(subfuncs) # Create a subexpression that joins the result components of # the arguments. arg_exprs.append(ex_call( ast.Attribute(ex_literal(u''), 'join', ast.Load()), [ex_call( 'map', [ ex_rvalue(str.__name__), ast.List(subexprs, ast.Load()), ] )], )) subexpr_call = ex_call( FUNCTION_PREFIX + ident, arg_exprs ) return [subexpr_call], varnames, funcnames
def function[translate, parameter[self]]: constant[Compile the function call.] variable[varnames] assign[=] call[name[set], parameter[]] variable[ident] assign[=] name[self].ident variable[funcnames] assign[=] call[name[set], parameter[list[[<ast.Name object at 0x7da18f00c460>]]]] variable[arg_exprs] assign[=] list[[]] for taget[name[arg]] in starred[name[self].args] begin[:] <ast.Tuple object at 0x7da18f00fbb0> assign[=] call[name[arg].translate, parameter[]] call[name[varnames].update, parameter[name[subvars]]] call[name[funcnames].update, parameter[name[subfuncs]]] call[name[arg_exprs].append, parameter[call[name[ex_call], parameter[call[name[ast].Attribute, parameter[call[name[ex_literal], parameter[constant[]]], constant[join], call[name[ast].Load, parameter[]]]], list[[<ast.Call object at 0x7da18f00cf40>]]]]]] variable[subexpr_call] assign[=] call[name[ex_call], parameter[binary_operation[name[FUNCTION_PREFIX] + name[ident]], name[arg_exprs]]] return[tuple[[<ast.List object at 0x7da18f00f9a0>, <ast.Name object at 0x7da18f00ec50>, <ast.Name object at 0x7da18f00dd20>]]]
keyword[def] identifier[translate] ( identifier[self] ): literal[string] identifier[varnames] = identifier[set] () identifier[ident] = identifier[self] . identifier[ident] identifier[funcnames] = identifier[set] ([ identifier[ident] ]) identifier[arg_exprs] =[] keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[args] : identifier[subexprs] , identifier[subvars] , identifier[subfuncs] = identifier[arg] . identifier[translate] () identifier[varnames] . identifier[update] ( identifier[subvars] ) identifier[funcnames] . identifier[update] ( identifier[subfuncs] ) identifier[arg_exprs] . identifier[append] ( identifier[ex_call] ( identifier[ast] . identifier[Attribute] ( identifier[ex_literal] ( literal[string] ), literal[string] , identifier[ast] . identifier[Load] ()), [ identifier[ex_call] ( literal[string] , [ identifier[ex_rvalue] ( identifier[str] . identifier[__name__] ), identifier[ast] . identifier[List] ( identifier[subexprs] , identifier[ast] . identifier[Load] ()), ] )], )) identifier[subexpr_call] = identifier[ex_call] ( identifier[FUNCTION_PREFIX] + identifier[ident] , identifier[arg_exprs] ) keyword[return] [ identifier[subexpr_call] ], identifier[varnames] , identifier[funcnames]
def translate(self): """Compile the function call.""" varnames = set() ident = self.ident funcnames = set([ident]) arg_exprs = [] for arg in self.args: (subexprs, subvars, subfuncs) = arg.translate() varnames.update(subvars) funcnames.update(subfuncs) # Create a subexpression that joins the result components of # the arguments. arg_exprs.append(ex_call(ast.Attribute(ex_literal(u''), 'join', ast.Load()), [ex_call('map', [ex_rvalue(str.__name__), ast.List(subexprs, ast.Load())])])) # depends on [control=['for'], data=['arg']] subexpr_call = ex_call(FUNCTION_PREFIX + ident, arg_exprs) return ([subexpr_call], varnames, funcnames)
def doubleclick(self, window_name, object_name): """ Double click on the object @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to look for, either full name, LDTP's name convention, or a Unix glob. Or menu heirarchy @type object_name: string @return: 1 on success. @rtype: integer """ object_handle = self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) self._grabfocus(object_handle) x, y, width, height = self._getobjectsize(object_handle) window = self._get_front_most_window() # Mouse double click on the object # object_handle.doubleClick() window.doubleClickMouse((x + width / 2, y + height / 2)) return 1
def function[doubleclick, parameter[self, window_name, object_name]]: constant[ Double click on the object @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to look for, either full name, LDTP's name convention, or a Unix glob. Or menu heirarchy @type object_name: string @return: 1 on success. @rtype: integer ] variable[object_handle] assign[=] call[name[self]._get_object_handle, parameter[name[window_name], name[object_name]]] if <ast.UnaryOp object at 0x7da18dc065c0> begin[:] <ast.Raise object at 0x7da18dc07df0> call[name[self]._grabfocus, parameter[name[object_handle]]] <ast.Tuple object at 0x7da18dc04100> assign[=] call[name[self]._getobjectsize, parameter[name[object_handle]]] variable[window] assign[=] call[name[self]._get_front_most_window, parameter[]] call[name[window].doubleClickMouse, parameter[tuple[[<ast.BinOp object at 0x7da18dc04640>, <ast.BinOp object at 0x7da18dc04280>]]]] return[constant[1]]
keyword[def] identifier[doubleclick] ( identifier[self] , identifier[window_name] , identifier[object_name] ): literal[string] identifier[object_handle] = identifier[self] . identifier[_get_object_handle] ( identifier[window_name] , identifier[object_name] ) keyword[if] keyword[not] identifier[object_handle] . identifier[AXEnabled] : keyword[raise] identifier[LdtpServerException] ( literal[string] % identifier[object_name] ) identifier[self] . identifier[_grabfocus] ( identifier[object_handle] ) identifier[x] , identifier[y] , identifier[width] , identifier[height] = identifier[self] . identifier[_getobjectsize] ( identifier[object_handle] ) identifier[window] = identifier[self] . identifier[_get_front_most_window] () identifier[window] . identifier[doubleClickMouse] (( identifier[x] + identifier[width] / literal[int] , identifier[y] + identifier[height] / literal[int] )) keyword[return] literal[int]
def doubleclick(self, window_name, object_name): """ Double click on the object @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to look for, either full name, LDTP's name convention, or a Unix glob. Or menu heirarchy @type object_name: string @return: 1 on success. @rtype: integer """ object_handle = self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u'Object %s state disabled' % object_name) # depends on [control=['if'], data=[]] self._grabfocus(object_handle) (x, y, width, height) = self._getobjectsize(object_handle) window = self._get_front_most_window() # Mouse double click on the object # object_handle.doubleClick() window.doubleClickMouse((x + width / 2, y + height / 2)) return 1
def diagnostic_send(self, diagFl1, diagFl2, diagFl3, diagSh1, diagSh2, diagSh3, force_mavlink1=False): ''' Configurable diagnostic messages. diagFl1 : Diagnostic float 1 (float) diagFl2 : Diagnostic float 2 (float) diagFl3 : Diagnostic float 3 (float) diagSh1 : Diagnostic short 1 (int16_t) diagSh2 : Diagnostic short 2 (int16_t) diagSh3 : Diagnostic short 3 (int16_t) ''' return self.send(self.diagnostic_encode(diagFl1, diagFl2, diagFl3, diagSh1, diagSh2, diagSh3), force_mavlink1=force_mavlink1)
def function[diagnostic_send, parameter[self, diagFl1, diagFl2, diagFl3, diagSh1, diagSh2, diagSh3, force_mavlink1]]: constant[ Configurable diagnostic messages. diagFl1 : Diagnostic float 1 (float) diagFl2 : Diagnostic float 2 (float) diagFl3 : Diagnostic float 3 (float) diagSh1 : Diagnostic short 1 (int16_t) diagSh2 : Diagnostic short 2 (int16_t) diagSh3 : Diagnostic short 3 (int16_t) ] return[call[name[self].send, parameter[call[name[self].diagnostic_encode, parameter[name[diagFl1], name[diagFl2], name[diagFl3], name[diagSh1], name[diagSh2], name[diagSh3]]]]]]
keyword[def] identifier[diagnostic_send] ( identifier[self] , identifier[diagFl1] , identifier[diagFl2] , identifier[diagFl3] , identifier[diagSh1] , identifier[diagSh2] , identifier[diagSh3] , identifier[force_mavlink1] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[diagnostic_encode] ( identifier[diagFl1] , identifier[diagFl2] , identifier[diagFl3] , identifier[diagSh1] , identifier[diagSh2] , identifier[diagSh3] ), identifier[force_mavlink1] = identifier[force_mavlink1] )
def diagnostic_send(self, diagFl1, diagFl2, diagFl3, diagSh1, diagSh2, diagSh3, force_mavlink1=False): """ Configurable diagnostic messages. diagFl1 : Diagnostic float 1 (float) diagFl2 : Diagnostic float 2 (float) diagFl3 : Diagnostic float 3 (float) diagSh1 : Diagnostic short 1 (int16_t) diagSh2 : Diagnostic short 2 (int16_t) diagSh3 : Diagnostic short 3 (int16_t) """ return self.send(self.diagnostic_encode(diagFl1, diagFl2, diagFl3, diagSh1, diagSh2, diagSh3), force_mavlink1=force_mavlink1)
def from_file(path): """ Crawls articles from the urls and extracts relevant information. :param path: path to file containing urls (each line contains one URL) :return: A dict containing given URLs as keys, and extracted information as corresponding values. """ with open(path) as f: content = f.readlines() content = [x.strip() for x in content] urls = list(filter(None, content)) return NewsPlease.from_urls(urls)
def function[from_file, parameter[path]]: constant[ Crawls articles from the urls and extracts relevant information. :param path: path to file containing urls (each line contains one URL) :return: A dict containing given URLs as keys, and extracted information as corresponding values. ] with call[name[open], parameter[name[path]]] begin[:] variable[content] assign[=] call[name[f].readlines, parameter[]] variable[content] assign[=] <ast.ListComp object at 0x7da18f810970> variable[urls] assign[=] call[name[list], parameter[call[name[filter], parameter[constant[None], name[content]]]]] return[call[name[NewsPlease].from_urls, parameter[name[urls]]]]
keyword[def] identifier[from_file] ( identifier[path] ): literal[string] keyword[with] identifier[open] ( identifier[path] ) keyword[as] identifier[f] : identifier[content] = identifier[f] . identifier[readlines] () identifier[content] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[content] ] identifier[urls] = identifier[list] ( identifier[filter] ( keyword[None] , identifier[content] )) keyword[return] identifier[NewsPlease] . identifier[from_urls] ( identifier[urls] )
def from_file(path): """ Crawls articles from the urls and extracts relevant information. :param path: path to file containing urls (each line contains one URL) :return: A dict containing given URLs as keys, and extracted information as corresponding values. """ with open(path) as f: content = f.readlines() # depends on [control=['with'], data=['f']] content = [x.strip() for x in content] urls = list(filter(None, content)) return NewsPlease.from_urls(urls)
def detectSmartphone(self): """Return detection of a general smartphone device Checks to see whether the device is *any* 'smartphone'. Note: It's better to use DetectTierIphone() for modern touchscreen devices. """ return self.detectTierIphone() \ or self.detectS60OssBrowser() \ or self.detectSymbianOS() \ or self.detectWindowsMobile() \ or self.detectBlackBerry() \ or self.detectMeegoPhone() \ or self.detectPalmWebOS()
def function[detectSmartphone, parameter[self]]: constant[Return detection of a general smartphone device Checks to see whether the device is *any* 'smartphone'. Note: It's better to use DetectTierIphone() for modern touchscreen devices. ] return[<ast.BoolOp object at 0x7da1b0aa6cb0>]
keyword[def] identifier[detectSmartphone] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[detectTierIphone] () keyword[or] identifier[self] . identifier[detectS60OssBrowser] () keyword[or] identifier[self] . identifier[detectSymbianOS] () keyword[or] identifier[self] . identifier[detectWindowsMobile] () keyword[or] identifier[self] . identifier[detectBlackBerry] () keyword[or] identifier[self] . identifier[detectMeegoPhone] () keyword[or] identifier[self] . identifier[detectPalmWebOS] ()
def detectSmartphone(self): """Return detection of a general smartphone device Checks to see whether the device is *any* 'smartphone'. Note: It's better to use DetectTierIphone() for modern touchscreen devices. """ return self.detectTierIphone() or self.detectS60OssBrowser() or self.detectSymbianOS() or self.detectWindowsMobile() or self.detectBlackBerry() or self.detectMeegoPhone() or self.detectPalmWebOS()
def str2long(s): """Convert a string to a long integer.""" if type(s) not in (types.StringType, types.UnicodeType): raise ValueError('the input must be a string') l = 0 for i in s: l <<= 8 l |= ord(i) return l
def function[str2long, parameter[s]]: constant[Convert a string to a long integer.] if compare[call[name[type], parameter[name[s]]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Attribute object at 0x7da20c7957e0>, <ast.Attribute object at 0x7da1b0a678e0>]]] begin[:] <ast.Raise object at 0x7da1b0a67fa0> variable[l] assign[=] constant[0] for taget[name[i]] in starred[name[s]] begin[:] <ast.AugAssign object at 0x7da18dc99c00> <ast.AugAssign object at 0x7da18dc9a5f0> return[name[l]]
keyword[def] identifier[str2long] ( identifier[s] ): literal[string] keyword[if] identifier[type] ( identifier[s] ) keyword[not] keyword[in] ( identifier[types] . identifier[StringType] , identifier[types] . identifier[UnicodeType] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[l] = literal[int] keyword[for] identifier[i] keyword[in] identifier[s] : identifier[l] <<= literal[int] identifier[l] |= identifier[ord] ( identifier[i] ) keyword[return] identifier[l]
def str2long(s): """Convert a string to a long integer.""" if type(s) not in (types.StringType, types.UnicodeType): raise ValueError('the input must be a string') # depends on [control=['if'], data=[]] l = 0 for i in s: l <<= 8 l |= ord(i) # depends on [control=['for'], data=['i']] return l
def dependency_graph(self): r""" Returns a NetworkX graph object of the dependencies See Also -------- dependency_list dependency_map Notes ----- To visualize the dependencies, the following NetworkX function and settings is helpful: nx.draw_spectral(d, arrowsize=50, font_size=32, with_labels=True, node_size=2000, width=3.0, edge_color='lightgrey', font_weight='bold') """ dtree = nx.DiGraph() for propname in self.keys(): dtree.add_node(propname) for dependency in self[propname].values(): if dependency in list(self.keys()): dtree.add_edge(dependency, propname) return dtree
def function[dependency_graph, parameter[self]]: constant[ Returns a NetworkX graph object of the dependencies See Also -------- dependency_list dependency_map Notes ----- To visualize the dependencies, the following NetworkX function and settings is helpful: nx.draw_spectral(d, arrowsize=50, font_size=32, with_labels=True, node_size=2000, width=3.0, edge_color='lightgrey', font_weight='bold') ] variable[dtree] assign[=] call[name[nx].DiGraph, parameter[]] for taget[name[propname]] in starred[call[name[self].keys, parameter[]]] begin[:] call[name[dtree].add_node, parameter[name[propname]]] for taget[name[dependency]] in starred[call[call[name[self]][name[propname]].values, parameter[]]] begin[:] if compare[name[dependency] in call[name[list], parameter[call[name[self].keys, parameter[]]]]] begin[:] call[name[dtree].add_edge, parameter[name[dependency], name[propname]]] return[name[dtree]]
keyword[def] identifier[dependency_graph] ( identifier[self] ): literal[string] identifier[dtree] = identifier[nx] . identifier[DiGraph] () keyword[for] identifier[propname] keyword[in] identifier[self] . identifier[keys] (): identifier[dtree] . identifier[add_node] ( identifier[propname] ) keyword[for] identifier[dependency] keyword[in] identifier[self] [ identifier[propname] ]. identifier[values] (): keyword[if] identifier[dependency] keyword[in] identifier[list] ( identifier[self] . identifier[keys] ()): identifier[dtree] . identifier[add_edge] ( identifier[dependency] , identifier[propname] ) keyword[return] identifier[dtree]
def dependency_graph(self): """ Returns a NetworkX graph object of the dependencies See Also -------- dependency_list dependency_map Notes ----- To visualize the dependencies, the following NetworkX function and settings is helpful: nx.draw_spectral(d, arrowsize=50, font_size=32, with_labels=True, node_size=2000, width=3.0, edge_color='lightgrey', font_weight='bold') """ dtree = nx.DiGraph() for propname in self.keys(): dtree.add_node(propname) for dependency in self[propname].values(): if dependency in list(self.keys()): dtree.add_edge(dependency, propname) # depends on [control=['if'], data=['dependency']] # depends on [control=['for'], data=['dependency']] # depends on [control=['for'], data=['propname']] return dtree
def findAllPrevious(self, name=None, attrs={}, text=None, limit=None, **kwargs): """Returns all items that match the given criteria and appear before this Tag in the document.""" return self._findAll(name, attrs, text, limit, self.previousGenerator, **kwargs)
def function[findAllPrevious, parameter[self, name, attrs, text, limit]]: constant[Returns all items that match the given criteria and appear before this Tag in the document.] return[call[name[self]._findAll, parameter[name[name], name[attrs], name[text], name[limit], name[self].previousGenerator]]]
keyword[def] identifier[findAllPrevious] ( identifier[self] , identifier[name] = keyword[None] , identifier[attrs] ={}, identifier[text] = keyword[None] , identifier[limit] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_findAll] ( identifier[name] , identifier[attrs] , identifier[text] , identifier[limit] , identifier[self] . identifier[previousGenerator] , ** identifier[kwargs] )
def findAllPrevious(self, name=None, attrs={}, text=None, limit=None, **kwargs): """Returns all items that match the given criteria and appear before this Tag in the document.""" return self._findAll(name, attrs, text, limit, self.previousGenerator, **kwargs)
def get_auth_from_url(url): """Given a url with authentication components, extract them into a tuple of username,password. :rtype: (str,str) """ parsed = urlparse(url) try: auth = (unquote(parsed.username), unquote(parsed.password)) except (AttributeError, TypeError): auth = ('', '') return auth
def function[get_auth_from_url, parameter[url]]: constant[Given a url with authentication components, extract them into a tuple of username,password. :rtype: (str,str) ] variable[parsed] assign[=] call[name[urlparse], parameter[name[url]]] <ast.Try object at 0x7da1b1ea1b70> return[name[auth]]
keyword[def] identifier[get_auth_from_url] ( identifier[url] ): literal[string] identifier[parsed] = identifier[urlparse] ( identifier[url] ) keyword[try] : identifier[auth] =( identifier[unquote] ( identifier[parsed] . identifier[username] ), identifier[unquote] ( identifier[parsed] . identifier[password] )) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): identifier[auth] =( literal[string] , literal[string] ) keyword[return] identifier[auth]
def get_auth_from_url(url): """Given a url with authentication components, extract them into a tuple of username,password. :rtype: (str,str) """ parsed = urlparse(url) try: auth = (unquote(parsed.username), unquote(parsed.password)) # depends on [control=['try'], data=[]] except (AttributeError, TypeError): auth = ('', '') # depends on [control=['except'], data=[]] return auth
def write(self, f, time_start, time_stop, start, stop, step, samples, pwr_array): """Write data to file-like object""" f.write(self.magic) f.write(self.header_struct.pack( self.version, time_start, time_stop, start, stop, step, samples, pwr_array.nbytes )) #pwr_array.tofile(f) f.write(pwr_array.tobytes()) f.flush()
def function[write, parameter[self, f, time_start, time_stop, start, stop, step, samples, pwr_array]]: constant[Write data to file-like object] call[name[f].write, parameter[name[self].magic]] call[name[f].write, parameter[call[name[self].header_struct.pack, parameter[name[self].version, name[time_start], name[time_stop], name[start], name[stop], name[step], name[samples], name[pwr_array].nbytes]]]] call[name[f].write, parameter[call[name[pwr_array].tobytes, parameter[]]]] call[name[f].flush, parameter[]]
keyword[def] identifier[write] ( identifier[self] , identifier[f] , identifier[time_start] , identifier[time_stop] , identifier[start] , identifier[stop] , identifier[step] , identifier[samples] , identifier[pwr_array] ): literal[string] identifier[f] . identifier[write] ( identifier[self] . identifier[magic] ) identifier[f] . identifier[write] ( identifier[self] . identifier[header_struct] . identifier[pack] ( identifier[self] . identifier[version] , identifier[time_start] , identifier[time_stop] , identifier[start] , identifier[stop] , identifier[step] , identifier[samples] , identifier[pwr_array] . identifier[nbytes] )) identifier[f] . identifier[write] ( identifier[pwr_array] . identifier[tobytes] ()) identifier[f] . identifier[flush] ()
def write(self, f, time_start, time_stop, start, stop, step, samples, pwr_array): """Write data to file-like object""" f.write(self.magic) f.write(self.header_struct.pack(self.version, time_start, time_stop, start, stop, step, samples, pwr_array.nbytes)) #pwr_array.tofile(f) f.write(pwr_array.tobytes()) f.flush()
def _encode(cls, lits, weights=None, bound=1, top_id=None, encoding=EncType.best, comparator='<'): """ This is the method that wraps the encoder of PyPBLib. Although the method can be invoked directly, a user is expected to call one of the following methods instead: :meth:`atmost`, :meth:`atleast`, or :meth:`equals`. The list of literals can contain either integers or pairs ``(l, w)``, where ``l`` is an integer literal and ``w`` is an integer weight. The latter can be done only if no ``weights`` are specified separately. :param lits: a list of literals in the sum. :param weights: a list of weights :param bound: the value of bound :math:`k`. :param top_id: top variable identifier used so far. :param encoding: identifier of the encoding to use. :param comparator: identifier of the comparison operator :type lits: iterable(int) :type weights: iterable(int) :type bound: int :type top_id: integer or None :type encoding: integer :type comparator: str :rtype: :class:`pysat.formula.CNF` """ if encoding < 0 or encoding > 5: raise(NoSuchEncodingError(encoding)) assert lits, 'No literals are provided.' # preparing weighted literals if weights: assert len(lits) == len(weights), 'Same number of literals and weights is expected.' wlits = [pblib.WeightedLit(l, w) for l, w in zip(lits, weights)] else: if all(map(lambda lw: (type(lw) in (list, tuple)) and len(lw) == 2, lits)): # literals are already weighted wlits = [pblib.WeightedLit(*wl) for wl in lits] lits = zip(*lits)[0] # unweighted literals for getting top_id elif all(map(lambda l: type(l) is int, lits)): # no weights are provided => all weights are units wlits = [pblib.WeightedLit(l, 1) for l in lits] else: assert 0, 'Incorrect literals given.' 
if not top_id: top_id = max(map(lambda x: abs(x), lits)) # pseudo-Boolean constraint and variable manager constr = pblib.PBConstraint(wlits, EncType._to_pbcmp[comparator], bound) varmgr = pblib.AuxVarManager(top_id + 1) # encoder configuration config = pblib.PBConfig() config.set_PB_Encoder(EncType._to_pbenc[encoding]) # encoding result = pblib.VectorClauseDatabase(config) pb2cnf = pblib.Pb2cnf(config) pb2cnf.encode(constr, result, varmgr) # extracting clauses return CNF(from_clauses=result.get_clauses())
def function[_encode, parameter[cls, lits, weights, bound, top_id, encoding, comparator]]: constant[ This is the method that wraps the encoder of PyPBLib. Although the method can be invoked directly, a user is expected to call one of the following methods instead: :meth:`atmost`, :meth:`atleast`, or :meth:`equals`. The list of literals can contain either integers or pairs ``(l, w)``, where ``l`` is an integer literal and ``w`` is an integer weight. The latter can be done only if no ``weights`` are specified separately. :param lits: a list of literals in the sum. :param weights: a list of weights :param bound: the value of bound :math:`k`. :param top_id: top variable identifier used so far. :param encoding: identifier of the encoding to use. :param comparator: identifier of the comparison operator :type lits: iterable(int) :type weights: iterable(int) :type bound: int :type top_id: integer or None :type encoding: integer :type comparator: str :rtype: :class:`pysat.formula.CNF` ] if <ast.BoolOp object at 0x7da1b112a200> begin[:] <ast.Raise object at 0x7da1b112ae90> assert[name[lits]] if name[weights] begin[:] assert[compare[call[name[len], parameter[name[lits]]] equal[==] call[name[len], parameter[name[weights]]]]] variable[wlits] assign[=] <ast.ListComp object at 0x7da1b112a1d0> if <ast.UnaryOp object at 0x7da1b11520e0> begin[:] variable[top_id] assign[=] call[name[max], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b1151b10>, name[lits]]]]] variable[constr] assign[=] call[name[pblib].PBConstraint, parameter[name[wlits], call[name[EncType]._to_pbcmp][name[comparator]], name[bound]]] variable[varmgr] assign[=] call[name[pblib].AuxVarManager, parameter[binary_operation[name[top_id] + constant[1]]]] variable[config] assign[=] call[name[pblib].PBConfig, parameter[]] call[name[config].set_PB_Encoder, parameter[call[name[EncType]._to_pbenc][name[encoding]]]] variable[result] assign[=] call[name[pblib].VectorClauseDatabase, parameter[name[config]]] 
variable[pb2cnf] assign[=] call[name[pblib].Pb2cnf, parameter[name[config]]] call[name[pb2cnf].encode, parameter[name[constr], name[result], name[varmgr]]] return[call[name[CNF], parameter[]]]
keyword[def] identifier[_encode] ( identifier[cls] , identifier[lits] , identifier[weights] = keyword[None] , identifier[bound] = literal[int] , identifier[top_id] = keyword[None] , identifier[encoding] = identifier[EncType] . identifier[best] , identifier[comparator] = literal[string] ): literal[string] keyword[if] identifier[encoding] < literal[int] keyword[or] identifier[encoding] > literal[int] : keyword[raise] ( identifier[NoSuchEncodingError] ( identifier[encoding] )) keyword[assert] identifier[lits] , literal[string] keyword[if] identifier[weights] : keyword[assert] identifier[len] ( identifier[lits] )== identifier[len] ( identifier[weights] ), literal[string] identifier[wlits] =[ identifier[pblib] . identifier[WeightedLit] ( identifier[l] , identifier[w] ) keyword[for] identifier[l] , identifier[w] keyword[in] identifier[zip] ( identifier[lits] , identifier[weights] )] keyword[else] : keyword[if] identifier[all] ( identifier[map] ( keyword[lambda] identifier[lw] :( identifier[type] ( identifier[lw] ) keyword[in] ( identifier[list] , identifier[tuple] )) keyword[and] identifier[len] ( identifier[lw] )== literal[int] , identifier[lits] )): identifier[wlits] =[ identifier[pblib] . identifier[WeightedLit] (* identifier[wl] ) keyword[for] identifier[wl] keyword[in] identifier[lits] ] identifier[lits] = identifier[zip] (* identifier[lits] )[ literal[int] ] keyword[elif] identifier[all] ( identifier[map] ( keyword[lambda] identifier[l] : identifier[type] ( identifier[l] ) keyword[is] identifier[int] , identifier[lits] )): identifier[wlits] =[ identifier[pblib] . identifier[WeightedLit] ( identifier[l] , literal[int] ) keyword[for] identifier[l] keyword[in] identifier[lits] ] keyword[else] : keyword[assert] literal[int] , literal[string] keyword[if] keyword[not] identifier[top_id] : identifier[top_id] = identifier[max] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[abs] ( identifier[x] ), identifier[lits] )) identifier[constr] = identifier[pblib] . 
identifier[PBConstraint] ( identifier[wlits] , identifier[EncType] . identifier[_to_pbcmp] [ identifier[comparator] ], identifier[bound] ) identifier[varmgr] = identifier[pblib] . identifier[AuxVarManager] ( identifier[top_id] + literal[int] ) identifier[config] = identifier[pblib] . identifier[PBConfig] () identifier[config] . identifier[set_PB_Encoder] ( identifier[EncType] . identifier[_to_pbenc] [ identifier[encoding] ]) identifier[result] = identifier[pblib] . identifier[VectorClauseDatabase] ( identifier[config] ) identifier[pb2cnf] = identifier[pblib] . identifier[Pb2cnf] ( identifier[config] ) identifier[pb2cnf] . identifier[encode] ( identifier[constr] , identifier[result] , identifier[varmgr] ) keyword[return] identifier[CNF] ( identifier[from_clauses] = identifier[result] . identifier[get_clauses] ())
def _encode(cls, lits, weights=None, bound=1, top_id=None, encoding=EncType.best, comparator='<'): """ This is the method that wraps the encoder of PyPBLib. Although the method can be invoked directly, a user is expected to call one of the following methods instead: :meth:`atmost`, :meth:`atleast`, or :meth:`equals`. The list of literals can contain either integers or pairs ``(l, w)``, where ``l`` is an integer literal and ``w`` is an integer weight. The latter can be done only if no ``weights`` are specified separately. :param lits: a list of literals in the sum. :param weights: a list of weights :param bound: the value of bound :math:`k`. :param top_id: top variable identifier used so far. :param encoding: identifier of the encoding to use. :param comparator: identifier of the comparison operator :type lits: iterable(int) :type weights: iterable(int) :type bound: int :type top_id: integer or None :type encoding: integer :type comparator: str :rtype: :class:`pysat.formula.CNF` """ if encoding < 0 or encoding > 5: raise NoSuchEncodingError(encoding) # depends on [control=['if'], data=[]] assert lits, 'No literals are provided.' # preparing weighted literals if weights: assert len(lits) == len(weights), 'Same number of literals and weights is expected.' wlits = [pblib.WeightedLit(l, w) for (l, w) in zip(lits, weights)] # depends on [control=['if'], data=[]] elif all(map(lambda lw: type(lw) in (list, tuple) and len(lw) == 2, lits)): # literals are already weighted wlits = [pblib.WeightedLit(*wl) for wl in lits] lits = zip(*lits)[0] # unweighted literals for getting top_id # depends on [control=['if'], data=[]] elif all(map(lambda l: type(l) is int, lits)): # no weights are provided => all weights are units wlits = [pblib.WeightedLit(l, 1) for l in lits] # depends on [control=['if'], data=[]] else: assert 0, 'Incorrect literals given.' 
if not top_id: top_id = max(map(lambda x: abs(x), lits)) # depends on [control=['if'], data=[]] # pseudo-Boolean constraint and variable manager constr = pblib.PBConstraint(wlits, EncType._to_pbcmp[comparator], bound) varmgr = pblib.AuxVarManager(top_id + 1) # encoder configuration config = pblib.PBConfig() config.set_PB_Encoder(EncType._to_pbenc[encoding]) # encoding result = pblib.VectorClauseDatabase(config) pb2cnf = pblib.Pb2cnf(config) pb2cnf.encode(constr, result, varmgr) # extracting clauses return CNF(from_clauses=result.get_clauses())