code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def filter(resources, query):
    """Filter a list of resources according to a query expression.

    The search criteria specified in the query parameter has two parts:
      1. a VISA regular expression over a resource string.
      2. optional logical expression over attribute values
         (not implemented in this function, see below).

    .. note: The VISA regular expression syntax is not the same as the
             Python regular expression syntax. (see below)

    The regular expression is matched against the resource strings of
    resources known to the VISA Resource Manager. If the resource string
    matches the regular expression, the attribute values of the resource
    are then matched against the expression over attribute values. If the
    match is successful, the resource has met the search criteria and gets
    added to the list of resources found.

    By using the optional attribute expression, you can construct flexible
    and powerful expressions with the use of logical ANDs (&&), ORs(||),
    and NOTs (!). You can use equal (==) and unequal (!=) comparators to
    compare attributes of any type, and other inequality comparators
    (>, <, >=, <=) to compare attributes of numeric type. Use only global
    attributes in the attribute expression. Local attributes are not
    allowed in the logical expression part of the expr parameter.

        Symbol      Meaning
        ----------  ----------

        ?           Matches any one character.

        \\          Makes the character that follows it an ordinary
                    character instead of special character. For example,
                    when a question mark follows a backslash (\\?), it
                    matches the ? character instead of any one character.

        [list]      Matches any one character from the enclosed list. You
                    can use a hyphen to match a range of characters.

        [^list]     Matches any character not in the enclosed list. You
                    can use a hyphen to match a range of characters.

        *           Matches 0 or more occurrences of the preceding
                    character or expression.

        +           Matches 1 or more occurrences of the preceding
                    character or expression.

        Exp|exp     Matches either the preceding or following expression.
                    The or operator | matches the entire expression that
                    precedes or follows it and not just the character that
                    precedes or follows it. For example, VXI|GPIB means
                    (VXI)|(GPIB), not VX(I|G)PIB.

        (exp)       Grouping characters or expressions.

    :param resources: iterable of resources.
    :param query: query expression.
    """
    # The optional attribute-value part of a VISA query is written inside
    # braces; it is not supported here, so strip it and warn the caller.
    if '{' in query:
        query, _ = query.split('{')
        logger.warning('optional part of the query expression not supported. '
                       'See filter2')
    try:
        # Translate the VISA wildcard '?' (any one character) into the
        # Python regex equivalent '.'; the rest of the VISA syntax shown
        # above happens to coincide with Python regex syntax.
        query = query.replace('?', '.')
        matcher = re.compile(query, re.IGNORECASE)
    except re.error:
        # Surface a malformed pattern as the standard VISA error code.
        raise errors.VisaIOError(constants.VI_ERROR_INV_EXPR)

    # Keep only resource strings whose beginning matches the pattern
    # (re.match anchors at the start of the string only).
    return tuple(res for res in resources if matcher.match(res))
def function[filter, parameter[resources, query]]: constant[Filter a list of resources according to a query expression. The search criteria specified in the query parameter has two parts: 1. a VISA regular expression over a resource string. 2. optional logical expression over attribute values (not implemented in this function, see below). .. note: The VISA regular expression syntax is not the same as the Python regular expression syntax. (see below) The regular expression is matched against the resource strings of resources known to the VISA Resource Manager. If the resource string matches the regular expression, the attribute values of the resource are then matched against the expression over attribute values. If the match is successful, the resource has met the search criteria and gets added to the list of resources found. By using the optional attribute expression, you can construct flexible and powerful expressions with the use of logical ANDs (&&), ORs(||), and NOTs (!). You can use equal (==) and unequal (!=) comparators to compare attributes of any type, and other inequality comparators (>, <, >=, <=) to compare attributes of numeric type. Use only global attributes in the attribute expression. Local attributes are not allowed in the logical expression part of the expr parameter. Symbol Meaning ---------- ---------- ? Matches any one character. \ Makes the character that follows it an ordinary character instead of special character. For example, when a question mark follows a backslash (\?), it matches the ? character instead of any one character. [list] Matches any one character from the enclosed list. You can use a hyphen to match a range of characters. [^list] Matches any character not in the enclosed list. You can use a hyphen to match a range of characters. * Matches 0 or more occurrences of the preceding character or expression. + Matches 1 or more occurrences of the preceding character or expression. 
Exp|exp Matches either the preceding or following expression. The or operator | matches the entire expression that precedes or follows it and not just the character that precedes or follows it. For example, VXI|GPIB means (VXI)|(GPIB), not VX(I|G)PIB. (exp) Grouping characters or expressions. :param resources: iterable of resources. :param query: query expression. ] if compare[constant[{] in name[query]] begin[:] <ast.Tuple object at 0x7da2054a7b20> assign[=] call[name[query].split, parameter[constant[{]]] call[name[logger].warning, parameter[constant[optional part of the query expression not supported. See filter2]]] <ast.Try object at 0x7da2054a6c80> return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da2054a4e80>]]]
keyword[def] identifier[filter] ( identifier[resources] , identifier[query] ): literal[string] keyword[if] literal[string] keyword[in] identifier[query] : identifier[query] , identifier[_] = identifier[query] . identifier[split] ( literal[string] ) identifier[logger] . identifier[warning] ( literal[string] literal[string] ) keyword[try] : identifier[query] = identifier[query] . identifier[replace] ( literal[string] , literal[string] ) identifier[matcher] = identifier[re] . identifier[compile] ( identifier[query] , identifier[re] . identifier[IGNORECASE] ) keyword[except] identifier[re] . identifier[error] : keyword[raise] identifier[errors] . identifier[VisaIOError] ( identifier[constants] . identifier[VI_ERROR_INV_EXPR] ) keyword[return] identifier[tuple] ( identifier[res] keyword[for] identifier[res] keyword[in] identifier[resources] keyword[if] identifier[matcher] . identifier[match] ( identifier[res] ))
def filter(resources, query): """Filter a list of resources according to a query expression. The search criteria specified in the query parameter has two parts: 1. a VISA regular expression over a resource string. 2. optional logical expression over attribute values (not implemented in this function, see below). .. note: The VISA regular expression syntax is not the same as the Python regular expression syntax. (see below) The regular expression is matched against the resource strings of resources known to the VISA Resource Manager. If the resource string matches the regular expression, the attribute values of the resource are then matched against the expression over attribute values. If the match is successful, the resource has met the search criteria and gets added to the list of resources found. By using the optional attribute expression, you can construct flexible and powerful expressions with the use of logical ANDs (&&), ORs(||), and NOTs (!). You can use equal (==) and unequal (!=) comparators to compare attributes of any type, and other inequality comparators (>, <, >=, <=) to compare attributes of numeric type. Use only global attributes in the attribute expression. Local attributes are not allowed in the logical expression part of the expr parameter. Symbol Meaning ---------- ---------- ? Matches any one character. \\ Makes the character that follows it an ordinary character instead of special character. For example, when a question mark follows a backslash (\\?), it matches the ? character instead of any one character. [list] Matches any one character from the enclosed list. You can use a hyphen to match a range of characters. [^list] Matches any character not in the enclosed list. You can use a hyphen to match a range of characters. * Matches 0 or more occurrences of the preceding character or expression. + Matches 1 or more occurrences of the preceding character or expression. Exp|exp Matches either the preceding or following expression. 
The or operator | matches the entire expression that precedes or follows it and not just the character that precedes or follows it. For example, VXI|GPIB means (VXI)|(GPIB), not VX(I|G)PIB. (exp) Grouping characters or expressions. :param resources: iterable of resources. :param query: query expression. """ if '{' in query: (query, _) = query.split('{') logger.warning('optional part of the query expression not supported. See filter2') # depends on [control=['if'], data=['query']] try: query = query.replace('?', '.') matcher = re.compile(query, re.IGNORECASE) # depends on [control=['try'], data=[]] except re.error: raise errors.VisaIOError(constants.VI_ERROR_INV_EXPR) # depends on [control=['except'], data=[]] return tuple((res for res in resources if matcher.match(res)))
def multiply(self, p, e): """ multiply a point by an integer. :param p: a point :param e: an integer :returns: the result, equivalent to adding p to itself e times """ if self._order: e %= self._order if p == self._infinity or e == 0: return self._infinity e3 = 3 * e i = _leftmost_bit(e3) >> 1 result = p while i > 1: result += result if (e3 & i): v = [result, result+p] else: v = [result-p, result] result = v[0 if (e & i) else 1] i >>= 1 return result
def function[multiply, parameter[self, p, e]]: constant[ multiply a point by an integer. :param p: a point :param e: an integer :returns: the result, equivalent to adding p to itself e times ] if name[self]._order begin[:] <ast.AugAssign object at 0x7da1b1d77220> if <ast.BoolOp object at 0x7da1b1d74400> begin[:] return[name[self]._infinity] variable[e3] assign[=] binary_operation[constant[3] * name[e]] variable[i] assign[=] binary_operation[call[name[_leftmost_bit], parameter[name[e3]]] <ast.RShift object at 0x7da2590d6a40> constant[1]] variable[result] assign[=] name[p] while compare[name[i] greater[>] constant[1]] begin[:] <ast.AugAssign object at 0x7da1b1d76dd0> if binary_operation[name[e3] <ast.BitAnd object at 0x7da2590d6b60> name[i]] begin[:] variable[v] assign[=] list[[<ast.Name object at 0x7da1b1d76020>, <ast.BinOp object at 0x7da1b1d757b0>]] variable[result] assign[=] call[name[v]][<ast.IfExp object at 0x7da1b1ddc070>] <ast.AugAssign object at 0x7da1b1ddec80> return[name[result]]
keyword[def] identifier[multiply] ( identifier[self] , identifier[p] , identifier[e] ): literal[string] keyword[if] identifier[self] . identifier[_order] : identifier[e] %= identifier[self] . identifier[_order] keyword[if] identifier[p] == identifier[self] . identifier[_infinity] keyword[or] identifier[e] == literal[int] : keyword[return] identifier[self] . identifier[_infinity] identifier[e3] = literal[int] * identifier[e] identifier[i] = identifier[_leftmost_bit] ( identifier[e3] )>> literal[int] identifier[result] = identifier[p] keyword[while] identifier[i] > literal[int] : identifier[result] += identifier[result] keyword[if] ( identifier[e3] & identifier[i] ): identifier[v] =[ identifier[result] , identifier[result] + identifier[p] ] keyword[else] : identifier[v] =[ identifier[result] - identifier[p] , identifier[result] ] identifier[result] = identifier[v] [ literal[int] keyword[if] ( identifier[e] & identifier[i] ) keyword[else] literal[int] ] identifier[i] >>= literal[int] keyword[return] identifier[result]
def multiply(self, p, e): """ multiply a point by an integer. :param p: a point :param e: an integer :returns: the result, equivalent to adding p to itself e times """ if self._order: e %= self._order # depends on [control=['if'], data=[]] if p == self._infinity or e == 0: return self._infinity # depends on [control=['if'], data=[]] e3 = 3 * e i = _leftmost_bit(e3) >> 1 result = p while i > 1: result += result if e3 & i: v = [result, result + p] # depends on [control=['if'], data=[]] else: v = [result - p, result] result = v[0 if e & i else 1] i >>= 1 # depends on [control=['while'], data=['i']] return result
def fsl2antstransform(matrix, reference, moving): """ Convert an FSL linear transform to an antsrTransform ANTsR function: `fsl2antsrtransform` Arguments --------- matrix : ndarray/list 4x4 matrix of transform parameters reference : ANTsImage target image moving : ANTsImage moving image Returns ------- ANTsTransform Examples -------- >>> import ants >>> import numpy as np >>> fslmat = np.zeros((4,4)) >>> np.fill_diagonal(fslmat, 1) >>> img = ants.image_read(ants.get_ants_data('ch2')) >>> tx = ants.fsl2antstransform(fslmat, img, img) """ if reference.dimension != 3: raise ValueError('reference image must be 3 dimensions') if reference.pixeltype != 'float': reference = reference.clone('float') if moving.pixeltype != 'float': moving = moving.clone('float') libfn = utils.get_lib_fn('fsl2antstransformF3') tx_ptr = libfn(list(matrix), reference.pointer, moving.pointer, 1) return tio.ANTsTransform(precision='float', dimension=reference.dimension, transform_type='AffineTransform', pointer=tx_ptr)
def function[fsl2antstransform, parameter[matrix, reference, moving]]: constant[ Convert an FSL linear transform to an antsrTransform ANTsR function: `fsl2antsrtransform` Arguments --------- matrix : ndarray/list 4x4 matrix of transform parameters reference : ANTsImage target image moving : ANTsImage moving image Returns ------- ANTsTransform Examples -------- >>> import ants >>> import numpy as np >>> fslmat = np.zeros((4,4)) >>> np.fill_diagonal(fslmat, 1) >>> img = ants.image_read(ants.get_ants_data('ch2')) >>> tx = ants.fsl2antstransform(fslmat, img, img) ] if compare[name[reference].dimension not_equal[!=] constant[3]] begin[:] <ast.Raise object at 0x7da1b1631870> if compare[name[reference].pixeltype not_equal[!=] constant[float]] begin[:] variable[reference] assign[=] call[name[reference].clone, parameter[constant[float]]] if compare[name[moving].pixeltype not_equal[!=] constant[float]] begin[:] variable[moving] assign[=] call[name[moving].clone, parameter[constant[float]]] variable[libfn] assign[=] call[name[utils].get_lib_fn, parameter[constant[fsl2antstransformF3]]] variable[tx_ptr] assign[=] call[name[libfn], parameter[call[name[list], parameter[name[matrix]]], name[reference].pointer, name[moving].pointer, constant[1]]] return[call[name[tio].ANTsTransform, parameter[]]]
keyword[def] identifier[fsl2antstransform] ( identifier[matrix] , identifier[reference] , identifier[moving] ): literal[string] keyword[if] identifier[reference] . identifier[dimension] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[reference] . identifier[pixeltype] != literal[string] : identifier[reference] = identifier[reference] . identifier[clone] ( literal[string] ) keyword[if] identifier[moving] . identifier[pixeltype] != literal[string] : identifier[moving] = identifier[moving] . identifier[clone] ( literal[string] ) identifier[libfn] = identifier[utils] . identifier[get_lib_fn] ( literal[string] ) identifier[tx_ptr] = identifier[libfn] ( identifier[list] ( identifier[matrix] ), identifier[reference] . identifier[pointer] , identifier[moving] . identifier[pointer] , literal[int] ) keyword[return] identifier[tio] . identifier[ANTsTransform] ( identifier[precision] = literal[string] , identifier[dimension] = identifier[reference] . identifier[dimension] , identifier[transform_type] = literal[string] , identifier[pointer] = identifier[tx_ptr] )
def fsl2antstransform(matrix, reference, moving): """ Convert an FSL linear transform to an antsrTransform ANTsR function: `fsl2antsrtransform` Arguments --------- matrix : ndarray/list 4x4 matrix of transform parameters reference : ANTsImage target image moving : ANTsImage moving image Returns ------- ANTsTransform Examples -------- >>> import ants >>> import numpy as np >>> fslmat = np.zeros((4,4)) >>> np.fill_diagonal(fslmat, 1) >>> img = ants.image_read(ants.get_ants_data('ch2')) >>> tx = ants.fsl2antstransform(fslmat, img, img) """ if reference.dimension != 3: raise ValueError('reference image must be 3 dimensions') # depends on [control=['if'], data=[]] if reference.pixeltype != 'float': reference = reference.clone('float') # depends on [control=['if'], data=[]] if moving.pixeltype != 'float': moving = moving.clone('float') # depends on [control=['if'], data=[]] libfn = utils.get_lib_fn('fsl2antstransformF3') tx_ptr = libfn(list(matrix), reference.pointer, moving.pointer, 1) return tio.ANTsTransform(precision='float', dimension=reference.dimension, transform_type='AffineTransform', pointer=tx_ptr)
def new_page(self, page_number, new_chapter, **kwargs): """Called by :meth:`render` with the :class:`Chain`s that need more :class:`Container`s. This method should create a new :class:`Page` which contains a container associated with `chain`.""" right_template = self.document.get_page_template(self, 'right') left_template = self.document.get_page_template(self, 'left') page_template = right_template if page_number % 2 else left_template return page_template.page(self, page_number, self.chain, new_chapter, **kwargs)
def function[new_page, parameter[self, page_number, new_chapter]]: constant[Called by :meth:`render` with the :class:`Chain`s that need more :class:`Container`s. This method should create a new :class:`Page` which contains a container associated with `chain`.] variable[right_template] assign[=] call[name[self].document.get_page_template, parameter[name[self], constant[right]]] variable[left_template] assign[=] call[name[self].document.get_page_template, parameter[name[self], constant[left]]] variable[page_template] assign[=] <ast.IfExp object at 0x7da204345270> return[call[name[page_template].page, parameter[name[self], name[page_number], name[self].chain, name[new_chapter]]]]
keyword[def] identifier[new_page] ( identifier[self] , identifier[page_number] , identifier[new_chapter] ,** identifier[kwargs] ): literal[string] identifier[right_template] = identifier[self] . identifier[document] . identifier[get_page_template] ( identifier[self] , literal[string] ) identifier[left_template] = identifier[self] . identifier[document] . identifier[get_page_template] ( identifier[self] , literal[string] ) identifier[page_template] = identifier[right_template] keyword[if] identifier[page_number] % literal[int] keyword[else] identifier[left_template] keyword[return] identifier[page_template] . identifier[page] ( identifier[self] , identifier[page_number] , identifier[self] . identifier[chain] , identifier[new_chapter] , ** identifier[kwargs] )
def new_page(self, page_number, new_chapter, **kwargs): """Called by :meth:`render` with the :class:`Chain`s that need more :class:`Container`s. This method should create a new :class:`Page` which contains a container associated with `chain`.""" right_template = self.document.get_page_template(self, 'right') left_template = self.document.get_page_template(self, 'left') page_template = right_template if page_number % 2 else left_template return page_template.page(self, page_number, self.chain, new_chapter, **kwargs)
def normalize_ip(ip): """ Transform the address into a fixed-length form, such as:: 192.168.0.1 -> 192.168.000.001 :type ip: string :param ip: An IP address. :rtype: string :return: The normalized IP. """ theip = ip.split('.') if len(theip) != 4: raise ValueError('ip should be 4 tuples') return '.'.join(str(int(l)).rjust(3, '0') for l in theip)
def function[normalize_ip, parameter[ip]]: constant[ Transform the address into a fixed-length form, such as:: 192.168.0.1 -> 192.168.000.001 :type ip: string :param ip: An IP address. :rtype: string :return: The normalized IP. ] variable[theip] assign[=] call[name[ip].split, parameter[constant[.]]] if compare[call[name[len], parameter[name[theip]]] not_equal[!=] constant[4]] begin[:] <ast.Raise object at 0x7da1b0677610> return[call[constant[.].join, parameter[<ast.GeneratorExp object at 0x7da1b0677d00>]]]
keyword[def] identifier[normalize_ip] ( identifier[ip] ): literal[string] identifier[theip] = identifier[ip] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[theip] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] literal[string] . identifier[join] ( identifier[str] ( identifier[int] ( identifier[l] )). identifier[rjust] ( literal[int] , literal[string] ) keyword[for] identifier[l] keyword[in] identifier[theip] )
def normalize_ip(ip): """ Transform the address into a fixed-length form, such as:: 192.168.0.1 -> 192.168.000.001 :type ip: string :param ip: An IP address. :rtype: string :return: The normalized IP. """ theip = ip.split('.') if len(theip) != 4: raise ValueError('ip should be 4 tuples') # depends on [control=['if'], data=[]] return '.'.join((str(int(l)).rjust(3, '0') for l in theip))
def _collected_label(collect, label): """Label of a collected column.""" if not collect.__name__.startswith('<'): return label + ' ' + collect.__name__ else: return label
def function[_collected_label, parameter[collect, label]]: constant[Label of a collected column.] if <ast.UnaryOp object at 0x7da1b0860520> begin[:] return[binary_operation[binary_operation[name[label] + constant[ ]] + name[collect].__name__]]
keyword[def] identifier[_collected_label] ( identifier[collect] , identifier[label] ): literal[string] keyword[if] keyword[not] identifier[collect] . identifier[__name__] . identifier[startswith] ( literal[string] ): keyword[return] identifier[label] + literal[string] + identifier[collect] . identifier[__name__] keyword[else] : keyword[return] identifier[label]
def _collected_label(collect, label): """Label of a collected column.""" if not collect.__name__.startswith('<'): return label + ' ' + collect.__name__ # depends on [control=['if'], data=[]] else: return label
def map_blocks(data, f, blen=None, storage=None, create='array', **kwargs): """Apply function `f` block-wise over `data`.""" # setup storage = _util.get_storage(storage) if isinstance(data, tuple): blen = max(_util.get_blen_array(d, blen) for d in data) else: blen = _util.get_blen_array(data, blen) if isinstance(data, tuple): _util.check_equal_length(*data) length = len(data[0]) else: length = len(data) # block-wise iteration out = None for i in range(0, length, blen): j = min(i+blen, length) # obtain blocks if isinstance(data, tuple): blocks = [d[i:j] for d in data] else: blocks = [data[i:j]] # map res = f(*blocks) # store if out is None: out = getattr(storage, create)(res, expectedlen=length, **kwargs) else: out.append(res) return out
def function[map_blocks, parameter[data, f, blen, storage, create]]: constant[Apply function `f` block-wise over `data`.] variable[storage] assign[=] call[name[_util].get_storage, parameter[name[storage]]] if call[name[isinstance], parameter[name[data], name[tuple]]] begin[:] variable[blen] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da18ede5210>]] if call[name[isinstance], parameter[name[data], name[tuple]]] begin[:] call[name[_util].check_equal_length, parameter[<ast.Starred object at 0x7da18bc71cc0>]] variable[length] assign[=] call[name[len], parameter[call[name[data]][constant[0]]]] variable[out] assign[=] constant[None] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[length], name[blen]]]] begin[:] variable[j] assign[=] call[name[min], parameter[binary_operation[name[i] + name[blen]], name[length]]] if call[name[isinstance], parameter[name[data], name[tuple]]] begin[:] variable[blocks] assign[=] <ast.ListComp object at 0x7da18bc726e0> variable[res] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da18bc718d0>]] if compare[name[out] is constant[None]] begin[:] variable[out] assign[=] call[call[name[getattr], parameter[name[storage], name[create]]], parameter[name[res]]] return[name[out]]
keyword[def] identifier[map_blocks] ( identifier[data] , identifier[f] , identifier[blen] = keyword[None] , identifier[storage] = keyword[None] , identifier[create] = literal[string] ,** identifier[kwargs] ): literal[string] identifier[storage] = identifier[_util] . identifier[get_storage] ( identifier[storage] ) keyword[if] identifier[isinstance] ( identifier[data] , identifier[tuple] ): identifier[blen] = identifier[max] ( identifier[_util] . identifier[get_blen_array] ( identifier[d] , identifier[blen] ) keyword[for] identifier[d] keyword[in] identifier[data] ) keyword[else] : identifier[blen] = identifier[_util] . identifier[get_blen_array] ( identifier[data] , identifier[blen] ) keyword[if] identifier[isinstance] ( identifier[data] , identifier[tuple] ): identifier[_util] . identifier[check_equal_length] (* identifier[data] ) identifier[length] = identifier[len] ( identifier[data] [ literal[int] ]) keyword[else] : identifier[length] = identifier[len] ( identifier[data] ) identifier[out] = keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] , identifier[blen] ): identifier[j] = identifier[min] ( identifier[i] + identifier[blen] , identifier[length] ) keyword[if] identifier[isinstance] ( identifier[data] , identifier[tuple] ): identifier[blocks] =[ identifier[d] [ identifier[i] : identifier[j] ] keyword[for] identifier[d] keyword[in] identifier[data] ] keyword[else] : identifier[blocks] =[ identifier[data] [ identifier[i] : identifier[j] ]] identifier[res] = identifier[f] (* identifier[blocks] ) keyword[if] identifier[out] keyword[is] keyword[None] : identifier[out] = identifier[getattr] ( identifier[storage] , identifier[create] )( identifier[res] , identifier[expectedlen] = identifier[length] ,** identifier[kwargs] ) keyword[else] : identifier[out] . identifier[append] ( identifier[res] ) keyword[return] identifier[out]
def map_blocks(data, f, blen=None, storage=None, create='array', **kwargs): """Apply function `f` block-wise over `data`.""" # setup storage = _util.get_storage(storage) if isinstance(data, tuple): blen = max((_util.get_blen_array(d, blen) for d in data)) # depends on [control=['if'], data=[]] else: blen = _util.get_blen_array(data, blen) if isinstance(data, tuple): _util.check_equal_length(*data) length = len(data[0]) # depends on [control=['if'], data=[]] else: length = len(data) # block-wise iteration out = None for i in range(0, length, blen): j = min(i + blen, length) # obtain blocks if isinstance(data, tuple): blocks = [d[i:j] for d in data] # depends on [control=['if'], data=[]] else: blocks = [data[i:j]] # map res = f(*blocks) # store if out is None: out = getattr(storage, create)(res, expectedlen=length, **kwargs) # depends on [control=['if'], data=['out']] else: out.append(res) # depends on [control=['for'], data=['i']] return out
def from_p12_keyfile_buffer(cls, service_account_email, file_buffer, private_key_password=None, scopes='', token_uri=oauth2client.GOOGLE_TOKEN_URI, revoke_uri=oauth2client.GOOGLE_REVOKE_URI): """Factory constructor from JSON keyfile. Args: service_account_email: string, The email associated with the service account. file_buffer: stream, A buffer that implements ``read()`` and contains the PKCS#12 key contents. private_key_password: string, (Optional) Password for PKCS#12 private key. Defaults to ``notasecret``. scopes: List or string, (Optional) Scopes to use when acquiring an access token. token_uri: string, URI for token endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: ServiceAccountCredentials, a credentials object created from the keyfile. Raises: NotImplementedError if pyOpenSSL is not installed / not the active crypto library. """ private_key_pkcs12 = file_buffer.read() return cls._from_p12_keyfile_contents( service_account_email, private_key_pkcs12, private_key_password=private_key_password, scopes=scopes, token_uri=token_uri, revoke_uri=revoke_uri)
def function[from_p12_keyfile_buffer, parameter[cls, service_account_email, file_buffer, private_key_password, scopes, token_uri, revoke_uri]]: constant[Factory constructor from JSON keyfile. Args: service_account_email: string, The email associated with the service account. file_buffer: stream, A buffer that implements ``read()`` and contains the PKCS#12 key contents. private_key_password: string, (Optional) Password for PKCS#12 private key. Defaults to ``notasecret``. scopes: List or string, (Optional) Scopes to use when acquiring an access token. token_uri: string, URI for token endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: ServiceAccountCredentials, a credentials object created from the keyfile. Raises: NotImplementedError if pyOpenSSL is not installed / not the active crypto library. ] variable[private_key_pkcs12] assign[=] call[name[file_buffer].read, parameter[]] return[call[name[cls]._from_p12_keyfile_contents, parameter[name[service_account_email], name[private_key_pkcs12]]]]
keyword[def] identifier[from_p12_keyfile_buffer] ( identifier[cls] , identifier[service_account_email] , identifier[file_buffer] , identifier[private_key_password] = keyword[None] , identifier[scopes] = literal[string] , identifier[token_uri] = identifier[oauth2client] . identifier[GOOGLE_TOKEN_URI] , identifier[revoke_uri] = identifier[oauth2client] . identifier[GOOGLE_REVOKE_URI] ): literal[string] identifier[private_key_pkcs12] = identifier[file_buffer] . identifier[read] () keyword[return] identifier[cls] . identifier[_from_p12_keyfile_contents] ( identifier[service_account_email] , identifier[private_key_pkcs12] , identifier[private_key_password] = identifier[private_key_password] , identifier[scopes] = identifier[scopes] , identifier[token_uri] = identifier[token_uri] , identifier[revoke_uri] = identifier[revoke_uri] )
def from_p12_keyfile_buffer(cls, service_account_email, file_buffer, private_key_password=None, scopes='', token_uri=oauth2client.GOOGLE_TOKEN_URI, revoke_uri=oauth2client.GOOGLE_REVOKE_URI): """Factory constructor from JSON keyfile. Args: service_account_email: string, The email associated with the service account. file_buffer: stream, A buffer that implements ``read()`` and contains the PKCS#12 key contents. private_key_password: string, (Optional) Password for PKCS#12 private key. Defaults to ``notasecret``. scopes: List or string, (Optional) Scopes to use when acquiring an access token. token_uri: string, URI for token endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: ServiceAccountCredentials, a credentials object created from the keyfile. Raises: NotImplementedError if pyOpenSSL is not installed / not the active crypto library. """ private_key_pkcs12 = file_buffer.read() return cls._from_p12_keyfile_contents(service_account_email, private_key_pkcs12, private_key_password=private_key_password, scopes=scopes, token_uri=token_uri, revoke_uri=revoke_uri)
def create(self, bundle, container_id=None, empty_process=False, log_path=None, pid_file=None, sync_socket=None, log_format="kubernetes"): ''' use the client to create a container from a bundle directory. The bundle directory should have a config.json. You must be the root user to create a runtime. Equivalent command line example: singularity oci create [create options...] <container_ID> Parameters ========== bundle: the full path to the bundle folder container_id: an optional container_id. If not provided, use same container_id used to generate OciImage instance empty_process: run container without executing container process (for example, for a pod container waiting for signals). This is a specific use case for tools like Kubernetes log_path: the path to store the log. pid_file: specify the pid file path to use sync_socket: the path to the unix socket for state synchronization. log_format: defaults to kubernetes. Can also be "basic" or "json" ''' return self._run(bundle, container_id=container_id, empty_process=empty_process, log_path=log_path, pid_file=pid_file, sync_socket=sync_socket, command="create", log_format=log_format)
def function[create, parameter[self, bundle, container_id, empty_process, log_path, pid_file, sync_socket, log_format]]: constant[ use the client to create a container from a bundle directory. The bundle directory should have a config.json. You must be the root user to create a runtime. Equivalent command line example: singularity oci create [create options...] <container_ID> Parameters ========== bundle: the full path to the bundle folder container_id: an optional container_id. If not provided, use same container_id used to generate OciImage instance empty_process: run container without executing container process (for example, for a pod container waiting for signals). This is a specific use case for tools like Kubernetes log_path: the path to store the log. pid_file: specify the pid file path to use sync_socket: the path to the unix socket for state synchronization. log_format: defaults to kubernetes. Can also be "basic" or "json" ] return[call[name[self]._run, parameter[name[bundle]]]]
keyword[def] identifier[create] ( identifier[self] , identifier[bundle] , identifier[container_id] = keyword[None] , identifier[empty_process] = keyword[False] , identifier[log_path] = keyword[None] , identifier[pid_file] = keyword[None] , identifier[sync_socket] = keyword[None] , identifier[log_format] = literal[string] ): literal[string] keyword[return] identifier[self] . identifier[_run] ( identifier[bundle] , identifier[container_id] = identifier[container_id] , identifier[empty_process] = identifier[empty_process] , identifier[log_path] = identifier[log_path] , identifier[pid_file] = identifier[pid_file] , identifier[sync_socket] = identifier[sync_socket] , identifier[command] = literal[string] , identifier[log_format] = identifier[log_format] )
def create(self, bundle, container_id=None, empty_process=False, log_path=None, pid_file=None, sync_socket=None, log_format='kubernetes'): """ use the client to create a container from a bundle directory. The bundle directory should have a config.json. You must be the root user to create a runtime. Equivalent command line example: singularity oci create [create options...] <container_ID> Parameters ========== bundle: the full path to the bundle folder container_id: an optional container_id. If not provided, use same container_id used to generate OciImage instance empty_process: run container without executing container process (for example, for a pod container waiting for signals). This is a specific use case for tools like Kubernetes log_path: the path to store the log. pid_file: specify the pid file path to use sync_socket: the path to the unix socket for state synchronization. log_format: defaults to kubernetes. Can also be "basic" or "json" """ return self._run(bundle, container_id=container_id, empty_process=empty_process, log_path=log_path, pid_file=pid_file, sync_socket=sync_socket, command='create', log_format=log_format)
def route(self, request, service):
    """ :meth:`.WWebRouteMapProto.route` method implementation

    Scans the registered routes in order and returns the first match
    whose target passes validation; returns None when nothing matches.
    """
    for candidate in self.__routes:
        match_result = candidate.match(request, service)
        if match_result is None:
            # This route did not match the request at all.
            continue
        # Only hand back the match if its target validates strictly.
        if self.target_route_valid(match_result) is True:
            return match_result
def function[route, parameter[self, request, service]]: constant[ :meth:`.WWebRouteMapProto.route` method implementation ] for taget[name[route]] in starred[name[self].__routes] begin[:] variable[result] assign[=] call[name[route].match, parameter[name[request], name[service]]] if compare[name[result] is_not constant[None]] begin[:] if compare[call[name[self].target_route_valid, parameter[name[result]]] is constant[True]] begin[:] return[name[result]]
keyword[def] identifier[route] ( identifier[self] , identifier[request] , identifier[service] ): literal[string] keyword[for] identifier[route] keyword[in] identifier[self] . identifier[__routes] : identifier[result] = identifier[route] . identifier[match] ( identifier[request] , identifier[service] ) keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[target_route_valid] ( identifier[result] ) keyword[is] keyword[True] : keyword[return] identifier[result]
def route(self, request, service): """ :meth:`.WWebRouteMapProto.route` method implementation """ for route in self.__routes: result = route.match(request, service) if result is not None: if self.target_route_valid(result) is True: return result # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['result']] # depends on [control=['for'], data=['route']]
def routeargs(path, host=None, vhost=None, method=None, **kwargs):
    """Decorator factory attaching routing metadata to a handler function.

    For extra arguments, see Dispatcher.routeargs. They must be specified
    by keyword arguments.

    :param path: route path pattern the handler is registered under
    :param host: optional host restriction
    :param vhost: optional virtual-host restriction
    :param method: list of accepted HTTP methods; defaults to [b'POST']
    :param kwargs: extra keyword arguments forwarded as route_kwargs
    :return: decorator that annotates the function and returns it unchanged
    """
    # None sentinel instead of a mutable default: with `method=[b'POST']`
    # every decorated function shared the SAME list object, so mutating
    # one handler's route_method silently changed all of them.
    if method is None:
        method = [b'POST']

    def decorator(func):
        # Attach the metadata the dispatcher reads when registering routes.
        func.routemode = 'routeargs'
        func.route_path = path
        func.route_host = host
        func.route_vhost = vhost
        func.route_method = method
        func.route_kwargs = kwargs
        return func
    return decorator
def function[routeargs, parameter[path, host, vhost, method]]: constant[For extra arguments, see Dispatcher.routeargs. They must be specified by keyword arguments] def function[decorator, parameter[func]]: name[func].routemode assign[=] constant[routeargs] name[func].route_path assign[=] name[path] name[func].route_host assign[=] name[host] name[func].route_vhost assign[=] name[vhost] name[func].route_method assign[=] name[method] name[func].route_kwargs assign[=] name[kwargs] return[name[func]] return[name[decorator]]
keyword[def] identifier[routeargs] ( identifier[path] , identifier[host] = keyword[None] , identifier[vhost] = keyword[None] , identifier[method] =[ literal[string] ],** identifier[kwargs] ): literal[string] keyword[def] identifier[decorator] ( identifier[func] ): identifier[func] . identifier[routemode] = literal[string] identifier[func] . identifier[route_path] = identifier[path] identifier[func] . identifier[route_host] = identifier[host] identifier[func] . identifier[route_vhost] = identifier[vhost] identifier[func] . identifier[route_method] = identifier[method] identifier[func] . identifier[route_kwargs] = identifier[kwargs] keyword[return] identifier[func] keyword[return] identifier[decorator]
def routeargs(path, host=None, vhost=None, method=[b'POST'], **kwargs): """For extra arguments, see Dispatcher.routeargs. They must be specified by keyword arguments""" def decorator(func): func.routemode = 'routeargs' func.route_path = path func.route_host = host func.route_vhost = vhost func.route_method = method func.route_kwargs = kwargs return func return decorator
def get_stub(self, name, arch):
    """
    Build a stub SimProcedure for ``name``, whether or not a real
    implementation exists in this library.

    The stub is the library's fallback procedure (normally a
    ``ReturnUnconstrained``) carrying the requested display name, and any
    known metadata (calling convention, prototype) is applied to it. It
    shows up in ``state.history.descriptions`` as
    ``<SimProcedure display_name (stub)>``.

    :param name: The name of the function as a string
    :param arch: The architecture to use, as either a string or an
                 archinfo.Arch instance
    :return:     A SimProcedure instance representing a plausable stub as
                 could be found in the library.
    """
    stub = self.fallback_proc(display_name=name, is_stub=True)
    self._apply_metadata(stub, arch)
    return stub
def function[get_stub, parameter[self, name, arch]]: constant[ Get a stub procedure for the given function, regardless of if a real implementation is available. This will apply any metadata, such as a default calling convention or a function prototype. By stub, we pretty much always mean a ``ReturnUnconstrained`` SimProcedure with the appropriate display name and metadata set. This will appear in ``state.history.descriptions`` as ``<SimProcedure display_name (stub)>`` :param name: The name of the function as a string :param arch: The architecture to use, as either a string or an archinfo.Arch instance :return: A SimProcedure instance representing a plausable stub as could be found in the library. ] variable[proc] assign[=] call[name[self].fallback_proc, parameter[]] call[name[self]._apply_metadata, parameter[name[proc], name[arch]]] return[name[proc]]
keyword[def] identifier[get_stub] ( identifier[self] , identifier[name] , identifier[arch] ): literal[string] identifier[proc] = identifier[self] . identifier[fallback_proc] ( identifier[display_name] = identifier[name] , identifier[is_stub] = keyword[True] ) identifier[self] . identifier[_apply_metadata] ( identifier[proc] , identifier[arch] ) keyword[return] identifier[proc]
def get_stub(self, name, arch): """ Get a stub procedure for the given function, regardless of if a real implementation is available. This will apply any metadata, such as a default calling convention or a function prototype. By stub, we pretty much always mean a ``ReturnUnconstrained`` SimProcedure with the appropriate display name and metadata set. This will appear in ``state.history.descriptions`` as ``<SimProcedure display_name (stub)>`` :param name: The name of the function as a string :param arch: The architecture to use, as either a string or an archinfo.Arch instance :return: A SimProcedure instance representing a plausable stub as could be found in the library. """ proc = self.fallback_proc(display_name=name, is_stub=True) self._apply_metadata(proc, arch) return proc
def _conditional_committors(source, sink, waypoint, tprob):
    """
    Computes the conditional committors :math:`q^{ABC^+}` which are is the
    probability of starting in one state and visiting state B before A while
    also visiting state C at some point.

    Note that in the notation of Dickson et. al. this computes
    :math:`h_c(A,B)`, with ``sources = A``, ``sinks = B``, ``waypoint = C``

    Parameters
    ----------
    waypoint : int
        The index of the intermediate state
    source : int
        The index of the source state
    sink : int
        The index of the sink state
    tprob : np.ndarray
        Transition matrix

    Returns
    -------
    cond_committors : np.ndarray
        Conditional committors, i.e. the probability of visiting
        a waypoint when on a path between source and sink.

    Notes
    -----
    Employs dense linear algebra,
      memory use scales as N^2,
      and cycle use scales as N^3

    References
    ----------
    .. [1] Dickson & Brooks (2012), J. Chem. Theory Comput., 8, 3044-3052.
    """
    n_states = np.shape(tprob)[0]

    forward_committors = _committors([source], [sink], tprob)

    # permute the transition matrix into canonical form - send waypoint to the
    # last row, and source + sink to the end after that
    Bsink_indices = [source, sink, waypoint]
    # FIX: `range` replaces the Python-2-only `xrange`; the values are only
    # iterated once here, so behavior is identical on both Python versions.
    perm = np.array([i for i in range(n_states) if i not in Bsink_indices],
                    dtype=int)
    perm = np.concatenate([perm, Bsink_indices])
    permuted_tprob = tprob[perm, :][:, perm]

    # extract P, R: transitions among transient states, and from transient
    # states into the absorbing set (source, sink, waypoint)
    n = n_states - len(Bsink_indices)
    P = permuted_tprob[:n, :n]
    R = permuted_tprob[:n, n:]

    # calculate the conditional committors ( B = N*R ), B[i,j] is the prob
    # state i ends in j, where j runs over the source + sink + waypoint
    # (waypoint is position -1)
    B = np.dot(np.linalg.inv(np.eye(n) - P), R)

    # add probs for the sinks, waypoint / b[i] is P( i --> {C & not A, B} )
    b = np.append(B[:, -1].flatten(), [0.0] * (len(Bsink_indices) - 1) + [1.0])
    cond_committors = b * forward_committors[waypoint]

    # undo the permutation to restore the original state ordering
    cond_committors = cond_committors[np.argsort(perm)]

    return cond_committors
def function[_conditional_committors, parameter[source, sink, waypoint, tprob]]: constant[ Computes the conditional committors :math:`q^{ABC^+}` which are is the probability of starting in one state and visiting state B before A while also visiting state C at some point. Note that in the notation of Dickson et. al. this computes :math:`h_c(A,B)`, with ``sources = A``, ``sinks = B``, ``waypoint = C`` Parameters ---------- waypoint : int The index of the intermediate state source : int The index of the source state sink : int The index of the sink state tprob : np.ndarray Transition matrix Returns ------- cond_committors : np.ndarray Conditional committors, i.e. the probability of visiting a waypoint when on a path between source and sink. Notes ----- Employs dense linear algebra, memory use scales as N^2, and cycle use scales as N^3 References ---------- .. [1] Dickson & Brooks (2012), J. Chem. Theory Comput., 8, 3044-3052. ] variable[n_states] assign[=] call[call[name[np].shape, parameter[name[tprob]]]][constant[0]] variable[forward_committors] assign[=] call[name[_committors], parameter[list[[<ast.Name object at 0x7da1b066aef0>]], list[[<ast.Name object at 0x7da1b06687c0>]], name[tprob]]] variable[Bsink_indices] assign[=] list[[<ast.Name object at 0x7da1b0668460>, <ast.Name object at 0x7da1b0668520>, <ast.Name object at 0x7da1b06685b0>]] variable[perm] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b066af80>]] variable[perm] assign[=] call[name[np].concatenate, parameter[list[[<ast.Name object at 0x7da1b066a110>, <ast.Name object at 0x7da1b066a800>]]]] variable[permuted_tprob] assign[=] call[call[name[tprob]][tuple[[<ast.Name object at 0x7da1b066b520>, <ast.Slice object at 0x7da1b066a620>]]]][tuple[[<ast.Slice object at 0x7da1b066a3b0>, <ast.Name object at 0x7da1b066a9b0>]]] variable[n] assign[=] binary_operation[name[n_states] - call[name[len], parameter[name[Bsink_indices]]]] variable[P] assign[=] 
call[name[permuted_tprob]][tuple[[<ast.Slice object at 0x7da1b0669540>, <ast.Slice object at 0x7da1b06694b0>]]] variable[R] assign[=] call[name[permuted_tprob]][tuple[[<ast.Slice object at 0x7da1b0669510>, <ast.Slice object at 0x7da1b0669780>]]] variable[B] assign[=] call[name[np].dot, parameter[call[name[np].linalg.inv, parameter[binary_operation[call[name[np].eye, parameter[name[n]]] - name[P]]]], name[R]]] variable[b] assign[=] call[name[np].append, parameter[call[call[name[B]][tuple[[<ast.Slice object at 0x7da1b066bca0>, <ast.UnaryOp object at 0x7da1b06688b0>]]].flatten, parameter[]], binary_operation[binary_operation[list[[<ast.Constant object at 0x7da1b0668d00>]] * binary_operation[call[name[len], parameter[name[Bsink_indices]]] - constant[1]]] + list[[<ast.Constant object at 0x7da1b0668b80>]]]]] variable[cond_committors] assign[=] binary_operation[name[b] * call[name[forward_committors]][name[waypoint]]] variable[cond_committors] assign[=] call[name[cond_committors]][call[name[np].argsort, parameter[name[perm]]]] return[name[cond_committors]]
keyword[def] identifier[_conditional_committors] ( identifier[source] , identifier[sink] , identifier[waypoint] , identifier[tprob] ): literal[string] identifier[n_states] = identifier[np] . identifier[shape] ( identifier[tprob] )[ literal[int] ] identifier[forward_committors] = identifier[_committors] ([ identifier[source] ],[ identifier[sink] ], identifier[tprob] ) identifier[Bsink_indices] =[ identifier[source] , identifier[sink] , identifier[waypoint] ] identifier[perm] = identifier[np] . identifier[array] ([ identifier[i] keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[n_states] ) keyword[if] identifier[i] keyword[not] keyword[in] identifier[Bsink_indices] ], identifier[dtype] = identifier[int] ) identifier[perm] = identifier[np] . identifier[concatenate] ([ identifier[perm] , identifier[Bsink_indices] ]) identifier[permuted_tprob] = identifier[tprob] [ identifier[perm] ,:][:, identifier[perm] ] identifier[n] = identifier[n_states] - identifier[len] ( identifier[Bsink_indices] ) identifier[P] = identifier[permuted_tprob] [: identifier[n] ,: identifier[n] ] identifier[R] = identifier[permuted_tprob] [: identifier[n] , identifier[n] :] identifier[B] = identifier[np] . identifier[dot] ( identifier[np] . identifier[linalg] . identifier[inv] ( identifier[np] . identifier[eye] ( identifier[n] )- identifier[P] ), identifier[R] ) identifier[b] = identifier[np] . identifier[append] ( identifier[B] [:,- literal[int] ]. identifier[flatten] (),[ literal[int] ]*( identifier[len] ( identifier[Bsink_indices] )- literal[int] )+[ literal[int] ]) identifier[cond_committors] = identifier[b] * identifier[forward_committors] [ identifier[waypoint] ] identifier[cond_committors] = identifier[cond_committors] [ identifier[np] . identifier[argsort] ( identifier[perm] )] keyword[return] identifier[cond_committors]
def _conditional_committors(source, sink, waypoint, tprob): """ Computes the conditional committors :math:`q^{ABC^+}` which are is the probability of starting in one state and visiting state B before A while also visiting state C at some point. Note that in the notation of Dickson et. al. this computes :math:`h_c(A,B)`, with ``sources = A``, ``sinks = B``, ``waypoint = C`` Parameters ---------- waypoint : int The index of the intermediate state source : int The index of the source state sink : int The index of the sink state tprob : np.ndarray Transition matrix Returns ------- cond_committors : np.ndarray Conditional committors, i.e. the probability of visiting a waypoint when on a path between source and sink. Notes ----- Employs dense linear algebra, memory use scales as N^2, and cycle use scales as N^3 References ---------- .. [1] Dickson & Brooks (2012), J. Chem. Theory Comput., 8, 3044-3052. """ n_states = np.shape(tprob)[0] forward_committors = _committors([source], [sink], tprob) # permute the transition matrix into cannonical form - send waypoint the the # last row, and source + sink to the end after that Bsink_indices = [source, sink, waypoint] perm = np.array([i for i in xrange(n_states) if i not in Bsink_indices], dtype=int) perm = np.concatenate([perm, Bsink_indices]) permuted_tprob = tprob[perm, :][:, perm] # extract P, R n = n_states - len(Bsink_indices) P = permuted_tprob[:n, :n] R = permuted_tprob[:n, n:] # calculate the conditional committors ( B = N*R ), B[i,j] is the prob # state i ends in j, where j runs over the source + sink + waypoint # (waypoint is position -1) B = np.dot(np.linalg.inv(np.eye(n) - P), R) # add probs for the sinks, waypoint / b[i] is P( i --> {C & not A, B} ) b = np.append(B[:, -1].flatten(), [0.0] * (len(Bsink_indices) - 1) + [1.0]) cond_committors = b * forward_committors[waypoint] # get the original order cond_committors = cond_committors[np.argsort(perm)] return cond_committors
def wait_socket(host, port, timeout=120):
    '''
    Wait for socket opened on remote side. Return False after timeout
    '''
    # Poll the socket via the shared wait helper until it reports open
    # (True) or the timeout elapses.
    def _socket_is_open():
        return check_socket(host, port)
    return wait_result(_socket_is_open, True, timeout)
def function[wait_socket, parameter[host, port, timeout]]: constant[ Wait for socket opened on remote side. Return False after timeout ] return[call[name[wait_result], parameter[<ast.Lambda object at 0x7da20e9b3490>, constant[True], name[timeout]]]]
keyword[def] identifier[wait_socket] ( identifier[host] , identifier[port] , identifier[timeout] = literal[int] ): literal[string] keyword[return] identifier[wait_result] ( keyword[lambda] : identifier[check_socket] ( identifier[host] , identifier[port] ), keyword[True] , identifier[timeout] )
def wait_socket(host, port, timeout=120): """ Wait for socket opened on remote side. Return False after timeout """ return wait_result(lambda : check_socket(host, port), True, timeout)
def proxy_urls_from_dns(local_hostname=None):
    """
    Generate URLs from which to look for a PAC file, based on a hostname.

    Fully-qualified hostnames are checked against the Mozilla Public Suffix
    List to ensure that generated URLs don't go outside the scope of the
    organization. If the fully-qualified hostname doesn't have a recognized
    TLD, such as in the case of intranets with '.local' or '.internal',
    the TLD is assumed to be the part following the rightmost dot.

    :param str local_hostname: Hostname to use for generating the WPAD URLs.
        If not provided, the local hostname is used.
    :return: PAC URLs to try in order, according to the WPAD protocol.
        If the hostname isn't qualified or is otherwise invalid,
        an empty list is returned.
    :rtype: list[str]
    """
    hostname = local_hostname or socket.getfqdn()

    # Reject hostnames that cannot be qualified: no dot, too short,
    # or with a leading/trailing dot.
    looks_invalid = (
        '.' not in hostname
        or len(hostname) < 3
        or hostname.startswith('.')
        or hostname.endswith('.')
    )
    if looks_invalid:
        return []

    try:
        # Let the PSL-backed parser split the name into subdomain + suffix.
        tld_info = get_tld('http://' + hostname, as_object=True)
        subdomain, tld = tld_info.subdomain, tld_info.fld
    except TldDomainNotFound:
        # Unrecognized TLD (e.g. intranet '.local'): fall back to splitting
        # at the rightmost dot.
        last_dot = hostname.rfind('.')
        subdomain, tld = hostname[:last_dot], hostname[last_dot + 1:]

    return wpad_search_urls(subdomain, tld)
def function[proxy_urls_from_dns, parameter[local_hostname]]: constant[ Generate URLs from which to look for a PAC file, based on a hostname. Fully-qualified hostnames are checked against the Mozilla Public Suffix List to ensure that generated URLs don't go outside the scope of the organization. If the fully-qualified hostname doesn't have a recognized TLD, such as in the case of intranets with '.local' or '.internal', the TLD is assumed to be the part following the rightmost dot. :param str local_hostname: Hostname to use for generating the WPAD URLs. If not provided, the local hostname is used. :return: PAC URLs to try in order, according to the WPAD protocol. If the hostname isn't qualified or is otherwise invalid, an empty list is returned. :rtype: list[str] ] if <ast.UnaryOp object at 0x7da18eb546a0> begin[:] variable[local_hostname] assign[=] call[name[socket].getfqdn, parameter[]] if <ast.BoolOp object at 0x7da18eb57130> begin[:] return[list[[]]] <ast.Try object at 0x7da18eb57d60> return[call[name[wpad_search_urls], parameter[name[subdomain], name[tld]]]]
keyword[def] identifier[proxy_urls_from_dns] ( identifier[local_hostname] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[local_hostname] : identifier[local_hostname] = identifier[socket] . identifier[getfqdn] () keyword[if] literal[string] keyword[not] keyword[in] identifier[local_hostname] keyword[or] identifier[len] ( identifier[local_hostname] )< literal[int] keyword[or] identifier[local_hostname] . identifier[startswith] ( literal[string] ) keyword[or] identifier[local_hostname] . identifier[endswith] ( literal[string] ): keyword[return] [] keyword[try] : identifier[parsed] = identifier[get_tld] ( literal[string] + identifier[local_hostname] , identifier[as_object] = keyword[True] ) identifier[subdomain] , identifier[tld] = identifier[parsed] . identifier[subdomain] , identifier[parsed] . identifier[fld] keyword[except] identifier[TldDomainNotFound] : identifier[final_dot_index] = identifier[local_hostname] . identifier[rfind] ( literal[string] ) identifier[subdomain] , identifier[tld] = identifier[local_hostname] [ literal[int] : identifier[final_dot_index] ], identifier[local_hostname] [ identifier[final_dot_index] + literal[int] :] keyword[return] identifier[wpad_search_urls] ( identifier[subdomain] , identifier[tld] )
def proxy_urls_from_dns(local_hostname=None): """ Generate URLs from which to look for a PAC file, based on a hostname. Fully-qualified hostnames are checked against the Mozilla Public Suffix List to ensure that generated URLs don't go outside the scope of the organization. If the fully-qualified hostname doesn't have a recognized TLD, such as in the case of intranets with '.local' or '.internal', the TLD is assumed to be the part following the rightmost dot. :param str local_hostname: Hostname to use for generating the WPAD URLs. If not provided, the local hostname is used. :return: PAC URLs to try in order, according to the WPAD protocol. If the hostname isn't qualified or is otherwise invalid, an empty list is returned. :rtype: list[str] """ if not local_hostname: local_hostname = socket.getfqdn() # depends on [control=['if'], data=[]] if '.' not in local_hostname or len(local_hostname) < 3 or local_hostname.startswith('.') or local_hostname.endswith('.'): return [] # depends on [control=['if'], data=[]] try: parsed = get_tld('http://' + local_hostname, as_object=True) (subdomain, tld) = (parsed.subdomain, parsed.fld) # depends on [control=['try'], data=[]] except TldDomainNotFound: final_dot_index = local_hostname.rfind('.') (subdomain, tld) = (local_hostname[0:final_dot_index], local_hostname[final_dot_index + 1:]) # depends on [control=['except'], data=[]] return wpad_search_urls(subdomain, tld)
def get(self, path):  # pylint: disable=W0221
    """Renders a GET request, by showing this node's stats and children.

    The path selects a subtree of the stats registry; output format is
    chosen with the ``format`` query argument ('html' by default, or
    'json' / 'prettyjson'), and an optional ``query`` argument filters
    the displayed stats.
    """
    # Normalize the path into a list of non-empty components.
    path = path or ''
    path = path.lstrip('/')
    parts = path.split('/')
    if not parts[0]:
        parts = parts[1:]

    # Walk the global stats tree down to the requested node.
    statDict = util.lookup(scales.getStats(), parts)

    if statDict is None:
        # Unknown path: respond 404 and stop rendering.
        self.set_status(404)
        self.finish('Path not found.')
        return

    outputFormat = self.get_argument('format', default='html')
    query = self.get_argument('query', default=None)
    if outputFormat == 'json':
        formats.jsonFormat(self, statDict, query)
    elif outputFormat == 'prettyjson':
        formats.jsonFormat(self, statDict, query, pretty=True)
    else:
        # Default: HTML page with a header plus the formatted stats tree.
        formats.htmlHeader(self, '/' + path, self.serverName, query)
        formats.htmlFormat(self, tuple(parts), statDict, query)

    return None
def function[get, parameter[self, path]]: constant[Renders a GET request, by showing this nodes stats and children.] variable[path] assign[=] <ast.BoolOp object at 0x7da20c993a90> variable[path] assign[=] call[name[path].lstrip, parameter[constant[/]]] variable[parts] assign[=] call[name[path].split, parameter[constant[/]]] if <ast.UnaryOp object at 0x7da20c9903d0> begin[:] variable[parts] assign[=] call[name[parts]][<ast.Slice object at 0x7da20c990f40>] variable[statDict] assign[=] call[name[util].lookup, parameter[call[name[scales].getStats, parameter[]], name[parts]]] if compare[name[statDict] is constant[None]] begin[:] call[name[self].set_status, parameter[constant[404]]] call[name[self].finish, parameter[constant[Path not found.]]] return[None] variable[outputFormat] assign[=] call[name[self].get_argument, parameter[constant[format]]] variable[query] assign[=] call[name[self].get_argument, parameter[constant[query]]] if compare[name[outputFormat] equal[==] constant[json]] begin[:] call[name[formats].jsonFormat, parameter[name[self], name[statDict], name[query]]] return[constant[None]]
keyword[def] identifier[get] ( identifier[self] , identifier[path] ): literal[string] identifier[path] = identifier[path] keyword[or] literal[string] identifier[path] = identifier[path] . identifier[lstrip] ( literal[string] ) identifier[parts] = identifier[path] . identifier[split] ( literal[string] ) keyword[if] keyword[not] identifier[parts] [ literal[int] ]: identifier[parts] = identifier[parts] [ literal[int] :] identifier[statDict] = identifier[util] . identifier[lookup] ( identifier[scales] . identifier[getStats] (), identifier[parts] ) keyword[if] identifier[statDict] keyword[is] keyword[None] : identifier[self] . identifier[set_status] ( literal[int] ) identifier[self] . identifier[finish] ( literal[string] ) keyword[return] identifier[outputFormat] = identifier[self] . identifier[get_argument] ( literal[string] , identifier[default] = literal[string] ) identifier[query] = identifier[self] . identifier[get_argument] ( literal[string] , identifier[default] = keyword[None] ) keyword[if] identifier[outputFormat] == literal[string] : identifier[formats] . identifier[jsonFormat] ( identifier[self] , identifier[statDict] , identifier[query] ) keyword[elif] identifier[outputFormat] == literal[string] : identifier[formats] . identifier[jsonFormat] ( identifier[self] , identifier[statDict] , identifier[query] , identifier[pretty] = keyword[True] ) keyword[else] : identifier[formats] . identifier[htmlHeader] ( identifier[self] , literal[string] + identifier[path] , identifier[self] . identifier[serverName] , identifier[query] ) identifier[formats] . identifier[htmlFormat] ( identifier[self] , identifier[tuple] ( identifier[parts] ), identifier[statDict] , identifier[query] ) keyword[return] keyword[None]
def get(self, path): # pylint: disable=W0221 'Renders a GET request, by showing this nodes stats and children.' path = path or '' path = path.lstrip('/') parts = path.split('/') if not parts[0]: parts = parts[1:] # depends on [control=['if'], data=[]] statDict = util.lookup(scales.getStats(), parts) if statDict is None: self.set_status(404) self.finish('Path not found.') return # depends on [control=['if'], data=[]] outputFormat = self.get_argument('format', default='html') query = self.get_argument('query', default=None) if outputFormat == 'json': formats.jsonFormat(self, statDict, query) # depends on [control=['if'], data=[]] elif outputFormat == 'prettyjson': formats.jsonFormat(self, statDict, query, pretty=True) # depends on [control=['if'], data=[]] else: formats.htmlHeader(self, '/' + path, self.serverName, query) formats.htmlFormat(self, tuple(parts), statDict, query) return None
def get_cbm_vbm(self, tol=0.001, abs_tol=False, spin=None): """ Expects a DOS object and finds the cbm and vbm. Args: tol: tolerance in occupations for determining the gap abs_tol: An absolute tolerance (True) and a relative one (False) spin: Possible values are None - finds the gap in the summed densities, Up - finds the gap in the up spin channel, Down - finds the gap in the down spin channel. Returns: (cbm, vbm): float in eV corresponding to the gap """ # determine tolerance if spin is None: tdos = self.y if len(self.ydim) == 1 else np.sum(self.y, axis=1) elif spin == Spin.up: tdos = self.y[:, 0] else: tdos = self.y[:, 1] if not abs_tol: tol = tol * tdos.sum() / tdos.shape[0] # find index of fermi energy i_fermi = 0 while self.x[i_fermi] <= self.efermi: i_fermi += 1 # work backwards until tolerance is reached i_gap_start = i_fermi while i_gap_start - 1 >= 0 and tdos[i_gap_start - 1] <= tol: i_gap_start -= 1 # work forwards until tolerance is reached i_gap_end = i_gap_start while i_gap_end < tdos.shape[0] and tdos[i_gap_end] <= tol: i_gap_end += 1 i_gap_end -= 1 return self.x[i_gap_end], self.x[i_gap_start]
def function[get_cbm_vbm, parameter[self, tol, abs_tol, spin]]: constant[ Expects a DOS object and finds the cbm and vbm. Args: tol: tolerance in occupations for determining the gap abs_tol: An absolute tolerance (True) and a relative one (False) spin: Possible values are None - finds the gap in the summed densities, Up - finds the gap in the up spin channel, Down - finds the gap in the down spin channel. Returns: (cbm, vbm): float in eV corresponding to the gap ] if compare[name[spin] is constant[None]] begin[:] variable[tdos] assign[=] <ast.IfExp object at 0x7da1b1c961a0> if <ast.UnaryOp object at 0x7da1b1c95fc0> begin[:] variable[tol] assign[=] binary_operation[binary_operation[name[tol] * call[name[tdos].sum, parameter[]]] / call[name[tdos].shape][constant[0]]] variable[i_fermi] assign[=] constant[0] while compare[call[name[self].x][name[i_fermi]] less_or_equal[<=] name[self].efermi] begin[:] <ast.AugAssign object at 0x7da1b1cd73a0> variable[i_gap_start] assign[=] name[i_fermi] while <ast.BoolOp object at 0x7da1b1cd6e00> begin[:] <ast.AugAssign object at 0x7da1b1cd7160> variable[i_gap_end] assign[=] name[i_gap_start] while <ast.BoolOp object at 0x7da1b1cd4d30> begin[:] <ast.AugAssign object at 0x7da1b1cd50f0> <ast.AugAssign object at 0x7da1b1cd71c0> return[tuple[[<ast.Subscript object at 0x7da1b1cd7e50>, <ast.Subscript object at 0x7da1b1cd7400>]]]
keyword[def] identifier[get_cbm_vbm] ( identifier[self] , identifier[tol] = literal[int] , identifier[abs_tol] = keyword[False] , identifier[spin] = keyword[None] ): literal[string] keyword[if] identifier[spin] keyword[is] keyword[None] : identifier[tdos] = identifier[self] . identifier[y] keyword[if] identifier[len] ( identifier[self] . identifier[ydim] )== literal[int] keyword[else] identifier[np] . identifier[sum] ( identifier[self] . identifier[y] , identifier[axis] = literal[int] ) keyword[elif] identifier[spin] == identifier[Spin] . identifier[up] : identifier[tdos] = identifier[self] . identifier[y] [:, literal[int] ] keyword[else] : identifier[tdos] = identifier[self] . identifier[y] [:, literal[int] ] keyword[if] keyword[not] identifier[abs_tol] : identifier[tol] = identifier[tol] * identifier[tdos] . identifier[sum] ()/ identifier[tdos] . identifier[shape] [ literal[int] ] identifier[i_fermi] = literal[int] keyword[while] identifier[self] . identifier[x] [ identifier[i_fermi] ]<= identifier[self] . identifier[efermi] : identifier[i_fermi] += literal[int] identifier[i_gap_start] = identifier[i_fermi] keyword[while] identifier[i_gap_start] - literal[int] >= literal[int] keyword[and] identifier[tdos] [ identifier[i_gap_start] - literal[int] ]<= identifier[tol] : identifier[i_gap_start] -= literal[int] identifier[i_gap_end] = identifier[i_gap_start] keyword[while] identifier[i_gap_end] < identifier[tdos] . identifier[shape] [ literal[int] ] keyword[and] identifier[tdos] [ identifier[i_gap_end] ]<= identifier[tol] : identifier[i_gap_end] += literal[int] identifier[i_gap_end] -= literal[int] keyword[return] identifier[self] . identifier[x] [ identifier[i_gap_end] ], identifier[self] . identifier[x] [ identifier[i_gap_start] ]
def get_cbm_vbm(self, tol=0.001, abs_tol=False, spin=None): """ Expects a DOS object and finds the cbm and vbm. Args: tol: tolerance in occupations for determining the gap abs_tol: An absolute tolerance (True) and a relative one (False) spin: Possible values are None - finds the gap in the summed densities, Up - finds the gap in the up spin channel, Down - finds the gap in the down spin channel. Returns: (cbm, vbm): float in eV corresponding to the gap """ # determine tolerance if spin is None: tdos = self.y if len(self.ydim) == 1 else np.sum(self.y, axis=1) # depends on [control=['if'], data=[]] elif spin == Spin.up: tdos = self.y[:, 0] # depends on [control=['if'], data=[]] else: tdos = self.y[:, 1] if not abs_tol: tol = tol * tdos.sum() / tdos.shape[0] # depends on [control=['if'], data=[]] # find index of fermi energy i_fermi = 0 while self.x[i_fermi] <= self.efermi: i_fermi += 1 # depends on [control=['while'], data=[]] # work backwards until tolerance is reached i_gap_start = i_fermi while i_gap_start - 1 >= 0 and tdos[i_gap_start - 1] <= tol: i_gap_start -= 1 # depends on [control=['while'], data=[]] # work forwards until tolerance is reached i_gap_end = i_gap_start while i_gap_end < tdos.shape[0] and tdos[i_gap_end] <= tol: i_gap_end += 1 # depends on [control=['while'], data=[]] i_gap_end -= 1 return (self.x[i_gap_end], self.x[i_gap_start])
def normalize_cmd(self, command): """Normalize CLI commands to have a single trailing newline. :param command: Command that may require line feed to be normalized :type command: str """ command = command.rstrip() command += self.RETURN return command
def function[normalize_cmd, parameter[self, command]]: constant[Normalize CLI commands to have a single trailing newline. :param command: Command that may require line feed to be normalized :type command: str ] variable[command] assign[=] call[name[command].rstrip, parameter[]] <ast.AugAssign object at 0x7da2041dab90> return[name[command]]
keyword[def] identifier[normalize_cmd] ( identifier[self] , identifier[command] ): literal[string] identifier[command] = identifier[command] . identifier[rstrip] () identifier[command] += identifier[self] . identifier[RETURN] keyword[return] identifier[command]
def normalize_cmd(self, command): """Normalize CLI commands to have a single trailing newline. :param command: Command that may require line feed to be normalized :type command: str """ command = command.rstrip() command += self.RETURN return command
def load_reader(reader_configs, **reader_kwargs): """Import and setup the reader from *reader_info*.""" reader_info = read_reader_config(reader_configs) reader_instance = reader_info['reader'](config_files=reader_configs, **reader_kwargs) return reader_instance
def function[load_reader, parameter[reader_configs]]: constant[Import and setup the reader from *reader_info*.] variable[reader_info] assign[=] call[name[read_reader_config], parameter[name[reader_configs]]] variable[reader_instance] assign[=] call[call[name[reader_info]][constant[reader]], parameter[]] return[name[reader_instance]]
keyword[def] identifier[load_reader] ( identifier[reader_configs] ,** identifier[reader_kwargs] ): literal[string] identifier[reader_info] = identifier[read_reader_config] ( identifier[reader_configs] ) identifier[reader_instance] = identifier[reader_info] [ literal[string] ]( identifier[config_files] = identifier[reader_configs] ,** identifier[reader_kwargs] ) keyword[return] identifier[reader_instance]
def load_reader(reader_configs, **reader_kwargs): """Import and setup the reader from *reader_info*.""" reader_info = read_reader_config(reader_configs) reader_instance = reader_info['reader'](config_files=reader_configs, **reader_kwargs) return reader_instance
def set_validation(self, batch_size, val_rdd, trigger, val_method=None): """ Configure validation settings. :param batch_size: validation batch size :param val_rdd: validation dataset :param trigger: validation interval :param val_method: the ValidationMethod to use,e.g. "Top1Accuracy", "Top5Accuracy", "Loss" """ if val_method is None: val_method = [Top1Accuracy()] func_name = "setValidation" if isinstance(val_rdd, DataSet): func_name = "setValidationFromDataSet" callBigDlFunc(self.bigdl_type, func_name, self.value, batch_size, trigger, val_rdd, to_list(val_method))
def function[set_validation, parameter[self, batch_size, val_rdd, trigger, val_method]]: constant[ Configure validation settings. :param batch_size: validation batch size :param val_rdd: validation dataset :param trigger: validation interval :param val_method: the ValidationMethod to use,e.g. "Top1Accuracy", "Top5Accuracy", "Loss" ] if compare[name[val_method] is constant[None]] begin[:] variable[val_method] assign[=] list[[<ast.Call object at 0x7da1b0371b10>]] variable[func_name] assign[=] constant[setValidation] if call[name[isinstance], parameter[name[val_rdd], name[DataSet]]] begin[:] variable[func_name] assign[=] constant[setValidationFromDataSet] call[name[callBigDlFunc], parameter[name[self].bigdl_type, name[func_name], name[self].value, name[batch_size], name[trigger], name[val_rdd], call[name[to_list], parameter[name[val_method]]]]]
keyword[def] identifier[set_validation] ( identifier[self] , identifier[batch_size] , identifier[val_rdd] , identifier[trigger] , identifier[val_method] = keyword[None] ): literal[string] keyword[if] identifier[val_method] keyword[is] keyword[None] : identifier[val_method] =[ identifier[Top1Accuracy] ()] identifier[func_name] = literal[string] keyword[if] identifier[isinstance] ( identifier[val_rdd] , identifier[DataSet] ): identifier[func_name] = literal[string] identifier[callBigDlFunc] ( identifier[self] . identifier[bigdl_type] , identifier[func_name] , identifier[self] . identifier[value] , identifier[batch_size] , identifier[trigger] , identifier[val_rdd] , identifier[to_list] ( identifier[val_method] ))
def set_validation(self, batch_size, val_rdd, trigger, val_method=None): """ Configure validation settings. :param batch_size: validation batch size :param val_rdd: validation dataset :param trigger: validation interval :param val_method: the ValidationMethod to use,e.g. "Top1Accuracy", "Top5Accuracy", "Loss" """ if val_method is None: val_method = [Top1Accuracy()] # depends on [control=['if'], data=['val_method']] func_name = 'setValidation' if isinstance(val_rdd, DataSet): func_name = 'setValidationFromDataSet' # depends on [control=['if'], data=[]] callBigDlFunc(self.bigdl_type, func_name, self.value, batch_size, trigger, val_rdd, to_list(val_method))
def requeue(self, **kwargs): """ Respond to ``nsqd`` that you've failed to process this message successfully (and would like it to be requeued). :param backoff: whether or not :class:`nsq.Reader` should apply backoff handling :type backoff: bool :param delay: the amount of time (in seconds) that this message should be delayed if -1 it will be calculated based on # of attempts :type delay: int """ # convert delay to time_ms for fixing # https://github.com/nsqio/pynsq/issues/71 and maintaining # backward compatibility if 'delay' in kwargs and isinstance(kwargs['delay'], int) and kwargs['delay'] >= 0: kwargs['time_ms'] = kwargs['delay'] * 1000 assert not self._has_responded self._has_responded = True self.trigger(event.REQUEUE, message=self, **kwargs)
def function[requeue, parameter[self]]: constant[ Respond to ``nsqd`` that you've failed to process this message successfully (and would like it to be requeued). :param backoff: whether or not :class:`nsq.Reader` should apply backoff handling :type backoff: bool :param delay: the amount of time (in seconds) that this message should be delayed if -1 it will be calculated based on # of attempts :type delay: int ] if <ast.BoolOp object at 0x7da1b0286ad0> begin[:] call[name[kwargs]][constant[time_ms]] assign[=] binary_operation[call[name[kwargs]][constant[delay]] * constant[1000]] assert[<ast.UnaryOp object at 0x7da1b03958d0>] name[self]._has_responded assign[=] constant[True] call[name[self].trigger, parameter[name[event].REQUEUE]]
keyword[def] identifier[requeue] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[isinstance] ( identifier[kwargs] [ literal[string] ], identifier[int] ) keyword[and] identifier[kwargs] [ literal[string] ]>= literal[int] : identifier[kwargs] [ literal[string] ]= identifier[kwargs] [ literal[string] ]* literal[int] keyword[assert] keyword[not] identifier[self] . identifier[_has_responded] identifier[self] . identifier[_has_responded] = keyword[True] identifier[self] . identifier[trigger] ( identifier[event] . identifier[REQUEUE] , identifier[message] = identifier[self] ,** identifier[kwargs] )
def requeue(self, **kwargs): """ Respond to ``nsqd`` that you've failed to process this message successfully (and would like it to be requeued). :param backoff: whether or not :class:`nsq.Reader` should apply backoff handling :type backoff: bool :param delay: the amount of time (in seconds) that this message should be delayed if -1 it will be calculated based on # of attempts :type delay: int """ # convert delay to time_ms for fixing # https://github.com/nsqio/pynsq/issues/71 and maintaining # backward compatibility if 'delay' in kwargs and isinstance(kwargs['delay'], int) and (kwargs['delay'] >= 0): kwargs['time_ms'] = kwargs['delay'] * 1000 # depends on [control=['if'], data=[]] assert not self._has_responded self._has_responded = True self.trigger(event.REQUEUE, message=self, **kwargs)
def stage_signature(vcs, signature): """Add `signature` to the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: AlreadyStagedError """ evidence_path = _get_staged_history_path(vcs) staged = get_staged_signatures(vcs) if signature in staged: raise AlreadyStagedError staged.append(signature) string = '\n'.join(staged) with open(evidence_path, 'w') as f: f.write(string)
def function[stage_signature, parameter[vcs, signature]]: constant[Add `signature` to the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: AlreadyStagedError ] variable[evidence_path] assign[=] call[name[_get_staged_history_path], parameter[name[vcs]]] variable[staged] assign[=] call[name[get_staged_signatures], parameter[name[vcs]]] if compare[name[signature] in name[staged]] begin[:] <ast.Raise object at 0x7da1b13b8760> call[name[staged].append, parameter[name[signature]]] variable[string] assign[=] call[constant[ ].join, parameter[name[staged]]] with call[name[open], parameter[name[evidence_path], constant[w]]] begin[:] call[name[f].write, parameter[name[string]]]
keyword[def] identifier[stage_signature] ( identifier[vcs] , identifier[signature] ): literal[string] identifier[evidence_path] = identifier[_get_staged_history_path] ( identifier[vcs] ) identifier[staged] = identifier[get_staged_signatures] ( identifier[vcs] ) keyword[if] identifier[signature] keyword[in] identifier[staged] : keyword[raise] identifier[AlreadyStagedError] identifier[staged] . identifier[append] ( identifier[signature] ) identifier[string] = literal[string] . identifier[join] ( identifier[staged] ) keyword[with] identifier[open] ( identifier[evidence_path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[string] )
def stage_signature(vcs, signature): """Add `signature` to the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: AlreadyStagedError """ evidence_path = _get_staged_history_path(vcs) staged = get_staged_signatures(vcs) if signature in staged: raise AlreadyStagedError # depends on [control=['if'], data=[]] staged.append(signature) string = '\n'.join(staged) with open(evidence_path, 'w') as f: f.write(string) # depends on [control=['with'], data=['f']]
def mag_discrepancy(RAW_IMU, ATTITUDE, inclination, declination=None): '''give the magnitude of the discrepancy between observed and expected magnetic field''' if declination is None: import mavutil declination = degrees(mavutil.mavfile_global.param('COMPASS_DEC', 0)) expected = expected_mag(RAW_IMU, ATTITUDE, inclination, declination) mag = Vector3(RAW_IMU.xmag, RAW_IMU.ymag, RAW_IMU.zmag) return degrees(expected.angle(mag))
def function[mag_discrepancy, parameter[RAW_IMU, ATTITUDE, inclination, declination]]: constant[give the magnitude of the discrepancy between observed and expected magnetic field] if compare[name[declination] is constant[None]] begin[:] import module[mavutil] variable[declination] assign[=] call[name[degrees], parameter[call[name[mavutil].mavfile_global.param, parameter[constant[COMPASS_DEC], constant[0]]]]] variable[expected] assign[=] call[name[expected_mag], parameter[name[RAW_IMU], name[ATTITUDE], name[inclination], name[declination]]] variable[mag] assign[=] call[name[Vector3], parameter[name[RAW_IMU].xmag, name[RAW_IMU].ymag, name[RAW_IMU].zmag]] return[call[name[degrees], parameter[call[name[expected].angle, parameter[name[mag]]]]]]
keyword[def] identifier[mag_discrepancy] ( identifier[RAW_IMU] , identifier[ATTITUDE] , identifier[inclination] , identifier[declination] = keyword[None] ): literal[string] keyword[if] identifier[declination] keyword[is] keyword[None] : keyword[import] identifier[mavutil] identifier[declination] = identifier[degrees] ( identifier[mavutil] . identifier[mavfile_global] . identifier[param] ( literal[string] , literal[int] )) identifier[expected] = identifier[expected_mag] ( identifier[RAW_IMU] , identifier[ATTITUDE] , identifier[inclination] , identifier[declination] ) identifier[mag] = identifier[Vector3] ( identifier[RAW_IMU] . identifier[xmag] , identifier[RAW_IMU] . identifier[ymag] , identifier[RAW_IMU] . identifier[zmag] ) keyword[return] identifier[degrees] ( identifier[expected] . identifier[angle] ( identifier[mag] ))
def mag_discrepancy(RAW_IMU, ATTITUDE, inclination, declination=None): """give the magnitude of the discrepancy between observed and expected magnetic field""" if declination is None: import mavutil declination = degrees(mavutil.mavfile_global.param('COMPASS_DEC', 0)) # depends on [control=['if'], data=['declination']] expected = expected_mag(RAW_IMU, ATTITUDE, inclination, declination) mag = Vector3(RAW_IMU.xmag, RAW_IMU.ymag, RAW_IMU.zmag) return degrees(expected.angle(mag))
def T_dependent_property_integral(self, T1, T2): r'''Method to calculate the integral of a property with respect to temperature, using a specified method. Methods found valid by `select_valid_methods` are attempted until a method succeeds. If no methods are valid and succeed, None is returned. Calls `calculate_integral` internally to perform the actual calculation. .. math:: \text{integral} = \int_{T_1}^{T_2} \text{property} \; dT Parameters ---------- T1 : float Lower limit of integration, [K] T2 : float Upper limit of integration, [K] method : str Method for which to find the integral Returns ------- integral : float Calculated integral of the property over the given range, [`units*K`] ''' Tavg = 0.5*(T1+T2) if self.method: # retest within range if self.test_method_validity(Tavg, self.method): try: return self.calculate_integral(T1, T2, self.method) except: # pragma: no cover pass sorted_valid_methods = self.select_valid_methods(Tavg) for method in sorted_valid_methods: try: return self.calculate_integral(T1, T2, method) except: pass return None
def function[T_dependent_property_integral, parameter[self, T1, T2]]: constant[Method to calculate the integral of a property with respect to temperature, using a specified method. Methods found valid by `select_valid_methods` are attempted until a method succeeds. If no methods are valid and succeed, None is returned. Calls `calculate_integral` internally to perform the actual calculation. .. math:: \text{integral} = \int_{T_1}^{T_2} \text{property} \; dT Parameters ---------- T1 : float Lower limit of integration, [K] T2 : float Upper limit of integration, [K] method : str Method for which to find the integral Returns ------- integral : float Calculated integral of the property over the given range, [`units*K`] ] variable[Tavg] assign[=] binary_operation[constant[0.5] * binary_operation[name[T1] + name[T2]]] if name[self].method begin[:] if call[name[self].test_method_validity, parameter[name[Tavg], name[self].method]] begin[:] <ast.Try object at 0x7da20c6c5c60> variable[sorted_valid_methods] assign[=] call[name[self].select_valid_methods, parameter[name[Tavg]]] for taget[name[method]] in starred[name[sorted_valid_methods]] begin[:] <ast.Try object at 0x7da20c992e60> return[constant[None]]
keyword[def] identifier[T_dependent_property_integral] ( identifier[self] , identifier[T1] , identifier[T2] ): literal[string] identifier[Tavg] = literal[int] *( identifier[T1] + identifier[T2] ) keyword[if] identifier[self] . identifier[method] : keyword[if] identifier[self] . identifier[test_method_validity] ( identifier[Tavg] , identifier[self] . identifier[method] ): keyword[try] : keyword[return] identifier[self] . identifier[calculate_integral] ( identifier[T1] , identifier[T2] , identifier[self] . identifier[method] ) keyword[except] : keyword[pass] identifier[sorted_valid_methods] = identifier[self] . identifier[select_valid_methods] ( identifier[Tavg] ) keyword[for] identifier[method] keyword[in] identifier[sorted_valid_methods] : keyword[try] : keyword[return] identifier[self] . identifier[calculate_integral] ( identifier[T1] , identifier[T2] , identifier[method] ) keyword[except] : keyword[pass] keyword[return] keyword[None]
def T_dependent_property_integral(self, T1, T2): """Method to calculate the integral of a property with respect to temperature, using a specified method. Methods found valid by `select_valid_methods` are attempted until a method succeeds. If no methods are valid and succeed, None is returned. Calls `calculate_integral` internally to perform the actual calculation. .. math:: \\text{integral} = \\int_{T_1}^{T_2} \\text{property} \\; dT Parameters ---------- T1 : float Lower limit of integration, [K] T2 : float Upper limit of integration, [K] method : str Method for which to find the integral Returns ------- integral : float Calculated integral of the property over the given range, [`units*K`] """ Tavg = 0.5 * (T1 + T2) if self.method: # retest within range if self.test_method_validity(Tavg, self.method): try: return self.calculate_integral(T1, T2, self.method) # depends on [control=['try'], data=[]] except: # pragma: no cover pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] sorted_valid_methods = self.select_valid_methods(Tavg) for method in sorted_valid_methods: try: return self.calculate_integral(T1, T2, method) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['method']] return None
def run(self, *args): """Run the listener and answer to requests. """ del args arec = AddressReceiver(max_age=self._max_age, multicast_enabled=self._multicast_enabled) arec.start() port = PORT try: with nslock: self.listener = get_context().socket(REP) self.listener.bind("tcp://*:" + str(port)) logger.debug('Listening on port %s', str(port)) poller = Poller() poller.register(self.listener, POLLIN) while self.loop: with nslock: socks = dict(poller.poll(1000)) if socks: if socks.get(self.listener) == POLLIN: msg = self.listener.recv_string() else: continue logger.debug("Replying to request: " + str(msg)) msg = Message.decode(msg) self.listener.send_unicode(six.text_type(get_active_address( msg.data["service"], arec))) except KeyboardInterrupt: # Needed to stop the nameserver. pass finally: arec.stop() self.stop()
def function[run, parameter[self]]: constant[Run the listener and answer to requests. ] <ast.Delete object at 0x7da1b26afcd0> variable[arec] assign[=] call[name[AddressReceiver], parameter[]] call[name[arec].start, parameter[]] variable[port] assign[=] name[PORT] <ast.Try object at 0x7da18f09dbd0>
keyword[def] identifier[run] ( identifier[self] ,* identifier[args] ): literal[string] keyword[del] identifier[args] identifier[arec] = identifier[AddressReceiver] ( identifier[max_age] = identifier[self] . identifier[_max_age] , identifier[multicast_enabled] = identifier[self] . identifier[_multicast_enabled] ) identifier[arec] . identifier[start] () identifier[port] = identifier[PORT] keyword[try] : keyword[with] identifier[nslock] : identifier[self] . identifier[listener] = identifier[get_context] (). identifier[socket] ( identifier[REP] ) identifier[self] . identifier[listener] . identifier[bind] ( literal[string] + identifier[str] ( identifier[port] )) identifier[logger] . identifier[debug] ( literal[string] , identifier[str] ( identifier[port] )) identifier[poller] = identifier[Poller] () identifier[poller] . identifier[register] ( identifier[self] . identifier[listener] , identifier[POLLIN] ) keyword[while] identifier[self] . identifier[loop] : keyword[with] identifier[nslock] : identifier[socks] = identifier[dict] ( identifier[poller] . identifier[poll] ( literal[int] )) keyword[if] identifier[socks] : keyword[if] identifier[socks] . identifier[get] ( identifier[self] . identifier[listener] )== identifier[POLLIN] : identifier[msg] = identifier[self] . identifier[listener] . identifier[recv_string] () keyword[else] : keyword[continue] identifier[logger] . identifier[debug] ( literal[string] + identifier[str] ( identifier[msg] )) identifier[msg] = identifier[Message] . identifier[decode] ( identifier[msg] ) identifier[self] . identifier[listener] . identifier[send_unicode] ( identifier[six] . identifier[text_type] ( identifier[get_active_address] ( identifier[msg] . identifier[data] [ literal[string] ], identifier[arec] ))) keyword[except] identifier[KeyboardInterrupt] : keyword[pass] keyword[finally] : identifier[arec] . identifier[stop] () identifier[self] . identifier[stop] ()
def run(self, *args): """Run the listener and answer to requests. """ del args arec = AddressReceiver(max_age=self._max_age, multicast_enabled=self._multicast_enabled) arec.start() port = PORT try: with nslock: self.listener = get_context().socket(REP) self.listener.bind('tcp://*:' + str(port)) logger.debug('Listening on port %s', str(port)) poller = Poller() poller.register(self.listener, POLLIN) # depends on [control=['with'], data=[]] while self.loop: with nslock: socks = dict(poller.poll(1000)) if socks: if socks.get(self.listener) == POLLIN: msg = self.listener.recv_string() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: continue logger.debug('Replying to request: ' + str(msg)) msg = Message.decode(msg) self.listener.send_unicode(six.text_type(get_active_address(msg.data['service'], arec))) # depends on [control=['with'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except KeyboardInterrupt: # Needed to stop the nameserver. pass # depends on [control=['except'], data=[]] finally: arec.stop() self.stop()
def dir_exists(self): """ Makes a ``HEAD`` requests to the URI. :returns: ``True`` if status code is 2xx. """ r = requests.request(self.method if self.method else 'HEAD', self.url, **self.storage_args) try: r.raise_for_status() except Exception: return False return True
def function[dir_exists, parameter[self]]: constant[ Makes a ``HEAD`` requests to the URI. :returns: ``True`` if status code is 2xx. ] variable[r] assign[=] call[name[requests].request, parameter[<ast.IfExp object at 0x7da1b0b38e50>, name[self].url]] <ast.Try object at 0x7da1b0b39810> return[constant[True]]
keyword[def] identifier[dir_exists] ( identifier[self] ): literal[string] identifier[r] = identifier[requests] . identifier[request] ( identifier[self] . identifier[method] keyword[if] identifier[self] . identifier[method] keyword[else] literal[string] , identifier[self] . identifier[url] ,** identifier[self] . identifier[storage_args] ) keyword[try] : identifier[r] . identifier[raise_for_status] () keyword[except] identifier[Exception] : keyword[return] keyword[False] keyword[return] keyword[True]
def dir_exists(self): """ Makes a ``HEAD`` requests to the URI. :returns: ``True`` if status code is 2xx. """ r = requests.request(self.method if self.method else 'HEAD', self.url, **self.storage_args) try: r.raise_for_status() # depends on [control=['try'], data=[]] except Exception: return False # depends on [control=['except'], data=[]] return True
def calc_max_bits(self, signed, values): """ Calculates the maximim needed bits to represent a value """ b = 0 vmax = -10000000 for val in values: if signed: b = b | val if val >= 0 else b | ~val << 1 vmax = val if vmax < val else vmax else: b |= val; bits = 0 if b > 0: bits = len(self.bin(b)) - 2 if signed and vmax > 0 and len(self.bin(vmax)) - 2 >= bits: bits += 1 return bits
def function[calc_max_bits, parameter[self, signed, values]]: constant[ Calculates the maximim needed bits to represent a value ] variable[b] assign[=] constant[0] variable[vmax] assign[=] <ast.UnaryOp object at 0x7da1b0d116c0> for taget[name[val]] in starred[name[values]] begin[:] if name[signed] begin[:] variable[b] assign[=] <ast.IfExp object at 0x7da1b0d11060> variable[vmax] assign[=] <ast.IfExp object at 0x7da1b0d12fb0> variable[bits] assign[=] constant[0] if compare[name[b] greater[>] constant[0]] begin[:] variable[bits] assign[=] binary_operation[call[name[len], parameter[call[name[self].bin, parameter[name[b]]]]] - constant[2]] if <ast.BoolOp object at 0x7da1b0d11e70> begin[:] <ast.AugAssign object at 0x7da1b0eefa90> return[name[bits]]
keyword[def] identifier[calc_max_bits] ( identifier[self] , identifier[signed] , identifier[values] ): literal[string] identifier[b] = literal[int] identifier[vmax] =- literal[int] keyword[for] identifier[val] keyword[in] identifier[values] : keyword[if] identifier[signed] : identifier[b] = identifier[b] | identifier[val] keyword[if] identifier[val] >= literal[int] keyword[else] identifier[b] |~ identifier[val] << literal[int] identifier[vmax] = identifier[val] keyword[if] identifier[vmax] < identifier[val] keyword[else] identifier[vmax] keyword[else] : identifier[b] |= identifier[val] ; identifier[bits] = literal[int] keyword[if] identifier[b] > literal[int] : identifier[bits] = identifier[len] ( identifier[self] . identifier[bin] ( identifier[b] ))- literal[int] keyword[if] identifier[signed] keyword[and] identifier[vmax] > literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[bin] ( identifier[vmax] ))- literal[int] >= identifier[bits] : identifier[bits] += literal[int] keyword[return] identifier[bits]
def calc_max_bits(self, signed, values): """ Calculates the maximim needed bits to represent a value """ b = 0 vmax = -10000000 for val in values: if signed: b = b | val if val >= 0 else b | ~val << 1 vmax = val if vmax < val else vmax # depends on [control=['if'], data=[]] else: b |= val # depends on [control=['for'], data=['val']] bits = 0 if b > 0: bits = len(self.bin(b)) - 2 if signed and vmax > 0 and (len(self.bin(vmax)) - 2 >= bits): bits += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['b']] return bits
def drawGrid(self, painter, opt, rect, index): """ Draws the grid lines for this delegate. :param painter | <QtGui.QPainter> opt | <QtGui.QStyleOptionItem> rect | <QtCore.QRect> index | <QtGui.QModelIndex> """ if not self.showGrid(): return painter.setBrush(QtCore.Qt.NoBrush) painter.setPen(self.gridPen()) size = self.gridPen().width() + 1 # draw the lines lines = [] # add the column line if self.showGridColumns(): lines.append(QtCore.QLine(rect.width() - size, 0, rect.width() - size, rect.height() - size)) # add the row line if (self.showGridRows()): lines.append(QtCore.QLine(0, rect.height() - size, rect.width() - size, rect.height() - size)) painter.drawLines(lines)
def function[drawGrid, parameter[self, painter, opt, rect, index]]: constant[ Draws the grid lines for this delegate. :param painter | <QtGui.QPainter> opt | <QtGui.QStyleOptionItem> rect | <QtCore.QRect> index | <QtGui.QModelIndex> ] if <ast.UnaryOp object at 0x7da1b24fe8f0> begin[:] return[None] call[name[painter].setBrush, parameter[name[QtCore].Qt.NoBrush]] call[name[painter].setPen, parameter[call[name[self].gridPen, parameter[]]]] variable[size] assign[=] binary_operation[call[call[name[self].gridPen, parameter[]].width, parameter[]] + constant[1]] variable[lines] assign[=] list[[]] if call[name[self].showGridColumns, parameter[]] begin[:] call[name[lines].append, parameter[call[name[QtCore].QLine, parameter[binary_operation[call[name[rect].width, parameter[]] - name[size]], constant[0], binary_operation[call[name[rect].width, parameter[]] - name[size]], binary_operation[call[name[rect].height, parameter[]] - name[size]]]]]] if call[name[self].showGridRows, parameter[]] begin[:] call[name[lines].append, parameter[call[name[QtCore].QLine, parameter[constant[0], binary_operation[call[name[rect].height, parameter[]] - name[size]], binary_operation[call[name[rect].width, parameter[]] - name[size]], binary_operation[call[name[rect].height, parameter[]] - name[size]]]]]] call[name[painter].drawLines, parameter[name[lines]]]
keyword[def] identifier[drawGrid] ( identifier[self] , identifier[painter] , identifier[opt] , identifier[rect] , identifier[index] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[showGrid] (): keyword[return] identifier[painter] . identifier[setBrush] ( identifier[QtCore] . identifier[Qt] . identifier[NoBrush] ) identifier[painter] . identifier[setPen] ( identifier[self] . identifier[gridPen] ()) identifier[size] = identifier[self] . identifier[gridPen] (). identifier[width] ()+ literal[int] identifier[lines] =[] keyword[if] identifier[self] . identifier[showGridColumns] (): identifier[lines] . identifier[append] ( identifier[QtCore] . identifier[QLine] ( identifier[rect] . identifier[width] ()- identifier[size] , literal[int] , identifier[rect] . identifier[width] ()- identifier[size] , identifier[rect] . identifier[height] ()- identifier[size] )) keyword[if] ( identifier[self] . identifier[showGridRows] ()): identifier[lines] . identifier[append] ( identifier[QtCore] . identifier[QLine] ( literal[int] , identifier[rect] . identifier[height] ()- identifier[size] , identifier[rect] . identifier[width] ()- identifier[size] , identifier[rect] . identifier[height] ()- identifier[size] )) identifier[painter] . identifier[drawLines] ( identifier[lines] )
def drawGrid(self, painter, opt, rect, index): """ Draws the grid lines for this delegate. :param painter | <QtGui.QPainter> opt | <QtGui.QStyleOptionItem> rect | <QtCore.QRect> index | <QtGui.QModelIndex> """ if not self.showGrid(): return # depends on [control=['if'], data=[]] painter.setBrush(QtCore.Qt.NoBrush) painter.setPen(self.gridPen()) size = self.gridPen().width() + 1 # draw the lines lines = [] # add the column line if self.showGridColumns(): lines.append(QtCore.QLine(rect.width() - size, 0, rect.width() - size, rect.height() - size)) # depends on [control=['if'], data=[]] # add the row line if self.showGridRows(): lines.append(QtCore.QLine(0, rect.height() - size, rect.width() - size, rect.height() - size)) # depends on [control=['if'], data=[]] painter.drawLines(lines)
def get_month_list(to_date, from_date): """ Generate a list containing year+month between two dates. Returns: [(2013, 11), (2013, 12), (2014, 1)] """ num_months = get_months_apart(to_date, from_date) month_offset = from_date.month month_list = [] for month in range(month_offset-1, month_offset+num_months): year = from_date.year+(month/12) real_month = (month % 12) + 1 month_list.append((year, real_month)) return month_list
def function[get_month_list, parameter[to_date, from_date]]: constant[ Generate a list containing year+month between two dates. Returns: [(2013, 11), (2013, 12), (2014, 1)] ] variable[num_months] assign[=] call[name[get_months_apart], parameter[name[to_date], name[from_date]]] variable[month_offset] assign[=] name[from_date].month variable[month_list] assign[=] list[[]] for taget[name[month]] in starred[call[name[range], parameter[binary_operation[name[month_offset] - constant[1]], binary_operation[name[month_offset] + name[num_months]]]]] begin[:] variable[year] assign[=] binary_operation[name[from_date].year + binary_operation[name[month] / constant[12]]] variable[real_month] assign[=] binary_operation[binary_operation[name[month] <ast.Mod object at 0x7da2590d6920> constant[12]] + constant[1]] call[name[month_list].append, parameter[tuple[[<ast.Name object at 0x7da18f58d8d0>, <ast.Name object at 0x7da18f58fdf0>]]]] return[name[month_list]]
keyword[def] identifier[get_month_list] ( identifier[to_date] , identifier[from_date] ): literal[string] identifier[num_months] = identifier[get_months_apart] ( identifier[to_date] , identifier[from_date] ) identifier[month_offset] = identifier[from_date] . identifier[month] identifier[month_list] =[] keyword[for] identifier[month] keyword[in] identifier[range] ( identifier[month_offset] - literal[int] , identifier[month_offset] + identifier[num_months] ): identifier[year] = identifier[from_date] . identifier[year] +( identifier[month] / literal[int] ) identifier[real_month] =( identifier[month] % literal[int] )+ literal[int] identifier[month_list] . identifier[append] (( identifier[year] , identifier[real_month] )) keyword[return] identifier[month_list]
def get_month_list(to_date, from_date): """ Generate a list containing year+month between two dates. Returns: [(2013, 11), (2013, 12), (2014, 1)] """ num_months = get_months_apart(to_date, from_date) month_offset = from_date.month month_list = [] for month in range(month_offset - 1, month_offset + num_months): year = from_date.year + month / 12 real_month = month % 12 + 1 month_list.append((year, real_month)) # depends on [control=['for'], data=['month']] return month_list
def _op_generic_pack_StoU_saturation(self, args, src_size, dst_size): """ Generic pack with unsigned saturation. Split args in chunks of src_size signed bits and in pack them into unsigned saturated chunks of dst_size bits. Then chunks are concatenated resulting in a BV of len(args)*dst_size//src_size*len(args[0]) bits. """ if src_size <= 0 or dst_size <= 0: raise SimOperationError("Can't pack from or to zero or negative size" % self.name) result = None max_value = claripy.BVV(-1, dst_size).zero_extend(src_size - dst_size) #max value for unsigned saturation min_value = claripy.BVV(0, src_size) #min unsigned value always 0 for v in args: for src_value in v.chop(src_size): dst_value = self._op_generic_StoU_saturation(src_value, min_value, max_value) dst_value = dst_value.zero_extend(dst_size - src_size) if result is None: result = dst_value else: result = self._op_concat((result, dst_value)) return result
def function[_op_generic_pack_StoU_saturation, parameter[self, args, src_size, dst_size]]: constant[ Generic pack with unsigned saturation. Split args in chunks of src_size signed bits and in pack them into unsigned saturated chunks of dst_size bits. Then chunks are concatenated resulting in a BV of len(args)*dst_size//src_size*len(args[0]) bits. ] if <ast.BoolOp object at 0x7da207f03580> begin[:] <ast.Raise object at 0x7da207f02c20> variable[result] assign[=] constant[None] variable[max_value] assign[=] call[call[name[claripy].BVV, parameter[<ast.UnaryOp object at 0x7da207f03550>, name[dst_size]]].zero_extend, parameter[binary_operation[name[src_size] - name[dst_size]]]] variable[min_value] assign[=] call[name[claripy].BVV, parameter[constant[0], name[src_size]]] for taget[name[v]] in starred[name[args]] begin[:] for taget[name[src_value]] in starred[call[name[v].chop, parameter[name[src_size]]]] begin[:] variable[dst_value] assign[=] call[name[self]._op_generic_StoU_saturation, parameter[name[src_value], name[min_value], name[max_value]]] variable[dst_value] assign[=] call[name[dst_value].zero_extend, parameter[binary_operation[name[dst_size] - name[src_size]]]] if compare[name[result] is constant[None]] begin[:] variable[result] assign[=] name[dst_value] return[name[result]]
keyword[def] identifier[_op_generic_pack_StoU_saturation] ( identifier[self] , identifier[args] , identifier[src_size] , identifier[dst_size] ): literal[string] keyword[if] identifier[src_size] <= literal[int] keyword[or] identifier[dst_size] <= literal[int] : keyword[raise] identifier[SimOperationError] ( literal[string] % identifier[self] . identifier[name] ) identifier[result] = keyword[None] identifier[max_value] = identifier[claripy] . identifier[BVV] (- literal[int] , identifier[dst_size] ). identifier[zero_extend] ( identifier[src_size] - identifier[dst_size] ) identifier[min_value] = identifier[claripy] . identifier[BVV] ( literal[int] , identifier[src_size] ) keyword[for] identifier[v] keyword[in] identifier[args] : keyword[for] identifier[src_value] keyword[in] identifier[v] . identifier[chop] ( identifier[src_size] ): identifier[dst_value] = identifier[self] . identifier[_op_generic_StoU_saturation] ( identifier[src_value] , identifier[min_value] , identifier[max_value] ) identifier[dst_value] = identifier[dst_value] . identifier[zero_extend] ( identifier[dst_size] - identifier[src_size] ) keyword[if] identifier[result] keyword[is] keyword[None] : identifier[result] = identifier[dst_value] keyword[else] : identifier[result] = identifier[self] . identifier[_op_concat] (( identifier[result] , identifier[dst_value] )) keyword[return] identifier[result]
def _op_generic_pack_StoU_saturation(self, args, src_size, dst_size): """ Generic pack with unsigned saturation. Split args in chunks of src_size signed bits and in pack them into unsigned saturated chunks of dst_size bits. Then chunks are concatenated resulting in a BV of len(args)*dst_size//src_size*len(args[0]) bits. """ if src_size <= 0 or dst_size <= 0: raise SimOperationError("Can't pack from or to zero or negative size" % self.name) # depends on [control=['if'], data=[]] result = None max_value = claripy.BVV(-1, dst_size).zero_extend(src_size - dst_size) #max value for unsigned saturation min_value = claripy.BVV(0, src_size) #min unsigned value always 0 for v in args: for src_value in v.chop(src_size): dst_value = self._op_generic_StoU_saturation(src_value, min_value, max_value) dst_value = dst_value.zero_extend(dst_size - src_size) if result is None: result = dst_value # depends on [control=['if'], data=['result']] else: result = self._op_concat((result, dst_value)) # depends on [control=['for'], data=['src_value']] # depends on [control=['for'], data=['v']] return result
def do_edit(self, line): """edit Edit the queue of write operations.""" self._split_args(line, 0, 0) self._command_processor.get_operation_queue().edit() self._print_info_if_verbose("The write operation queue was successfully edited")
def function[do_edit, parameter[self, line]]: constant[edit Edit the queue of write operations.] call[name[self]._split_args, parameter[name[line], constant[0], constant[0]]] call[call[name[self]._command_processor.get_operation_queue, parameter[]].edit, parameter[]] call[name[self]._print_info_if_verbose, parameter[constant[The write operation queue was successfully edited]]]
keyword[def] identifier[do_edit] ( identifier[self] , identifier[line] ): literal[string] identifier[self] . identifier[_split_args] ( identifier[line] , literal[int] , literal[int] ) identifier[self] . identifier[_command_processor] . identifier[get_operation_queue] (). identifier[edit] () identifier[self] . identifier[_print_info_if_verbose] ( literal[string] )
def do_edit(self, line): """edit Edit the queue of write operations.""" self._split_args(line, 0, 0) self._command_processor.get_operation_queue().edit() self._print_info_if_verbose('The write operation queue was successfully edited')
def _scope_vars(scope, trainable_only=False): """ Get variables inside a scope The scope can be specified as a string Parameters ---------- scope: str or VariableScope scope in which the variables reside. trainable_only: bool whether or not to return only the variables that were marked as trainable. Returns ------- vars: [tf.Variable] list of variables in `scope`. """ return tf.get_collection( tf.GraphKeys.TRAINABLE_VARIABLES if trainable_only else tf.GraphKeys.VARIABLES, scope=scope if isinstance(scope, str) else scope.name)
def function[_scope_vars, parameter[scope, trainable_only]]: constant[ Get variables inside a scope The scope can be specified as a string Parameters ---------- scope: str or VariableScope scope in which the variables reside. trainable_only: bool whether or not to return only the variables that were marked as trainable. Returns ------- vars: [tf.Variable] list of variables in `scope`. ] return[call[name[tf].get_collection, parameter[<ast.IfExp object at 0x7da1b23468c0>]]]
keyword[def] identifier[_scope_vars] ( identifier[scope] , identifier[trainable_only] = keyword[False] ): literal[string] keyword[return] identifier[tf] . identifier[get_collection] ( identifier[tf] . identifier[GraphKeys] . identifier[TRAINABLE_VARIABLES] keyword[if] identifier[trainable_only] keyword[else] identifier[tf] . identifier[GraphKeys] . identifier[VARIABLES] , identifier[scope] = identifier[scope] keyword[if] identifier[isinstance] ( identifier[scope] , identifier[str] ) keyword[else] identifier[scope] . identifier[name] )
def _scope_vars(scope, trainable_only=False): """ Get variables inside a scope The scope can be specified as a string Parameters ---------- scope: str or VariableScope scope in which the variables reside. trainable_only: bool whether or not to return only the variables that were marked as trainable. Returns ------- vars: [tf.Variable] list of variables in `scope`. """ return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES if trainable_only else tf.GraphKeys.VARIABLES, scope=scope if isinstance(scope, str) else scope.name)
def _getXrefStream(self, xref): """_getXrefStream(self, xref) -> PyObject *""" if self.isClosed or self.isEncrypted: raise ValueError("operation illegal for closed / encrypted doc") return _fitz.Document__getXrefStream(self, xref)
def function[_getXrefStream, parameter[self, xref]]: constant[_getXrefStream(self, xref) -> PyObject *] if <ast.BoolOp object at 0x7da20c6abcd0> begin[:] <ast.Raise object at 0x7da20c6abdc0> return[call[name[_fitz].Document__getXrefStream, parameter[name[self], name[xref]]]]
keyword[def] identifier[_getXrefStream] ( identifier[self] , identifier[xref] ): literal[string] keyword[if] identifier[self] . identifier[isClosed] keyword[or] identifier[self] . identifier[isEncrypted] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[_fitz] . identifier[Document__getXrefStream] ( identifier[self] , identifier[xref] )
def _getXrefStream(self, xref): """_getXrefStream(self, xref) -> PyObject *""" if self.isClosed or self.isEncrypted: raise ValueError('operation illegal for closed / encrypted doc') # depends on [control=['if'], data=[]] return _fitz.Document__getXrefStream(self, xref)
def qteAutoremoveDeletedWidgets(self): """ Remove all widgets from the internal widget list that do not exist anymore according to SIP. |Args| * **None** |Returns| * **None** |Raises| * **None** """ widget_list = self._qteAdmin.widgetList deleted_widgets = [_ for _ in widget_list if sip.isdeleted(_)] for widgetObj in deleted_widgets: self._qteAdmin.widgetList.remove(widgetObj)
def function[qteAutoremoveDeletedWidgets, parameter[self]]: constant[ Remove all widgets from the internal widget list that do not exist anymore according to SIP. |Args| * **None** |Returns| * **None** |Raises| * **None** ] variable[widget_list] assign[=] name[self]._qteAdmin.widgetList variable[deleted_widgets] assign[=] <ast.ListComp object at 0x7da2054a6620> for taget[name[widgetObj]] in starred[name[deleted_widgets]] begin[:] call[name[self]._qteAdmin.widgetList.remove, parameter[name[widgetObj]]]
keyword[def] identifier[qteAutoremoveDeletedWidgets] ( identifier[self] ): literal[string] identifier[widget_list] = identifier[self] . identifier[_qteAdmin] . identifier[widgetList] identifier[deleted_widgets] =[ identifier[_] keyword[for] identifier[_] keyword[in] identifier[widget_list] keyword[if] identifier[sip] . identifier[isdeleted] ( identifier[_] )] keyword[for] identifier[widgetObj] keyword[in] identifier[deleted_widgets] : identifier[self] . identifier[_qteAdmin] . identifier[widgetList] . identifier[remove] ( identifier[widgetObj] )
def qteAutoremoveDeletedWidgets(self): """ Remove all widgets from the internal widget list that do not exist anymore according to SIP. |Args| * **None** |Returns| * **None** |Raises| * **None** """ widget_list = self._qteAdmin.widgetList deleted_widgets = [_ for _ in widget_list if sip.isdeleted(_)] for widgetObj in deleted_widgets: self._qteAdmin.widgetList.remove(widgetObj) # depends on [control=['for'], data=['widgetObj']]
def SQRT(argument): """ Computes the square matrix of the argument :param argument: a dataset region field (dataset.field) or metadata (dataset['field']) """ if isinstance(argument, MetaField): return argument._unary_expression("SQRT") elif isinstance(argument, RegField): return argument._unary_expression("SQRT") else: raise TypeError("You have to give as input a RegField (dataset.field)" "or a MetaField (dataset['field']")
def function[SQRT, parameter[argument]]: constant[ Computes the square matrix of the argument :param argument: a dataset region field (dataset.field) or metadata (dataset['field']) ] if call[name[isinstance], parameter[name[argument], name[MetaField]]] begin[:] return[call[name[argument]._unary_expression, parameter[constant[SQRT]]]]
keyword[def] identifier[SQRT] ( identifier[argument] ): literal[string] keyword[if] identifier[isinstance] ( identifier[argument] , identifier[MetaField] ): keyword[return] identifier[argument] . identifier[_unary_expression] ( literal[string] ) keyword[elif] identifier[isinstance] ( identifier[argument] , identifier[RegField] ): keyword[return] identifier[argument] . identifier[_unary_expression] ( literal[string] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] )
def SQRT(argument): """ Computes the square matrix of the argument :param argument: a dataset region field (dataset.field) or metadata (dataset['field']) """ if isinstance(argument, MetaField): return argument._unary_expression('SQRT') # depends on [control=['if'], data=[]] elif isinstance(argument, RegField): return argument._unary_expression('SQRT') # depends on [control=['if'], data=[]] else: raise TypeError("You have to give as input a RegField (dataset.field)or a MetaField (dataset['field']")
def registerkbevent(self, keys, modifiers, fn_name, *args): """ Register keystroke events @param keys: key to listen @type keys: string @param modifiers: control / alt combination using gtk MODIFIERS @type modifiers: int @param fn_name: Callback function @type fn_name: function @param *args: arguments to be passed to the callback function @type *args: var args @return: 1 if registration was successful, 0 if not. @rtype: integer """ event_name = "kbevent%s%s" % (keys, modifiers) self._pollEvents._callback[event_name] = [event_name, fn_name, args] return self._remote_registerkbevent(keys, modifiers)
def function[registerkbevent, parameter[self, keys, modifiers, fn_name]]: constant[ Register keystroke events @param keys: key to listen @type keys: string @param modifiers: control / alt combination using gtk MODIFIERS @type modifiers: int @param fn_name: Callback function @type fn_name: function @param *args: arguments to be passed to the callback function @type *args: var args @return: 1 if registration was successful, 0 if not. @rtype: integer ] variable[event_name] assign[=] binary_operation[constant[kbevent%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09fb20>, <ast.Name object at 0x7da18f09d570>]]] call[name[self]._pollEvents._callback][name[event_name]] assign[=] list[[<ast.Name object at 0x7da18f09ed70>, <ast.Name object at 0x7da18f09ea70>, <ast.Name object at 0x7da18f09fb50>]] return[call[name[self]._remote_registerkbevent, parameter[name[keys], name[modifiers]]]]
keyword[def] identifier[registerkbevent] ( identifier[self] , identifier[keys] , identifier[modifiers] , identifier[fn_name] ,* identifier[args] ): literal[string] identifier[event_name] = literal[string] %( identifier[keys] , identifier[modifiers] ) identifier[self] . identifier[_pollEvents] . identifier[_callback] [ identifier[event_name] ]=[ identifier[event_name] , identifier[fn_name] , identifier[args] ] keyword[return] identifier[self] . identifier[_remote_registerkbevent] ( identifier[keys] , identifier[modifiers] )
def registerkbevent(self, keys, modifiers, fn_name, *args): """ Register keystroke events @param keys: key to listen @type keys: string @param modifiers: control / alt combination using gtk MODIFIERS @type modifiers: int @param fn_name: Callback function @type fn_name: function @param *args: arguments to be passed to the callback function @type *args: var args @return: 1 if registration was successful, 0 if not. @rtype: integer """ event_name = 'kbevent%s%s' % (keys, modifiers) self._pollEvents._callback[event_name] = [event_name, fn_name, args] return self._remote_registerkbevent(keys, modifiers)
def get_image(self, path): """Return tk image corresponding to name which is taken form path.""" image = '' name = os.path.basename(path) if not StockImage.is_registered(name): ipath = self.__find_image(path) if ipath is not None: StockImage.register(name, ipath) else: msg = "Image '{0}' not found in resource paths.".format(name) logger.warning(msg) try: image = StockImage.get(name) except StockImageException: # TODO: notify something here. pass return image
def function[get_image, parameter[self, path]]: constant[Return tk image corresponding to name which is taken form path.] variable[image] assign[=] constant[] variable[name] assign[=] call[name[os].path.basename, parameter[name[path]]] if <ast.UnaryOp object at 0x7da1b16b48b0> begin[:] variable[ipath] assign[=] call[name[self].__find_image, parameter[name[path]]] if compare[name[ipath] is_not constant[None]] begin[:] call[name[StockImage].register, parameter[name[name], name[ipath]]] <ast.Try object at 0x7da1b16b7640> return[name[image]]
keyword[def] identifier[get_image] ( identifier[self] , identifier[path] ): literal[string] identifier[image] = literal[string] identifier[name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ) keyword[if] keyword[not] identifier[StockImage] . identifier[is_registered] ( identifier[name] ): identifier[ipath] = identifier[self] . identifier[__find_image] ( identifier[path] ) keyword[if] identifier[ipath] keyword[is] keyword[not] keyword[None] : identifier[StockImage] . identifier[register] ( identifier[name] , identifier[ipath] ) keyword[else] : identifier[msg] = literal[string] . identifier[format] ( identifier[name] ) identifier[logger] . identifier[warning] ( identifier[msg] ) keyword[try] : identifier[image] = identifier[StockImage] . identifier[get] ( identifier[name] ) keyword[except] identifier[StockImageException] : keyword[pass] keyword[return] identifier[image]
def get_image(self, path): """Return tk image corresponding to name which is taken form path.""" image = '' name = os.path.basename(path) if not StockImage.is_registered(name): ipath = self.__find_image(path) if ipath is not None: StockImage.register(name, ipath) # depends on [control=['if'], data=['ipath']] else: msg = "Image '{0}' not found in resource paths.".format(name) logger.warning(msg) # depends on [control=['if'], data=[]] try: image = StockImage.get(name) # depends on [control=['try'], data=[]] except StockImageException: # TODO: notify something here. pass # depends on [control=['except'], data=[]] return image
def vhel_to_vgsr(coordinate, vhel, vsun): """ Convert a velocity from a heliocentric radial velocity to the Galactic standard of rest (GSR). Parameters ---------- coordinate : :class:`~astropy.coordinates.SkyCoord` An Astropy SkyCoord object or anything object that can be passed to the SkyCoord initializer. vhel : :class:`~astropy.units.Quantity` Barycentric line-of-sight velocity. vsun : :class:`~astropy.units.Quantity` Full-space velocity of the sun in a Galactocentric frame. By default, uses the value assumed by Astropy in `~astropy.coordinates.Galactocentric`. Returns ------- vgsr : :class:`~astropy.units.Quantity` Radial velocity in a galactocentric rest frame. """ if vsun is None: vsun = coord.Galactocentric.galcen_v_sun.to_cartesian().xyz return vhel + _get_vproj(coordinate, vsun)
def function[vhel_to_vgsr, parameter[coordinate, vhel, vsun]]: constant[ Convert a velocity from a heliocentric radial velocity to the Galactic standard of rest (GSR). Parameters ---------- coordinate : :class:`~astropy.coordinates.SkyCoord` An Astropy SkyCoord object or anything object that can be passed to the SkyCoord initializer. vhel : :class:`~astropy.units.Quantity` Barycentric line-of-sight velocity. vsun : :class:`~astropy.units.Quantity` Full-space velocity of the sun in a Galactocentric frame. By default, uses the value assumed by Astropy in `~astropy.coordinates.Galactocentric`. Returns ------- vgsr : :class:`~astropy.units.Quantity` Radial velocity in a galactocentric rest frame. ] if compare[name[vsun] is constant[None]] begin[:] variable[vsun] assign[=] call[name[coord].Galactocentric.galcen_v_sun.to_cartesian, parameter[]].xyz return[binary_operation[name[vhel] + call[name[_get_vproj], parameter[name[coordinate], name[vsun]]]]]
keyword[def] identifier[vhel_to_vgsr] ( identifier[coordinate] , identifier[vhel] , identifier[vsun] ): literal[string] keyword[if] identifier[vsun] keyword[is] keyword[None] : identifier[vsun] = identifier[coord] . identifier[Galactocentric] . identifier[galcen_v_sun] . identifier[to_cartesian] (). identifier[xyz] keyword[return] identifier[vhel] + identifier[_get_vproj] ( identifier[coordinate] , identifier[vsun] )
def vhel_to_vgsr(coordinate, vhel, vsun): """ Convert a velocity from a heliocentric radial velocity to the Galactic standard of rest (GSR). Parameters ---------- coordinate : :class:`~astropy.coordinates.SkyCoord` An Astropy SkyCoord object or anything object that can be passed to the SkyCoord initializer. vhel : :class:`~astropy.units.Quantity` Barycentric line-of-sight velocity. vsun : :class:`~astropy.units.Quantity` Full-space velocity of the sun in a Galactocentric frame. By default, uses the value assumed by Astropy in `~astropy.coordinates.Galactocentric`. Returns ------- vgsr : :class:`~astropy.units.Quantity` Radial velocity in a galactocentric rest frame. """ if vsun is None: vsun = coord.Galactocentric.galcen_v_sun.to_cartesian().xyz # depends on [control=['if'], data=['vsun']] return vhel + _get_vproj(coordinate, vsun)
def evaluate_F(self, F): """ Given a fixed policy F, with the interpretation :math:`u = -F x`, this function computes the matrix :math:`P_F` and constant :math:`d_F` associated with discounted cost :math:`J_F(x) = x' P_F x + d_F` Parameters ---------- F : array_like(float, ndim=2) The policy function, a k x n array Returns ------- P_F : array_like(float, ndim=2) Matrix for discounted cost d_F : scalar(float) Constant for discounted cost K_F : array_like(float, ndim=2) Worst case policy O_F : array_like(float, ndim=2) Matrix for discounted entropy o_F : scalar(float) Constant for discounted entropy """ # == Simplify names == # Q, R, A, B, C = self.Q, self.R, self.A, self.B, self.C beta, theta = self.beta, self.theta # == Solve for policies and costs using agent 2's problem == # K_F, P_F = self.F_to_K(F) I = np.identity(self.j) H = inv(I - C.T.dot(P_F.dot(C)) / theta) d_F = log(det(H)) # == Compute O_F and o_F == # sig = -1.0 / theta AO = sqrt(beta) * (A - dot(B, F) + dot(C, K_F)) O_F = solve_discrete_lyapunov(AO.T, beta * dot(K_F.T, K_F)) ho = (trace(H - 1) - d_F) / 2.0 tr = trace(dot(O_F, C.dot(H.dot(C.T)))) o_F = (ho + beta * tr) / (1 - beta) return K_F, P_F, d_F, O_F, o_F
def function[evaluate_F, parameter[self, F]]: constant[ Given a fixed policy F, with the interpretation :math:`u = -F x`, this function computes the matrix :math:`P_F` and constant :math:`d_F` associated with discounted cost :math:`J_F(x) = x' P_F x + d_F` Parameters ---------- F : array_like(float, ndim=2) The policy function, a k x n array Returns ------- P_F : array_like(float, ndim=2) Matrix for discounted cost d_F : scalar(float) Constant for discounted cost K_F : array_like(float, ndim=2) Worst case policy O_F : array_like(float, ndim=2) Matrix for discounted entropy o_F : scalar(float) Constant for discounted entropy ] <ast.Tuple object at 0x7da204566aa0> assign[=] tuple[[<ast.Attribute object at 0x7da204564280>, <ast.Attribute object at 0x7da204567b80>, <ast.Attribute object at 0x7da2045659c0>, <ast.Attribute object at 0x7da204567700>, <ast.Attribute object at 0x7da204564a30>]] <ast.Tuple object at 0x7da204567b20> assign[=] tuple[[<ast.Attribute object at 0x7da204564d30>, <ast.Attribute object at 0x7da204564af0>]] <ast.Tuple object at 0x7da204566f20> assign[=] call[name[self].F_to_K, parameter[name[F]]] variable[I] assign[=] call[name[np].identity, parameter[name[self].j]] variable[H] assign[=] call[name[inv], parameter[binary_operation[name[I] - binary_operation[call[name[C].T.dot, parameter[call[name[P_F].dot, parameter[name[C]]]]] / name[theta]]]]] variable[d_F] assign[=] call[name[log], parameter[call[name[det], parameter[name[H]]]]] variable[sig] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c6c4970> / name[theta]] variable[AO] assign[=] binary_operation[call[name[sqrt], parameter[name[beta]]] * binary_operation[binary_operation[name[A] - call[name[dot], parameter[name[B], name[F]]]] + call[name[dot], parameter[name[C], name[K_F]]]]] variable[O_F] assign[=] call[name[solve_discrete_lyapunov], parameter[name[AO].T, binary_operation[name[beta] * call[name[dot], parameter[name[K_F].T, name[K_F]]]]]] variable[ho] assign[=] 
binary_operation[binary_operation[call[name[trace], parameter[binary_operation[name[H] - constant[1]]]] - name[d_F]] / constant[2.0]] variable[tr] assign[=] call[name[trace], parameter[call[name[dot], parameter[name[O_F], call[name[C].dot, parameter[call[name[H].dot, parameter[name[C].T]]]]]]]] variable[o_F] assign[=] binary_operation[binary_operation[name[ho] + binary_operation[name[beta] * name[tr]]] / binary_operation[constant[1] - name[beta]]] return[tuple[[<ast.Name object at 0x7da1b26ae2f0>, <ast.Name object at 0x7da1b26ada50>, <ast.Name object at 0x7da1b26ae4a0>, <ast.Name object at 0x7da1b26af0a0>, <ast.Name object at 0x7da1b26ae260>]]]
keyword[def] identifier[evaluate_F] ( identifier[self] , identifier[F] ): literal[string] identifier[Q] , identifier[R] , identifier[A] , identifier[B] , identifier[C] = identifier[self] . identifier[Q] , identifier[self] . identifier[R] , identifier[self] . identifier[A] , identifier[self] . identifier[B] , identifier[self] . identifier[C] identifier[beta] , identifier[theta] = identifier[self] . identifier[beta] , identifier[self] . identifier[theta] identifier[K_F] , identifier[P_F] = identifier[self] . identifier[F_to_K] ( identifier[F] ) identifier[I] = identifier[np] . identifier[identity] ( identifier[self] . identifier[j] ) identifier[H] = identifier[inv] ( identifier[I] - identifier[C] . identifier[T] . identifier[dot] ( identifier[P_F] . identifier[dot] ( identifier[C] ))/ identifier[theta] ) identifier[d_F] = identifier[log] ( identifier[det] ( identifier[H] )) identifier[sig] =- literal[int] / identifier[theta] identifier[AO] = identifier[sqrt] ( identifier[beta] )*( identifier[A] - identifier[dot] ( identifier[B] , identifier[F] )+ identifier[dot] ( identifier[C] , identifier[K_F] )) identifier[O_F] = identifier[solve_discrete_lyapunov] ( identifier[AO] . identifier[T] , identifier[beta] * identifier[dot] ( identifier[K_F] . identifier[T] , identifier[K_F] )) identifier[ho] =( identifier[trace] ( identifier[H] - literal[int] )- identifier[d_F] )/ literal[int] identifier[tr] = identifier[trace] ( identifier[dot] ( identifier[O_F] , identifier[C] . identifier[dot] ( identifier[H] . identifier[dot] ( identifier[C] . identifier[T] )))) identifier[o_F] =( identifier[ho] + identifier[beta] * identifier[tr] )/( literal[int] - identifier[beta] ) keyword[return] identifier[K_F] , identifier[P_F] , identifier[d_F] , identifier[O_F] , identifier[o_F]
def evaluate_F(self, F): """ Given a fixed policy F, with the interpretation :math:`u = -F x`, this function computes the matrix :math:`P_F` and constant :math:`d_F` associated with discounted cost :math:`J_F(x) = x' P_F x + d_F` Parameters ---------- F : array_like(float, ndim=2) The policy function, a k x n array Returns ------- P_F : array_like(float, ndim=2) Matrix for discounted cost d_F : scalar(float) Constant for discounted cost K_F : array_like(float, ndim=2) Worst case policy O_F : array_like(float, ndim=2) Matrix for discounted entropy o_F : scalar(float) Constant for discounted entropy """ # == Simplify names == # (Q, R, A, B, C) = (self.Q, self.R, self.A, self.B, self.C) (beta, theta) = (self.beta, self.theta) # == Solve for policies and costs using agent 2's problem == # (K_F, P_F) = self.F_to_K(F) I = np.identity(self.j) H = inv(I - C.T.dot(P_F.dot(C)) / theta) d_F = log(det(H)) # == Compute O_F and o_F == # sig = -1.0 / theta AO = sqrt(beta) * (A - dot(B, F) + dot(C, K_F)) O_F = solve_discrete_lyapunov(AO.T, beta * dot(K_F.T, K_F)) ho = (trace(H - 1) - d_F) / 2.0 tr = trace(dot(O_F, C.dot(H.dot(C.T)))) o_F = (ho + beta * tr) / (1 - beta) return (K_F, P_F, d_F, O_F, o_F)
def create_public_key_from_json(values): """Create a public key object based on the values from the JSON record.""" # currently we only support RSA public keys _id = values.get('id') if not _id: # Make it more forgiving for now. _id = '' # raise ValueError('publicKey definition is missing the "id" value.') if values.get('type') == PUBLIC_KEY_TYPE_RSA: public_key = PublicKeyRSA(_id, owner=values.get('owner')) else: public_key = PublicKeyBase(_id, owner=values.get('owner'), type='EthereumECDSAKey') public_key.set_key_value(values) return public_key
def function[create_public_key_from_json, parameter[values]]: constant[Create a public key object based on the values from the JSON record.] variable[_id] assign[=] call[name[values].get, parameter[constant[id]]] if <ast.UnaryOp object at 0x7da20e962ce0> begin[:] variable[_id] assign[=] constant[] if compare[call[name[values].get, parameter[constant[type]]] equal[==] name[PUBLIC_KEY_TYPE_RSA]] begin[:] variable[public_key] assign[=] call[name[PublicKeyRSA], parameter[name[_id]]] call[name[public_key].set_key_value, parameter[name[values]]] return[name[public_key]]
keyword[def] identifier[create_public_key_from_json] ( identifier[values] ): literal[string] identifier[_id] = identifier[values] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[_id] : identifier[_id] = literal[string] keyword[if] identifier[values] . identifier[get] ( literal[string] )== identifier[PUBLIC_KEY_TYPE_RSA] : identifier[public_key] = identifier[PublicKeyRSA] ( identifier[_id] , identifier[owner] = identifier[values] . identifier[get] ( literal[string] )) keyword[else] : identifier[public_key] = identifier[PublicKeyBase] ( identifier[_id] , identifier[owner] = identifier[values] . identifier[get] ( literal[string] ), identifier[type] = literal[string] ) identifier[public_key] . identifier[set_key_value] ( identifier[values] ) keyword[return] identifier[public_key]
def create_public_key_from_json(values): """Create a public key object based on the values from the JSON record.""" # currently we only support RSA public keys _id = values.get('id') if not _id: # Make it more forgiving for now. _id = '' # depends on [control=['if'], data=[]] # raise ValueError('publicKey definition is missing the "id" value.') if values.get('type') == PUBLIC_KEY_TYPE_RSA: public_key = PublicKeyRSA(_id, owner=values.get('owner')) # depends on [control=['if'], data=[]] else: public_key = PublicKeyBase(_id, owner=values.get('owner'), type='EthereumECDSAKey') public_key.set_key_value(values) return public_key
def address_as_b58(addr): """ Given a b58check or c32check address, return the b58check encoding """ if is_c32_address(addr): return c32ToB58(addr) else: if check_address(addr): return addr else: raise ValueError('Address {} is not b58 or c32'.format(addr))
def function[address_as_b58, parameter[addr]]: constant[ Given a b58check or c32check address, return the b58check encoding ] if call[name[is_c32_address], parameter[name[addr]]] begin[:] return[call[name[c32ToB58], parameter[name[addr]]]]
keyword[def] identifier[address_as_b58] ( identifier[addr] ): literal[string] keyword[if] identifier[is_c32_address] ( identifier[addr] ): keyword[return] identifier[c32ToB58] ( identifier[addr] ) keyword[else] : keyword[if] identifier[check_address] ( identifier[addr] ): keyword[return] identifier[addr] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[addr] ))
def address_as_b58(addr): """ Given a b58check or c32check address, return the b58check encoding """ if is_c32_address(addr): return c32ToB58(addr) # depends on [control=['if'], data=[]] elif check_address(addr): return addr # depends on [control=['if'], data=[]] else: raise ValueError('Address {} is not b58 or c32'.format(addr))
def perminverse(s): ''' Fast inverse of a (numpy) permutation. **Paramters** **s** : sequence Sequence of indices giving a permutation. **Returns** **inv** : numpy array Sequence of indices giving the inverse of permutation `s`. ''' X = np.array(range(len(s))) X[s] = range(len(s)) return X
def function[perminverse, parameter[s]]: constant[ Fast inverse of a (numpy) permutation. **Paramters** **s** : sequence Sequence of indices giving a permutation. **Returns** **inv** : numpy array Sequence of indices giving the inverse of permutation `s`. ] variable[X] assign[=] call[name[np].array, parameter[call[name[range], parameter[call[name[len], parameter[name[s]]]]]]] call[name[X]][name[s]] assign[=] call[name[range], parameter[call[name[len], parameter[name[s]]]]] return[name[X]]
keyword[def] identifier[perminverse] ( identifier[s] ): literal[string] identifier[X] = identifier[np] . identifier[array] ( identifier[range] ( identifier[len] ( identifier[s] ))) identifier[X] [ identifier[s] ]= identifier[range] ( identifier[len] ( identifier[s] )) keyword[return] identifier[X]
def perminverse(s): """ Fast inverse of a (numpy) permutation. **Paramters** **s** : sequence Sequence of indices giving a permutation. **Returns** **inv** : numpy array Sequence of indices giving the inverse of permutation `s`. """ X = np.array(range(len(s))) X[s] = range(len(s)) return X
def GetAnnotatedMethods(cls): """Returns a dictionary of annotated router methods.""" result = {} # We want methods with the highest call-order to be processed last, # so that their annotations have precedence. for i_cls in reversed(inspect.getmro(cls)): for name in compatibility.ListAttrs(i_cls): cls_method = getattr(i_cls, name) if not callable(cls_method): continue if not hasattr(cls_method, "__http_methods__"): continue result[name] = RouterMethodMetadata( name=name, doc=cls_method.__doc__, args_type=getattr(cls_method, "__args_type__", None), result_type=getattr(cls_method, "__result_type__", None), category=getattr(cls_method, "__category__", None), http_methods=getattr(cls_method, "__http_methods__", set()), no_audit_log_required=getattr(cls_method, "__no_audit_log_required__", False)) return result
def function[GetAnnotatedMethods, parameter[cls]]: constant[Returns a dictionary of annotated router methods.] variable[result] assign[=] dictionary[[], []] for taget[name[i_cls]] in starred[call[name[reversed], parameter[call[name[inspect].getmro, parameter[name[cls]]]]]] begin[:] for taget[name[name]] in starred[call[name[compatibility].ListAttrs, parameter[name[i_cls]]]] begin[:] variable[cls_method] assign[=] call[name[getattr], parameter[name[i_cls], name[name]]] if <ast.UnaryOp object at 0x7da1b1cc2890> begin[:] continue if <ast.UnaryOp object at 0x7da1b1cc3160> begin[:] continue call[name[result]][name[name]] assign[=] call[name[RouterMethodMetadata], parameter[]] return[name[result]]
keyword[def] identifier[GetAnnotatedMethods] ( identifier[cls] ): literal[string] identifier[result] ={} keyword[for] identifier[i_cls] keyword[in] identifier[reversed] ( identifier[inspect] . identifier[getmro] ( identifier[cls] )): keyword[for] identifier[name] keyword[in] identifier[compatibility] . identifier[ListAttrs] ( identifier[i_cls] ): identifier[cls_method] = identifier[getattr] ( identifier[i_cls] , identifier[name] ) keyword[if] keyword[not] identifier[callable] ( identifier[cls_method] ): keyword[continue] keyword[if] keyword[not] identifier[hasattr] ( identifier[cls_method] , literal[string] ): keyword[continue] identifier[result] [ identifier[name] ]= identifier[RouterMethodMetadata] ( identifier[name] = identifier[name] , identifier[doc] = identifier[cls_method] . identifier[__doc__] , identifier[args_type] = identifier[getattr] ( identifier[cls_method] , literal[string] , keyword[None] ), identifier[result_type] = identifier[getattr] ( identifier[cls_method] , literal[string] , keyword[None] ), identifier[category] = identifier[getattr] ( identifier[cls_method] , literal[string] , keyword[None] ), identifier[http_methods] = identifier[getattr] ( identifier[cls_method] , literal[string] , identifier[set] ()), identifier[no_audit_log_required] = identifier[getattr] ( identifier[cls_method] , literal[string] , keyword[False] )) keyword[return] identifier[result]
def GetAnnotatedMethods(cls): """Returns a dictionary of annotated router methods.""" result = {} # We want methods with the highest call-order to be processed last, # so that their annotations have precedence. for i_cls in reversed(inspect.getmro(cls)): for name in compatibility.ListAttrs(i_cls): cls_method = getattr(i_cls, name) if not callable(cls_method): continue # depends on [control=['if'], data=[]] if not hasattr(cls_method, '__http_methods__'): continue # depends on [control=['if'], data=[]] result[name] = RouterMethodMetadata(name=name, doc=cls_method.__doc__, args_type=getattr(cls_method, '__args_type__', None), result_type=getattr(cls_method, '__result_type__', None), category=getattr(cls_method, '__category__', None), http_methods=getattr(cls_method, '__http_methods__', set()), no_audit_log_required=getattr(cls_method, '__no_audit_log_required__', False)) # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=['i_cls']] return result
def ensure_packages(packages): """Install but do not upgrade required plugin packages.""" required = filter_installed_packages(packages) if required: apt_install(required, fatal=True)
def function[ensure_packages, parameter[packages]]: constant[Install but do not upgrade required plugin packages.] variable[required] assign[=] call[name[filter_installed_packages], parameter[name[packages]]] if name[required] begin[:] call[name[apt_install], parameter[name[required]]]
keyword[def] identifier[ensure_packages] ( identifier[packages] ): literal[string] identifier[required] = identifier[filter_installed_packages] ( identifier[packages] ) keyword[if] identifier[required] : identifier[apt_install] ( identifier[required] , identifier[fatal] = keyword[True] )
def ensure_packages(packages): """Install but do not upgrade required plugin packages.""" required = filter_installed_packages(packages) if required: apt_install(required, fatal=True) # depends on [control=['if'], data=[]]
def _lookup_field(self, path): """ Searches for a field as defined by path. This method is used by the ``dependency`` evaluation logic. :param path: Path elements are separated by a ``.``. A leading ``^`` indicates that the path relates to the document root, otherwise it relates to the currently evaluated document, which is possibly a subdocument. The sequence ``^^`` at the start will be interpreted as a literal ``^``. :type path: :class:`str` :returns: Either the found field name and its value or :obj:`None` for both. :rtype: A two-value :class:`tuple`. """ if path.startswith('^'): path = path[1:] context = self.document if path.startswith('^') \ else self.root_document else: context = self.document parts = path.split('.') for part in parts: if part not in context: return None, None context = context.get(part) return parts[-1], context
def function[_lookup_field, parameter[self, path]]: constant[ Searches for a field as defined by path. This method is used by the ``dependency`` evaluation logic. :param path: Path elements are separated by a ``.``. A leading ``^`` indicates that the path relates to the document root, otherwise it relates to the currently evaluated document, which is possibly a subdocument. The sequence ``^^`` at the start will be interpreted as a literal ``^``. :type path: :class:`str` :returns: Either the found field name and its value or :obj:`None` for both. :rtype: A two-value :class:`tuple`. ] if call[name[path].startswith, parameter[constant[^]]] begin[:] variable[path] assign[=] call[name[path]][<ast.Slice object at 0x7da2054a76d0>] variable[context] assign[=] <ast.IfExp object at 0x7da2054a4a30> variable[parts] assign[=] call[name[path].split, parameter[constant[.]]] for taget[name[part]] in starred[name[parts]] begin[:] if compare[name[part] <ast.NotIn object at 0x7da2590d7190> name[context]] begin[:] return[tuple[[<ast.Constant object at 0x7da18ede6350>, <ast.Constant object at 0x7da18ede4cd0>]]] variable[context] assign[=] call[name[context].get, parameter[name[part]]] return[tuple[[<ast.Subscript object at 0x7da20e74bc70>, <ast.Name object at 0x7da20e7484f0>]]]
keyword[def] identifier[_lookup_field] ( identifier[self] , identifier[path] ): literal[string] keyword[if] identifier[path] . identifier[startswith] ( literal[string] ): identifier[path] = identifier[path] [ literal[int] :] identifier[context] = identifier[self] . identifier[document] keyword[if] identifier[path] . identifier[startswith] ( literal[string] ) keyword[else] identifier[self] . identifier[root_document] keyword[else] : identifier[context] = identifier[self] . identifier[document] identifier[parts] = identifier[path] . identifier[split] ( literal[string] ) keyword[for] identifier[part] keyword[in] identifier[parts] : keyword[if] identifier[part] keyword[not] keyword[in] identifier[context] : keyword[return] keyword[None] , keyword[None] identifier[context] = identifier[context] . identifier[get] ( identifier[part] ) keyword[return] identifier[parts] [- literal[int] ], identifier[context]
def _lookup_field(self, path): """ Searches for a field as defined by path. This method is used by the ``dependency`` evaluation logic. :param path: Path elements are separated by a ``.``. A leading ``^`` indicates that the path relates to the document root, otherwise it relates to the currently evaluated document, which is possibly a subdocument. The sequence ``^^`` at the start will be interpreted as a literal ``^``. :type path: :class:`str` :returns: Either the found field name and its value or :obj:`None` for both. :rtype: A two-value :class:`tuple`. """ if path.startswith('^'): path = path[1:] context = self.document if path.startswith('^') else self.root_document # depends on [control=['if'], data=[]] else: context = self.document parts = path.split('.') for part in parts: if part not in context: return (None, None) # depends on [control=['if'], data=[]] context = context.get(part) # depends on [control=['for'], data=['part']] return (parts[-1], context)
def values(self, index): """ Returns the internal values of this attribute from all the instance objects. :return: the values as numpy array :rtype: list """ values = [] for i in xrange(self.num_instances): inst = self.get_instance(i) values.append(inst.get_value(index)) return numpy.array(values)
def function[values, parameter[self, index]]: constant[ Returns the internal values of this attribute from all the instance objects. :return: the values as numpy array :rtype: list ] variable[values] assign[=] list[[]] for taget[name[i]] in starred[call[name[xrange], parameter[name[self].num_instances]]] begin[:] variable[inst] assign[=] call[name[self].get_instance, parameter[name[i]]] call[name[values].append, parameter[call[name[inst].get_value, parameter[name[index]]]]] return[call[name[numpy].array, parameter[name[values]]]]
keyword[def] identifier[values] ( identifier[self] , identifier[index] ): literal[string] identifier[values] =[] keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[self] . identifier[num_instances] ): identifier[inst] = identifier[self] . identifier[get_instance] ( identifier[i] ) identifier[values] . identifier[append] ( identifier[inst] . identifier[get_value] ( identifier[index] )) keyword[return] identifier[numpy] . identifier[array] ( identifier[values] )
def values(self, index): """ Returns the internal values of this attribute from all the instance objects. :return: the values as numpy array :rtype: list """ values = [] for i in xrange(self.num_instances): inst = self.get_instance(i) values.append(inst.get_value(index)) # depends on [control=['for'], data=['i']] return numpy.array(values)
def find_slot(cls, ctx): """ Finds an empty slot to set a hardware breakpoint. @see: clear_bp, set_bp @type ctx: dict( str S{->} int ) @param ctx: Thread context dictionary. @rtype: int @return: Slot (debug register) for hardware breakpoint. """ Dr7 = ctx['Dr7'] slot = 0 for m in cls.enableMask: if (Dr7 & m) == 0: return slot slot += 1 return None
def function[find_slot, parameter[cls, ctx]]: constant[ Finds an empty slot to set a hardware breakpoint. @see: clear_bp, set_bp @type ctx: dict( str S{->} int ) @param ctx: Thread context dictionary. @rtype: int @return: Slot (debug register) for hardware breakpoint. ] variable[Dr7] assign[=] call[name[ctx]][constant[Dr7]] variable[slot] assign[=] constant[0] for taget[name[m]] in starred[name[cls].enableMask] begin[:] if compare[binary_operation[name[Dr7] <ast.BitAnd object at 0x7da2590d6b60> name[m]] equal[==] constant[0]] begin[:] return[name[slot]] <ast.AugAssign object at 0x7da1b0762d10> return[constant[None]]
keyword[def] identifier[find_slot] ( identifier[cls] , identifier[ctx] ): literal[string] identifier[Dr7] = identifier[ctx] [ literal[string] ] identifier[slot] = literal[int] keyword[for] identifier[m] keyword[in] identifier[cls] . identifier[enableMask] : keyword[if] ( identifier[Dr7] & identifier[m] )== literal[int] : keyword[return] identifier[slot] identifier[slot] += literal[int] keyword[return] keyword[None]
def find_slot(cls, ctx): """ Finds an empty slot to set a hardware breakpoint. @see: clear_bp, set_bp @type ctx: dict( str S{->} int ) @param ctx: Thread context dictionary. @rtype: int @return: Slot (debug register) for hardware breakpoint. """ Dr7 = ctx['Dr7'] slot = 0 for m in cls.enableMask: if Dr7 & m == 0: return slot # depends on [control=['if'], data=[]] slot += 1 # depends on [control=['for'], data=['m']] return None
def maximum(self): """Maximum value of the object.""" value = self._schema.get("maximum", None) if value is None: return if not isinstance(value, NUMERIC_TYPES): raise SchemaError( "maximum value {0!r} is not a numeric type".format( value)) return value
def function[maximum, parameter[self]]: constant[Maximum value of the object.] variable[value] assign[=] call[name[self]._schema.get, parameter[constant[maximum], constant[None]]] if compare[name[value] is constant[None]] begin[:] return[None] if <ast.UnaryOp object at 0x7da20c6aa290> begin[:] <ast.Raise object at 0x7da20c6a9300> return[name[value]]
keyword[def] identifier[maximum] ( identifier[self] ): literal[string] identifier[value] = identifier[self] . identifier[_schema] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[NUMERIC_TYPES] ): keyword[raise] identifier[SchemaError] ( literal[string] . identifier[format] ( identifier[value] )) keyword[return] identifier[value]
def maximum(self): """Maximum value of the object.""" value = self._schema.get('maximum', None) if value is None: return # depends on [control=['if'], data=[]] if not isinstance(value, NUMERIC_TYPES): raise SchemaError('maximum value {0!r} is not a numeric type'.format(value)) # depends on [control=['if'], data=[]] return value
def exp(vector): """ Computes a per-element exponent of the passed-in vector. Args: vector (TYPE): Description """ weld_type = None if isinstance(vector, LazyOpResult): weld_type = vector.weld_type vector = vector.expr elif isinstance(vector, np.ndarray): weld_type = numpy_weld_impl.numpy_to_weld_type_mapping[ str(vector.dtype)] return NumpyArrayWeld(numpy_weld_impl.exp(vector, weld_type), WeldDouble())
def function[exp, parameter[vector]]: constant[ Computes a per-element exponent of the passed-in vector. Args: vector (TYPE): Description ] variable[weld_type] assign[=] constant[None] if call[name[isinstance], parameter[name[vector], name[LazyOpResult]]] begin[:] variable[weld_type] assign[=] name[vector].weld_type variable[vector] assign[=] name[vector].expr return[call[name[NumpyArrayWeld], parameter[call[name[numpy_weld_impl].exp, parameter[name[vector], name[weld_type]]], call[name[WeldDouble], parameter[]]]]]
keyword[def] identifier[exp] ( identifier[vector] ): literal[string] identifier[weld_type] = keyword[None] keyword[if] identifier[isinstance] ( identifier[vector] , identifier[LazyOpResult] ): identifier[weld_type] = identifier[vector] . identifier[weld_type] identifier[vector] = identifier[vector] . identifier[expr] keyword[elif] identifier[isinstance] ( identifier[vector] , identifier[np] . identifier[ndarray] ): identifier[weld_type] = identifier[numpy_weld_impl] . identifier[numpy_to_weld_type_mapping] [ identifier[str] ( identifier[vector] . identifier[dtype] )] keyword[return] identifier[NumpyArrayWeld] ( identifier[numpy_weld_impl] . identifier[exp] ( identifier[vector] , identifier[weld_type] ), identifier[WeldDouble] ())
def exp(vector): """ Computes a per-element exponent of the passed-in vector. Args: vector (TYPE): Description """ weld_type = None if isinstance(vector, LazyOpResult): weld_type = vector.weld_type vector = vector.expr # depends on [control=['if'], data=[]] elif isinstance(vector, np.ndarray): weld_type = numpy_weld_impl.numpy_to_weld_type_mapping[str(vector.dtype)] # depends on [control=['if'], data=[]] return NumpyArrayWeld(numpy_weld_impl.exp(vector, weld_type), WeldDouble())
def as_json(self): """Return the proxy's properties in JSON format. :rtype: dict """ info = { 'host': self.host, 'port': self.port, 'geo': { 'country': {'code': self._geo.code, 'name': self._geo.name}, 'region': { 'code': self._geo.region_code, 'name': self._geo.region_name, }, 'city': self._geo.city_name, }, 'types': [], 'avg_resp_time': self.avg_resp_time, 'error_rate': self.error_rate, } order = lambda tp_lvl: (len(tp_lvl[0]), tp_lvl[0][-1]) # noqa: 731 for tp, lvl in sorted(self.types.items(), key=order): info['types'].append({'type': tp, 'level': lvl or ''}) return info
def function[as_json, parameter[self]]: constant[Return the proxy's properties in JSON format. :rtype: dict ] variable[info] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b0e530>, <ast.Constant object at 0x7da1b1b0e740>, <ast.Constant object at 0x7da1b1b0c4f0>, <ast.Constant object at 0x7da1b1b0c5b0>, <ast.Constant object at 0x7da1b1b0ece0>, <ast.Constant object at 0x7da1b1b0e590>], [<ast.Attribute object at 0x7da1b1b0e890>, <ast.Attribute object at 0x7da1b1b0cf40>, <ast.Dict object at 0x7da1b1b0df30>, <ast.List object at 0x7da1b1b0e8f0>, <ast.Attribute object at 0x7da1b1b0d510>, <ast.Attribute object at 0x7da1b1b0e2c0>]] variable[order] assign[=] <ast.Lambda object at 0x7da1b1b0cbb0> for taget[tuple[[<ast.Name object at 0x7da1b1b0f880>, <ast.Name object at 0x7da1b1b0dcf0>]]] in starred[call[name[sorted], parameter[call[name[self].types.items, parameter[]]]]] begin[:] call[call[name[info]][constant[types]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b1b0f280>, <ast.Constant object at 0x7da1b1b0d660>], [<ast.Name object at 0x7da1b1b0e680>, <ast.BoolOp object at 0x7da1b1b0c550>]]]] return[name[info]]
keyword[def] identifier[as_json] ( identifier[self] ): literal[string] identifier[info] ={ literal[string] : identifier[self] . identifier[host] , literal[string] : identifier[self] . identifier[port] , literal[string] :{ literal[string] :{ literal[string] : identifier[self] . identifier[_geo] . identifier[code] , literal[string] : identifier[self] . identifier[_geo] . identifier[name] }, literal[string] :{ literal[string] : identifier[self] . identifier[_geo] . identifier[region_code] , literal[string] : identifier[self] . identifier[_geo] . identifier[region_name] , }, literal[string] : identifier[self] . identifier[_geo] . identifier[city_name] , }, literal[string] :[], literal[string] : identifier[self] . identifier[avg_resp_time] , literal[string] : identifier[self] . identifier[error_rate] , } identifier[order] = keyword[lambda] identifier[tp_lvl] :( identifier[len] ( identifier[tp_lvl] [ literal[int] ]), identifier[tp_lvl] [ literal[int] ][- literal[int] ]) keyword[for] identifier[tp] , identifier[lvl] keyword[in] identifier[sorted] ( identifier[self] . identifier[types] . identifier[items] (), identifier[key] = identifier[order] ): identifier[info] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[tp] , literal[string] : identifier[lvl] keyword[or] literal[string] }) keyword[return] identifier[info]
def as_json(self): """Return the proxy's properties in JSON format. :rtype: dict """ info = {'host': self.host, 'port': self.port, 'geo': {'country': {'code': self._geo.code, 'name': self._geo.name}, 'region': {'code': self._geo.region_code, 'name': self._geo.region_name}, 'city': self._geo.city_name}, 'types': [], 'avg_resp_time': self.avg_resp_time, 'error_rate': self.error_rate} order = lambda tp_lvl: (len(tp_lvl[0]), tp_lvl[0][-1]) # noqa: 731 for (tp, lvl) in sorted(self.types.items(), key=order): info['types'].append({'type': tp, 'level': lvl or ''}) # depends on [control=['for'], data=[]] return info
def _rewrite_ser_data(self, ser, series_data, date_1904): """ Rewrite the ``<c:tx>``, ``<c:xVal>`` and ``<c:yVal>`` child elements of *ser* based on the values in *series_data*. """ ser._remove_tx() ser._remove_xVal() ser._remove_yVal() xml_writer = _XySeriesXmlWriter(series_data) ser._insert_tx(xml_writer.tx) ser._insert_xVal(xml_writer.xVal) ser._insert_yVal(xml_writer.yVal)
def function[_rewrite_ser_data, parameter[self, ser, series_data, date_1904]]: constant[ Rewrite the ``<c:tx>``, ``<c:xVal>`` and ``<c:yVal>`` child elements of *ser* based on the values in *series_data*. ] call[name[ser]._remove_tx, parameter[]] call[name[ser]._remove_xVal, parameter[]] call[name[ser]._remove_yVal, parameter[]] variable[xml_writer] assign[=] call[name[_XySeriesXmlWriter], parameter[name[series_data]]] call[name[ser]._insert_tx, parameter[name[xml_writer].tx]] call[name[ser]._insert_xVal, parameter[name[xml_writer].xVal]] call[name[ser]._insert_yVal, parameter[name[xml_writer].yVal]]
keyword[def] identifier[_rewrite_ser_data] ( identifier[self] , identifier[ser] , identifier[series_data] , identifier[date_1904] ): literal[string] identifier[ser] . identifier[_remove_tx] () identifier[ser] . identifier[_remove_xVal] () identifier[ser] . identifier[_remove_yVal] () identifier[xml_writer] = identifier[_XySeriesXmlWriter] ( identifier[series_data] ) identifier[ser] . identifier[_insert_tx] ( identifier[xml_writer] . identifier[tx] ) identifier[ser] . identifier[_insert_xVal] ( identifier[xml_writer] . identifier[xVal] ) identifier[ser] . identifier[_insert_yVal] ( identifier[xml_writer] . identifier[yVal] )
def _rewrite_ser_data(self, ser, series_data, date_1904): """ Rewrite the ``<c:tx>``, ``<c:xVal>`` and ``<c:yVal>`` child elements of *ser* based on the values in *series_data*. """ ser._remove_tx() ser._remove_xVal() ser._remove_yVal() xml_writer = _XySeriesXmlWriter(series_data) ser._insert_tx(xml_writer.tx) ser._insert_xVal(xml_writer.xVal) ser._insert_yVal(xml_writer.yVal)
def reader(self): """ Reads raw text from the connection stream. Ensures proper exception handling. :return bytes: request """ request_stream = '' with self.connect() as request: if request.msg != 'OK': raise HTTPError request_stream = request.read().decode('utf-8') return request_stream
def function[reader, parameter[self]]: constant[ Reads raw text from the connection stream. Ensures proper exception handling. :return bytes: request ] variable[request_stream] assign[=] constant[] with call[name[self].connect, parameter[]] begin[:] if compare[name[request].msg not_equal[!=] constant[OK]] begin[:] <ast.Raise object at 0x7da1b15f58a0> variable[request_stream] assign[=] call[call[name[request].read, parameter[]].decode, parameter[constant[utf-8]]] return[name[request_stream]]
keyword[def] identifier[reader] ( identifier[self] ): literal[string] identifier[request_stream] = literal[string] keyword[with] identifier[self] . identifier[connect] () keyword[as] identifier[request] : keyword[if] identifier[request] . identifier[msg] != literal[string] : keyword[raise] identifier[HTTPError] identifier[request_stream] = identifier[request] . identifier[read] (). identifier[decode] ( literal[string] ) keyword[return] identifier[request_stream]
def reader(self): """ Reads raw text from the connection stream. Ensures proper exception handling. :return bytes: request """ request_stream = '' with self.connect() as request: if request.msg != 'OK': raise HTTPError # depends on [control=['if'], data=[]] request_stream = request.read().decode('utf-8') # depends on [control=['with'], data=['request']] return request_stream
def show_input(self, template_helper, language, seed): """ Show InputBox """ header = ParsableText(self.gettext(language, self._header), "rst", translation=self._translations.get(language, gettext.NullTranslations())) return str(DisplayableCodeSingleLineProblem.get_renderer(template_helper) .tasks.single_line_code(self.get_id(), header, "text", 0, self._optional, self._default))
def function[show_input, parameter[self, template_helper, language, seed]]: constant[ Show InputBox ] variable[header] assign[=] call[name[ParsableText], parameter[call[name[self].gettext, parameter[name[language], name[self]._header]], constant[rst]]] return[call[name[str], parameter[call[call[name[DisplayableCodeSingleLineProblem].get_renderer, parameter[name[template_helper]]].tasks.single_line_code, parameter[call[name[self].get_id, parameter[]], name[header], constant[text], constant[0], name[self]._optional, name[self]._default]]]]]
keyword[def] identifier[show_input] ( identifier[self] , identifier[template_helper] , identifier[language] , identifier[seed] ): literal[string] identifier[header] = identifier[ParsableText] ( identifier[self] . identifier[gettext] ( identifier[language] , identifier[self] . identifier[_header] ), literal[string] , identifier[translation] = identifier[self] . identifier[_translations] . identifier[get] ( identifier[language] , identifier[gettext] . identifier[NullTranslations] ())) keyword[return] identifier[str] ( identifier[DisplayableCodeSingleLineProblem] . identifier[get_renderer] ( identifier[template_helper] ) . identifier[tasks] . identifier[single_line_code] ( identifier[self] . identifier[get_id] (), identifier[header] , literal[string] , literal[int] , identifier[self] . identifier[_optional] , identifier[self] . identifier[_default] ))
def show_input(self, template_helper, language, seed): """ Show InputBox """ header = ParsableText(self.gettext(language, self._header), 'rst', translation=self._translations.get(language, gettext.NullTranslations())) return str(DisplayableCodeSingleLineProblem.get_renderer(template_helper).tasks.single_line_code(self.get_id(), header, 'text', 0, self._optional, self._default))
def get_attachment_data(self, attachment): """Attachments data """ f = attachment.getAttachmentFile() attachment_type = attachment.getAttachmentType() attachment_keys = attachment.getAttachmentKeys() filename = f.filename filesize = self.get_filesize(f) mimetype = f.getContentType() report_option = attachment.getReportOption() return { "obj": attachment, "attachment_type": attachment_type, "attachment_keys": attachment_keys, "file": f, "uid": api.get_uid(attachment), "filesize": filesize, "filename": filename, "mimetype": mimetype, "report_option": report_option, }
def function[get_attachment_data, parameter[self, attachment]]: constant[Attachments data ] variable[f] assign[=] call[name[attachment].getAttachmentFile, parameter[]] variable[attachment_type] assign[=] call[name[attachment].getAttachmentType, parameter[]] variable[attachment_keys] assign[=] call[name[attachment].getAttachmentKeys, parameter[]] variable[filename] assign[=] name[f].filename variable[filesize] assign[=] call[name[self].get_filesize, parameter[name[f]]] variable[mimetype] assign[=] call[name[f].getContentType, parameter[]] variable[report_option] assign[=] call[name[attachment].getReportOption, parameter[]] return[dictionary[[<ast.Constant object at 0x7da18eb57df0>, <ast.Constant object at 0x7da18eb561d0>, <ast.Constant object at 0x7da18eb56890>, <ast.Constant object at 0x7da18eb57490>, <ast.Constant object at 0x7da18eb550c0>, <ast.Constant object at 0x7da18eb569b0>, <ast.Constant object at 0x7da18eb55810>, <ast.Constant object at 0x7da18eb55d80>, <ast.Constant object at 0x7da18eb575e0>], [<ast.Name object at 0x7da18eb54430>, <ast.Name object at 0x7da18eb57940>, <ast.Name object at 0x7da18eb542b0>, <ast.Name object at 0x7da18eb55a50>, <ast.Call object at 0x7da18eb56fe0>, <ast.Name object at 0x7da18eb55db0>, <ast.Name object at 0x7da18eb553f0>, <ast.Name object at 0x7da18eb57280>, <ast.Name object at 0x7da18eb57070>]]]
keyword[def] identifier[get_attachment_data] ( identifier[self] , identifier[attachment] ): literal[string] identifier[f] = identifier[attachment] . identifier[getAttachmentFile] () identifier[attachment_type] = identifier[attachment] . identifier[getAttachmentType] () identifier[attachment_keys] = identifier[attachment] . identifier[getAttachmentKeys] () identifier[filename] = identifier[f] . identifier[filename] identifier[filesize] = identifier[self] . identifier[get_filesize] ( identifier[f] ) identifier[mimetype] = identifier[f] . identifier[getContentType] () identifier[report_option] = identifier[attachment] . identifier[getReportOption] () keyword[return] { literal[string] : identifier[attachment] , literal[string] : identifier[attachment_type] , literal[string] : identifier[attachment_keys] , literal[string] : identifier[f] , literal[string] : identifier[api] . identifier[get_uid] ( identifier[attachment] ), literal[string] : identifier[filesize] , literal[string] : identifier[filename] , literal[string] : identifier[mimetype] , literal[string] : identifier[report_option] , }
def get_attachment_data(self, attachment): """Attachments data """ f = attachment.getAttachmentFile() attachment_type = attachment.getAttachmentType() attachment_keys = attachment.getAttachmentKeys() filename = f.filename filesize = self.get_filesize(f) mimetype = f.getContentType() report_option = attachment.getReportOption() return {'obj': attachment, 'attachment_type': attachment_type, 'attachment_keys': attachment_keys, 'file': f, 'uid': api.get_uid(attachment), 'filesize': filesize, 'filename': filename, 'mimetype': mimetype, 'report_option': report_option}
def runSearchContinuous(self, request): """ Returns a SearchContinuousResponse for the specified SearchContinuousRequest object. :param request: JSON string representing searchContinuousRequest :return: JSON string representing searchContinuousResponse """ return self.runSearchRequest( request, protocol.SearchContinuousRequest, protocol.SearchContinuousResponse, self.continuousGenerator)
def function[runSearchContinuous, parameter[self, request]]: constant[ Returns a SearchContinuousResponse for the specified SearchContinuousRequest object. :param request: JSON string representing searchContinuousRequest :return: JSON string representing searchContinuousResponse ] return[call[name[self].runSearchRequest, parameter[name[request], name[protocol].SearchContinuousRequest, name[protocol].SearchContinuousResponse, name[self].continuousGenerator]]]
keyword[def] identifier[runSearchContinuous] ( identifier[self] , identifier[request] ): literal[string] keyword[return] identifier[self] . identifier[runSearchRequest] ( identifier[request] , identifier[protocol] . identifier[SearchContinuousRequest] , identifier[protocol] . identifier[SearchContinuousResponse] , identifier[self] . identifier[continuousGenerator] )
def runSearchContinuous(self, request): """ Returns a SearchContinuousResponse for the specified SearchContinuousRequest object. :param request: JSON string representing searchContinuousRequest :return: JSON string representing searchContinuousResponse """ return self.runSearchRequest(request, protocol.SearchContinuousRequest, protocol.SearchContinuousResponse, self.continuousGenerator)
def rename_document(self, did, name): ''' Renames the specified document. Args: - did (str): Document ID - name (str): New document name Returns: - requests.Response: Onshape response data ''' payload = { 'name': name } return self._api.request('post', '/api/documents/' + did, body=payload)
def function[rename_document, parameter[self, did, name]]: constant[ Renames the specified document. Args: - did (str): Document ID - name (str): New document name Returns: - requests.Response: Onshape response data ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b228d3f0>], [<ast.Name object at 0x7da1b228de70>]] return[call[name[self]._api.request, parameter[constant[post], binary_operation[constant[/api/documents/] + name[did]]]]]
keyword[def] identifier[rename_document] ( identifier[self] , identifier[did] , identifier[name] ): literal[string] identifier[payload] ={ literal[string] : identifier[name] } keyword[return] identifier[self] . identifier[_api] . identifier[request] ( literal[string] , literal[string] + identifier[did] , identifier[body] = identifier[payload] )
def rename_document(self, did, name): """ Renames the specified document. Args: - did (str): Document ID - name (str): New document name Returns: - requests.Response: Onshape response data """ payload = {'name': name} return self._api.request('post', '/api/documents/' + did, body=payload)
def load_lines(filename): """ Load a text file as an array of lines. Args: filename: Path to the input file. Returns: An array of strings, each representing an individual line. """ with open(filename, 'r', encoding='utf-8') as f: return [line.rstrip('\n') for line in f.readlines()]
def function[load_lines, parameter[filename]]: constant[ Load a text file as an array of lines. Args: filename: Path to the input file. Returns: An array of strings, each representing an individual line. ] with call[name[open], parameter[name[filename], constant[r]]] begin[:] return[<ast.ListComp object at 0x7da1b24b71c0>]
keyword[def] identifier[load_lines] ( identifier[filename] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] : keyword[return] [ identifier[line] . identifier[rstrip] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[f] . identifier[readlines] ()]
def load_lines(filename): """ Load a text file as an array of lines. Args: filename: Path to the input file. Returns: An array of strings, each representing an individual line. """ with open(filename, 'r', encoding='utf-8') as f: return [line.rstrip('\n') for line in f.readlines()] # depends on [control=['with'], data=['f']]
def _filter_unique_identities(uidentities, matcher): """Filter a set of unique identities. This function will use the `matcher` to generate a list of `FilteredIdentity` objects. It will return a tuple with the list of filtered objects, the unique identities not filtered and a table mapping uuids with unique identities. """ filtered = [] no_filtered = [] uuids = {} for uidentity in uidentities: n = len(filtered) filtered += matcher.filter(uidentity) if len(filtered) > n: uuids[uidentity.uuid] = uidentity else: no_filtered.append([uidentity]) return filtered, no_filtered, uuids
def function[_filter_unique_identities, parameter[uidentities, matcher]]: constant[Filter a set of unique identities. This function will use the `matcher` to generate a list of `FilteredIdentity` objects. It will return a tuple with the list of filtered objects, the unique identities not filtered and a table mapping uuids with unique identities. ] variable[filtered] assign[=] list[[]] variable[no_filtered] assign[=] list[[]] variable[uuids] assign[=] dictionary[[], []] for taget[name[uidentity]] in starred[name[uidentities]] begin[:] variable[n] assign[=] call[name[len], parameter[name[filtered]]] <ast.AugAssign object at 0x7da1b0dbe710> if compare[call[name[len], parameter[name[filtered]]] greater[>] name[n]] begin[:] call[name[uuids]][name[uidentity].uuid] assign[=] name[uidentity] return[tuple[[<ast.Name object at 0x7da1b0ebfa30>, <ast.Name object at 0x7da1b0ebe320>, <ast.Name object at 0x7da1b0ebf7f0>]]]
keyword[def] identifier[_filter_unique_identities] ( identifier[uidentities] , identifier[matcher] ): literal[string] identifier[filtered] =[] identifier[no_filtered] =[] identifier[uuids] ={} keyword[for] identifier[uidentity] keyword[in] identifier[uidentities] : identifier[n] = identifier[len] ( identifier[filtered] ) identifier[filtered] += identifier[matcher] . identifier[filter] ( identifier[uidentity] ) keyword[if] identifier[len] ( identifier[filtered] )> identifier[n] : identifier[uuids] [ identifier[uidentity] . identifier[uuid] ]= identifier[uidentity] keyword[else] : identifier[no_filtered] . identifier[append] ([ identifier[uidentity] ]) keyword[return] identifier[filtered] , identifier[no_filtered] , identifier[uuids]
def _filter_unique_identities(uidentities, matcher): """Filter a set of unique identities. This function will use the `matcher` to generate a list of `FilteredIdentity` objects. It will return a tuple with the list of filtered objects, the unique identities not filtered and a table mapping uuids with unique identities. """ filtered = [] no_filtered = [] uuids = {} for uidentity in uidentities: n = len(filtered) filtered += matcher.filter(uidentity) if len(filtered) > n: uuids[uidentity.uuid] = uidentity # depends on [control=['if'], data=[]] else: no_filtered.append([uidentity]) # depends on [control=['for'], data=['uidentity']] return (filtered, no_filtered, uuids)
def bit_count(self, start=None, end=None): """ Count the set bits in a string. Note that the `start` and `end` parameters are offsets in **bytes**. """ return self.database.bitcount(self.key, start, end)
def function[bit_count, parameter[self, start, end]]: constant[ Count the set bits in a string. Note that the `start` and `end` parameters are offsets in **bytes**. ] return[call[name[self].database.bitcount, parameter[name[self].key, name[start], name[end]]]]
keyword[def] identifier[bit_count] ( identifier[self] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[database] . identifier[bitcount] ( identifier[self] . identifier[key] , identifier[start] , identifier[end] )
def bit_count(self, start=None, end=None): """ Count the set bits in a string. Note that the `start` and `end` parameters are offsets in **bytes**. """ return self.database.bitcount(self.key, start, end)
def p_task_statement(self, p): 'task_statement : taskvardecls task_calc' p[0] = p[1] + (p[2],) p.set_lineno(0, p.lineno(1))
def function[p_task_statement, parameter[self, p]]: constant[task_statement : taskvardecls task_calc] call[name[p]][constant[0]] assign[=] binary_operation[call[name[p]][constant[1]] + tuple[[<ast.Subscript object at 0x7da1b16022c0>]]] call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
keyword[def] identifier[p_task_statement] ( identifier[self] , identifier[p] ): literal[string] identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]+( identifier[p] [ literal[int] ],) identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
def p_task_statement(self, p): """task_statement : taskvardecls task_calc""" p[0] = p[1] + (p[2],) p.set_lineno(0, p.lineno(1))
async def open_websocket_server(sock, filter=None): # pylint: disable=W0622 """ A context manager which serves this websocket. :param filter: an async callback which accepts the connection request and returns a bool, or an explicit Accept/Reject message. """ ws = await create_websocket_server(sock, filter=filter) try: yield ws finally: await ws.close()
<ast.AsyncFunctionDef object at 0x7da2041d8310>
keyword[async] keyword[def] identifier[open_websocket_server] ( identifier[sock] , identifier[filter] = keyword[None] ): literal[string] identifier[ws] = keyword[await] identifier[create_websocket_server] ( identifier[sock] , identifier[filter] = identifier[filter] ) keyword[try] : keyword[yield] identifier[ws] keyword[finally] : keyword[await] identifier[ws] . identifier[close] ()
async def open_websocket_server(sock, filter=None): # pylint: disable=W0622 '\n A context manager which serves this websocket.\n\n :param filter: an async callback which accepts the connection request\n and returns a bool, or an explicit Accept/Reject message.\n ' ws = await create_websocket_server(sock, filter=filter) try: yield ws # depends on [control=['try'], data=[]] finally: await ws.close()
def serialize(input, tree="etree", encoding=None, **serializer_opts): """Serializes the input token stream using the specified treewalker :arg input: the token stream to serialize :arg tree: the treewalker to use :arg encoding: the encoding to use :arg serializer_opts: any options to pass to the :py:class:`html5lib.serializer.HTMLSerializer` that gets created :returns: the tree serialized as a string Example: >>> from html5lib.html5parser import parse >>> from html5lib.serializer import serialize >>> token_stream = parse('<html><body><p>Hi!</p></body></html>') >>> serialize(token_stream, omit_optional_tags=False) '<html><head></head><body><p>Hi!</p></body></html>' """ # XXX: Should we cache this? walker = treewalkers.getTreeWalker(tree) s = HTMLSerializer(**serializer_opts) return s.render(walker(input), encoding)
def function[serialize, parameter[input, tree, encoding]]: constant[Serializes the input token stream using the specified treewalker :arg input: the token stream to serialize :arg tree: the treewalker to use :arg encoding: the encoding to use :arg serializer_opts: any options to pass to the :py:class:`html5lib.serializer.HTMLSerializer` that gets created :returns: the tree serialized as a string Example: >>> from html5lib.html5parser import parse >>> from html5lib.serializer import serialize >>> token_stream = parse('<html><body><p>Hi!</p></body></html>') >>> serialize(token_stream, omit_optional_tags=False) '<html><head></head><body><p>Hi!</p></body></html>' ] variable[walker] assign[=] call[name[treewalkers].getTreeWalker, parameter[name[tree]]] variable[s] assign[=] call[name[HTMLSerializer], parameter[]] return[call[name[s].render, parameter[call[name[walker], parameter[name[input]]], name[encoding]]]]
keyword[def] identifier[serialize] ( identifier[input] , identifier[tree] = literal[string] , identifier[encoding] = keyword[None] ,** identifier[serializer_opts] ): literal[string] identifier[walker] = identifier[treewalkers] . identifier[getTreeWalker] ( identifier[tree] ) identifier[s] = identifier[HTMLSerializer] (** identifier[serializer_opts] ) keyword[return] identifier[s] . identifier[render] ( identifier[walker] ( identifier[input] ), identifier[encoding] )
def serialize(input, tree='etree', encoding=None, **serializer_opts): """Serializes the input token stream using the specified treewalker :arg input: the token stream to serialize :arg tree: the treewalker to use :arg encoding: the encoding to use :arg serializer_opts: any options to pass to the :py:class:`html5lib.serializer.HTMLSerializer` that gets created :returns: the tree serialized as a string Example: >>> from html5lib.html5parser import parse >>> from html5lib.serializer import serialize >>> token_stream = parse('<html><body><p>Hi!</p></body></html>') >>> serialize(token_stream, omit_optional_tags=False) '<html><head></head><body><p>Hi!</p></body></html>' """ # XXX: Should we cache this? walker = treewalkers.getTreeWalker(tree) s = HTMLSerializer(**serializer_opts) return s.render(walker(input), encoding)
def nestKey(self, key): '''Returns a nested `key`.''' nest = self.nest_keyfn(key) # if depth * length > len(key.name), we need to pad. mult = 1 + int(self.nest_depth * self.nest_length / len(nest)) nest = nest * mult pref = Key(self.nestedPath(nest, self.nest_depth, self.nest_length)) return pref.child(key)
def function[nestKey, parameter[self, key]]: constant[Returns a nested `key`.] variable[nest] assign[=] call[name[self].nest_keyfn, parameter[name[key]]] variable[mult] assign[=] binary_operation[constant[1] + call[name[int], parameter[binary_operation[binary_operation[name[self].nest_depth * name[self].nest_length] / call[name[len], parameter[name[nest]]]]]]] variable[nest] assign[=] binary_operation[name[nest] * name[mult]] variable[pref] assign[=] call[name[Key], parameter[call[name[self].nestedPath, parameter[name[nest], name[self].nest_depth, name[self].nest_length]]]] return[call[name[pref].child, parameter[name[key]]]]
keyword[def] identifier[nestKey] ( identifier[self] , identifier[key] ): literal[string] identifier[nest] = identifier[self] . identifier[nest_keyfn] ( identifier[key] ) identifier[mult] = literal[int] + identifier[int] ( identifier[self] . identifier[nest_depth] * identifier[self] . identifier[nest_length] / identifier[len] ( identifier[nest] )) identifier[nest] = identifier[nest] * identifier[mult] identifier[pref] = identifier[Key] ( identifier[self] . identifier[nestedPath] ( identifier[nest] , identifier[self] . identifier[nest_depth] , identifier[self] . identifier[nest_length] )) keyword[return] identifier[pref] . identifier[child] ( identifier[key] )
def nestKey(self, key): """Returns a nested `key`.""" nest = self.nest_keyfn(key) # if depth * length > len(key.name), we need to pad. mult = 1 + int(self.nest_depth * self.nest_length / len(nest)) nest = nest * mult pref = Key(self.nestedPath(nest, self.nest_depth, self.nest_length)) return pref.child(key)
def hcons(xmrs): """Return the list of all HandleConstraints in *xmrs*.""" return [ HandleConstraint(hi, reln, lo) for hi, reln, lo in sorted(xmrs.hcons(), key=lambda hc: var_id(hc[0])) ]
def function[hcons, parameter[xmrs]]: constant[Return the list of all HandleConstraints in *xmrs*.] return[<ast.ListComp object at 0x7da1b0348dc0>]
keyword[def] identifier[hcons] ( identifier[xmrs] ): literal[string] keyword[return] [ identifier[HandleConstraint] ( identifier[hi] , identifier[reln] , identifier[lo] ) keyword[for] identifier[hi] , identifier[reln] , identifier[lo] keyword[in] identifier[sorted] ( identifier[xmrs] . identifier[hcons] (), identifier[key] = keyword[lambda] identifier[hc] : identifier[var_id] ( identifier[hc] [ literal[int] ])) ]
def hcons(xmrs): """Return the list of all HandleConstraints in *xmrs*.""" return [HandleConstraint(hi, reln, lo) for (hi, reln, lo) in sorted(xmrs.hcons(), key=lambda hc: var_id(hc[0]))]
def exception_message(self) -> Union[str, None]: """ On Lavalink V3, if there was an exception during a load or get tracks call this property will be populated with the error message. If there was no error this property will be ``None``. """ if self.has_error: exception_data = self._raw.get("exception", {}) return exception_data.get("message") return None
def function[exception_message, parameter[self]]: constant[ On Lavalink V3, if there was an exception during a load or get tracks call this property will be populated with the error message. If there was no error this property will be ``None``. ] if name[self].has_error begin[:] variable[exception_data] assign[=] call[name[self]._raw.get, parameter[constant[exception], dictionary[[], []]]] return[call[name[exception_data].get, parameter[constant[message]]]] return[constant[None]]
keyword[def] identifier[exception_message] ( identifier[self] )-> identifier[Union] [ identifier[str] , keyword[None] ]: literal[string] keyword[if] identifier[self] . identifier[has_error] : identifier[exception_data] = identifier[self] . identifier[_raw] . identifier[get] ( literal[string] ,{}) keyword[return] identifier[exception_data] . identifier[get] ( literal[string] ) keyword[return] keyword[None]
def exception_message(self) -> Union[str, None]: """ On Lavalink V3, if there was an exception during a load or get tracks call this property will be populated with the error message. If there was no error this property will be ``None``. """ if self.has_error: exception_data = self._raw.get('exception', {}) return exception_data.get('message') # depends on [control=['if'], data=[]] return None
def export_file(self, filepath, __filter, data, preview_data=None): """Export data for other applications Parameters ---------- filepath: String \tPath of export file __filter: String \tImport filter data: Object \tCode array result object slice, i. e. one object or iterable of \tsuch objects """ if __filter.startswith("cell_"): self._export_figure(filepath, data, __filter[5:]) elif __filter == "csv": self._export_csv(filepath, data, preview_data=preview_data) elif __filter in ["pdf", "svg"]: self.export_cairo(filepath, __filter)
def function[export_file, parameter[self, filepath, __filter, data, preview_data]]: constant[Export data for other applications Parameters ---------- filepath: String Path of export file __filter: String Import filter data: Object Code array result object slice, i. e. one object or iterable of such objects ] if call[name[__filter].startswith, parameter[constant[cell_]]] begin[:] call[name[self]._export_figure, parameter[name[filepath], name[data], call[name[__filter]][<ast.Slice object at 0x7da1b17df340>]]]
keyword[def] identifier[export_file] ( identifier[self] , identifier[filepath] , identifier[__filter] , identifier[data] , identifier[preview_data] = keyword[None] ): literal[string] keyword[if] identifier[__filter] . identifier[startswith] ( literal[string] ): identifier[self] . identifier[_export_figure] ( identifier[filepath] , identifier[data] , identifier[__filter] [ literal[int] :]) keyword[elif] identifier[__filter] == literal[string] : identifier[self] . identifier[_export_csv] ( identifier[filepath] , identifier[data] , identifier[preview_data] = identifier[preview_data] ) keyword[elif] identifier[__filter] keyword[in] [ literal[string] , literal[string] ]: identifier[self] . identifier[export_cairo] ( identifier[filepath] , identifier[__filter] )
def export_file(self, filepath, __filter, data, preview_data=None): """Export data for other applications Parameters ---------- filepath: String Path of export file __filter: String Import filter data: Object Code array result object slice, i. e. one object or iterable of such objects """ if __filter.startswith('cell_'): self._export_figure(filepath, data, __filter[5:]) # depends on [control=['if'], data=[]] elif __filter == 'csv': self._export_csv(filepath, data, preview_data=preview_data) # depends on [control=['if'], data=[]] elif __filter in ['pdf', 'svg']: self.export_cairo(filepath, __filter) # depends on [control=['if'], data=['__filter']]
def fisher_by_pol(data): """ input: as in dolnp (list of dictionaries with 'dec' and 'inc') description: do fisher mean after splitting data into two polarity domains. output: three dictionaries: 'A'= polarity 'A' 'B = polarity 'B' 'ALL'= switching polarity of 'B' directions, and calculate fisher mean of all data code modified from eqarea_ell.py b rshaar 1/23/2014 """ FisherByPoles = {} DIblock, nameblock, locblock = [], [], [] for rec in data: if 'dec' in list(rec.keys()) and 'inc' in list(rec.keys()): # collect data for fisher calculation DIblock.append([float(rec["dec"]), float(rec["inc"])]) else: continue if 'name' in list(rec.keys()): nameblock.append(rec['name']) else: nameblock.append("") if 'loc' in list(rec.keys()): locblock.append(rec['loc']) else: locblock.append("") ppars = doprinc(np.array(DIblock)) # get principal directions # choose the northerly declination principe component ("normal") reference_DI = [ppars['dec'], ppars['inc']] # make reference direction in northern hemisphere if reference_DI[0] > 90 and reference_DI[0] < 270: reference_DI[0] = (reference_DI[0] + 180.) % 360 reference_DI[1] = reference_DI[1] * -1. nDIs, rDIs, all_DI, npars, rpars = [], [], [], [], [] nlist, rlist, alllist = "", "", "" nloclist, rloclist, allloclist = "", "", "" for k in range(len(DIblock)): if angle([DIblock[k][0], DIblock[k][1]], reference_DI) > 90.: rDIs.append(DIblock[k]) rlist = rlist + ":" + nameblock[k] if locblock[k] not in rloclist: rloclist = rloclist + ":" + locblock[k] all_DI.append([(DIblock[k][0] + 180.) % 360., -1. 
* DIblock[k][1]]) alllist = alllist + ":" + nameblock[k] if locblock[k] not in allloclist: allloclist = allloclist + ":" + locblock[k] else: nDIs.append(DIblock[k]) nlist = nlist + ":" + nameblock[k] if locblock[k] not in nloclist: nloclist = nloclist + ":" + locblock[k] all_DI.append(DIblock[k]) alllist = alllist + ":" + nameblock[k] if locblock[k] not in allloclist: allloclist = allloclist + ":" + locblock[k] for mode in ['A', 'B', 'All']: if mode == 'A' and len(nDIs) > 2: fpars = fisher_mean(nDIs) fpars['sites'] = nlist.strip(':') fpars['locs'] = nloclist.strip(':') FisherByPoles[mode] = fpars elif mode == 'B' and len(rDIs) > 2: fpars = fisher_mean(rDIs) fpars['sites'] = rlist.strip(':') fpars['locs'] = rloclist.strip(':') FisherByPoles[mode] = fpars elif mode == 'All' and len(all_DI) > 2: fpars = fisher_mean(all_DI) fpars['sites'] = alllist.strip(':') fpars['locs'] = allloclist.strip(':') FisherByPoles[mode] = fpars return FisherByPoles
def function[fisher_by_pol, parameter[data]]: constant[ input: as in dolnp (list of dictionaries with 'dec' and 'inc') description: do fisher mean after splitting data into two polarity domains. output: three dictionaries: 'A'= polarity 'A' 'B = polarity 'B' 'ALL'= switching polarity of 'B' directions, and calculate fisher mean of all data code modified from eqarea_ell.py b rshaar 1/23/2014 ] variable[FisherByPoles] assign[=] dictionary[[], []] <ast.Tuple object at 0x7da1b042cf40> assign[=] tuple[[<ast.List object at 0x7da1b042d8d0>, <ast.List object at 0x7da1b042fa30>, <ast.List object at 0x7da1b042fca0>]] for taget[name[rec]] in starred[name[data]] begin[:] if <ast.BoolOp object at 0x7da1b042cbb0> begin[:] call[name[DIblock].append, parameter[list[[<ast.Call object at 0x7da1b042e500>, <ast.Call object at 0x7da1b042cfa0>]]]] if compare[constant[name] in call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:] call[name[nameblock].append, parameter[call[name[rec]][constant[name]]]] if compare[constant[loc] in call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:] call[name[locblock].append, parameter[call[name[rec]][constant[loc]]]] variable[ppars] assign[=] call[name[doprinc], parameter[call[name[np].array, parameter[name[DIblock]]]]] variable[reference_DI] assign[=] list[[<ast.Subscript object at 0x7da1b042e2c0>, <ast.Subscript object at 0x7da1b042e260>]] if <ast.BoolOp object at 0x7da1b042d060> begin[:] call[name[reference_DI]][constant[0]] assign[=] binary_operation[binary_operation[call[name[reference_DI]][constant[0]] + constant[180.0]] <ast.Mod object at 0x7da2590d6920> constant[360]] call[name[reference_DI]][constant[1]] assign[=] binary_operation[call[name[reference_DI]][constant[1]] * <ast.UnaryOp object at 0x7da1b042f430>] <ast.Tuple object at 0x7da1b042e800> assign[=] tuple[[<ast.List object at 0x7da1b042df30>, <ast.List object at 0x7da1b042e8f0>, <ast.List object at 0x7da1b042d990>, <ast.List object at 0x7da1b042cd30>, 
<ast.List object at 0x7da1b042c040>]] <ast.Tuple object at 0x7da1b042faf0> assign[=] tuple[[<ast.Constant object at 0x7da1b042d090>, <ast.Constant object at 0x7da1b042db70>, <ast.Constant object at 0x7da1b042d5d0>]] <ast.Tuple object at 0x7da1b042c3a0> assign[=] tuple[[<ast.Constant object at 0x7da1b042f4f0>, <ast.Constant object at 0x7da1b042f7f0>, <ast.Constant object at 0x7da1b042f0d0>]] for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[DIblock]]]]]] begin[:] if compare[call[name[angle], parameter[list[[<ast.Subscript object at 0x7da1b042ffd0>, <ast.Subscript object at 0x7da1b042ceb0>]], name[reference_DI]]] greater[>] constant[90.0]] begin[:] call[name[rDIs].append, parameter[call[name[DIblock]][name[k]]]] variable[rlist] assign[=] binary_operation[binary_operation[name[rlist] + constant[:]] + call[name[nameblock]][name[k]]] if compare[call[name[locblock]][name[k]] <ast.NotIn object at 0x7da2590d7190> name[rloclist]] begin[:] variable[rloclist] assign[=] binary_operation[binary_operation[name[rloclist] + constant[:]] + call[name[locblock]][name[k]]] call[name[all_DI].append, parameter[list[[<ast.BinOp object at 0x7da1b042dd50>, <ast.BinOp object at 0x7da1b042fa90>]]]] variable[alllist] assign[=] binary_operation[binary_operation[name[alllist] + constant[:]] + call[name[nameblock]][name[k]]] if compare[call[name[locblock]][name[k]] <ast.NotIn object at 0x7da2590d7190> name[allloclist]] begin[:] variable[allloclist] assign[=] binary_operation[binary_operation[name[allloclist] + constant[:]] + call[name[locblock]][name[k]]] for taget[name[mode]] in starred[list[[<ast.Constant object at 0x7da18dc9ace0>, <ast.Constant object at 0x7da18dc996f0>, <ast.Constant object at 0x7da18dc99bd0>]]] begin[:] if <ast.BoolOp object at 0x7da18dc9ab00> begin[:] variable[fpars] assign[=] call[name[fisher_mean], parameter[name[nDIs]]] call[name[fpars]][constant[sites]] assign[=] call[name[nlist].strip, parameter[constant[:]]] 
call[name[fpars]][constant[locs]] assign[=] call[name[nloclist].strip, parameter[constant[:]]] call[name[FisherByPoles]][name[mode]] assign[=] name[fpars] return[name[FisherByPoles]]
keyword[def] identifier[fisher_by_pol] ( identifier[data] ): literal[string] identifier[FisherByPoles] ={} identifier[DIblock] , identifier[nameblock] , identifier[locblock] =[],[],[] keyword[for] identifier[rec] keyword[in] identifier[data] : keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[and] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()): identifier[DIblock] . identifier[append] ([ identifier[float] ( identifier[rec] [ literal[string] ]), identifier[float] ( identifier[rec] [ literal[string] ])]) keyword[else] : keyword[continue] keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()): identifier[nameblock] . identifier[append] ( identifier[rec] [ literal[string] ]) keyword[else] : identifier[nameblock] . identifier[append] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()): identifier[locblock] . identifier[append] ( identifier[rec] [ literal[string] ]) keyword[else] : identifier[locblock] . identifier[append] ( literal[string] ) identifier[ppars] = identifier[doprinc] ( identifier[np] . 
identifier[array] ( identifier[DIblock] )) identifier[reference_DI] =[ identifier[ppars] [ literal[string] ], identifier[ppars] [ literal[string] ]] keyword[if] identifier[reference_DI] [ literal[int] ]> literal[int] keyword[and] identifier[reference_DI] [ literal[int] ]< literal[int] : identifier[reference_DI] [ literal[int] ]=( identifier[reference_DI] [ literal[int] ]+ literal[int] )% literal[int] identifier[reference_DI] [ literal[int] ]= identifier[reference_DI] [ literal[int] ]*- literal[int] identifier[nDIs] , identifier[rDIs] , identifier[all_DI] , identifier[npars] , identifier[rpars] =[],[],[],[],[] identifier[nlist] , identifier[rlist] , identifier[alllist] = literal[string] , literal[string] , literal[string] identifier[nloclist] , identifier[rloclist] , identifier[allloclist] = literal[string] , literal[string] , literal[string] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[DIblock] )): keyword[if] identifier[angle] ([ identifier[DIblock] [ identifier[k] ][ literal[int] ], identifier[DIblock] [ identifier[k] ][ literal[int] ]], identifier[reference_DI] )> literal[int] : identifier[rDIs] . identifier[append] ( identifier[DIblock] [ identifier[k] ]) identifier[rlist] = identifier[rlist] + literal[string] + identifier[nameblock] [ identifier[k] ] keyword[if] identifier[locblock] [ identifier[k] ] keyword[not] keyword[in] identifier[rloclist] : identifier[rloclist] = identifier[rloclist] + literal[string] + identifier[locblock] [ identifier[k] ] identifier[all_DI] . 
identifier[append] ([( identifier[DIblock] [ identifier[k] ][ literal[int] ]+ literal[int] )% literal[int] ,- literal[int] * identifier[DIblock] [ identifier[k] ][ literal[int] ]]) identifier[alllist] = identifier[alllist] + literal[string] + identifier[nameblock] [ identifier[k] ] keyword[if] identifier[locblock] [ identifier[k] ] keyword[not] keyword[in] identifier[allloclist] : identifier[allloclist] = identifier[allloclist] + literal[string] + identifier[locblock] [ identifier[k] ] keyword[else] : identifier[nDIs] . identifier[append] ( identifier[DIblock] [ identifier[k] ]) identifier[nlist] = identifier[nlist] + literal[string] + identifier[nameblock] [ identifier[k] ] keyword[if] identifier[locblock] [ identifier[k] ] keyword[not] keyword[in] identifier[nloclist] : identifier[nloclist] = identifier[nloclist] + literal[string] + identifier[locblock] [ identifier[k] ] identifier[all_DI] . identifier[append] ( identifier[DIblock] [ identifier[k] ]) identifier[alllist] = identifier[alllist] + literal[string] + identifier[nameblock] [ identifier[k] ] keyword[if] identifier[locblock] [ identifier[k] ] keyword[not] keyword[in] identifier[allloclist] : identifier[allloclist] = identifier[allloclist] + literal[string] + identifier[locblock] [ identifier[k] ] keyword[for] identifier[mode] keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[if] identifier[mode] == literal[string] keyword[and] identifier[len] ( identifier[nDIs] )> literal[int] : identifier[fpars] = identifier[fisher_mean] ( identifier[nDIs] ) identifier[fpars] [ literal[string] ]= identifier[nlist] . identifier[strip] ( literal[string] ) identifier[fpars] [ literal[string] ]= identifier[nloclist] . 
identifier[strip] ( literal[string] ) identifier[FisherByPoles] [ identifier[mode] ]= identifier[fpars] keyword[elif] identifier[mode] == literal[string] keyword[and] identifier[len] ( identifier[rDIs] )> literal[int] : identifier[fpars] = identifier[fisher_mean] ( identifier[rDIs] ) identifier[fpars] [ literal[string] ]= identifier[rlist] . identifier[strip] ( literal[string] ) identifier[fpars] [ literal[string] ]= identifier[rloclist] . identifier[strip] ( literal[string] ) identifier[FisherByPoles] [ identifier[mode] ]= identifier[fpars] keyword[elif] identifier[mode] == literal[string] keyword[and] identifier[len] ( identifier[all_DI] )> literal[int] : identifier[fpars] = identifier[fisher_mean] ( identifier[all_DI] ) identifier[fpars] [ literal[string] ]= identifier[alllist] . identifier[strip] ( literal[string] ) identifier[fpars] [ literal[string] ]= identifier[allloclist] . identifier[strip] ( literal[string] ) identifier[FisherByPoles] [ identifier[mode] ]= identifier[fpars] keyword[return] identifier[FisherByPoles]
def fisher_by_pol(data): """ input: as in dolnp (list of dictionaries with 'dec' and 'inc') description: do fisher mean after splitting data into two polarity domains. output: three dictionaries: 'A'= polarity 'A' 'B = polarity 'B' 'ALL'= switching polarity of 'B' directions, and calculate fisher mean of all data code modified from eqarea_ell.py b rshaar 1/23/2014 """ FisherByPoles = {} (DIblock, nameblock, locblock) = ([], [], []) for rec in data: if 'dec' in list(rec.keys()) and 'inc' in list(rec.keys()): # collect data for fisher calculation DIblock.append([float(rec['dec']), float(rec['inc'])]) # depends on [control=['if'], data=[]] else: continue if 'name' in list(rec.keys()): nameblock.append(rec['name']) # depends on [control=['if'], data=[]] else: nameblock.append('') if 'loc' in list(rec.keys()): locblock.append(rec['loc']) # depends on [control=['if'], data=[]] else: locblock.append('') # depends on [control=['for'], data=['rec']] ppars = doprinc(np.array(DIblock)) # get principal directions # choose the northerly declination principe component ("normal") reference_DI = [ppars['dec'], ppars['inc']] # make reference direction in northern hemisphere if reference_DI[0] > 90 and reference_DI[0] < 270: reference_DI[0] = (reference_DI[0] + 180.0) % 360 reference_DI[1] = reference_DI[1] * -1.0 # depends on [control=['if'], data=[]] (nDIs, rDIs, all_DI, npars, rpars) = ([], [], [], [], []) (nlist, rlist, alllist) = ('', '', '') (nloclist, rloclist, allloclist) = ('', '', '') for k in range(len(DIblock)): if angle([DIblock[k][0], DIblock[k][1]], reference_DI) > 90.0: rDIs.append(DIblock[k]) rlist = rlist + ':' + nameblock[k] if locblock[k] not in rloclist: rloclist = rloclist + ':' + locblock[k] # depends on [control=['if'], data=['rloclist']] all_DI.append([(DIblock[k][0] + 180.0) % 360.0, -1.0 * DIblock[k][1]]) alllist = alllist + ':' + nameblock[k] if locblock[k] not in allloclist: allloclist = allloclist + ':' + locblock[k] # depends on [control=['if'], 
data=['allloclist']] # depends on [control=['if'], data=[]] else: nDIs.append(DIblock[k]) nlist = nlist + ':' + nameblock[k] if locblock[k] not in nloclist: nloclist = nloclist + ':' + locblock[k] # depends on [control=['if'], data=['nloclist']] all_DI.append(DIblock[k]) alllist = alllist + ':' + nameblock[k] if locblock[k] not in allloclist: allloclist = allloclist + ':' + locblock[k] # depends on [control=['if'], data=['allloclist']] # depends on [control=['for'], data=['k']] for mode in ['A', 'B', 'All']: if mode == 'A' and len(nDIs) > 2: fpars = fisher_mean(nDIs) fpars['sites'] = nlist.strip(':') fpars['locs'] = nloclist.strip(':') FisherByPoles[mode] = fpars # depends on [control=['if'], data=[]] elif mode == 'B' and len(rDIs) > 2: fpars = fisher_mean(rDIs) fpars['sites'] = rlist.strip(':') fpars['locs'] = rloclist.strip(':') FisherByPoles[mode] = fpars # depends on [control=['if'], data=[]] elif mode == 'All' and len(all_DI) > 2: fpars = fisher_mean(all_DI) fpars['sites'] = alllist.strip(':') fpars['locs'] = allloclist.strip(':') FisherByPoles[mode] = fpars # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mode']] return FisherByPoles
def delete_sdb(self, sdb_id): """ Delete a safe deposit box specified by id Keyword arguments: sdb_id -- this is the id of the safe deposit box, not the path.""" sdb_resp = delete_with_retry(self.cerberus_url + '/v2/safe-deposit-box/' + sdb_id, headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp
def function[delete_sdb, parameter[self, sdb_id]]: constant[ Delete a safe deposit box specified by id Keyword arguments: sdb_id -- this is the id of the safe deposit box, not the path.] variable[sdb_resp] assign[=] call[name[delete_with_retry], parameter[binary_operation[binary_operation[name[self].cerberus_url + constant[/v2/safe-deposit-box/]] + name[sdb_id]]]] call[name[throw_if_bad_response], parameter[name[sdb_resp]]] return[name[sdb_resp]]
keyword[def] identifier[delete_sdb] ( identifier[self] , identifier[sdb_id] ): literal[string] identifier[sdb_resp] = identifier[delete_with_retry] ( identifier[self] . identifier[cerberus_url] + literal[string] + identifier[sdb_id] , identifier[headers] = identifier[self] . identifier[HEADERS] ) identifier[throw_if_bad_response] ( identifier[sdb_resp] ) keyword[return] identifier[sdb_resp]
def delete_sdb(self, sdb_id): """ Delete a safe deposit box specified by id Keyword arguments: sdb_id -- this is the id of the safe deposit box, not the path.""" sdb_resp = delete_with_retry(self.cerberus_url + '/v2/safe-deposit-box/' + sdb_id, headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp
def Nu_Xu(Re, Pr, rho_w=None, rho_b=None, mu_w=None, mu_b=None): r'''Calculates internal convection Nusselt number for turbulent vertical upward flow in a pipe under supercritical conditions according to [1]_. .. math:: Nu_b = 0.02269 Re_b^{0.8079} \bar{Pr}_b^{0.9213} \left(\frac{\rho_w}{\rho_b}\right)^{0.6638} \left(\frac{\mu_w}{\mu_b}\right)^{0.8687} \bar{Cp} = \frac{H_w-H_b}{T_w-T_b} Parameters ---------- Re : float Reynolds number with bulk fluid properties, [-] Pr : float Prandtl number with bulk fluid properties and an average heat capacity between the wall and bulk temperatures [-] rho_w : float, optional Density at the wall temperature, [kg/m^3] rho_b : float, optional Density at the bulk temperature, [kg/m^3] mu_w : float, optional Viscosity at the wall temperature, [Pa*s] mu_b : float, optional Viscosity at the bulk temperature, [Pa*s] Returns ------- Nu : float Nusselt number with bulk fluid properties, [-] Notes ----- For the data used to develop the correlation, P varied from 23 to 30 MPa, and D was 12 mm. G varied from 600-1200 kg/m^2/s and q varied from 100 to 600 kW/m^2. Cp used in the calculation of Prandtl number should be the average value of those at the wall and the bulk temperatures. For deteriorated heat transfer, this was the third most accurate correlation in [2]_ with a MAD of 20.5%. If the extra density and viscosity information is not provided, it will not be used. Examples -------- >>> Nu_Xu(1E5, 1.2, 330, 290., 8e-4, 9e-4) 289.133054256742 References ---------- .. [1] Xu, F., Guo, L.J., Mao, Y.F., Jiang, X.E., 2005. "Experimental investigation to the heat transfer characteristics of water in vertical pipes under supercritical pressure". J. Xi'an Jiaotong University 39, 468-471. .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of Correlations of Forced Convection Heat Transfer to Water at Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015): 451-60. doi:10.1016/j.anucene.2014.10.027. 
''' Nu = 0.02269*Re**0.8079*Pr**0.9213 if rho_w and rho_b: Nu *= (rho_w/rho_b)**0.6638 if mu_w and mu_b: Nu *= (mu_w/mu_b)**0.8687 return Nu
def function[Nu_Xu, parameter[Re, Pr, rho_w, rho_b, mu_w, mu_b]]: constant[Calculates internal convection Nusselt number for turbulent vertical upward flow in a pipe under supercritical conditions according to [1]_. .. math:: Nu_b = 0.02269 Re_b^{0.8079} \bar{Pr}_b^{0.9213} \left(\frac{\rho_w}{\rho_b}\right)^{0.6638} \left(\frac{\mu_w}{\mu_b}\right)^{0.8687} \bar{Cp} = \frac{H_w-H_b}{T_w-T_b} Parameters ---------- Re : float Reynolds number with bulk fluid properties, [-] Pr : float Prandtl number with bulk fluid properties and an average heat capacity between the wall and bulk temperatures [-] rho_w : float, optional Density at the wall temperature, [kg/m^3] rho_b : float, optional Density at the bulk temperature, [kg/m^3] mu_w : float, optional Viscosity at the wall temperature, [Pa*s] mu_b : float, optional Viscosity at the bulk temperature, [Pa*s] Returns ------- Nu : float Nusselt number with bulk fluid properties, [-] Notes ----- For the data used to develop the correlation, P varied from 23 to 30 MPa, and D was 12 mm. G varied from 600-1200 kg/m^2/s and q varied from 100 to 600 kW/m^2. Cp used in the calculation of Prandtl number should be the average value of those at the wall and the bulk temperatures. For deteriorated heat transfer, this was the third most accurate correlation in [2]_ with a MAD of 20.5%. If the extra density and viscosity information is not provided, it will not be used. Examples -------- >>> Nu_Xu(1E5, 1.2, 330, 290., 8e-4, 9e-4) 289.133054256742 References ---------- .. [1] Xu, F., Guo, L.J., Mao, Y.F., Jiang, X.E., 2005. "Experimental investigation to the heat transfer characteristics of water in vertical pipes under supercritical pressure". J. Xi'an Jiaotong University 39, 468-471. .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of Correlations of Forced Convection Heat Transfer to Water at Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015): 451-60. doi:10.1016/j.anucene.2014.10.027. 
] variable[Nu] assign[=] binary_operation[binary_operation[constant[0.02269] * binary_operation[name[Re] ** constant[0.8079]]] * binary_operation[name[Pr] ** constant[0.9213]]] if <ast.BoolOp object at 0x7da2047e8af0> begin[:] <ast.AugAssign object at 0x7da2047ebac0> if <ast.BoolOp object at 0x7da2047e9840> begin[:] <ast.AugAssign object at 0x7da2047eada0> return[name[Nu]]
keyword[def] identifier[Nu_Xu] ( identifier[Re] , identifier[Pr] , identifier[rho_w] = keyword[None] , identifier[rho_b] = keyword[None] , identifier[mu_w] = keyword[None] , identifier[mu_b] = keyword[None] ): literal[string] identifier[Nu] = literal[int] * identifier[Re] ** literal[int] * identifier[Pr] ** literal[int] keyword[if] identifier[rho_w] keyword[and] identifier[rho_b] : identifier[Nu] *=( identifier[rho_w] / identifier[rho_b] )** literal[int] keyword[if] identifier[mu_w] keyword[and] identifier[mu_b] : identifier[Nu] *=( identifier[mu_w] / identifier[mu_b] )** literal[int] keyword[return] identifier[Nu]
def Nu_Xu(Re, Pr, rho_w=None, rho_b=None, mu_w=None, mu_b=None): """Calculates internal convection Nusselt number for turbulent vertical upward flow in a pipe under supercritical conditions according to [1]_. .. math:: Nu_b = 0.02269 Re_b^{0.8079} \\bar{Pr}_b^{0.9213} \\left(\\frac{\\rho_w}{\\rho_b}\\right)^{0.6638} \\left(\\frac{\\mu_w}{\\mu_b}\\right)^{0.8687} \\bar{Cp} = \\frac{H_w-H_b}{T_w-T_b} Parameters ---------- Re : float Reynolds number with bulk fluid properties, [-] Pr : float Prandtl number with bulk fluid properties and an average heat capacity between the wall and bulk temperatures [-] rho_w : float, optional Density at the wall temperature, [kg/m^3] rho_b : float, optional Density at the bulk temperature, [kg/m^3] mu_w : float, optional Viscosity at the wall temperature, [Pa*s] mu_b : float, optional Viscosity at the bulk temperature, [Pa*s] Returns ------- Nu : float Nusselt number with bulk fluid properties, [-] Notes ----- For the data used to develop the correlation, P varied from 23 to 30 MPa, and D was 12 mm. G varied from 600-1200 kg/m^2/s and q varied from 100 to 600 kW/m^2. Cp used in the calculation of Prandtl number should be the average value of those at the wall and the bulk temperatures. For deteriorated heat transfer, this was the third most accurate correlation in [2]_ with a MAD of 20.5%. If the extra density and viscosity information is not provided, it will not be used. Examples -------- >>> Nu_Xu(1E5, 1.2, 330, 290., 8e-4, 9e-4) 289.133054256742 References ---------- .. [1] Xu, F., Guo, L.J., Mao, Y.F., Jiang, X.E., 2005. "Experimental investigation to the heat transfer characteristics of water in vertical pipes under supercritical pressure". J. Xi'an Jiaotong University 39, 468-471. .. [2] Chen, Weiwei, Xiande Fang, Yu Xu, and Xianghui Su. "An Assessment of Correlations of Forced Convection Heat Transfer to Water at Supercritical Pressure." Annals of Nuclear Energy 76 (February 2015): 451-60. doi:10.1016/j.anucene.2014.10.027. 
""" Nu = 0.02269 * Re ** 0.8079 * Pr ** 0.9213 if rho_w and rho_b: Nu *= (rho_w / rho_b) ** 0.6638 # depends on [control=['if'], data=[]] if mu_w and mu_b: Nu *= (mu_w / mu_b) ** 0.8687 # depends on [control=['if'], data=[]] return Nu
def create_database(self): """ Create postgres database. """ self.print_message("Creating database '%s'" % self.databases['destination']['name']) self.export_pgpassword('destination') args = [ "createdb", self.databases['destination']['name'], ] args.extend(self.databases['destination']['args']) for arg in self.databases['destination']['args']: if arg[:7] == '--user=': args.append('--owner=%s' % arg[7:]) subprocess.check_call(args)
def function[create_database, parameter[self]]: constant[ Create postgres database. ] call[name[self].print_message, parameter[binary_operation[constant[Creating database '%s'] <ast.Mod object at 0x7da2590d6920> call[call[name[self].databases][constant[destination]]][constant[name]]]]] call[name[self].export_pgpassword, parameter[constant[destination]]] variable[args] assign[=] list[[<ast.Constant object at 0x7da20c7c9450>, <ast.Subscript object at 0x7da20c7ca440>]] call[name[args].extend, parameter[call[call[name[self].databases][constant[destination]]][constant[args]]]] for taget[name[arg]] in starred[call[call[name[self].databases][constant[destination]]][constant[args]]] begin[:] if compare[call[name[arg]][<ast.Slice object at 0x7da2046206a0>] equal[==] constant[--user=]] begin[:] call[name[args].append, parameter[binary_operation[constant[--owner=%s] <ast.Mod object at 0x7da2590d6920> call[name[arg]][<ast.Slice object at 0x7da204623a30>]]]] call[name[subprocess].check_call, parameter[name[args]]]
keyword[def] identifier[create_database] ( identifier[self] ): literal[string] identifier[self] . identifier[print_message] ( literal[string] % identifier[self] . identifier[databases] [ literal[string] ][ literal[string] ]) identifier[self] . identifier[export_pgpassword] ( literal[string] ) identifier[args] =[ literal[string] , identifier[self] . identifier[databases] [ literal[string] ][ literal[string] ], ] identifier[args] . identifier[extend] ( identifier[self] . identifier[databases] [ literal[string] ][ literal[string] ]) keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[databases] [ literal[string] ][ literal[string] ]: keyword[if] identifier[arg] [: literal[int] ]== literal[string] : identifier[args] . identifier[append] ( literal[string] % identifier[arg] [ literal[int] :]) identifier[subprocess] . identifier[check_call] ( identifier[args] )
def create_database(self): """ Create postgres database. """ self.print_message("Creating database '%s'" % self.databases['destination']['name']) self.export_pgpassword('destination') args = ['createdb', self.databases['destination']['name']] args.extend(self.databases['destination']['args']) for arg in self.databases['destination']['args']: if arg[:7] == '--user=': args.append('--owner=%s' % arg[7:]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']] subprocess.check_call(args)
def _after_request(self, response): """ Set a new ID token cookie if the ID token has changed. """ # This means that if either the new or the old are False, we set # insecure cookies. # We don't define OIDC_ID_TOKEN_COOKIE_SECURE in init_app, because we # don't want people to find it easily. cookie_secure = (current_app.config['OIDC_COOKIE_SECURE'] and current_app.config.get('OIDC_ID_TOKEN_COOKIE_SECURE', True)) if getattr(g, 'oidc_id_token_dirty', False): if g.oidc_id_token: signed_id_token = self.cookie_serializer.dumps(g.oidc_id_token) response.set_cookie( current_app.config['OIDC_ID_TOKEN_COOKIE_NAME'], signed_id_token, secure=cookie_secure, httponly=True, max_age=current_app.config['OIDC_ID_TOKEN_COOKIE_TTL']) else: # This was a log out response.set_cookie( current_app.config['OIDC_ID_TOKEN_COOKIE_NAME'], '', path=current_app.config['OIDC_ID_TOKEN_COOKIE_PATH'], secure=cookie_secure, httponly=True, expires=0) return response
def function[_after_request, parameter[self, response]]: constant[ Set a new ID token cookie if the ID token has changed. ] variable[cookie_secure] assign[=] <ast.BoolOp object at 0x7da1b26affa0> if call[name[getattr], parameter[name[g], constant[oidc_id_token_dirty], constant[False]]] begin[:] if name[g].oidc_id_token begin[:] variable[signed_id_token] assign[=] call[name[self].cookie_serializer.dumps, parameter[name[g].oidc_id_token]] call[name[response].set_cookie, parameter[call[name[current_app].config][constant[OIDC_ID_TOKEN_COOKIE_NAME]], name[signed_id_token]]] return[name[response]]
keyword[def] identifier[_after_request] ( identifier[self] , identifier[response] ): literal[string] identifier[cookie_secure] =( identifier[current_app] . identifier[config] [ literal[string] ] keyword[and] identifier[current_app] . identifier[config] . identifier[get] ( literal[string] , keyword[True] )) keyword[if] identifier[getattr] ( identifier[g] , literal[string] , keyword[False] ): keyword[if] identifier[g] . identifier[oidc_id_token] : identifier[signed_id_token] = identifier[self] . identifier[cookie_serializer] . identifier[dumps] ( identifier[g] . identifier[oidc_id_token] ) identifier[response] . identifier[set_cookie] ( identifier[current_app] . identifier[config] [ literal[string] ], identifier[signed_id_token] , identifier[secure] = identifier[cookie_secure] , identifier[httponly] = keyword[True] , identifier[max_age] = identifier[current_app] . identifier[config] [ literal[string] ]) keyword[else] : identifier[response] . identifier[set_cookie] ( identifier[current_app] . identifier[config] [ literal[string] ], literal[string] , identifier[path] = identifier[current_app] . identifier[config] [ literal[string] ], identifier[secure] = identifier[cookie_secure] , identifier[httponly] = keyword[True] , identifier[expires] = literal[int] ) keyword[return] identifier[response]
def _after_request(self, response): """ Set a new ID token cookie if the ID token has changed. """ # This means that if either the new or the old are False, we set # insecure cookies. # We don't define OIDC_ID_TOKEN_COOKIE_SECURE in init_app, because we # don't want people to find it easily. cookie_secure = current_app.config['OIDC_COOKIE_SECURE'] and current_app.config.get('OIDC_ID_TOKEN_COOKIE_SECURE', True) if getattr(g, 'oidc_id_token_dirty', False): if g.oidc_id_token: signed_id_token = self.cookie_serializer.dumps(g.oidc_id_token) response.set_cookie(current_app.config['OIDC_ID_TOKEN_COOKIE_NAME'], signed_id_token, secure=cookie_secure, httponly=True, max_age=current_app.config['OIDC_ID_TOKEN_COOKIE_TTL']) # depends on [control=['if'], data=[]] else: # This was a log out response.set_cookie(current_app.config['OIDC_ID_TOKEN_COOKIE_NAME'], '', path=current_app.config['OIDC_ID_TOKEN_COOKIE_PATH'], secure=cookie_secure, httponly=True, expires=0) # depends on [control=['if'], data=[]] return response
def electric_field_amplitude_intensity(s0,Omega=1.0e6): '''This function returns the value of E0 (the amplitude of the electric field) at a given saturation parameter s0=I/I0, where I0=2.50399 mW/cm^2 is the saturation intensity of the D2 line of Rubidium for linearly polarized light.''' e0=hbar*Omega/(e*a0) #This is the electric field scale. I0=2.50399 #mW/cm^2 I0=1.66889451102868 #mW/cm^2 I0=I0/1000*(100**2) #W/m^2 r_ciclic=4.226983616875483 #a0 gamma_D2=2*Pi*6.065e6/Omega # The decay frequency of the D2 line. E0_sat=gamma_D2/r_ciclic/sqrt(2.0) E0_sat=E0_sat*e0 I0=E0_sat**2/2/c/mu0 #return sqrt(c*mu0*s0*I0/2)/e0 #return sqrt(c*mu0*s0*I0)/e0 return sqrt(2*c*mu0*s0*I0)/e0
def function[electric_field_amplitude_intensity, parameter[s0, Omega]]: constant[This function returns the value of E0 (the amplitude of the electric field) at a given saturation parameter s0=I/I0, where I0=2.50399 mW/cm^2 is the saturation intensity of the D2 line of Rubidium for linearly polarized light.] variable[e0] assign[=] binary_operation[binary_operation[name[hbar] * name[Omega]] / binary_operation[name[e] * name[a0]]] variable[I0] assign[=] constant[2.50399] variable[I0] assign[=] constant[1.66889451102868] variable[I0] assign[=] binary_operation[binary_operation[name[I0] / constant[1000]] * binary_operation[constant[100] ** constant[2]]] variable[r_ciclic] assign[=] constant[4.226983616875483] variable[gamma_D2] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * name[Pi]] * constant[6065000.0]] / name[Omega]] variable[E0_sat] assign[=] binary_operation[binary_operation[name[gamma_D2] / name[r_ciclic]] / call[name[sqrt], parameter[constant[2.0]]]] variable[E0_sat] assign[=] binary_operation[name[E0_sat] * name[e0]] variable[I0] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[E0_sat] ** constant[2]] / constant[2]] / name[c]] / name[mu0]] return[binary_operation[call[name[sqrt], parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[c]] * name[mu0]] * name[s0]] * name[I0]]]] / name[e0]]]
keyword[def] identifier[electric_field_amplitude_intensity] ( identifier[s0] , identifier[Omega] = literal[int] ): literal[string] identifier[e0] = identifier[hbar] * identifier[Omega] /( identifier[e] * identifier[a0] ) identifier[I0] = literal[int] identifier[I0] = literal[int] identifier[I0] = identifier[I0] / literal[int] *( literal[int] ** literal[int] ) identifier[r_ciclic] = literal[int] identifier[gamma_D2] = literal[int] * identifier[Pi] * literal[int] / identifier[Omega] identifier[E0_sat] = identifier[gamma_D2] / identifier[r_ciclic] / identifier[sqrt] ( literal[int] ) identifier[E0_sat] = identifier[E0_sat] * identifier[e0] identifier[I0] = identifier[E0_sat] ** literal[int] / literal[int] / identifier[c] / identifier[mu0] keyword[return] identifier[sqrt] ( literal[int] * identifier[c] * identifier[mu0] * identifier[s0] * identifier[I0] )/ identifier[e0]
def electric_field_amplitude_intensity(s0, Omega=1000000.0): """This function returns the value of E0 (the amplitude of the electric field) at a given saturation parameter s0=I/I0, where I0=2.50399 mW/cm^2 is the saturation intensity of the D2 line of Rubidium for linearly polarized light.""" e0 = hbar * Omega / (e * a0) #This is the electric field scale. I0 = 2.50399 #mW/cm^2 I0 = 1.66889451102868 #mW/cm^2 I0 = I0 / 1000 * 100 ** 2 #W/m^2 r_ciclic = 4.226983616875483 #a0 gamma_D2 = 2 * Pi * 6065000.0 / Omega # The decay frequency of the D2 line. E0_sat = gamma_D2 / r_ciclic / sqrt(2.0) E0_sat = E0_sat * e0 I0 = E0_sat ** 2 / 2 / c / mu0 #return sqrt(c*mu0*s0*I0/2)/e0 #return sqrt(c*mu0*s0*I0)/e0 return sqrt(2 * c * mu0 * s0 * I0) / e0
def Start(self): """Query the client for available Volume Shadow Copies using a WMI query.""" self.state.shadows = [] self.state.raw_device = None self.CallClient( server_stubs.WmiQuery, query="SELECT * FROM Win32_ShadowCopy", next_state="ListDeviceDirectories")
def function[Start, parameter[self]]: constant[Query the client for available Volume Shadow Copies using a WMI query.] name[self].state.shadows assign[=] list[[]] name[self].state.raw_device assign[=] constant[None] call[name[self].CallClient, parameter[name[server_stubs].WmiQuery]]
keyword[def] identifier[Start] ( identifier[self] ): literal[string] identifier[self] . identifier[state] . identifier[shadows] =[] identifier[self] . identifier[state] . identifier[raw_device] = keyword[None] identifier[self] . identifier[CallClient] ( identifier[server_stubs] . identifier[WmiQuery] , identifier[query] = literal[string] , identifier[next_state] = literal[string] )
def Start(self): """Query the client for available Volume Shadow Copies using a WMI query.""" self.state.shadows = [] self.state.raw_device = None self.CallClient(server_stubs.WmiQuery, query='SELECT * FROM Win32_ShadowCopy', next_state='ListDeviceDirectories')
def dump_tf_graph(tfgraph, tfgraphdef): """List all the nodes in a TF graph. tfgraph: A TF Graph object. tfgraphdef: A TF GraphDef object. """ print("Nodes ({})".format(len(tfgraphdef.node))) f = "{:15} {:59} {:20} {}" print(f.format("kind", "scopeName", "shape", "inputs")) for node in tfgraphdef.node: scopename = node.name kind = node.op inputs = node.input shape = tf.graph_util.tensor_shape_from_node_def_name(tfgraph, scopename) print(f.format(kind, scopename, str(shape), inputs))
def function[dump_tf_graph, parameter[tfgraph, tfgraphdef]]: constant[List all the nodes in a TF graph. tfgraph: A TF Graph object. tfgraphdef: A TF GraphDef object. ] call[name[print], parameter[call[constant[Nodes ({})].format, parameter[call[name[len], parameter[name[tfgraphdef].node]]]]]] variable[f] assign[=] constant[{:15} {:59} {:20} {}] call[name[print], parameter[call[name[f].format, parameter[constant[kind], constant[scopeName], constant[shape], constant[inputs]]]]] for taget[name[node]] in starred[name[tfgraphdef].node] begin[:] variable[scopename] assign[=] name[node].name variable[kind] assign[=] name[node].op variable[inputs] assign[=] name[node].input variable[shape] assign[=] call[name[tf].graph_util.tensor_shape_from_node_def_name, parameter[name[tfgraph], name[scopename]]] call[name[print], parameter[call[name[f].format, parameter[name[kind], name[scopename], call[name[str], parameter[name[shape]]], name[inputs]]]]]
keyword[def] identifier[dump_tf_graph] ( identifier[tfgraph] , identifier[tfgraphdef] ): literal[string] identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[tfgraphdef] . identifier[node] ))) identifier[f] = literal[string] identifier[print] ( identifier[f] . identifier[format] ( literal[string] , literal[string] , literal[string] , literal[string] )) keyword[for] identifier[node] keyword[in] identifier[tfgraphdef] . identifier[node] : identifier[scopename] = identifier[node] . identifier[name] identifier[kind] = identifier[node] . identifier[op] identifier[inputs] = identifier[node] . identifier[input] identifier[shape] = identifier[tf] . identifier[graph_util] . identifier[tensor_shape_from_node_def_name] ( identifier[tfgraph] , identifier[scopename] ) identifier[print] ( identifier[f] . identifier[format] ( identifier[kind] , identifier[scopename] , identifier[str] ( identifier[shape] ), identifier[inputs] ))
def dump_tf_graph(tfgraph, tfgraphdef): """List all the nodes in a TF graph. tfgraph: A TF Graph object. tfgraphdef: A TF GraphDef object. """ print('Nodes ({})'.format(len(tfgraphdef.node))) f = '{:15} {:59} {:20} {}' print(f.format('kind', 'scopeName', 'shape', 'inputs')) for node in tfgraphdef.node: scopename = node.name kind = node.op inputs = node.input shape = tf.graph_util.tensor_shape_from_node_def_name(tfgraph, scopename) print(f.format(kind, scopename, str(shape), inputs)) # depends on [control=['for'], data=['node']]
def update(self): """Determine the total number of AR coefficients. >>> from hydpy.models.arma import * >>> parameterstep('1d') >>> responses(((1., 2.), (1.,)), th_3=((1.,), (1., 2., 3.))) >>> derived.ar_order.update() >>> derived.ar_order ar_order(2, 1) """ responses = self.subpars.pars.control.responses self.shape = len(responses) self(responses.ar_orders)
def function[update, parameter[self]]: constant[Determine the total number of AR coefficients. >>> from hydpy.models.arma import * >>> parameterstep('1d') >>> responses(((1., 2.), (1.,)), th_3=((1.,), (1., 2., 3.))) >>> derived.ar_order.update() >>> derived.ar_order ar_order(2, 1) ] variable[responses] assign[=] name[self].subpars.pars.control.responses name[self].shape assign[=] call[name[len], parameter[name[responses]]] call[name[self], parameter[name[responses].ar_orders]]
keyword[def] identifier[update] ( identifier[self] ): literal[string] identifier[responses] = identifier[self] . identifier[subpars] . identifier[pars] . identifier[control] . identifier[responses] identifier[self] . identifier[shape] = identifier[len] ( identifier[responses] ) identifier[self] ( identifier[responses] . identifier[ar_orders] )
def update(self): """Determine the total number of AR coefficients. >>> from hydpy.models.arma import * >>> parameterstep('1d') >>> responses(((1., 2.), (1.,)), th_3=((1.,), (1., 2., 3.))) >>> derived.ar_order.update() >>> derived.ar_order ar_order(2, 1) """ responses = self.subpars.pars.control.responses self.shape = len(responses) self(responses.ar_orders)
def run_async(self, time_limit): ''' Run this module asynchronously and return a poller. ''' self.background = time_limit results = self.run() return results, poller.AsyncPoller(results, self)
def function[run_async, parameter[self, time_limit]]: constant[ Run this module asynchronously and return a poller. ] name[self].background assign[=] name[time_limit] variable[results] assign[=] call[name[self].run, parameter[]] return[tuple[[<ast.Name object at 0x7da18c4ceec0>, <ast.Call object at 0x7da18c4cfbe0>]]]
keyword[def] identifier[run_async] ( identifier[self] , identifier[time_limit] ): literal[string] identifier[self] . identifier[background] = identifier[time_limit] identifier[results] = identifier[self] . identifier[run] () keyword[return] identifier[results] , identifier[poller] . identifier[AsyncPoller] ( identifier[results] , identifier[self] )
def run_async(self, time_limit): """ Run this module asynchronously and return a poller. """ self.background = time_limit results = self.run() return (results, poller.AsyncPoller(results, self))
def contains_no_backer(self, addr): """ Tests if the address is contained in any page of paged memory, without considering memory backers. :param int addr: The address to test. :return: True if the address is included in one of the pages, False otherwise. :rtype: bool """ for i, p in self._pages.items(): if i * self._page_size <= addr < (i + 1) * self._page_size: return addr - (i * self._page_size) in p.keys() return False
def function[contains_no_backer, parameter[self, addr]]: constant[ Tests if the address is contained in any page of paged memory, without considering memory backers. :param int addr: The address to test. :return: True if the address is included in one of the pages, False otherwise. :rtype: bool ] for taget[tuple[[<ast.Name object at 0x7da18bc70220>, <ast.Name object at 0x7da18bc70be0>]]] in starred[call[name[self]._pages.items, parameter[]]] begin[:] if compare[binary_operation[name[i] * name[self]._page_size] less_or_equal[<=] name[addr]] begin[:] return[compare[binary_operation[name[addr] - binary_operation[name[i] * name[self]._page_size]] in call[name[p].keys, parameter[]]]] return[constant[False]]
keyword[def] identifier[contains_no_backer] ( identifier[self] , identifier[addr] ): literal[string] keyword[for] identifier[i] , identifier[p] keyword[in] identifier[self] . identifier[_pages] . identifier[items] (): keyword[if] identifier[i] * identifier[self] . identifier[_page_size] <= identifier[addr] <( identifier[i] + literal[int] )* identifier[self] . identifier[_page_size] : keyword[return] identifier[addr] -( identifier[i] * identifier[self] . identifier[_page_size] ) keyword[in] identifier[p] . identifier[keys] () keyword[return] keyword[False]
def contains_no_backer(self, addr): """ Tests if the address is contained in any page of paged memory, without considering memory backers. :param int addr: The address to test. :return: True if the address is included in one of the pages, False otherwise. :rtype: bool """ for (i, p) in self._pages.items(): if i * self._page_size <= addr < (i + 1) * self._page_size: return addr - i * self._page_size in p.keys() # depends on [control=['if'], data=['addr']] # depends on [control=['for'], data=[]] return False
def _get_ref(data, position, obj_end, opts, element_name): """Decode (deprecated) BSON DBPointer to bson.dbref.DBRef.""" collection, position = _get_string( data, position, obj_end, opts, element_name) oid, position = _get_oid(data, position, obj_end, opts, element_name) return DBRef(collection, oid), position
def function[_get_ref, parameter[data, position, obj_end, opts, element_name]]: constant[Decode (deprecated) BSON DBPointer to bson.dbref.DBRef.] <ast.Tuple object at 0x7da20c6abd30> assign[=] call[name[_get_string], parameter[name[data], name[position], name[obj_end], name[opts], name[element_name]]] <ast.Tuple object at 0x7da20c6ab760> assign[=] call[name[_get_oid], parameter[name[data], name[position], name[obj_end], name[opts], name[element_name]]] return[tuple[[<ast.Call object at 0x7da20c6aa4d0>, <ast.Name object at 0x7da20c6ab790>]]]
keyword[def] identifier[_get_ref] ( identifier[data] , identifier[position] , identifier[obj_end] , identifier[opts] , identifier[element_name] ): literal[string] identifier[collection] , identifier[position] = identifier[_get_string] ( identifier[data] , identifier[position] , identifier[obj_end] , identifier[opts] , identifier[element_name] ) identifier[oid] , identifier[position] = identifier[_get_oid] ( identifier[data] , identifier[position] , identifier[obj_end] , identifier[opts] , identifier[element_name] ) keyword[return] identifier[DBRef] ( identifier[collection] , identifier[oid] ), identifier[position]
def _get_ref(data, position, obj_end, opts, element_name): """Decode (deprecated) BSON DBPointer to bson.dbref.DBRef.""" (collection, position) = _get_string(data, position, obj_end, opts, element_name) (oid, position) = _get_oid(data, position, obj_end, opts, element_name) return (DBRef(collection, oid), position)
def entropy(self): """ For each string compute its Shannon entropy, if the string is empty the entropy is 0. :returns: an H2OFrame of Shannon entropies. """ fr = H2OFrame._expr(expr=ExprNode("entropy", self)) fr._ex._cache.nrows = self.nrow fr._ex._cache.ncol = self.ncol return fr
def function[entropy, parameter[self]]: constant[ For each string compute its Shannon entropy, if the string is empty the entropy is 0. :returns: an H2OFrame of Shannon entropies. ] variable[fr] assign[=] call[name[H2OFrame]._expr, parameter[]] name[fr]._ex._cache.nrows assign[=] name[self].nrow name[fr]._ex._cache.ncol assign[=] name[self].ncol return[name[fr]]
keyword[def] identifier[entropy] ( identifier[self] ): literal[string] identifier[fr] = identifier[H2OFrame] . identifier[_expr] ( identifier[expr] = identifier[ExprNode] ( literal[string] , identifier[self] )) identifier[fr] . identifier[_ex] . identifier[_cache] . identifier[nrows] = identifier[self] . identifier[nrow] identifier[fr] . identifier[_ex] . identifier[_cache] . identifier[ncol] = identifier[self] . identifier[ncol] keyword[return] identifier[fr]
def entropy(self): """ For each string compute its Shannon entropy, if the string is empty the entropy is 0. :returns: an H2OFrame of Shannon entropies. """ fr = H2OFrame._expr(expr=ExprNode('entropy', self)) fr._ex._cache.nrows = self.nrow fr._ex._cache.ncol = self.ncol return fr
def sismember(self, name, value): """Emulate sismember.""" redis_set = self._get_set(name, 'SISMEMBER') if not redis_set: return 0 result = self._encode(value) in redis_set return 1 if result else 0
def function[sismember, parameter[self, name, value]]: constant[Emulate sismember.] variable[redis_set] assign[=] call[name[self]._get_set, parameter[name[name], constant[SISMEMBER]]] if <ast.UnaryOp object at 0x7da204346800> begin[:] return[constant[0]] variable[result] assign[=] compare[call[name[self]._encode, parameter[name[value]]] in name[redis_set]] return[<ast.IfExp object at 0x7da2043450c0>]
keyword[def] identifier[sismember] ( identifier[self] , identifier[name] , identifier[value] ): literal[string] identifier[redis_set] = identifier[self] . identifier[_get_set] ( identifier[name] , literal[string] ) keyword[if] keyword[not] identifier[redis_set] : keyword[return] literal[int] identifier[result] = identifier[self] . identifier[_encode] ( identifier[value] ) keyword[in] identifier[redis_set] keyword[return] literal[int] keyword[if] identifier[result] keyword[else] literal[int]
def sismember(self, name, value): """Emulate sismember.""" redis_set = self._get_set(name, 'SISMEMBER') if not redis_set: return 0 # depends on [control=['if'], data=[]] result = self._encode(value) in redis_set return 1 if result else 0
def compute_stable_poses(mesh, center_mass=None, sigma=0.0, n_samples=1, threshold=0.0): """ Computes stable orientations of a mesh and their quasi-static probabilites. This method samples the location of the center of mass from a multivariate gaussian with the mean at the center of mass, and a covariance equal to and identity matrix times sigma, over n_samples. For each sample, it computes the stable resting poses of the mesh on a a planar workspace and evaulates the probabilities of landing in each pose if the object is dropped onto the table randomly. This method returns the 4x4 homogenous transform matrices that place the shape against the planar surface with the z-axis pointing upwards and a list of the probabilities for each pose. The transforms and probabilties that are returned are sorted, with the most probable pose first. Parameters ---------- mesh : trimesh.Trimesh The target mesh com : (3,) float Rhe object center of mass. If None, this method assumes uniform density and watertightness and computes a center of mass explicitly sigma : float Rhe covariance for the multivariate gaussian used to sample center of mass locations n_samples : int The number of samples of the center of mass location threshold : float The probability value at which to threshold returned stable poses Returns ------- transforms : (n, 4, 4) float The homogenous matrices that transform the object to rest in a stable pose, with the new z-axis pointing upwards from the table and the object just touching the table. 
probs : (n,) float Probability in (0, 1) for each pose """ # save convex hull mesh to avoid a cache check cvh = mesh.convex_hull if center_mass is None: center_mass = mesh.center_mass # Sample center of mass, rejecting points outside of conv hull sample_coms = [] while len(sample_coms) < n_samples: remaining = n_samples - len(sample_coms) coms = np.random.multivariate_normal(center_mass, sigma * np.eye(3), remaining) for c in coms: dots = np.einsum('ij,ij->i', c - cvh.triangles_center, cvh.face_normals) if np.all(dots < 0): sample_coms.append(c) norms_to_probs = {} # Map from normal to probabilities # For each sample, compute the stable poses for sample_com in sample_coms: # Create toppling digraph dg = _create_topple_graph(cvh, sample_com) # Propagate probabilites to sink nodes with a breadth-first traversal nodes = [n for n in dg.nodes() if dg.in_degree(n) == 0] n_iters = 0 while len(nodes) > 0 and n_iters <= len(mesh.faces): new_nodes = [] for node in nodes: if dg.out_degree(node) == 0: continue successor = next(iter(dg.successors(node))) dg.node[successor]['prob'] += dg.node[node]['prob'] dg.node[node]['prob'] = 0.0 new_nodes.append(successor) nodes = new_nodes n_iters += 1 # Collect stable poses for node in dg.nodes(): if dg.node[node]['prob'] > 0.0: normal = cvh.face_normals[node] prob = dg.node[node]['prob'] key = tuple(np.around(normal, decimals=3)) if key in norms_to_probs: norms_to_probs[key]['prob'] += 1.0 / n_samples * prob else: norms_to_probs[key] = { 'prob': 1.0 / n_samples * prob, 'normal': normal } transforms = [] probs = [] # Filter stable poses for key in norms_to_probs: prob = norms_to_probs[key]['prob'] if prob > threshold: tf = np.eye(4) # Compute a rotation matrix for this stable pose z = -1.0 * norms_to_probs[key]['normal'] x = np.array([-z[1], z[0], 0]) if np.linalg.norm(x) == 0.0: x = np.array([1, 0, 0]) else: x = x / np.linalg.norm(x) y = np.cross(z, x) y = y / np.linalg.norm(y) tf[:3, :3] = np.array([x, y, z]) # Compute the necessary 
translation for this stable pose m = cvh.copy() m.apply_transform(tf) z = -m.bounds[0][2] tf[:3, 3] = np.array([0, 0, z]) transforms.append(tf) probs.append(prob) # Sort the results transforms = np.array(transforms) probs = np.array(probs) inds = np.argsort(-probs) return transforms[inds], probs[inds]
def function[compute_stable_poses, parameter[mesh, center_mass, sigma, n_samples, threshold]]: constant[ Computes stable orientations of a mesh and their quasi-static probabilites. This method samples the location of the center of mass from a multivariate gaussian with the mean at the center of mass, and a covariance equal to and identity matrix times sigma, over n_samples. For each sample, it computes the stable resting poses of the mesh on a a planar workspace and evaulates the probabilities of landing in each pose if the object is dropped onto the table randomly. This method returns the 4x4 homogenous transform matrices that place the shape against the planar surface with the z-axis pointing upwards and a list of the probabilities for each pose. The transforms and probabilties that are returned are sorted, with the most probable pose first. Parameters ---------- mesh : trimesh.Trimesh The target mesh com : (3,) float Rhe object center of mass. If None, this method assumes uniform density and watertightness and computes a center of mass explicitly sigma : float Rhe covariance for the multivariate gaussian used to sample center of mass locations n_samples : int The number of samples of the center of mass location threshold : float The probability value at which to threshold returned stable poses Returns ------- transforms : (n, 4, 4) float The homogenous matrices that transform the object to rest in a stable pose, with the new z-axis pointing upwards from the table and the object just touching the table. 
probs : (n,) float Probability in (0, 1) for each pose ] variable[cvh] assign[=] name[mesh].convex_hull if compare[name[center_mass] is constant[None]] begin[:] variable[center_mass] assign[=] name[mesh].center_mass variable[sample_coms] assign[=] list[[]] while compare[call[name[len], parameter[name[sample_coms]]] less[<] name[n_samples]] begin[:] variable[remaining] assign[=] binary_operation[name[n_samples] - call[name[len], parameter[name[sample_coms]]]] variable[coms] assign[=] call[name[np].random.multivariate_normal, parameter[name[center_mass], binary_operation[name[sigma] * call[name[np].eye, parameter[constant[3]]]], name[remaining]]] for taget[name[c]] in starred[name[coms]] begin[:] variable[dots] assign[=] call[name[np].einsum, parameter[constant[ij,ij->i], binary_operation[name[c] - name[cvh].triangles_center], name[cvh].face_normals]] if call[name[np].all, parameter[compare[name[dots] less[<] constant[0]]]] begin[:] call[name[sample_coms].append, parameter[name[c]]] variable[norms_to_probs] assign[=] dictionary[[], []] for taget[name[sample_com]] in starred[name[sample_coms]] begin[:] variable[dg] assign[=] call[name[_create_topple_graph], parameter[name[cvh], name[sample_com]]] variable[nodes] assign[=] <ast.ListComp object at 0x7da18bc71f90> variable[n_iters] assign[=] constant[0] while <ast.BoolOp object at 0x7da18bc739d0> begin[:] variable[new_nodes] assign[=] list[[]] for taget[name[node]] in starred[name[nodes]] begin[:] if compare[call[name[dg].out_degree, parameter[name[node]]] equal[==] constant[0]] begin[:] continue variable[successor] assign[=] call[name[next], parameter[call[name[iter], parameter[call[name[dg].successors, parameter[name[node]]]]]]] <ast.AugAssign object at 0x7da18f8139a0> call[call[name[dg].node][name[node]]][constant[prob]] assign[=] constant[0.0] call[name[new_nodes].append, parameter[name[successor]]] variable[nodes] assign[=] name[new_nodes] <ast.AugAssign object at 0x7da18f811720> for taget[name[node]] in 
starred[call[name[dg].nodes, parameter[]]] begin[:] if compare[call[call[name[dg].node][name[node]]][constant[prob]] greater[>] constant[0.0]] begin[:] variable[normal] assign[=] call[name[cvh].face_normals][name[node]] variable[prob] assign[=] call[call[name[dg].node][name[node]]][constant[prob]] variable[key] assign[=] call[name[tuple], parameter[call[name[np].around, parameter[name[normal]]]]] if compare[name[key] in name[norms_to_probs]] begin[:] <ast.AugAssign object at 0x7da18f810b20> variable[transforms] assign[=] list[[]] variable[probs] assign[=] list[[]] for taget[name[key]] in starred[name[norms_to_probs]] begin[:] variable[prob] assign[=] call[call[name[norms_to_probs]][name[key]]][constant[prob]] if compare[name[prob] greater[>] name[threshold]] begin[:] variable[tf] assign[=] call[name[np].eye, parameter[constant[4]]] variable[z] assign[=] binary_operation[<ast.UnaryOp object at 0x7da18f810790> * call[call[name[norms_to_probs]][name[key]]][constant[normal]]] variable[x] assign[=] call[name[np].array, parameter[list[[<ast.UnaryOp object at 0x7da18f8118d0>, <ast.Subscript object at 0x7da18f8119c0>, <ast.Constant object at 0x7da18f812230>]]]] if compare[call[name[np].linalg.norm, parameter[name[x]]] equal[==] constant[0.0]] begin[:] variable[x] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da18f810700>, <ast.Constant object at 0x7da18f813f70>, <ast.Constant object at 0x7da18f813010>]]]] variable[y] assign[=] call[name[np].cross, parameter[name[z], name[x]]] variable[y] assign[=] binary_operation[name[y] / call[name[np].linalg.norm, parameter[name[y]]]] call[name[tf]][tuple[[<ast.Slice object at 0x7da204565ff0>, <ast.Slice object at 0x7da204564c70>]]] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da204567d30>, <ast.Name object at 0x7da2045677f0>, <ast.Name object at 0x7da2045666b0>]]]] variable[m] assign[=] call[name[cvh].copy, parameter[]] call[name[m].apply_transform, parameter[name[tf]]] variable[z] 
assign[=] <ast.UnaryOp object at 0x7da204566b60> call[name[tf]][tuple[[<ast.Slice object at 0x7da20c6a9ab0>, <ast.Constant object at 0x7da20c6a8a00>]]] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da20c6ab790>, <ast.Constant object at 0x7da20c6aa4a0>, <ast.Name object at 0x7da20c6aa9b0>]]]] call[name[transforms].append, parameter[name[tf]]] call[name[probs].append, parameter[name[prob]]] variable[transforms] assign[=] call[name[np].array, parameter[name[transforms]]] variable[probs] assign[=] call[name[np].array, parameter[name[probs]]] variable[inds] assign[=] call[name[np].argsort, parameter[<ast.UnaryOp object at 0x7da20c6ab970>]] return[tuple[[<ast.Subscript object at 0x7da20c6a9cc0>, <ast.Subscript object at 0x7da20c6a8eb0>]]]
keyword[def] identifier[compute_stable_poses] ( identifier[mesh] , identifier[center_mass] = keyword[None] , identifier[sigma] = literal[int] , identifier[n_samples] = literal[int] , identifier[threshold] = literal[int] ): literal[string] identifier[cvh] = identifier[mesh] . identifier[convex_hull] keyword[if] identifier[center_mass] keyword[is] keyword[None] : identifier[center_mass] = identifier[mesh] . identifier[center_mass] identifier[sample_coms] =[] keyword[while] identifier[len] ( identifier[sample_coms] )< identifier[n_samples] : identifier[remaining] = identifier[n_samples] - identifier[len] ( identifier[sample_coms] ) identifier[coms] = identifier[np] . identifier[random] . identifier[multivariate_normal] ( identifier[center_mass] , identifier[sigma] * identifier[np] . identifier[eye] ( literal[int] ), identifier[remaining] ) keyword[for] identifier[c] keyword[in] identifier[coms] : identifier[dots] = identifier[np] . identifier[einsum] ( literal[string] , identifier[c] - identifier[cvh] . identifier[triangles_center] , identifier[cvh] . identifier[face_normals] ) keyword[if] identifier[np] . identifier[all] ( identifier[dots] < literal[int] ): identifier[sample_coms] . identifier[append] ( identifier[c] ) identifier[norms_to_probs] ={} keyword[for] identifier[sample_com] keyword[in] identifier[sample_coms] : identifier[dg] = identifier[_create_topple_graph] ( identifier[cvh] , identifier[sample_com] ) identifier[nodes] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[dg] . identifier[nodes] () keyword[if] identifier[dg] . identifier[in_degree] ( identifier[n] )== literal[int] ] identifier[n_iters] = literal[int] keyword[while] identifier[len] ( identifier[nodes] )> literal[int] keyword[and] identifier[n_iters] <= identifier[len] ( identifier[mesh] . identifier[faces] ): identifier[new_nodes] =[] keyword[for] identifier[node] keyword[in] identifier[nodes] : keyword[if] identifier[dg] . 
identifier[out_degree] ( identifier[node] )== literal[int] : keyword[continue] identifier[successor] = identifier[next] ( identifier[iter] ( identifier[dg] . identifier[successors] ( identifier[node] ))) identifier[dg] . identifier[node] [ identifier[successor] ][ literal[string] ]+= identifier[dg] . identifier[node] [ identifier[node] ][ literal[string] ] identifier[dg] . identifier[node] [ identifier[node] ][ literal[string] ]= literal[int] identifier[new_nodes] . identifier[append] ( identifier[successor] ) identifier[nodes] = identifier[new_nodes] identifier[n_iters] += literal[int] keyword[for] identifier[node] keyword[in] identifier[dg] . identifier[nodes] (): keyword[if] identifier[dg] . identifier[node] [ identifier[node] ][ literal[string] ]> literal[int] : identifier[normal] = identifier[cvh] . identifier[face_normals] [ identifier[node] ] identifier[prob] = identifier[dg] . identifier[node] [ identifier[node] ][ literal[string] ] identifier[key] = identifier[tuple] ( identifier[np] . identifier[around] ( identifier[normal] , identifier[decimals] = literal[int] )) keyword[if] identifier[key] keyword[in] identifier[norms_to_probs] : identifier[norms_to_probs] [ identifier[key] ][ literal[string] ]+= literal[int] / identifier[n_samples] * identifier[prob] keyword[else] : identifier[norms_to_probs] [ identifier[key] ]={ literal[string] : literal[int] / identifier[n_samples] * identifier[prob] , literal[string] : identifier[normal] } identifier[transforms] =[] identifier[probs] =[] keyword[for] identifier[key] keyword[in] identifier[norms_to_probs] : identifier[prob] = identifier[norms_to_probs] [ identifier[key] ][ literal[string] ] keyword[if] identifier[prob] > identifier[threshold] : identifier[tf] = identifier[np] . identifier[eye] ( literal[int] ) identifier[z] =- literal[int] * identifier[norms_to_probs] [ identifier[key] ][ literal[string] ] identifier[x] = identifier[np] . 
identifier[array] ([- identifier[z] [ literal[int] ], identifier[z] [ literal[int] ], literal[int] ]) keyword[if] identifier[np] . identifier[linalg] . identifier[norm] ( identifier[x] )== literal[int] : identifier[x] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]) keyword[else] : identifier[x] = identifier[x] / identifier[np] . identifier[linalg] . identifier[norm] ( identifier[x] ) identifier[y] = identifier[np] . identifier[cross] ( identifier[z] , identifier[x] ) identifier[y] = identifier[y] / identifier[np] . identifier[linalg] . identifier[norm] ( identifier[y] ) identifier[tf] [: literal[int] ,: literal[int] ]= identifier[np] . identifier[array] ([ identifier[x] , identifier[y] , identifier[z] ]) identifier[m] = identifier[cvh] . identifier[copy] () identifier[m] . identifier[apply_transform] ( identifier[tf] ) identifier[z] =- identifier[m] . identifier[bounds] [ literal[int] ][ literal[int] ] identifier[tf] [: literal[int] , literal[int] ]= identifier[np] . identifier[array] ([ literal[int] , literal[int] , identifier[z] ]) identifier[transforms] . identifier[append] ( identifier[tf] ) identifier[probs] . identifier[append] ( identifier[prob] ) identifier[transforms] = identifier[np] . identifier[array] ( identifier[transforms] ) identifier[probs] = identifier[np] . identifier[array] ( identifier[probs] ) identifier[inds] = identifier[np] . identifier[argsort] (- identifier[probs] ) keyword[return] identifier[transforms] [ identifier[inds] ], identifier[probs] [ identifier[inds] ]
def compute_stable_poses(mesh, center_mass=None, sigma=0.0, n_samples=1, threshold=0.0): """ Computes stable orientations of a mesh and their quasi-static probabilites. This method samples the location of the center of mass from a multivariate gaussian with the mean at the center of mass, and a covariance equal to and identity matrix times sigma, over n_samples. For each sample, it computes the stable resting poses of the mesh on a a planar workspace and evaulates the probabilities of landing in each pose if the object is dropped onto the table randomly. This method returns the 4x4 homogenous transform matrices that place the shape against the planar surface with the z-axis pointing upwards and a list of the probabilities for each pose. The transforms and probabilties that are returned are sorted, with the most probable pose first. Parameters ---------- mesh : trimesh.Trimesh The target mesh com : (3,) float Rhe object center of mass. If None, this method assumes uniform density and watertightness and computes a center of mass explicitly sigma : float Rhe covariance for the multivariate gaussian used to sample center of mass locations n_samples : int The number of samples of the center of mass location threshold : float The probability value at which to threshold returned stable poses Returns ------- transforms : (n, 4, 4) float The homogenous matrices that transform the object to rest in a stable pose, with the new z-axis pointing upwards from the table and the object just touching the table. 
probs : (n,) float Probability in (0, 1) for each pose """ # save convex hull mesh to avoid a cache check cvh = mesh.convex_hull if center_mass is None: center_mass = mesh.center_mass # depends on [control=['if'], data=['center_mass']] # Sample center of mass, rejecting points outside of conv hull sample_coms = [] while len(sample_coms) < n_samples: remaining = n_samples - len(sample_coms) coms = np.random.multivariate_normal(center_mass, sigma * np.eye(3), remaining) for c in coms: dots = np.einsum('ij,ij->i', c - cvh.triangles_center, cvh.face_normals) if np.all(dots < 0): sample_coms.append(c) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['while'], data=['n_samples']] norms_to_probs = {} # Map from normal to probabilities # For each sample, compute the stable poses for sample_com in sample_coms: # Create toppling digraph dg = _create_topple_graph(cvh, sample_com) # Propagate probabilites to sink nodes with a breadth-first traversal nodes = [n for n in dg.nodes() if dg.in_degree(n) == 0] n_iters = 0 while len(nodes) > 0 and n_iters <= len(mesh.faces): new_nodes = [] for node in nodes: if dg.out_degree(node) == 0: continue # depends on [control=['if'], data=[]] successor = next(iter(dg.successors(node))) dg.node[successor]['prob'] += dg.node[node]['prob'] dg.node[node]['prob'] = 0.0 new_nodes.append(successor) # depends on [control=['for'], data=['node']] nodes = new_nodes n_iters += 1 # depends on [control=['while'], data=[]] # Collect stable poses for node in dg.nodes(): if dg.node[node]['prob'] > 0.0: normal = cvh.face_normals[node] prob = dg.node[node]['prob'] key = tuple(np.around(normal, decimals=3)) if key in norms_to_probs: norms_to_probs[key]['prob'] += 1.0 / n_samples * prob # depends on [control=['if'], data=['key', 'norms_to_probs']] else: norms_to_probs[key] = {'prob': 1.0 / n_samples * prob, 'normal': normal} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] 
# depends on [control=['for'], data=['sample_com']] transforms = [] probs = [] # Filter stable poses for key in norms_to_probs: prob = norms_to_probs[key]['prob'] if prob > threshold: tf = np.eye(4) # Compute a rotation matrix for this stable pose z = -1.0 * norms_to_probs[key]['normal'] x = np.array([-z[1], z[0], 0]) if np.linalg.norm(x) == 0.0: x = np.array([1, 0, 0]) # depends on [control=['if'], data=[]] else: x = x / np.linalg.norm(x) y = np.cross(z, x) y = y / np.linalg.norm(y) tf[:3, :3] = np.array([x, y, z]) # Compute the necessary translation for this stable pose m = cvh.copy() m.apply_transform(tf) z = -m.bounds[0][2] tf[:3, 3] = np.array([0, 0, z]) transforms.append(tf) probs.append(prob) # depends on [control=['if'], data=['prob']] # depends on [control=['for'], data=['key']] # Sort the results transforms = np.array(transforms) probs = np.array(probs) inds = np.argsort(-probs) return (transforms[inds], probs[inds])
def all_tensorboard_jobs(self): """ Similar to tensorboard_jobs, but uses the default manager to return archived experiments as well. """ from db.models.tensorboards import TensorboardJob return TensorboardJob.all.filter(project=self)
def function[all_tensorboard_jobs, parameter[self]]: constant[ Similar to tensorboard_jobs, but uses the default manager to return archived experiments as well. ] from relative_module[db.models.tensorboards] import module[TensorboardJob] return[call[name[TensorboardJob].all.filter, parameter[]]]
keyword[def] identifier[all_tensorboard_jobs] ( identifier[self] ): literal[string] keyword[from] identifier[db] . identifier[models] . identifier[tensorboards] keyword[import] identifier[TensorboardJob] keyword[return] identifier[TensorboardJob] . identifier[all] . identifier[filter] ( identifier[project] = identifier[self] )
def all_tensorboard_jobs(self): """ Similar to tensorboard_jobs, but uses the default manager to return archived experiments as well. """ from db.models.tensorboards import TensorboardJob return TensorboardJob.all.filter(project=self)
def _on_key_pressed(self, event): """ Resets editor font size to the default font size :param event: wheelEvent :type event: QKeyEvent """ if (int(event.modifiers()) & QtCore.Qt.ControlModifier > 0 and not int(event.modifiers()) & QtCore.Qt.ShiftModifier): if event.key() == QtCore.Qt.Key_0: self.editor.reset_zoom() event.accept() if event.key() == QtCore.Qt.Key_Plus: self.editor.zoom_in() event.accept() if event.key() == QtCore.Qt.Key_Minus: self.editor.zoom_out() event.accept()
def function[_on_key_pressed, parameter[self, event]]: constant[ Resets editor font size to the default font size :param event: wheelEvent :type event: QKeyEvent ] if <ast.BoolOp object at 0x7da18dc99e70> begin[:] if compare[call[name[event].key, parameter[]] equal[==] name[QtCore].Qt.Key_0] begin[:] call[name[self].editor.reset_zoom, parameter[]] call[name[event].accept, parameter[]] if compare[call[name[event].key, parameter[]] equal[==] name[QtCore].Qt.Key_Plus] begin[:] call[name[self].editor.zoom_in, parameter[]] call[name[event].accept, parameter[]] if compare[call[name[event].key, parameter[]] equal[==] name[QtCore].Qt.Key_Minus] begin[:] call[name[self].editor.zoom_out, parameter[]] call[name[event].accept, parameter[]]
keyword[def] identifier[_on_key_pressed] ( identifier[self] , identifier[event] ): literal[string] keyword[if] ( identifier[int] ( identifier[event] . identifier[modifiers] ())& identifier[QtCore] . identifier[Qt] . identifier[ControlModifier] > literal[int] keyword[and] keyword[not] identifier[int] ( identifier[event] . identifier[modifiers] ())& identifier[QtCore] . identifier[Qt] . identifier[ShiftModifier] ): keyword[if] identifier[event] . identifier[key] ()== identifier[QtCore] . identifier[Qt] . identifier[Key_0] : identifier[self] . identifier[editor] . identifier[reset_zoom] () identifier[event] . identifier[accept] () keyword[if] identifier[event] . identifier[key] ()== identifier[QtCore] . identifier[Qt] . identifier[Key_Plus] : identifier[self] . identifier[editor] . identifier[zoom_in] () identifier[event] . identifier[accept] () keyword[if] identifier[event] . identifier[key] ()== identifier[QtCore] . identifier[Qt] . identifier[Key_Minus] : identifier[self] . identifier[editor] . identifier[zoom_out] () identifier[event] . identifier[accept] ()
def _on_key_pressed(self, event): """ Resets editor font size to the default font size :param event: wheelEvent :type event: QKeyEvent """ if int(event.modifiers()) & QtCore.Qt.ControlModifier > 0 and (not int(event.modifiers()) & QtCore.Qt.ShiftModifier): if event.key() == QtCore.Qt.Key_0: self.editor.reset_zoom() event.accept() # depends on [control=['if'], data=[]] if event.key() == QtCore.Qt.Key_Plus: self.editor.zoom_in() event.accept() # depends on [control=['if'], data=[]] if event.key() == QtCore.Qt.Key_Minus: self.editor.zoom_out() event.accept() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def get_queue_func(request): """Establish the connection to rabbitmq.""" def cleanup(request): conn.close() def queue_func(**kwargs): return conn.channel().basic_publish( exchange='', body=json.dumps(kwargs), routing_key=queue, properties=pika.BasicProperties(delivery_mode=2)) server = request.registry.settings['queue_server'] queue = request.registry.settings['queue_verification'] conn = pika.BlockingConnection(pika.ConnectionParameters(host=server)) request.add_finished_callback(cleanup) return queue_func
def function[get_queue_func, parameter[request]]: constant[Establish the connection to rabbitmq.] def function[cleanup, parameter[request]]: call[name[conn].close, parameter[]] def function[queue_func, parameter[]]: return[call[call[name[conn].channel, parameter[]].basic_publish, parameter[]]] variable[server] assign[=] call[name[request].registry.settings][constant[queue_server]] variable[queue] assign[=] call[name[request].registry.settings][constant[queue_verification]] variable[conn] assign[=] call[name[pika].BlockingConnection, parameter[call[name[pika].ConnectionParameters, parameter[]]]] call[name[request].add_finished_callback, parameter[name[cleanup]]] return[name[queue_func]]
keyword[def] identifier[get_queue_func] ( identifier[request] ): literal[string] keyword[def] identifier[cleanup] ( identifier[request] ): identifier[conn] . identifier[close] () keyword[def] identifier[queue_func] (** identifier[kwargs] ): keyword[return] identifier[conn] . identifier[channel] (). identifier[basic_publish] ( identifier[exchange] = literal[string] , identifier[body] = identifier[json] . identifier[dumps] ( identifier[kwargs] ), identifier[routing_key] = identifier[queue] , identifier[properties] = identifier[pika] . identifier[BasicProperties] ( identifier[delivery_mode] = literal[int] )) identifier[server] = identifier[request] . identifier[registry] . identifier[settings] [ literal[string] ] identifier[queue] = identifier[request] . identifier[registry] . identifier[settings] [ literal[string] ] identifier[conn] = identifier[pika] . identifier[BlockingConnection] ( identifier[pika] . identifier[ConnectionParameters] ( identifier[host] = identifier[server] )) identifier[request] . identifier[add_finished_callback] ( identifier[cleanup] ) keyword[return] identifier[queue_func]
def get_queue_func(request): """Establish the connection to rabbitmq.""" def cleanup(request): conn.close() def queue_func(**kwargs): return conn.channel().basic_publish(exchange='', body=json.dumps(kwargs), routing_key=queue, properties=pika.BasicProperties(delivery_mode=2)) server = request.registry.settings['queue_server'] queue = request.registry.settings['queue_verification'] conn = pika.BlockingConnection(pika.ConnectionParameters(host=server)) request.add_finished_callback(cleanup) return queue_func
def easyopen(fname, idd=None, epw=None): """automatically set idd and open idf file. Uses version from idf to set correct idd It will work under the following circumstances: - the IDF file should have the VERSION object. - Needs the version of EnergyPlus installed that matches the IDF version. - Energyplus should be installed in the default location. Parameters ---------- fname : str, StringIO or IOBase Filepath IDF file, File handle of IDF file open to read StringIO with IDF contents within idd : str, StringIO or IOBase This is an optional argument. easyopen will find the IDD without this arg Filepath IDD file, File handle of IDD file open to read StringIO with IDD contents within epw : str path name to the weather file. This arg is needed to run EneryPlus from eppy. """ if idd: eppy.modeleditor.IDF.setiddname(idd) idf = eppy.modeleditor.IDF(fname, epw=epw) return idf # the rest of the code runs if idd=None if isinstance(fname, (IOBase, StringIO)): fhandle = fname else: fhandle = io.open(fname, 'r', encoding='latin-1') # latin-1 seems to read most things # - get the version number from the idf file txt = fhandle.read() # try: # txt = txt.decode('latin-1') # latin-1 seems to read most things # except AttributeError: # pass ntxt = eppy.EPlusInterfaceFunctions.parse_idd.nocomment(txt, '!') blocks = ntxt.split(';') blocks = [block.strip()for block in blocks] bblocks = [block.split(',') for block in blocks] bblocks1 = [[item.strip() for item in block] for block in bblocks] ver_blocks = [block for block in bblocks1 if block[0].upper() == 'VERSION'] ver_block = ver_blocks[0] versionid = ver_block[1] # - get the E+ folder based on version number iddfile = getiddfile(versionid) if os.path.exists(iddfile): pass # might be an old version of E+ else: iddfile = getoldiddfile(versionid) if os.path.exists(iddfile): # if True: # - set IDD and open IDF. 
eppy.modeleditor.IDF.setiddname(iddfile) if isinstance(fname, (IOBase, StringIO)): fhandle.seek(0) idf = eppy.modeleditor.IDF(fhandle, epw=epw) else: idf = eppy.modeleditor.IDF(fname, epw=epw) return idf else: # - can't find IDD -> throw an exception astr = "input idf file says E+ version {}. easyopen() cannot find the corresponding idd file '{}'" astr = astr.format(versionid, iddfile) raise MissingIDDException(astr)
def function[easyopen, parameter[fname, idd, epw]]: constant[automatically set idd and open idf file. Uses version from idf to set correct idd It will work under the following circumstances: - the IDF file should have the VERSION object. - Needs the version of EnergyPlus installed that matches the IDF version. - Energyplus should be installed in the default location. Parameters ---------- fname : str, StringIO or IOBase Filepath IDF file, File handle of IDF file open to read StringIO with IDF contents within idd : str, StringIO or IOBase This is an optional argument. easyopen will find the IDD without this arg Filepath IDD file, File handle of IDD file open to read StringIO with IDD contents within epw : str path name to the weather file. This arg is needed to run EneryPlus from eppy. ] if name[idd] begin[:] call[name[eppy].modeleditor.IDF.setiddname, parameter[name[idd]]] variable[idf] assign[=] call[name[eppy].modeleditor.IDF, parameter[name[fname]]] return[name[idf]] if call[name[isinstance], parameter[name[fname], tuple[[<ast.Name object at 0x7da20c76f130>, <ast.Name object at 0x7da20c76cd60>]]]] begin[:] variable[fhandle] assign[=] name[fname] variable[txt] assign[=] call[name[fhandle].read, parameter[]] variable[ntxt] assign[=] call[name[eppy].EPlusInterfaceFunctions.parse_idd.nocomment, parameter[name[txt], constant[!]]] variable[blocks] assign[=] call[name[ntxt].split, parameter[constant[;]]] variable[blocks] assign[=] <ast.ListComp object at 0x7da18dc9a140> variable[bblocks] assign[=] <ast.ListComp object at 0x7da18dc98be0> variable[bblocks1] assign[=] <ast.ListComp object at 0x7da18dc9a710> variable[ver_blocks] assign[=] <ast.ListComp object at 0x7da18dc9a050> variable[ver_block] assign[=] call[name[ver_blocks]][constant[0]] variable[versionid] assign[=] call[name[ver_block]][constant[1]] variable[iddfile] assign[=] call[name[getiddfile], parameter[name[versionid]]] if call[name[os].path.exists, parameter[name[iddfile]]] begin[:] pass if 
call[name[os].path.exists, parameter[name[iddfile]]] begin[:] call[name[eppy].modeleditor.IDF.setiddname, parameter[name[iddfile]]] if call[name[isinstance], parameter[name[fname], tuple[[<ast.Name object at 0x7da1b26aeb60>, <ast.Name object at 0x7da1b26ae2f0>]]]] begin[:] call[name[fhandle].seek, parameter[constant[0]]] variable[idf] assign[=] call[name[eppy].modeleditor.IDF, parameter[name[fhandle]]] return[name[idf]]
keyword[def] identifier[easyopen] ( identifier[fname] , identifier[idd] = keyword[None] , identifier[epw] = keyword[None] ): literal[string] keyword[if] identifier[idd] : identifier[eppy] . identifier[modeleditor] . identifier[IDF] . identifier[setiddname] ( identifier[idd] ) identifier[idf] = identifier[eppy] . identifier[modeleditor] . identifier[IDF] ( identifier[fname] , identifier[epw] = identifier[epw] ) keyword[return] identifier[idf] keyword[if] identifier[isinstance] ( identifier[fname] ,( identifier[IOBase] , identifier[StringIO] )): identifier[fhandle] = identifier[fname] keyword[else] : identifier[fhandle] = identifier[io] . identifier[open] ( identifier[fname] , literal[string] , identifier[encoding] = literal[string] ) identifier[txt] = identifier[fhandle] . identifier[read] () identifier[ntxt] = identifier[eppy] . identifier[EPlusInterfaceFunctions] . identifier[parse_idd] . identifier[nocomment] ( identifier[txt] , literal[string] ) identifier[blocks] = identifier[ntxt] . identifier[split] ( literal[string] ) identifier[blocks] =[ identifier[block] . identifier[strip] () keyword[for] identifier[block] keyword[in] identifier[blocks] ] identifier[bblocks] =[ identifier[block] . identifier[split] ( literal[string] ) keyword[for] identifier[block] keyword[in] identifier[blocks] ] identifier[bblocks1] =[[ identifier[item] . identifier[strip] () keyword[for] identifier[item] keyword[in] identifier[block] ] keyword[for] identifier[block] keyword[in] identifier[bblocks] ] identifier[ver_blocks] =[ identifier[block] keyword[for] identifier[block] keyword[in] identifier[bblocks1] keyword[if] identifier[block] [ literal[int] ]. identifier[upper] ()== literal[string] ] identifier[ver_block] = identifier[ver_blocks] [ literal[int] ] identifier[versionid] = identifier[ver_block] [ literal[int] ] identifier[iddfile] = identifier[getiddfile] ( identifier[versionid] ) keyword[if] identifier[os] . identifier[path] . 
identifier[exists] ( identifier[iddfile] ): keyword[pass] keyword[else] : identifier[iddfile] = identifier[getoldiddfile] ( identifier[versionid] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[iddfile] ): identifier[eppy] . identifier[modeleditor] . identifier[IDF] . identifier[setiddname] ( identifier[iddfile] ) keyword[if] identifier[isinstance] ( identifier[fname] ,( identifier[IOBase] , identifier[StringIO] )): identifier[fhandle] . identifier[seek] ( literal[int] ) identifier[idf] = identifier[eppy] . identifier[modeleditor] . identifier[IDF] ( identifier[fhandle] , identifier[epw] = identifier[epw] ) keyword[else] : identifier[idf] = identifier[eppy] . identifier[modeleditor] . identifier[IDF] ( identifier[fname] , identifier[epw] = identifier[epw] ) keyword[return] identifier[idf] keyword[else] : identifier[astr] = literal[string] identifier[astr] = identifier[astr] . identifier[format] ( identifier[versionid] , identifier[iddfile] ) keyword[raise] identifier[MissingIDDException] ( identifier[astr] )
def easyopen(fname, idd=None, epw=None): """automatically set idd and open idf file. Uses version from idf to set correct idd It will work under the following circumstances: - the IDF file should have the VERSION object. - Needs the version of EnergyPlus installed that matches the IDF version. - Energyplus should be installed in the default location. Parameters ---------- fname : str, StringIO or IOBase Filepath IDF file, File handle of IDF file open to read StringIO with IDF contents within idd : str, StringIO or IOBase This is an optional argument. easyopen will find the IDD without this arg Filepath IDD file, File handle of IDD file open to read StringIO with IDD contents within epw : str path name to the weather file. This arg is needed to run EneryPlus from eppy. """ if idd: eppy.modeleditor.IDF.setiddname(idd) idf = eppy.modeleditor.IDF(fname, epw=epw) return idf # depends on [control=['if'], data=[]] # the rest of the code runs if idd=None if isinstance(fname, (IOBase, StringIO)): fhandle = fname # depends on [control=['if'], data=[]] else: fhandle = io.open(fname, 'r', encoding='latin-1') # latin-1 seems to read most things # - get the version number from the idf file txt = fhandle.read() # try: # txt = txt.decode('latin-1') # latin-1 seems to read most things # except AttributeError: # pass ntxt = eppy.EPlusInterfaceFunctions.parse_idd.nocomment(txt, '!') blocks = ntxt.split(';') blocks = [block.strip() for block in blocks] bblocks = [block.split(',') for block in blocks] bblocks1 = [[item.strip() for item in block] for block in bblocks] ver_blocks = [block for block in bblocks1 if block[0].upper() == 'VERSION'] ver_block = ver_blocks[0] versionid = ver_block[1] # - get the E+ folder based on version number iddfile = getiddfile(versionid) if os.path.exists(iddfile): pass # depends on [control=['if'], data=[]] else: # might be an old version of E+ iddfile = getoldiddfile(versionid) if os.path.exists(iddfile): # if True: # - set IDD and open IDF. 
eppy.modeleditor.IDF.setiddname(iddfile) if isinstance(fname, (IOBase, StringIO)): fhandle.seek(0) idf = eppy.modeleditor.IDF(fhandle, epw=epw) # depends on [control=['if'], data=[]] else: idf = eppy.modeleditor.IDF(fname, epw=epw) return idf # depends on [control=['if'], data=[]] else: # - can't find IDD -> throw an exception astr = "input idf file says E+ version {}. easyopen() cannot find the corresponding idd file '{}'" astr = astr.format(versionid, iddfile) raise MissingIDDException(astr)
def parse(url): """Parses a search URL.""" config = {} url = urlparse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] if url.scheme in SCHEMES: config["ENGINE"] = SCHEMES[url.scheme] if url.scheme in USES_URL: config["URL"] = urlparse.urlunparse(("http",) + url[1:]) if url.scheme in USES_INDEX: if path.endswith("/"): path = path[:-1] split = path.rsplit("/", 1) if len(split) > 1: path = split[:-1] index = split[-1] else: path = "" index = split[0] config.update({ "URL": urlparse.urlunparse(("http",) + url[1:2] + (path,) + url[3:]), "INDEX_NAME": index, }) if url.scheme in USES_PATH: config.update({ "PATH": path, }) return config
def function[parse, parameter[url]]: constant[Parses a search URL.] variable[config] assign[=] dictionary[[], []] variable[url] assign[=] call[name[urlparse].urlparse, parameter[name[url]]] variable[path] assign[=] call[name[url].path][<ast.Slice object at 0x7da18f721f90>] variable[path] assign[=] call[call[name[path].split, parameter[constant[?], constant[2]]]][constant[0]] if compare[name[url].scheme in name[SCHEMES]] begin[:] call[name[config]][constant[ENGINE]] assign[=] call[name[SCHEMES]][name[url].scheme] if compare[name[url].scheme in name[USES_URL]] begin[:] call[name[config]][constant[URL]] assign[=] call[name[urlparse].urlunparse, parameter[binary_operation[tuple[[<ast.Constant object at 0x7da18f720430>]] + call[name[url]][<ast.Slice object at 0x7da18f7208e0>]]]] if compare[name[url].scheme in name[USES_INDEX]] begin[:] if call[name[path].endswith, parameter[constant[/]]] begin[:] variable[path] assign[=] call[name[path]][<ast.Slice object at 0x7da18dc05e40>] variable[split] assign[=] call[name[path].rsplit, parameter[constant[/], constant[1]]] if compare[call[name[len], parameter[name[split]]] greater[>] constant[1]] begin[:] variable[path] assign[=] call[name[split]][<ast.Slice object at 0x7da18dc075b0>] variable[index] assign[=] call[name[split]][<ast.UnaryOp object at 0x7da18dc06020>] call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc07460>, <ast.Constant object at 0x7da18dc077f0>], [<ast.Call object at 0x7da18dc06f50>, <ast.Name object at 0x7da18dc04460>]]]] if compare[name[url].scheme in name[USES_PATH]] begin[:] call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc04100>], [<ast.Name object at 0x7da18dc051e0>]]]] return[name[config]]
keyword[def] identifier[parse] ( identifier[url] ): literal[string] identifier[config] ={} identifier[url] = identifier[urlparse] . identifier[urlparse] ( identifier[url] ) identifier[path] = identifier[url] . identifier[path] [ literal[int] :] identifier[path] = identifier[path] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] keyword[if] identifier[url] . identifier[scheme] keyword[in] identifier[SCHEMES] : identifier[config] [ literal[string] ]= identifier[SCHEMES] [ identifier[url] . identifier[scheme] ] keyword[if] identifier[url] . identifier[scheme] keyword[in] identifier[USES_URL] : identifier[config] [ literal[string] ]= identifier[urlparse] . identifier[urlunparse] (( literal[string] ,)+ identifier[url] [ literal[int] :]) keyword[if] identifier[url] . identifier[scheme] keyword[in] identifier[USES_INDEX] : keyword[if] identifier[path] . identifier[endswith] ( literal[string] ): identifier[path] = identifier[path] [:- literal[int] ] identifier[split] = identifier[path] . identifier[rsplit] ( literal[string] , literal[int] ) keyword[if] identifier[len] ( identifier[split] )> literal[int] : identifier[path] = identifier[split] [:- literal[int] ] identifier[index] = identifier[split] [- literal[int] ] keyword[else] : identifier[path] = literal[string] identifier[index] = identifier[split] [ literal[int] ] identifier[config] . identifier[update] ({ literal[string] : identifier[urlparse] . identifier[urlunparse] (( literal[string] ,)+ identifier[url] [ literal[int] : literal[int] ]+( identifier[path] ,)+ identifier[url] [ literal[int] :]), literal[string] : identifier[index] , }) keyword[if] identifier[url] . identifier[scheme] keyword[in] identifier[USES_PATH] : identifier[config] . identifier[update] ({ literal[string] : identifier[path] , }) keyword[return] identifier[config]
def parse(url): """Parses a search URL.""" config = {} url = urlparse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] if url.scheme in SCHEMES: config['ENGINE'] = SCHEMES[url.scheme] # depends on [control=['if'], data=['SCHEMES']] if url.scheme in USES_URL: config['URL'] = urlparse.urlunparse(('http',) + url[1:]) # depends on [control=['if'], data=[]] if url.scheme in USES_INDEX: if path.endswith('/'): path = path[:-1] # depends on [control=['if'], data=[]] split = path.rsplit('/', 1) if len(split) > 1: path = split[:-1] index = split[-1] # depends on [control=['if'], data=[]] else: path = '' index = split[0] config.update({'URL': urlparse.urlunparse(('http',) + url[1:2] + (path,) + url[3:]), 'INDEX_NAME': index}) # depends on [control=['if'], data=[]] if url.scheme in USES_PATH: config.update({'PATH': path}) # depends on [control=['if'], data=[]] return config
def get_raw_values(self, pydict, recovery_name=True): """Convert naive get response data to human readable field name format. using raw data format. """ new_dict = {"id": pydict["id"]} for field in self: raw_key = "%s_raw" % field.key if raw_key in pydict: if recovery_name: new_dict[field.name] = pydict[raw_key] else: new_dict[field.key] = pydict[raw_key] return new_dict
def function[get_raw_values, parameter[self, pydict, recovery_name]]: constant[Convert naive get response data to human readable field name format. using raw data format. ] variable[new_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b14abc10>], [<ast.Subscript object at 0x7da1b14aabf0>]] for taget[name[field]] in starred[name[self]] begin[:] variable[raw_key] assign[=] binary_operation[constant[%s_raw] <ast.Mod object at 0x7da2590d6920> name[field].key] if compare[name[raw_key] in name[pydict]] begin[:] if name[recovery_name] begin[:] call[name[new_dict]][name[field].name] assign[=] call[name[pydict]][name[raw_key]] return[name[new_dict]]
keyword[def] identifier[get_raw_values] ( identifier[self] , identifier[pydict] , identifier[recovery_name] = keyword[True] ): literal[string] identifier[new_dict] ={ literal[string] : identifier[pydict] [ literal[string] ]} keyword[for] identifier[field] keyword[in] identifier[self] : identifier[raw_key] = literal[string] % identifier[field] . identifier[key] keyword[if] identifier[raw_key] keyword[in] identifier[pydict] : keyword[if] identifier[recovery_name] : identifier[new_dict] [ identifier[field] . identifier[name] ]= identifier[pydict] [ identifier[raw_key] ] keyword[else] : identifier[new_dict] [ identifier[field] . identifier[key] ]= identifier[pydict] [ identifier[raw_key] ] keyword[return] identifier[new_dict]
def get_raw_values(self, pydict, recovery_name=True): """Convert naive get response data to human readable field name format. using raw data format. """ new_dict = {'id': pydict['id']} for field in self: raw_key = '%s_raw' % field.key if raw_key in pydict: if recovery_name: new_dict[field.name] = pydict[raw_key] # depends on [control=['if'], data=[]] else: new_dict[field.key] = pydict[raw_key] # depends on [control=['if'], data=['raw_key', 'pydict']] # depends on [control=['for'], data=['field']] return new_dict
def possible_import_patterns(modname): """ does not support from x import * does not support from x import z, y Example: >>> # DISABLE_DOCTEST >>> import utool as ut >>> modname = 'package.submod.submod2.module' >>> result = ut.repr3(ut.possible_import_patterns(modname)) >>> print(result) [ 'import\\spackage.submod.submod2.module', 'from\\spackage\\.submod\\.submod2\\simportmodule', ] """ # common regexes WS = r'\s' import_ = 'import' from_ = 'from' dot_ = r'\.' patterns = [import_ + WS + modname] if '.' in modname: parts = modname.split('.') modpart = dot_.join(parts[0:-1]) imppart = parts[-1] patterns += [from_ + WS + modpart + WS + import_ + imppart] NONSTANDARD = False if NONSTANDARD: if '.' in modname: for i in range(1, len(parts) - 1): modpart = '.'.join(parts[i:-1]) imppart = parts[-1] patterns += [from_ + WS + modpart + WS + import_ + imppart] imppart = parts[-1] patterns += [import_ + WS + imppart] return patterns
def function[possible_import_patterns, parameter[modname]]: constant[ does not support from x import * does not support from x import z, y Example: >>> # DISABLE_DOCTEST >>> import utool as ut >>> modname = 'package.submod.submod2.module' >>> result = ut.repr3(ut.possible_import_patterns(modname)) >>> print(result) [ 'import\spackage.submod.submod2.module', 'from\spackage\.submod\.submod2\simportmodule', ] ] variable[WS] assign[=] constant[\s] variable[import_] assign[=] constant[import] variable[from_] assign[=] constant[from] variable[dot_] assign[=] constant[\.] variable[patterns] assign[=] list[[<ast.BinOp object at 0x7da1b2462650>]] if compare[constant[.] in name[modname]] begin[:] variable[parts] assign[=] call[name[modname].split, parameter[constant[.]]] variable[modpart] assign[=] call[name[dot_].join, parameter[call[name[parts]][<ast.Slice object at 0x7da1b2461090>]]] variable[imppart] assign[=] call[name[parts]][<ast.UnaryOp object at 0x7da1b2468400>] <ast.AugAssign object at 0x7da1b24697b0> variable[NONSTANDARD] assign[=] constant[False] if name[NONSTANDARD] begin[:] if compare[constant[.] in name[modname]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[parts]]] - constant[1]]]]] begin[:] variable[modpart] assign[=] call[constant[.].join, parameter[call[name[parts]][<ast.Slice object at 0x7da1b24683a0>]]] variable[imppart] assign[=] call[name[parts]][<ast.UnaryOp object at 0x7da1b246a980>] <ast.AugAssign object at 0x7da1b246b460> variable[imppart] assign[=] call[name[parts]][<ast.UnaryOp object at 0x7da1b246ab00>] <ast.AugAssign object at 0x7da1b24687c0> return[name[patterns]]
keyword[def] identifier[possible_import_patterns] ( identifier[modname] ): literal[string] identifier[WS] = literal[string] identifier[import_] = literal[string] identifier[from_] = literal[string] identifier[dot_] = literal[string] identifier[patterns] =[ identifier[import_] + identifier[WS] + identifier[modname] ] keyword[if] literal[string] keyword[in] identifier[modname] : identifier[parts] = identifier[modname] . identifier[split] ( literal[string] ) identifier[modpart] = identifier[dot_] . identifier[join] ( identifier[parts] [ literal[int] :- literal[int] ]) identifier[imppart] = identifier[parts] [- literal[int] ] identifier[patterns] +=[ identifier[from_] + identifier[WS] + identifier[modpart] + identifier[WS] + identifier[import_] + identifier[imppart] ] identifier[NONSTANDARD] = keyword[False] keyword[if] identifier[NONSTANDARD] : keyword[if] literal[string] keyword[in] identifier[modname] : keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[parts] )- literal[int] ): identifier[modpart] = literal[string] . identifier[join] ( identifier[parts] [ identifier[i] :- literal[int] ]) identifier[imppart] = identifier[parts] [- literal[int] ] identifier[patterns] +=[ identifier[from_] + identifier[WS] + identifier[modpart] + identifier[WS] + identifier[import_] + identifier[imppart] ] identifier[imppart] = identifier[parts] [- literal[int] ] identifier[patterns] +=[ identifier[import_] + identifier[WS] + identifier[imppart] ] keyword[return] identifier[patterns]
def possible_import_patterns(modname): """ does not support from x import * does not support from x import z, y Example: >>> # DISABLE_DOCTEST >>> import utool as ut >>> modname = 'package.submod.submod2.module' >>> result = ut.repr3(ut.possible_import_patterns(modname)) >>> print(result) [ 'import\\spackage.submod.submod2.module', 'from\\spackage\\.submod\\.submod2\\simportmodule', ] """ # common regexes WS = '\\s' import_ = 'import' from_ = 'from' dot_ = '\\.' patterns = [import_ + WS + modname] if '.' in modname: parts = modname.split('.') modpart = dot_.join(parts[0:-1]) imppart = parts[-1] patterns += [from_ + WS + modpart + WS + import_ + imppart] # depends on [control=['if'], data=['modname']] NONSTANDARD = False if NONSTANDARD: if '.' in modname: for i in range(1, len(parts) - 1): modpart = '.'.join(parts[i:-1]) imppart = parts[-1] patterns += [from_ + WS + modpart + WS + import_ + imppart] # depends on [control=['for'], data=['i']] imppart = parts[-1] patterns += [import_ + WS + imppart] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return patterns
def from_hex(cls, value): """Initialize a new network from hexadecimal notation.""" if len(value) == 8: return cls(int(value, 16)) elif len(value) == 32: return cls(int(value, 16)) else: raise ValueError('%r: invalid hexadecimal notation' % (value,))
def function[from_hex, parameter[cls, value]]: constant[Initialize a new network from hexadecimal notation.] if compare[call[name[len], parameter[name[value]]] equal[==] constant[8]] begin[:] return[call[name[cls], parameter[call[name[int], parameter[name[value], constant[16]]]]]]
keyword[def] identifier[from_hex] ( identifier[cls] , identifier[value] ): literal[string] keyword[if] identifier[len] ( identifier[value] )== literal[int] : keyword[return] identifier[cls] ( identifier[int] ( identifier[value] , literal[int] )) keyword[elif] identifier[len] ( identifier[value] )== literal[int] : keyword[return] identifier[cls] ( identifier[int] ( identifier[value] , literal[int] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[value] ,))
def from_hex(cls, value): """Initialize a new network from hexadecimal notation.""" if len(value) == 8: return cls(int(value, 16)) # depends on [control=['if'], data=[]] elif len(value) == 32: return cls(int(value, 16)) # depends on [control=['if'], data=[]] else: raise ValueError('%r: invalid hexadecimal notation' % (value,))
def edges(self, zfill = 3): """ Returns the aspect ratio of all elements. """ edges = self.split("edges", at = "coords").unstack() edges["lx"] = edges.x[1]-edges.x[0] edges["ly"] = edges.y[1]-edges.y[0] edges["lz"] = edges.z[1]-edges.z[0] edges["l"] = np.linalg.norm(edges[["lx", "ly", "lz"]], axis = 1) edges = (edges.l).unstack() edges.columns = pd.MultiIndex.from_product([["length"], ["e" + "{0}".format(s).zfill(zfill) for s in np.arange(edges.shape[1])]]) edges[("stats", "lmax")] = edges.length.max(axis = 1) edges[("stats", "lmin")] = edges.length.min(axis = 1) edges[("stats", "aspect_ratio")] = edges.stats.lmax / edges.stats.lmin return edges.sort_index(axis = 1)
def function[edges, parameter[self, zfill]]: constant[ Returns the aspect ratio of all elements. ] variable[edges] assign[=] call[call[name[self].split, parameter[constant[edges]]].unstack, parameter[]] call[name[edges]][constant[lx]] assign[=] binary_operation[call[name[edges].x][constant[1]] - call[name[edges].x][constant[0]]] call[name[edges]][constant[ly]] assign[=] binary_operation[call[name[edges].y][constant[1]] - call[name[edges].y][constant[0]]] call[name[edges]][constant[lz]] assign[=] binary_operation[call[name[edges].z][constant[1]] - call[name[edges].z][constant[0]]] call[name[edges]][constant[l]] assign[=] call[name[np].linalg.norm, parameter[call[name[edges]][list[[<ast.Constant object at 0x7da1b002eb30>, <ast.Constant object at 0x7da1b002ead0>, <ast.Constant object at 0x7da1b002ea70>]]]]] variable[edges] assign[=] call[name[edges].l.unstack, parameter[]] name[edges].columns assign[=] call[name[pd].MultiIndex.from_product, parameter[list[[<ast.List object at 0x7da1b00231f0>, <ast.ListComp object at 0x7da1b0020070>]]]] call[name[edges]][tuple[[<ast.Constant object at 0x7da1b0020a90>, <ast.Constant object at 0x7da1b0020ac0>]]] assign[=] call[name[edges].length.max, parameter[]] call[name[edges]][tuple[[<ast.Constant object at 0x7da1b0020cd0>, <ast.Constant object at 0x7da1b0020d00>]]] assign[=] call[name[edges].length.min, parameter[]] call[name[edges]][tuple[[<ast.Constant object at 0x7da1b0020f10>, <ast.Constant object at 0x7da1b0020f40>]]] assign[=] binary_operation[name[edges].stats.lmax / name[edges].stats.lmin] return[call[name[edges].sort_index, parameter[]]]
keyword[def] identifier[edges] ( identifier[self] , identifier[zfill] = literal[int] ): literal[string] identifier[edges] = identifier[self] . identifier[split] ( literal[string] , identifier[at] = literal[string] ). identifier[unstack] () identifier[edges] [ literal[string] ]= identifier[edges] . identifier[x] [ literal[int] ]- identifier[edges] . identifier[x] [ literal[int] ] identifier[edges] [ literal[string] ]= identifier[edges] . identifier[y] [ literal[int] ]- identifier[edges] . identifier[y] [ literal[int] ] identifier[edges] [ literal[string] ]= identifier[edges] . identifier[z] [ literal[int] ]- identifier[edges] . identifier[z] [ literal[int] ] identifier[edges] [ literal[string] ]= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[edges] [[ literal[string] , literal[string] , literal[string] ]], identifier[axis] = literal[int] ) identifier[edges] =( identifier[edges] . identifier[l] ). identifier[unstack] () identifier[edges] . identifier[columns] = identifier[pd] . identifier[MultiIndex] . identifier[from_product] ([[ literal[string] ], [ literal[string] + literal[string] . identifier[format] ( identifier[s] ). identifier[zfill] ( identifier[zfill] ) keyword[for] identifier[s] keyword[in] identifier[np] . identifier[arange] ( identifier[edges] . identifier[shape] [ literal[int] ])]]) identifier[edges] [( literal[string] , literal[string] )]= identifier[edges] . identifier[length] . identifier[max] ( identifier[axis] = literal[int] ) identifier[edges] [( literal[string] , literal[string] )]= identifier[edges] . identifier[length] . identifier[min] ( identifier[axis] = literal[int] ) identifier[edges] [( literal[string] , literal[string] )]= identifier[edges] . identifier[stats] . identifier[lmax] / identifier[edges] . identifier[stats] . identifier[lmin] keyword[return] identifier[edges] . identifier[sort_index] ( identifier[axis] = literal[int] )
def edges(self, zfill=3): """ Returns the aspect ratio of all elements. """ edges = self.split('edges', at='coords').unstack() edges['lx'] = edges.x[1] - edges.x[0] edges['ly'] = edges.y[1] - edges.y[0] edges['lz'] = edges.z[1] - edges.z[0] edges['l'] = np.linalg.norm(edges[['lx', 'ly', 'lz']], axis=1) edges = edges.l.unstack() edges.columns = pd.MultiIndex.from_product([['length'], ['e' + '{0}'.format(s).zfill(zfill) for s in np.arange(edges.shape[1])]]) edges['stats', 'lmax'] = edges.length.max(axis=1) edges['stats', 'lmin'] = edges.length.min(axis=1) edges['stats', 'aspect_ratio'] = edges.stats.lmax / edges.stats.lmin return edges.sort_index(axis=1)
def from_json(cls, json_obj, variables=None): """ Constructs a Variable from the provided json-object. Example -------- >>> import json >>> with open("path_to_file.json") as infile: >>> constraint = Constraint.from_json(json.load(infile)) """ if variables is None: variables = {} expression = parse_expr(json_obj["expression"], variables) if json_obj["indicator_variable"] is None: indicator = None else: indicator = variables[json_obj["indicator_variable"]] return cls( expression, name=json_obj["name"], lb=json_obj["lb"], ub=json_obj["ub"], indicator_variable=indicator, active_when=json_obj["active_when"] )
def function[from_json, parameter[cls, json_obj, variables]]: constant[ Constructs a Variable from the provided json-object. Example -------- >>> import json >>> with open("path_to_file.json") as infile: >>> constraint = Constraint.from_json(json.load(infile)) ] if compare[name[variables] is constant[None]] begin[:] variable[variables] assign[=] dictionary[[], []] variable[expression] assign[=] call[name[parse_expr], parameter[call[name[json_obj]][constant[expression]], name[variables]]] if compare[call[name[json_obj]][constant[indicator_variable]] is constant[None]] begin[:] variable[indicator] assign[=] constant[None] return[call[name[cls], parameter[name[expression]]]]
keyword[def] identifier[from_json] ( identifier[cls] , identifier[json_obj] , identifier[variables] = keyword[None] ): literal[string] keyword[if] identifier[variables] keyword[is] keyword[None] : identifier[variables] ={} identifier[expression] = identifier[parse_expr] ( identifier[json_obj] [ literal[string] ], identifier[variables] ) keyword[if] identifier[json_obj] [ literal[string] ] keyword[is] keyword[None] : identifier[indicator] = keyword[None] keyword[else] : identifier[indicator] = identifier[variables] [ identifier[json_obj] [ literal[string] ]] keyword[return] identifier[cls] ( identifier[expression] , identifier[name] = identifier[json_obj] [ literal[string] ], identifier[lb] = identifier[json_obj] [ literal[string] ], identifier[ub] = identifier[json_obj] [ literal[string] ], identifier[indicator_variable] = identifier[indicator] , identifier[active_when] = identifier[json_obj] [ literal[string] ] )
def from_json(cls, json_obj, variables=None): """ Constructs a Variable from the provided json-object. Example -------- >>> import json >>> with open("path_to_file.json") as infile: >>> constraint = Constraint.from_json(json.load(infile)) """ if variables is None: variables = {} # depends on [control=['if'], data=['variables']] expression = parse_expr(json_obj['expression'], variables) if json_obj['indicator_variable'] is None: indicator = None # depends on [control=['if'], data=[]] else: indicator = variables[json_obj['indicator_variable']] return cls(expression, name=json_obj['name'], lb=json_obj['lb'], ub=json_obj['ub'], indicator_variable=indicator, active_when=json_obj['active_when'])
def from_dataframe(cls, **kwargs):
    """Class-method constructor to create an Ensemble from a pandas.DataFrame.

    Parameters
    ----------
    **kwargs : dict
        Optional args passed through to the Ensemble constructor.  Must
        contain 'df', a pandas.DataFrame instance.  May contain
        'mean_values' (defaults to the column means of ``df``).

    Returns
    -------
    Ensemble : Ensemble

    Raises
    ------
    TypeError
        If 'df' is not a pandas.DataFrame.
    KeyError
        If 'df' is not present in kwargs.
    """
    df = kwargs.pop("df")
    # raise (not assert) so the type check survives `python -O`
    if not isinstance(df, pd.DataFrame):
        raise TypeError("from_dataframe() requires 'df' to be a "
                        "pandas.DataFrame, not {0}".format(type(df).__name__))
    # normalize column names to lower case (note: mutates the caller's df)
    df.columns = [c.lower() for c in df.columns]
    # column means are taken after lower-casing, so mean_values keys match
    mean_values = kwargs.pop("mean_values", df.mean(axis=0))
    return cls(data=df, index=df.index, columns=df.columns,
               mean_values=mean_values, **kwargs)
def function[from_dataframe, parameter[cls]]: constant[class method constructor to create an Ensemble from a pandas.DataFrame Parameters ---------- **kwargs : dict optional args to pass to the Ensemble Constructor. Expects 'df' in kwargs.keys() that must be a pandas.DataFrame instance Returns ------- Ensemble : Ensemble ] variable[df] assign[=] call[name[kwargs].pop, parameter[constant[df]]] assert[call[name[isinstance], parameter[name[df], name[pd].DataFrame]]] name[df].columns assign[=] <ast.ListComp object at 0x7da2041dbf70> variable[mean_values] assign[=] call[name[kwargs].pop, parameter[constant[mean_values], call[name[df].mean, parameter[]]]] variable[e] assign[=] call[name[cls], parameter[]] return[name[e]]
keyword[def] identifier[from_dataframe] ( identifier[cls] ,** identifier[kwargs] ): literal[string] identifier[df] = identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[assert] identifier[isinstance] ( identifier[df] , identifier[pd] . identifier[DataFrame] ) identifier[df] . identifier[columns] =[ identifier[c] . identifier[lower] () keyword[for] identifier[c] keyword[in] identifier[df] . identifier[columns] ] identifier[mean_values] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[df] . identifier[mean] ( identifier[axis] = literal[int] )) identifier[e] = identifier[cls] ( identifier[data] = identifier[df] , identifier[index] = identifier[df] . identifier[index] , identifier[columns] = identifier[df] . identifier[columns] , identifier[mean_values] = identifier[mean_values] ,** identifier[kwargs] ) keyword[return] identifier[e]
def from_dataframe(cls, **kwargs): """class method constructor to create an Ensemble from a pandas.DataFrame Parameters ---------- **kwargs : dict optional args to pass to the Ensemble Constructor. Expects 'df' in kwargs.keys() that must be a pandas.DataFrame instance Returns ------- Ensemble : Ensemble """ df = kwargs.pop('df') assert isinstance(df, pd.DataFrame) df.columns = [c.lower() for c in df.columns] mean_values = kwargs.pop('mean_values', df.mean(axis=0)) e = cls(data=df, index=df.index, columns=df.columns, mean_values=mean_values, **kwargs) return e
def get_boto_client(
        client,
        region=None,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        aws_session_token=None,
        endpoint_url=None
):
    """Get a boto3 client connection.

    Clients are memoized in CLIENT_CACHE; the cached copy is only *read*
    when no session token is involved, but the freshly built client is
    always stored under its key.
    """
    cache_key = '{0}:{1}:{2}:{3}'.format(
        client,
        region,
        aws_access_key_id,
        endpoint_url or ''
    )
    # session-token credentials are temporary, so never serve those
    # from the cache
    cacheable = not aws_session_token
    if cacheable and cache_key in CLIENT_CACHE:
        return CLIENT_CACHE[cache_key]
    session = get_boto_session(
        region,
        aws_access_key_id,
        aws_secret_access_key,
        aws_session_token
    )
    if not session:
        logging.error("Failed to get {0} client.".format(client))
        return None
    CLIENT_CACHE[cache_key] = session.client(
        client,
        endpoint_url=endpoint_url
    )
    return CLIENT_CACHE[cache_key]
def function[get_boto_client, parameter[client, region, aws_access_key_id, aws_secret_access_key, aws_session_token, endpoint_url]]: constant[Get a boto3 client connection.] variable[cache_key] assign[=] call[constant[{0}:{1}:{2}:{3}].format, parameter[name[client], name[region], name[aws_access_key_id], <ast.BoolOp object at 0x7da1b04fea70>]] if <ast.UnaryOp object at 0x7da1b04fc2b0> begin[:] if compare[name[cache_key] in name[CLIENT_CACHE]] begin[:] return[call[name[CLIENT_CACHE]][name[cache_key]]] variable[session] assign[=] call[name[get_boto_session], parameter[name[region], name[aws_access_key_id], name[aws_secret_access_key], name[aws_session_token]]] if <ast.UnaryOp object at 0x7da1b04fd1b0> begin[:] call[name[logging].error, parameter[call[constant[Failed to get {0} client.].format, parameter[name[client]]]]] return[constant[None]] call[name[CLIENT_CACHE]][name[cache_key]] assign[=] call[name[session].client, parameter[name[client]]] return[call[name[CLIENT_CACHE]][name[cache_key]]]
keyword[def] identifier[get_boto_client] ( identifier[client] , identifier[region] = keyword[None] , identifier[aws_access_key_id] = keyword[None] , identifier[aws_secret_access_key] = keyword[None] , identifier[aws_session_token] = keyword[None] , identifier[endpoint_url] = keyword[None] ): literal[string] identifier[cache_key] = literal[string] . identifier[format] ( identifier[client] , identifier[region] , identifier[aws_access_key_id] , identifier[endpoint_url] keyword[or] literal[string] ) keyword[if] keyword[not] identifier[aws_session_token] : keyword[if] identifier[cache_key] keyword[in] identifier[CLIENT_CACHE] : keyword[return] identifier[CLIENT_CACHE] [ identifier[cache_key] ] identifier[session] = identifier[get_boto_session] ( identifier[region] , identifier[aws_access_key_id] , identifier[aws_secret_access_key] , identifier[aws_session_token] ) keyword[if] keyword[not] identifier[session] : identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[client] )) keyword[return] keyword[None] identifier[CLIENT_CACHE] [ identifier[cache_key] ]= identifier[session] . identifier[client] ( identifier[client] , identifier[endpoint_url] = identifier[endpoint_url] ) keyword[return] identifier[CLIENT_CACHE] [ identifier[cache_key] ]
def get_boto_client(client, region=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, endpoint_url=None): """Get a boto3 client connection.""" cache_key = '{0}:{1}:{2}:{3}'.format(client, region, aws_access_key_id, endpoint_url or '') if not aws_session_token: if cache_key in CLIENT_CACHE: return CLIENT_CACHE[cache_key] # depends on [control=['if'], data=['cache_key', 'CLIENT_CACHE']] # depends on [control=['if'], data=[]] session = get_boto_session(region, aws_access_key_id, aws_secret_access_key, aws_session_token) if not session: logging.error('Failed to get {0} client.'.format(client)) return None # depends on [control=['if'], data=[]] CLIENT_CACHE[cache_key] = session.client(client, endpoint_url=endpoint_url) return CLIENT_CACHE[cache_key]
def symlink_plus(orig, new):
    """Create relative symlinks and handle associated biological index files.

    For the base file and each known index extension (.idx, .gbi, .tbi,
    .bai, .fai) that exists alongside ``orig``, create a relative symlink
    at ``new`` + extension, falling back to a plain file copy when the
    symlink call fails.  Companions that swap the extension on the stem
    (.bai, .dict) are linked as well.

    :param orig: path to the existing source file (made absolute first)
    :param new: destination path for the symlink(s)
    :raises RuntimeError: if ``orig`` does not exist
    """
    orig = os.path.abspath(orig)
    if not os.path.exists(orig):
        raise RuntimeError("File not found: %s" % orig)
    for ext in ["", ".idx", ".gbi", ".tbi", ".bai", ".fai"]:
        # link only when this source variant exists and the target is
        # either absent or a broken (non-resolving) symlink
        if os.path.exists(orig + ext) and (not os.path.lexists(new + ext) or not os.path.exists(new + ext)):
            # chdir into the destination directory so the link target can
            # be a relative path (keeps links valid if the tree moves)
            with chdir(os.path.dirname(new)):
                remove_safe(new + ext)
                # Work around symlink issues on some filesystems. Randomly
                # fail to symlink.
                try:
                    os.symlink(os.path.relpath(orig + ext), os.path.basename(new + ext))
                except OSError:
                    # fall back to copying the file if no usable link was made
                    if not os.path.exists(new + ext) or not os.path.lexists(new + ext):
                        remove_safe(new + ext)
                        shutil.copyfile(orig + ext, new + ext)
    # companions that replace the extension instead of appending to it,
    # e.g. sample.bam -> sample.bai / sample.dict
    orig_noext = splitext_plus(orig)[0]
    new_noext = splitext_plus(new)[0]
    for sub_ext in [".bai", ".dict"]:
        if os.path.exists(orig_noext + sub_ext) and not os.path.lexists(new_noext + sub_ext):
            with chdir(os.path.dirname(new_noext)):
                os.symlink(os.path.relpath(orig_noext + sub_ext), os.path.basename(new_noext + sub_ext))
def function[symlink_plus, parameter[orig, new]]: constant[Create relative symlinks and handle associated biological index files. ] variable[orig] assign[=] call[name[os].path.abspath, parameter[name[orig]]] if <ast.UnaryOp object at 0x7da1b18bd600> begin[:] <ast.Raise object at 0x7da1b18bf0a0> for taget[name[ext]] in starred[list[[<ast.Constant object at 0x7da1b18be3b0>, <ast.Constant object at 0x7da1b18be0e0>, <ast.Constant object at 0x7da1b18befb0>, <ast.Constant object at 0x7da1b18bf970>, <ast.Constant object at 0x7da1b18bc3a0>, <ast.Constant object at 0x7da1b18bd150>]]] begin[:] if <ast.BoolOp object at 0x7da1b18bddb0> begin[:] with call[name[chdir], parameter[call[name[os].path.dirname, parameter[name[new]]]]] begin[:] call[name[remove_safe], parameter[binary_operation[name[new] + name[ext]]]] <ast.Try object at 0x7da1b18bcee0> variable[orig_noext] assign[=] call[call[name[splitext_plus], parameter[name[orig]]]][constant[0]] variable[new_noext] assign[=] call[call[name[splitext_plus], parameter[name[new]]]][constant[0]] for taget[name[sub_ext]] in starred[list[[<ast.Constant object at 0x7da1b18bc700>, <ast.Constant object at 0x7da1b18bfc40>]]] begin[:] if <ast.BoolOp object at 0x7da1b18be080> begin[:] with call[name[chdir], parameter[call[name[os].path.dirname, parameter[name[new_noext]]]]] begin[:] call[name[os].symlink, parameter[call[name[os].path.relpath, parameter[binary_operation[name[orig_noext] + name[sub_ext]]]], call[name[os].path.basename, parameter[binary_operation[name[new_noext] + name[sub_ext]]]]]]
keyword[def] identifier[symlink_plus] ( identifier[orig] , identifier[new] ): literal[string] identifier[orig] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[orig] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[orig] ): keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[orig] ) keyword[for] identifier[ext] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[orig] + identifier[ext] ) keyword[and] ( keyword[not] identifier[os] . identifier[path] . identifier[lexists] ( identifier[new] + identifier[ext] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[new] + identifier[ext] )): keyword[with] identifier[chdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[new] )): identifier[remove_safe] ( identifier[new] + identifier[ext] ) keyword[try] : identifier[os] . identifier[symlink] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[orig] + identifier[ext] ), identifier[os] . identifier[path] . identifier[basename] ( identifier[new] + identifier[ext] )) keyword[except] identifier[OSError] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[new] + identifier[ext] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[lexists] ( identifier[new] + identifier[ext] ): identifier[remove_safe] ( identifier[new] + identifier[ext] ) identifier[shutil] . 
identifier[copyfile] ( identifier[orig] + identifier[ext] , identifier[new] + identifier[ext] ) identifier[orig_noext] = identifier[splitext_plus] ( identifier[orig] )[ literal[int] ] identifier[new_noext] = identifier[splitext_plus] ( identifier[new] )[ literal[int] ] keyword[for] identifier[sub_ext] keyword[in] [ literal[string] , literal[string] ]: keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[orig_noext] + identifier[sub_ext] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[lexists] ( identifier[new_noext] + identifier[sub_ext] ): keyword[with] identifier[chdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[new_noext] )): identifier[os] . identifier[symlink] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[orig_noext] + identifier[sub_ext] ), identifier[os] . identifier[path] . identifier[basename] ( identifier[new_noext] + identifier[sub_ext] ))
def symlink_plus(orig, new): """Create relative symlinks and handle associated biological index files. """ orig = os.path.abspath(orig) if not os.path.exists(orig): raise RuntimeError('File not found: %s' % orig) # depends on [control=['if'], data=[]] for ext in ['', '.idx', '.gbi', '.tbi', '.bai', '.fai']: if os.path.exists(orig + ext) and (not os.path.lexists(new + ext) or not os.path.exists(new + ext)): with chdir(os.path.dirname(new)): remove_safe(new + ext) # Work around symlink issues on some filesystems. Randomly # fail to symlink. try: os.symlink(os.path.relpath(orig + ext), os.path.basename(new + ext)) # depends on [control=['try'], data=[]] except OSError: if not os.path.exists(new + ext) or not os.path.lexists(new + ext): remove_safe(new + ext) shutil.copyfile(orig + ext, new + ext) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ext']] orig_noext = splitext_plus(orig)[0] new_noext = splitext_plus(new)[0] for sub_ext in ['.bai', '.dict']: if os.path.exists(orig_noext + sub_ext) and (not os.path.lexists(new_noext + sub_ext)): with chdir(os.path.dirname(new_noext)): os.symlink(os.path.relpath(orig_noext + sub_ext), os.path.basename(new_noext + sub_ext)) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sub_ext']]
def _populate_data(self): """Assinging some measurement's raw meta data from API response to instance properties""" if self.id is None: self.id = self.meta_data.get("id") self.stop_time = None self.creation_time = None self.start_time = None self.populate_times() self.protocol = self.meta_data.get("af") self.target_ip = self.meta_data.get("target_ip") self.target_asn = self.meta_data.get("target_asn") self.target = self.meta_data.get("target") self.description = self.meta_data.get("description") self.is_oneoff = self.meta_data.get("is_oneoff") self.is_public = self.meta_data.get("is_public") self.interval = self.meta_data.get("interval") self.resolve_on_probe = self.meta_data.get("resolve_on_probe") self.status_id = self.meta_data.get("status", {}).get("id") self.status = self.meta_data.get("status", {}).get("name") self.type = self.get_type() self.result_url = self.meta_data.get("result")
def function[_populate_data, parameter[self]]: constant[Assinging some measurement's raw meta data from API response to instance properties] if compare[name[self].id is constant[None]] begin[:] name[self].id assign[=] call[name[self].meta_data.get, parameter[constant[id]]] name[self].stop_time assign[=] constant[None] name[self].creation_time assign[=] constant[None] name[self].start_time assign[=] constant[None] call[name[self].populate_times, parameter[]] name[self].protocol assign[=] call[name[self].meta_data.get, parameter[constant[af]]] name[self].target_ip assign[=] call[name[self].meta_data.get, parameter[constant[target_ip]]] name[self].target_asn assign[=] call[name[self].meta_data.get, parameter[constant[target_asn]]] name[self].target assign[=] call[name[self].meta_data.get, parameter[constant[target]]] name[self].description assign[=] call[name[self].meta_data.get, parameter[constant[description]]] name[self].is_oneoff assign[=] call[name[self].meta_data.get, parameter[constant[is_oneoff]]] name[self].is_public assign[=] call[name[self].meta_data.get, parameter[constant[is_public]]] name[self].interval assign[=] call[name[self].meta_data.get, parameter[constant[interval]]] name[self].resolve_on_probe assign[=] call[name[self].meta_data.get, parameter[constant[resolve_on_probe]]] name[self].status_id assign[=] call[call[name[self].meta_data.get, parameter[constant[status], dictionary[[], []]]].get, parameter[constant[id]]] name[self].status assign[=] call[call[name[self].meta_data.get, parameter[constant[status], dictionary[[], []]]].get, parameter[constant[name]]] name[self].type assign[=] call[name[self].get_type, parameter[]] name[self].result_url assign[=] call[name[self].meta_data.get, parameter[constant[result]]]
keyword[def] identifier[_populate_data] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[id] keyword[is] keyword[None] : identifier[self] . identifier[id] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[stop_time] = keyword[None] identifier[self] . identifier[creation_time] = keyword[None] identifier[self] . identifier[start_time] = keyword[None] identifier[self] . identifier[populate_times] () identifier[self] . identifier[protocol] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[target_ip] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[target_asn] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[target] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[description] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[is_oneoff] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[is_public] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[interval] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[resolve_on_probe] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ) identifier[self] . identifier[status_id] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) identifier[self] . identifier[status] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) identifier[self] . identifier[type] = identifier[self] . 
identifier[get_type] () identifier[self] . identifier[result_url] = identifier[self] . identifier[meta_data] . identifier[get] ( literal[string] )
def _populate_data(self): """Assinging some measurement's raw meta data from API response to instance properties""" if self.id is None: self.id = self.meta_data.get('id') # depends on [control=['if'], data=[]] self.stop_time = None self.creation_time = None self.start_time = None self.populate_times() self.protocol = self.meta_data.get('af') self.target_ip = self.meta_data.get('target_ip') self.target_asn = self.meta_data.get('target_asn') self.target = self.meta_data.get('target') self.description = self.meta_data.get('description') self.is_oneoff = self.meta_data.get('is_oneoff') self.is_public = self.meta_data.get('is_public') self.interval = self.meta_data.get('interval') self.resolve_on_probe = self.meta_data.get('resolve_on_probe') self.status_id = self.meta_data.get('status', {}).get('id') self.status = self.meta_data.get('status', {}).get('name') self.type = self.get_type() self.result_url = self.meta_data.get('result')
def pickle_load(path, compression=False):
    """Unpickle a possibly compressed pickle.

    Parameters
    ----------
    path: str
        path to the output file
    compression: bool
        if true assumes that pickle was compressed when created and
        attempts decompression.

    Returns
    -------
    obj: object
        the unpickled object
    """
    if not compression:
        with open(path, "rb") as handle:
            return pickle.load(handle)
    # compressed pickles are zip archives with a single member "data"
    with zipfile.ZipFile(path, "r", compression=zipfile.ZIP_DEFLATED) as archive:
        with archive.open("data") as handle:
            return pickle.load(handle)
def function[pickle_load, parameter[path, compression]]: constant[Unpickle a possible compressed pickle. Parameters ---------- path: str path to the output file compression: bool if true assumes that pickle was compressed when created and attempts decompression. Returns ------- obj: object the unpickled object ] if name[compression] begin[:] with call[name[zipfile].ZipFile, parameter[name[path], constant[r]]] begin[:] with call[name[myzip].open, parameter[constant[data]]] begin[:] return[call[name[pickle].load, parameter[name[f]]]]
keyword[def] identifier[pickle_load] ( identifier[path] , identifier[compression] = keyword[False] ): literal[string] keyword[if] identifier[compression] : keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[path] , literal[string] , identifier[compression] = identifier[zipfile] . identifier[ZIP_DEFLATED] ) keyword[as] identifier[myzip] : keyword[with] identifier[myzip] . identifier[open] ( literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[pickle] . identifier[load] ( identifier[f] ) keyword[else] : keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[pickle] . identifier[load] ( identifier[f] )
def pickle_load(path, compression=False): """Unpickle a possible compressed pickle. Parameters ---------- path: str path to the output file compression: bool if true assumes that pickle was compressed when created and attempts decompression. Returns ------- obj: object the unpickled object """ if compression: with zipfile.ZipFile(path, 'r', compression=zipfile.ZIP_DEFLATED) as myzip: with myzip.open('data') as f: return pickle.load(f) # depends on [control=['with'], data=['f']] # depends on [control=['with'], data=['myzip']] # depends on [control=['if'], data=[]] else: with open(path, 'rb') as f: return pickle.load(f) # depends on [control=['with'], data=['f']]
def validate (properties):
    """ Exit with error if any of the properties is not valid.

        `properties` may be a single Property or a sequence of
        Property instances.
    """
    props = [properties] if isinstance(properties, Property) else properties
    assert is_iterable_typed(props, Property)
    for prop in props:
        __validate1(prop)
def function[validate, parameter[properties]]: constant[ Exit with error if any of the properties is not valid. properties may be a single property or a sequence of properties. ] if call[name[isinstance], parameter[name[properties], name[Property]]] begin[:] variable[properties] assign[=] list[[<ast.Name object at 0x7da1b20ef430>]] assert[call[name[is_iterable_typed], parameter[name[properties], name[Property]]]] for taget[name[p]] in starred[name[properties]] begin[:] call[name[__validate1], parameter[name[p]]]
keyword[def] identifier[validate] ( identifier[properties] ): literal[string] keyword[if] identifier[isinstance] ( identifier[properties] , identifier[Property] ): identifier[properties] =[ identifier[properties] ] keyword[assert] identifier[is_iterable_typed] ( identifier[properties] , identifier[Property] ) keyword[for] identifier[p] keyword[in] identifier[properties] : identifier[__validate1] ( identifier[p] )
def validate(properties): """ Exit with error if any of the properties is not valid. properties may be a single property or a sequence of properties. """ if isinstance(properties, Property): properties = [properties] # depends on [control=['if'], data=[]] assert is_iterable_typed(properties, Property) for p in properties: __validate1(p) # depends on [control=['for'], data=['p']]
def _format_iso_time(self, time): """Makes sure we have proper ISO 8601 time. :param time: either already ISO 8601 a string or datetime.datetime :returns: ISO 8601 time :rtype: str """ if isinstance(time, str): return time elif isinstance(time, datetime): return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') else: return None
def function[_format_iso_time, parameter[self, time]]: constant[Makes sure we have proper ISO 8601 time. :param time: either already ISO 8601 a string or datetime.datetime :returns: ISO 8601 time :rtype: str ] if call[name[isinstance], parameter[name[time], name[str]]] begin[:] return[name[time]]
keyword[def] identifier[_format_iso_time] ( identifier[self] , identifier[time] ): literal[string] keyword[if] identifier[isinstance] ( identifier[time] , identifier[str] ): keyword[return] identifier[time] keyword[elif] identifier[isinstance] ( identifier[time] , identifier[datetime] ): keyword[return] identifier[time] . identifier[strftime] ( literal[string] ) keyword[else] : keyword[return] keyword[None]
def _format_iso_time(self, time): """Makes sure we have proper ISO 8601 time. :param time: either already ISO 8601 a string or datetime.datetime :returns: ISO 8601 time :rtype: str """ if isinstance(time, str): return time # depends on [control=['if'], data=[]] elif isinstance(time, datetime): return time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') # depends on [control=['if'], data=[]] else: return None
def set_computer_name(name):
    '''
    Set the Windows computer name

    Args:

        name (str): The new name to give the computer. Requires a reboot
            to take effect.

    Returns:
        dict:
            Returns a dictionary containing the old and new names if
            successful. ``False`` if not.

    CLI Example:

    .. code-block:: bash

        salt 'minion-id' system.set_computer_name 'DavesComputer'
    '''
    if six.PY2:
        name = _to_unicode(name)

    # SetComputerNameExW returns a truthy value on success
    renamed = windll.kernel32.SetComputerNameExW(
        win32con.ComputerNamePhysicalDnsHostname, name)
    if not renamed:
        return False

    result = {'Computer Name': {'Current': get_computer_name()}}
    pending = get_pending_computer_name()
    # report a pending name only when the lookup yields one (None and
    # False both mean "nothing to report" here)
    if pending not in (None, False):
        result['Computer Name']['Pending'] = pending
    return result
def function[set_computer_name, parameter[name]]: constant[ Set the Windows computer name Args: name (str): The new name to give the computer. Requires a reboot to take effect. Returns: dict: Returns a dictionary containing the old and new names if successful. ``False`` if not. CLI Example: .. code-block:: bash salt 'minion-id' system.set_computer_name 'DavesComputer' ] if name[six].PY2 begin[:] variable[name] assign[=] call[name[_to_unicode], parameter[name[name]]] if call[name[windll].kernel32.SetComputerNameExW, parameter[name[win32con].ComputerNamePhysicalDnsHostname, name[name]]] begin[:] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da2044c3b20>], [<ast.Dict object at 0x7da2044c3e80>]] variable[pending] assign[=] call[name[get_pending_computer_name], parameter[]] if compare[name[pending] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2044c2860>, <ast.Constant object at 0x7da2044c15a0>]]] begin[:] call[call[name[ret]][constant[Computer Name]]][constant[Pending]] assign[=] name[pending] return[name[ret]] return[constant[False]]
keyword[def] identifier[set_computer_name] ( identifier[name] ): literal[string] keyword[if] identifier[six] . identifier[PY2] : identifier[name] = identifier[_to_unicode] ( identifier[name] ) keyword[if] identifier[windll] . identifier[kernel32] . identifier[SetComputerNameExW] ( identifier[win32con] . identifier[ComputerNamePhysicalDnsHostname] , identifier[name] ): identifier[ret] ={ literal[string] :{ literal[string] : identifier[get_computer_name] ()}} identifier[pending] = identifier[get_pending_computer_name] () keyword[if] identifier[pending] keyword[not] keyword[in] ( keyword[None] , keyword[False] ): identifier[ret] [ literal[string] ][ literal[string] ]= identifier[pending] keyword[return] identifier[ret] keyword[return] keyword[False]
def set_computer_name(name): """ Set the Windows computer name Args: name (str): The new name to give the computer. Requires a reboot to take effect. Returns: dict: Returns a dictionary containing the old and new names if successful. ``False`` if not. CLI Example: .. code-block:: bash salt 'minion-id' system.set_computer_name 'DavesComputer' """ if six.PY2: name = _to_unicode(name) # depends on [control=['if'], data=[]] if windll.kernel32.SetComputerNameExW(win32con.ComputerNamePhysicalDnsHostname, name): ret = {'Computer Name': {'Current': get_computer_name()}} pending = get_pending_computer_name() if pending not in (None, False): ret['Computer Name']['Pending'] = pending # depends on [control=['if'], data=['pending']] return ret # depends on [control=['if'], data=[]] return False
def get_resource_from_handle(self, resource_handle, verify_repo=True):
    """Get a resource.

    Args:
        resource_handle (`ResourceHandle`): Handle of the resource.

    Returns:
        `PackageRepositoryResource` instance.
    """
    if verify_repo:
        # we could fix the handle at this point, but handles should
        # always be made from repo.make_resource_handle... for now,
        # at least, error to catch any "incorrect" construction of
        # handles...
        handle_vars = resource_handle.variables
        if handle_vars.get("repository_type") != self.name():
            raise ResourceError("repository_type mismatch - requested %r, "
                                "repository_type is %r"
                                % (handle_vars["repository_type"],
                                   self.name()))
        if handle_vars.get("location") != self.location:
            raise ResourceError("location mismatch - requested %r, "
                                "repository location is %r "
                                % (handle_vars["location"], self.location))
    resource = self.pool.get_resource_from_handle(resource_handle)
    resource._repository = self
    return resource
def function[get_resource_from_handle, parameter[self, resource_handle, verify_repo]]: constant[Get a resource. Args: resource_handle (`ResourceHandle`): Handle of the resource. Returns: `PackageRepositoryResource` instance. ] if name[verify_repo] begin[:] if compare[call[name[resource_handle].variables.get, parameter[constant[repository_type]]] not_equal[!=] call[name[self].name, parameter[]]] begin[:] <ast.Raise object at 0x7da1b17ec760> if compare[call[name[resource_handle].variables.get, parameter[constant[location]]] not_equal[!=] name[self].location] begin[:] <ast.Raise object at 0x7da1b17ecd00> variable[resource] assign[=] call[name[self].pool.get_resource_from_handle, parameter[name[resource_handle]]] name[resource]._repository assign[=] name[self] return[name[resource]]
keyword[def] identifier[get_resource_from_handle] ( identifier[self] , identifier[resource_handle] , identifier[verify_repo] = keyword[True] ): literal[string] keyword[if] identifier[verify_repo] : keyword[if] identifier[resource_handle] . identifier[variables] . identifier[get] ( literal[string] )!= identifier[self] . identifier[name] (): keyword[raise] identifier[ResourceError] ( literal[string] literal[string] %( identifier[resource_handle] . identifier[variables] [ literal[string] ], identifier[self] . identifier[name] ())) keyword[if] identifier[resource_handle] . identifier[variables] . identifier[get] ( literal[string] )!= identifier[self] . identifier[location] : keyword[raise] identifier[ResourceError] ( literal[string] literal[string] %( identifier[resource_handle] . identifier[variables] [ literal[string] ], identifier[self] . identifier[location] )) identifier[resource] = identifier[self] . identifier[pool] . identifier[get_resource_from_handle] ( identifier[resource_handle] ) identifier[resource] . identifier[_repository] = identifier[self] keyword[return] identifier[resource]
def get_resource_from_handle(self, resource_handle, verify_repo=True): """Get a resource. Args: resource_handle (`ResourceHandle`): Handle of the resource. Returns: `PackageRepositoryResource` instance. """ if verify_repo: # we could fix the handle at this point, but handles should # always be made from repo.make_resource_handle... for now, # at least, error to catch any "incorrect" construction of # handles... if resource_handle.variables.get('repository_type') != self.name(): raise ResourceError('repository_type mismatch - requested %r, repository_type is %r' % (resource_handle.variables['repository_type'], self.name())) # depends on [control=['if'], data=[]] if resource_handle.variables.get('location') != self.location: raise ResourceError('location mismatch - requested %r, repository location is %r ' % (resource_handle.variables['location'], self.location)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] resource = self.pool.get_resource_from_handle(resource_handle) resource._repository = self return resource