code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def get_alias(self):
    """
    Return the explicit alias for the table if one is set, otherwise
    the auto_alias. If neither kind of alias is set, return None.

    :returns: The table alias, auto_alias, or None
    :rtype: str or None
    """
    # prefer the explicit alias, then fall back to the auto-generated one
    for candidate in (self.alias, self.auto_alias):
        if candidate:
            return candidate
    return None
def function[get_alias, parameter[self]]: constant[ Gets the alias for the table or the auto_alias if one is set. If there isn't any kind of alias, None is returned. :returns: The table alias, auto_alias, or None :rtype: str or None ] variable[alias] assign[=] constant[None] if name[self].alias begin[:] variable[alias] assign[=] name[self].alias return[name[alias]]
keyword[def] identifier[get_alias] ( identifier[self] ): literal[string] identifier[alias] = keyword[None] keyword[if] identifier[self] . identifier[alias] : identifier[alias] = identifier[self] . identifier[alias] keyword[elif] identifier[self] . identifier[auto_alias] : identifier[alias] = identifier[self] . identifier[auto_alias] keyword[return] identifier[alias]
def get_alias(self): """ Gets the alias for the table or the auto_alias if one is set. If there isn't any kind of alias, None is returned. :returns: The table alias, auto_alias, or None :rtype: str or None """ alias = None if self.alias: alias = self.alias # depends on [control=['if'], data=[]] elif self.auto_alias: alias = self.auto_alias # depends on [control=['if'], data=[]] return alias
def get_name_servers(self, id_or_uri):
    """
    Gets the named servers for an interconnect.

    Args:
        id_or_uri: Can be either the interconnect id or the interconnect uri.

    Returns:
        dict: the name servers for an interconnect.
    """
    # resolve the id/uri to a full resource uri, then append the sub-path
    name_servers_uri = "{}{}".format(self._client.build_uri(id_or_uri), "/nameServers")
    return self._client.get(name_servers_uri)
def function[get_name_servers, parameter[self, id_or_uri]]: constant[ Gets the named servers for an interconnect. Args: id_or_uri: Can be either the interconnect id or the interconnect uri. Returns: dict: the name servers for an interconnect. ] variable[uri] assign[=] binary_operation[call[name[self]._client.build_uri, parameter[name[id_or_uri]]] + constant[/nameServers]] return[call[name[self]._client.get, parameter[name[uri]]]]
keyword[def] identifier[get_name_servers] ( identifier[self] , identifier[id_or_uri] ): literal[string] identifier[uri] = identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[id_or_uri] )+ literal[string] keyword[return] identifier[self] . identifier[_client] . identifier[get] ( identifier[uri] )
def get_name_servers(self, id_or_uri): """ Gets the named servers for an interconnect. Args: id_or_uri: Can be either the interconnect id or the interconnect uri. Returns: dict: the name servers for an interconnect. """ uri = self._client.build_uri(id_or_uri) + '/nameServers' return self._client.get(uri)
def windowed_tajima_d(pos, ac, size=None, start=None, stop=None, step=None,
                      windows=None, min_sites=3):
    """Calculate the value of Tajima's D in windows over a single
    chromosome/contig.

    Parameters
    ----------
    pos : array_like, int, shape (n_items,)
        Variant positions, using 1-based coordinates, in ascending order.
    ac : array_like, int, shape (n_variants, n_alleles)
        Allele counts array.
    size : int, optional
        The window size (number of bases).
    start : int, optional
        The position at which to start (1-based).
    stop : int, optional
        The position at which to stop (1-based).
    step : int, optional
        The distance between start positions of windows. If not given,
        defaults to the window size, i.e., non-overlapping windows.
    windows : array_like, int, shape (n_windows, 2), optional
        Manually specify the windows to use as a sequence of (window_start,
        window_stop) positions, using 1-based coordinates. Overrides the
        size/start/stop/step parameters.
    min_sites : int, optional
        Minimum number of segregating sites for which to calculate a value.
        If there are fewer, np.nan is returned. Defaults to 3.

    Returns
    -------
    D : ndarray, float, shape (n_windows,)
        Tajima's D.
    windows : ndarray, int, shape (n_windows, 2)
        The windows used, as an array of (window_start, window_stop)
        positions, using 1-based coordinates.
    counts : ndarray, int, shape (n_windows,)
        Number of variants in each window.

    Examples
    --------
    >>> import allel
    >>> g = allel.GenotypeArray([[[0, 0], [0, 0]],
    ...                          [[0, 0], [0, 1]],
    ...                          [[0, 0], [1, 1]],
    ...                          [[0, 1], [1, 1]],
    ...                          [[1, 1], [1, 1]],
    ...                          [[0, 0], [1, 2]],
    ...                          [[0, 1], [1, 2]],
    ...                          [[0, 1], [-1, -1]],
    ...                          [[-1, -1], [-1, -1]]])
    >>> ac = g.count_alleles()
    >>> pos = [2, 4, 7, 14, 15, 20, 22, 25, 27]
    >>> D, windows, counts = allel.windowed_tajima_d(pos, ac, size=20, step=10, start=1, stop=31)
    >>> D
    array([1.36521524, 4.22566622])
    >>> windows
    array([[ 1, 20],
           [11, 31]])
    >>> counts
    array([6, 6])

    """
    # normalise inputs to the expected array wrapper types
    if not isinstance(pos, SortedIndex):
        pos = SortedIndex(pos, copy=False)
    if not hasattr(ac, 'count_segregating'):
        ac = AlleleCountsArray(ac, copy=False)

    # assume number of chromosomes sampled is constant for all variants
    n = ac.sum(axis=1).max()

    # calculate constants (harmonic-number terms shared by a1 and a2)
    harmonic = np.arange(1, n)
    a1 = np.sum(1 / harmonic)
    a2 = np.sum(1 / (harmonic ** 2))
    b1 = (n + 1) / (3 * (n - 1))
    b2 = 2 * (n ** 2 + n + 3) / (9 * n * (n - 1))
    c1 = b1 - (1 / a1)
    c2 = b2 - ((n + 2) / (a1 * n)) + (a2 / (a1 ** 2))
    e1 = c1 / a1
    e2 = c2 / (a1 ** 2 + a2)

    # locate segregating variants
    is_seg = ac.is_segregating()

    # calculate mean pairwise difference per variant
    mpd = mean_pairwise_difference(ac, fill=0)

    # define statistic to compute for each window
    # noinspection PyPep8Naming
    def statistic(w_is_seg, w_mpd):
        # number of segregating sites within the window
        S = np.count_nonzero(w_is_seg)
        if S < min_sites:
            return np.nan
        pi = np.sum(w_mpd)
        d = pi - (S / a1)
        d_stdev = np.sqrt((e1 * S) + (e2 * S * (S - 1)))
        return d / d_stdev

    D, windows, counts = windowed_statistic(pos, values=(is_seg, mpd),
                                            statistic=statistic,
                                            size=size, start=start,
                                            stop=stop, step=step,
                                            windows=windows, fill=np.nan)

    return D, windows, counts
def function[windowed_tajima_d, parameter[pos, ac, size, start, stop, step, windows, min_sites]]: constant[Calculate the value of Tajima's D in windows over a single chromosome/contig. Parameters ---------- pos : array_like, int, shape (n_items,) Variant positions, using 1-based coordinates, in ascending order. ac : array_like, int, shape (n_variants, n_alleles) Allele counts array. size : int, optional The window size (number of bases). start : int, optional The position at which to start (1-based). stop : int, optional The position at which to stop (1-based). step : int, optional The distance between start positions of windows. If not given, defaults to the window size, i.e., non-overlapping windows. windows : array_like, int, shape (n_windows, 2), optional Manually specify the windows to use as a sequence of (window_start, window_stop) positions, using 1-based coordinates. Overrides the size/start/stop/step parameters. min_sites : int, optional Minimum number of segregating sites for which to calculate a value. If there are fewer, np.nan is returned. Defaults to 3. Returns ------- D : ndarray, float, shape (n_windows,) Tajima's D. windows : ndarray, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions, using 1-based coordinates. counts : ndarray, int, shape (n_windows,) Number of variants in each window. Examples -------- >>> import allel >>> g = allel.GenotypeArray([[[0, 0], [0, 0]], ... [[0, 0], [0, 1]], ... [[0, 0], [1, 1]], ... [[0, 1], [1, 1]], ... [[1, 1], [1, 1]], ... [[0, 0], [1, 2]], ... [[0, 1], [1, 2]], ... [[0, 1], [-1, -1]], ... 
[[-1, -1], [-1, -1]]]) >>> ac = g.count_alleles() >>> pos = [2, 4, 7, 14, 15, 20, 22, 25, 27] >>> D, windows, counts = allel.windowed_tajima_d(pos, ac, size=20, step=10, start=1, stop=31) >>> D array([1.36521524, 4.22566622]) >>> windows array([[ 1, 20], [11, 31]]) >>> counts array([6, 6]) ] if <ast.UnaryOp object at 0x7da20c796140> begin[:] variable[pos] assign[=] call[name[SortedIndex], parameter[name[pos]]] if <ast.UnaryOp object at 0x7da20c795420> begin[:] variable[ac] assign[=] call[name[AlleleCountsArray], parameter[name[ac]]] variable[n] assign[=] call[call[name[ac].sum, parameter[]].max, parameter[]] variable[a1] assign[=] call[name[np].sum, parameter[binary_operation[constant[1] / call[name[np].arange, parameter[constant[1], name[n]]]]]] variable[a2] assign[=] call[name[np].sum, parameter[binary_operation[constant[1] / binary_operation[call[name[np].arange, parameter[constant[1], name[n]]] ** constant[2]]]]] variable[b1] assign[=] binary_operation[binary_operation[name[n] + constant[1]] / binary_operation[constant[3] * binary_operation[name[n] - constant[1]]]] variable[b2] assign[=] binary_operation[binary_operation[constant[2] * binary_operation[binary_operation[binary_operation[name[n] ** constant[2]] + name[n]] + constant[3]]] / binary_operation[binary_operation[constant[9] * name[n]] * binary_operation[name[n] - constant[1]]]] variable[c1] assign[=] binary_operation[name[b1] - binary_operation[constant[1] / name[a1]]] variable[c2] assign[=] binary_operation[binary_operation[name[b2] - binary_operation[binary_operation[name[n] + constant[2]] / binary_operation[name[a1] * name[n]]]] + binary_operation[name[a2] / binary_operation[name[a1] ** constant[2]]]] variable[e1] assign[=] binary_operation[name[c1] / name[a1]] variable[e2] assign[=] binary_operation[name[c2] / binary_operation[binary_operation[name[a1] ** constant[2]] + name[a2]]] variable[is_seg] assign[=] call[name[ac].is_segregating, parameter[]] variable[mpd] assign[=] 
call[name[mean_pairwise_difference], parameter[name[ac]]] def function[statistic, parameter[w_is_seg, w_mpd]]: variable[S] assign[=] call[name[np].count_nonzero, parameter[name[w_is_seg]]] if compare[name[S] less[<] name[min_sites]] begin[:] return[name[np].nan] variable[pi] assign[=] call[name[np].sum, parameter[name[w_mpd]]] variable[d] assign[=] binary_operation[name[pi] - binary_operation[name[S] / name[a1]]] variable[d_stdev] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[e1] * name[S]] + binary_operation[binary_operation[name[e2] * name[S]] * binary_operation[name[S] - constant[1]]]]]] variable[wD] assign[=] binary_operation[name[d] / name[d_stdev]] return[name[wD]] <ast.Tuple object at 0x7da18f721ea0> assign[=] call[name[windowed_statistic], parameter[name[pos]]] return[tuple[[<ast.Name object at 0x7da18f723040>, <ast.Name object at 0x7da18f720130>, <ast.Name object at 0x7da18f722350>]]]
keyword[def] identifier[windowed_tajima_d] ( identifier[pos] , identifier[ac] , identifier[size] = keyword[None] , identifier[start] = keyword[None] , identifier[stop] = keyword[None] , identifier[step] = keyword[None] , identifier[windows] = keyword[None] , identifier[min_sites] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[pos] , identifier[SortedIndex] ): identifier[pos] = identifier[SortedIndex] ( identifier[pos] , identifier[copy] = keyword[False] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[ac] , literal[string] ): identifier[ac] = identifier[AlleleCountsArray] ( identifier[ac] , identifier[copy] = keyword[False] ) identifier[n] = identifier[ac] . identifier[sum] ( identifier[axis] = literal[int] ). identifier[max] () identifier[a1] = identifier[np] . identifier[sum] ( literal[int] / identifier[np] . identifier[arange] ( literal[int] , identifier[n] )) identifier[a2] = identifier[np] . identifier[sum] ( literal[int] /( identifier[np] . identifier[arange] ( literal[int] , identifier[n] )** literal[int] )) identifier[b1] =( identifier[n] + literal[int] )/( literal[int] *( identifier[n] - literal[int] )) identifier[b2] = literal[int] *( identifier[n] ** literal[int] + identifier[n] + literal[int] )/( literal[int] * identifier[n] *( identifier[n] - literal[int] )) identifier[c1] = identifier[b1] -( literal[int] / identifier[a1] ) identifier[c2] = identifier[b2] -(( identifier[n] + literal[int] )/( identifier[a1] * identifier[n] ))+( identifier[a2] /( identifier[a1] ** literal[int] )) identifier[e1] = identifier[c1] / identifier[a1] identifier[e2] = identifier[c2] /( identifier[a1] ** literal[int] + identifier[a2] ) identifier[is_seg] = identifier[ac] . identifier[is_segregating] () identifier[mpd] = identifier[mean_pairwise_difference] ( identifier[ac] , identifier[fill] = literal[int] ) keyword[def] identifier[statistic] ( identifier[w_is_seg] , identifier[w_mpd] ): identifier[S] = identifier[np] . 
identifier[count_nonzero] ( identifier[w_is_seg] ) keyword[if] identifier[S] < identifier[min_sites] : keyword[return] identifier[np] . identifier[nan] identifier[pi] = identifier[np] . identifier[sum] ( identifier[w_mpd] ) identifier[d] = identifier[pi] -( identifier[S] / identifier[a1] ) identifier[d_stdev] = identifier[np] . identifier[sqrt] (( identifier[e1] * identifier[S] )+( identifier[e2] * identifier[S] *( identifier[S] - literal[int] ))) identifier[wD] = identifier[d] / identifier[d_stdev] keyword[return] identifier[wD] identifier[D] , identifier[windows] , identifier[counts] = identifier[windowed_statistic] ( identifier[pos] , identifier[values] =( identifier[is_seg] , identifier[mpd] ), identifier[statistic] = identifier[statistic] , identifier[size] = identifier[size] , identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[step] = identifier[step] , identifier[windows] = identifier[windows] , identifier[fill] = identifier[np] . identifier[nan] ) keyword[return] identifier[D] , identifier[windows] , identifier[counts]
def windowed_tajima_d(pos, ac, size=None, start=None, stop=None, step=None, windows=None, min_sites=3): """Calculate the value of Tajima's D in windows over a single chromosome/contig. Parameters ---------- pos : array_like, int, shape (n_items,) Variant positions, using 1-based coordinates, in ascending order. ac : array_like, int, shape (n_variants, n_alleles) Allele counts array. size : int, optional The window size (number of bases). start : int, optional The position at which to start (1-based). stop : int, optional The position at which to stop (1-based). step : int, optional The distance between start positions of windows. If not given, defaults to the window size, i.e., non-overlapping windows. windows : array_like, int, shape (n_windows, 2), optional Manually specify the windows to use as a sequence of (window_start, window_stop) positions, using 1-based coordinates. Overrides the size/start/stop/step parameters. min_sites : int, optional Minimum number of segregating sites for which to calculate a value. If there are fewer, np.nan is returned. Defaults to 3. Returns ------- D : ndarray, float, shape (n_windows,) Tajima's D. windows : ndarray, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions, using 1-based coordinates. counts : ndarray, int, shape (n_windows,) Number of variants in each window. Examples -------- >>> import allel >>> g = allel.GenotypeArray([[[0, 0], [0, 0]], ... [[0, 0], [0, 1]], ... [[0, 0], [1, 1]], ... [[0, 1], [1, 1]], ... [[1, 1], [1, 1]], ... [[0, 0], [1, 2]], ... [[0, 1], [1, 2]], ... [[0, 1], [-1, -1]], ... 
[[-1, -1], [-1, -1]]]) >>> ac = g.count_alleles() >>> pos = [2, 4, 7, 14, 15, 20, 22, 25, 27] >>> D, windows, counts = allel.windowed_tajima_d(pos, ac, size=20, step=10, start=1, stop=31) >>> D array([1.36521524, 4.22566622]) >>> windows array([[ 1, 20], [11, 31]]) >>> counts array([6, 6]) """ # check inputs if not isinstance(pos, SortedIndex): pos = SortedIndex(pos, copy=False) # depends on [control=['if'], data=[]] if not hasattr(ac, 'count_segregating'): ac = AlleleCountsArray(ac, copy=False) # depends on [control=['if'], data=[]] # assume number of chromosomes sampled is constant for all variants n = ac.sum(axis=1).max() # calculate constants a1 = np.sum(1 / np.arange(1, n)) a2 = np.sum(1 / np.arange(1, n) ** 2) b1 = (n + 1) / (3 * (n - 1)) b2 = 2 * (n ** 2 + n + 3) / (9 * n * (n - 1)) c1 = b1 - 1 / a1 c2 = b2 - (n + 2) / (a1 * n) + a2 / a1 ** 2 e1 = c1 / a1 e2 = c2 / (a1 ** 2 + a2) # locate segregating variants is_seg = ac.is_segregating() # calculate mean pairwise difference mpd = mean_pairwise_difference(ac, fill=0) # define statistic to compute for each window # noinspection PyPep8Naming def statistic(w_is_seg, w_mpd): S = np.count_nonzero(w_is_seg) if S < min_sites: return np.nan # depends on [control=['if'], data=[]] pi = np.sum(w_mpd) d = pi - S / a1 d_stdev = np.sqrt(e1 * S + e2 * S * (S - 1)) wD = d / d_stdev return wD (D, windows, counts) = windowed_statistic(pos, values=(is_seg, mpd), statistic=statistic, size=size, start=start, stop=stop, step=step, windows=windows, fill=np.nan) return (D, windows, counts)
def put_privileges(self, body, params=None):
    """
    `<TODO>`_

    :arg body: The privilege(s) to add
    :arg refresh: If `true` (the default) then refresh the affected
        shards to make this operation visible to search, if `wait_for`
        then wait for a refresh to make this operation visible to
        search, if `false` then do nothing with refreshes., valid
        choices are: 'true', 'false', 'wait_for'
    """
    # an absent/empty body cannot form a valid request
    if body in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument 'body'.")
    method, path = "PUT", "/_security/privilege/"
    return self.transport.perform_request(method, path, params=params, body=body)
def function[put_privileges, parameter[self, body, params]]: constant[ `<TODO>`_ :arg body: The privilege(s) to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes., valid choices are: 'true', 'false', 'wait_for' ] if compare[name[body] in name[SKIP_IN_PATH]] begin[:] <ast.Raise object at 0x7da18f7229b0> return[call[name[self].transport.perform_request, parameter[constant[PUT], constant[/_security/privilege/]]]]
keyword[def] identifier[put_privileges] ( identifier[self] , identifier[body] , identifier[params] = keyword[None] ): literal[string] keyword[if] identifier[body] keyword[in] identifier[SKIP_IN_PATH] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[self] . identifier[transport] . identifier[perform_request] ( literal[string] , literal[string] , identifier[params] = identifier[params] , identifier[body] = identifier[body] )
def put_privileges(self, body, params=None): """ `<TODO>`_ :arg body: The privilege(s) to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes., valid choices are: 'true', 'false', 'wait_for' """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") # depends on [control=['if'], data=[]] return self.transport.perform_request('PUT', '/_security/privilege/', params=params, body=body)
def docs(ctx, output='html', rebuild=False, show=True, verbose=True):
    """Build the docs and show them in default web browser."""
    # assemble the sphinx-build command from the requested options
    rebuild_flags = '-a -E' if rebuild else ''
    verbose_flag = '-v' if verbose else ''
    command = 'sphinx-build -b {output} {all} {verbose} docs docs/_build'.format(
        output=output, all=rebuild_flags, verbose=verbose_flag)
    result = ctx.run(command)
    if not result.ok:
        fatal("Failed to build the docs", cause=result)
    if not show:
        return
    index_page = os.path.join(DOCS_OUTPUT_DIR, 'index.html')
    if sys.platform == 'darwin':
        # macOS needs an explicit file:// URL for the browser to open it
        index_page = 'file://%s' % os.path.abspath(index_page)
    webbrowser.open_new_tab(index_page)
def function[docs, parameter[ctx, output, rebuild, show, verbose]]: constant[Build the docs and show them in default web browser.] variable[sphinx_build] assign[=] call[name[ctx].run, parameter[call[constant[sphinx-build -b {output} {all} {verbose} docs docs/_build].format, parameter[]]]] if <ast.UnaryOp object at 0x7da1b0ff30d0> begin[:] call[name[fatal], parameter[constant[Failed to build the docs]]] if name[show] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[name[DOCS_OUTPUT_DIR], constant[index.html]]] if compare[name[sys].platform equal[==] constant[darwin]] begin[:] variable[path] assign[=] binary_operation[constant[file://%s] <ast.Mod object at 0x7da2590d6920> call[name[os].path.abspath, parameter[name[path]]]] call[name[webbrowser].open_new_tab, parameter[name[path]]]
keyword[def] identifier[docs] ( identifier[ctx] , identifier[output] = literal[string] , identifier[rebuild] = keyword[False] , identifier[show] = keyword[True] , identifier[verbose] = keyword[True] ): literal[string] identifier[sphinx_build] = identifier[ctx] . identifier[run] ( literal[string] . identifier[format] ( identifier[output] = identifier[output] , identifier[all] = literal[string] keyword[if] identifier[rebuild] keyword[else] literal[string] , identifier[verbose] = literal[string] keyword[if] identifier[verbose] keyword[else] literal[string] )) keyword[if] keyword[not] identifier[sphinx_build] . identifier[ok] : identifier[fatal] ( literal[string] , identifier[cause] = identifier[sphinx_build] ) keyword[if] identifier[show] : identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[DOCS_OUTPUT_DIR] , literal[string] ) keyword[if] identifier[sys] . identifier[platform] == literal[string] : identifier[path] = literal[string] % identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ) identifier[webbrowser] . identifier[open_new_tab] ( identifier[path] )
def docs(ctx, output='html', rebuild=False, show=True, verbose=True): """Build the docs and show them in default web browser.""" sphinx_build = ctx.run('sphinx-build -b {output} {all} {verbose} docs docs/_build'.format(output=output, all='-a -E' if rebuild else '', verbose='-v' if verbose else '')) if not sphinx_build.ok: fatal('Failed to build the docs', cause=sphinx_build) # depends on [control=['if'], data=[]] if show: path = os.path.join(DOCS_OUTPUT_DIR, 'index.html') if sys.platform == 'darwin': path = 'file://%s' % os.path.abspath(path) # depends on [control=['if'], data=[]] webbrowser.open_new_tab(path) # depends on [control=['if'], data=[]]
def plot_two_digit_freqs(f2):
    """
    Plot two digits frequency counts using matplotlib.
    """
    # reshape a copy into a 10x10 grid: rows = first digit, cols = second digit
    grid = f2.copy()
    grid.shape = (10, 10)
    ax = plt.matshow(grid)
    plt.colorbar()
    # annotate every cell with the two-digit pair it represents
    for second in range(10):
        for first in range(10):
            plt.text(second - 0.2, first + 0.2, str(first) + str(second))
    plt.ylabel('First digit')
    plt.xlabel('Second digit')
    return ax
def function[plot_two_digit_freqs, parameter[f2]]: constant[ Plot two digits frequency counts using matplotlib. ] variable[f2_copy] assign[=] call[name[f2].copy, parameter[]] name[f2_copy].shape assign[=] tuple[[<ast.Constant object at 0x7da18f813f10>, <ast.Constant object at 0x7da18f813250>]] variable[ax] assign[=] call[name[plt].matshow, parameter[name[f2_copy]]] call[name[plt].colorbar, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[constant[10]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[constant[10]]]] begin[:] call[name[plt].text, parameter[binary_operation[name[i] - constant[0.2]], binary_operation[name[j] + constant[0.2]], binary_operation[call[name[str], parameter[name[j]]] + call[name[str], parameter[name[i]]]]]] call[name[plt].ylabel, parameter[constant[First digit]]] call[name[plt].xlabel, parameter[constant[Second digit]]] return[name[ax]]
keyword[def] identifier[plot_two_digit_freqs] ( identifier[f2] ): literal[string] identifier[f2_copy] = identifier[f2] . identifier[copy] () identifier[f2_copy] . identifier[shape] =( literal[int] , literal[int] ) identifier[ax] = identifier[plt] . identifier[matshow] ( identifier[f2_copy] ) identifier[plt] . identifier[colorbar] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ): identifier[plt] . identifier[text] ( identifier[i] - literal[int] , identifier[j] + literal[int] , identifier[str] ( identifier[j] )+ identifier[str] ( identifier[i] )) identifier[plt] . identifier[ylabel] ( literal[string] ) identifier[plt] . identifier[xlabel] ( literal[string] ) keyword[return] identifier[ax]
def plot_two_digit_freqs(f2): """ Plot two digits frequency counts using matplotlib. """ f2_copy = f2.copy() f2_copy.shape = (10, 10) ax = plt.matshow(f2_copy) plt.colorbar() for i in range(10): for j in range(10): plt.text(i - 0.2, j + 0.2, str(j) + str(i)) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] plt.ylabel('First digit') plt.xlabel('Second digit') return ax
def get_es(urls=None, timeout=DEFAULT_TIMEOUT, force_new=False, **settings):
    """Create an elasticsearch `Elasticsearch` object and return it.

    This will aggressively re-use `Elasticsearch` objects with the
    following rules:

    1. if you pass the same argument values to `get_es()`, then it
       will return the same `Elasticsearch` object
    2. if you pass different argument values to `get_es()`, then it
       will return different `Elasticsearch` object
    3. it caches each `Elasticsearch` object that gets created
    4. if you pass in `force_new=True`, then you are guaranteed to get
       a fresh `Elasticsearch` object AND that object will not be
       cached

    :arg urls: list of uris; Elasticsearch hosts to connect to,
        defaults to ``['http://localhost:9200']``
    :arg timeout: int; the timeout in seconds, defaults to 5
    :arg force_new: Forces get_es() to generate a new Elasticsearch
        object rather than pulling it from cache.
    :arg settings: other settings to pass into Elasticsearch
        constructor; See
        `<http://elasticsearch-py.readthedocs.org/>`_ for more details.

    Examples::

        # Returns cached Elasticsearch object
        es = get_es()

        # Returns a new Elasticsearch object
        es = get_es(force_new=True)

        es = get_es(urls=['localhost'])

        es = get_es(urls=['localhost:9200'], timeout=10,
                    max_retries=3)

    """
    # Cheap way of de-None-ifying things
    urls = urls if urls else DEFAULT_URLS

    # v0.7: Check for 'hosts' instead of 'urls'. Take this out in v1.0.
    if 'hosts' in settings:
        raise DeprecationWarning('"hosts" is deprecated in favor of "urls".')

    if not force_new:
        key = _build_key(urls, timeout, **settings)
        # EAFP: return the cached client if one exists for this key
        try:
            return _cached_elasticsearch[key]
        except KeyError:
            pass

    es = Elasticsearch(urls, timeout=timeout, **settings)

    if not force_new:
        # `key` was computed above in the not-force_new branch; cache
        # the freshly created client under it.
        _cached_elasticsearch[key] = es

    return es
def function[get_es, parameter[urls, timeout, force_new]]: constant[Create an elasticsearch `Elasticsearch` object and return it. This will aggressively re-use `Elasticsearch` objects with the following rules: 1. if you pass the same argument values to `get_es()`, then it will return the same `Elasticsearch` object 2. if you pass different argument values to `get_es()`, then it will return different `Elasticsearch` object 3. it caches each `Elasticsearch` object that gets created 4. if you pass in `force_new=True`, then you are guaranteed to get a fresh `Elasticsearch` object AND that object will not be cached :arg urls: list of uris; Elasticsearch hosts to connect to, defaults to ``['http://localhost:9200']`` :arg timeout: int; the timeout in seconds, defaults to 5 :arg force_new: Forces get_es() to generate a new Elasticsearch object rather than pulling it from cache. :arg settings: other settings to pass into Elasticsearch constructor; See `<http://elasticsearch-py.readthedocs.org/>`_ for more details. Examples:: # Returns cached Elasticsearch object es = get_es() # Returns a new Elasticsearch object es = get_es(force_new=True) es = get_es(urls=['localhost']) es = get_es(urls=['localhost:9200'], timeout=10, max_retries=3) ] variable[urls] assign[=] <ast.BoolOp object at 0x7da1b10425f0> if compare[constant[hosts] in name[settings]] begin[:] <ast.Raise object at 0x7da1b1040ee0> if <ast.UnaryOp object at 0x7da1b1043130> begin[:] variable[key] assign[=] call[name[_build_key], parameter[name[urls], name[timeout]]] if compare[name[key] in name[_cached_elasticsearch]] begin[:] return[call[name[_cached_elasticsearch]][name[key]]] variable[es] assign[=] call[name[Elasticsearch], parameter[name[urls]]] if <ast.UnaryOp object at 0x7da1b1043340> begin[:] call[name[_cached_elasticsearch]][name[key]] assign[=] name[es] return[name[es]]
keyword[def] identifier[get_es] ( identifier[urls] = keyword[None] , identifier[timeout] = identifier[DEFAULT_TIMEOUT] , identifier[force_new] = keyword[False] ,** identifier[settings] ): literal[string] identifier[urls] = identifier[urls] keyword[or] identifier[DEFAULT_URLS] keyword[if] literal[string] keyword[in] identifier[settings] : keyword[raise] identifier[DeprecationWarning] ( literal[string] ) keyword[if] keyword[not] identifier[force_new] : identifier[key] = identifier[_build_key] ( identifier[urls] , identifier[timeout] ,** identifier[settings] ) keyword[if] identifier[key] keyword[in] identifier[_cached_elasticsearch] : keyword[return] identifier[_cached_elasticsearch] [ identifier[key] ] identifier[es] = identifier[Elasticsearch] ( identifier[urls] , identifier[timeout] = identifier[timeout] ,** identifier[settings] ) keyword[if] keyword[not] identifier[force_new] : identifier[_cached_elasticsearch] [ identifier[key] ]= identifier[es] keyword[return] identifier[es]
def get_es(urls=None, timeout=DEFAULT_TIMEOUT, force_new=False, **settings): """Create an elasticsearch `Elasticsearch` object and return it. This will aggressively re-use `Elasticsearch` objects with the following rules: 1. if you pass the same argument values to `get_es()`, then it will return the same `Elasticsearch` object 2. if you pass different argument values to `get_es()`, then it will return different `Elasticsearch` object 3. it caches each `Elasticsearch` object that gets created 4. if you pass in `force_new=True`, then you are guaranteed to get a fresh `Elasticsearch` object AND that object will not be cached :arg urls: list of uris; Elasticsearch hosts to connect to, defaults to ``['http://localhost:9200']`` :arg timeout: int; the timeout in seconds, defaults to 5 :arg force_new: Forces get_es() to generate a new Elasticsearch object rather than pulling it from cache. :arg settings: other settings to pass into Elasticsearch constructor; See `<http://elasticsearch-py.readthedocs.org/>`_ for more details. Examples:: # Returns cached Elasticsearch object es = get_es() # Returns a new Elasticsearch object es = get_es(force_new=True) es = get_es(urls=['localhost']) es = get_es(urls=['localhost:9200'], timeout=10, max_retries=3) """ # Cheap way of de-None-ifying things urls = urls or DEFAULT_URLS # v0.7: Check for 'hosts' instead of 'urls'. Take this out in v1.0. if 'hosts' in settings: raise DeprecationWarning('"hosts" is deprecated in favor of "urls".') # depends on [control=['if'], data=[]] if not force_new: key = _build_key(urls, timeout, **settings) if key in _cached_elasticsearch: return _cached_elasticsearch[key] # depends on [control=['if'], data=['key', '_cached_elasticsearch']] # depends on [control=['if'], data=[]] es = Elasticsearch(urls, timeout=timeout, **settings) if not force_new: # We don't need to rebuild the key here since we built it in # the previous if block, so it's in the namespace. Having said # that, this is a little ew. 
_cached_elasticsearch[key] = es # depends on [control=['if'], data=[]] return es
def generate_move(self, position):
    """
    Returns valid and legal move given position

    :type: position: Board
    :rtype: Move
    """
    move = None
    # keep prompting until the input parses into a move
    while move is None:
        print(position)
        raw = input(str(self.color) + "\'s move \n")
        move = converter.short_alg(raw, self.color, position)
    return move
def function[generate_move, parameter[self, position]]: constant[ Returns valid and legal move given position :type: position: Board :rtype: Move ] while constant[True] begin[:] call[name[print], parameter[name[position]]] variable[raw] assign[=] call[name[input], parameter[binary_operation[call[name[str], parameter[name[self].color]] + constant['s move ]]]] variable[move] assign[=] call[name[converter].short_alg, parameter[name[raw], name[self].color, name[position]]] if compare[name[move] is constant[None]] begin[:] continue return[name[move]]
keyword[def] identifier[generate_move] ( identifier[self] , identifier[position] ): literal[string] keyword[while] keyword[True] : identifier[print] ( identifier[position] ) identifier[raw] = identifier[input] ( identifier[str] ( identifier[self] . identifier[color] )+ literal[string] ) identifier[move] = identifier[converter] . identifier[short_alg] ( identifier[raw] , identifier[self] . identifier[color] , identifier[position] ) keyword[if] identifier[move] keyword[is] keyword[None] : keyword[continue] keyword[return] identifier[move]
def generate_move(self, position): """ Returns valid and legal move given position :type: position: Board :rtype: Move """ while True: print(position) raw = input(str(self.color) + "'s move \n") move = converter.short_alg(raw, self.color, position) if move is None: continue # depends on [control=['if'], data=[]] return move # depends on [control=['while'], data=[]]
def cache_infos(self, queryset):
    """
    Cache the number of entries published and
    the last modification date under each tag.
    """
    self.cache = {}
    for tag in queryset:
        # Performance escape hatch: if the sitemap is too slow, store
        # (tag.count, None) here instead of hitting the database.
        latest = TaggedItem.objects.get_by_model(self.entries_qs, tag)[0]
        self.cache[tag.pk] = (tag.count, latest.last_update)
def function[cache_infos, parameter[self, queryset]]: constant[ Cache the number of entries published and the last modification date under each tag. ] name[self].cache assign[=] dictionary[[], []] for taget[name[item]] in starred[name[queryset]] begin[:] call[name[self].cache][name[item].pk] assign[=] tuple[[<ast.Attribute object at 0x7da1b1d749d0>, <ast.Attribute object at 0x7da1b1d770d0>]]
keyword[def] identifier[cache_infos] ( identifier[self] , identifier[queryset] ): literal[string] identifier[self] . identifier[cache] ={} keyword[for] identifier[item] keyword[in] identifier[queryset] : identifier[self] . identifier[cache] [ identifier[item] . identifier[pk] ]=( identifier[item] . identifier[count] , identifier[TaggedItem] . identifier[objects] . identifier[get_by_model] ( identifier[self] . identifier[entries_qs] , identifier[item] )[ literal[int] ]. identifier[last_update] )
def cache_infos(self, queryset): """ Cache the number of entries published and the last modification date under each tag. """ self.cache = {} for item in queryset: # If the sitemap is too slow, don't hesitate to do this : # self.cache[item.pk] = (item.count, None) self.cache[item.pk] = (item.count, TaggedItem.objects.get_by_model(self.entries_qs, item)[0].last_update) # depends on [control=['for'], data=['item']]
def transform(self, X):
    """Select categorical features and transform them using OneHotEncoder.

    Parameters
    ----------
    X: numpy ndarray, {n_samples, n_components}
        New data, where n_samples is the number of samples and n_components is the number of components.

    Returns
    -------
    array-like, {n_samples, n_components}
    """
    selected = auto_select_categorical_features(X, threshold=self.threshold)
    X_sel, _, n_selected, _ = _X_selected(X, selected)

    # Guard clause: without at least one categorical column there is
    # nothing to encode.
    if n_selected == 0:
        raise ValueError('No categorical feature was found!')

    encoder = OneHotEncoder(categorical_features='all', sparse=False,
                            minimum_fraction=self.minimum_fraction)
    return encoder.fit_transform(X_sel)
def function[transform, parameter[self, X]]: constant[Select categorical features and transform them using OneHotEncoder. Parameters ---------- X: numpy ndarray, {n_samples, n_components} New data, where n_samples is the number of samples and n_components is the number of components. Returns ------- array-like, {n_samples, n_components} ] variable[selected] assign[=] call[name[auto_select_categorical_features], parameter[name[X]]] <ast.Tuple object at 0x7da20c6c7130> assign[=] call[name[_X_selected], parameter[name[X], name[selected]]] if compare[name[n_selected] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20c6c7f70>
keyword[def] identifier[transform] ( identifier[self] , identifier[X] ): literal[string] identifier[selected] = identifier[auto_select_categorical_features] ( identifier[X] , identifier[threshold] = identifier[self] . identifier[threshold] ) identifier[X_sel] , identifier[_] , identifier[n_selected] , identifier[_] = identifier[_X_selected] ( identifier[X] , identifier[selected] ) keyword[if] identifier[n_selected] == literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : identifier[ohe] = identifier[OneHotEncoder] ( identifier[categorical_features] = literal[string] , identifier[sparse] = keyword[False] , identifier[minimum_fraction] = identifier[self] . identifier[minimum_fraction] ) keyword[return] identifier[ohe] . identifier[fit_transform] ( identifier[X_sel] )
def transform(self, X): """Select categorical features and transform them using OneHotEncoder. Parameters ---------- X: numpy ndarray, {n_samples, n_components} New data, where n_samples is the number of samples and n_components is the number of components. Returns ------- array-like, {n_samples, n_components} """ selected = auto_select_categorical_features(X, threshold=self.threshold) (X_sel, _, n_selected, _) = _X_selected(X, selected) if n_selected == 0: # No features selected. raise ValueError('No categorical feature was found!') # depends on [control=['if'], data=[]] else: ohe = OneHotEncoder(categorical_features='all', sparse=False, minimum_fraction=self.minimum_fraction) return ohe.fit_transform(X_sel)
def assert_ast_like(sample, template, _path=None):
    """Check that the sample AST matches the template.

    Raises a suitable subclass of :exc:`ASTMismatch` if a difference is
    detected.

    The ``_path`` parameter is used for recursion; you shouldn't normally pass
    it.
    """
    if _path is None:
        _path = ['tree']

    # A callable template at the top level is a checker function.
    if callable(template):
        return template(sample, _path)

    if not isinstance(sample, type(template)):
        raise ASTNodeTypeMismatch(_path, sample, template)

    for name, expected in ast.iter_fields(template):
        actual = getattr(sample, name)
        field_path = _path + [name]
        if isinstance(expected, list):
            is_node_list = expected and (isinstance(expected[0], ast.AST)
                                         or callable(expected[0]))
            if is_node_list:
                _check_node_list(field_path, actual, expected)
            elif actual != expected:
                # List of plain values, e.g. 'global' statement names
                raise ASTPlainListMismatch(field_path, actual, expected)
        elif isinstance(expected, ast.AST):
            assert_ast_like(actual, expected, field_path)
        elif callable(expected):
            # Checker function for a single field
            expected(actual, field_path)
        elif actual != expected:
            # Single plain value, e.g. Name.id
            raise ASTPlainObjMismatch(field_path, actual, expected)
def function[assert_ast_like, parameter[sample, template, _path]]: constant[Check that the sample AST matches the template. Raises a suitable subclass of :exc:`ASTMismatch` if a difference is detected. The ``_path`` parameter is used for recursion; you shouldn't normally pass it. ] if compare[name[_path] is constant[None]] begin[:] variable[_path] assign[=] list[[<ast.Constant object at 0x7da20c794d60>]] if call[name[callable], parameter[name[template]]] begin[:] return[call[name[template], parameter[name[sample], name[_path]]]] if <ast.UnaryOp object at 0x7da20c7964d0> begin[:] <ast.Raise object at 0x7da20c795c30> for taget[tuple[[<ast.Name object at 0x7da20c794a00>, <ast.Name object at 0x7da20c795a20>]]] in starred[call[name[ast].iter_fields, parameter[name[template]]]] begin[:] variable[sample_field] assign[=] call[name[getattr], parameter[name[sample], name[name]]] variable[field_path] assign[=] binary_operation[name[_path] + list[[<ast.Name object at 0x7da20c795ab0>]]] if call[name[isinstance], parameter[name[template_field], name[list]]] begin[:] if <ast.BoolOp object at 0x7da20e955180> begin[:] call[name[_check_node_list], parameter[name[field_path], name[sample_field], name[template_field]]]
keyword[def] identifier[assert_ast_like] ( identifier[sample] , identifier[template] , identifier[_path] = keyword[None] ): literal[string] keyword[if] identifier[_path] keyword[is] keyword[None] : identifier[_path] =[ literal[string] ] keyword[if] identifier[callable] ( identifier[template] ): keyword[return] identifier[template] ( identifier[sample] , identifier[_path] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[sample] , identifier[type] ( identifier[template] )): keyword[raise] identifier[ASTNodeTypeMismatch] ( identifier[_path] , identifier[sample] , identifier[template] ) keyword[for] identifier[name] , identifier[template_field] keyword[in] identifier[ast] . identifier[iter_fields] ( identifier[template] ): identifier[sample_field] = identifier[getattr] ( identifier[sample] , identifier[name] ) identifier[field_path] = identifier[_path] +[ identifier[name] ] keyword[if] identifier[isinstance] ( identifier[template_field] , identifier[list] ): keyword[if] identifier[template_field] keyword[and] ( identifier[isinstance] ( identifier[template_field] [ literal[int] ], identifier[ast] . identifier[AST] ) keyword[or] identifier[callable] ( identifier[template_field] [ literal[int] ])): identifier[_check_node_list] ( identifier[field_path] , identifier[sample_field] , identifier[template_field] ) keyword[else] : keyword[if] identifier[sample_field] != identifier[template_field] : keyword[raise] identifier[ASTPlainListMismatch] ( identifier[field_path] , identifier[sample_field] , identifier[template_field] ) keyword[elif] identifier[isinstance] ( identifier[template_field] , identifier[ast] . 
identifier[AST] ): identifier[assert_ast_like] ( identifier[sample_field] , identifier[template_field] , identifier[field_path] ) keyword[elif] identifier[callable] ( identifier[template_field] ): identifier[template_field] ( identifier[sample_field] , identifier[field_path] ) keyword[else] : keyword[if] identifier[sample_field] != identifier[template_field] : keyword[raise] identifier[ASTPlainObjMismatch] ( identifier[field_path] , identifier[sample_field] , identifier[template_field] )
def assert_ast_like(sample, template, _path=None): """Check that the sample AST matches the template. Raises a suitable subclass of :exc:`ASTMismatch` if a difference is detected. The ``_path`` parameter is used for recursion; you shouldn't normally pass it. """ if _path is None: _path = ['tree'] # depends on [control=['if'], data=['_path']] if callable(template): # Checker function at the top level return template(sample, _path) # depends on [control=['if'], data=[]] if not isinstance(sample, type(template)): raise ASTNodeTypeMismatch(_path, sample, template) # depends on [control=['if'], data=[]] for (name, template_field) in ast.iter_fields(template): sample_field = getattr(sample, name) field_path = _path + [name] if isinstance(template_field, list): if template_field and (isinstance(template_field[0], ast.AST) or callable(template_field[0])): _check_node_list(field_path, sample_field, template_field) # depends on [control=['if'], data=[]] # List of plain values, e.g. 'global' statement names elif sample_field != template_field: raise ASTPlainListMismatch(field_path, sample_field, template_field) # depends on [control=['if'], data=['sample_field', 'template_field']] # depends on [control=['if'], data=[]] elif isinstance(template_field, ast.AST): assert_ast_like(sample_field, template_field, field_path) # depends on [control=['if'], data=[]] elif callable(template_field): # Checker function template_field(sample_field, field_path) # depends on [control=['if'], data=[]] # Single value, e.g. Name.id elif sample_field != template_field: raise ASTPlainObjMismatch(field_path, sample_field, template_field) # depends on [control=['if'], data=['sample_field', 'template_field']] # depends on [control=['for'], data=[]]
def get_annotation_data_after_time(self, id_tier, time):
    """Give the annotation after a given time.

    When the tier contains reference annotations these will be returned;
    check the reference-annotation variant for that format. If an
    annotation overlaps with ``time`` that annotation will be returned.

    :param str id_tier: Name of the tier.
    :param int time: Time to get the annotation after.
    :raises KeyError: If the tier is non existent.
    """
    # Reference tiers (flag stored at index 1 of the tier tuple) are
    # handled by the dedicated ref-annotation lookup.
    if self.tiers[id_tier][1]:
        return self.get_ref_annotation_after_time(id_tier, time)
    # Gather every annotation from ``time`` to the end of the recording
    # and keep only the earliest one, i.e. the first annotation after
    # (or overlapping) ``time``.
    afters = self.get_annotation_data_between_times(
        id_tier, time, self.get_full_time_interval()[1])
    if afters:
        return [min(afters, key=lambda x: x[0])]
    else:
        return []
def function[get_annotation_data_after_time, parameter[self, id_tier, time]]: constant[Give the annotation before a given time. When the tier contains reference annotations this will be returned, check :func:`get_ref_annotation_data_before_time` for the format. If an annotation overlaps with ``time`` that annotation will be returned. :param str id_tier: Name of the tier. :param int time: Time to get the annotation before. :raises KeyError: If the tier is non existent. ] if call[call[name[self].tiers][name[id_tier]]][constant[1]] begin[:] return[call[name[self].get_ref_annotation_after_time, parameter[name[id_tier], name[time]]]] variable[befores] assign[=] call[name[self].get_annotation_data_between_times, parameter[name[id_tier], name[time], call[call[name[self].get_full_time_interval, parameter[]]][constant[1]]]] if name[befores] begin[:] return[list[[<ast.Call object at 0x7da1b02d1990>]]]
keyword[def] identifier[get_annotation_data_after_time] ( identifier[self] , identifier[id_tier] , identifier[time] ): literal[string] keyword[if] identifier[self] . identifier[tiers] [ identifier[id_tier] ][ literal[int] ]: keyword[return] identifier[self] . identifier[get_ref_annotation_after_time] ( identifier[id_tier] , identifier[time] ) identifier[befores] = identifier[self] . identifier[get_annotation_data_between_times] ( identifier[id_tier] , identifier[time] , identifier[self] . identifier[get_full_time_interval] ()[ literal[int] ]) keyword[if] identifier[befores] : keyword[return] [ identifier[min] ( identifier[befores] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ])] keyword[else] : keyword[return] []
def get_annotation_data_after_time(self, id_tier, time): """Give the annotation before a given time. When the tier contains reference annotations this will be returned, check :func:`get_ref_annotation_data_before_time` for the format. If an annotation overlaps with ``time`` that annotation will be returned. :param str id_tier: Name of the tier. :param int time: Time to get the annotation before. :raises KeyError: If the tier is non existent. """ if self.tiers[id_tier][1]: return self.get_ref_annotation_after_time(id_tier, time) # depends on [control=['if'], data=[]] befores = self.get_annotation_data_between_times(id_tier, time, self.get_full_time_interval()[1]) if befores: return [min(befores, key=lambda x: x[0])] # depends on [control=['if'], data=[]] else: return []
def kill_all(self):
    '''Kill all workers'''
    # Answer each pending worker request with ``None``, which signals the
    # worker to terminate. Stop once all workers are accounted for or no
    # worker polls in within one second.
    while self._num_workers > 0 and self._worker_backend_socket.poll(1000):
        msg = self._worker_backend_socket.recv_pyobj()
        self._worker_backend_socket.send_pyobj(None)
        self._num_workers -= 1
        self.report(f'Kill {msg[1:]}')
    # Wait for every worker process to exit. A plain loop (instead of a
    # throwaway list comprehension) makes the side effect explicit.
    for worker in self._workers:
        worker.join()
def function[kill_all, parameter[self]]: constant[Kill all workers] while <ast.BoolOp object at 0x7da18eb55690> begin[:] variable[msg] assign[=] call[name[self]._worker_backend_socket.recv_pyobj, parameter[]] call[name[self]._worker_backend_socket.send_pyobj, parameter[constant[None]]] <ast.AugAssign object at 0x7da18eb567d0> call[name[self].report, parameter[<ast.JoinedStr object at 0x7da18eb552d0>]] <ast.ListComp object at 0x7da18eb55c60>
keyword[def] identifier[kill_all] ( identifier[self] ): literal[string] keyword[while] identifier[self] . identifier[_num_workers] > literal[int] keyword[and] identifier[self] . identifier[_worker_backend_socket] . identifier[poll] ( literal[int] ): identifier[msg] = identifier[self] . identifier[_worker_backend_socket] . identifier[recv_pyobj] () identifier[self] . identifier[_worker_backend_socket] . identifier[send_pyobj] ( keyword[None] ) identifier[self] . identifier[_num_workers] -= literal[int] identifier[self] . identifier[report] ( literal[string] ) [ identifier[worker] . identifier[join] () keyword[for] identifier[worker] keyword[in] identifier[self] . identifier[_workers] ]
def kill_all(self): """Kill all workers""" while self._num_workers > 0 and self._worker_backend_socket.poll(1000): msg = self._worker_backend_socket.recv_pyobj() self._worker_backend_socket.send_pyobj(None) self._num_workers -= 1 self.report(f'Kill {msg[1:]}') # depends on [control=['while'], data=[]] # join all processes [worker.join() for worker in self._workers]
def cliques(self, reordered = True):
    """
    Returns a list of cliques
    """
    ptr = self.sncolptr
    # One clique per supernode: the slice ptr[k]:ptr[k+1] of the row
    # index array holds its members.
    if reordered:
        return [list(self.snrowidx[ptr[k]:ptr[k + 1]])
                for k in range(self.Nsn)]
    # Map members back to the original ordering via the permutation.
    return [list(self.__p[self.snrowidx[ptr[k]:ptr[k + 1]]])
            for k in range(self.Nsn)]
def function[cliques, parameter[self, reordered]]: constant[ Returns a list of cliques ] if name[reordered] begin[:] return[<ast.ListComp object at 0x7da20e954d30>]
keyword[def] identifier[cliques] ( identifier[self] , identifier[reordered] = keyword[True] ): literal[string] keyword[if] identifier[reordered] : keyword[return] [ identifier[list] ( identifier[self] . identifier[snrowidx] [ identifier[self] . identifier[sncolptr] [ identifier[k] ]: identifier[self] . identifier[sncolptr] [ identifier[k] + literal[int] ]]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[self] . identifier[Nsn] )] keyword[else] : keyword[return] [ identifier[list] ( identifier[self] . identifier[__p] [ identifier[self] . identifier[snrowidx] [ identifier[self] . identifier[sncolptr] [ identifier[k] ]: identifier[self] . identifier[sncolptr] [ identifier[k] + literal[int] ]]]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[self] . identifier[Nsn] )]
def cliques(self, reordered=True): """ Returns a list of cliques """ if reordered: return [list(self.snrowidx[self.sncolptr[k]:self.sncolptr[k + 1]]) for k in range(self.Nsn)] # depends on [control=['if'], data=[]] else: return [list(self.__p[self.snrowidx[self.sncolptr[k]:self.sncolptr[k + 1]]]) for k in range(self.Nsn)]
def other_profiles(p,ifig=1,xlm=xlm,show=False,xaxis=xaxis_type, figsize2=(10,8)):
    '''Four panels of other profile plots

    Parameters
    ----------
    p : instance
        mesa_profile instance
    xlm : tuple
        xlimits: mass_min, mass_max
    show : Boolean
        False for batch use
        True for interactive use
    xaxis : str
        "Eulerian" (radius in Mm) or "Lagrangian" (mass coordinate)
    figsize2 : tuple
        figure size handed to matplotlib

    Notes
    -----
    ``xlm``, ``xaxis_type``, ``title_format`` and the style helpers ``u``,
    ``a``, ``b`` are module-level globals; ``ast`` here is the astronomy
    constants module providing ``rsun_cm`` (not the stdlib ``ast``).
    '''
    matplotlib.rc('figure',facecolor='white',figsize=figsize2)

    mass = p.get('mass')                          # in units of Msun
    radius = p.get('radius')*ast.rsun_cm/1.e8     # in units of Mm

    # Pick the x coordinate and x limits. NOTE: the original compared
    # strings with ``is``, which only works by CPython interning luck;
    # ``==`` is the reliable comparison.
    if xaxis == "Eulerian":
        xxx = radius
        if xlm[0] == 0 and xlm[1] == 0:
            indtop = 0
            indbot = len(mass)-1
        else:
            indbot = np.where(radius >= xlm[0])[0][-1]
            indtop = np.where(radius < xlm[1])[0][0]
        xll = (radius[indbot],radius[indtop])
        xxlabel = "radius (Mm)"
    elif xaxis == "Lagrangian":
        xxx = mass
        xll = xlm
        xxlabel = "$M / \mathrm{M_{sun}}$"
    else:
        print("Error: don't understand xaxis choice, must be Lagrangian or Eulerian")
        # Fail fast instead of crashing later with a NameError on xxx/xll.
        raise ValueError("xaxis must be 'Lagrangian' or 'Eulerian'")

    # create subplot structure: 3 rows x 2 columns sharing the x axis
    t, ([ax1,ax2],[ax3, ax4],[ax5,ax6]) = matplotlib.pyplot.subplots(3, 2, sharex=True, sharey=False)

    # panel 1 (ax1): nuclear energy generation for each burning source
    Enuc = ['pp','cno','tri_alfa','burn_c','burn_o','burn_n','burn_si',
            'burn_mg','burn_na','burn_ne','eps_nuc']
    ax = ax1
    for ind, thing in enumerate(Enuc):
        ax.plot(xxx, np.log10(p.get(thing)), ls=u.linestylecb(ind,a,b)[0],
                marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                markevery=50,label=thing)
    ax.set_ylim(0,15)
    ax.set_xlim(xll)
    ax.legend(loc=1, ncol=2, fontsize='small')
    ax.set_ylabel('$ \log \epsilon $')

    # panel 2 (ax2): tanh(log(grad_rad/grad_ad)); positive values mark
    # convectively unstable regions
    mix = [['gradr']]
    mix1 = [['grada']]
    for i in range(1):
        for ind, thing in enumerate(mix[i]):
            for thing1 in mix1[i]:
                ax2.plot(xxx, (np.tanh(np.log10(p.get(thing))-np.log10(p.get(thing1)))),
                         ls=u.linestylecb(ind,a,b)[0],
                         marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                         markevery=50,label=thing)
    ax2.axhline(ls='dashed',color='black',label="")
    ax2.yaxis.tick_right()
    ax2.yaxis.set_label_position("right")
    ax2.set_ylim(-.1,.1)
    ax2.set_xlim(xll)
    ax2.legend(labels='Mixing',loc=1)
    ax2.set_ylabel('$\\tanh(\\log(\\frac{\\nabla_{rad}}{\\nabla_{ad}}))$')

    # panel 5 (ax5): specific entropy profile
    S = ['entropy']
    ax = ax5
    for thing in S:
        ind = 2   # fixed style index so the colour stays stable
        ax.plot(xxx, p.get(thing), ls=u.linestylecb(ind,a,b)[0],
                marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                markevery=50,label=thing)
    ax.set_ylim(0,50)
    ax.set_xlim(xll)
    ax.legend(loc=1)
    ax.set_xlabel(xxlabel)
    ax.set_ylabel(' Specific Entropy')

    # panel 6 (ax6): log rho, mean molecular weight and T scaled by 1e8
    S = ['logRho','mu','temperature']
    T8 = [False,False,True]   # which quantities are divided by 1e8
    ax = ax6
    for ind, thing in enumerate(S):
        thisy = p.get(thing)/1.e8 if T8[ind] else p.get(thing)
        ax.plot(xxx, thisy, ls=u.linestylecb(ind,a,b)[0],
                marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                markevery=50,label=thing)
    ax.set_ylim(0.,9.)
    ax.set_xlim(xll)
    ax.legend(loc=0)
    ax.set_xlabel(xxlabel)
    ax.set_ylabel('log Rho, mu, T8')

    # panel 4 (ax4): gas-pressure fraction (left axis) and opacity
    # (right axis via twinx)
    S = ['pgas_div_ptotal']
    o = ['log_opacity']
    ax = ax4
    axo = ax.twinx()
    for thing in S:
        ind = 5
        ax.plot(xxx, p.get(thing), ls=u.linestylecb(ind,a,b)[0],
                marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                markevery=50,label=thing)
    for thing in o:
        ind = 3
        axo.plot(xxx, p.get(thing), ls=u.linestylecb(ind,a,b)[0],
                 marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                 markevery=50,label=thing)
    ax.set_xlim(xll)
    axo.set_xlim(xll)
    ax.legend(loc=0)
    axo.legend(loc=(.15,.85))
    ax.set_ylabel('$\mathrm{ P_{gas} / P_{tot}}$')
    axo.set_ylabel('$ log(Opacity)$')

    # panel 3 (ax3): mixing -- diffusion coefficient and convective
    # velocity over sound speed (shifted by +16 to share the axis)
    gT = ['log_D_mix','conv_vel_div_csound']
    logy = [False,True]
    ax = ax3
    for ind, thing in enumerate(gT):
        # NOTE(review): np.log is the natural log; log10 may have been
        # intended to match the axis label -- confirm before changing.
        thisx = np.log(p.get(thing))+16 if logy[ind] else p.get(thing)
        ax.plot(xxx, thisx, ls=u.linestylecb(ind,a,b)[0],
                marker=u.linestylecb(ind,a,b)[1], color=u.linestylecb(ind,a,b)[2],
                markevery=50,label=thing)
    ax.axhline(16,ls='dashed',color='black',label="$\mathrm{Ma}=0$")
    ax.set_ylim(10,17)
    ax.set_xlim(xll)
    ax.legend(loc=0)
    ax.set_ylabel('$\\log D / [cgs] \\log v_{\mathrm{conv}}/c_s + 16 $ ')

    title_str = ("Other profiles: "+'t ='+str(title_format%p.header_attr['star_age'])
                 +', dt ='+str(title_format%p.header_attr['time_step'])
                 +', model number ='+str(int(p.header_attr['model_number'])))
    t.suptitle(title_str, fontsize=12)
    t.subplots_adjust(left=0.1, bottom=0.1, right=0.9, top=0.9,
                      wspace=0.15, hspace=0.1)
    t.savefig('other'+str(int(p.header_attr['model_number'])).zfill(6)+'.png')
def function[other_profiles, parameter[p, ifig, xlm, show, xaxis, figsize2]]: constant[Four panels of other profile plots Parameters ---------- p : instance mesa_profile instance xll : tuple xlimits: mass_min, mass_max show : Boolean False for batch use True for interactive use ] call[name[matplotlib].rc, parameter[constant[figure]]] variable[mass] assign[=] call[name[p].get, parameter[constant[mass]]] variable[radius] assign[=] binary_operation[binary_operation[call[name[p].get, parameter[constant[radius]]] * name[ast].rsun_cm] / constant[100000000.0]] if compare[name[xaxis] is constant[Eulerian]] begin[:] variable[xxx] assign[=] name[radius] if <ast.BoolOp object at 0x7da1b1941300> begin[:] variable[indtop] assign[=] constant[0] variable[indbot] assign[=] binary_operation[call[name[len], parameter[name[mass]]] - constant[1]] variable[xll] assign[=] tuple[[<ast.Subscript object at 0x7da1b1941d50>, <ast.Subscript object at 0x7da1b1941de0>]] variable[xxlabel] assign[=] constant[radius (Mm)] <ast.Tuple object at 0x7da1b1942350> assign[=] call[name[matplotlib].pyplot.subplots, parameter[constant[3], constant[2]]] variable[Enuc] assign[=] list[[<ast.Constant object at 0x7da1b1942c20>, <ast.Constant object at 0x7da1b1942ce0>, <ast.Constant object at 0x7da1b1942d40>, <ast.Constant object at 0x7da1b1942b00>, <ast.Constant object at 0x7da1b1942ad0>, <ast.Constant object at 0x7da1b1942b30>, <ast.Constant object at 0x7da1b1942a70>, <ast.Constant object at 0x7da1b1942a10>, <ast.Constant object at 0x7da1b19429e0>, <ast.Constant object at 0x7da1b1942a40>, <ast.Constant object at 0x7da1b1942aa0>]] variable[ax] assign[=] name[ax1] for taget[name[thing]] in starred[name[Enuc]] begin[:] variable[ind] assign[=] call[name[Enuc].index, parameter[name[thing]]] call[name[ax].plot, parameter[name[xxx], call[name[np].log10, parameter[call[name[p].get, parameter[name[thing]]]]]]] call[name[ax].set_ylim, parameter[constant[0], constant[15]]] call[name[ax].set_xlim, parameter[name[xll]]] 
call[name[ax].legend, parameter[]] call[name[ax].set_ylabel, parameter[constant[$ \log \epsilon $]]] variable[mix] assign[=] list[[<ast.List object at 0x7da1b19437f0>]] variable[mix1] assign[=] list[[<ast.List object at 0x7da1b1943b80>]] for taget[name[i]] in starred[call[name[range], parameter[constant[1]]]] begin[:] for taget[name[thing]] in starred[call[name[mix]][name[i]]] begin[:] variable[ind] assign[=] call[call[name[mix]][name[i]].index, parameter[name[thing]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1]]]] begin[:] for taget[name[thing1]] in starred[call[name[mix1]][name[i]]] begin[:] variable[ind1] assign[=] call[call[name[mix1]][name[i]].index, parameter[name[thing1]]] call[name[ax2].plot, parameter[name[xxx], call[name[np].tanh, parameter[binary_operation[call[name[np].log10, parameter[call[name[p].get, parameter[name[thing]]]]] - call[name[np].log10, parameter[call[name[p].get, parameter[name[thing1]]]]]]]]]] call[name[ax2].axhline, parameter[]] call[name[ax2].yaxis.tick_right, parameter[]] call[name[ax2].yaxis.set_label_position, parameter[constant[right]]] call[name[ax2].set_ylim, parameter[<ast.UnaryOp object at 0x7da1b1998100>, constant[0.1]]] call[name[ax2].set_xlim, parameter[name[xll]]] call[name[ax2].legend, parameter[]] call[name[ax2].set_ylabel, parameter[constant[$\tanh(\log(\frac{\nabla_{rad}}{\nabla_{ad}}))$]]] variable[S] assign[=] list[[<ast.Constant object at 0x7da1b199b790>]] variable[ax] assign[=] name[ax5] for taget[name[thing]] in starred[name[S]] begin[:] variable[ind] assign[=] constant[2] call[name[ax].plot, parameter[name[xxx], call[name[p].get, parameter[name[thing]]]]] call[name[ax].set_ylim, parameter[constant[0], constant[50]]] call[name[ax].set_xlim, parameter[name[xll]]] call[name[ax].legend, parameter[]] call[name[ax].set_xlabel, parameter[name[xxlabel]]] call[name[ax].set_ylabel, parameter[constant[ Specific Entropy]]] variable[S] assign[=] list[[<ast.Constant object at 0x7da1b1999000>, 
<ast.Constant object at 0x7da1b1998fd0>, <ast.Constant object at 0x7da1b1998e20>]] variable[T8] assign[=] list[[<ast.Constant object at 0x7da1b1a6ed40>, <ast.Constant object at 0x7da1b1a6dc60>, <ast.Constant object at 0x7da1b1a6efb0>]] variable[ax] assign[=] name[ax6] for taget[name[thing]] in starred[name[S]] begin[:] variable[ind] assign[=] call[name[S].index, parameter[name[thing]]] variable[thisy] assign[=] <ast.IfExp object at 0x7da1b1a6dc90> call[name[ax].plot, parameter[name[xxx], name[thisy]]] call[name[ax].set_ylim, parameter[constant[0.0], constant[9.0]]] call[name[ax].set_xlim, parameter[name[xll]]] call[name[ax].legend, parameter[]] call[name[ax].set_xlabel, parameter[name[xxlabel]]] call[name[ax].set_ylabel, parameter[constant[log Rho, mu, T8]]] variable[S] assign[=] list[[<ast.Constant object at 0x7da1b1a6ea70>]] variable[o] assign[=] list[[<ast.Constant object at 0x7da1b1a6dd50>]] variable[ax] assign[=] name[ax4] variable[axo] assign[=] call[name[ax].twinx, parameter[]] for taget[name[thing]] in starred[name[S]] begin[:] variable[ind] assign[=] constant[5] call[name[ax].plot, parameter[name[xxx], call[name[p].get, parameter[name[thing]]]]] for taget[name[thing]] in starred[name[o]] begin[:] variable[ind] assign[=] constant[3] call[name[axo].plot, parameter[name[xxx], call[name[p].get, parameter[name[thing]]]]] call[name[ax].set_xlim, parameter[name[xll]]] call[name[axo].set_xlim, parameter[name[xll]]] call[name[ax].legend, parameter[]] call[name[axo].legend, parameter[]] call[name[ax].set_ylabel, parameter[constant[$\mathrm{ P_{gas} / P_{tot}}$]]] call[name[axo].set_ylabel, parameter[constant[$ log(Opacity)$]]] variable[gT] assign[=] list[[<ast.Constant object at 0x7da18f58cf40>, <ast.Constant object at 0x7da18f58ff10>]] variable[logy] assign[=] list[[<ast.Constant object at 0x7da18f58d870>, <ast.Constant object at 0x7da18f58ea40>]] variable[ax] assign[=] name[ax3] variable[ind] assign[=] constant[0] for taget[name[thing]] in starred[name[gT]] 
begin[:] variable[ind] assign[=] call[name[gT].index, parameter[name[thing]]] variable[thisx] assign[=] <ast.IfExp object at 0x7da18f58ddb0> call[name[ax].plot, parameter[name[xxx], name[thisx]]] call[name[ax].axhline, parameter[constant[16]]] call[name[ax].set_ylim, parameter[constant[10], constant[17]]] call[name[ax].set_xlim, parameter[name[xll]]] call[name[ax].legend, parameter[]] call[name[ax].set_ylabel, parameter[constant[$\log D / [cgs] \log v_{\mathrm{conv}}/c_s + 16 $ ]]] variable[title_str] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[Other profiles: ] + constant[t =]] + call[name[str], parameter[binary_operation[name[title_format] <ast.Mod object at 0x7da2590d6920> call[name[p].header_attr][constant[star_age]]]]]] + constant[, dt =]] + call[name[str], parameter[binary_operation[name[title_format] <ast.Mod object at 0x7da2590d6920> call[name[p].header_attr][constant[time_step]]]]]] + constant[, model number =]] + call[name[str], parameter[call[name[int], parameter[call[name[p].header_attr][constant[model_number]]]]]]] call[name[t].suptitle, parameter[name[title_str]]] call[name[t].subplots_adjust, parameter[]] call[name[t].savefig, parameter[binary_operation[binary_operation[constant[other] + call[call[name[str], parameter[call[name[int], parameter[call[name[p].header_attr][constant[model_number]]]]]].zfill, parameter[constant[6]]]] + constant[.png]]]]
keyword[def] identifier[other_profiles] ( identifier[p] , identifier[ifig] = literal[int] , identifier[xlm] = identifier[xlm] , identifier[show] = keyword[False] , identifier[xaxis] = identifier[xaxis_type] , identifier[figsize2] =( literal[int] , literal[int] )): literal[string] identifier[matplotlib] . identifier[rc] ( literal[string] , identifier[facecolor] = literal[string] , identifier[figsize] = identifier[figsize2] ) identifier[mass] = identifier[p] . identifier[get] ( literal[string] ) identifier[radius] = identifier[p] . identifier[get] ( literal[string] )* identifier[ast] . identifier[rsun_cm] / literal[int] keyword[if] identifier[xaxis] keyword[is] literal[string] : identifier[xxx] = identifier[radius] keyword[if] identifier[xlm] [ literal[int] ]== literal[int] keyword[and] identifier[xlm] [ literal[int] ]== literal[int] : identifier[indtop] = literal[int] identifier[indbot] = identifier[len] ( identifier[mass] )- literal[int] keyword[else] : identifier[indbot] = identifier[np] . identifier[where] ( identifier[radius] >= identifier[xlm] [ literal[int] ])[ literal[int] ][- literal[int] ] identifier[indtop] = identifier[np] . identifier[where] ( identifier[radius] < identifier[xlm] [ literal[int] ])[ literal[int] ][ literal[int] ] identifier[xll] =( identifier[radius] [ identifier[indbot] ], identifier[radius] [ identifier[indtop] ]) identifier[xxlabel] = literal[string] keyword[elif] identifier[xaxis] keyword[is] literal[string] : identifier[xxx] = identifier[mass] identifier[xll] = identifier[xlm] identifier[xxlabel] = literal[string] keyword[else] : identifier[print] ( literal[string] ) identifier[t] ,([ identifier[ax1] , identifier[ax2] ],[ identifier[ax3] , identifier[ax4] ],[ identifier[ax5] , identifier[ax6] ])= identifier[matplotlib] . identifier[pyplot] . 
identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[sharey] = keyword[False] ) identifier[Enuc] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[ax] = identifier[ax1] keyword[for] identifier[thing] keyword[in] identifier[Enuc] : identifier[ind] = identifier[Enuc] . identifier[index] ( identifier[thing] ) identifier[ax] . identifier[plot] ( identifier[xxx] , identifier[np] . identifier[log10] ( identifier[p] . identifier[get] ( identifier[thing] )), identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) identifier[ax] . identifier[set_ylim] ( literal[int] , literal[int] ) identifier[ax] . identifier[set_xlim] ( identifier[xll] ) identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] , identifier[ncol] = literal[int] , identifier[fontsize] = literal[string] ) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[mix] =[[ literal[string] ]] identifier[mix1] =[[ literal[string] ]] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[thing] keyword[in] identifier[mix] [ identifier[i] ]: identifier[ind] = identifier[mix] [ identifier[i] ]. identifier[index] ( identifier[thing] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[thing1] keyword[in] identifier[mix1] [ identifier[i] ]: identifier[ind1] = identifier[mix1] [ identifier[i] ]. 
identifier[index] ( identifier[thing1] ) identifier[ax2] . identifier[plot] ( identifier[xxx] ,( identifier[np] . identifier[tanh] ( identifier[np] . identifier[log10] ( identifier[p] . identifier[get] ( identifier[thing] ))- identifier[np] . identifier[log10] ( identifier[p] . identifier[get] ( identifier[thing1] )))), identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) identifier[ax2] . identifier[axhline] ( identifier[ls] = literal[string] , identifier[color] = literal[string] , identifier[label] = literal[string] ) identifier[ax2] . identifier[yaxis] . identifier[tick_right] () identifier[ax2] . identifier[yaxis] . identifier[set_label_position] ( literal[string] ) identifier[ax2] . identifier[set_ylim] (- literal[int] , literal[int] ) identifier[ax2] . identifier[set_xlim] ( identifier[xll] ) identifier[ax2] . identifier[legend] ( identifier[labels] = literal[string] , identifier[loc] = literal[int] ) identifier[ax2] . identifier[set_ylabel] ( literal[string] ) identifier[S] =[ literal[string] ] identifier[ax] = identifier[ax5] keyword[for] identifier[thing] keyword[in] identifier[S] : identifier[ind] = literal[int] identifier[ax] . identifier[plot] ( identifier[xxx] , identifier[p] . identifier[get] ( identifier[thing] ), identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . 
identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) identifier[ax] . identifier[set_ylim] ( literal[int] , literal[int] ) identifier[ax] . identifier[set_xlim] ( identifier[xll] ) identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] ) identifier[ax] . identifier[set_xlabel] ( identifier[xxlabel] ) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[S] =[ literal[string] , literal[string] , literal[string] ] identifier[T8] =[ keyword[False] , keyword[False] , keyword[True] ] identifier[ax] = identifier[ax6] keyword[for] identifier[thing] keyword[in] identifier[S] : identifier[ind] = identifier[S] . identifier[index] ( identifier[thing] ) identifier[thisy] = identifier[p] . identifier[get] ( identifier[thing] )/ literal[int] keyword[if] identifier[T8] [ identifier[ind] ] keyword[else] identifier[p] . identifier[get] ( identifier[thing] ) identifier[ax] . identifier[plot] ( identifier[xxx] , identifier[thisy] , identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) identifier[ax] . identifier[set_ylim] ( literal[int] , literal[int] ) identifier[ax] . identifier[set_xlim] ( identifier[xll] ) identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] ) identifier[ax] . identifier[set_xlabel] ( identifier[xxlabel] ) identifier[ax] . 
identifier[set_ylabel] ( literal[string] ) identifier[S] =[ literal[string] ] identifier[o] =[ literal[string] ] identifier[ax] = identifier[ax4] identifier[axo] = identifier[ax] . identifier[twinx] () keyword[for] identifier[thing] keyword[in] identifier[S] : identifier[ind] = literal[int] identifier[ax] . identifier[plot] ( identifier[xxx] , identifier[p] . identifier[get] ( identifier[thing] ), identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) keyword[for] identifier[thing] keyword[in] identifier[o] : identifier[ind] = literal[int] identifier[axo] . identifier[plot] ( identifier[xxx] , identifier[p] . identifier[get] ( identifier[thing] ), identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) identifier[ax] . identifier[set_xlim] ( identifier[xll] ) identifier[axo] . identifier[set_xlim] ( identifier[xll] ) identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] ) identifier[axo] . identifier[legend] ( identifier[loc] =( literal[int] , literal[int] )) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[axo] . 
identifier[set_ylabel] ( literal[string] ) identifier[gT] =[ literal[string] , literal[string] ] identifier[logy] =[ keyword[False] , keyword[True] ] identifier[ax] = identifier[ax3] identifier[ind] = literal[int] keyword[for] identifier[thing] keyword[in] identifier[gT] : identifier[ind] = identifier[gT] . identifier[index] ( identifier[thing] ) identifier[thisx] = identifier[np] . identifier[log] ( identifier[p] . identifier[get] ( identifier[thing] ))+ literal[int] keyword[if] identifier[logy] [ identifier[ind] ] keyword[else] identifier[p] . identifier[get] ( identifier[thing] ) identifier[ax] . identifier[plot] ( identifier[xxx] , identifier[thisx] , identifier[ls] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[marker] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[color] = identifier[u] . identifier[linestylecb] ( identifier[ind] , identifier[a] , identifier[b] )[ literal[int] ], identifier[markevery] = literal[int] , identifier[label] = identifier[thing] ) identifier[ax] . identifier[axhline] ( literal[int] , identifier[ls] = literal[string] , identifier[color] = literal[string] , identifier[label] = literal[string] ) identifier[ax] . identifier[set_ylim] ( literal[int] , literal[int] ) identifier[ax] . identifier[set_xlim] ( identifier[xll] ) identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] ) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[title_str] = literal[string] + literal[string] + identifier[str] ( identifier[title_format] % identifier[p] . identifier[header_attr] [ literal[string] ])+ literal[string] + identifier[str] ( identifier[title_format] % identifier[p] . identifier[header_attr] [ literal[string] ])+ literal[string] + identifier[str] ( identifier[int] ( identifier[p] . identifier[header_attr] [ literal[string] ])) identifier[t] . 
identifier[suptitle] ( identifier[title_str] , identifier[fontsize] = literal[int] ) identifier[t] . identifier[subplots_adjust] ( identifier[left] = literal[int] , identifier[bottom] = literal[int] , identifier[right] = literal[int] , identifier[top] = literal[int] , identifier[wspace] = literal[int] , identifier[hspace] = literal[int] ) identifier[t] . identifier[savefig] ( literal[string] + identifier[str] ( identifier[int] ( identifier[p] . identifier[header_attr] [ literal[string] ])). identifier[zfill] ( literal[int] )+ literal[string] )
def other_profiles(p, ifig=1, xlm=xlm, show=False, xaxis=xaxis_type, figsize2=(10, 8)):
    """Four panels of other profile plots.

    Parameters
    ----------
    p : instance
        mesa_profile instance
    ifig : int
        figure number (kept for interface compatibility; not used here)
    xlm : tuple
        xlimits: mass_min, mass_max; (0, 0) selects the whole profile
        (Eulerian mode only)
    show : Boolean
        False for batch use, True for interactive use (kept for interface
        compatibility; not used here)
    xaxis : string
        'Eulerian' (radius in Mm) or 'Lagrangian' (mass in Msun) abscissa
    figsize2 : tuple
        figure size in inches

    Raises
    ------
    ValueError
        If *xaxis* is neither 'Eulerian' nor 'Lagrangian'.
    """
    matplotlib.rc('figure', facecolor='white', figsize=figsize2)

    mass = p.get('mass')  # in units of Msun
    radius = p.get('radius') * ast.rsun_cm / 100000000.0  # in units of Mm

    # BUG FIX: use '==' (value equality) instead of 'is' (identity) for the
    # string comparison; 'is' only worked by accident of string interning and
    # raises a SyntaxWarning on modern Python.
    if xaxis == 'Eulerian':
        xxx = radius
        if xlm[0] == 0 and xlm[1] == 0:
            indtop = 0
            indbot = len(mass) - 1
        else:
            indbot = np.where(radius >= xlm[0])[0][-1]
            indtop = np.where(radius < xlm[1])[0][0]
        xll = (radius[indbot], radius[indtop])
        xxlabel = 'radius (Mm)'
    elif xaxis == 'Lagrangian':
        xxx = mass
        xll = xlm
        xxlabel = '$M / \\mathrm{M_{sun}}$'
    else:
        # BUG FIX: the original only printed an error here and then crashed
        # later with a NameError on xxx/xll; fail fast with a clear message.
        raise ValueError("xaxis must be 'Lagrangian' or 'Eulerian', got %r" % (xaxis,))

    # create subplot structure: 3 rows x 2 columns, shared x axis
    t, ([ax1, ax2], [ax3, ax4], [ax5, ax6]) = matplotlib.pyplot.subplots(
        3, 2, sharex=True, sharey=False)

    # ---------------- panel 1 (ax1): nuclear energy generation ----------------
    Enuc = ['pp', 'cno', 'tri_alfa', 'burn_c', 'burn_o', 'burn_n', 'burn_si',
            'burn_mg', 'burn_na', 'burn_ne', 'eps_nuc']
    for ind, thing in enumerate(Enuc):
        ax1.plot(xxx, np.log10(p.get(thing)), ls=u.linestylecb(ind, a, b)[0],
                 marker=u.linestylecb(ind, a, b)[1],
                 color=u.linestylecb(ind, a, b)[2],
                 markevery=50, label=thing)
    ax1.set_ylim(0, 15)
    ax1.set_xlim(xll)
    ax1.legend(loc=1, ncol=2, fontsize='small')
    ax1.set_ylabel('$ \\log \\epsilon $')

    # ------------- panel 2 (ax2): radiative vs adiabatic gradient -------------
    # The original built this from nested single-element lists (mix/mix1) whose
    # loop variables leaked into the plot call; this is the equivalent single
    # plot of tanh(log10(gradr) - log10(grada)) with style index 0.
    ind = 0
    ax2.plot(xxx, np.tanh(np.log10(p.get('gradr')) - np.log10(p.get('grada'))),
             ls=u.linestylecb(ind, a, b)[0], marker=u.linestylecb(ind, a, b)[1],
             color=u.linestylecb(ind, a, b)[2], markevery=50, label='gradr')
    ax2.axhline(ls='dashed', color='black', label='')
    ax2.yaxis.tick_right()
    ax2.yaxis.set_label_position('right')
    ax2.set_ylim(-0.1, 0.1)
    ax2.set_xlim(xll)
    # BUG FIX: legend(labels='Mixing') iterates the string character by
    # character (one legend entry per letter); use the artists' own labels.
    ax2.legend(loc=1)
    ax2.set_ylabel('$\\tanh(\\log(\\frac{\\nabla_{rad}}{\\nabla_{ad}}))$')

    # ---------------- panel 3 (ax5): specific entropy ----------------
    for thing in ['entropy']:
        ind = 2
        ax5.plot(xxx, p.get(thing), ls=u.linestylecb(ind, a, b)[0],
                 marker=u.linestylecb(ind, a, b)[1],
                 color=u.linestylecb(ind, a, b)[2], markevery=50, label=thing)
    ax5.set_ylim(0, 50)
    ax5.set_xlim(xll)
    ax5.legend(loc=1)
    ax5.set_xlabel(xxlabel)
    ax5.set_ylabel(' Specific Entropy')

    # ---------------- panel 4 (ax6): rho, mu, T ----------------
    S = ['logRho', 'mu', 'temperature']
    T8 = [False, False, True]  # True -> scale that quantity to units of 1e8 K
    for ind, thing in enumerate(S):
        thisy = p.get(thing) / 100000000.0 if T8[ind] else p.get(thing)
        ax6.plot(xxx, thisy, ls=u.linestylecb(ind, a, b)[0],
                 marker=u.linestylecb(ind, a, b)[1],
                 color=u.linestylecb(ind, a, b)[2], markevery=50, label=thing)
    ax6.set_ylim(0.0, 9.0)
    ax6.set_xlim(xll)
    ax6.legend(loc=0)
    ax6.set_xlabel(xxlabel)
    ax6.set_ylabel('log Rho, mu, T8')

    # -------- panel 5 (ax4): gas pressure fraction + opacity (twin y) --------
    axo = ax4.twinx()
    for thing in ['pgas_div_ptotal']:
        ind = 5
        ax4.plot(xxx, p.get(thing), ls=u.linestylecb(ind, a, b)[0],
                 marker=u.linestylecb(ind, a, b)[1],
                 color=u.linestylecb(ind, a, b)[2], markevery=50, label=thing)
    for thing in ['log_opacity']:
        ind = 3
        axo.plot(xxx, p.get(thing), ls=u.linestylecb(ind, a, b)[0],
                 marker=u.linestylecb(ind, a, b)[1],
                 color=u.linestylecb(ind, a, b)[2], markevery=50, label=thing)
    ax4.set_xlim(xll)
    axo.set_xlim(xll)
    ax4.legend(loc=0)
    axo.legend(loc=(0.15, 0.85))
    ax4.set_ylabel('$\\mathrm{ P_{gas} / P_{tot}}$')
    axo.set_ylabel('$ log(Opacity)$')

    # ---------------- panel 6 (ax3): mixing / diffusion ----------------
    gT = ['log_D_mix', 'conv_vel_div_csound']
    logy = [False, True]  # True -> plot natural log of the quantity, shifted
    for ind, thing in enumerate(gT):
        # NOTE(review): np.log is the natural log but the axis label reads
        # "log" -- confirm whether np.log10 was intended here.
        thisx = np.log(p.get(thing)) + 16 if logy[ind] else p.get(thing)
        ax3.plot(xxx, thisx, ls=u.linestylecb(ind, a, b)[0],
                 marker=u.linestylecb(ind, a, b)[1],
                 color=u.linestylecb(ind, a, b)[2], markevery=50, label=thing)
    ax3.axhline(16, ls='dashed', color='black', label='$\\mathrm{Ma}=0$')
    ax3.set_ylim(10, 17)
    ax3.set_xlim(xll)
    ax3.legend(loc=0)
    ax3.set_ylabel('$\\log D / [cgs] \\log v_{\\mathrm{conv}}/c_s + 16 $ ')

    # figure title: age, time step and model number from the profile header
    title_str = ('Other profiles: ' + 't =' + str(title_format % p.header_attr['star_age'])
                 + ', dt =' + str(title_format % p.header_attr['time_step'])
                 + ', model number =' + str(int(p.header_attr['model_number'])))
    t.suptitle(title_str, fontsize=12)
    t.subplots_adjust(left=0.1, bottom=0.1, right=0.9, top=0.9,
                      wspace=0.15, hspace=0.1)
    t.savefig('other' + str(int(p.header_attr['model_number'])).zfill(6) + '.png')
def has_section(self, section) -> bool:
    """Check whether the configuration file defines *section*.

    The file at ``self.filepath`` is re-read first, so the answer
    reflects the current on-disk contents.

    Parameters
    ----------
    section : string
        Section.

    Returns
    -------
    boolean
    """
    parser = self.config
    parser.read(self.filepath)
    return parser.has_section(section)
def function[has_section, parameter[self, section]]: constant[Test if file has section. Parameters ---------- section : string Section. Returns ------- boolean ] call[name[self].config.read, parameter[name[self].filepath]] return[call[name[self].config.has_section, parameter[name[section]]]]
keyword[def] identifier[has_section] ( identifier[self] , identifier[section] )-> identifier[bool] : literal[string] identifier[self] . identifier[config] . identifier[read] ( identifier[self] . identifier[filepath] ) keyword[return] identifier[self] . identifier[config] . identifier[has_section] ( identifier[section] )
def has_section(self, section) -> bool:
    """Return True if the backing configuration file has *section*.

    Re-reads ``self.filepath`` before answering so stale in-memory
    state does not mask on-disk changes.

    Parameters
    ----------
    section : string
        Section name to look up.

    Returns
    -------
    boolean
    """
    # Refresh the parser from disk, then query it.
    self.config.read(self.filepath)
    result = self.config.has_section(section)
    return result
def get_normalized_grid(self):
    """
    Analyzes subcell structure of the parsed grid.

    For every row of ``self._grid``, groups each cell's text mentions by
    their vertical sub-row coordinate (``yc_grid``), keeping each cell's
    mentions in reading order.

    Returns
    -------
    list of defaultdict
        One mapping per grid row: yc_grid -> [mentions] on that sub-row.
    """
    log = logging.getLogger(__name__)

    # Resolve multirow mentions, TODO: validate against all PDFs
    mega_rows = []
    for row in self._grid:
        # maps yc_grid -> [mentions] across all cells of this row
        subrow_across_cell = defaultdict(list)
        for cell in row:
            # Keep cell text in reading order
            cell.texts.sort(key=cmp_to_key(reading_order))
            log.debug('=' * 50)
            for mention in cell.texts:
                subrow_across_cell[mention.yc_grid].append(mention)
        log.debug(pformat(dict(subrow_across_cell)))
        mega_rows.append(subrow_across_cell)

    # Multiline paragraph check
    # Subrow/Subcolumn
    return mega_rows
def function[get_normalized_grid, parameter[self]]: constant[ Analyzes subcell structure ] variable[log] assign[=] call[name[logging].getLogger, parameter[name[__name__]]] variable[mega_rows] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b13a97e0>, <ast.Name object at 0x7da1b13aab60>]]] in starred[call[name[enumerate], parameter[name[self]._grid]]] begin[:] variable[subrow_across_cell] assign[=] call[name[defaultdict], parameter[name[list]]] for taget[tuple[[<ast.Name object at 0x7da1b13aa380>, <ast.Name object at 0x7da1b13ab2b0>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:] call[name[cell].texts.sort, parameter[]] call[name[log].debug, parameter[binary_operation[constant[=] * constant[50]]]] for taget[name[m]] in starred[name[cell].texts] begin[:] call[call[name[subrow_across_cell]][name[m].yc_grid].append, parameter[name[m]]] call[name[log].debug, parameter[call[name[pformat], parameter[call[name[dict], parameter[name[subrow_across_cell]]]]]]] call[name[mega_rows].append, parameter[name[subrow_across_cell]]] return[name[mega_rows]]
keyword[def] identifier[get_normalized_grid] ( identifier[self] ): literal[string] identifier[log] = identifier[logging] . identifier[getLogger] ( identifier[__name__] ) identifier[mega_rows] =[] keyword[for] identifier[row_id] , identifier[row] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_grid] ): identifier[subrow_across_cell] = identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[col_id] , identifier[cell] keyword[in] identifier[enumerate] ( identifier[row] ): identifier[cell] . identifier[texts] . identifier[sort] ( identifier[key] = identifier[cmp_to_key] ( identifier[reading_order] )) identifier[log] . identifier[debug] ( literal[string] * literal[int] ) keyword[for] identifier[m] keyword[in] identifier[cell] . identifier[texts] : identifier[subrow_across_cell] [ identifier[m] . identifier[yc_grid] ]. identifier[append] ( identifier[m] ) identifier[log] . identifier[debug] ( identifier[pformat] ( identifier[dict] ( identifier[subrow_across_cell] ))) identifier[mega_rows] . identifier[append] ( identifier[subrow_across_cell] ) keyword[return] identifier[mega_rows]
def get_normalized_grid(self):
    """
    Analyzes subcell structure.

    Builds, for each row of ``self._grid``, a mapping from the vertical
    sub-row coordinate ``yc_grid`` to the mentions sitting on it, with
    every cell's mentions sorted into reading order first.

    Returns
    -------
    list of defaultdict
        One ``yc_grid -> [mentions]`` mapping per grid row.
    """
    log = logging.getLogger(__name__)
    mega_rows = []
    for row in self._grid:
        by_subrow = defaultdict(list)  # yc_grid -> mentions on that sub-row
        for cell in row:
            # Sort this cell's mentions into reading order before grouping.
            cell.texts.sort(key=cmp_to_key(reading_order))
            log.debug('=' * 50)
            for mention in cell.texts:
                by_subrow[mention.yc_grid].append(mention)
        log.debug(pformat(dict(by_subrow)))
        mega_rows.append(by_subrow)
    return mega_rows
def stringPropertyNames(self):
    r"""
    Returns a `set` of all keys in the `Properties` object and its
    `defaults` (and its `defaults`\ ’s `defaults`, etc.)

    :rtype: `set` of text strings
    """
    names = set(self.data)
    if self.defaults is not None:
        # Recurse through the defaults chain and merge its keys in.
        names |= self.defaults.stringPropertyNames()
    return names
def function[stringPropertyNames, parameter[self]]: constant[ Returns a `set` of all keys in the `Properties` object and its `defaults` (and its `defaults`\ ’s `defaults`, etc.) :rtype: `set` of text strings ] variable[names] assign[=] call[name[set], parameter[name[self].data]] if compare[name[self].defaults is_not constant[None]] begin[:] call[name[names].update, parameter[call[name[self].defaults.stringPropertyNames, parameter[]]]] return[name[names]]
keyword[def] identifier[stringPropertyNames] ( identifier[self] ): literal[string] identifier[names] = identifier[set] ( identifier[self] . identifier[data] ) keyword[if] identifier[self] . identifier[defaults] keyword[is] keyword[not] keyword[None] : identifier[names] . identifier[update] ( identifier[self] . identifier[defaults] . identifier[stringPropertyNames] ()) keyword[return] identifier[names]
def stringPropertyNames(self):
    r"""
    Returns a `set` of all keys in the `Properties` object and its
    `defaults` (and its `defaults`\ ’s `defaults`, etc.)

    :rtype: `set` of text strings
    """
    parent = self.defaults
    own = set(self.data)
    # No defaults chain: just this object's keys; otherwise union them
    # with the keys collected recursively from the chain.
    return own if parent is None else own | parent.stringPropertyNames()
def service_executions(self, name=None, pk=None, scope=None, service=None, **kwargs):
    """
    Retrieve Service Executions.

    If additional `keyword=value` arguments are provided, these are added to the
    request parameters. Please refer to the documentation of the KE-chain API for
    additional query parameters.

    :param name: (optional) name to limit the search for
    :type name: basestring or None
    :param pk: (optional) primary key or id (UUID) of the service to search for
    :type pk: basestring or None
    :param scope: (optional) id (UUID) of the scope to search in
    :type scope: basestring or None
    :param service: (optional) service UUID to filter on
    :type service: basestring or None
    :param kwargs: (optional) additional search keyword arguments
    :type kwargs: dict or None
    :return: list of :class:`models.ServiceExecution` objects
    :raises NotFoundError: When no `ServiceExecution` object is found
    """
    request_params = {
        'name': name,
        'id': pk,
        'service': service,
        'scope': scope
    }
    if kwargs:
        request_params.update(**kwargs)

    response = self._request('GET', self._build_url('service_executions'),
                             params=request_params)

    if response.status_code != requests.codes.ok:  # pragma: no cover
        raise NotFoundError("Could not retrieve service executions")

    data = response.json()
    # BUG FIX (cosmetic): local variable was misspelled 'service_exeuction'.
    return [ServiceExecution(service_execution, client=self)
            for service_execution in data['results']]
def function[service_executions, parameter[self, name, pk, scope, service]]: constant[ Retrieve Service Executions. If additional `keyword=value` arguments are provided, these are added to the request parameters. Please refer to the documentation of the KE-chain API for additional query parameters. :param name: (optional) name to limit the search for :type name: basestring or None :param pk: (optional) primary key or id (UUID) of the service to search for :type pk: basestring or None :param scope: (optional) id (UUID) of the scope to search in :type scope: basestring or None :param service: (optional) service UUID to filter on :type service: basestring or None :param kwargs: (optional) additional search keyword arguments :type kwargs: dict or None :return: a single :class:`models.ServiceExecution` object :raises NotFoundError: When no `ServiceExecution` object is found ] variable[request_params] assign[=] dictionary[[<ast.Constant object at 0x7da20c76ce80>, <ast.Constant object at 0x7da20c76c3a0>, <ast.Constant object at 0x7da20c76f910>, <ast.Constant object at 0x7da20c76f3a0>], [<ast.Name object at 0x7da20c76d450>, <ast.Name object at 0x7da20c76eaa0>, <ast.Name object at 0x7da20c76f580>, <ast.Name object at 0x7da20c76f160>]] if name[kwargs] begin[:] call[name[request_params].update, parameter[]] variable[r] assign[=] call[name[self]._request, parameter[constant[GET], call[name[self]._build_url, parameter[constant[service_executions]]]]] if compare[name[r].status_code not_equal[!=] name[requests].codes.ok] begin[:] <ast.Raise object at 0x7da20c76fa60> variable[data] assign[=] call[name[r].json, parameter[]] return[<ast.ListComp object at 0x7da20c76e140>]
keyword[def] identifier[service_executions] ( identifier[self] , identifier[name] = keyword[None] , identifier[pk] = keyword[None] , identifier[scope] = keyword[None] , identifier[service] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[request_params] ={ literal[string] : identifier[name] , literal[string] : identifier[pk] , literal[string] : identifier[service] , literal[string] : identifier[scope] } keyword[if] identifier[kwargs] : identifier[request_params] . identifier[update] (** identifier[kwargs] ) identifier[r] = identifier[self] . identifier[_request] ( literal[string] , identifier[self] . identifier[_build_url] ( literal[string] ), identifier[params] = identifier[request_params] ) keyword[if] identifier[r] . identifier[status_code] != identifier[requests] . identifier[codes] . identifier[ok] : keyword[raise] identifier[NotFoundError] ( literal[string] ) identifier[data] = identifier[r] . identifier[json] () keyword[return] [ identifier[ServiceExecution] ( identifier[service_exeuction] , identifier[client] = identifier[self] ) keyword[for] identifier[service_exeuction] keyword[in] identifier[data] [ literal[string] ]]
def service_executions(self, name=None, pk=None, scope=None, service=None, **kwargs): """ Retrieve Service Executions. If additional `keyword=value` arguments are provided, these are added to the request parameters. Please refer to the documentation of the KE-chain API for additional query parameters. :param name: (optional) name to limit the search for :type name: basestring or None :param pk: (optional) primary key or id (UUID) of the service to search for :type pk: basestring or None :param scope: (optional) id (UUID) of the scope to search in :type scope: basestring or None :param service: (optional) service UUID to filter on :type service: basestring or None :param kwargs: (optional) additional search keyword arguments :type kwargs: dict or None :return: a single :class:`models.ServiceExecution` object :raises NotFoundError: When no `ServiceExecution` object is found """ request_params = {'name': name, 'id': pk, 'service': service, 'scope': scope} if kwargs: request_params.update(**kwargs) # depends on [control=['if'], data=[]] r = self._request('GET', self._build_url('service_executions'), params=request_params) if r.status_code != requests.codes.ok: # pragma: no cover raise NotFoundError('Could not retrieve service executions') # depends on [control=['if'], data=[]] data = r.json() return [ServiceExecution(service_exeuction, client=self) for service_exeuction in data['results']]
def get_settings():
    """
    This function returns a dict containing default settings
    """
    conf = getattr(settings, 'CLAMAV_UPLOAD', {})
    return {
        'CONTENT_TYPE_CHECK_ENABLED': conf.get('CONTENT_TYPE_CHECK_ENABLED', False),
        # LAST_HANDLER is not a user configurable option; we return
        # it with the settings dict simply because it's convenient.
        'LAST_HANDLER': getattr(settings, 'FILE_UPLOAD_HANDLERS')[-1],
    }
def function[get_settings, parameter[]]: constant[ This function returns a dict containing default settings ] variable[s] assign[=] call[name[getattr], parameter[name[settings], constant[CLAMAV_UPLOAD], dictionary[[], []]]] variable[s] assign[=] dictionary[[<ast.Constant object at 0x7da1b2446410>, <ast.Constant object at 0x7da1b2446500>], [<ast.Call object at 0x7da1b24463e0>, <ast.Subscript object at 0x7da1b2445e70>]] return[name[s]]
keyword[def] identifier[get_settings] (): literal[string] identifier[s] = identifier[getattr] ( identifier[settings] , literal[string] ,{}) identifier[s] ={ literal[string] : identifier[s] . identifier[get] ( literal[string] , keyword[False] ), literal[string] : identifier[getattr] ( identifier[settings] , literal[string] )[- literal[int] ] } keyword[return] identifier[s]
def get_settings(): """ This function returns a dict containing default settings """ s = getattr(settings, 'CLAMAV_UPLOAD', {}) # LAST_HANDLER is not a user configurable option; we return # it with the settings dict simply because it's convenient. s = {'CONTENT_TYPE_CHECK_ENABLED': s.get('CONTENT_TYPE_CHECK_ENABLED', False), 'LAST_HANDLER': getattr(settings, 'FILE_UPLOAD_HANDLERS')[-1]} return s
def remove_sbi_id(self, sbi_id):
    """Remove an SBI Identifier.

    Removes ``sbi_id`` from this object's list of Scheduling Block
    Instance ids and writes the updated list back to the ``sbi_ids``
    field of the backing database hash.

    :param sbi_id: SBI identifier to remove
    :raises ValueError: if ``sbi_id`` is not in the current list
        (raised by ``list.remove``)
    """
    # NOTE(review): self.sbi_ids presumably returns a fresh list read from
    # the DB, so the mutation below is local until persisted -- confirm.
    sbi_ids = self.sbi_ids
    sbi_ids.remove(sbi_id)
    DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)
def function[remove_sbi_id, parameter[self, sbi_id]]: constant[Remove an SBI Identifier.] variable[sbi_ids] assign[=] name[self].sbi_ids call[name[sbi_ids].remove, parameter[name[sbi_id]]] call[name[DB].set_hash_value, parameter[name[self]._key, constant[sbi_ids], name[sbi_ids]]]
keyword[def] identifier[remove_sbi_id] ( identifier[self] , identifier[sbi_id] ): literal[string] identifier[sbi_ids] = identifier[self] . identifier[sbi_ids] identifier[sbi_ids] . identifier[remove] ( identifier[sbi_id] ) identifier[DB] . identifier[set_hash_value] ( identifier[self] . identifier[_key] , literal[string] , identifier[sbi_ids] )
def remove_sbi_id(self, sbi_id): """Remove an SBI Identifier.""" sbi_ids = self.sbi_ids sbi_ids.remove(sbi_id) DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)
def encry_decry_chunk(chunk, key, algo, bool_encry, assoc_data):
    """
    When bool_encry is True, encrypt a chunk of the file with the key and a randomly generated
    nonce. When it is False, the function extract the nonce from the cipherchunk
    (first 16 bytes), and decrypt the rest of the chunk.

    :param chunk: a chunk in bytes to encrypt or decrypt.
    :param key: a 32 bytes key in bytes.
    :param algo: a string of algorithm. Can be "srp" , "AES" or "twf"
    :param bool_encry: if bool_encry is True, chunk is encrypted. Else, it will be decrypted.
    :param assoc_data: bytes string of additional data for GCM Authentication.
    :return: if bool_encry is True, corresponding nonce + cipherchunk else, a decrypted chunk.
    """
    # Botan cipher engine; `encrypt` flag selects encryption vs decryption mode.
    engine = botan.cipher(algo=algo, encrypt=bool_encry)
    engine.set_key(key=key)
    engine.set_assoc_data(assoc_data)
    if bool_encry is True:
        # Fresh nonce per chunk, prepended to the ciphertext so that
        # decryption can recover it from the stream.
        nonce = generate_nonce_timestamp()
        engine.start(nonce=nonce)
        return nonce + engine.finish(chunk)
    else:
        # First __nonce_length__ bytes were written by the encryption path above.
        nonce = chunk[:__nonce_length__]
        # Ciphertext body: plaintext chunk plus the trailing GCM tag.
        encryptedchunk = chunk[__nonce_length__:__nonce_length__ + __gcmtag_length__ + __chunk_size__]
        engine.start(nonce=nonce)
        decryptedchunk = engine.finish(encryptedchunk)
        # An empty result from finish() signals a failed GCM tag check
        # (wrong key/passphrase or tampered data).
        if decryptedchunk == b"":
            raise Exception("Integrity failure: Invalid passphrase or corrupted data")
        return decryptedchunk
def function[encry_decry_chunk, parameter[chunk, key, algo, bool_encry, assoc_data]]: constant[ When bool_encry is True, encrypt a chunk of the file with the key and a randomly generated nonce. When it is False, the function extract the nonce from the cipherchunk (first 16 bytes), and decrypt the rest of the chunk. :param chunk: a chunk in bytes to encrypt or decrypt. :param key: a 32 bytes key in bytes. :param algo: a string of algorithm. Can be "srp" , "AES" or "twf" :param bool_encry: if bool_encry is True, chunk is encrypted. Else, it will be decrypted. :param assoc_data: bytes string of additional data for GCM Authentication. :return: if bool_encry is True, corresponding nonce + cipherchunk else, a decrypted chunk. ] variable[engine] assign[=] call[name[botan].cipher, parameter[]] call[name[engine].set_key, parameter[]] call[name[engine].set_assoc_data, parameter[name[assoc_data]]] if compare[name[bool_encry] is constant[True]] begin[:] variable[nonce] assign[=] call[name[generate_nonce_timestamp], parameter[]] call[name[engine].start, parameter[]] return[binary_operation[name[nonce] + call[name[engine].finish, parameter[name[chunk]]]]]
keyword[def] identifier[encry_decry_chunk] ( identifier[chunk] , identifier[key] , identifier[algo] , identifier[bool_encry] , identifier[assoc_data] ): literal[string] identifier[engine] = identifier[botan] . identifier[cipher] ( identifier[algo] = identifier[algo] , identifier[encrypt] = identifier[bool_encry] ) identifier[engine] . identifier[set_key] ( identifier[key] = identifier[key] ) identifier[engine] . identifier[set_assoc_data] ( identifier[assoc_data] ) keyword[if] identifier[bool_encry] keyword[is] keyword[True] : identifier[nonce] = identifier[generate_nonce_timestamp] () identifier[engine] . identifier[start] ( identifier[nonce] = identifier[nonce] ) keyword[return] identifier[nonce] + identifier[engine] . identifier[finish] ( identifier[chunk] ) keyword[else] : identifier[nonce] = identifier[chunk] [: identifier[__nonce_length__] ] identifier[encryptedchunk] = identifier[chunk] [ identifier[__nonce_length__] : identifier[__nonce_length__] + identifier[__gcmtag_length__] + identifier[__chunk_size__] ] identifier[engine] . identifier[start] ( identifier[nonce] = identifier[nonce] ) identifier[decryptedchunk] = identifier[engine] . identifier[finish] ( identifier[encryptedchunk] ) keyword[if] identifier[decryptedchunk] == literal[string] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[return] identifier[decryptedchunk]
def encry_decry_chunk(chunk, key, algo, bool_encry, assoc_data): """ When bool_encry is True, encrypt a chunk of the file with the key and a randomly generated nonce. When it is False, the function extract the nonce from the cipherchunk (first 16 bytes), and decrypt the rest of the chunk. :param chunk: a chunk in bytes to encrypt or decrypt. :param key: a 32 bytes key in bytes. :param algo: a string of algorithm. Can be "srp" , "AES" or "twf" :param bool_encry: if bool_encry is True, chunk is encrypted. Else, it will be decrypted. :param assoc_data: bytes string of additional data for GCM Authentication. :return: if bool_encry is True, corresponding nonce + cipherchunk else, a decrypted chunk. """ engine = botan.cipher(algo=algo, encrypt=bool_encry) engine.set_key(key=key) engine.set_assoc_data(assoc_data) if bool_encry is True: nonce = generate_nonce_timestamp() engine.start(nonce=nonce) return nonce + engine.finish(chunk) # depends on [control=['if'], data=[]] else: nonce = chunk[:__nonce_length__] encryptedchunk = chunk[__nonce_length__:__nonce_length__ + __gcmtag_length__ + __chunk_size__] engine.start(nonce=nonce) decryptedchunk = engine.finish(encryptedchunk) if decryptedchunk == b'': raise Exception('Integrity failure: Invalid passphrase or corrupted data') # depends on [control=['if'], data=[]] return decryptedchunk
def get_padding_value(dtype):
    """Returns the padding value given a dtype."""
    # Unsigned integer dtypes pad with a zero of the matching numpy scalar
    # type; float32 pads with a Python float, everything else with int 0.
    if dtype == np.uint8:
        return np.uint8(0)
    if dtype == np.uint16:
        return np.uint16(0)
    if dtype == np.float32:
        return 0.0
    return 0
def function[get_padding_value, parameter[dtype]]: constant[Returns the padding value given a dtype.] variable[padding_value] assign[=] constant[None] if compare[name[dtype] equal[==] name[np].uint8] begin[:] variable[padding_value] assign[=] call[name[np].uint8, parameter[constant[0]]] assert[compare[name[padding_value] is_not constant[None]]] return[name[padding_value]]
keyword[def] identifier[get_padding_value] ( identifier[dtype] ): literal[string] identifier[padding_value] = keyword[None] keyword[if] identifier[dtype] == identifier[np] . identifier[uint8] : identifier[padding_value] = identifier[np] . identifier[uint8] ( literal[int] ) keyword[elif] identifier[dtype] == identifier[np] . identifier[uint16] : identifier[padding_value] = identifier[np] . identifier[uint16] ( literal[int] ) keyword[elif] identifier[dtype] == identifier[np] . identifier[float32] : identifier[padding_value] = literal[int] keyword[else] : identifier[padding_value] = literal[int] keyword[assert] identifier[padding_value] keyword[is] keyword[not] keyword[None] keyword[return] identifier[padding_value]
def get_padding_value(dtype): """Returns the padding value given a dtype.""" padding_value = None if dtype == np.uint8: padding_value = np.uint8(0) # depends on [control=['if'], data=[]] elif dtype == np.uint16: padding_value = np.uint16(0) # depends on [control=['if'], data=[]] elif dtype == np.float32: padding_value = 0.0 # depends on [control=['if'], data=[]] else: padding_value = 0 assert padding_value is not None return padding_value
def _build_slack_message(self):
    """
    Construct the Slack message.

    All relevant parameters are combined here to a valid Slack json message

    :return: Slack message (str) to send
    """
    payload = {}
    # Optional string-valued fields copied through verbatim when truthy.
    for field, value in (('channel', self.channel),
                         ('username', self.username),
                         ('icon_emoji', self.icon_emoji)):
        if value:
            payload[field] = value
    if self.link_names:
        payload['link_names'] = 1
    if self.attachments:
        payload['attachments'] = self.attachments
    # 'text' is always present and is set last to keep the original
    # key insertion order in the serialized JSON.
    payload['text'] = self.message
    return json.dumps(payload)
def function[_build_slack_message, parameter[self]]: constant[ Construct the Slack message. All relevant parameters are combined here to a valid Slack json message :return: Slack message (str) to send ] variable[cmd] assign[=] dictionary[[], []] if name[self].channel begin[:] call[name[cmd]][constant[channel]] assign[=] name[self].channel if name[self].username begin[:] call[name[cmd]][constant[username]] assign[=] name[self].username if name[self].icon_emoji begin[:] call[name[cmd]][constant[icon_emoji]] assign[=] name[self].icon_emoji if name[self].link_names begin[:] call[name[cmd]][constant[link_names]] assign[=] constant[1] if name[self].attachments begin[:] call[name[cmd]][constant[attachments]] assign[=] name[self].attachments call[name[cmd]][constant[text]] assign[=] name[self].message return[call[name[json].dumps, parameter[name[cmd]]]]
keyword[def] identifier[_build_slack_message] ( identifier[self] ): literal[string] identifier[cmd] ={} keyword[if] identifier[self] . identifier[channel] : identifier[cmd] [ literal[string] ]= identifier[self] . identifier[channel] keyword[if] identifier[self] . identifier[username] : identifier[cmd] [ literal[string] ]= identifier[self] . identifier[username] keyword[if] identifier[self] . identifier[icon_emoji] : identifier[cmd] [ literal[string] ]= identifier[self] . identifier[icon_emoji] keyword[if] identifier[self] . identifier[link_names] : identifier[cmd] [ literal[string] ]= literal[int] keyword[if] identifier[self] . identifier[attachments] : identifier[cmd] [ literal[string] ]= identifier[self] . identifier[attachments] identifier[cmd] [ literal[string] ]= identifier[self] . identifier[message] keyword[return] identifier[json] . identifier[dumps] ( identifier[cmd] )
def _build_slack_message(self): """ Construct the Slack message. All relevant parameters are combined here to a valid Slack json message :return: Slack message (str) to send """ cmd = {} if self.channel: cmd['channel'] = self.channel # depends on [control=['if'], data=[]] if self.username: cmd['username'] = self.username # depends on [control=['if'], data=[]] if self.icon_emoji: cmd['icon_emoji'] = self.icon_emoji # depends on [control=['if'], data=[]] if self.link_names: cmd['link_names'] = 1 # depends on [control=['if'], data=[]] if self.attachments: cmd['attachments'] = self.attachments # depends on [control=['if'], data=[]] cmd['text'] = self.message return json.dumps(cmd)
def query_subscriptions(self, subscription_query):
    """QuerySubscriptions.
    [Preview API] Query for subscriptions. A subscription is returned if it matches one or more of the specified conditions.
    :param :class:`<SubscriptionQuery> <azure.devops.v5_0.notification.models.SubscriptionQuery>` subscription_query:
    :rtype: [NotificationSubscription]
    """
    # Serialize the query model into the JSON request body.
    content = self._serialize.body(subscription_query, 'SubscriptionQuery')
    # POST against the fixed subscription-query endpoint (preview API v5.0).
    response = self._send(http_method='POST',
                          location_id='6864db85-08c0-4006-8e8e-cc1bebe31675',
                          version='5.0-preview.1',
                          content=content)
    # The service wraps results in a collection envelope; unwrap, then
    # deserialize into a list of NotificationSubscription models.
    return self._deserialize('[NotificationSubscription]', self._unwrap_collection(response))
def function[query_subscriptions, parameter[self, subscription_query]]: constant[QuerySubscriptions. [Preview API] Query for subscriptions. A subscription is returned if it matches one or more of the specified conditions. :param :class:`<SubscriptionQuery> <azure.devops.v5_0.notification.models.SubscriptionQuery>` subscription_query: :rtype: [NotificationSubscription] ] variable[content] assign[=] call[name[self]._serialize.body, parameter[name[subscription_query], constant[SubscriptionQuery]]] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[[NotificationSubscription]], call[name[self]._unwrap_collection, parameter[name[response]]]]]]
keyword[def] identifier[query_subscriptions] ( identifier[self] , identifier[subscription_query] ): literal[string] identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[subscription_query] , literal[string] ) identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[content] = identifier[content] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] ))
def query_subscriptions(self, subscription_query): """QuerySubscriptions. [Preview API] Query for subscriptions. A subscription is returned if it matches one or more of the specified conditions. :param :class:`<SubscriptionQuery> <azure.devops.v5_0.notification.models.SubscriptionQuery>` subscription_query: :rtype: [NotificationSubscription] """ content = self._serialize.body(subscription_query, 'SubscriptionQuery') response = self._send(http_method='POST', location_id='6864db85-08c0-4006-8e8e-cc1bebe31675', version='5.0-preview.1', content=content) return self._deserialize('[NotificationSubscription]', self._unwrap_collection(response))
def _threadsafe_get_connection(self):
    """Get a connection from the pool."""
    self._lock.acquire()
    try:
        # Hand out connections round-robin, starting at the saved cursor.
        index = self._nextConnection
        connection = PooledDBConnection(self, self._connections[index])
        # Advance the cursor, wrapping around at the pool size.
        self._nextConnection = (index + 1) % len(self._connections)
        return connection
    finally:
        self._lock.release()
def function[_threadsafe_get_connection, parameter[self]]: constant[Get a connection from the pool.] call[name[self]._lock.acquire, parameter[]] <ast.Try object at 0x7da20c6e5630>
keyword[def] identifier[_threadsafe_get_connection] ( identifier[self] ): literal[string] identifier[self] . identifier[_lock] . identifier[acquire] () keyword[try] : identifier[next] = identifier[self] . identifier[_nextConnection] identifier[con] = identifier[PooledDBConnection] ( identifier[self] , identifier[self] . identifier[_connections] [ identifier[next] ]) identifier[next] += literal[int] keyword[if] identifier[next] >= identifier[len] ( identifier[self] . identifier[_connections] ): identifier[next] = literal[int] identifier[self] . identifier[_nextConnection] = identifier[next] keyword[return] identifier[con] keyword[finally] : identifier[self] . identifier[_lock] . identifier[release] ()
def _threadsafe_get_connection(self): """Get a connection from the pool.""" self._lock.acquire() try: next = self._nextConnection con = PooledDBConnection(self, self._connections[next]) next += 1 if next >= len(self._connections): next = 0 # depends on [control=['if'], data=['next']] self._nextConnection = next return con # depends on [control=['try'], data=[]] finally: self._lock.release()
def make_sentence(list_words):
    """
    Return a sentence built from a list of words.

    For lists of more than six words a comma is inserted near the middle,
    at a randomly jittered position, to make the sentence look more
    natural.  The caller's list is left unmodified.

    :param list list_words: list of words
    :returns: capitalized sentence terminated with a period
    :rtype: str
    """
    # Work on a copy: the original implementation mutated the caller's
    # list via insert(), which is a surprising side effect.
    words = list(list_words)
    if len(words) > 6:
        # Offset in [-2, 1] keeps the comma near (never at) the edges.
        words.insert(len(words) // 2 + random.choice(range(-2, 2)), ',')
    # Joining produces "word ," around the inserted comma; collapse it.
    sentence = ' '.join(words).replace(' ,', ',')
    return sentence.capitalize() + '.'
def function[make_sentence, parameter[list_words]]: constant[ Return a sentence from list of words. :param list list_words: list of words :returns: sentence :rtype: str ] variable[lw_len] assign[=] call[name[len], parameter[name[list_words]]] if compare[name[lw_len] greater[>] constant[6]] begin[:] call[name[list_words].insert, parameter[binary_operation[binary_operation[name[lw_len] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] + call[name[random].choice, parameter[call[name[range], parameter[<ast.UnaryOp object at 0x7da18bcca800>, constant[2]]]]]], constant[,]]] variable[sentence] assign[=] call[call[constant[ ].join, parameter[name[list_words]]].replace, parameter[constant[ ,], constant[,]]] return[binary_operation[call[name[sentence].capitalize, parameter[]] + constant[.]]]
keyword[def] identifier[make_sentence] ( identifier[list_words] ): literal[string] identifier[lw_len] = identifier[len] ( identifier[list_words] ) keyword[if] identifier[lw_len] > literal[int] : identifier[list_words] . identifier[insert] ( identifier[lw_len] // literal[int] + identifier[random] . identifier[choice] ( identifier[range] (- literal[int] , literal[int] )), literal[string] ) identifier[sentence] = literal[string] . identifier[join] ( identifier[list_words] ). identifier[replace] ( literal[string] , literal[string] ) keyword[return] identifier[sentence] . identifier[capitalize] ()+ literal[string]
def make_sentence(list_words): """ Return a sentence from list of words. :param list list_words: list of words :returns: sentence :rtype: str """ lw_len = len(list_words) if lw_len > 6: list_words.insert(lw_len // 2 + random.choice(range(-2, 2)), ',') # depends on [control=['if'], data=['lw_len']] sentence = ' '.join(list_words).replace(' ,', ',') return sentence.capitalize() + '.'
def import_tags(self, tag_nodes):
    """
    Import all the tags from 'wp:tag' nodes, because tags in 'item'
    nodes are not necessarily all the tags. Use only the nicename,
    because it's like a slug and the true tag name may be not valid
    for url usage.
    """
    self.write_out(self.style.STEP('- Importing tags\n'))
    for tag_node in tag_nodes:
        # Read the slug element, truncated to 50 characters (presumably
        # the Tag model's name max_length -- confirm against the model).
        tag_name = tag_node.find(
            '{%s}tag_slug' % WP_NS).text[:50]
        self.write_out('> %s... ' % tag_name)
        # get_or_create avoids duplicate tags across repeated imports.
        Tag.objects.get_or_create(name=tag_name)
        self.write_out(self.style.ITEM('OK\n'))
def function[import_tags, parameter[self, tag_nodes]]: constant[ Import all the tags form 'wp:tag' nodes, because tags in 'item' nodes are not necessarily all the tags, then use only the nicename, because it's like a slug and the true tag name may be not valid for url usage. ] call[name[self].write_out, parameter[call[name[self].style.STEP, parameter[constant[- Importing tags ]]]]] for taget[name[tag_node]] in starred[name[tag_nodes]] begin[:] variable[tag_name] assign[=] call[call[name[tag_node].find, parameter[binary_operation[constant[{%s}tag_slug] <ast.Mod object at 0x7da2590d6920> name[WP_NS]]]].text][<ast.Slice object at 0x7da1b244c550>] call[name[self].write_out, parameter[binary_operation[constant[> %s... ] <ast.Mod object at 0x7da2590d6920> name[tag_name]]]] call[name[Tag].objects.get_or_create, parameter[]] call[name[self].write_out, parameter[call[name[self].style.ITEM, parameter[constant[OK ]]]]]
keyword[def] identifier[import_tags] ( identifier[self] , identifier[tag_nodes] ): literal[string] identifier[self] . identifier[write_out] ( identifier[self] . identifier[style] . identifier[STEP] ( literal[string] )) keyword[for] identifier[tag_node] keyword[in] identifier[tag_nodes] : identifier[tag_name] = identifier[tag_node] . identifier[find] ( literal[string] % identifier[WP_NS] ). identifier[text] [: literal[int] ] identifier[self] . identifier[write_out] ( literal[string] % identifier[tag_name] ) identifier[Tag] . identifier[objects] . identifier[get_or_create] ( identifier[name] = identifier[tag_name] ) identifier[self] . identifier[write_out] ( identifier[self] . identifier[style] . identifier[ITEM] ( literal[string] ))
def import_tags(self, tag_nodes): """ Import all the tags form 'wp:tag' nodes, because tags in 'item' nodes are not necessarily all the tags, then use only the nicename, because it's like a slug and the true tag name may be not valid for url usage. """ self.write_out(self.style.STEP('- Importing tags\n')) for tag_node in tag_nodes: tag_name = tag_node.find('{%s}tag_slug' % WP_NS).text[:50] self.write_out('> %s... ' % tag_name) Tag.objects.get_or_create(name=tag_name) self.write_out(self.style.ITEM('OK\n')) # depends on [control=['for'], data=['tag_node']]
def __precision(y_true, y_pred):
    '''
    Precision metric tolerant to unlabeled data in y_true, NA values
    are ignored for the precision calculation
    '''
    # Operate on copies so the caller's arrays are never modified.
    true_labels = np.copy(y_true)
    predictions = np.copy(y_pred)

    # precision = tp / (tp + fp): true negatives do not affect it, so
    # every unlabeled (NaN) position is zeroed in both arrays, turning
    # it into a true negative that the precision score ignores.
    unlabeled = np.isnan(true_labels)
    true_labels[unlabeled] = 0
    predictions[unlabeled] = 0

    return precision_score(true_labels, predictions)
def function[__precision, parameter[y_true, y_pred]]: constant[ Precision metric tolerant to unlabeled data in y_true, NA values are ignored for the precision calculation ] variable[y_true] assign[=] call[name[np].copy, parameter[name[y_true]]] variable[y_pred] assign[=] call[name[np].copy, parameter[name[y_pred]]] variable[is_nan] assign[=] call[name[np].isnan, parameter[name[y_true]]] call[name[y_true]][name[is_nan]] assign[=] constant[0] call[name[y_pred]][name[is_nan]] assign[=] constant[0] variable[precision] assign[=] call[name[precision_score], parameter[name[y_true], name[y_pred]]] return[name[precision]]
keyword[def] identifier[__precision] ( identifier[y_true] , identifier[y_pred] ): literal[string] identifier[y_true] = identifier[np] . identifier[copy] ( identifier[y_true] ) identifier[y_pred] = identifier[np] . identifier[copy] ( identifier[y_pred] ) identifier[is_nan] = identifier[np] . identifier[isnan] ( identifier[y_true] ) identifier[y_true] [ identifier[is_nan] ]= literal[int] identifier[y_pred] [ identifier[is_nan] ]= literal[int] identifier[precision] = identifier[precision_score] ( identifier[y_true] , identifier[y_pred] ) keyword[return] identifier[precision]
def __precision(y_true, y_pred): """ Precision metric tolerant to unlabeled data in y_true, NA values are ignored for the precision calculation """ # make copies of the arrays to avoid modifying the original ones y_true = np.copy(y_true) y_pred = np.copy(y_pred) # precision = tp/(tp+fp) # True nehatives do not affect precision value, so for every missing # value in y_true, replace it with 0 and also replace the value # in y_pred with 0 is_nan = np.isnan(y_true) y_true[is_nan] = 0 y_pred[is_nan] = 0 precision = precision_score(y_true, y_pred) return precision
def numeric(value, allow_empty = False, minimum = None, maximum = None, **kwargs):
    """Validate that ``value`` is a numeric value.

    :param value: The value to validate.

    :param allow_empty: If ``True``, returns :obj:`None <python:None>` if
      ``value`` is :obj:`None <python:None>`. If ``False``, raises an
      :class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
      if ``value`` is :obj:`None <python:None>`. Defaults to ``False``.
    :type allow_empty: :class:`bool <python:bool>`

    :param minimum: If supplied, will make sure that ``value`` is greater
      than or equal to this value.
    :type minimum: numeric

    :param maximum: If supplied, will make sure that ``value`` is less
      than or equal to this value.
    :type maximum: numeric

    :returns: ``value`` / :obj:`None <python:None>`

    :raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and
      ``allow_empty`` is ``False``
    :raises MinimumValueError: if ``minimum`` is supplied and ``value`` is
      less than the ``minimum``
    :raises MaximumValueError: if ``maximum`` is supplied and ``value`` is
      more than the ``maximum``
    :raises CannotCoerceError: if ``value`` cannot be coerced to a numeric
      form
    """
    # Normalize the bounds: absent bounds become +/- infinity; supplied
    # bounds are themselves coerced/validated through a recursive call.
    if maximum is None:
        maximum = POSITIVE_INFINITY
    else:
        maximum = numeric(maximum)
    if minimum is None:
        minimum = NEGATIVE_INFINITY
    else:
        minimum = numeric(minimum)

    if value is None and not allow_empty:
        raise errors.EmptyValueError('value (%s) was empty' % value)
    elif value is not None:
        if isinstance(value, str):
            # Strings are coerced via float_; non-numeric text raises.
            try:
                value = float_(value)
            except (ValueError, TypeError):
                raise errors.CannotCoerceError(
                    'value (%s) cannot be coerced to a numeric form' % value
                )
        elif not isinstance(value, numeric_types):
            raise errors.CannotCoerceError(
                'value (%s) is not a numeric type, was %s' % (value,
                                                              type(value))
            )

    # Range checks are skipped for an allowed empty (None) value.
    if value is not None and value > maximum:
        raise errors.MaximumValueError(
            'value (%s) exceeds maximum (%s)' % (value, maximum)
        )

    if value is not None and value < minimum:
        raise errors.MinimumValueError(
            'value (%s) less than minimum (%s)' % (value, minimum)
        )

    return value
def function[numeric, parameter[value, allow_empty, minimum, maximum]]: constant[Validate that ``value`` is a numeric value. :param value: The value to validate. :param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value`` is :obj:`None <python:None>`. If ``False``, raises an :class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if ``value`` is :obj:`None <python:None>`. Defaults to ``False``. :type allow_empty: :class:`bool <python:bool>` :param minimum: If supplied, will make sure that ``value`` is greater than or equal to this value. :type minimum: numeric :param maximum: If supplied, will make sure that ``value`` is less than or equal to this value. :type maximum: numeric :returns: ``value`` / :obj:`None <python:None>` :raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and ``allow_empty`` is ``False`` :raises MinimumValueError: if ``minimum`` is supplied and ``value`` is less than the ``minimum`` :raises MaximumValueError: if ``maximum`` is supplied and ``value`` is more than the ``maximum`` :raises CannotCoerceError: if ``value`` cannot be coerced to a numeric form ] if compare[name[maximum] is constant[None]] begin[:] variable[maximum] assign[=] name[POSITIVE_INFINITY] if compare[name[minimum] is constant[None]] begin[:] variable[minimum] assign[=] name[NEGATIVE_INFINITY] if <ast.BoolOp object at 0x7da1b060b400> begin[:] <ast.Raise object at 0x7da1b0608730> if <ast.BoolOp object at 0x7da1b06f0790> begin[:] <ast.Raise object at 0x7da1b06f0e20> if <ast.BoolOp object at 0x7da1b06f2c50> begin[:] <ast.Raise object at 0x7da1b06f1e40> return[name[value]]
keyword[def] identifier[numeric] ( identifier[value] , identifier[allow_empty] = keyword[False] , identifier[minimum] = keyword[None] , identifier[maximum] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[if] identifier[maximum] keyword[is] keyword[None] : identifier[maximum] = identifier[POSITIVE_INFINITY] keyword[else] : identifier[maximum] = identifier[numeric] ( identifier[maximum] ) keyword[if] identifier[minimum] keyword[is] keyword[None] : identifier[minimum] = identifier[NEGATIVE_INFINITY] keyword[else] : identifier[minimum] = identifier[numeric] ( identifier[minimum] ) keyword[if] identifier[value] keyword[is] keyword[None] keyword[and] keyword[not] identifier[allow_empty] : keyword[raise] identifier[errors] . identifier[EmptyValueError] ( literal[string] % identifier[value] ) keyword[elif] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ): keyword[try] : identifier[value] = identifier[float_] ( identifier[value] ) keyword[except] ( identifier[ValueError] , identifier[TypeError] ): keyword[raise] identifier[errors] . identifier[CannotCoerceError] ( literal[string] % identifier[value] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[value] , identifier[numeric_types] ): keyword[raise] identifier[errors] . identifier[CannotCoerceError] ( literal[string] %( identifier[value] , identifier[type] ( identifier[value] )) ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] keyword[and] identifier[value] > identifier[maximum] : keyword[raise] identifier[errors] . identifier[MaximumValueError] ( literal[string] %( identifier[value] , identifier[maximum] ) ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] keyword[and] identifier[value] < identifier[minimum] : keyword[raise] identifier[errors] . 
identifier[MinimumValueError] ( literal[string] %( identifier[value] , identifier[minimum] ) ) keyword[return] identifier[value]
def numeric(value, allow_empty=False, minimum=None, maximum=None, **kwargs): """Validate that ``value`` is a numeric value. :param value: The value to validate. :param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value`` is :obj:`None <python:None>`. If ``False``, raises an :class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if ``value`` is :obj:`None <python:None>`. Defaults to ``False``. :type allow_empty: :class:`bool <python:bool>` :param minimum: If supplied, will make sure that ``value`` is greater than or equal to this value. :type minimum: numeric :param maximum: If supplied, will make sure that ``value`` is less than or equal to this value. :type maximum: numeric :returns: ``value`` / :obj:`None <python:None>` :raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and ``allow_empty`` is ``False`` :raises MinimumValueError: if ``minimum`` is supplied and ``value`` is less than the ``minimum`` :raises MaximumValueError: if ``maximum`` is supplied and ``value`` is more than the ``maximum`` :raises CannotCoerceError: if ``value`` cannot be coerced to a numeric form """ if maximum is None: maximum = POSITIVE_INFINITY # depends on [control=['if'], data=['maximum']] else: maximum = numeric(maximum) if minimum is None: minimum = NEGATIVE_INFINITY # depends on [control=['if'], data=['minimum']] else: minimum = numeric(minimum) if value is None and (not allow_empty): raise errors.EmptyValueError('value (%s) was empty' % value) # depends on [control=['if'], data=[]] elif value is not None: if isinstance(value, str): try: value = float_(value) # depends on [control=['try'], data=[]] except (ValueError, TypeError): raise errors.CannotCoerceError('value (%s) cannot be coerced to a numeric form' % value) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif not isinstance(value, numeric_types): raise errors.CannotCoerceError('value (%s) is not a numeric type, was %s' % (value, 
type(value))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] if value is not None and value > maximum: raise errors.MaximumValueError('value (%s) exceeds maximum (%s)' % (value, maximum)) # depends on [control=['if'], data=[]] if value is not None and value < minimum: raise errors.MinimumValueError('value (%s) less than minimum (%s)' % (value, minimum)) # depends on [control=['if'], data=[]] return value
def read_tb(path): """ path : a tensorboard file OR a directory, where we will find all TB files of the form events.* """ import pandas import numpy as np from glob import glob from collections import defaultdict import tensorflow as tf if osp.isdir(path): fnames = glob(osp.join(path, "events.*")) elif osp.basename(path).startswith("events."): fnames = [path] else: raise NotImplementedError("Expected tensorboard file or directory containing them. Got %s"%path) tag2pairs = defaultdict(list) maxstep = 0 for fname in fnames: for summary in tf.train.summary_iterator(fname): if summary.step > 0: for v in summary.summary.value: pair = (summary.step, v.simple_value) tag2pairs[v.tag].append(pair) maxstep = max(summary.step, maxstep) data = np.empty((maxstep, len(tag2pairs))) data[:] = np.nan tags = sorted(tag2pairs.keys()) for (colidx,tag) in enumerate(tags): pairs = tag2pairs[tag] for (step, value) in pairs: data[step-1, colidx] = value return pandas.DataFrame(data, columns=tags)
def function[read_tb, parameter[path]]: constant[ path : a tensorboard file OR a directory, where we will find all TB files of the form events.* ] import module[pandas] import module[numpy] as alias[np] from relative_module[glob] import module[glob] from relative_module[collections] import module[defaultdict] import module[tensorflow] as alias[tf] if call[name[osp].isdir, parameter[name[path]]] begin[:] variable[fnames] assign[=] call[name[glob], parameter[call[name[osp].join, parameter[name[path], constant[events.*]]]]] variable[tag2pairs] assign[=] call[name[defaultdict], parameter[name[list]]] variable[maxstep] assign[=] constant[0] for taget[name[fname]] in starred[name[fnames]] begin[:] for taget[name[summary]] in starred[call[name[tf].train.summary_iterator, parameter[name[fname]]]] begin[:] if compare[name[summary].step greater[>] constant[0]] begin[:] for taget[name[v]] in starred[name[summary].summary.value] begin[:] variable[pair] assign[=] tuple[[<ast.Attribute object at 0x7da18bccb220>, <ast.Attribute object at 0x7da1b17932b0>]] call[call[name[tag2pairs]][name[v].tag].append, parameter[name[pair]]] variable[maxstep] assign[=] call[name[max], parameter[name[summary].step, name[maxstep]]] variable[data] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da1b1793bb0>, <ast.Call object at 0x7da1b1791de0>]]]] call[name[data]][<ast.Slice object at 0x7da1b17933d0>] assign[=] name[np].nan variable[tags] assign[=] call[name[sorted], parameter[call[name[tag2pairs].keys, parameter[]]]] for taget[tuple[[<ast.Name object at 0x7da1b17904c0>, <ast.Name object at 0x7da1b17938e0>]]] in starred[call[name[enumerate], parameter[name[tags]]]] begin[:] variable[pairs] assign[=] call[name[tag2pairs]][name[tag]] for taget[tuple[[<ast.Name object at 0x7da18bccb880>, <ast.Name object at 0x7da18bcc92d0>]]] in starred[name[pairs]] begin[:] call[name[data]][tuple[[<ast.BinOp object at 0x7da18bcc8340>, <ast.Name object at 0x7da18bccae60>]]] assign[=] name[value] 
return[call[name[pandas].DataFrame, parameter[name[data]]]]
keyword[def] identifier[read_tb] ( identifier[path] ): literal[string] keyword[import] identifier[pandas] keyword[import] identifier[numpy] keyword[as] identifier[np] keyword[from] identifier[glob] keyword[import] identifier[glob] keyword[from] identifier[collections] keyword[import] identifier[defaultdict] keyword[import] identifier[tensorflow] keyword[as] identifier[tf] keyword[if] identifier[osp] . identifier[isdir] ( identifier[path] ): identifier[fnames] = identifier[glob] ( identifier[osp] . identifier[join] ( identifier[path] , literal[string] )) keyword[elif] identifier[osp] . identifier[basename] ( identifier[path] ). identifier[startswith] ( literal[string] ): identifier[fnames] =[ identifier[path] ] keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[path] ) identifier[tag2pairs] = identifier[defaultdict] ( identifier[list] ) identifier[maxstep] = literal[int] keyword[for] identifier[fname] keyword[in] identifier[fnames] : keyword[for] identifier[summary] keyword[in] identifier[tf] . identifier[train] . identifier[summary_iterator] ( identifier[fname] ): keyword[if] identifier[summary] . identifier[step] > literal[int] : keyword[for] identifier[v] keyword[in] identifier[summary] . identifier[summary] . identifier[value] : identifier[pair] =( identifier[summary] . identifier[step] , identifier[v] . identifier[simple_value] ) identifier[tag2pairs] [ identifier[v] . identifier[tag] ]. identifier[append] ( identifier[pair] ) identifier[maxstep] = identifier[max] ( identifier[summary] . identifier[step] , identifier[maxstep] ) identifier[data] = identifier[np] . identifier[empty] (( identifier[maxstep] , identifier[len] ( identifier[tag2pairs] ))) identifier[data] [:]= identifier[np] . identifier[nan] identifier[tags] = identifier[sorted] ( identifier[tag2pairs] . 
identifier[keys] ()) keyword[for] ( identifier[colidx] , identifier[tag] ) keyword[in] identifier[enumerate] ( identifier[tags] ): identifier[pairs] = identifier[tag2pairs] [ identifier[tag] ] keyword[for] ( identifier[step] , identifier[value] ) keyword[in] identifier[pairs] : identifier[data] [ identifier[step] - literal[int] , identifier[colidx] ]= identifier[value] keyword[return] identifier[pandas] . identifier[DataFrame] ( identifier[data] , identifier[columns] = identifier[tags] )
def read_tb(path): """ path : a tensorboard file OR a directory, where we will find all TB files of the form events.* """ import pandas import numpy as np from glob import glob from collections import defaultdict import tensorflow as tf if osp.isdir(path): fnames = glob(osp.join(path, 'events.*')) # depends on [control=['if'], data=[]] elif osp.basename(path).startswith('events.'): fnames = [path] # depends on [control=['if'], data=[]] else: raise NotImplementedError('Expected tensorboard file or directory containing them. Got %s' % path) tag2pairs = defaultdict(list) maxstep = 0 for fname in fnames: for summary in tf.train.summary_iterator(fname): if summary.step > 0: for v in summary.summary.value: pair = (summary.step, v.simple_value) tag2pairs[v.tag].append(pair) # depends on [control=['for'], data=['v']] maxstep = max(summary.step, maxstep) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['summary']] # depends on [control=['for'], data=['fname']] data = np.empty((maxstep, len(tag2pairs))) data[:] = np.nan tags = sorted(tag2pairs.keys()) for (colidx, tag) in enumerate(tags): pairs = tag2pairs[tag] for (step, value) in pairs: data[step - 1, colidx] = value # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return pandas.DataFrame(data, columns=tags)
def format_meta_lines(cls, meta, labels, offset, **kwargs): '''Return all information from a given meta dictionary in a list of lines''' lines = [] # Name and underline name = meta['package_name'] if 'version' in meta: name += '-' + meta['version'] if 'custom_location' in kwargs: name += ' ({loc})'.format(loc=kwargs['custom_location']) lines.append(name) lines.append(len(name)*'=') lines.append('') # Summary lines.extend(meta['summary'].splitlines()) lines.append('') # Description if meta.get('description', ''): lines.extend(meta['description'].splitlines()) lines.append('') # Other metadata data = [] for item in labels: if meta.get(item, '') != '': # We want to process False and 0 label = (cls._nice_strings[item] + ':').ljust(offset + 2) data.append(label + cls._format_field(meta[item])) lines.extend(data) return lines
def function[format_meta_lines, parameter[cls, meta, labels, offset]]: constant[Return all information from a given meta dictionary in a list of lines] variable[lines] assign[=] list[[]] variable[name] assign[=] call[name[meta]][constant[package_name]] if compare[constant[version] in name[meta]] begin[:] <ast.AugAssign object at 0x7da1b10a7bb0> if compare[constant[custom_location] in name[kwargs]] begin[:] <ast.AugAssign object at 0x7da1b10a69b0> call[name[lines].append, parameter[name[name]]] call[name[lines].append, parameter[binary_operation[call[name[len], parameter[name[name]]] * constant[=]]]] call[name[lines].append, parameter[constant[]]] call[name[lines].extend, parameter[call[call[name[meta]][constant[summary]].splitlines, parameter[]]]] call[name[lines].append, parameter[constant[]]] if call[name[meta].get, parameter[constant[description], constant[]]] begin[:] call[name[lines].extend, parameter[call[call[name[meta]][constant[description]].splitlines, parameter[]]]] call[name[lines].append, parameter[constant[]]] variable[data] assign[=] list[[]] for taget[name[item]] in starred[name[labels]] begin[:] if compare[call[name[meta].get, parameter[name[item], constant[]]] not_equal[!=] constant[]] begin[:] variable[label] assign[=] call[binary_operation[call[name[cls]._nice_strings][name[item]] + constant[:]].ljust, parameter[binary_operation[name[offset] + constant[2]]]] call[name[data].append, parameter[binary_operation[name[label] + call[name[cls]._format_field, parameter[call[name[meta]][name[item]]]]]]] call[name[lines].extend, parameter[name[data]]] return[name[lines]]
keyword[def] identifier[format_meta_lines] ( identifier[cls] , identifier[meta] , identifier[labels] , identifier[offset] ,** identifier[kwargs] ): literal[string] identifier[lines] =[] identifier[name] = identifier[meta] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[meta] : identifier[name] += literal[string] + identifier[meta] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[name] += literal[string] . identifier[format] ( identifier[loc] = identifier[kwargs] [ literal[string] ]) identifier[lines] . identifier[append] ( identifier[name] ) identifier[lines] . identifier[append] ( identifier[len] ( identifier[name] )* literal[string] ) identifier[lines] . identifier[append] ( literal[string] ) identifier[lines] . identifier[extend] ( identifier[meta] [ literal[string] ]. identifier[splitlines] ()) identifier[lines] . identifier[append] ( literal[string] ) keyword[if] identifier[meta] . identifier[get] ( literal[string] , literal[string] ): identifier[lines] . identifier[extend] ( identifier[meta] [ literal[string] ]. identifier[splitlines] ()) identifier[lines] . identifier[append] ( literal[string] ) identifier[data] =[] keyword[for] identifier[item] keyword[in] identifier[labels] : keyword[if] identifier[meta] . identifier[get] ( identifier[item] , literal[string] )!= literal[string] : identifier[label] =( identifier[cls] . identifier[_nice_strings] [ identifier[item] ]+ literal[string] ). identifier[ljust] ( identifier[offset] + literal[int] ) identifier[data] . identifier[append] ( identifier[label] + identifier[cls] . identifier[_format_field] ( identifier[meta] [ identifier[item] ])) identifier[lines] . identifier[extend] ( identifier[data] ) keyword[return] identifier[lines]
def format_meta_lines(cls, meta, labels, offset, **kwargs): """Return all information from a given meta dictionary in a list of lines""" lines = [] # Name and underline name = meta['package_name'] if 'version' in meta: name += '-' + meta['version'] # depends on [control=['if'], data=['meta']] if 'custom_location' in kwargs: name += ' ({loc})'.format(loc=kwargs['custom_location']) # depends on [control=['if'], data=['kwargs']] lines.append(name) lines.append(len(name) * '=') lines.append('') # Summary lines.extend(meta['summary'].splitlines()) lines.append('') # Description if meta.get('description', ''): lines.extend(meta['description'].splitlines()) lines.append('') # depends on [control=['if'], data=[]] # Other metadata data = [] for item in labels: if meta.get(item, '') != '': # We want to process False and 0 label = (cls._nice_strings[item] + ':').ljust(offset + 2) data.append(label + cls._format_field(meta[item])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] lines.extend(data) return lines
def set_color(self, color): """ Change gradient color and change cursor position if an alpha value is supplied. :param color: new gradient color in RGB(A) format :type color: tuple[int] """ if len(color) == 3: alpha = self.get() else: alpha = color[3] self._draw_gradient(alpha, color[:3])
def function[set_color, parameter[self, color]]: constant[ Change gradient color and change cursor position if an alpha value is supplied. :param color: new gradient color in RGB(A) format :type color: tuple[int] ] if compare[call[name[len], parameter[name[color]]] equal[==] constant[3]] begin[:] variable[alpha] assign[=] call[name[self].get, parameter[]] call[name[self]._draw_gradient, parameter[name[alpha], call[name[color]][<ast.Slice object at 0x7da1b23efa60>]]]
keyword[def] identifier[set_color] ( identifier[self] , identifier[color] ): literal[string] keyword[if] identifier[len] ( identifier[color] )== literal[int] : identifier[alpha] = identifier[self] . identifier[get] () keyword[else] : identifier[alpha] = identifier[color] [ literal[int] ] identifier[self] . identifier[_draw_gradient] ( identifier[alpha] , identifier[color] [: literal[int] ])
def set_color(self, color): """ Change gradient color and change cursor position if an alpha value is supplied. :param color: new gradient color in RGB(A) format :type color: tuple[int] """ if len(color) == 3: alpha = self.get() # depends on [control=['if'], data=[]] else: alpha = color[3] self._draw_gradient(alpha, color[:3])
def enforce_timezone(cls, value): """ When `self.default_timezone` is `None`, always return naive datetimes. When `self.default_timezone` is not `None`, always return aware datetimes. """ field_timezone = cls.default_timezone() if (field_timezone is not None) and not is_aware(value): return make_aware(value, field_timezone) elif (field_timezone is None) and is_aware(value): return make_naive(value, utc) return value
def function[enforce_timezone, parameter[cls, value]]: constant[ When `self.default_timezone` is `None`, always return naive datetimes. When `self.default_timezone` is not `None`, always return aware datetimes. ] variable[field_timezone] assign[=] call[name[cls].default_timezone, parameter[]] if <ast.BoolOp object at 0x7da1b1116a70> begin[:] return[call[name[make_aware], parameter[name[value], name[field_timezone]]]] return[name[value]]
keyword[def] identifier[enforce_timezone] ( identifier[cls] , identifier[value] ): literal[string] identifier[field_timezone] = identifier[cls] . identifier[default_timezone] () keyword[if] ( identifier[field_timezone] keyword[is] keyword[not] keyword[None] ) keyword[and] keyword[not] identifier[is_aware] ( identifier[value] ): keyword[return] identifier[make_aware] ( identifier[value] , identifier[field_timezone] ) keyword[elif] ( identifier[field_timezone] keyword[is] keyword[None] ) keyword[and] identifier[is_aware] ( identifier[value] ): keyword[return] identifier[make_naive] ( identifier[value] , identifier[utc] ) keyword[return] identifier[value]
def enforce_timezone(cls, value): """ When `self.default_timezone` is `None`, always return naive datetimes. When `self.default_timezone` is not `None`, always return aware datetimes. """ field_timezone = cls.default_timezone() if field_timezone is not None and (not is_aware(value)): return make_aware(value, field_timezone) # depends on [control=['if'], data=[]] elif field_timezone is None and is_aware(value): return make_naive(value, utc) # depends on [control=['if'], data=[]] return value
def change_access_key(self): """Change access key of your account.""" method = 'POST' endpoint = '/rest/v1/users/{}/accesskey/change'.format( self.client.sauce_username) return self.client.request(method, endpoint)
def function[change_access_key, parameter[self]]: constant[Change access key of your account.] variable[method] assign[=] constant[POST] variable[endpoint] assign[=] call[constant[/rest/v1/users/{}/accesskey/change].format, parameter[name[self].client.sauce_username]] return[call[name[self].client.request, parameter[name[method], name[endpoint]]]]
keyword[def] identifier[change_access_key] ( identifier[self] ): literal[string] identifier[method] = literal[string] identifier[endpoint] = literal[string] . identifier[format] ( identifier[self] . identifier[client] . identifier[sauce_username] ) keyword[return] identifier[self] . identifier[client] . identifier[request] ( identifier[method] , identifier[endpoint] )
def change_access_key(self): """Change access key of your account.""" method = 'POST' endpoint = '/rest/v1/users/{}/accesskey/change'.format(self.client.sauce_username) return self.client.request(method, endpoint)
def run(self, data, results=None, mask=None, positions=None): """ Run a fit for each galaxy from the previous phase. Parameters ---------- data: LensData results: ResultsCollection Results from all previous phases mask: Mask The mask positions Returns ------- results: HyperGalaxyResults A collection of results, with one item per a galaxy """ model_image = results.last.unmasked_model_image galaxy_tuples = results.last.constant.name_instance_tuples_for_class(g.Galaxy) results_copy = copy.copy(results.last) for name, galaxy in galaxy_tuples: optimizer = self.optimizer.copy_with_name_extension(name) optimizer.variable.hyper_galaxy = g.HyperGalaxy galaxy_image = results.last.unmasked_image_for_galaxy(galaxy) optimizer.fit(self.__class__.Analysis(data, model_image, galaxy_image)) getattr(results_copy.variable, name).hyper_galaxy = optimizer.variable.hyper_galaxy getattr(results_copy.constant, name).hyper_galaxy = optimizer.constant.hyper_galaxy return results_copy
def function[run, parameter[self, data, results, mask, positions]]: constant[ Run a fit for each galaxy from the previous phase. Parameters ---------- data: LensData results: ResultsCollection Results from all previous phases mask: Mask The mask positions Returns ------- results: HyperGalaxyResults A collection of results, with one item per a galaxy ] variable[model_image] assign[=] name[results].last.unmasked_model_image variable[galaxy_tuples] assign[=] call[name[results].last.constant.name_instance_tuples_for_class, parameter[name[g].Galaxy]] variable[results_copy] assign[=] call[name[copy].copy, parameter[name[results].last]] for taget[tuple[[<ast.Name object at 0x7da2043463b0>, <ast.Name object at 0x7da204344a60>]]] in starred[name[galaxy_tuples]] begin[:] variable[optimizer] assign[=] call[name[self].optimizer.copy_with_name_extension, parameter[name[name]]] name[optimizer].variable.hyper_galaxy assign[=] name[g].HyperGalaxy variable[galaxy_image] assign[=] call[name[results].last.unmasked_image_for_galaxy, parameter[name[galaxy]]] call[name[optimizer].fit, parameter[call[name[self].__class__.Analysis, parameter[name[data], name[model_image], name[galaxy_image]]]]] call[name[getattr], parameter[name[results_copy].variable, name[name]]].hyper_galaxy assign[=] name[optimizer].variable.hyper_galaxy call[name[getattr], parameter[name[results_copy].constant, name[name]]].hyper_galaxy assign[=] name[optimizer].constant.hyper_galaxy return[name[results_copy]]
keyword[def] identifier[run] ( identifier[self] , identifier[data] , identifier[results] = keyword[None] , identifier[mask] = keyword[None] , identifier[positions] = keyword[None] ): literal[string] identifier[model_image] = identifier[results] . identifier[last] . identifier[unmasked_model_image] identifier[galaxy_tuples] = identifier[results] . identifier[last] . identifier[constant] . identifier[name_instance_tuples_for_class] ( identifier[g] . identifier[Galaxy] ) identifier[results_copy] = identifier[copy] . identifier[copy] ( identifier[results] . identifier[last] ) keyword[for] identifier[name] , identifier[galaxy] keyword[in] identifier[galaxy_tuples] : identifier[optimizer] = identifier[self] . identifier[optimizer] . identifier[copy_with_name_extension] ( identifier[name] ) identifier[optimizer] . identifier[variable] . identifier[hyper_galaxy] = identifier[g] . identifier[HyperGalaxy] identifier[galaxy_image] = identifier[results] . identifier[last] . identifier[unmasked_image_for_galaxy] ( identifier[galaxy] ) identifier[optimizer] . identifier[fit] ( identifier[self] . identifier[__class__] . identifier[Analysis] ( identifier[data] , identifier[model_image] , identifier[galaxy_image] )) identifier[getattr] ( identifier[results_copy] . identifier[variable] , identifier[name] ). identifier[hyper_galaxy] = identifier[optimizer] . identifier[variable] . identifier[hyper_galaxy] identifier[getattr] ( identifier[results_copy] . identifier[constant] , identifier[name] ). identifier[hyper_galaxy] = identifier[optimizer] . identifier[constant] . identifier[hyper_galaxy] keyword[return] identifier[results_copy]
def run(self, data, results=None, mask=None, positions=None): """ Run a fit for each galaxy from the previous phase. Parameters ---------- data: LensData results: ResultsCollection Results from all previous phases mask: Mask The mask positions Returns ------- results: HyperGalaxyResults A collection of results, with one item per a galaxy """ model_image = results.last.unmasked_model_image galaxy_tuples = results.last.constant.name_instance_tuples_for_class(g.Galaxy) results_copy = copy.copy(results.last) for (name, galaxy) in galaxy_tuples: optimizer = self.optimizer.copy_with_name_extension(name) optimizer.variable.hyper_galaxy = g.HyperGalaxy galaxy_image = results.last.unmasked_image_for_galaxy(galaxy) optimizer.fit(self.__class__.Analysis(data, model_image, galaxy_image)) getattr(results_copy.variable, name).hyper_galaxy = optimizer.variable.hyper_galaxy getattr(results_copy.constant, name).hyper_galaxy = optimizer.constant.hyper_galaxy # depends on [control=['for'], data=[]] return results_copy
def _union(d1,d2): ''' d1 = {'a':'x','b':'y','c':'z'} d2 = {'a':'x','b':'u','d':'v'} _union(d1,d2) _union(d2,d1) ''' u = {} ds = _diff_internal(d1,d2) for key in ds['same']: u[key] = d1[key] for key in ds['vdiff']: u[key] = d1[key] for key in ds['kdiff']: u[key] = d1[key] ds = _diff_internal(d2,d1) for key in ds['kdiff']: u[key] = d2[key] return(u)
def function[_union, parameter[d1, d2]]: constant[ d1 = {'a':'x','b':'y','c':'z'} d2 = {'a':'x','b':'u','d':'v'} _union(d1,d2) _union(d2,d1) ] variable[u] assign[=] dictionary[[], []] variable[ds] assign[=] call[name[_diff_internal], parameter[name[d1], name[d2]]] for taget[name[key]] in starred[call[name[ds]][constant[same]]] begin[:] call[name[u]][name[key]] assign[=] call[name[d1]][name[key]] for taget[name[key]] in starred[call[name[ds]][constant[vdiff]]] begin[:] call[name[u]][name[key]] assign[=] call[name[d1]][name[key]] for taget[name[key]] in starred[call[name[ds]][constant[kdiff]]] begin[:] call[name[u]][name[key]] assign[=] call[name[d1]][name[key]] variable[ds] assign[=] call[name[_diff_internal], parameter[name[d2], name[d1]]] for taget[name[key]] in starred[call[name[ds]][constant[kdiff]]] begin[:] call[name[u]][name[key]] assign[=] call[name[d2]][name[key]] return[name[u]]
keyword[def] identifier[_union] ( identifier[d1] , identifier[d2] ): literal[string] identifier[u] ={} identifier[ds] = identifier[_diff_internal] ( identifier[d1] , identifier[d2] ) keyword[for] identifier[key] keyword[in] identifier[ds] [ literal[string] ]: identifier[u] [ identifier[key] ]= identifier[d1] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[ds] [ literal[string] ]: identifier[u] [ identifier[key] ]= identifier[d1] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[ds] [ literal[string] ]: identifier[u] [ identifier[key] ]= identifier[d1] [ identifier[key] ] identifier[ds] = identifier[_diff_internal] ( identifier[d2] , identifier[d1] ) keyword[for] identifier[key] keyword[in] identifier[ds] [ literal[string] ]: identifier[u] [ identifier[key] ]= identifier[d2] [ identifier[key] ] keyword[return] ( identifier[u] )
def _union(d1, d2): """ d1 = {'a':'x','b':'y','c':'z'} d2 = {'a':'x','b':'u','d':'v'} _union(d1,d2) _union(d2,d1) """ u = {} ds = _diff_internal(d1, d2) for key in ds['same']: u[key] = d1[key] # depends on [control=['for'], data=['key']] for key in ds['vdiff']: u[key] = d1[key] # depends on [control=['for'], data=['key']] for key in ds['kdiff']: u[key] = d1[key] # depends on [control=['for'], data=['key']] ds = _diff_internal(d2, d1) for key in ds['kdiff']: u[key] = d2[key] # depends on [control=['for'], data=['key']] return u
def cosmic_link(variant_obj): """Compose link to COSMIC Database. Args: variant_obj(scout.models.Variant) Returns: url_template(str): Link to COSMIIC database if cosmic id is present """ cosmic_ids = variant_obj.get('cosmic_ids') if not cosmic_ids: return None else: cosmic_id = cosmic_ids[0] url_template = ("https://cancer.sanger.ac.uk/cosmic/mutation/overview?id={}") return url_template.format(cosmic_id)
def function[cosmic_link, parameter[variant_obj]]: constant[Compose link to COSMIC Database. Args: variant_obj(scout.models.Variant) Returns: url_template(str): Link to COSMIIC database if cosmic id is present ] variable[cosmic_ids] assign[=] call[name[variant_obj].get, parameter[constant[cosmic_ids]]] if <ast.UnaryOp object at 0x7da20e955c30> begin[:] return[constant[None]] return[call[name[url_template].format, parameter[name[cosmic_id]]]]
keyword[def] identifier[cosmic_link] ( identifier[variant_obj] ): literal[string] identifier[cosmic_ids] = identifier[variant_obj] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[cosmic_ids] : keyword[return] keyword[None] keyword[else] : identifier[cosmic_id] = identifier[cosmic_ids] [ literal[int] ] identifier[url_template] =( literal[string] ) keyword[return] identifier[url_template] . identifier[format] ( identifier[cosmic_id] )
def cosmic_link(variant_obj): """Compose link to COSMIC Database. Args: variant_obj(scout.models.Variant) Returns: url_template(str): Link to COSMIIC database if cosmic id is present """ cosmic_ids = variant_obj.get('cosmic_ids') if not cosmic_ids: return None # depends on [control=['if'], data=[]] else: cosmic_id = cosmic_ids[0] url_template = 'https://cancer.sanger.ac.uk/cosmic/mutation/overview?id={}' return url_template.format(cosmic_id)
def get_public_cms_page_urls(*, language_code): """ :param language_code: e.g.: "en" or "de" :return: Tuple with all public urls in the given language """ pages = Page.objects.public() urls = [page.get_absolute_url(language=language_code) for page in pages] urls.sort() return tuple(urls)
def function[get_public_cms_page_urls, parameter[]]: constant[ :param language_code: e.g.: "en" or "de" :return: Tuple with all public urls in the given language ] variable[pages] assign[=] call[name[Page].objects.public, parameter[]] variable[urls] assign[=] <ast.ListComp object at 0x7da20c6a8a90> call[name[urls].sort, parameter[]] return[call[name[tuple], parameter[name[urls]]]]
keyword[def] identifier[get_public_cms_page_urls] (*, identifier[language_code] ): literal[string] identifier[pages] = identifier[Page] . identifier[objects] . identifier[public] () identifier[urls] =[ identifier[page] . identifier[get_absolute_url] ( identifier[language] = identifier[language_code] ) keyword[for] identifier[page] keyword[in] identifier[pages] ] identifier[urls] . identifier[sort] () keyword[return] identifier[tuple] ( identifier[urls] )
def get_public_cms_page_urls(*, language_code): """ :param language_code: e.g.: "en" or "de" :return: Tuple with all public urls in the given language """ pages = Page.objects.public() urls = [page.get_absolute_url(language=language_code) for page in pages] urls.sort() return tuple(urls)
def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" url = '{}/userinfo'.format(self.BASE_URL) response = self.get_json( url, headers={'Authorization': 'Bearer ' + access_token}, ) self.check_correct_audience(response['audience']) userdata = response['user'] return userdata
def function[user_data, parameter[self, access_token]]: constant[Loads user data from service] variable[url] assign[=] call[constant[{}/userinfo].format, parameter[name[self].BASE_URL]] variable[response] assign[=] call[name[self].get_json, parameter[name[url]]] call[name[self].check_correct_audience, parameter[call[name[response]][constant[audience]]]] variable[userdata] assign[=] call[name[response]][constant[user]] return[name[userdata]]
keyword[def] identifier[user_data] ( identifier[self] , identifier[access_token] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[BASE_URL] ) identifier[response] = identifier[self] . identifier[get_json] ( identifier[url] , identifier[headers] ={ literal[string] : literal[string] + identifier[access_token] }, ) identifier[self] . identifier[check_correct_audience] ( identifier[response] [ literal[string] ]) identifier[userdata] = identifier[response] [ literal[string] ] keyword[return] identifier[userdata]
def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" url = '{}/userinfo'.format(self.BASE_URL) response = self.get_json(url, headers={'Authorization': 'Bearer ' + access_token}) self.check_correct_audience(response['audience']) userdata = response['user'] return userdata
def ddb_path(self): """Absolute path of the DDB file. Empty string if file is not present.""" # Lazy property to avoid multiple calls to has_abiext. try: return self._ddb_path except AttributeError: path = self.outdir.has_abiext("DDB") if path: self._ddb_path = path return path
def function[ddb_path, parameter[self]]: constant[Absolute path of the DDB file. Empty string if file is not present.] <ast.Try object at 0x7da20e954610>
keyword[def] identifier[ddb_path] ( identifier[self] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[_ddb_path] keyword[except] identifier[AttributeError] : identifier[path] = identifier[self] . identifier[outdir] . identifier[has_abiext] ( literal[string] ) keyword[if] identifier[path] : identifier[self] . identifier[_ddb_path] = identifier[path] keyword[return] identifier[path]
def ddb_path(self): """Absolute path of the DDB file. Empty string if file is not present.""" # Lazy property to avoid multiple calls to has_abiext. try: return self._ddb_path # depends on [control=['try'], data=[]] except AttributeError: path = self.outdir.has_abiext('DDB') if path: self._ddb_path = path # depends on [control=['if'], data=[]] return path # depends on [control=['except'], data=[]]
def fetch_data(self, stock_no, nowdatetime):
    """Download monthly OTC trading data from gretai.org.tw.

    Returns a ``csv.reader`` whose rows contain:

    0. date
    1. traded shares
    2. traded value
    3. open price
    4. high price
    5. low price
    6. close price
    7. price change
    8. number of transactions

    :param str stock_no: stock ticker symbol
    :param datetime nowdatetime: current time (selects the month to fetch)
    :rtype: list
    """
    query = {
        'year': nowdatetime.year - 1911,  # site expects ROC calendar years
        'mon': nowdatetime.month,
        'stock': stock_no,
        'rand': random.randrange(1, 1000000),  # cache-buster
    }
    url = ('/ch/stock/aftertrading/'
           'daily_trading_info/st43_download.php?d=%(year)d/%(mon)02d&'
           'stkno=%(stock)s&r=%(rand)s') % query
    logging.info(url)
    result = GRETAI_CONNECTIONS.urlopen('GET', url)
    # Record the full URL that was fetched for later inspection.
    self.__url.append(GRETAI_HOST + url)
    return csv.reader(StringIO(result.data))
def _ufunc_dispatch(ufunc, method, i, inputs, **kwargs):
    """Route ufunc execution intelligently to local host or remote engine(s)
    depending on where the inputs are, to minimize the need to move data.

    Args:
      see numpy documentation for __numpy_ufunc__

    Returns:
      The ufunc result; may be a local array, a remote result, or a
      DistArray, depending on where the computation was placed.

    Raises:
      Error: if ``out=`` is given, the ufunc takes more than two inputs,
        or the ufunc method is not yet supported.
    """
    #__print_ufunc(ufunc, method, i, inputs, **kwargs)
    if 'out' in kwargs and kwargs['out'] is not None:
        raise Error('for distributed ufuncs `out=` is not yet implemented')
    # np.dot is handled specially as a two-input operation.
    nin = 2 if ufunc is np.dot else ufunc.nin
    # Fix: compare with `==`, not `is`. Identity comparison against int
    # literals relies on CPython small-int caching and warns on Python 3.8+.
    if nin == 1 and method == '__call__':
        return vectorize(ufunc.__call__)(inputs[0], **kwargs)
    elif nin == 2 and method == '__call__':
        from distob import engine
        here = engine.eid
        # Choose best location for the computation, possibly distributed:
        locs, weights = zip(*[_engine_affinity(a) for a in inputs])
        # for DistArrays, adjust preferred distaxis to account for broadcasting
        bshape = _broadcast_shape(*inputs)
        locs = list(locs)
        for idx, loc in enumerate(locs):
            if isinstance(loc, _TupleType):
                num_new_axes = len(bshape) - inputs[idx].ndim
                if num_new_axes > 0:
                    locs[idx] = (locs[idx][0], locs[idx][1] + num_new_axes)
        if ufunc is np.dot:
            # For np.dot, any distributed (tuple) location is replaced by the
            # local engine id, forcing local computation.
            locs = [here if isinstance(m, _TupleType) else m for m in locs]
        if locs[0] == locs[1]:
            location = locs[0]
        else:
            # TODO: More accurately penalize the increased data movement if we
            # choose to distribute an axis that requires broadcasting.
            smallest = 0 if weights[0] <= weights[1] else 1
            largest = 1 - smallest
            if locs[0] is here or locs[1] is here:
                location = here if weights[0] == weights[1] else locs[largest]
            else:
                # Both inputs are on remote engines. With the current
                # implementation, data on one remote engine can only be moved
                # to another remote engine via the client. Cost accordingly:
                if weights[smallest]*2 < weights[largest] + weights[smallest]:
                    location = locs[largest]
                else:
                    location = here
        # Move both inputs to the chosen location:
        inputs = [_ufunc_move_input(a, location, bshape) for a in inputs]
        # Execute computation:
        if location is here:
            return ufunc.__call__(inputs[0], inputs[1], **kwargs)
        elif isinstance(location, numbers.Integral):
            # location is a single remote engine
            return call(ufunc.__call__, inputs[0], inputs[1], **kwargs)
        else:
            # location is a tuple (list of engine ids, distaxis) implying
            # that the moved inputs are now distributed arrays (or scalar)
            engine_ids, distaxis = location
            n = len(engine_ids)
            is_dist = tuple(isinstance(a, DistArray) for a in inputs)
            assert is_dist[0] or is_dist[1]
            # Sanity-check that distributed inputs agree on axis and count.
            # (A dead `ndim = inputs[k].ndim` assignment was removed here.)
            for k in (0, 1):
                if is_dist[k]:
                    assert inputs[k]._distaxis == distaxis
                    assert inputs[k]._n == n

            def _remote_ucall(subinputs, **kwargs):
                """(Executed on a remote or local engine) call the ufunc"""
                return ufunc.__call__(subinputs[0], subinputs[1], **kwargs)

            results = []
            kwargs = kwargs.copy()
            kwargs['block'] = False
            kwargs['prefer_local'] = False
            # Apply the ufunc piecewise to each pair of subarrays.
            for j in range(n):
                subinputs = tuple(inputs[k]._subarrays[j] if is_dist[k]
                                  else inputs[k] for k in (0, 1))
                results.append(call(_remote_ucall, subinputs, **kwargs))
            results = [convert_result(ar) for ar in results]
            return DistArray(results, distaxis)
    elif ufunc.nin > 2:
        raise Error(u'Distributing ufuncs with >2 inputs is not yet supported')
    else:
        raise Error(u'Distributed ufunc.%s() is not yet implemented' % method)
def function[_ufunc_dispatch, parameter[ufunc, method, i, inputs]]: constant[Route ufunc execution intelligently to local host or remote engine(s) depending on where the inputs are, to minimize the need to move data. Args: see numpy documentation for __numpy_ufunc__ ] if <ast.BoolOp object at 0x7da1b0049fc0> begin[:] <ast.Raise object at 0x7da1b00481f0> variable[nin] assign[=] <ast.IfExp object at 0x7da1b0048df0> if <ast.BoolOp object at 0x7da1b004bcd0> begin[:] return[call[call[name[vectorize], parameter[name[ufunc].__call__]], parameter[call[name[inputs]][constant[0]]]]]
keyword[def] identifier[_ufunc_dispatch] ( identifier[ufunc] , identifier[method] , identifier[i] , identifier[inputs] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[Error] ( literal[string] ) identifier[nin] = literal[int] keyword[if] identifier[ufunc] keyword[is] identifier[np] . identifier[dot] keyword[else] identifier[ufunc] . identifier[nin] keyword[if] identifier[nin] keyword[is] literal[int] keyword[and] identifier[method] == literal[string] : keyword[return] identifier[vectorize] ( identifier[ufunc] . identifier[__call__] )( identifier[inputs] [ literal[int] ],** identifier[kwargs] ) keyword[elif] identifier[nin] keyword[is] literal[int] keyword[and] identifier[method] == literal[string] : keyword[from] identifier[distob] keyword[import] identifier[engine] identifier[here] = identifier[engine] . identifier[eid] identifier[locs] , identifier[weights] = identifier[zip] (*[ identifier[_engine_affinity] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[inputs] ]) identifier[bshape] = identifier[_broadcast_shape] (* identifier[inputs] ) identifier[locs] = identifier[list] ( identifier[locs] ) keyword[for] identifier[i] , identifier[loc] keyword[in] identifier[enumerate] ( identifier[locs] ): keyword[if] identifier[isinstance] ( identifier[loc] , identifier[_TupleType] ): identifier[num_new_axes] = identifier[len] ( identifier[bshape] )- identifier[inputs] [ identifier[i] ]. identifier[ndim] keyword[if] identifier[num_new_axes] > literal[int] : identifier[locs] [ identifier[i] ]=( identifier[locs] [ identifier[i] ][ literal[int] ], identifier[locs] [ identifier[i] ][ literal[int] ]+ identifier[num_new_axes] ) keyword[if] identifier[ufunc] keyword[is] identifier[np] . 
identifier[dot] : identifier[locs] =[ identifier[here] keyword[if] identifier[isinstance] ( identifier[m] , identifier[_TupleType] ) keyword[else] identifier[m] keyword[for] identifier[m] keyword[in] identifier[locs] ] keyword[if] identifier[locs] [ literal[int] ]== identifier[locs] [ literal[int] ]: identifier[location] = identifier[locs] [ literal[int] ] keyword[else] : identifier[smallest] = literal[int] keyword[if] identifier[weights] [ literal[int] ]<= identifier[weights] [ literal[int] ] keyword[else] literal[int] identifier[largest] = literal[int] - identifier[smallest] keyword[if] identifier[locs] [ literal[int] ] keyword[is] identifier[here] keyword[or] identifier[locs] [ literal[int] ] keyword[is] identifier[here] : identifier[location] = identifier[here] keyword[if] identifier[weights] [ literal[int] ]== identifier[weights] [ literal[int] ] keyword[else] identifier[locs] [ identifier[largest] ] keyword[else] : keyword[if] identifier[weights] [ identifier[smallest] ]* literal[int] < identifier[weights] [ identifier[largest] ]+ identifier[weights] [ identifier[smallest] ]: identifier[location] = identifier[locs] [ identifier[largest] ] keyword[else] : identifier[location] = identifier[here] identifier[inputs] =[ identifier[_ufunc_move_input] ( identifier[a] , identifier[location] , identifier[bshape] ) keyword[for] identifier[a] keyword[in] identifier[inputs] ] keyword[if] identifier[location] keyword[is] identifier[here] : keyword[return] identifier[ufunc] . identifier[__call__] ( identifier[inputs] [ literal[int] ], identifier[inputs] [ literal[int] ],** identifier[kwargs] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[location] , identifier[numbers] . identifier[Integral] ): keyword[return] identifier[call] ( identifier[ufunc] . 
identifier[__call__] , identifier[inputs] [ literal[int] ], identifier[inputs] [ literal[int] ],** identifier[kwargs] ) keyword[else] : identifier[engine_ids] , identifier[distaxis] = identifier[location] identifier[n] = identifier[len] ( identifier[engine_ids] ) identifier[is_dist] = identifier[tuple] ( identifier[isinstance] ( identifier[a] , identifier[DistArray] ) keyword[for] identifier[a] keyword[in] identifier[inputs] ) keyword[assert] ( identifier[is_dist] [ literal[int] ] keyword[or] identifier[is_dist] [ literal[int] ]) keyword[for] identifier[i] keyword[in] literal[int] , literal[int] : keyword[if] identifier[is_dist] [ identifier[i] ]: identifier[ndim] = identifier[inputs] [ identifier[i] ]. identifier[ndim] keyword[assert] ( identifier[inputs] [ identifier[i] ]. identifier[_distaxis] == identifier[distaxis] ) keyword[assert] ( identifier[inputs] [ identifier[i] ]. identifier[_n] == identifier[n] ) keyword[def] identifier[_remote_ucall] ( identifier[inputs] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[ufunc] . identifier[__call__] ( identifier[inputs] [ literal[int] ], identifier[inputs] [ literal[int] ],** identifier[kwargs] ) identifier[results] =[] identifier[kwargs] = identifier[kwargs] . identifier[copy] () identifier[kwargs] [ literal[string] ]= keyword[False] identifier[kwargs] [ literal[string] ]= keyword[False] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[n] ): identifier[subinputs] = identifier[tuple] ( identifier[inputs] [ identifier[i] ]. identifier[_subarrays] [ identifier[j] ] keyword[if] identifier[is_dist] [ identifier[i] ] keyword[else] identifier[inputs] [ identifier[i] ] keyword[for] identifier[i] keyword[in] ( literal[int] , literal[int] )) identifier[results] . 
identifier[append] ( identifier[call] ( identifier[_remote_ucall] , identifier[subinputs] ,** identifier[kwargs] )) identifier[results] =[ identifier[convert_result] ( identifier[ar] ) keyword[for] identifier[ar] keyword[in] identifier[results] ] keyword[return] identifier[DistArray] ( identifier[results] , identifier[distaxis] ) keyword[elif] identifier[ufunc] . identifier[nin] > literal[int] : keyword[raise] identifier[Error] ( literal[string] ) keyword[else] : keyword[raise] identifier[Error] ( literal[string] % identifier[method] )
def _ufunc_dispatch(ufunc, method, i, inputs, **kwargs): """Route ufunc execution intelligently to local host or remote engine(s) depending on where the inputs are, to minimize the need to move data. Args: see numpy documentation for __numpy_ufunc__ """ #__print_ufunc(ufunc, method, i, inputs, **kwargs) if 'out' in kwargs and kwargs['out'] is not None: raise Error('for distributed ufuncs `out=` is not yet implemented') # depends on [control=['if'], data=[]] nin = 2 if ufunc is np.dot else ufunc.nin if nin is 1 and method == '__call__': return vectorize(ufunc.__call__)(inputs[0], **kwargs) # depends on [control=['if'], data=[]] elif nin is 2 and method == '__call__': from distob import engine here = engine.eid # Choose best location for the computation, possibly distributed: (locs, weights) = zip(*[_engine_affinity(a) for a in inputs]) # for DistArrays, adjust preferred distaxis to account for broadcasting bshape = _broadcast_shape(*inputs) locs = list(locs) for (i, loc) in enumerate(locs): if isinstance(loc, _TupleType): num_new_axes = len(bshape) - inputs[i].ndim if num_new_axes > 0: locs[i] = (locs[i][0], locs[i][1] + num_new_axes) # depends on [control=['if'], data=['num_new_axes']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if ufunc is np.dot: locs = [here if isinstance(m, _TupleType) else m for m in locs] # depends on [control=['if'], data=[]] if locs[0] == locs[1]: location = locs[0] # depends on [control=['if'], data=[]] else: # TODO: More accurately penalize the increased data movement if we # choose to distribute an axis that requires broadcasting. smallest = 0 if weights[0] <= weights[1] else 1 largest = 1 - smallest if locs[0] is here or locs[1] is here: location = here if weights[0] == weights[1] else locs[largest] # depends on [control=['if'], data=[]] # Both inputs are on remote engines. With the current # implementation, data on one remote engine can only be moved # to another remote engine via the client. 
Cost accordingly: elif weights[smallest] * 2 < weights[largest] + weights[smallest]: location = locs[largest] # depends on [control=['if'], data=[]] else: location = here # Move both inputs to the chosen location: inputs = [_ufunc_move_input(a, location, bshape) for a in inputs] # Execute computation: if location is here: return ufunc.__call__(inputs[0], inputs[1], **kwargs) # depends on [control=['if'], data=[]] elif isinstance(location, numbers.Integral): # location is a single remote engine return call(ufunc.__call__, inputs[0], inputs[1], **kwargs) # depends on [control=['if'], data=[]] else: # location is a tuple (list of engine ids, distaxis) implying # that the moved inputs are now distributed arrays (or scalar) (engine_ids, distaxis) = location n = len(engine_ids) is_dist = tuple((isinstance(a, DistArray) for a in inputs)) assert is_dist[0] or is_dist[1] for i in (0, 1): if is_dist[i]: ndim = inputs[i].ndim assert inputs[i]._distaxis == distaxis assert inputs[i]._n == n # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] def _remote_ucall(inputs, **kwargs): """(Executed on a remote or local engine) call the ufunc""" return ufunc.__call__(inputs[0], inputs[1], **kwargs) results = [] kwargs = kwargs.copy() kwargs['block'] = False kwargs['prefer_local'] = False for j in range(n): subinputs = tuple((inputs[i]._subarrays[j] if is_dist[i] else inputs[i] for i in (0, 1))) results.append(call(_remote_ucall, subinputs, **kwargs)) # depends on [control=['for'], data=['j']] results = [convert_result(ar) for ar in results] return DistArray(results, distaxis) # depends on [control=['if'], data=[]] elif ufunc.nin > 2: raise Error(u'Distributing ufuncs with >2 inputs is not yet supported') # depends on [control=['if'], data=[]] else: raise Error(u'Distributed ufunc.%s() is not yet implemented' % method)
def mesh_plane(mesh,
               plane_normal,
               plane_origin,
               return_faces=False,
               cached_dots=None):
    """
    Find the intersections between a mesh and a plane,
    returning a set of line segments on that plane.

    Parameters
    ---------
    mesh : Trimesh object
      Source mesh to slice
    plane_normal : (3,) float
      Normal vector of plane to intersect with mesh
    plane_origin : (3,) float
      Point on plane to intersect with mesh
    return_faces : bool
      If True return face index each line is from
    cached_dots : (n, 3) float
      If an external function has stored dot
      products pass them here to avoid recomputing

    Returns
    ----------
    lines : (m, 2, 3) float
      List of 3D line segments in space
    face_index : (m,) int
      Index of mesh.faces for each line
      Only returned if return_faces was True
    """

    def triangle_cases(signs):
        """
        Figure out which faces correspond to which intersection
        case from the signs of the dot product of each vertex.
        Does this by bitbang each row of signs into an 8 bit
        integer.

        code : signs      : intersects
        0    : [-1 -1 -1] : No
        2    : [-1 -1  0] : No
        4    : [-1 -1  1] : Yes; 2 on one side, 1 on the other
        6    : [-1  0  0] : Yes; one edge fully on plane
        8    : [-1  0  1] : Yes; one vertex on plane, 2 on different sides
        12   : [-1  1  1] : Yes; 2 on one side, 1 on the other
        14   : [0 0 0]    : No (on plane fully)
        16   : [0 0 1]    : Yes; one edge fully on plane
        20   : [0 1 1]    : No
        28   : [1 1 1]    : No

        Parameters
        ----------
        signs: (n,3) int, all values are -1,0, or 1
               Each row contains the dot product of all three vertices
               in a face with respect to the plane

        Returns
        ---------
        basic:      (n,) bool, which faces are in the basic intersection case
        one_vertex: (n,) bool, which faces are in the one vertex case
        one_edge:   (n,) bool, which faces are in the one edge case
        """
        signs_sorted = np.sort(signs, axis=1)
        coded = np.zeros(len(signs_sorted), dtype=np.int8) + 14
        for i in range(3):
            coded += signs_sorted[:, i] << 3 - i

        # one edge fully on the plane
        # note that we are only accepting *one* of the on-edge cases,
        # where the other vertex has a positive dot product (16) instead
        # of both on-edge cases ([6,16])
        # this is so that for regions that are co-planar with the the
        # section plane we don't end up with an invalid boundary
        # fix: dtype must be the builtin `bool`; the `np.bool` alias was
        # deprecated in NumPy 1.20 and removed in NumPy 1.24
        key = np.zeros(29, dtype=bool)
        key[16] = True
        one_edge = key[coded]

        # one vertex on plane, other two on different sides
        key[:] = False
        key[8] = True
        one_vertex = key[coded]

        # one vertex on one side of the plane, two on the other
        key[:] = False
        key[[4, 12]] = True
        basic = key[coded]

        return basic, one_vertex, one_edge

    def handle_on_vertex(signs, faces, vertices):
        # case where one vertex is on plane, two are on different sides
        vertex_plane = faces[signs == 0]
        edge_thru = faces[signs != 0].reshape((-1, 2))
        point_intersect, valid = plane_lines(plane_origin,
                                             plane_normal,
                                             vertices[edge_thru.T],
                                             line_segments=False)
        lines = np.column_stack((vertices[vertex_plane[valid]],
                                 point_intersect)).reshape((-1, 2, 3))
        return lines

    def handle_on_edge(signs, faces, vertices):
        # case where two vertices are on the plane and one is off
        edges = faces[signs == 0].reshape((-1, 2))
        points = vertices[edges]
        return points

    def handle_basic(signs, faces, vertices):
        # case where one vertex is on one side and two are on the other
        unique_element = grouping.unique_value_in_row(
            signs, unique=[-1, 1])
        edges = np.column_stack(
            (faces[unique_element],
             faces[np.roll(unique_element, 1, axis=1)],
             faces[unique_element],
             faces[np.roll(unique_element, 2, axis=1)])).reshape(
            (-1, 2))
        intersections, valid = plane_lines(plane_origin,
                                           plane_normal,
                                           vertices[edges.T],
                                           line_segments=False)
        # since the data has been pre-culled, any invalid intersections
        # at all means the culling was done incorrectly and things are
        # badly broken
        assert valid.all()
        return intersections.reshape((-1, 2, 3))

    # check input plane
    plane_normal = np.asanyarray(plane_normal, dtype=np.float64)
    plane_origin = np.asanyarray(plane_origin, dtype=np.float64)
    if plane_origin.shape != (3,) or plane_normal.shape != (3,):
        raise ValueError('Plane origin and normal must be (3,)!')

    if cached_dots is not None:
        dots = cached_dots
    else:
        # dot product of each vertex with the plane normal indexed by face
        # so for each face the dot product of each vertex is a row
        # shape is the same as mesh.faces (n,3)
        dots = np.dot(plane_normal,
                      (mesh.vertices - plane_origin).T)[mesh.faces]

    # sign of the dot product is -1, 0, or 1
    # shape is the same as mesh.faces (n,3)
    signs = np.zeros(mesh.faces.shape, dtype=np.int8)
    signs[dots < -tol.merge] = -1
    signs[dots > tol.merge] = 1

    # figure out which triangles are in the cross section,
    # and which of the three intersection cases they are in
    cases = triangle_cases(signs)
    # handlers for each case
    handlers = (handle_basic,
                handle_on_vertex,
                handle_on_edge)

    # the (m, 2, 3) line segments
    lines = np.vstack([h(signs[c],
                         mesh.faces[c],
                         mesh.vertices)
                       for c, h in zip(cases, handlers)])

    log.debug('mesh_cross_section found %i intersections', len(lines))

    if return_faces:
        face_index = np.hstack([np.nonzero(c)[0]
                                for c in cases])
        return lines, face_index
    return lines
def function[mesh_plane, parameter[mesh, plane_normal, plane_origin, return_faces, cached_dots]]: constant[ Find a the intersections between a mesh and a plane, returning a set of line segments on that plane. Parameters --------- mesh : Trimesh object Source mesh to slice plane_normal : (3,) float Normal vector of plane to intersect with mesh plane_origin: (3,) float Point on plane to intersect with mesh return_faces: bool If True return face index each line is from cached_dots : (n, 3) float If an external function has stored dot products pass them here to avoid recomputing Returns ---------- lines : (m, 2, 3) float List of 3D line segments in space face_index : (m,) int Index of mesh.faces for each line Only returned if return_faces was True ] def function[triangle_cases, parameter[signs]]: constant[ Figure out which faces correspond to which intersection case from the signs of the dot product of each vertex. Does this by bitbang each row of signs into an 8 bit integer. code : signs : intersects 0 : [-1 -1 -1] : No 2 : [-1 -1 0] : No 4 : [-1 -1 1] : Yes; 2 on one side, 1 on the other 6 : [-1 0 0] : Yes; one edge fully on plane 8 : [-1 0 1] : Yes; one vertex on plane, 2 on different sides 12 : [-1 1 1] : Yes; 2 on one side, 1 on the other 14 : [0 0 0] : No (on plane fully) 16 : [0 0 1] : Yes; one edge fully on plane 20 : [0 1 1] : No 28 : [1 1 1] : No Parameters ---------- signs: (n,3) int, all values are -1,0, or 1 Each row contains the dot product of all three vertices in a face with respect to the plane Returns --------- basic: (n,) bool, which faces are in the basic intersection case one_vertex: (n,) bool, which faces are in the one vertex case one_edge: (n,) bool, which faces are in the one edge case ] variable[signs_sorted] assign[=] call[name[np].sort, parameter[name[signs]]] variable[coded] assign[=] binary_operation[call[name[np].zeros, parameter[call[name[len], parameter[name[signs_sorted]]]]] + constant[14]] for taget[name[i]] in 
starred[call[name[range], parameter[constant[3]]]] begin[:] <ast.AugAssign object at 0x7da2049627d0> variable[key] assign[=] call[name[np].zeros, parameter[constant[29]]] call[name[key]][constant[16]] assign[=] constant[True] variable[one_edge] assign[=] call[name[key]][name[coded]] call[name[key]][<ast.Slice object at 0x7da2049603d0>] assign[=] constant[False] call[name[key]][constant[8]] assign[=] constant[True] variable[one_vertex] assign[=] call[name[key]][name[coded]] call[name[key]][<ast.Slice object at 0x7da204961c30>] assign[=] constant[False] call[name[key]][list[[<ast.Constant object at 0x7da204960790>, <ast.Constant object at 0x7da204962f80>]]] assign[=] constant[True] variable[basic] assign[=] call[name[key]][name[coded]] return[tuple[[<ast.Name object at 0x7da204961930>, <ast.Name object at 0x7da2049632e0>, <ast.Name object at 0x7da204960310>]]] def function[handle_on_vertex, parameter[signs, faces, vertices]]: variable[vertex_plane] assign[=] call[name[faces]][compare[name[signs] equal[==] constant[0]]] variable[edge_thru] assign[=] call[call[name[faces]][compare[name[signs] not_equal[!=] constant[0]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da204960d90>, <ast.Constant object at 0x7da204961600>]]]] <ast.Tuple object at 0x7da204963190> assign[=] call[name[plane_lines], parameter[name[plane_origin], name[plane_normal], call[name[vertices]][name[edge_thru].T]]] variable[lines] assign[=] call[call[name[np].column_stack, parameter[tuple[[<ast.Subscript object at 0x7da2049621d0>, <ast.Name object at 0x7da204960460>]]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da204960100>, <ast.Constant object at 0x7da204963070>, <ast.Constant object at 0x7da2049615a0>]]]] return[name[lines]] def function[handle_on_edge, parameter[signs, faces, vertices]]: variable[edges] assign[=] call[call[name[faces]][compare[name[signs] equal[==] constant[0]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da204961720>, <ast.Constant object at 
0x7da2049616c0>]]]] variable[points] assign[=] call[name[vertices]][name[edges]] return[name[points]] def function[handle_basic, parameter[signs, faces, vertices]]: variable[unique_element] assign[=] call[name[grouping].unique_value_in_row, parameter[name[signs]]] variable[edges] assign[=] call[call[name[np].column_stack, parameter[tuple[[<ast.Subscript object at 0x7da2044c1c00>, <ast.Subscript object at 0x7da2044c1540>, <ast.Subscript object at 0x7da2044c0040>, <ast.Subscript object at 0x7da2044c3100>]]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da2044c3d30>, <ast.Constant object at 0x7da2044c3460>]]]] <ast.Tuple object at 0x7da2044c23b0> assign[=] call[name[plane_lines], parameter[name[plane_origin], name[plane_normal], call[name[vertices]][name[edges].T]]] assert[call[name[valid].all, parameter[]]] return[call[name[intersections].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da2044c03d0>, <ast.Constant object at 0x7da2044c18a0>, <ast.Constant object at 0x7da2044c1180>]]]]] variable[plane_normal] assign[=] call[name[np].asanyarray, parameter[name[plane_normal]]] variable[plane_origin] assign[=] call[name[np].asanyarray, parameter[name[plane_origin]]] if <ast.BoolOp object at 0x7da2044c3520> begin[:] <ast.Raise object at 0x7da2044c26e0> if compare[name[cached_dots] is_not constant[None]] begin[:] variable[dots] assign[=] name[cached_dots] variable[signs] assign[=] call[name[np].zeros, parameter[name[mesh].faces.shape]] call[name[signs]][compare[name[dots] less[<] <ast.UnaryOp object at 0x7da2044c3d90>]] assign[=] <ast.UnaryOp object at 0x7da2044c1810> call[name[signs]][compare[name[dots] greater[>] name[tol].merge]] assign[=] constant[1] variable[cases] assign[=] call[name[triangle_cases], parameter[name[signs]]] variable[handlers] assign[=] tuple[[<ast.Name object at 0x7da207f9a110>, <ast.Name object at 0x7da207f98bb0>, <ast.Name object at 0x7da207f98a90>]] variable[lines] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 
0x7da207f9b010>]] call[name[log].debug, parameter[constant[mesh_cross_section found %i intersections], call[name[len], parameter[name[lines]]]]] if name[return_faces] begin[:] variable[face_index] assign[=] call[name[np].hstack, parameter[<ast.ListComp object at 0x7da207f99210>]] return[tuple[[<ast.Name object at 0x7da207f9ab60>, <ast.Name object at 0x7da207f98850>]]] return[name[lines]]
keyword[def] identifier[mesh_plane] ( identifier[mesh] , identifier[plane_normal] , identifier[plane_origin] , identifier[return_faces] = keyword[False] , identifier[cached_dots] = keyword[None] ): literal[string] keyword[def] identifier[triangle_cases] ( identifier[signs] ): literal[string] identifier[signs_sorted] = identifier[np] . identifier[sort] ( identifier[signs] , identifier[axis] = literal[int] ) identifier[coded] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[signs_sorted] ), identifier[dtype] = identifier[np] . identifier[int8] )+ literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[coded] += identifier[signs_sorted] [:, identifier[i] ]<< literal[int] - identifier[i] identifier[key] = identifier[np] . identifier[zeros] ( literal[int] , identifier[dtype] = identifier[np] . identifier[bool] ) identifier[key] [ literal[int] ]= keyword[True] identifier[one_edge] = identifier[key] [ identifier[coded] ] identifier[key] [:]= keyword[False] identifier[key] [ literal[int] ]= keyword[True] identifier[one_vertex] = identifier[key] [ identifier[coded] ] identifier[key] [:]= keyword[False] identifier[key] [[ literal[int] , literal[int] ]]= keyword[True] identifier[basic] = identifier[key] [ identifier[coded] ] keyword[return] identifier[basic] , identifier[one_vertex] , identifier[one_edge] keyword[def] identifier[handle_on_vertex] ( identifier[signs] , identifier[faces] , identifier[vertices] ): identifier[vertex_plane] = identifier[faces] [ identifier[signs] == literal[int] ] identifier[edge_thru] = identifier[faces] [ identifier[signs] != literal[int] ]. identifier[reshape] ((- literal[int] , literal[int] )) identifier[point_intersect] , identifier[valid] = identifier[plane_lines] ( identifier[plane_origin] , identifier[plane_normal] , identifier[vertices] [ identifier[edge_thru] . identifier[T] ], identifier[line_segments] = keyword[False] ) identifier[lines] = identifier[np] . 
identifier[column_stack] (( identifier[vertices] [ identifier[vertex_plane] [ identifier[valid] ]], identifier[point_intersect] )). identifier[reshape] ((- literal[int] , literal[int] , literal[int] )) keyword[return] identifier[lines] keyword[def] identifier[handle_on_edge] ( identifier[signs] , identifier[faces] , identifier[vertices] ): identifier[edges] = identifier[faces] [ identifier[signs] == literal[int] ]. identifier[reshape] ((- literal[int] , literal[int] )) identifier[points] = identifier[vertices] [ identifier[edges] ] keyword[return] identifier[points] keyword[def] identifier[handle_basic] ( identifier[signs] , identifier[faces] , identifier[vertices] ): identifier[unique_element] = identifier[grouping] . identifier[unique_value_in_row] ( identifier[signs] , identifier[unique] =[- literal[int] , literal[int] ]) identifier[edges] = identifier[np] . identifier[column_stack] ( ( identifier[faces] [ identifier[unique_element] ], identifier[faces] [ identifier[np] . identifier[roll] ( identifier[unique_element] , literal[int] , identifier[axis] = literal[int] )], identifier[faces] [ identifier[unique_element] ], identifier[faces] [ identifier[np] . identifier[roll] ( identifier[unique_element] , literal[int] , identifier[axis] = literal[int] )])). identifier[reshape] ( (- literal[int] , literal[int] )) identifier[intersections] , identifier[valid] = identifier[plane_lines] ( identifier[plane_origin] , identifier[plane_normal] , identifier[vertices] [ identifier[edges] . identifier[T] ], identifier[line_segments] = keyword[False] ) keyword[assert] identifier[valid] . identifier[all] () keyword[return] identifier[intersections] . identifier[reshape] ((- literal[int] , literal[int] , literal[int] )) identifier[plane_normal] = identifier[np] . identifier[asanyarray] ( identifier[plane_normal] , identifier[dtype] = identifier[np] . identifier[float64] ) identifier[plane_origin] = identifier[np] . 
identifier[asanyarray] ( identifier[plane_origin] , identifier[dtype] = identifier[np] . identifier[float64] ) keyword[if] identifier[plane_origin] . identifier[shape] !=( literal[int] ,) keyword[or] identifier[plane_normal] . identifier[shape] !=( literal[int] ,): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[cached_dots] keyword[is] keyword[not] keyword[None] : identifier[dots] = identifier[cached_dots] keyword[else] : identifier[dots] = identifier[np] . identifier[dot] ( identifier[plane_normal] , ( identifier[mesh] . identifier[vertices] - identifier[plane_origin] ). identifier[T] )[ identifier[mesh] . identifier[faces] ] identifier[signs] = identifier[np] . identifier[zeros] ( identifier[mesh] . identifier[faces] . identifier[shape] , identifier[dtype] = identifier[np] . identifier[int8] ) identifier[signs] [ identifier[dots] <- identifier[tol] . identifier[merge] ]=- literal[int] identifier[signs] [ identifier[dots] > identifier[tol] . identifier[merge] ]= literal[int] identifier[cases] = identifier[triangle_cases] ( identifier[signs] ) identifier[handlers] =( identifier[handle_basic] , identifier[handle_on_vertex] , identifier[handle_on_edge] ) identifier[lines] = identifier[np] . identifier[vstack] ([ identifier[h] ( identifier[signs] [ identifier[c] ], identifier[mesh] . identifier[faces] [ identifier[c] ], identifier[mesh] . identifier[vertices] ) keyword[for] identifier[c] , identifier[h] keyword[in] identifier[zip] ( identifier[cases] , identifier[handlers] )]) identifier[log] . identifier[debug] ( literal[string] , identifier[len] ( identifier[lines] )) keyword[if] identifier[return_faces] : identifier[face_index] = identifier[np] . identifier[hstack] ([ identifier[np] . identifier[nonzero] ( identifier[c] )[ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[cases] ]) keyword[return] identifier[lines] , identifier[face_index] keyword[return] identifier[lines]
def mesh_plane(mesh, plane_normal, plane_origin, return_faces=False, cached_dots=None): """ Find a the intersections between a mesh and a plane, returning a set of line segments on that plane. Parameters --------- mesh : Trimesh object Source mesh to slice plane_normal : (3,) float Normal vector of plane to intersect with mesh plane_origin: (3,) float Point on plane to intersect with mesh return_faces: bool If True return face index each line is from cached_dots : (n, 3) float If an external function has stored dot products pass them here to avoid recomputing Returns ---------- lines : (m, 2, 3) float List of 3D line segments in space face_index : (m,) int Index of mesh.faces for each line Only returned if return_faces was True """ def triangle_cases(signs): """ Figure out which faces correspond to which intersection case from the signs of the dot product of each vertex. Does this by bitbang each row of signs into an 8 bit integer. code : signs : intersects 0 : [-1 -1 -1] : No 2 : [-1 -1 0] : No 4 : [-1 -1 1] : Yes; 2 on one side, 1 on the other 6 : [-1 0 0] : Yes; one edge fully on plane 8 : [-1 0 1] : Yes; one vertex on plane, 2 on different sides 12 : [-1 1 1] : Yes; 2 on one side, 1 on the other 14 : [0 0 0] : No (on plane fully) 16 : [0 0 1] : Yes; one edge fully on plane 20 : [0 1 1] : No 28 : [1 1 1] : No Parameters ---------- signs: (n,3) int, all values are -1,0, or 1 Each row contains the dot product of all three vertices in a face with respect to the plane Returns --------- basic: (n,) bool, which faces are in the basic intersection case one_vertex: (n,) bool, which faces are in the one vertex case one_edge: (n,) bool, which faces are in the one edge case """ signs_sorted = np.sort(signs, axis=1) coded = np.zeros(len(signs_sorted), dtype=np.int8) + 14 for i in range(3): coded += signs_sorted[:, i] << 3 - i # depends on [control=['for'], data=['i']] # one edge fully on the plane # note that we are only accepting *one* of the on- edge cases, # where the 
other vertex has a positive dot product (16) instead # of both on- edge cases ([6,16]) # this is so that for regions that are co-planar with the the section plane # we don't end up with an invalid boundary key = np.zeros(29, dtype=np.bool) key[16] = True one_edge = key[coded] # one vertex on plane, other two on different sides key[:] = False key[8] = True one_vertex = key[coded] # one vertex on one side of the plane, two on the other key[:] = False key[[4, 12]] = True basic = key[coded] return (basic, one_vertex, one_edge) def handle_on_vertex(signs, faces, vertices): # case where one vertex is on plane, two are on different sides vertex_plane = faces[signs == 0] edge_thru = faces[signs != 0].reshape((-1, 2)) (point_intersect, valid) = plane_lines(plane_origin, plane_normal, vertices[edge_thru.T], line_segments=False) lines = np.column_stack((vertices[vertex_plane[valid]], point_intersect)).reshape((-1, 2, 3)) return lines def handle_on_edge(signs, faces, vertices): # case where two vertices are on the plane and one is off edges = faces[signs == 0].reshape((-1, 2)) points = vertices[edges] return points def handle_basic(signs, faces, vertices): # case where one vertex is on one side and two are on the other unique_element = grouping.unique_value_in_row(signs, unique=[-1, 1]) edges = np.column_stack((faces[unique_element], faces[np.roll(unique_element, 1, axis=1)], faces[unique_element], faces[np.roll(unique_element, 2, axis=1)])).reshape((-1, 2)) (intersections, valid) = plane_lines(plane_origin, plane_normal, vertices[edges.T], line_segments=False) # since the data has been pre- culled, any invalid intersections at all # means the culling was done incorrectly and thus things are # mega-fucked assert valid.all() return intersections.reshape((-1, 2, 3)) # check input plane plane_normal = np.asanyarray(plane_normal, dtype=np.float64) plane_origin = np.asanyarray(plane_origin, dtype=np.float64) if plane_origin.shape != (3,) or plane_normal.shape != (3,): raise 
ValueError('Plane origin and normal must be (3,)!') # depends on [control=['if'], data=[]] if cached_dots is not None: dots = cached_dots # depends on [control=['if'], data=['cached_dots']] else: # dot product of each vertex with the plane normal indexed by face # so for each face the dot product of each vertex is a row # shape is the same as mesh.faces (n,3) dots = np.dot(plane_normal, (mesh.vertices - plane_origin).T)[mesh.faces] # sign of the dot product is -1, 0, or 1 # shape is the same as mesh.faces (n,3) signs = np.zeros(mesh.faces.shape, dtype=np.int8) signs[dots < -tol.merge] = -1 signs[dots > tol.merge] = 1 # figure out which triangles are in the cross section, # and which of the three intersection cases they are in cases = triangle_cases(signs) # handlers for each case handlers = (handle_basic, handle_on_vertex, handle_on_edge) # the (m, 2, 3) line segments lines = np.vstack([h(signs[c], mesh.faces[c], mesh.vertices) for (c, h) in zip(cases, handlers)]) log.debug('mesh_cross_section found %i intersections', len(lines)) if return_faces: face_index = np.hstack([np.nonzero(c)[0] for c in cases]) return (lines, face_index) # depends on [control=['if'], data=[]] return lines
def cast_primitive(state, value, to_type):
    """
    Cast the value of primitive types.

    :param state:   Program state used to concretize symbolic floats.
    :param value:   Bitvector (or FP value) storing the primitive value.
    :param to_type: Name of the targeted type.
    :return: Resized value.
    """
    if to_type in ['float', 'double']:
        if value.symbolic:
            # TODO extend support for floating point types
            # Fixed: the two adjacent literals previously concatenated without
            # a space ("arguments.Value gets concretized.").
            l.warning('No support for symbolic floating-point arguments. '
                      'Value gets concretized.')
            value = float(state.solver.eval(value))
        sort = FSORT_FLOAT if to_type == 'float' else FSORT_DOUBLE
        return FPV(value, sort)

    elif to_type == 'int' and isinstance(value, FP):
        # TODO fix fpToIEEEBV in claripty
        l.warning('Converting FP to BV might provide incorrect results.')
        # keep only the upper 32 bits of the 64-bit IEEE representation
        return fpToIEEEBV(value)[63:32]

    elif to_type == 'long' and isinstance(value, FP):
        # TODO fix fpToIEEEBV in claripty
        l.warning('Converting FP to BV might provide incorrect results.')
        return fpToIEEEBV(value)

    else:
        # lookup the type size and extract value
        value_size = ArchSoot.sizeof[to_type]
        value_extracted = value.reversed.get_bytes(index=0, size=value_size//8).reversed
        # determine size of Soot bitvector and resize bitvector
        # Note: smaller types than int's are stored in a 32-bit BV
        value_soot_size = value_size if value_size >= 32 else 32
        if to_type in ['char', 'boolean']:
            # unsigned extend
            return value_extracted.zero_extend(value_soot_size-value_extracted.size())
        # signed extend
        return value_extracted.sign_extend(value_soot_size-value_extracted.size())
def function[cast_primitive, parameter[state, value, to_type]]: constant[ Cast the value of primtive types. :param value: Bitvector storing the primitive value. :param to_type: Name of the targeted type. :return: Resized value. ] if compare[name[to_type] in list[[<ast.Constant object at 0x7da18f00eb00>, <ast.Constant object at 0x7da18f00fd30>]]] begin[:] if name[value].symbolic begin[:] call[name[l].warning, parameter[constant[No support for symbolic floating-point arguments.Value gets concretized.]]] variable[value] assign[=] call[name[float], parameter[call[name[state].solver.eval, parameter[name[value]]]]] variable[sort] assign[=] <ast.IfExp object at 0x7da18f00ff70> return[call[name[FPV], parameter[name[value], name[sort]]]]
keyword[def] identifier[cast_primitive] ( identifier[state] , identifier[value] , identifier[to_type] ): literal[string] keyword[if] identifier[to_type] keyword[in] [ literal[string] , literal[string] ]: keyword[if] identifier[value] . identifier[symbolic] : identifier[l] . identifier[warning] ( literal[string] literal[string] ) identifier[value] = identifier[float] ( identifier[state] . identifier[solver] . identifier[eval] ( identifier[value] )) identifier[sort] = identifier[FSORT_FLOAT] keyword[if] identifier[to_type] == literal[string] keyword[else] identifier[FSORT_DOUBLE] keyword[return] identifier[FPV] ( identifier[value] , identifier[sort] ) keyword[elif] identifier[to_type] == literal[string] keyword[and] identifier[isinstance] ( identifier[value] , identifier[FP] ): identifier[l] . identifier[warning] ( literal[string] ) keyword[return] identifier[fpToIEEEBV] ( identifier[value] )[ literal[int] : literal[int] ] keyword[elif] identifier[to_type] == literal[string] keyword[and] identifier[isinstance] ( identifier[value] , identifier[FP] ): identifier[l] . identifier[warning] ( literal[string] ) keyword[return] identifier[fpToIEEEBV] ( identifier[value] ) keyword[else] : identifier[value_size] = identifier[ArchSoot] . identifier[sizeof] [ identifier[to_type] ] identifier[value_extracted] = identifier[value] . identifier[reversed] . identifier[get_bytes] ( identifier[index] = literal[int] , identifier[size] = identifier[value_size] // literal[int] ). identifier[reversed] identifier[value_soot_size] = identifier[value_size] keyword[if] identifier[value_size] >= literal[int] keyword[else] literal[int] keyword[if] identifier[to_type] keyword[in] [ literal[string] , literal[string] ]: keyword[return] identifier[value_extracted] . identifier[zero_extend] ( identifier[value_soot_size] - identifier[value_extracted] . identifier[size] ()) keyword[return] identifier[value_extracted] . 
identifier[sign_extend] ( identifier[value_soot_size] - identifier[value_extracted] . identifier[size] ())
def cast_primitive(state, value, to_type): """ Cast the value of primtive types. :param value: Bitvector storing the primitive value. :param to_type: Name of the targeted type. :return: Resized value. """ if to_type in ['float', 'double']: if value.symbolic: # TODO extend support for floating point types l.warning('No support for symbolic floating-point arguments.Value gets concretized.') # depends on [control=['if'], data=[]] value = float(state.solver.eval(value)) sort = FSORT_FLOAT if to_type == 'float' else FSORT_DOUBLE return FPV(value, sort) # depends on [control=['if'], data=['to_type']] elif to_type == 'int' and isinstance(value, FP): # TODO fix fpToIEEEBV in claripty l.warning('Converting FP to BV might provide incorrect results.') return fpToIEEEBV(value)[63:32] # depends on [control=['if'], data=[]] elif to_type == 'long' and isinstance(value, FP): # TODO fix fpToIEEEBV in claripty l.warning('Converting FP to BV might provide incorrect results.') return fpToIEEEBV(value) # depends on [control=['if'], data=[]] else: # lookup the type size and extract value value_size = ArchSoot.sizeof[to_type] value_extracted = value.reversed.get_bytes(index=0, size=value_size // 8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV value_soot_size = value_size if value_size >= 32 else 32 if to_type in ['char', 'boolean']: # unsigned extend return value_extracted.zero_extend(value_soot_size - value_extracted.size()) # depends on [control=['if'], data=[]] # signed extend return value_extracted.sign_extend(value_soot_size - value_extracted.size())
def read_lines(self, max_lines=None):
    """Reads the content of this object as text, and return a list of lines up to some max.

    Args:
      max_lines: max number of lines to return. If None, return all lines.
    Returns:
      The text content of the object as a list of lines.
    Raises:
      Exception if there was an error requesting the object's content.
    """
    if max_lines is None:
        return self.read_stream().split('\n')

    max_to_read = self.metadata.size
    # start with a guess of ~100 bytes per requested line, capped at the object size
    bytes_to_read = min(100 * max_lines, max_to_read)
    while True:
        content = self.read_stream(byte_count=bytes_to_read)
        lines = content.split('\n')
        if len(lines) > max_lines or bytes_to_read >= max_to_read:
            break
        # try 10 times more bytes or max
        bytes_to_read = min(bytes_to_read * 10, max_to_read)

    # A truncated read may end in the middle of a line, so drop the partial
    # last element.  When the whole object was read the final line is complete
    # and must be kept (previously it was deleted unconditionally, losing the
    # last line of objects that do not end with a newline).
    if bytes_to_read < max_to_read:
        del lines[-1]
    return lines[0:max_lines]
def function[read_lines, parameter[self, max_lines]]: constant[Reads the content of this object as text, and return a list of lines up to some max. Args: max_lines: max number of lines to return. If None, return all lines. Returns: The text content of the object as a list of lines. Raises: Exception if there was an error requesting the object's content. ] if compare[name[max_lines] is constant[None]] begin[:] return[call[call[name[self].read_stream, parameter[]].split, parameter[constant[ ]]]] variable[max_to_read] assign[=] name[self].metadata.size variable[bytes_to_read] assign[=] call[name[min], parameter[binary_operation[constant[100] * name[max_lines]], name[self].metadata.size]] while constant[True] begin[:] variable[content] assign[=] call[name[self].read_stream, parameter[]] variable[lines] assign[=] call[name[content].split, parameter[constant[ ]]] if <ast.BoolOp object at 0x7da18c4cfdc0> begin[:] break variable[bytes_to_read] assign[=] call[name[min], parameter[binary_operation[name[bytes_to_read] * constant[10]], name[max_to_read]]] <ast.Delete object at 0x7da18c4cc6a0> return[call[name[lines]][<ast.Slice object at 0x7da18c4cc4c0>]]
keyword[def] identifier[read_lines] ( identifier[self] , identifier[max_lines] = keyword[None] ): literal[string] keyword[if] identifier[max_lines] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[read_stream] (). identifier[split] ( literal[string] ) identifier[max_to_read] = identifier[self] . identifier[metadata] . identifier[size] identifier[bytes_to_read] = identifier[min] ( literal[int] * identifier[max_lines] , identifier[self] . identifier[metadata] . identifier[size] ) keyword[while] keyword[True] : identifier[content] = identifier[self] . identifier[read_stream] ( identifier[byte_count] = identifier[bytes_to_read] ) identifier[lines] = identifier[content] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[lines] )> identifier[max_lines] keyword[or] identifier[bytes_to_read] >= identifier[max_to_read] : keyword[break] identifier[bytes_to_read] = identifier[min] ( identifier[bytes_to_read] * literal[int] , identifier[max_to_read] ) keyword[del] identifier[lines] [- literal[int] ] keyword[return] identifier[lines] [ literal[int] : identifier[max_lines] ]
def read_lines(self, max_lines=None): """Reads the content of this object as text, and return a list of lines up to some max. Args: max_lines: max number of lines to return. If None, return all lines. Returns: The text content of the object as a list of lines. Raises: Exception if there was an error requesting the object's content. """ if max_lines is None: return self.read_stream().split('\n') # depends on [control=['if'], data=[]] max_to_read = self.metadata.size bytes_to_read = min(100 * max_lines, self.metadata.size) while True: content = self.read_stream(byte_count=bytes_to_read) lines = content.split('\n') if len(lines) > max_lines or bytes_to_read >= max_to_read: break # depends on [control=['if'], data=[]] # try 10 times more bytes or max bytes_to_read = min(bytes_to_read * 10, max_to_read) # depends on [control=['while'], data=[]] # remove the partial line at last del lines[-1] return lines[0:max_lines]
def _get_update_method(self): """Return the HTTP method to use. Returns: object: http_put (default) or http_post """ if getattr(self, '_update_uses_post', False): http_method = self.gitlab.http_post else: http_method = self.gitlab.http_put return http_method
def function[_get_update_method, parameter[self]]: constant[Return the HTTP method to use. Returns: object: http_put (default) or http_post ] if call[name[getattr], parameter[name[self], constant[_update_uses_post], constant[False]]] begin[:] variable[http_method] assign[=] name[self].gitlab.http_post return[name[http_method]]
keyword[def] identifier[_get_update_method] ( identifier[self] ): literal[string] keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[False] ): identifier[http_method] = identifier[self] . identifier[gitlab] . identifier[http_post] keyword[else] : identifier[http_method] = identifier[self] . identifier[gitlab] . identifier[http_put] keyword[return] identifier[http_method]
def _get_update_method(self): """Return the HTTP method to use. Returns: object: http_put (default) or http_post """ if getattr(self, '_update_uses_post', False): http_method = self.gitlab.http_post # depends on [control=['if'], data=[]] else: http_method = self.gitlab.http_put return http_method
def update_panels(context, mongodb, username, password, authdb, host, port, loglevel, config):
    """scout: manage interactions with a scout instance."""
    coloredlogs.install(level=loglevel)
    LOG.info("Running scout version %s", __version__)
    LOG.debug("Debug logging enabled.")

    mongo_config = {}
    cli_config = {}
    if config:
        LOG.debug("Use config file %s", config)
        with open(config, 'r') as in_handle:
            # safe_load: a user-supplied config file must never be able to
            # instantiate arbitrary python objects (yaml.load without a
            # Loader is unsafe and deprecated)
            cli_config = yaml.safe_load(in_handle)

    # explicit CLI options win over the config file, which wins over defaults
    mongo_config['mongodb'] = (mongodb or cli_config.get('mongodb') or 'scout')
    mongo_config['host'] = (host or cli_config.get('host') or 'localhost')
    mongo_config['port'] = (port or cli_config.get('port') or 27017)
    mongo_config['username'] = username or cli_config.get('username')
    mongo_config['password'] = password or cli_config.get('password')
    mongo_config['authdb'] = authdb or cli_config.get('authdb') or mongo_config['mongodb']
    mongo_config['omim_api_key'] = cli_config.get('omim_api_key')

    LOG.info("Setting database name to %s", mongo_config['mongodb'])
    LOG.debug("Setting host to %s", mongo_config['host'])
    LOG.debug("Setting port to %s", mongo_config['port'])

    valid_connection = check_connection(
        host=mongo_config['host'],
        port=mongo_config['port'],
        username=mongo_config['username'],
        password=mongo_config['password'],
        authdb=mongo_config['authdb'],
    )

    LOG.info("Test if mongod is running")
    if not valid_connection:
        LOG.warning("Connection could not be established")
        context.abort()

    try:
        client = get_connection(**mongo_config)
    except ConnectionFailure:
        context.abort()

    database = client[mongo_config['mongodb']]
    LOG.info("Setting up a mongo adapter")
    mongo_config['client'] = client
    adapter = MongoAdapter(database)

    requests = []
    for case_obj in adapter.case_collection.find():
        # map each hgnc id to the set of gene panels it belongs to for this case
        gene_to_panels = adapter.gene_to_panels(case_obj)
        variants = adapter.variant_collection.find({
            'case_id': case_obj['_id'],
            'category': 'snv',
            'variant_type': 'clinical',
        })
        for variant_obj in variants:
            # collect every panel that covers any gene of this variant
            panel_names = set()
            for hgnc_id in variant_obj['hgnc_ids']:
                gene_panels = gene_to_panels.get(hgnc_id, set())
                panel_names = panel_names.union(gene_panels)
            if panel_names:
                operation = pymongo.UpdateOne(
                    {'_id': variant_obj['_id']},
                    {
                        '$set': {
                            'panels': list(panel_names)
                        }
                    })
                requests.append(operation)
            # flush in batches so the pending request list stays bounded
            if len(requests) > 5000:
                adapter.variant_collection.bulk_write(requests, ordered=False)
                requests = []
        # flush whatever is left for this case
        if requests:
            adapter.variant_collection.bulk_write(requests, ordered=False)
            requests = []
def function[update_panels, parameter[context, mongodb, username, password, authdb, host, port, loglevel, config]]: constant[scout: manage interactions with a scout instance.] call[name[coloredlogs].install, parameter[]] call[name[LOG].info, parameter[constant[Running scout version %s], name[__version__]]] call[name[LOG].debug, parameter[constant[Debug logging enabled.]]] variable[mongo_config] assign[=] dictionary[[], []] variable[cli_config] assign[=] dictionary[[], []] if name[config] begin[:] call[name[LOG].debug, parameter[constant[Use config file %s], name[config]]] with call[name[open], parameter[name[config], constant[r]]] begin[:] variable[cli_config] assign[=] call[name[yaml].load, parameter[name[in_handle]]] call[name[mongo_config]][constant[mongodb]] assign[=] <ast.BoolOp object at 0x7da18c4cee00> call[name[mongo_config]][constant[host]] assign[=] <ast.BoolOp object at 0x7da18c4cc820> call[name[mongo_config]][constant[port]] assign[=] <ast.BoolOp object at 0x7da18c4ce1a0> call[name[mongo_config]][constant[username]] assign[=] <ast.BoolOp object at 0x7da18c4ced40> call[name[mongo_config]][constant[password]] assign[=] <ast.BoolOp object at 0x7da18c4ce9b0> call[name[mongo_config]][constant[authdb]] assign[=] <ast.BoolOp object at 0x7da18c4cc670> call[name[mongo_config]][constant[omim_api_key]] assign[=] call[name[cli_config].get, parameter[constant[omim_api_key]]] call[name[LOG].info, parameter[constant[Setting database name to %s], call[name[mongo_config]][constant[mongodb]]]] call[name[LOG].debug, parameter[constant[Setting host to %s], call[name[mongo_config]][constant[host]]]] call[name[LOG].debug, parameter[constant[Setting port to %s], call[name[mongo_config]][constant[port]]]] variable[valid_connection] assign[=] call[name[check_connection], parameter[]] call[name[LOG].info, parameter[constant[Test if mongod is running]]] if <ast.UnaryOp object at 0x7da1b02b8700> begin[:] call[name[LOG].warning, parameter[constant[Connection could not be 
established]]] call[name[context].abort, parameter[]] <ast.Try object at 0x7da1b02bbc70> variable[database] assign[=] call[name[client]][call[name[mongo_config]][constant[mongodb]]] call[name[LOG].info, parameter[constant[Setting up a mongo adapter]]] call[name[mongo_config]][constant[client]] assign[=] name[client] variable[adapter] assign[=] call[name[MongoAdapter], parameter[name[database]]] variable[requests] assign[=] list[[]] for taget[name[case_obj]] in starred[call[name[adapter].case_collection.find, parameter[]]] begin[:] variable[gene_to_panels] assign[=] call[name[adapter].gene_to_panels, parameter[name[case_obj]]] variable[variants] assign[=] call[name[adapter].variant_collection.find, parameter[dictionary[[<ast.Constant object at 0x7da1b02b8a30>, <ast.Constant object at 0x7da1b02b83a0>, <ast.Constant object at 0x7da1b02b9300>], [<ast.Subscript object at 0x7da1b02b91b0>, <ast.Constant object at 0x7da1b02b8b50>, <ast.Constant object at 0x7da1b02b9b70>]]]] for taget[name[variant_obj]] in starred[name[variants]] begin[:] variable[panel_names] assign[=] call[name[set], parameter[]] for taget[name[hgnc_id]] in starred[call[name[variant_obj]][constant[hgnc_ids]]] begin[:] variable[gene_panels] assign[=] call[name[gene_to_panels].get, parameter[name[hgnc_id], call[name[set], parameter[]]]] variable[panel_names] assign[=] call[name[panel_names].union, parameter[name[gene_panels]]] if name[panel_names] begin[:] variable[operation] assign[=] call[name[pymongo].UpdateOne, parameter[dictionary[[<ast.Constant object at 0x7da20e9b1570>], [<ast.Subscript object at 0x7da20e9b0eb0>]], dictionary[[<ast.Constant object at 0x7da20e9b23b0>], [<ast.Dict object at 0x7da20e9b0970>]]]] call[name[requests].append, parameter[name[operation]]] if compare[call[name[len], parameter[name[requests]]] greater[>] constant[5000]] begin[:] call[name[adapter].variant_collection.bulk_write, parameter[name[requests]]] variable[requests] assign[=] list[[]] if name[requests] begin[:] 
call[name[adapter].variant_collection.bulk_write, parameter[name[requests]]] variable[requests] assign[=] list[[]]
keyword[def] identifier[update_panels] ( identifier[context] , identifier[mongodb] , identifier[username] , identifier[password] , identifier[authdb] , identifier[host] , identifier[port] , identifier[loglevel] , identifier[config] ): literal[string] identifier[coloredlogs] . identifier[install] ( identifier[level] = identifier[loglevel] ) identifier[LOG] . identifier[info] ( literal[string] , identifier[__version__] ) identifier[LOG] . identifier[debug] ( literal[string] ) identifier[mongo_config] ={} identifier[cli_config] ={} keyword[if] identifier[config] : identifier[LOG] . identifier[debug] ( literal[string] , identifier[config] ) keyword[with] identifier[open] ( identifier[config] , literal[string] ) keyword[as] identifier[in_handle] : identifier[cli_config] = identifier[yaml] . identifier[load] ( identifier[in_handle] ) identifier[mongo_config] [ literal[string] ]=( identifier[mongodb] keyword[or] identifier[cli_config] . identifier[get] ( literal[string] ) keyword[or] literal[string] ) identifier[mongo_config] [ literal[string] ]=( identifier[host] keyword[or] identifier[cli_config] . identifier[get] ( literal[string] ) keyword[or] literal[string] ) identifier[mongo_config] [ literal[string] ]=( identifier[port] keyword[or] identifier[cli_config] . identifier[get] ( literal[string] ) keyword[or] literal[int] ) identifier[mongo_config] [ literal[string] ]= identifier[username] keyword[or] identifier[cli_config] . identifier[get] ( literal[string] ) identifier[mongo_config] [ literal[string] ]= identifier[password] keyword[or] identifier[cli_config] . identifier[get] ( literal[string] ) identifier[mongo_config] [ literal[string] ]= identifier[authdb] keyword[or] identifier[cli_config] . identifier[get] ( literal[string] ) keyword[or] identifier[mongo_config] [ literal[string] ] identifier[mongo_config] [ literal[string] ]= identifier[cli_config] . identifier[get] ( literal[string] ) identifier[LOG] . 
identifier[info] ( literal[string] , identifier[mongo_config] [ literal[string] ]) identifier[LOG] . identifier[debug] ( literal[string] , identifier[mongo_config] [ literal[string] ]) identifier[LOG] . identifier[debug] ( literal[string] , identifier[mongo_config] [ literal[string] ]) identifier[valid_connection] = identifier[check_connection] ( identifier[host] = identifier[mongo_config] [ literal[string] ], identifier[port] = identifier[mongo_config] [ literal[string] ], identifier[username] = identifier[mongo_config] [ literal[string] ], identifier[password] = identifier[mongo_config] [ literal[string] ], identifier[authdb] = identifier[mongo_config] [ literal[string] ], ) identifier[LOG] . identifier[info] ( literal[string] ) keyword[if] keyword[not] identifier[valid_connection] : identifier[LOG] . identifier[warning] ( literal[string] ) identifier[context] . identifier[abort] () keyword[try] : identifier[client] = identifier[get_connection] (** identifier[mongo_config] ) keyword[except] identifier[ConnectionFailure] : identifier[context] . identifier[abort] () identifier[database] = identifier[client] [ identifier[mongo_config] [ literal[string] ]] identifier[LOG] . identifier[info] ( literal[string] ) identifier[mongo_config] [ literal[string] ]= identifier[client] identifier[adapter] = identifier[MongoAdapter] ( identifier[database] ) identifier[requests] =[] keyword[for] identifier[case_obj] keyword[in] identifier[adapter] . identifier[case_collection] . identifier[find] (): identifier[gene_to_panels] = identifier[adapter] . identifier[gene_to_panels] ( identifier[case_obj] ) identifier[variants] = identifier[adapter] . identifier[variant_collection] . 
identifier[find] ({ literal[string] : identifier[case_obj] [ literal[string] ], literal[string] : literal[string] , literal[string] : literal[string] , }) keyword[for] identifier[variant_obj] keyword[in] identifier[variants] : identifier[panel_names] = identifier[set] () keyword[for] identifier[hgnc_id] keyword[in] identifier[variant_obj] [ literal[string] ]: identifier[gene_panels] = identifier[gene_to_panels] . identifier[get] ( identifier[hgnc_id] , identifier[set] ()) identifier[panel_names] = identifier[panel_names] . identifier[union] ( identifier[gene_panels] ) keyword[if] identifier[panel_names] : identifier[operation] = identifier[pymongo] . identifier[UpdateOne] ( { literal[string] : identifier[variant_obj] [ literal[string] ]}, { literal[string] :{ literal[string] : identifier[list] ( identifier[panel_names] ) } }) identifier[requests] . identifier[append] ( identifier[operation] ) keyword[if] identifier[len] ( identifier[requests] )> literal[int] : identifier[adapter] . identifier[variant_collection] . identifier[bulk_write] ( identifier[requests] , identifier[ordered] = keyword[False] ) identifier[requests] =[] keyword[if] identifier[requests] : identifier[adapter] . identifier[variant_collection] . identifier[bulk_write] ( identifier[requests] , identifier[ordered] = keyword[False] ) identifier[requests] =[]
def update_panels(context, mongodb, username, password, authdb, host, port, loglevel, config): """scout: manage interactions with a scout instance.""" coloredlogs.install(level=loglevel) LOG.info('Running scout version %s', __version__) LOG.debug('Debug logging enabled.') mongo_config = {} cli_config = {} if config: LOG.debug('Use config file %s', config) with open(config, 'r') as in_handle: cli_config = yaml.load(in_handle) # depends on [control=['with'], data=['in_handle']] # depends on [control=['if'], data=[]] mongo_config['mongodb'] = mongodb or cli_config.get('mongodb') or 'scout' mongo_config['host'] = host or cli_config.get('host') or 'localhost' mongo_config['port'] = port or cli_config.get('port') or 27017 mongo_config['username'] = username or cli_config.get('username') mongo_config['password'] = password or cli_config.get('password') mongo_config['authdb'] = authdb or cli_config.get('authdb') or mongo_config['mongodb'] mongo_config['omim_api_key'] = cli_config.get('omim_api_key') LOG.info('Setting database name to %s', mongo_config['mongodb']) LOG.debug('Setting host to %s', mongo_config['host']) LOG.debug('Setting port to %s', mongo_config['port']) valid_connection = check_connection(host=mongo_config['host'], port=mongo_config['port'], username=mongo_config['username'], password=mongo_config['password'], authdb=mongo_config['authdb']) LOG.info('Test if mongod is running') if not valid_connection: LOG.warning('Connection could not be established') context.abort() # depends on [control=['if'], data=[]] try: client = get_connection(**mongo_config) # depends on [control=['try'], data=[]] except ConnectionFailure: context.abort() # depends on [control=['except'], data=[]] database = client[mongo_config['mongodb']] LOG.info('Setting up a mongo adapter') mongo_config['client'] = client adapter = MongoAdapter(database) requests = [] for case_obj in adapter.case_collection.find(): # pp(case_obj) gene_to_panels = adapter.gene_to_panels(case_obj) variants = 
adapter.variant_collection.find({'case_id': case_obj['_id'], 'category': 'snv', 'variant_type': 'clinical'}) for variant_obj in variants: panel_names = set() for hgnc_id in variant_obj['hgnc_ids']: gene_panels = gene_to_panels.get(hgnc_id, set()) panel_names = panel_names.union(gene_panels) # depends on [control=['for'], data=['hgnc_id']] if panel_names: operation = pymongo.UpdateOne({'_id': variant_obj['_id']}, {'$set': {'panels': list(panel_names)}}) requests.append(operation) # depends on [control=['if'], data=[]] if len(requests) > 5000: adapter.variant_collection.bulk_write(requests, ordered=False) requests = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['variant_obj']] if requests: adapter.variant_collection.bulk_write(requests, ordered=False) requests = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['case_obj']]
def set_basic_auth(self, username, password):
    """Manually set basic auth in the header when normal method does not work."""
    # RFC 7617 scheme: base64("user:password"), decoded back to str for the header
    raw = '{}:{}'.format(username, password).encode('utf-8')
    token = str(b64encode(raw), 'utf-8')
    self.authorization = 'Basic {}'.format(token)
def function[set_basic_auth, parameter[self, username, password]]: constant[Manually set basic auth in the header when normal method does not work.] variable[credentials] assign[=] call[name[str], parameter[call[name[b64encode], parameter[call[call[constant[{}:{}].format, parameter[name[username], name[password]]].encode, parameter[constant[utf-8]]]]], constant[utf-8]]] name[self].authorization assign[=] call[constant[Basic {}].format, parameter[name[credentials]]]
keyword[def] identifier[set_basic_auth] ( identifier[self] , identifier[username] , identifier[password] ): literal[string] identifier[credentials] = identifier[str] ( identifier[b64encode] ( literal[string] . identifier[format] ( identifier[username] , identifier[password] ). identifier[encode] ( literal[string] )), literal[string] ) identifier[self] . identifier[authorization] = literal[string] . identifier[format] ( identifier[credentials] )
def set_basic_auth(self, username, password): """Manually set basic auth in the header when normal method does not work.""" credentials = str(b64encode('{}:{}'.format(username, password).encode('utf-8')), 'utf-8') self.authorization = 'Basic {}'.format(credentials)
def conn(host=None, user=None, password=None, init_fun=None, reset=False):
    """
    Return a persistent connection object shared by multiple modules.

    A new connection is established when none exists yet or when
    ``reset`` is true. Missing connection details fall back to ``config``
    (backed by dj_local_conf.json); if user or password are still unknown,
    they are prompted for interactively.

    :param host: hostname
    :param user: mysql user
    :param password: mysql password
    :param init_fun: initialization function
    :param reset: whether the connection should be reset or not
    """
    if reset or not hasattr(conn, 'connection'):
        # Fill in any unspecified parameters from the config.
        if host is None:
            host = config['database.host']
        if user is None:
            user = config['database.user']
        if password is None:
            password = config['database.password']
        # Interactive fallback when the config did not provide credentials.
        if user is None:    # pragma: no cover
            user = input("Please enter DataJoint username: ")
        if password is None:    # pragma: no cover
            password = getpass(prompt="Please enter DataJoint password: ")
        if init_fun is None:
            init_fun = config['connection.init_function']
        # Cache the connection as a function attribute so later calls reuse it.
        conn.connection = Connection(host, user, password, init_fun)
    return conn.connection
def function[conn, parameter[host, user, password, init_fun, reset]]: constant[ Returns a persistent connection object to be shared by multiple modules. If the connection is not yet established or reset=True, a new connection is set up. If connection information is not provided, it is taken from config which takes the information from dj_local_conf.json. If the password is not specified in that file datajoint prompts for the password. :param host: hostname :param user: mysql user :param password: mysql password :param init_fun: initialization function :param reset: whether the connection should be reset or not ] if <ast.BoolOp object at 0x7da2054a41f0> begin[:] variable[host] assign[=] <ast.IfExp object at 0x7da2054a4eb0> variable[user] assign[=] <ast.IfExp object at 0x7da2054a4970> variable[password] assign[=] <ast.IfExp object at 0x7da2054a7220> if compare[name[user] is constant[None]] begin[:] variable[user] assign[=] call[name[input], parameter[constant[Please enter DataJoint username: ]]] if compare[name[password] is constant[None]] begin[:] variable[password] assign[=] call[name[getpass], parameter[]] variable[init_fun] assign[=] <ast.IfExp object at 0x7da2044c1810> name[conn].connection assign[=] call[name[Connection], parameter[name[host], name[user], name[password], name[init_fun]]] return[name[conn].connection]
keyword[def] identifier[conn] ( identifier[host] = keyword[None] , identifier[user] = keyword[None] , identifier[password] = keyword[None] , identifier[init_fun] = keyword[None] , identifier[reset] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[conn] , literal[string] ) keyword[or] identifier[reset] : identifier[host] = identifier[host] keyword[if] identifier[host] keyword[is] keyword[not] keyword[None] keyword[else] identifier[config] [ literal[string] ] identifier[user] = identifier[user] keyword[if] identifier[user] keyword[is] keyword[not] keyword[None] keyword[else] identifier[config] [ literal[string] ] identifier[password] = identifier[password] keyword[if] identifier[password] keyword[is] keyword[not] keyword[None] keyword[else] identifier[config] [ literal[string] ] keyword[if] identifier[user] keyword[is] keyword[None] : identifier[user] = identifier[input] ( literal[string] ) keyword[if] identifier[password] keyword[is] keyword[None] : identifier[password] = identifier[getpass] ( identifier[prompt] = literal[string] ) identifier[init_fun] = identifier[init_fun] keyword[if] identifier[init_fun] keyword[is] keyword[not] keyword[None] keyword[else] identifier[config] [ literal[string] ] identifier[conn] . identifier[connection] = identifier[Connection] ( identifier[host] , identifier[user] , identifier[password] , identifier[init_fun] ) keyword[return] identifier[conn] . identifier[connection]
def conn(host=None, user=None, password=None, init_fun=None, reset=False): """ Returns a persistent connection object to be shared by multiple modules. If the connection is not yet established or reset=True, a new connection is set up. If connection information is not provided, it is taken from config which takes the information from dj_local_conf.json. If the password is not specified in that file datajoint prompts for the password. :param host: hostname :param user: mysql user :param password: mysql password :param init_fun: initialization function :param reset: whether the connection should be reset or not """ if not hasattr(conn, 'connection') or reset: host = host if host is not None else config['database.host'] user = user if user is not None else config['database.user'] password = password if password is not None else config['database.password'] if user is None: # pragma: no cover user = input('Please enter DataJoint username: ') # depends on [control=['if'], data=['user']] if password is None: # pragma: no cover password = getpass(prompt='Please enter DataJoint password: ') # depends on [control=['if'], data=['password']] init_fun = init_fun if init_fun is not None else config['connection.init_function'] conn.connection = Connection(host, user, password, init_fun) # depends on [control=['if'], data=[]] return conn.connection
def parse(self):
    """Parse the stored method and return the decorator strings found.

    Runs ``self._parse`` over ``self.method`` (presumably populating
    ``self.decos`` — confirmed only by the read below), then returns the
    unique, truthy decorator strings. Order of the result is unspecified
    because the values pass through a set.

    :returns: list of unique decorator name strings
    """
    self._parse(self.method)
    # Set comprehension replaces the redundant set([listcomp]) form (C403);
    # empty/None entries are filtered out exactly as before.
    return list({deco for deco in self.decos if deco})
def function[parse, parameter[self]]: constant[ Return the list of string of all the decorators found ] call[name[self]._parse, parameter[name[self].method]] return[call[name[list], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da18f00cdf0>]]]]]
keyword[def] identifier[parse] ( identifier[self] ): literal[string] identifier[self] . identifier[_parse] ( identifier[self] . identifier[method] ) keyword[return] identifier[list] ( identifier[set] ([ identifier[deco] keyword[for] identifier[deco] keyword[in] identifier[self] . identifier[decos] keyword[if] identifier[deco] ]))
def parse(self): """ Return the list of string of all the decorators found """ self._parse(self.method) return list(set([deco for deco in self.decos if deco]))
def ratechangebase(self, ratefactor, current_base, new_base):
    """
    Local helper for changing the currency base: return the rate expressed
    in the new base, quantized to four decimal places (Decimal's default
    rounding, ROUND_HALF_EVEN).
    """
    if self._multiplier is None:
        # First call without a precomputed multiplier: derive it ourselves.
        self.log(logging.WARNING, "CurrencyHandler: changing base ourselves")
        # Sanity check: the current base must convert to itself at rate 1.
        baserate = self.get_ratefactor(current_base, current_base)
        if baserate != Decimal(1):
            raise RuntimeError("CurrencyHandler: current baserate: %s not 1" % current_base)
        self._multiplier = Decimal(1) / self.get_ratefactor(current_base, new_base)
    converted = ratefactor * self._multiplier
    return converted.quantize(Decimal(".0001"))
def function[ratechangebase, parameter[self, ratefactor, current_base, new_base]]: constant[ Local helper function for changing currency base, returns new rate in new base Defaults to ROUND_HALF_EVEN ] if compare[name[self]._multiplier is constant[None]] begin[:] call[name[self].log, parameter[name[logging].WARNING, constant[CurrencyHandler: changing base ourselves]]] if compare[call[name[Decimal], parameter[constant[1]]] not_equal[!=] call[name[self].get_ratefactor, parameter[name[current_base], name[current_base]]]] begin[:] <ast.Raise object at 0x7da18f7204c0> name[self]._multiplier assign[=] binary_operation[call[name[Decimal], parameter[constant[1]]] / call[name[self].get_ratefactor, parameter[name[current_base], name[new_base]]]] return[call[binary_operation[name[ratefactor] * name[self]._multiplier].quantize, parameter[call[name[Decimal], parameter[constant[.0001]]]]]]
keyword[def] identifier[ratechangebase] ( identifier[self] , identifier[ratefactor] , identifier[current_base] , identifier[new_base] ): literal[string] keyword[if] identifier[self] . identifier[_multiplier] keyword[is] keyword[None] : identifier[self] . identifier[log] ( identifier[logging] . identifier[WARNING] , literal[string] ) keyword[if] identifier[Decimal] ( literal[int] )!= identifier[self] . identifier[get_ratefactor] ( identifier[current_base] , identifier[current_base] ): keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[current_base] ) identifier[self] . identifier[_multiplier] = identifier[Decimal] ( literal[int] )/ identifier[self] . identifier[get_ratefactor] ( identifier[current_base] , identifier[new_base] ) keyword[return] ( identifier[ratefactor] * identifier[self] . identifier[_multiplier] ). identifier[quantize] ( identifier[Decimal] ( literal[string] ))
def ratechangebase(self, ratefactor, current_base, new_base): """ Local helper function for changing currency base, returns new rate in new base Defaults to ROUND_HALF_EVEN """ if self._multiplier is None: self.log(logging.WARNING, 'CurrencyHandler: changing base ourselves') # Check the current base is 1 if Decimal(1) != self.get_ratefactor(current_base, current_base): raise RuntimeError('CurrencyHandler: current baserate: %s not 1' % current_base) # depends on [control=['if'], data=[]] self._multiplier = Decimal(1) / self.get_ratefactor(current_base, new_base) # depends on [control=['if'], data=[]] return (ratefactor * self._multiplier).quantize(Decimal('.0001'))
def set(self, name, value):
    """Set an option value.

    Args:
        name (str): The name of the option.
        value: The value to set the option to.

    Raises:
        AttributeError: If the name is not registered.
        TypeError: If the value is not a string or appropriate native type.
        ValueError: If the value is a string but cannot be coerced.
    """
    # Delegate to the registered option descriptor; it performs any
    # coercion/validation and may raise TypeError/ValueError.
    if name in self._options:
        return self._options[name].__set__(self, value)
    raise AttributeError("Option {0} does not exist.".format(name))
def function[set, parameter[self, name, value]]: constant[Set an option value. Args: name (str): The name of the option. value: The value to set the option to. Raises: AttributeError: If the name is not registered. TypeError: If the value is not a string or appropriate native type. ValueError: If the value is a string but cannot be coerced. ] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._options] begin[:] <ast.Raise object at 0x7da1b2899060> return[call[call[name[self]._options][name[name]].__set__, parameter[name[self], name[value]]]]
keyword[def] identifier[set] ( identifier[self] , identifier[name] , identifier[value] ): literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_options] : keyword[raise] identifier[AttributeError] ( literal[string] . identifier[format] ( identifier[name] )) keyword[return] identifier[self] . identifier[_options] [ identifier[name] ]. identifier[__set__] ( identifier[self] , identifier[value] )
def set(self, name, value): """Set an option value. Args: name (str): The name of the option. value: The value to set the option to. Raises: AttributeError: If the name is not registered. TypeError: If the value is not a string or appropriate native type. ValueError: If the value is a string but cannot be coerced. """ if name not in self._options: raise AttributeError('Option {0} does not exist.'.format(name)) # depends on [control=['if'], data=['name']] return self._options[name].__set__(self, value)
def to_json(self, value, preserve_ro):
    """
    Returns the JSON encoding of the given attribute value.

    If the value has a 'to_json_dict' object, that method is called.
    Otherwise, the following values are returned for each input type:
    - datetime.datetime: string with the API representation of a date.
    - dictionary: if 'atype' is ApiConfig, a list of ApiConfig objects.
    - python list: python list (or ApiList) with JSON encoding of items
    - the raw value otherwise
    """
    # Objects that know how to serialize themselves win outright.
    encoder = getattr(value, 'to_json_dict', None)
    if encoder is not None:
        return encoder(preserve_ro)
    if isinstance(value, dict) and self._atype == ApiConfig:
        return config_to_api_list(value)
    if isinstance(value, datetime.datetime):
        return value.strftime(self.DATE_FMT)
    if isinstance(value, (list, tuple)):
        if self._is_api_list:
            return ApiList(value).to_json_dict()
        # Plain sequence: encode each item recursively.
        return [self.to_json(item, preserve_ro) for item in value]
    return value
def function[to_json, parameter[self, value, preserve_ro]]: constant[ Returns the JSON encoding of the given attribute value. If the value has a 'to_json_dict' object, that method is called. Otherwise, the following values are returned for each input type: - datetime.datetime: string with the API representation of a date. - dictionary: if 'atype' is ApiConfig, a list of ApiConfig objects. - python list: python list (or ApiList) with JSON encoding of items - the raw value otherwise ] if call[name[hasattr], parameter[name[value], constant[to_json_dict]]] begin[:] return[call[name[value].to_json_dict, parameter[name[preserve_ro]]]]
keyword[def] identifier[to_json] ( identifier[self] , identifier[value] , identifier[preserve_ro] ): literal[string] keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ): keyword[return] identifier[value] . identifier[to_json_dict] ( identifier[preserve_ro] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[and] identifier[self] . identifier[_atype] == identifier[ApiConfig] : keyword[return] identifier[config_to_api_list] ( identifier[value] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[datetime] . identifier[datetime] ): keyword[return] identifier[value] . identifier[strftime] ( identifier[self] . identifier[DATE_FMT] ) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[tuple] ): keyword[if] identifier[self] . identifier[_is_api_list] : keyword[return] identifier[ApiList] ( identifier[value] ). identifier[to_json_dict] () keyword[else] : keyword[return] [ identifier[self] . identifier[to_json] ( identifier[x] , identifier[preserve_ro] ) keyword[for] identifier[x] keyword[in] identifier[value] ] keyword[else] : keyword[return] identifier[value]
def to_json(self, value, preserve_ro): """ Returns the JSON encoding of the given attribute value. If the value has a 'to_json_dict' object, that method is called. Otherwise, the following values are returned for each input type: - datetime.datetime: string with the API representation of a date. - dictionary: if 'atype' is ApiConfig, a list of ApiConfig objects. - python list: python list (or ApiList) with JSON encoding of items - the raw value otherwise """ if hasattr(value, 'to_json_dict'): return value.to_json_dict(preserve_ro) # depends on [control=['if'], data=[]] elif isinstance(value, dict) and self._atype == ApiConfig: return config_to_api_list(value) # depends on [control=['if'], data=[]] elif isinstance(value, datetime.datetime): return value.strftime(self.DATE_FMT) # depends on [control=['if'], data=[]] elif isinstance(value, list) or isinstance(value, tuple): if self._is_api_list: return ApiList(value).to_json_dict() # depends on [control=['if'], data=[]] else: return [self.to_json(x, preserve_ro) for x in value] # depends on [control=['if'], data=[]] else: return value
def create_missing(self):
    """Possibly set several extra instance attributes.

    If ``onthefly_register`` is set and is true, generate values for any
    of the following instance attributes that are not already set:

    * account_password
    * attr_firstname
    * attr_lastname
    * attr_login
    * attr_mail

    (The docstring previously listed ``account_firstname`` and
    ``account_lastname``, which the code never touches; it sets
    ``attr_firstname``/``attr_lastname``.)
    """
    super(AuthSourceLDAP, self).create_missing()
    if getattr(self, 'onthefly_register', False) is not True:
        return
    fields = (
        'account_password',
        'attr_firstname',
        'attr_lastname',
        'attr_login',
        'attr_mail',
    )
    for field in fields:
        # Only fill in fields the caller has not already provided.
        if not hasattr(self, field):
            setattr(self, field, self._fields[field].gen_value())
def function[create_missing, parameter[self]]: constant[Possibly set several extra instance attributes. If ``onthefly_register`` is set and is true, set the following instance attributes: * account_password * account_firstname * account_lastname * attr_login * attr_mail ] call[call[name[super], parameter[name[AuthSourceLDAP], name[self]]].create_missing, parameter[]] if compare[call[name[getattr], parameter[name[self], constant[onthefly_register], constant[False]]] is constant[True]] begin[:] for taget[name[field]] in starred[tuple[[<ast.Constant object at 0x7da1b067ba30>, <ast.Constant object at 0x7da1b067a170>, <ast.Constant object at 0x7da1b067b820>, <ast.Constant object at 0x7da1b0679150>, <ast.Constant object at 0x7da1b06786d0>]]] begin[:] if <ast.UnaryOp object at 0x7da1b0679c00> begin[:] call[name[setattr], parameter[name[self], name[field], call[call[name[self]._fields][name[field]].gen_value, parameter[]]]]
keyword[def] identifier[create_missing] ( identifier[self] ): literal[string] identifier[super] ( identifier[AuthSourceLDAP] , identifier[self] ). identifier[create_missing] () keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[False] ) keyword[is] keyword[True] : keyword[for] identifier[field] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[field] ): identifier[setattr] ( identifier[self] , identifier[field] , identifier[self] . identifier[_fields] [ identifier[field] ]. identifier[gen_value] ())
def create_missing(self): """Possibly set several extra instance attributes. If ``onthefly_register`` is set and is true, set the following instance attributes: * account_password * account_firstname * account_lastname * attr_login * attr_mail """ super(AuthSourceLDAP, self).create_missing() if getattr(self, 'onthefly_register', False) is True: for field in ('account_password', 'attr_firstname', 'attr_lastname', 'attr_login', 'attr_mail'): if not hasattr(self, field): setattr(self, field, self._fields[field].gen_value()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]]
def dex_ticker(self):
    """Fetch the DEX ticker and cache it on the instance.

    Builds a ``Dex`` helper from ``self.steem_instance()`` and stores it
    as ``self.dex``, then stores and returns the ticker from
    ``Dex.get_ticker()``.

    :returns: the ticker data returned by the DEX
    """
    self.dex = Dex(self.steem_instance())
    # Dropped the stray trailing semicolon from the original line.
    self.ticker = self.dex.get_ticker()
    return self.ticker
def function[dex_ticker, parameter[self]]: constant[ Simply grabs the ticker using the steem_instance method and adds it to a class variable. ] name[self].dex assign[=] call[name[Dex], parameter[call[name[self].steem_instance, parameter[]]]] name[self].ticker assign[=] call[name[self].dex.get_ticker, parameter[]] return[name[self].ticker]
keyword[def] identifier[dex_ticker] ( identifier[self] ): literal[string] identifier[self] . identifier[dex] = identifier[Dex] ( identifier[self] . identifier[steem_instance] ()) identifier[self] . identifier[ticker] = identifier[self] . identifier[dex] . identifier[get_ticker] (); keyword[return] identifier[self] . identifier[ticker]
def dex_ticker(self): """ Simply grabs the ticker using the steem_instance method and adds it to a class variable. """ self.dex = Dex(self.steem_instance()) self.ticker = self.dex.get_ticker() return self.ticker
def _normalize_sv_coverage_gatk(group_id, inputs, backgrounds, work_dir, back_files, out_files):
    """Normalize CNV coverage using a panel of normals with GATK's de-noise approach.

    The panel of normals (PoN) is chosen in priority order: an explicitly
    configured background CNV reference on the inputs, then a PoN built
    from the supplied background samples, otherwise no PoN.

    :param group_id: identifier for this batch group
    :param inputs: sample data dicts to denoise
    :param backgrounds: background sample data dicts used to build a PoN
    :param work_dir: working directory for PoN creation
    :param back_files: dict updated in place with sample name -> PoN
    :param out_files: dict updated in place with sample name -> denoised file
    :returns: tuple of (back_files, out_files)
    """
    # Set comprehension + discard replaces set(filter(lambda ...)).
    input_backs = {dd.get_background_cnv_reference(d, "gatk-cnv") for d in inputs}
    input_backs.discard(None)
    if input_backs:
        assert len(input_backs) == 1, "Multiple backgrounds in group: %s" % list(input_backs)
        pon = next(iter(input_backs))
    elif backgrounds:
        pon = gatkcnv.create_panel_of_normals(backgrounds, group_id, work_dir)
    else:
        pon = None
    for data in inputs:
        # Use a distinct name instead of shadowing the work_dir parameter.
        sample_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "structural",
                                                     dd.get_sample_name(data), "bins"))
        denoise_file = gatkcnv.denoise(data, pon, sample_dir)
        out_files[dd.get_sample_name(data)] = denoise_file
        back_files[dd.get_sample_name(data)] = pon
    return back_files, out_files
def function[_normalize_sv_coverage_gatk, parameter[group_id, inputs, backgrounds, work_dir, back_files, out_files]]: constant[Normalize CNV coverage using panel of normals with GATK's de-noise approaches. ] variable[input_backs] assign[=] call[name[set], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b18a1510>, <ast.ListComp object at 0x7da1b18a1270>]]]] if name[input_backs] begin[:] assert[compare[call[name[len], parameter[name[input_backs]]] equal[==] constant[1]]] variable[pon] assign[=] call[call[name[list], parameter[name[input_backs]]]][constant[0]] for taget[name[data]] in starred[name[inputs]] begin[:] variable[work_dir] assign[=] call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[call[name[dd].get_work_dir, parameter[name[data]]], constant[structural], call[name[dd].get_sample_name, parameter[name[data]]], constant[bins]]]]] variable[denoise_file] assign[=] call[name[gatkcnv].denoise, parameter[name[data], name[pon], name[work_dir]]] call[name[out_files]][call[name[dd].get_sample_name, parameter[name[data]]]] assign[=] name[denoise_file] call[name[back_files]][call[name[dd].get_sample_name, parameter[name[data]]]] assign[=] name[pon] return[tuple[[<ast.Name object at 0x7da1b1984a30>, <ast.Name object at 0x7da1b1985b10>]]]
keyword[def] identifier[_normalize_sv_coverage_gatk] ( identifier[group_id] , identifier[inputs] , identifier[backgrounds] , identifier[work_dir] , identifier[back_files] , identifier[out_files] ): literal[string] identifier[input_backs] = identifier[set] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] keyword[is] keyword[not] keyword[None] , [ identifier[dd] . identifier[get_background_cnv_reference] ( identifier[d] , literal[string] ) keyword[for] identifier[d] keyword[in] identifier[inputs] ])) keyword[if] identifier[input_backs] : keyword[assert] identifier[len] ( identifier[input_backs] )== literal[int] , literal[string] % identifier[list] ( identifier[input_backs] ) identifier[pon] = identifier[list] ( identifier[input_backs] )[ literal[int] ] keyword[elif] identifier[backgrounds] : identifier[pon] = identifier[gatkcnv] . identifier[create_panel_of_normals] ( identifier[backgrounds] , identifier[group_id] , identifier[work_dir] ) keyword[else] : identifier[pon] = keyword[None] keyword[for] identifier[data] keyword[in] identifier[inputs] : identifier[work_dir] = identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dd] . identifier[get_work_dir] ( identifier[data] ), literal[string] , identifier[dd] . identifier[get_sample_name] ( identifier[data] ), literal[string] )) identifier[denoise_file] = identifier[gatkcnv] . identifier[denoise] ( identifier[data] , identifier[pon] , identifier[work_dir] ) identifier[out_files] [ identifier[dd] . identifier[get_sample_name] ( identifier[data] )]= identifier[denoise_file] identifier[back_files] [ identifier[dd] . identifier[get_sample_name] ( identifier[data] )]= identifier[pon] keyword[return] identifier[back_files] , identifier[out_files]
def _normalize_sv_coverage_gatk(group_id, inputs, backgrounds, work_dir, back_files, out_files): """Normalize CNV coverage using panel of normals with GATK's de-noise approaches. """ input_backs = set(filter(lambda x: x is not None, [dd.get_background_cnv_reference(d, 'gatk-cnv') for d in inputs])) if input_backs: assert len(input_backs) == 1, 'Multiple backgrounds in group: %s' % list(input_backs) pon = list(input_backs)[0] # depends on [control=['if'], data=[]] elif backgrounds: pon = gatkcnv.create_panel_of_normals(backgrounds, group_id, work_dir) # depends on [control=['if'], data=[]] else: pon = None for data in inputs: work_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), 'structural', dd.get_sample_name(data), 'bins')) denoise_file = gatkcnv.denoise(data, pon, work_dir) out_files[dd.get_sample_name(data)] = denoise_file back_files[dd.get_sample_name(data)] = pon # depends on [control=['for'], data=['data']] return (back_files, out_files)
def delete(self, using=None, **kwargs):
    """
    Deletes the index in elasticsearch.

    Any additional keyword arguments will be passed to
    ``Elasticsearch.indices.delete`` unchanged.
    """
    # Resolve the connection for the requested alias, then forward the
    # delete call with this index's name plus any caller-supplied options.
    connection = self._get_connection(using)
    return connection.indices.delete(index=self._name, **kwargs)
def function[delete, parameter[self, using]]: constant[ Deletes the index in elasticsearch. Any additional keyword arguments will be passed to ``Elasticsearch.indices.delete`` unchanged. ] return[call[call[name[self]._get_connection, parameter[name[using]]].indices.delete, parameter[]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[using] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_get_connection] ( identifier[using] ). identifier[indices] . identifier[delete] ( identifier[index] = identifier[self] . identifier[_name] ,** identifier[kwargs] )
def delete(self, using=None, **kwargs): """ Deletes the index in elasticsearch. Any additional keyword arguments will be passed to ``Elasticsearch.indices.delete`` unchanged. """ return self._get_connection(using).indices.delete(index=self._name, **kwargs)
def bls_serial_pfind(times, mags, errs, magsarefluxes=False, startp=0.1, # search from 0.1 d to... endp=100.0, # ... 100.0 d -- don't search full timebase stepsize=5.0e-4, mintransitduration=0.01, # minimum transit length in phase maxtransitduration=0.4, # maximum transit length in phase ndurations=100, autofreq=True, # figure out f0, nf, and df automatically blsobjective='likelihood', blsmethod='fast', blsoversample=10, blsmintransits=3, blsfreqfactor=10.0, periodepsilon=0.1, nbestpeaks=5, sigclip=10.0, verbose=True, raiseonfail=False): '''Runs the Box Least Squares Fitting Search for transit-shaped signals. Based on the version of BLS in Astropy 3.1: `astropy.stats.BoxLeastSquares`. If you don't have Astropy 3.1, this module will fail to import. Note that by default, this implementation of `bls_serial_pfind` doesn't use the `.autoperiod()` function from `BoxLeastSquares` but uses the same auto frequency-grid generation as the functions in `periodbase.kbls`. If you want to use Astropy's implementation, set the value of `autofreq` kwarg to 'astropy'. The dict returned from this function contains a `blsmodel` key, which is the generated model from Astropy's BLS. Use the `.compute_stats()` method to calculate the required stats like SNR, depth, duration, etc. Parameters ---------- times,mags,errs : np.array The magnitude/flux time-series to search for transits. magsarefluxes : bool If the input measurement values in `mags` and `errs` are in fluxes, set this to True. startp,endp : float The minimum and maximum periods to consider for the transit search. stepsize : float The step-size in frequency to use when constructing a frequency grid for the period search. mintransitduration,maxtransitduration : float The minimum and maximum transitdurations (in units of phase) to consider for the transit search. ndurations : int The number of transit durations to use in the period-search. 
autofreq : bool or str If this is True, the values of `stepsize` and `nphasebins` will be ignored, and these, along with a frequency-grid, will be determined based on the following relations:: nphasebins = int(ceil(2.0/mintransitduration)) if nphasebins > 3000: nphasebins = 3000 stepsize = 0.25*mintransitduration/(times.max()-times.min()) minfreq = 1.0/endp maxfreq = 1.0/startp nfreq = int(ceil((maxfreq - minfreq)/stepsize)) If this is False, you must set `startp`, `endp`, and `stepsize` as appropriate. If this is str == 'astropy', will use the `astropy.stats.BoxLeastSquares.autoperiod()` function to calculate the frequency grid instead of the kbls method. blsobjective : {'likelihood','snr'} Sets the type of objective to optimize in the `BoxLeastSquares.power()` function. blsmethod : {'fast','slow'} Sets the type of method to use in the `BoxLeastSquares.power()` function. blsoversample : {'likelihood','snr'} Sets the `oversample` kwarg for the `BoxLeastSquares.power()` function. blsmintransits : int Sets the `min_n_transits` kwarg for the `BoxLeastSquares.autoperiod()` function. blsfreqfactor : float Sets the `frequency_factor` kwarg for the `BoxLeastSquares.autperiod()` function. periodepsilon : float The fractional difference between successive values of 'best' periods when sorting by periodogram power to consider them as separate periods (as opposed to part of the same periodogram peak). This is used to avoid broad peaks in the periodogram and make sure the 'best' periods returned are all actually independent. nbestpeaks : int The number of 'best' peaks to return from the periodogram results, starting from the global maximum of the periodogram peak values. sigclip : float or int or sequence of two floats/ints or None If a single float or int, a symmetric sigma-clip will be performed using the number provided as the sigma-multiplier to cut out from the input time-series. 
If a list of two ints/floats is provided, the function will perform an 'asymmetric' sigma-clip. The first element in this list is the sigma value to use for fainter flux/mag values; the second element in this list is the sigma value to use for brighter flux/mag values. For example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma dimmings and greater than 3-sigma brightenings. Here the meaning of "dimming" and "brightening" is set by *physics* (not the magnitude system), which is why the `magsarefluxes` kwarg must be correctly set. If `sigclip` is None, no sigma-clipping will be performed, and the time-series (with non-finite elems removed) will be passed through to the output. verbose : bool If this is True, will indicate progress and details about the frequency grid used for the period search. raiseonfail : bool If True, raises an exception if something goes wrong. Otherwise, returns None. Returns ------- dict This function returns a dict, referred to as an `lspinfo` dict in other astrobase functions that operate on periodogram results. 
This is a standardized format across all astrobase period-finders, and is of the form below:: {'bestperiod': the best period value in the periodogram, 'bestlspval': the periodogram peak associated with the best period, 'nbestpeaks': the input value of nbestpeaks, 'nbestlspvals': nbestpeaks-size list of best period peak values, 'nbestperiods': nbestpeaks-size list of best periods, 'lspvals': the full array of periodogram powers, 'frequencies': the full array of frequencies considered, 'periods': the full array of periods considered, 'durations': the array of durations used to run BLS, 'blsresult': Astropy BLS result object (BoxLeastSquaresResult), 'blsmodel': Astropy BLS BoxLeastSquares object used for work, 'stepsize': the actual stepsize used, 'nfreq': the actual nfreq used, 'durations': the durations array used, 'mintransitduration': the input mintransitduration, 'maxtransitduration': the input maxtransitdurations, 'method':'bls' -> the name of the period-finder method, 'kwargs':{ dict of all of the input kwargs for record-keeping}} ''' # get rid of nans first and sigclip stimes, smags, serrs = sigclip_magseries(times, mags, errs, magsarefluxes=magsarefluxes, sigclip=sigclip) # make sure there are enough points to calculate a spectrum if len(stimes) > 9 and len(smags) > 9 and len(serrs) > 9: # if we're setting up everything automatically if isinstance(autofreq, bool) and autofreq: # use heuristic to figure out best timestep stepsize = 0.25*mintransitduration/(stimes.max()-stimes.min()) # now figure out the frequencies to use minfreq = 1.0/endp maxfreq = 1.0/startp nfreq = int(npceil((maxfreq - minfreq)/stepsize)) # say what we're using if verbose: LOGINFO('min P: %s, max P: %s, nfreq: %s, ' 'minfreq: %s, maxfreq: %s' % (startp, endp, nfreq, minfreq, maxfreq)) LOGINFO('autofreq = True: using AUTOMATIC values for ' 'freq stepsize: %s, ndurations: %s, ' 'min transit duration: %s, max transit duration: %s' % (stepsize, ndurations, mintransitduration, 
maxtransitduration)) use_autoperiod = False elif isinstance(autofreq, bool) and not autofreq: minfreq = 1.0/endp maxfreq = 1.0/startp nfreq = int(npceil((maxfreq - minfreq)/stepsize)) # say what we're using if verbose: LOGINFO('min P: %s, max P: %s, nfreq: %s, ' 'minfreq: %s, maxfreq: %s' % (startp, endp, nfreq, minfreq, maxfreq)) LOGINFO('autofreq = False: using PROVIDED values for ' 'freq stepsize: %s, ndurations: %s, ' 'min transit duration: %s, max transit duration: %s' % (stepsize, ndurations, mintransitduration, maxtransitduration)) use_autoperiod = False elif isinstance(autofreq, str) and autofreq == 'astropy': use_autoperiod = True minfreq = 1.0/endp maxfreq = 1.0/startp else: LOGERROR("unknown autofreq kwarg encountered. can't continue...") return None # check the time-base vs. endp value if minfreq < (1.0/(stimes.max() - stimes.min())): if verbose: LOGWARNING('the requested max P = %.3f is larger than ' 'the time base of the observations = %.3f, ' ' will make minfreq = 2 x 1/timebase' % (endp, stimes.max() - stimes.min())) minfreq = 2.0/(stimes.max() - stimes.min()) if verbose: LOGINFO('new minfreq: %s, maxfreq: %s' % (minfreq, maxfreq)) # run BLS try: # astropy's BLS requires durations in units of time durations = nplinspace(mintransitduration*startp, maxtransitduration*startp, ndurations) # set up the correct units for the BLS model if magsarefluxes: blsmodel = BoxLeastSquares( stimes*u.day, smags*u.dimensionless_unscaled, dy=serrs*u.dimensionless_unscaled ) else: blsmodel = BoxLeastSquares( stimes*u.day, smags*u.mag, dy=serrs*u.mag ) # use autoperiod if requested if use_autoperiod: periods = nparray( blsmodel.autoperiod( durations, minimum_period=startp, maximum_period=endp, minimum_n_transit=blsmintransits, frequency_factor=blsfreqfactor ) ) nfreq = periods.size if verbose: LOGINFO( "autofreq = 'astropy', used .autoperiod() with " "minimum_n_transit = %s, freq_factor = %s " "to generate the frequency grid" % (blsmintransits, blsfreqfactor) ) 
LOGINFO('stepsize = %.5f, nfreq = %s, minfreq = %.5f, ' 'maxfreq = %.5f, ndurations = %s' % (abs(1.0/periods[1] - 1.0/periods[0]), nfreq, 1.0/periods.max(), 1.0/periods.min(), durations.size)) # otherwise, use kbls method else: frequencies = minfreq + nparange(nfreq)*stepsize periods = 1.0/frequencies if nfreq > 5.0e5: if verbose: LOGWARNING('more than 5.0e5 frequencies to go through; ' 'this will take a while. ' 'you might want to use the ' 'abls.bls_parallel_pfind function instead') # run the periodogram blsresult = blsmodel.power( periods*u.day, durations*u.day, objective=blsobjective, method=blsmethod, oversample=blsoversample ) # get the peak values lsp = nparray(blsresult.power) # find the nbestpeaks for the periodogram: 1. sort the lsp array # by highest value first 2. go down the values until we find # five values that are separated by at least periodepsilon in # period # make sure to get only the finite peaks in the periodogram # this is needed because BLS may produce infs for some peaks finitepeakind = npisfinite(lsp) finlsp = lsp[finitepeakind] finperiods = periods[finitepeakind] # make sure that finlsp has finite values before we work on it try: bestperiodind = npargmax(finlsp) except ValueError: LOGERROR('no finite periodogram values ' 'for this mag series, skipping...') return {'bestperiod':npnan, 'bestlspval':npnan, 'nbestpeaks':nbestpeaks, 'nbestinds':None, 'nbestlspvals':None, 'nbestperiods':None, 'lspvals':None, 'periods':None, 'durations':None, 'method':'bls', 'blsresult':None, 'blsmodel':None, 'kwargs':{'startp':startp, 'endp':endp, 'stepsize':stepsize, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'ndurations':ndurations, 'blsobjective':blsobjective, 'blsmethod':blsmethod, 'blsoversample':blsoversample, 'blsntransits':blsmintransits, 'blsfreqfactor':blsfreqfactor, 'autofreq':autofreq, 'periodepsilon':periodepsilon, 'nbestpeaks':nbestpeaks, 'sigclip':sigclip, 'magsarefluxes':magsarefluxes}} sortedlspind = 
npargsort(finlsp)[::-1] sortedlspperiods = finperiods[sortedlspind] sortedlspvals = finlsp[sortedlspind] # now get the nbestpeaks nbestperiods, nbestlspvals, nbestinds, peakcount = ( [finperiods[bestperiodind]], [finlsp[bestperiodind]], [bestperiodind], 1 ) prevperiod = sortedlspperiods[0] # find the best nbestpeaks in the lsp and their periods for period, lspval, ind in zip(sortedlspperiods, sortedlspvals, sortedlspind): if peakcount == nbestpeaks: break perioddiff = abs(period - prevperiod) bestperiodsdiff = [abs(period - x) for x in nbestperiods] # print('prevperiod = %s, thisperiod = %s, ' # 'perioddiff = %s, peakcount = %s' % # (prevperiod, period, perioddiff, peakcount)) # this ensures that this period is different from the last # period and from all the other existing best periods by # periodepsilon to make sure we jump to an entire different # peak in the periodogram if (perioddiff > (periodepsilon*prevperiod) and all(x > (periodepsilon*period) for x in bestperiodsdiff)): nbestperiods.append(period) nbestlspvals.append(lspval) nbestinds.append(ind) peakcount = peakcount + 1 prevperiod = period # generate the return dict resultdict = { 'bestperiod':finperiods[bestperiodind], 'bestlspval':finlsp[bestperiodind], 'nbestpeaks':nbestpeaks, 'nbestinds':nbestinds, 'nbestlspvals':nbestlspvals, 'nbestperiods':nbestperiods, 'lspvals':lsp, 'frequencies':frequencies, 'periods':periods, 'durations':durations, 'blsresult':blsresult, 'blsmodel':blsmodel, 'stepsize':stepsize, 'nfreq':nfreq, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'method':'bls', 'kwargs':{'startp':startp, 'endp':endp, 'stepsize':stepsize, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'ndurations':ndurations, 'blsobjective':blsobjective, 'blsmethod':blsmethod, 'blsoversample':blsoversample, 'blsntransits':blsmintransits, 'blsfreqfactor':blsfreqfactor, 'autofreq':autofreq, 'periodepsilon':periodepsilon, 'nbestpeaks':nbestpeaks, 
'sigclip':sigclip, 'magsarefluxes':magsarefluxes} } return resultdict except Exception as e: LOGEXCEPTION('BLS failed!') if raiseonfail: raise return {'bestperiod':npnan, 'bestlspval':npnan, 'nbestinds':None, 'nbestpeaks':nbestpeaks, 'nbestlspvals':None, 'nbestperiods':None, 'lspvals':None, 'periods':None, 'durations':None, 'blsresult':None, 'blsmodel':None, 'stepsize':stepsize, 'nfreq':nfreq, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'method':'bls', 'kwargs':{'startp':startp, 'endp':endp, 'stepsize':stepsize, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'ndurations':ndurations, 'blsobjective':blsobjective, 'blsmethod':blsmethod, 'blsoversample':blsoversample, 'blsntransits':blsmintransits, 'blsfreqfactor':blsfreqfactor, 'autofreq':autofreq, 'periodepsilon':periodepsilon, 'nbestpeaks':nbestpeaks, 'sigclip':sigclip, 'magsarefluxes':magsarefluxes}} else: LOGERROR('no good detections for these times and mags, skipping...') return {'bestperiod':npnan, 'bestlspval':npnan, 'nbestinds':None, 'nbestpeaks':nbestpeaks, 'nbestlspvals':None, 'nbestperiods':None, 'lspvals':None, 'periods':None, 'durations':None, 'blsresult':None, 'blsmodel':None, 'stepsize':stepsize, 'nfreq':None, 'nphasebins':None, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'method':'bls', 'kwargs':{'startp':startp, 'endp':endp, 'stepsize':stepsize, 'mintransitduration':mintransitduration, 'maxtransitduration':maxtransitduration, 'ndurations':ndurations, 'blsobjective':blsobjective, 'blsmethod':blsmethod, 'blsoversample':blsoversample, 'blsntransits':blsmintransits, 'blsfreqfactor':blsfreqfactor, 'autofreq':autofreq, 'periodepsilon':periodepsilon, 'nbestpeaks':nbestpeaks, 'sigclip':sigclip, 'magsarefluxes':magsarefluxes}}
def function[bls_serial_pfind, parameter[times, mags, errs, magsarefluxes, startp, endp, stepsize, mintransitduration, maxtransitduration, ndurations, autofreq, blsobjective, blsmethod, blsoversample, blsmintransits, blsfreqfactor, periodepsilon, nbestpeaks, sigclip, verbose, raiseonfail]]: constant[Runs the Box Least Squares Fitting Search for transit-shaped signals. Based on the version of BLS in Astropy 3.1: `astropy.stats.BoxLeastSquares`. If you don't have Astropy 3.1, this module will fail to import. Note that by default, this implementation of `bls_serial_pfind` doesn't use the `.autoperiod()` function from `BoxLeastSquares` but uses the same auto frequency-grid generation as the functions in `periodbase.kbls`. If you want to use Astropy's implementation, set the value of `autofreq` kwarg to 'astropy'. The dict returned from this function contains a `blsmodel` key, which is the generated model from Astropy's BLS. Use the `.compute_stats()` method to calculate the required stats like SNR, depth, duration, etc. Parameters ---------- times,mags,errs : np.array The magnitude/flux time-series to search for transits. magsarefluxes : bool If the input measurement values in `mags` and `errs` are in fluxes, set this to True. startp,endp : float The minimum and maximum periods to consider for the transit search. stepsize : float The step-size in frequency to use when constructing a frequency grid for the period search. mintransitduration,maxtransitduration : float The minimum and maximum transitdurations (in units of phase) to consider for the transit search. ndurations : int The number of transit durations to use in the period-search. 
autofreq : bool or str If this is True, the values of `stepsize` and `nphasebins` will be ignored, and these, along with a frequency-grid, will be determined based on the following relations:: nphasebins = int(ceil(2.0/mintransitduration)) if nphasebins > 3000: nphasebins = 3000 stepsize = 0.25*mintransitduration/(times.max()-times.min()) minfreq = 1.0/endp maxfreq = 1.0/startp nfreq = int(ceil((maxfreq - minfreq)/stepsize)) If this is False, you must set `startp`, `endp`, and `stepsize` as appropriate. If this is str == 'astropy', will use the `astropy.stats.BoxLeastSquares.autoperiod()` function to calculate the frequency grid instead of the kbls method. blsobjective : {'likelihood','snr'} Sets the type of objective to optimize in the `BoxLeastSquares.power()` function. blsmethod : {'fast','slow'} Sets the type of method to use in the `BoxLeastSquares.power()` function. blsoversample : {'likelihood','snr'} Sets the `oversample` kwarg for the `BoxLeastSquares.power()` function. blsmintransits : int Sets the `min_n_transits` kwarg for the `BoxLeastSquares.autoperiod()` function. blsfreqfactor : float Sets the `frequency_factor` kwarg for the `BoxLeastSquares.autperiod()` function. periodepsilon : float The fractional difference between successive values of 'best' periods when sorting by periodogram power to consider them as separate periods (as opposed to part of the same periodogram peak). This is used to avoid broad peaks in the periodogram and make sure the 'best' periods returned are all actually independent. nbestpeaks : int The number of 'best' peaks to return from the periodogram results, starting from the global maximum of the periodogram peak values. sigclip : float or int or sequence of two floats/ints or None If a single float or int, a symmetric sigma-clip will be performed using the number provided as the sigma-multiplier to cut out from the input time-series. 
If a list of two ints/floats is provided, the function will perform an 'asymmetric' sigma-clip. The first element in this list is the sigma value to use for fainter flux/mag values; the second element in this list is the sigma value to use for brighter flux/mag values. For example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma dimmings and greater than 3-sigma brightenings. Here the meaning of "dimming" and "brightening" is set by *physics* (not the magnitude system), which is why the `magsarefluxes` kwarg must be correctly set. If `sigclip` is None, no sigma-clipping will be performed, and the time-series (with non-finite elems removed) will be passed through to the output. verbose : bool If this is True, will indicate progress and details about the frequency grid used for the period search. raiseonfail : bool If True, raises an exception if something goes wrong. Otherwise, returns None. Returns ------- dict This function returns a dict, referred to as an `lspinfo` dict in other astrobase functions that operate on periodogram results. 
This is a standardized format across all astrobase period-finders, and is of the form below:: {'bestperiod': the best period value in the periodogram, 'bestlspval': the periodogram peak associated with the best period, 'nbestpeaks': the input value of nbestpeaks, 'nbestlspvals': nbestpeaks-size list of best period peak values, 'nbestperiods': nbestpeaks-size list of best periods, 'lspvals': the full array of periodogram powers, 'frequencies': the full array of frequencies considered, 'periods': the full array of periods considered, 'durations': the array of durations used to run BLS, 'blsresult': Astropy BLS result object (BoxLeastSquaresResult), 'blsmodel': Astropy BLS BoxLeastSquares object used for work, 'stepsize': the actual stepsize used, 'nfreq': the actual nfreq used, 'durations': the durations array used, 'mintransitduration': the input mintransitduration, 'maxtransitduration': the input maxtransitdurations, 'method':'bls' -> the name of the period-finder method, 'kwargs':{ dict of all of the input kwargs for record-keeping}} ] <ast.Tuple object at 0x7da18dc04f40> assign[=] call[name[sigclip_magseries], parameter[name[times], name[mags], name[errs]]] if <ast.BoolOp object at 0x7da1b00f7dc0> begin[:] if <ast.BoolOp object at 0x7da1b00f4580> begin[:] variable[stepsize] assign[=] binary_operation[binary_operation[constant[0.25] * name[mintransitduration]] / binary_operation[call[name[stimes].max, parameter[]] - call[name[stimes].min, parameter[]]]] variable[minfreq] assign[=] binary_operation[constant[1.0] / name[endp]] variable[maxfreq] assign[=] binary_operation[constant[1.0] / name[startp]] variable[nfreq] assign[=] call[name[int], parameter[call[name[npceil], parameter[binary_operation[binary_operation[name[maxfreq] - name[minfreq]] / name[stepsize]]]]]] if name[verbose] begin[:] call[name[LOGINFO], parameter[binary_operation[constant[min P: %s, max P: %s, nfreq: %s, minfreq: %s, maxfreq: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 
0x7da1b00f73a0>, <ast.Name object at 0x7da1b00f4190>, <ast.Name object at 0x7da1b00f4070>, <ast.Name object at 0x7da1b00f7be0>, <ast.Name object at 0x7da1b00f7130>]]]]] call[name[LOGINFO], parameter[binary_operation[constant[autofreq = True: using AUTOMATIC values for freq stepsize: %s, ndurations: %s, min transit duration: %s, max transit duration: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b00f6650>, <ast.Name object at 0x7da1b00f5a80>, <ast.Name object at 0x7da1b00f60b0>, <ast.Name object at 0x7da1b00f6620>]]]]] variable[use_autoperiod] assign[=] constant[False] if compare[name[minfreq] less[<] binary_operation[constant[1.0] / binary_operation[call[name[stimes].max, parameter[]] - call[name[stimes].min, parameter[]]]]] begin[:] if name[verbose] begin[:] call[name[LOGWARNING], parameter[binary_operation[constant[the requested max P = %.3f is larger than the time base of the observations = %.3f, will make minfreq = 2 x 1/timebase] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7946d0>, <ast.BinOp object at 0x7da20c7966e0>]]]]] variable[minfreq] assign[=] binary_operation[constant[2.0] / binary_operation[call[name[stimes].max, parameter[]] - call[name[stimes].min, parameter[]]]] if name[verbose] begin[:] call[name[LOGINFO], parameter[binary_operation[constant[new minfreq: %s, maxfreq: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7960e0>, <ast.Name object at 0x7da20c7954b0>]]]]] <ast.Try object at 0x7da20c795840>
keyword[def] identifier[bls_serial_pfind] ( identifier[times] , identifier[mags] , identifier[errs] , identifier[magsarefluxes] = keyword[False] , identifier[startp] = literal[int] , identifier[endp] = literal[int] , identifier[stepsize] = literal[int] , identifier[mintransitduration] = literal[int] , identifier[maxtransitduration] = literal[int] , identifier[ndurations] = literal[int] , identifier[autofreq] = keyword[True] , identifier[blsobjective] = literal[string] , identifier[blsmethod] = literal[string] , identifier[blsoversample] = literal[int] , identifier[blsmintransits] = literal[int] , identifier[blsfreqfactor] = literal[int] , identifier[periodepsilon] = literal[int] , identifier[nbestpeaks] = literal[int] , identifier[sigclip] = literal[int] , identifier[verbose] = keyword[True] , identifier[raiseonfail] = keyword[False] ): literal[string] identifier[stimes] , identifier[smags] , identifier[serrs] = identifier[sigclip_magseries] ( identifier[times] , identifier[mags] , identifier[errs] , identifier[magsarefluxes] = identifier[magsarefluxes] , identifier[sigclip] = identifier[sigclip] ) keyword[if] identifier[len] ( identifier[stimes] )> literal[int] keyword[and] identifier[len] ( identifier[smags] )> literal[int] keyword[and] identifier[len] ( identifier[serrs] )> literal[int] : keyword[if] identifier[isinstance] ( identifier[autofreq] , identifier[bool] ) keyword[and] identifier[autofreq] : identifier[stepsize] = literal[int] * identifier[mintransitduration] /( identifier[stimes] . identifier[max] ()- identifier[stimes] . 
identifier[min] ()) identifier[minfreq] = literal[int] / identifier[endp] identifier[maxfreq] = literal[int] / identifier[startp] identifier[nfreq] = identifier[int] ( identifier[npceil] (( identifier[maxfreq] - identifier[minfreq] )/ identifier[stepsize] )) keyword[if] identifier[verbose] : identifier[LOGINFO] ( literal[string] literal[string] %( identifier[startp] , identifier[endp] , identifier[nfreq] , identifier[minfreq] , identifier[maxfreq] )) identifier[LOGINFO] ( literal[string] literal[string] literal[string] % ( identifier[stepsize] , identifier[ndurations] , identifier[mintransitduration] , identifier[maxtransitduration] )) identifier[use_autoperiod] = keyword[False] keyword[elif] identifier[isinstance] ( identifier[autofreq] , identifier[bool] ) keyword[and] keyword[not] identifier[autofreq] : identifier[minfreq] = literal[int] / identifier[endp] identifier[maxfreq] = literal[int] / identifier[startp] identifier[nfreq] = identifier[int] ( identifier[npceil] (( identifier[maxfreq] - identifier[minfreq] )/ identifier[stepsize] )) keyword[if] identifier[verbose] : identifier[LOGINFO] ( literal[string] literal[string] %( identifier[startp] , identifier[endp] , identifier[nfreq] , identifier[minfreq] , identifier[maxfreq] )) identifier[LOGINFO] ( literal[string] literal[string] literal[string] % ( identifier[stepsize] , identifier[ndurations] , identifier[mintransitduration] , identifier[maxtransitduration] )) identifier[use_autoperiod] = keyword[False] keyword[elif] identifier[isinstance] ( identifier[autofreq] , identifier[str] ) keyword[and] identifier[autofreq] == literal[string] : identifier[use_autoperiod] = keyword[True] identifier[minfreq] = literal[int] / identifier[endp] identifier[maxfreq] = literal[int] / identifier[startp] keyword[else] : identifier[LOGERROR] ( literal[string] ) keyword[return] keyword[None] keyword[if] identifier[minfreq] <( literal[int] /( identifier[stimes] . identifier[max] ()- identifier[stimes] . 
identifier[min] ())): keyword[if] identifier[verbose] : identifier[LOGWARNING] ( literal[string] literal[string] literal[string] %( identifier[endp] , identifier[stimes] . identifier[max] ()- identifier[stimes] . identifier[min] ())) identifier[minfreq] = literal[int] /( identifier[stimes] . identifier[max] ()- identifier[stimes] . identifier[min] ()) keyword[if] identifier[verbose] : identifier[LOGINFO] ( literal[string] % ( identifier[minfreq] , identifier[maxfreq] )) keyword[try] : identifier[durations] = identifier[nplinspace] ( identifier[mintransitduration] * identifier[startp] , identifier[maxtransitduration] * identifier[startp] , identifier[ndurations] ) keyword[if] identifier[magsarefluxes] : identifier[blsmodel] = identifier[BoxLeastSquares] ( identifier[stimes] * identifier[u] . identifier[day] , identifier[smags] * identifier[u] . identifier[dimensionless_unscaled] , identifier[dy] = identifier[serrs] * identifier[u] . identifier[dimensionless_unscaled] ) keyword[else] : identifier[blsmodel] = identifier[BoxLeastSquares] ( identifier[stimes] * identifier[u] . identifier[day] , identifier[smags] * identifier[u] . identifier[mag] , identifier[dy] = identifier[serrs] * identifier[u] . identifier[mag] ) keyword[if] identifier[use_autoperiod] : identifier[periods] = identifier[nparray] ( identifier[blsmodel] . identifier[autoperiod] ( identifier[durations] , identifier[minimum_period] = identifier[startp] , identifier[maximum_period] = identifier[endp] , identifier[minimum_n_transit] = identifier[blsmintransits] , identifier[frequency_factor] = identifier[blsfreqfactor] ) ) identifier[nfreq] = identifier[periods] . 
identifier[size] keyword[if] identifier[verbose] : identifier[LOGINFO] ( literal[string] literal[string] literal[string] % ( identifier[blsmintransits] , identifier[blsfreqfactor] ) ) identifier[LOGINFO] ( literal[string] literal[string] % ( identifier[abs] ( literal[int] / identifier[periods] [ literal[int] ]- literal[int] / identifier[periods] [ literal[int] ]), identifier[nfreq] , literal[int] / identifier[periods] . identifier[max] (), literal[int] / identifier[periods] . identifier[min] (), identifier[durations] . identifier[size] )) keyword[else] : identifier[frequencies] = identifier[minfreq] + identifier[nparange] ( identifier[nfreq] )* identifier[stepsize] identifier[periods] = literal[int] / identifier[frequencies] keyword[if] identifier[nfreq] > literal[int] : keyword[if] identifier[verbose] : identifier[LOGWARNING] ( literal[string] literal[string] literal[string] literal[string] ) identifier[blsresult] = identifier[blsmodel] . identifier[power] ( identifier[periods] * identifier[u] . identifier[day] , identifier[durations] * identifier[u] . identifier[day] , identifier[objective] = identifier[blsobjective] , identifier[method] = identifier[blsmethod] , identifier[oversample] = identifier[blsoversample] ) identifier[lsp] = identifier[nparray] ( identifier[blsresult] . 
identifier[power] ) identifier[finitepeakind] = identifier[npisfinite] ( identifier[lsp] ) identifier[finlsp] = identifier[lsp] [ identifier[finitepeakind] ] identifier[finperiods] = identifier[periods] [ identifier[finitepeakind] ] keyword[try] : identifier[bestperiodind] = identifier[npargmax] ( identifier[finlsp] ) keyword[except] identifier[ValueError] : identifier[LOGERROR] ( literal[string] literal[string] ) keyword[return] { literal[string] : identifier[npnan] , literal[string] : identifier[npnan] , literal[string] : identifier[nbestpeaks] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] :{ literal[string] : identifier[startp] , literal[string] : identifier[endp] , literal[string] : identifier[stepsize] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : identifier[ndurations] , literal[string] : identifier[blsobjective] , literal[string] : identifier[blsmethod] , literal[string] : identifier[blsoversample] , literal[string] : identifier[blsmintransits] , literal[string] : identifier[blsfreqfactor] , literal[string] : identifier[autofreq] , literal[string] : identifier[periodepsilon] , literal[string] : identifier[nbestpeaks] , literal[string] : identifier[sigclip] , literal[string] : identifier[magsarefluxes] }} identifier[sortedlspind] = identifier[npargsort] ( identifier[finlsp] )[::- literal[int] ] identifier[sortedlspperiods] = identifier[finperiods] [ identifier[sortedlspind] ] identifier[sortedlspvals] = identifier[finlsp] [ identifier[sortedlspind] ] identifier[nbestperiods] , identifier[nbestlspvals] , identifier[nbestinds] , identifier[peakcount] =( [ identifier[finperiods] [ identifier[bestperiodind] ]], [ 
identifier[finlsp] [ identifier[bestperiodind] ]], [ identifier[bestperiodind] ], literal[int] ) identifier[prevperiod] = identifier[sortedlspperiods] [ literal[int] ] keyword[for] identifier[period] , identifier[lspval] , identifier[ind] keyword[in] identifier[zip] ( identifier[sortedlspperiods] , identifier[sortedlspvals] , identifier[sortedlspind] ): keyword[if] identifier[peakcount] == identifier[nbestpeaks] : keyword[break] identifier[perioddiff] = identifier[abs] ( identifier[period] - identifier[prevperiod] ) identifier[bestperiodsdiff] =[ identifier[abs] ( identifier[period] - identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[nbestperiods] ] keyword[if] ( identifier[perioddiff] >( identifier[periodepsilon] * identifier[prevperiod] ) keyword[and] identifier[all] ( identifier[x] >( identifier[periodepsilon] * identifier[period] ) keyword[for] identifier[x] keyword[in] identifier[bestperiodsdiff] )): identifier[nbestperiods] . identifier[append] ( identifier[period] ) identifier[nbestlspvals] . identifier[append] ( identifier[lspval] ) identifier[nbestinds] . 
identifier[append] ( identifier[ind] ) identifier[peakcount] = identifier[peakcount] + literal[int] identifier[prevperiod] = identifier[period] identifier[resultdict] ={ literal[string] : identifier[finperiods] [ identifier[bestperiodind] ], literal[string] : identifier[finlsp] [ identifier[bestperiodind] ], literal[string] : identifier[nbestpeaks] , literal[string] : identifier[nbestinds] , literal[string] : identifier[nbestlspvals] , literal[string] : identifier[nbestperiods] , literal[string] : identifier[lsp] , literal[string] : identifier[frequencies] , literal[string] : identifier[periods] , literal[string] : identifier[durations] , literal[string] : identifier[blsresult] , literal[string] : identifier[blsmodel] , literal[string] : identifier[stepsize] , literal[string] : identifier[nfreq] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[startp] , literal[string] : identifier[endp] , literal[string] : identifier[stepsize] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : identifier[ndurations] , literal[string] : identifier[blsobjective] , literal[string] : identifier[blsmethod] , literal[string] : identifier[blsoversample] , literal[string] : identifier[blsmintransits] , literal[string] : identifier[blsfreqfactor] , literal[string] : identifier[autofreq] , literal[string] : identifier[periodepsilon] , literal[string] : identifier[nbestpeaks] , literal[string] : identifier[sigclip] , literal[string] : identifier[magsarefluxes] } } keyword[return] identifier[resultdict] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[LOGEXCEPTION] ( literal[string] ) keyword[if] identifier[raiseonfail] : keyword[raise] keyword[return] { literal[string] : identifier[npnan] , literal[string] : identifier[npnan] , literal[string] : 
keyword[None] , literal[string] : identifier[nbestpeaks] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : identifier[stepsize] , literal[string] : identifier[nfreq] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[startp] , literal[string] : identifier[endp] , literal[string] : identifier[stepsize] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : identifier[ndurations] , literal[string] : identifier[blsobjective] , literal[string] : identifier[blsmethod] , literal[string] : identifier[blsoversample] , literal[string] : identifier[blsmintransits] , literal[string] : identifier[blsfreqfactor] , literal[string] : identifier[autofreq] , literal[string] : identifier[periodepsilon] , literal[string] : identifier[nbestpeaks] , literal[string] : identifier[sigclip] , literal[string] : identifier[magsarefluxes] }} keyword[else] : identifier[LOGERROR] ( literal[string] ) keyword[return] { literal[string] : identifier[npnan] , literal[string] : identifier[npnan] , literal[string] : keyword[None] , literal[string] : identifier[nbestpeaks] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : identifier[stepsize] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : literal[string] , literal[string] :{ literal[string] : 
identifier[startp] , literal[string] : identifier[endp] , literal[string] : identifier[stepsize] , literal[string] : identifier[mintransitduration] , literal[string] : identifier[maxtransitduration] , literal[string] : identifier[ndurations] , literal[string] : identifier[blsobjective] , literal[string] : identifier[blsmethod] , literal[string] : identifier[blsoversample] , literal[string] : identifier[blsmintransits] , literal[string] : identifier[blsfreqfactor] , literal[string] : identifier[autofreq] , literal[string] : identifier[periodepsilon] , literal[string] : identifier[nbestpeaks] , literal[string] : identifier[sigclip] , literal[string] : identifier[magsarefluxes] }}
def bls_serial_pfind(times, mags, errs, magsarefluxes=False, startp=0.1, endp=100.0, stepsize=0.0005, mintransitduration=0.01, maxtransitduration=0.4, ndurations=100, autofreq=True, blsobjective='likelihood', blsmethod='fast', blsoversample=10, blsmintransits=3, blsfreqfactor=10.0, periodepsilon=0.1, nbestpeaks=5, sigclip=10.0, verbose=True, raiseonfail=False): # search from 0.1 d to... # ... 100.0 d -- don't search full timebase # minimum transit length in phase # maximum transit length in phase # figure out f0, nf, and df automatically 'Runs the Box Least Squares Fitting Search for transit-shaped signals.\n\n Based on the version of BLS in Astropy 3.1:\n `astropy.stats.BoxLeastSquares`. If you don\'t have Astropy 3.1, this module\n will fail to import. Note that by default, this implementation of\n `bls_serial_pfind` doesn\'t use the `.autoperiod()` function from\n `BoxLeastSquares` but uses the same auto frequency-grid generation as the\n functions in `periodbase.kbls`. If you want to use Astropy\'s implementation,\n set the value of `autofreq` kwarg to \'astropy\'.\n\n The dict returned from this function contains a `blsmodel` key, which is the\n generated model from Astropy\'s BLS. 
Use the `.compute_stats()` method to\n calculate the required stats like SNR, depth, duration, etc.\n\n Parameters\n ----------\n\n times,mags,errs : np.array\n The magnitude/flux time-series to search for transits.\n\n magsarefluxes : bool\n If the input measurement values in `mags` and `errs` are in fluxes, set\n this to True.\n\n startp,endp : float\n The minimum and maximum periods to consider for the transit search.\n\n stepsize : float\n The step-size in frequency to use when constructing a frequency grid for\n the period search.\n\n mintransitduration,maxtransitduration : float\n The minimum and maximum transitdurations (in units of phase) to consider\n for the transit search.\n\n ndurations : int\n The number of transit durations to use in the period-search.\n\n autofreq : bool or str\n If this is True, the values of `stepsize` and `nphasebins` will be\n ignored, and these, along with a frequency-grid, will be determined\n based on the following relations::\n\n nphasebins = int(ceil(2.0/mintransitduration))\n if nphasebins > 3000:\n nphasebins = 3000\n\n stepsize = 0.25*mintransitduration/(times.max()-times.min())\n\n minfreq = 1.0/endp\n maxfreq = 1.0/startp\n nfreq = int(ceil((maxfreq - minfreq)/stepsize))\n\n If this is False, you must set `startp`, `endp`, and `stepsize` as\n appropriate.\n\n If this is str == \'astropy\', will use the\n `astropy.stats.BoxLeastSquares.autoperiod()` function to calculate the\n frequency grid instead of the kbls method.\n\n blsobjective : {\'likelihood\',\'snr\'}\n Sets the type of objective to optimize in the `BoxLeastSquares.power()`\n function.\n\n blsmethod : {\'fast\',\'slow\'}\n Sets the type of method to use in the `BoxLeastSquares.power()`\n function.\n\n blsoversample : {\'likelihood\',\'snr\'}\n Sets the `oversample` kwarg for the `BoxLeastSquares.power()` function.\n\n blsmintransits : int\n Sets the `min_n_transits` kwarg for the `BoxLeastSquares.autoperiod()`\n function.\n\n blsfreqfactor : float\n Sets the 
`frequency_factor` kwarg for the `BoxLeastSquares.autperiod()`\n function.\n\n periodepsilon : float\n The fractional difference between successive values of \'best\' periods\n when sorting by periodogram power to consider them as separate periods\n (as opposed to part of the same periodogram peak). This is used to avoid\n broad peaks in the periodogram and make sure the \'best\' periods returned\n are all actually independent.\n\n nbestpeaks : int\n The number of \'best\' peaks to return from the periodogram results,\n starting from the global maximum of the periodogram peak values.\n\n sigclip : float or int or sequence of two floats/ints or None\n If a single float or int, a symmetric sigma-clip will be performed using\n the number provided as the sigma-multiplier to cut out from the input\n time-series.\n\n If a list of two ints/floats is provided, the function will perform an\n \'asymmetric\' sigma-clip. The first element in this list is the sigma\n value to use for fainter flux/mag values; the second element in this\n list is the sigma value to use for brighter flux/mag values. For\n example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma\n dimmings and greater than 3-sigma brightenings. Here the meaning of\n "dimming" and "brightening" is set by *physics* (not the magnitude\n system), which is why the `magsarefluxes` kwarg must be correctly set.\n\n If `sigclip` is None, no sigma-clipping will be performed, and the\n time-series (with non-finite elems removed) will be passed through to\n the output.\n\n verbose : bool\n If this is True, will indicate progress and details about the frequency\n grid used for the period search.\n\n raiseonfail : bool\n If True, raises an exception if something goes wrong. Otherwise, returns\n None.\n\n Returns\n -------\n\n dict\n This function returns a dict, referred to as an `lspinfo` dict in other\n astrobase functions that operate on periodogram results. 
This is a\n standardized format across all astrobase period-finders, and is of the\n form below::\n\n {\'bestperiod\': the best period value in the periodogram,\n \'bestlspval\': the periodogram peak associated with the best period,\n \'nbestpeaks\': the input value of nbestpeaks,\n \'nbestlspvals\': nbestpeaks-size list of best period peak values,\n \'nbestperiods\': nbestpeaks-size list of best periods,\n \'lspvals\': the full array of periodogram powers,\n \'frequencies\': the full array of frequencies considered,\n \'periods\': the full array of periods considered,\n \'durations\': the array of durations used to run BLS,\n \'blsresult\': Astropy BLS result object (BoxLeastSquaresResult),\n \'blsmodel\': Astropy BLS BoxLeastSquares object used for work,\n \'stepsize\': the actual stepsize used,\n \'nfreq\': the actual nfreq used,\n \'durations\': the durations array used,\n \'mintransitduration\': the input mintransitduration,\n \'maxtransitduration\': the input maxtransitdurations,\n \'method\':\'bls\' -> the name of the period-finder method,\n \'kwargs\':{ dict of all of the input kwargs for record-keeping}}\n\n ' # get rid of nans first and sigclip (stimes, smags, serrs) = sigclip_magseries(times, mags, errs, magsarefluxes=magsarefluxes, sigclip=sigclip) # make sure there are enough points to calculate a spectrum if len(stimes) > 9 and len(smags) > 9 and (len(serrs) > 9): # if we're setting up everything automatically if isinstance(autofreq, bool) and autofreq: # use heuristic to figure out best timestep stepsize = 0.25 * mintransitduration / (stimes.max() - stimes.min()) # now figure out the frequencies to use minfreq = 1.0 / endp maxfreq = 1.0 / startp nfreq = int(npceil((maxfreq - minfreq) / stepsize)) # say what we're using if verbose: LOGINFO('min P: %s, max P: %s, nfreq: %s, minfreq: %s, maxfreq: %s' % (startp, endp, nfreq, minfreq, maxfreq)) LOGINFO('autofreq = True: using AUTOMATIC values for freq stepsize: %s, ndurations: %s, min transit duration: 
%s, max transit duration: %s' % (stepsize, ndurations, mintransitduration, maxtransitduration)) # depends on [control=['if'], data=[]] use_autoperiod = False # depends on [control=['if'], data=[]] elif isinstance(autofreq, bool) and (not autofreq): minfreq = 1.0 / endp maxfreq = 1.0 / startp nfreq = int(npceil((maxfreq - minfreq) / stepsize)) # say what we're using if verbose: LOGINFO('min P: %s, max P: %s, nfreq: %s, minfreq: %s, maxfreq: %s' % (startp, endp, nfreq, minfreq, maxfreq)) LOGINFO('autofreq = False: using PROVIDED values for freq stepsize: %s, ndurations: %s, min transit duration: %s, max transit duration: %s' % (stepsize, ndurations, mintransitduration, maxtransitduration)) # depends on [control=['if'], data=[]] use_autoperiod = False # depends on [control=['if'], data=[]] elif isinstance(autofreq, str) and autofreq == 'astropy': use_autoperiod = True minfreq = 1.0 / endp maxfreq = 1.0 / startp # depends on [control=['if'], data=[]] else: LOGERROR("unknown autofreq kwarg encountered. can't continue...") return None # check the time-base vs. 
endp value if minfreq < 1.0 / (stimes.max() - stimes.min()): if verbose: LOGWARNING('the requested max P = %.3f is larger than the time base of the observations = %.3f, will make minfreq = 2 x 1/timebase' % (endp, stimes.max() - stimes.min())) # depends on [control=['if'], data=[]] minfreq = 2.0 / (stimes.max() - stimes.min()) if verbose: LOGINFO('new minfreq: %s, maxfreq: %s' % (minfreq, maxfreq)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['minfreq']] # run BLS try: # astropy's BLS requires durations in units of time durations = nplinspace(mintransitduration * startp, maxtransitduration * startp, ndurations) # set up the correct units for the BLS model if magsarefluxes: blsmodel = BoxLeastSquares(stimes * u.day, smags * u.dimensionless_unscaled, dy=serrs * u.dimensionless_unscaled) # depends on [control=['if'], data=[]] else: blsmodel = BoxLeastSquares(stimes * u.day, smags * u.mag, dy=serrs * u.mag) # use autoperiod if requested if use_autoperiod: periods = nparray(blsmodel.autoperiod(durations, minimum_period=startp, maximum_period=endp, minimum_n_transit=blsmintransits, frequency_factor=blsfreqfactor)) nfreq = periods.size if verbose: LOGINFO("autofreq = 'astropy', used .autoperiod() with minimum_n_transit = %s, freq_factor = %s to generate the frequency grid" % (blsmintransits, blsfreqfactor)) LOGINFO('stepsize = %.5f, nfreq = %s, minfreq = %.5f, maxfreq = %.5f, ndurations = %s' % (abs(1.0 / periods[1] - 1.0 / periods[0]), nfreq, 1.0 / periods.max(), 1.0 / periods.min(), durations.size)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # otherwise, use kbls method frequencies = minfreq + nparange(nfreq) * stepsize periods = 1.0 / frequencies if nfreq > 500000.0: if verbose: LOGWARNING('more than 5.0e5 frequencies to go through; this will take a while. 
you might want to use the abls.bls_parallel_pfind function instead') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # run the periodogram blsresult = blsmodel.power(periods * u.day, durations * u.day, objective=blsobjective, method=blsmethod, oversample=blsoversample) # get the peak values lsp = nparray(blsresult.power) # find the nbestpeaks for the periodogram: 1. sort the lsp array # by highest value first 2. go down the values until we find # five values that are separated by at least periodepsilon in # period # make sure to get only the finite peaks in the periodogram # this is needed because BLS may produce infs for some peaks finitepeakind = npisfinite(lsp) finlsp = lsp[finitepeakind] finperiods = periods[finitepeakind] # make sure that finlsp has finite values before we work on it try: bestperiodind = npargmax(finlsp) # depends on [control=['try'], data=[]] except ValueError: LOGERROR('no finite periodogram values for this mag series, skipping...') return {'bestperiod': npnan, 'bestlspval': npnan, 'nbestpeaks': nbestpeaks, 'nbestinds': None, 'nbestlspvals': None, 'nbestperiods': None, 'lspvals': None, 'periods': None, 'durations': None, 'method': 'bls', 'blsresult': None, 'blsmodel': None, 'kwargs': {'startp': startp, 'endp': endp, 'stepsize': stepsize, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'ndurations': ndurations, 'blsobjective': blsobjective, 'blsmethod': blsmethod, 'blsoversample': blsoversample, 'blsntransits': blsmintransits, 'blsfreqfactor': blsfreqfactor, 'autofreq': autofreq, 'periodepsilon': periodepsilon, 'nbestpeaks': nbestpeaks, 'sigclip': sigclip, 'magsarefluxes': magsarefluxes}} # depends on [control=['except'], data=[]] sortedlspind = npargsort(finlsp)[::-1] sortedlspperiods = finperiods[sortedlspind] sortedlspvals = finlsp[sortedlspind] # now get the nbestpeaks (nbestperiods, nbestlspvals, nbestinds, peakcount) = ([finperiods[bestperiodind]], 
[finlsp[bestperiodind]], [bestperiodind], 1) prevperiod = sortedlspperiods[0] # find the best nbestpeaks in the lsp and their periods for (period, lspval, ind) in zip(sortedlspperiods, sortedlspvals, sortedlspind): if peakcount == nbestpeaks: break # depends on [control=['if'], data=[]] perioddiff = abs(period - prevperiod) bestperiodsdiff = [abs(period - x) for x in nbestperiods] # print('prevperiod = %s, thisperiod = %s, ' # 'perioddiff = %s, peakcount = %s' % # (prevperiod, period, perioddiff, peakcount)) # this ensures that this period is different from the last # period and from all the other existing best periods by # periodepsilon to make sure we jump to an entire different # peak in the periodogram if perioddiff > periodepsilon * prevperiod and all((x > periodepsilon * period for x in bestperiodsdiff)): nbestperiods.append(period) nbestlspvals.append(lspval) nbestinds.append(ind) peakcount = peakcount + 1 # depends on [control=['if'], data=[]] prevperiod = period # depends on [control=['for'], data=[]] # generate the return dict resultdict = {'bestperiod': finperiods[bestperiodind], 'bestlspval': finlsp[bestperiodind], 'nbestpeaks': nbestpeaks, 'nbestinds': nbestinds, 'nbestlspvals': nbestlspvals, 'nbestperiods': nbestperiods, 'lspvals': lsp, 'frequencies': frequencies, 'periods': periods, 'durations': durations, 'blsresult': blsresult, 'blsmodel': blsmodel, 'stepsize': stepsize, 'nfreq': nfreq, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'method': 'bls', 'kwargs': {'startp': startp, 'endp': endp, 'stepsize': stepsize, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'ndurations': ndurations, 'blsobjective': blsobjective, 'blsmethod': blsmethod, 'blsoversample': blsoversample, 'blsntransits': blsmintransits, 'blsfreqfactor': blsfreqfactor, 'autofreq': autofreq, 'periodepsilon': periodepsilon, 'nbestpeaks': nbestpeaks, 'sigclip': sigclip, 'magsarefluxes': magsarefluxes}} return 
resultdict # depends on [control=['try'], data=[]] except Exception as e: LOGEXCEPTION('BLS failed!') if raiseonfail: raise # depends on [control=['if'], data=[]] return {'bestperiod': npnan, 'bestlspval': npnan, 'nbestinds': None, 'nbestpeaks': nbestpeaks, 'nbestlspvals': None, 'nbestperiods': None, 'lspvals': None, 'periods': None, 'durations': None, 'blsresult': None, 'blsmodel': None, 'stepsize': stepsize, 'nfreq': nfreq, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'method': 'bls', 'kwargs': {'startp': startp, 'endp': endp, 'stepsize': stepsize, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'ndurations': ndurations, 'blsobjective': blsobjective, 'blsmethod': blsmethod, 'blsoversample': blsoversample, 'blsntransits': blsmintransits, 'blsfreqfactor': blsfreqfactor, 'autofreq': autofreq, 'periodepsilon': periodepsilon, 'nbestpeaks': nbestpeaks, 'sigclip': sigclip, 'magsarefluxes': magsarefluxes}} # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: LOGERROR('no good detections for these times and mags, skipping...') return {'bestperiod': npnan, 'bestlspval': npnan, 'nbestinds': None, 'nbestpeaks': nbestpeaks, 'nbestlspvals': None, 'nbestperiods': None, 'lspvals': None, 'periods': None, 'durations': None, 'blsresult': None, 'blsmodel': None, 'stepsize': stepsize, 'nfreq': None, 'nphasebins': None, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'method': 'bls', 'kwargs': {'startp': startp, 'endp': endp, 'stepsize': stepsize, 'mintransitduration': mintransitduration, 'maxtransitduration': maxtransitduration, 'ndurations': ndurations, 'blsobjective': blsobjective, 'blsmethod': blsmethod, 'blsoversample': blsoversample, 'blsntransits': blsmintransits, 'blsfreqfactor': blsfreqfactor, 'autofreq': autofreq, 'periodepsilon': periodepsilon, 'nbestpeaks': nbestpeaks, 'sigclip': sigclip, 'magsarefluxes': magsarefluxes}}
def plot_ioncell_relaxation(self, **kwargs):
    """
    Plot the history of the ion-cell relaxation.

    kwargs are forwarded to the plot method of :class:`HistFile`.

    Return `matplotlib` figure, or None when no hist file is found.
    """
    with self.ioncell_task.open_hist() as hist:
        # No history file available for this task -> nothing to plot.
        if not hist:
            return None
        return hist.plot(**kwargs)
def function[plot_ioncell_relaxation, parameter[self]]: constant[ Plot the history of the ion-cell relaxation. kwargs are passed to the plot method of :class:`HistFile` Return `matplotlib` figure or None if hist file is not found. ] with call[name[self].ioncell_task.open_hist, parameter[]] begin[:] return[<ast.IfExp object at 0x7da18dc06860>]
keyword[def] identifier[plot_ioncell_relaxation] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[with] identifier[self] . identifier[ioncell_task] . identifier[open_hist] () keyword[as] identifier[hist] : keyword[return] identifier[hist] . identifier[plot] (** identifier[kwargs] ) keyword[if] identifier[hist] keyword[else] keyword[None]
def plot_ioncell_relaxation(self, **kwargs): """ Plot the history of the ion-cell relaxation. kwargs are passed to the plot method of :class:`HistFile` Return `matplotlib` figure or None if hist file is not found. """ with self.ioncell_task.open_hist() as hist: return hist.plot(**kwargs) if hist else None # depends on [control=['with'], data=['hist']]
def pttl(self, name):
    """
    Look up how many milliseconds remain before the key ``name`` expires.

    :param name: str the name of the redis key
    :return: remaining time-to-live of the key, in milliseconds
    """
    with self.pipe as pipe:
        # Namespace the bare name into the full redis key before querying.
        full_key = self.redis_key(name)
        return pipe.pttl(full_key)
def function[pttl, parameter[self, name]]: constant[ Returns the number of milliseconds until the key ``name`` will expire :param name: str the name of the redis key :return: ] with name[self].pipe begin[:] return[call[name[pipe].pttl, parameter[call[name[self].redis_key, parameter[name[name]]]]]]
keyword[def] identifier[pttl] ( identifier[self] , identifier[name] ): literal[string] keyword[with] identifier[self] . identifier[pipe] keyword[as] identifier[pipe] : keyword[return] identifier[pipe] . identifier[pttl] ( identifier[self] . identifier[redis_key] ( identifier[name] ))
def pttl(self, name): """ Returns the number of milliseconds until the key ``name`` will expire :param name: str the name of the redis key :return: """ with self.pipe as pipe: return pipe.pttl(self.redis_key(name)) # depends on [control=['with'], data=['pipe']]
def get_symmetric_site(self, point, cartesian=False):
    """
    This method uses symmetry operations to find equivalent sites on
    both sides of the slab. Works mainly for slabs with Laue
    symmetry. This is useful for retaining the non-polar and
    symmetric properties of a slab when creating adsorbed
    structures or symmetric reconstructions.

    Arg:
        point: Fractional coordinate.
        cartesian: If True, treat ``point`` as a Cartesian coordinate and
            request Cartesian symmetry operations instead of fractional ones.

    Returns:
        point: Fractional coordinate. A point equivalent to the
            parameter point, but on the other side of the slab
    """
    sg = SpacegroupAnalyzer(self)
    ops = sg.get_symmetry_operations(cartesian=cartesian)

    # Each operation on a point will return an equivalent point.
    # We want to find the point on the other side of the slab.
    # NOTE(review): if `ops` is empty or every candidate is rejected by the
    # `continue` below, `site2` may be unbound (or hold the last rejected
    # candidate) at the final return -- confirm against callers.
    for op in ops:
        slab = self.copy()
        site2 = op.operate(point)
        # Skip operations that leave the point on the same side of the slab
        # (compared via the third coordinate, rounded to 6 decimal places).
        if "%.6f" % (site2[2]) == "%.6f" % (point[2]):
            continue

        # Add dummy site to check the overall structure is symmetric
        slab.append("O", point, coords_are_cartesian=cartesian)
        slab.append("O", site2, coords_are_cartesian=cartesian)
        sg = SpacegroupAnalyzer(slab)

        # Laue symmetry with both dummy sites present means site2 is the
        # equivalent point on the other side -- accept it.
        if sg.is_laue():
            break
        else:
            # If not symmetric, remove the two added
            # sites and try another symmetry operator
            # NOTE(review): `slab` is recreated by `self.copy()` at the top
            # of each iteration, so these removals look redundant -- verify.
            slab.remove_sites([len(slab) - 1])
            slab.remove_sites([len(slab) - 1])

    return site2
def function[get_symmetric_site, parameter[self, point, cartesian]]: constant[ This method uses symmetry operations to find equivalent sites on both sides of the slab. Works mainly for slabs with Laue symmetry. This is useful for retaining the non-polar and symmetric properties of a slab when creating adsorbed structures or symmetric reconstructions. Arg: point: Fractional coordinate. Returns: point: Fractional coordinate. A point equivalent to the parameter point, but on the other side of the slab ] variable[sg] assign[=] call[name[SpacegroupAnalyzer], parameter[name[self]]] variable[ops] assign[=] call[name[sg].get_symmetry_operations, parameter[]] for taget[name[op]] in starred[name[ops]] begin[:] variable[slab] assign[=] call[name[self].copy, parameter[]] variable[site2] assign[=] call[name[op].operate, parameter[name[point]]] if compare[binary_operation[constant[%.6f] <ast.Mod object at 0x7da2590d6920> call[name[site2]][constant[2]]] equal[==] binary_operation[constant[%.6f] <ast.Mod object at 0x7da2590d6920> call[name[point]][constant[2]]]] begin[:] continue call[name[slab].append, parameter[constant[O], name[point]]] call[name[slab].append, parameter[constant[O], name[site2]]] variable[sg] assign[=] call[name[SpacegroupAnalyzer], parameter[name[slab]]] if call[name[sg].is_laue, parameter[]] begin[:] break return[name[site2]]
keyword[def] identifier[get_symmetric_site] ( identifier[self] , identifier[point] , identifier[cartesian] = keyword[False] ): literal[string] identifier[sg] = identifier[SpacegroupAnalyzer] ( identifier[self] ) identifier[ops] = identifier[sg] . identifier[get_symmetry_operations] ( identifier[cartesian] = identifier[cartesian] ) keyword[for] identifier[op] keyword[in] identifier[ops] : identifier[slab] = identifier[self] . identifier[copy] () identifier[site2] = identifier[op] . identifier[operate] ( identifier[point] ) keyword[if] literal[string] %( identifier[site2] [ literal[int] ])== literal[string] %( identifier[point] [ literal[int] ]): keyword[continue] identifier[slab] . identifier[append] ( literal[string] , identifier[point] , identifier[coords_are_cartesian] = identifier[cartesian] ) identifier[slab] . identifier[append] ( literal[string] , identifier[site2] , identifier[coords_are_cartesian] = identifier[cartesian] ) identifier[sg] = identifier[SpacegroupAnalyzer] ( identifier[slab] ) keyword[if] identifier[sg] . identifier[is_laue] (): keyword[break] keyword[else] : identifier[slab] . identifier[remove_sites] ([ identifier[len] ( identifier[slab] )- literal[int] ]) identifier[slab] . identifier[remove_sites] ([ identifier[len] ( identifier[slab] )- literal[int] ]) keyword[return] identifier[site2]
def get_symmetric_site(self, point, cartesian=False): """ This method uses symmetry operations to find equivalent sites on both sides of the slab. Works mainly for slabs with Laue symmetry. This is useful for retaining the non-polar and symmetric properties of a slab when creating adsorbed structures or symmetric reconstructions. Arg: point: Fractional coordinate. Returns: point: Fractional coordinate. A point equivalent to the parameter point, but on the other side of the slab """ sg = SpacegroupAnalyzer(self) ops = sg.get_symmetry_operations(cartesian=cartesian) # Each operation on a point will return an equivalent point. # We want to find the point on the other side of the slab. for op in ops: slab = self.copy() site2 = op.operate(point) if '%.6f' % site2[2] == '%.6f' % point[2]: continue # depends on [control=['if'], data=[]] # Add dummy site to check the overall structure is symmetric slab.append('O', point, coords_are_cartesian=cartesian) slab.append('O', site2, coords_are_cartesian=cartesian) sg = SpacegroupAnalyzer(slab) if sg.is_laue(): break # depends on [control=['if'], data=[]] else: # If not symmetric, remove the two added # sites and try another symmetry operator slab.remove_sites([len(slab) - 1]) slab.remove_sites([len(slab) - 1]) # depends on [control=['for'], data=['op']] return site2
def db_en010(self, value=None):
    """
    Set IDD Field `db_en010`: mean coincident dry-bulb temperature to
    Enthalpy corresponding to 1.0% annual cumulative frequency of
    occurrence.

    Args:
        value (float): value for IDD Field `db_en010`
            Unit: C
            A value of None is treated as missing and stored unchecked.

    Raises:
        ValueError: if `value` cannot be converted to float
    """
    if value is None:
        # Missing value: store as-is without validation.
        self._db_en010 = None
        return
    try:
        converted = float(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type float for field `db_en010`'.format(value))
    self._db_en010 = converted
def function[db_en010, parameter[self, value]]: constant[ Corresponds to IDD Field `db_en010` mean coincident dry-bulb temperature to Enthalpy corresponding to 1.0% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `db_en010` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value ] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b0fefb20> name[self]._db_en010 assign[=] name[value]
keyword[def] identifier[db_en010] ( identifier[self] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[float] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) identifier[self] . identifier[_db_en010] = identifier[value]
def db_en010(self, value=None): """ Corresponds to IDD Field `db_en010` mean coincident dry-bulb temperature to Enthalpy corresponding to 1.0% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `db_en010` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('value {} need to be of type float for field `db_en010`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']] self._db_en010 = value
def delete(self):
    """
    Remove this instance's record from the database.

    :raises FrinkError: if the instance has no ``id`` (was never saved).
    :returns: True when the delete query ran successfully; any failure
        from the query is logged and re-raised.
    """
    with rconnect() as conn:
        # An object that was never persisted has no ID to delete by.
        if self.id is None:
            raise FrinkError("You can't delete an object with no ID")

        # Normalise UUID ids to their string form for the query.
        if isinstance(self.id, uuid.UUID):
            self.id = str(self.id)

        try:
            query = (
                r.db(self._db)
                .table(self._table)
                .get(self.id)
                .delete()
            )
            log.debug(query)
            query.run(conn)
        except Exception as exc:
            log.warn(exc)
            raise
        else:
            return True
def function[delete, parameter[self]]: constant[ Delete the current instance from the DB. ] with call[name[rconnect], parameter[]] begin[:] if compare[name[self].id is constant[None]] begin[:] <ast.Raise object at 0x7da1b16238b0>
keyword[def] identifier[delete] ( identifier[self] ): literal[string] keyword[with] identifier[rconnect] () keyword[as] identifier[conn] : keyword[if] identifier[self] . identifier[id] keyword[is] keyword[None] : keyword[raise] identifier[FrinkError] ( literal[string] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[self] . identifier[id] , identifier[uuid] . identifier[UUID] ): identifier[self] . identifier[id] = identifier[str] ( identifier[self] . identifier[id] ) keyword[try] : identifier[query] = identifier[r] . identifier[db] ( identifier[self] . identifier[_db] ). identifier[table] ( identifier[self] . identifier[_table] ). identifier[get] ( identifier[self] . identifier[id] ). identifier[delete] () identifier[log] . identifier[debug] ( identifier[query] ) identifier[rv] = identifier[query] . identifier[run] ( identifier[conn] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[log] . identifier[warn] ( identifier[e] ) keyword[raise] keyword[else] : keyword[return] keyword[True]
def delete(self): """ Delete the current instance from the DB. """ with rconnect() as conn: # Can't delete an object without an ID. if self.id is None: raise FrinkError("You can't delete an object with no ID") # depends on [control=['if'], data=[]] else: if isinstance(self.id, uuid.UUID): self.id = str(self.id) # depends on [control=['if'], data=[]] try: query = r.db(self._db).table(self._table).get(self.id).delete() log.debug(query) rv = query.run(conn) # depends on [control=['try'], data=[]] except Exception as e: log.warn(e) raise # depends on [control=['except'], data=['e']] else: return True # depends on [control=['with'], data=['conn']]
def _get_binary(self): """ find binaries available""" ## check for binary backup_binaries = ["raxmlHPC-PTHREADS", "raxmlHPC-PTHREADS-SSE3"] ## check user binary first, then backups for binary in [self.params.binary] + backup_binaries: proc = subprocess.Popen(["which", self.params.binary], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate() ## update the binary if proc: self.params.binary = binary ## if none then raise error if not proc[0]: raise Exception(BINARY_ERROR.format(self.params.binary))
def function[_get_binary, parameter[self]]: constant[ find binaries available] variable[backup_binaries] assign[=] list[[<ast.Constant object at 0x7da204345960>, <ast.Constant object at 0x7da204347160>]] for taget[name[binary]] in starred[binary_operation[list[[<ast.Attribute object at 0x7da2043470d0>]] + name[backup_binaries]]] begin[:] variable[proc] assign[=] call[call[name[subprocess].Popen, parameter[list[[<ast.Constant object at 0x7da204346d70>, <ast.Attribute object at 0x7da2043462f0>]]]].communicate, parameter[]] if name[proc] begin[:] name[self].params.binary assign[=] name[binary] if <ast.UnaryOp object at 0x7da204347640> begin[:] <ast.Raise object at 0x7da204344fa0>
keyword[def] identifier[_get_binary] ( identifier[self] ): literal[string] identifier[backup_binaries] =[ literal[string] , literal[string] ] keyword[for] identifier[binary] keyword[in] [ identifier[self] . identifier[params] . identifier[binary] ]+ identifier[backup_binaries] : identifier[proc] = identifier[subprocess] . identifier[Popen] ([ literal[string] , identifier[self] . identifier[params] . identifier[binary] ], identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] ). identifier[communicate] () keyword[if] identifier[proc] : identifier[self] . identifier[params] . identifier[binary] = identifier[binary] keyword[if] keyword[not] identifier[proc] [ literal[int] ]: keyword[raise] identifier[Exception] ( identifier[BINARY_ERROR] . identifier[format] ( identifier[self] . identifier[params] . identifier[binary] ))
def _get_binary(self): """ find binaries available""" ## check for binary backup_binaries = ['raxmlHPC-PTHREADS', 'raxmlHPC-PTHREADS-SSE3'] ## check user binary first, then backups for binary in [self.params.binary] + backup_binaries: proc = subprocess.Popen(['which', self.params.binary], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate() ## update the binary if proc: self.params.binary = binary # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['binary']] ## if none then raise error if not proc[0]: raise Exception(BINARY_ERROR.format(self.params.binary)) # depends on [control=['if'], data=[]]
def decision(self, result, **values):
    """
    Find the matching row and return the values of the requested headers.

    Args:
        result (list of str): Header names whose values should be returned.
        **values (dict): Lookup criteria as header-name/value pairs.

    Returns:
        A single value string when exactly one header was requested,
        otherwise a list of value strings in the same order as ``result``.

    Example:
        >>> table = DecisionTable('''
        >>> header1 header2
        >>> ===============
        >>> value1  value2
        >>> ''')
        >>>
        >>> header1, header2 = table.decision(
        >>>     ['header1', 'header2'],
        >>>     header1='value1',
        >>>     header2='value2'
        >>> )
        >>> print(header1, header2)
        (value1 value2)
    """
    row = self.__getDecision(result, **values)
    picked = [row[header] for header in result]
    return picked[0] if len(picked) == 1 else picked
def function[decision, parameter[self, result]]: constant[ The decision method with callback option. This method will find matching row, construct a dictionary and call callback with dictionary. Args: callback (function): Callback function will be called when decision will be finded. result (array of str): Array of header string **values (dict): What should finder look for, (headerString : value). Returns: Arrays of finded values strings Example: >>> table = DecisionTable(''' >>> header1 header2 >>> =============== >>> value1 value2 >>> ''') >>> >>> header1, header2 = table.decision( >>> ['header1','header2'], >>> header1='value1', >>> header2='value2' >>> ) >>> print(header1,header2) (value1 value2) ] variable[data] assign[=] call[name[self].__getDecision, parameter[name[result]]] variable[data] assign[=] <ast.ListComp object at 0x7da18dc07130> if compare[call[name[len], parameter[name[data]]] equal[==] constant[1]] begin[:] return[call[name[data]][constant[0]]]
keyword[def] identifier[decision] ( identifier[self] , identifier[result] ,** identifier[values] ): literal[string] identifier[data] = identifier[self] . identifier[__getDecision] ( identifier[result] ,** identifier[values] ) identifier[data] =[ identifier[data] [ identifier[value] ] keyword[for] identifier[value] keyword[in] identifier[result] ] keyword[if] identifier[len] ( identifier[data] )== literal[int] : keyword[return] identifier[data] [ literal[int] ] keyword[else] : keyword[return] identifier[data]
def decision(self, result, **values): """ The decision method with callback option. This method will find matching row, construct a dictionary and call callback with dictionary. Args: callback (function): Callback function will be called when decision will be finded. result (array of str): Array of header string **values (dict): What should finder look for, (headerString : value). Returns: Arrays of finded values strings Example: >>> table = DecisionTable(''' >>> header1 header2 >>> =============== >>> value1 value2 >>> ''') >>> >>> header1, header2 = table.decision( >>> ['header1','header2'], >>> header1='value1', >>> header2='value2' >>> ) >>> print(header1,header2) (value1 value2) """ data = self.__getDecision(result, **values) data = [data[value] for value in result] if len(data) == 1: return data[0] # depends on [control=['if'], data=[]] else: return data
def get_pathext(default_pathext=None):
    """Return executable path extensions from the environment or a default.

    Args:
        default_pathext (str, optional): Fallback value used when the
            ``PATHEXT`` environment variable is unset. When ``None``, a
            standard Windows list (.COM/.EXE/.BAT/.CMD) is used.

    Returns:
        str: The ``PATHEXT`` value, or the fallback.
    """
    if default_pathext is None:
        default_pathext = os.pathsep.join(('.COM', '.EXE', '.BAT', '.CMD'))
    return os.environ.get('PATHEXT', default_pathext)
def function[get_pathext, parameter[default_pathext]]: constant[Returns the path extensions from environment or a default] if compare[name[default_pathext] is constant[None]] begin[:] variable[default_pathext] assign[=] call[name[os].pathsep.join, parameter[list[[<ast.Constant object at 0x7da2044c1c90>, <ast.Constant object at 0x7da2044c3160>, <ast.Constant object at 0x7da2044c2530>, <ast.Constant object at 0x7da2044c1de0>]]]] variable[pathext] assign[=] call[name[os].environ.get, parameter[constant[PATHEXT], name[default_pathext]]] return[name[pathext]]
keyword[def] identifier[get_pathext] ( identifier[default_pathext] = keyword[None] ): literal[string] keyword[if] identifier[default_pathext] keyword[is] keyword[None] : identifier[default_pathext] = identifier[os] . identifier[pathsep] . identifier[join] ([ literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[pathext] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , identifier[default_pathext] ) keyword[return] identifier[pathext]
def get_pathext(default_pathext=None): """Returns the path extensions from environment or a default""" if default_pathext is None: default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD']) # depends on [control=['if'], data=['default_pathext']] pathext = os.environ.get('PATHEXT', default_pathext) return pathext
def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True):
    """Run a 1D example of a sparse GP regression with uncertain inputs."""
    fig, axes = pb.subplots(1, 2, figsize=(12, 5), sharex=True, sharey=True)

    # noisy samples of sin(x), plus unit variance on every input
    S = np.ones((20, 1))
    X = np.random.uniform(-3., 3., (20, 1))
    Y = np.sin(X) + np.random.randn(20, 1) * 0.05
    Z = np.random.uniform(-3., 3., (7, 1))

    def _run(model, ax, title):
        # optimise and plot one model on the given axis
        if optimize:
            model.optimize('scg', messages=1, max_iters=max_iters)
        if plot:
            model.plot(ax=ax)
            ax.set_title(title)
        return model

    # simple sparse GP - no input uncertainty on this one
    m = _run(GPy.models.SparseGPRegression(X, Y, kernel=GPy.kern.RBF(1), Z=Z),
             axes[0], 'no input uncertainty')
    print(m)

    # the same model, now with input uncertainty
    m = _run(GPy.models.SparseGPRegression(X, Y, kernel=GPy.kern.RBF(1),
                                           Z=Z, X_variance=S),
             axes[1], 'with input uncertainty')
    if plot:
        fig.canvas.draw()
    print(m)
    return m
def function[uncertain_inputs_sparse_regression, parameter[max_iters, optimize, plot]]: constant[Run a 1D example of a sparse GP regression with uncertain inputs.] <ast.Tuple object at 0x7da1b1cca950> assign[=] call[name[pb].subplots, parameter[constant[1], constant[2]]] variable[S] assign[=] call[name[np].ones, parameter[tuple[[<ast.Constant object at 0x7da1b1cc84f0>, <ast.Constant object at 0x7da1b1cc8520>]]]] variable[X] assign[=] call[name[np].random.uniform, parameter[<ast.UnaryOp object at 0x7da1b1cc8580>, constant[3.0], tuple[[<ast.Constant object at 0x7da1b1cca5f0>, <ast.Constant object at 0x7da1b1cca6e0>]]]] variable[Y] assign[=] binary_operation[call[name[np].sin, parameter[name[X]]] + binary_operation[call[name[np].random.randn, parameter[constant[20], constant[1]]] * constant[0.05]]] variable[Z] assign[=] call[name[np].random.uniform, parameter[<ast.UnaryOp object at 0x7da1b1cca1d0>, constant[3.0], tuple[[<ast.Constant object at 0x7da1b1cca290>, <ast.Constant object at 0x7da1b1cca260>]]]] variable[k] assign[=] call[name[GPy].kern.RBF, parameter[constant[1]]] variable[m] assign[=] call[name[GPy].models.SparseGPRegression, parameter[name[X], name[Y]]] if name[optimize] begin[:] call[name[m].optimize, parameter[constant[scg]]] if name[plot] begin[:] call[name[m].plot, parameter[]] call[call[name[axes]][constant[0]].set_title, parameter[constant[no input uncertainty]]] call[name[print], parameter[name[m]]] variable[m] assign[=] call[name[GPy].models.SparseGPRegression, parameter[name[X], name[Y]]] if name[optimize] begin[:] call[name[m].optimize, parameter[constant[scg]]] if name[plot] begin[:] call[name[m].plot, parameter[]] call[call[name[axes]][constant[1]].set_title, parameter[constant[with input uncertainty]]] call[name[fig].canvas.draw, parameter[]] call[name[print], parameter[name[m]]] return[name[m]]
keyword[def] identifier[uncertain_inputs_sparse_regression] ( identifier[max_iters] = literal[int] , identifier[optimize] = keyword[True] , identifier[plot] = keyword[True] ): literal[string] identifier[fig] , identifier[axes] = identifier[pb] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] =( literal[int] , literal[int] ), identifier[sharex] = keyword[True] , identifier[sharey] = keyword[True] ) identifier[S] = identifier[np] . identifier[ones] (( literal[int] , literal[int] )) identifier[X] = identifier[np] . identifier[random] . identifier[uniform] (- literal[int] , literal[int] ,( literal[int] , literal[int] )) identifier[Y] = identifier[np] . identifier[sin] ( identifier[X] )+ identifier[np] . identifier[random] . identifier[randn] ( literal[int] , literal[int] )* literal[int] identifier[Z] = identifier[np] . identifier[random] . identifier[uniform] (- literal[int] , literal[int] ,( literal[int] , literal[int] )) identifier[k] = identifier[GPy] . identifier[kern] . identifier[RBF] ( literal[int] ) identifier[m] = identifier[GPy] . identifier[models] . identifier[SparseGPRegression] ( identifier[X] , identifier[Y] , identifier[kernel] = identifier[k] , identifier[Z] = identifier[Z] ) keyword[if] identifier[optimize] : identifier[m] . identifier[optimize] ( literal[string] , identifier[messages] = literal[int] , identifier[max_iters] = identifier[max_iters] ) keyword[if] identifier[plot] : identifier[m] . identifier[plot] ( identifier[ax] = identifier[axes] [ literal[int] ]) identifier[axes] [ literal[int] ]. identifier[set_title] ( literal[string] ) identifier[print] ( identifier[m] ) identifier[m] = identifier[GPy] . identifier[models] . identifier[SparseGPRegression] ( identifier[X] , identifier[Y] , identifier[kernel] = identifier[GPy] . identifier[kern] . identifier[RBF] ( literal[int] ), identifier[Z] = identifier[Z] , identifier[X_variance] = identifier[S] ) keyword[if] identifier[optimize] : identifier[m] . 
identifier[optimize] ( literal[string] , identifier[messages] = literal[int] , identifier[max_iters] = identifier[max_iters] ) keyword[if] identifier[plot] : identifier[m] . identifier[plot] ( identifier[ax] = identifier[axes] [ literal[int] ]) identifier[axes] [ literal[int] ]. identifier[set_title] ( literal[string] ) identifier[fig] . identifier[canvas] . identifier[draw] () identifier[print] ( identifier[m] ) keyword[return] identifier[m]
def uncertain_inputs_sparse_regression(max_iters=200, optimize=True, plot=True): """Run a 1D example of a sparse GP regression with uncertain inputs.""" (fig, axes) = pb.subplots(1, 2, figsize=(12, 5), sharex=True, sharey=True) # sample inputs and outputs S = np.ones((20, 1)) X = np.random.uniform(-3.0, 3.0, (20, 1)) Y = np.sin(X) + np.random.randn(20, 1) * 0.05 # likelihood = GPy.likelihoods.Gaussian(Y) Z = np.random.uniform(-3.0, 3.0, (7, 1)) k = GPy.kern.RBF(1) # create simple GP Model - no input uncertainty on this one m = GPy.models.SparseGPRegression(X, Y, kernel=k, Z=Z) if optimize: m.optimize('scg', messages=1, max_iters=max_iters) # depends on [control=['if'], data=[]] if plot: m.plot(ax=axes[0]) axes[0].set_title('no input uncertainty') # depends on [control=['if'], data=[]] print(m) # the same Model with uncertainty m = GPy.models.SparseGPRegression(X, Y, kernel=GPy.kern.RBF(1), Z=Z, X_variance=S) if optimize: m.optimize('scg', messages=1, max_iters=max_iters) # depends on [control=['if'], data=[]] if plot: m.plot(ax=axes[1]) axes[1].set_title('with input uncertainty') fig.canvas.draw() # depends on [control=['if'], data=[]] print(m) return m
def get_series_info(self, series_id):
    """
    Get information about a series such as its title, frequency,
    observation start/end dates, units, notes, etc.

    Parameters
    ----------
    series_id : str
        Fred series id such as 'CPIAUCSL'

    Returns
    -------
    info : Series
        a pandas Series containing information about the Fred series

    Raises
    ------
    ValueError
        If no information exists for the given series id.
    """
    url = "%s/series?series_id=%s" % (self.root_url, series_id)
    root = self.__fetch_data(url)
    if root is None or not len(root):
        raise ValueError('No info exists for series id: ' + series_id)
    # Element.getchildren() was deprecated and removed in Python 3.9;
    # index the element directly to get its first child instead.
    info = pd.Series(root[0].attrib)
    return info
def function[get_series_info, parameter[self, series_id]]: constant[ Get information about a series such as its title, frequency, observation start/end dates, units, notes, etc. Parameters ---------- series_id : str Fred series id such as 'CPIAUCSL' Returns ------- info : Series a pandas Series containing information about the Fred series ] variable[url] assign[=] binary_operation[constant[%s/series?series_id=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0ff91b0>, <ast.Name object at 0x7da1b0ffb460>]]] variable[root] assign[=] call[name[self].__fetch_data, parameter[name[url]]] if <ast.BoolOp object at 0x7da1b0ffaa10> begin[:] <ast.Raise object at 0x7da1b0ff9690> variable[info] assign[=] call[name[pd].Series, parameter[call[call[name[root].getchildren, parameter[]]][constant[0]].attrib]] return[name[info]]
keyword[def] identifier[get_series_info] ( identifier[self] , identifier[series_id] ): literal[string] identifier[url] = literal[string] %( identifier[self] . identifier[root_url] , identifier[series_id] ) identifier[root] = identifier[self] . identifier[__fetch_data] ( identifier[url] ) keyword[if] identifier[root] keyword[is] keyword[None] keyword[or] keyword[not] identifier[len] ( identifier[root] ): keyword[raise] identifier[ValueError] ( literal[string] + identifier[series_id] ) identifier[info] = identifier[pd] . identifier[Series] ( identifier[root] . identifier[getchildren] ()[ literal[int] ]. identifier[attrib] ) keyword[return] identifier[info]
def get_series_info(self, series_id): """ Get information about a series such as its title, frequency, observation start/end dates, units, notes, etc. Parameters ---------- series_id : str Fred series id such as 'CPIAUCSL' Returns ------- info : Series a pandas Series containing information about the Fred series """ url = '%s/series?series_id=%s' % (self.root_url, series_id) root = self.__fetch_data(url) if root is None or not len(root): raise ValueError('No info exists for series id: ' + series_id) # depends on [control=['if'], data=[]] info = pd.Series(root.getchildren()[0].attrib) return info
def add_resource(self, resource_id, attributes, parents=None, issuer='default'):
    """
    Add the given resource with an identifier and attribute dictionary.

    example/ add_resource('/asset/12', {'id': 12, 'manufacturer': 'GE'})

    Args:
        resource_id (str): Identifier for the resource, e.g. ``'/asset/12'``.
        attributes (dict): Attribute name/value pairs.
        parents (list, optional): Parent resource identifiers; defaults to
            an empty list.
        issuer (str, optional): Issuer recorded on every attribute.

    Returns:
        The response from the underlying PUT request.
    """
    # MAINT: consider test to avoid adding duplicate resource id
    assert isinstance(attributes, dict), "attributes expected to be dict"
    # None (not a mutable []) as the default avoids the shared-default
    # pitfall: a single list object reused across every call.
    if parents is None:
        parents = []
    attrs = [
        {'issuer': issuer, 'name': name, 'value': value}
        for name, value in attributes.items()
    ]
    body = {
        "resourceIdentifier": resource_id,
        "parents": parents,
        "attributes": attrs,
    }
    return self._put_resource(resource_id, body)
def function[add_resource, parameter[self, resource_id, attributes, parents, issuer]]: constant[ Will add the given resource with a given identifier and attribute dictionary. example/ add_resource('/asset/12', {'id': 12, 'manufacturer': 'GE'}) ] assert[call[name[isinstance], parameter[name[attributes], name[dict]]]] variable[attrs] assign[=] list[[]] for taget[name[key]] in starred[call[name[attributes].keys, parameter[]]] begin[:] call[name[attrs].append, parameter[dictionary[[<ast.Constant object at 0x7da18f8137f0>, <ast.Constant object at 0x7da18f813160>, <ast.Constant object at 0x7da18f813c70>], [<ast.Name object at 0x7da18f813fd0>, <ast.Name object at 0x7da18f811480>, <ast.Subscript object at 0x7da18f8132b0>]]]] variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da18f812a70>, <ast.Constant object at 0x7da18f811e40>, <ast.Constant object at 0x7da18f8126b0>], [<ast.Name object at 0x7da18f812b00>, <ast.Name object at 0x7da18f812560>, <ast.Name object at 0x7da18f811210>]] return[call[name[self]._put_resource, parameter[name[resource_id], name[body]]]]
keyword[def] identifier[add_resource] ( identifier[self] , identifier[resource_id] , identifier[attributes] , identifier[parents] =[], identifier[issuer] = literal[string] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[attributes] ,( identifier[dict] )), literal[string] identifier[attrs] =[] keyword[for] identifier[key] keyword[in] identifier[attributes] . identifier[keys] (): identifier[attrs] . identifier[append] ({ literal[string] : identifier[issuer] , literal[string] : identifier[key] , literal[string] : identifier[attributes] [ identifier[key] ] }) identifier[body] ={ literal[string] : identifier[resource_id] , literal[string] : identifier[parents] , literal[string] : identifier[attrs] , } keyword[return] identifier[self] . identifier[_put_resource] ( identifier[resource_id] , identifier[body] )
def add_resource(self, resource_id, attributes, parents=[], issuer='default'): """ Will add the given resource with a given identifier and attribute dictionary. example/ add_resource('/asset/12', {'id': 12, 'manufacturer': 'GE'}) """ # MAINT: consider test to avoid adding duplicate resource id assert isinstance(attributes, dict), 'attributes expected to be dict' attrs = [] for key in attributes.keys(): attrs.append({'issuer': issuer, 'name': key, 'value': attributes[key]}) # depends on [control=['for'], data=['key']] body = {'resourceIdentifier': resource_id, 'parents': parents, 'attributes': attrs} return self._put_resource(resource_id, body)
def get_sitemessage_urls():
    """Returns sitemessage urlpatterns, that can be attached to urlpatterns of a project:

        # Example from urls.py.

        from sitemessage.toolbox import get_sitemessage_urls

        urlpatterns = patterns('',
            # Your URL Patterns belongs here.

        ) + get_sitemessage_urls()  # Now attaching additional URLs.

    """
    sitemessage_urls = [
        url(
            r'^messages/unsubscribe/(?P<message_id>\d+)/(?P<dispatch_id>\d+)/(?P<hashed>[^/]+)/$',
            unsubscribe,
            name='sitemessage_unsubscribe'
        ),
        url(
            r'^messages/ping/(?P<message_id>\d+)/(?P<dispatch_id>\d+)/(?P<hashed>[^/]+)/$',
            mark_read,
            name='sitemessage_mark_read'
        ),
    ]

    # Django >= 1.9 takes a plain list of url() objects.
    if VERSION >= (1, 9):
        return sitemessage_urls

    # Older Django versions still require patterns().
    from django.conf.urls import patterns
    return patterns('', *sitemessage_urls)
def function[get_sitemessage_urls, parameter[]]: constant[Returns sitemessage urlpatterns, that can be attached to urlpatterns of a project: # Example from urls.py. from sitemessage.toolbox import get_sitemessage_urls urlpatterns = patterns('', # Your URL Patterns belongs here. ) + get_sitemessage_urls() # Now attaching additional URLs. ] variable[url_unsubscribe] assign[=] call[name[url], parameter[constant[^messages/unsubscribe/(?P<message_id>\d+)/(?P<dispatch_id>\d+)/(?P<hashed>[^/]+)/$], name[unsubscribe]]] variable[url_mark_read] assign[=] call[name[url], parameter[constant[^messages/ping/(?P<message_id>\d+)/(?P<dispatch_id>\d+)/(?P<hashed>[^/]+)/$], name[mark_read]]] if compare[name[VERSION] greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b26ca230>, <ast.Constant object at 0x7da1b26cae00>]]] begin[:] return[list[[<ast.Name object at 0x7da1b26cb070>, <ast.Name object at 0x7da1b26cb820>]]] from relative_module[django.conf.urls] import module[patterns] return[call[name[patterns], parameter[constant[], name[url_unsubscribe], name[url_mark_read]]]]
keyword[def] identifier[get_sitemessage_urls] (): literal[string] identifier[url_unsubscribe] = identifier[url] ( literal[string] , identifier[unsubscribe] , identifier[name] = literal[string] ) identifier[url_mark_read] = identifier[url] ( literal[string] , identifier[mark_read] , identifier[name] = literal[string] ) keyword[if] identifier[VERSION] >=( literal[int] , literal[int] ): keyword[return] [ identifier[url_unsubscribe] , identifier[url_mark_read] ] keyword[from] identifier[django] . identifier[conf] . identifier[urls] keyword[import] identifier[patterns] keyword[return] identifier[patterns] ( literal[string] , identifier[url_unsubscribe] , identifier[url_mark_read] )
def get_sitemessage_urls(): """Returns sitemessage urlpatterns, that can be attached to urlpatterns of a project: # Example from urls.py. from sitemessage.toolbox import get_sitemessage_urls urlpatterns = patterns('', # Your URL Patterns belongs here. ) + get_sitemessage_urls() # Now attaching additional URLs. """ url_unsubscribe = url('^messages/unsubscribe/(?P<message_id>\\d+)/(?P<dispatch_id>\\d+)/(?P<hashed>[^/]+)/$', unsubscribe, name='sitemessage_unsubscribe') url_mark_read = url('^messages/ping/(?P<message_id>\\d+)/(?P<dispatch_id>\\d+)/(?P<hashed>[^/]+)/$', mark_read, name='sitemessage_mark_read') if VERSION >= (1, 9): return [url_unsubscribe, url_mark_read] # depends on [control=['if'], data=[]] from django.conf.urls import patterns return patterns('', url_unsubscribe, url_mark_read)
def df(self, version=None, tags=None, ext=None, **kwargs):
    """Loads an instance of this dataset into a dataframe.

    Parameters
    ----------
    version: str, optional
        The version of the instance of this dataset.
    tags : list of str, optional
        The tags associated with the desired instance of this dataset.
    ext : str, optional
        The file extension to use. If not given, the extension is looked
        up in the local store.
    **kwargs : extra keyword arguments, optional
        Extra keyword arguments are forwarded to the deserialization
        method of the SerializationFormat object corresponding to the
        extension used.

    Returns
    -------
    pandas.DataFrame
        A dataframe containing the desired instance of this dataset.

    Raises
    ------
    MissingDatasetError
        If no matching instance exists in the local store.
    """
    # Honour an explicitly requested extension; only fall back to a
    # store lookup when the caller did not provide one. (Previously the
    # ext argument was unconditionally overwritten.)
    if ext is None:
        ext = self._find_extension(version=version, tags=tags)
    if ext is None:
        # build "version=... and tags=..." without a dangling " and "
        parts = []
        if version:
            parts.append("version={}".format(version))
        if tags:
            parts.append("tags={}".format(tags))
        attribs = " and ".join(parts)
        raise MissingDatasetError(
            "No dataset with {} in local store!".format(attribs))
    fpath = self.fpath(version=version, tags=tags, ext=ext)
    fmt = SerializationFormat.by_name(ext)
    return fmt.deserialize(fpath, **kwargs)
def function[df, parameter[self, version, tags, ext]]: constant[Loads an instance of this dataset into a dataframe. Parameters ---------- version: str, optional The version of the instance of this dataset. tags : list of str, optional The tags associated with the desired instance of this dataset. ext : str, optional The file extension to use. If not given, the default extension is used. **kwargs : extra keyword arguments, optional Extra keyword arguments are forwarded to the deserialization method of the SerializationFormat object corresponding to the extension used. Returns ------- pandas.DataFrame A dataframe containing the desired instance of this dataset. ] variable[ext] assign[=] call[name[self]._find_extension, parameter[]] if compare[name[ext] is constant[None]] begin[:] variable[attribs] assign[=] call[constant[{}{}].format, parameter[<ast.IfExp object at 0x7da18ede74c0>, <ast.IfExp object at 0x7da18ede5ab0>]] <ast.Raise object at 0x7da18ede41f0> variable[fpath] assign[=] call[name[self].fpath, parameter[]] variable[fmt] assign[=] call[name[SerializationFormat].by_name, parameter[name[ext]]] return[call[name[fmt].deserialize, parameter[name[fpath]]]]
keyword[def] identifier[df] ( identifier[self] , identifier[version] = keyword[None] , identifier[tags] = keyword[None] , identifier[ext] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[ext] = identifier[self] . identifier[_find_extension] ( identifier[version] = identifier[version] , identifier[tags] = identifier[tags] ) keyword[if] identifier[ext] keyword[is] keyword[None] : identifier[attribs] = literal[string] . identifier[format] ( literal[string] . identifier[format] ( identifier[version] ) keyword[if] identifier[version] keyword[else] literal[string] , literal[string] . identifier[format] ( identifier[tags] ) keyword[if] identifier[tags] keyword[else] literal[string] , ) keyword[raise] identifier[MissingDatasetError] ( literal[string] . identifier[format] ( identifier[attribs] )) identifier[fpath] = identifier[self] . identifier[fpath] ( identifier[version] = identifier[version] , identifier[tags] = identifier[tags] , identifier[ext] = identifier[ext] ) identifier[fmt] = identifier[SerializationFormat] . identifier[by_name] ( identifier[ext] ) keyword[return] identifier[fmt] . identifier[deserialize] ( identifier[fpath] ,** identifier[kwargs] )
def df(self, version=None, tags=None, ext=None, **kwargs): """Loads an instance of this dataset into a dataframe. Parameters ---------- version: str, optional The version of the instance of this dataset. tags : list of str, optional The tags associated with the desired instance of this dataset. ext : str, optional The file extension to use. If not given, the default extension is used. **kwargs : extra keyword arguments, optional Extra keyword arguments are forwarded to the deserialization method of the SerializationFormat object corresponding to the extension used. Returns ------- pandas.DataFrame A dataframe containing the desired instance of this dataset. """ ext = self._find_extension(version=version, tags=tags) if ext is None: attribs = '{}{}'.format('version={} and '.format(version) if version else '', 'tags={}'.format(tags) if tags else '') raise MissingDatasetError('No dataset with {} in local store!'.format(attribs)) # depends on [control=['if'], data=[]] fpath = self.fpath(version=version, tags=tags, ext=ext) fmt = SerializationFormat.by_name(ext) return fmt.deserialize(fpath, **kwargs)
def download_content_gui(**args):
    """Fetch links for the query and download them (GUI mode)."""
    global row

    # default download directory is derived from the query string
    if not args['directory']:
        args['directory'] = args['query'].replace(' ', '-')

    search_frame = Frame(root)
    worker = threading.Thread(
        target=search_function,
        args=(search_frame, args['query'], args['website'],
              args['file_type'], args['limit'], args['option']))
    worker.start()
    task(search_frame)
    worker.join()

    # new frame for progress bar
    row = Frame(root)
    row.pack()

    downloader = download_parallel_gui if args['parallel'] else download_series_gui
    downloader(row, links, args['directory'], args['min_file_size'],
               args['max_file_size'], args['no_redirects'])
def function[download_content_gui, parameter[]]: constant[ function to fetch links and download them ] <ast.Global object at 0x7da1b0f0dff0> if <ast.UnaryOp object at 0x7da1b0f0d630> begin[:] call[name[args]][constant[directory]] assign[=] call[call[name[args]][constant[query]].replace, parameter[constant[ ], constant[-]]] variable[root1] assign[=] call[name[Frame], parameter[name[root]]] variable[t1] assign[=] call[name[threading].Thread, parameter[]] call[name[t1].start, parameter[]] call[name[task], parameter[name[root1]]] call[name[t1].join, parameter[]] variable[row] assign[=] call[name[Frame], parameter[name[root]]] call[name[row].pack, parameter[]] if call[name[args]][constant[parallel]] begin[:] call[name[download_parallel_gui], parameter[name[row], name[links], call[name[args]][constant[directory]], call[name[args]][constant[min_file_size]], call[name[args]][constant[max_file_size]], call[name[args]][constant[no_redirects]]]]
keyword[def] identifier[download_content_gui] (** identifier[args] ): literal[string] keyword[global] identifier[row] keyword[if] keyword[not] identifier[args] [ literal[string] ]: identifier[args] [ literal[string] ]= identifier[args] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] ) identifier[root1] = identifier[Frame] ( identifier[root] ) identifier[t1] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[search_function] , identifier[args] =( identifier[root1] , identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ])) identifier[t1] . identifier[start] () identifier[task] ( identifier[root1] ) identifier[t1] . identifier[join] () identifier[row] = identifier[Frame] ( identifier[root] ) identifier[row] . identifier[pack] () keyword[if] identifier[args] [ literal[string] ]: identifier[download_parallel_gui] ( identifier[row] , identifier[links] , identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ]) keyword[else] : identifier[download_series_gui] ( identifier[row] , identifier[links] , identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ], identifier[args] [ literal[string] ])
def download_content_gui(**args): """ function to fetch links and download them """ global row if not args['directory']: args['directory'] = args['query'].replace(' ', '-') # depends on [control=['if'], data=[]] root1 = Frame(root) t1 = threading.Thread(target=search_function, args=(root1, args['query'], args['website'], args['file_type'], args['limit'], args['option'])) t1.start() task(root1) t1.join() #new frame for progress bar row = Frame(root) row.pack() if args['parallel']: download_parallel_gui(row, links, args['directory'], args['min_file_size'], args['max_file_size'], args['no_redirects']) # depends on [control=['if'], data=[]] else: download_series_gui(row, links, args['directory'], args['min_file_size'], args['max_file_size'], args['no_redirects'])
def add_receipt(self, block_header: BlockHeader, index_key: int, receipt: Receipt) -> Hash32:
    """
    Store *receipt* under *index_key* in the receipt trie rooted at
    ``block_header.receipt_root``.

    :param block_header: header whose receipt trie is being extended
    :param index_key: trie key (transaction index) to file the receipt under
    :param receipt: the receipt to RLP-encode and store
    :return: the new receipt-trie root hash for the updated block header
    """
    trie = HexaryTrie(db=self.db, root_hash=block_header.receipt_root)
    trie[index_key] = rlp.encode(receipt)
    return trie.root_hash
def function[add_receipt, parameter[self, block_header, index_key, receipt]]: constant[ Adds the given receipt to the provided block header. Returns the updated `receipts_root` for updated block header. ] variable[receipt_db] assign[=] call[name[HexaryTrie], parameter[]] call[name[receipt_db]][name[index_key]] assign[=] call[name[rlp].encode, parameter[name[receipt]]] return[name[receipt_db].root_hash]
keyword[def] identifier[add_receipt] ( identifier[self] , identifier[block_header] : identifier[BlockHeader] , identifier[index_key] : identifier[int] , identifier[receipt] : identifier[Receipt] )-> identifier[Hash32] : literal[string] identifier[receipt_db] = identifier[HexaryTrie] ( identifier[db] = identifier[self] . identifier[db] , identifier[root_hash] = identifier[block_header] . identifier[receipt_root] ) identifier[receipt_db] [ identifier[index_key] ]= identifier[rlp] . identifier[encode] ( identifier[receipt] ) keyword[return] identifier[receipt_db] . identifier[root_hash]
def add_receipt(self, block_header: BlockHeader, index_key: int, receipt: Receipt) -> Hash32: """ Adds the given receipt to the provided block header. Returns the updated `receipts_root` for updated block header. """ receipt_db = HexaryTrie(db=self.db, root_hash=block_header.receipt_root) receipt_db[index_key] = rlp.encode(receipt) return receipt_db.root_hash
def pass_condition(b, test, a):
    """
    Generic test function used by Scout2 / AWS recipes.

    :param b: Value to be tested against
    :param test: Name of the test case to run
    :param a: Value to be tested
    :return: True if the condition is met, False otherwise
    :raises Exception: if `test` is not a known test case name
    """
    # Return false by default
    result = False

    # Equality tests (compared as strings)
    if test == 'equal':
        result = (str(a) == str(b))
    elif test == 'notEqual':
        result = not pass_condition(b, 'equal', a)

    # More/Less tests (compared as integers)
    elif test == 'lessThan':
        result = (int(b) < int(a))
    elif test == 'lessOrEqual':
        result = (int(b) <= int(a))
    elif test == 'moreThan':
        result = (int(b) > int(a))
    elif test == 'moreOrEqual':
        result = (int(b) >= int(a))

    # Empty tests
    elif test == 'empty':
        result = ((type(b) == dict and b == {}) or
                  (type(b) == list and b == []) or
                  (type(b) == list and b == [None]))
    elif test == 'notEmpty':
        # Bug fix: the original passed the literal string 'a' instead of the
        # variable `a` (harmless since 'empty' ignores its third argument,
        # but clearly a typo).
        result = not pass_condition(b, 'empty', a)
    elif test == 'null':
        # Matches both an actual None and the string 'None'.
        result = (b is None) or (type(b) == str and b == 'None')
    elif test == 'notNull':
        result = not pass_condition(b, 'null', a)

    # Boolean tests (string-encoded booleans)
    elif test == 'true':
        result = (str(b).lower() == 'true')
    elif test == 'notTrue' or test == 'false':
        result = (str(b).lower() == 'false')

    # Object length tests
    elif test == 'lengthLessThan':
        result = (len(b) < int(a))
    elif test == 'lengthMoreThan':
        result = (len(b) > int(a))
    elif test == 'lengthEqual':
        result = (len(b) == int(a))

    # Dictionary keys tests
    elif test == 'withKey':
        result = (a in b)
    elif test == 'withoutKey':
        result = (a not in b)

    # List tests
    elif test == 'containAtLeastOneOf':
        if type(b) != list:
            b = [b]
        if type(a) != list:
            a = [a]
        for c in b:
            # Bug fix: the original guarded this with `if type(c):`, which is
            # always truthy (type objects are truthy), so every element is
            # stringified unconditionally. Keep that behavior, drop the
            # dead condition.
            c = str(c)
            if c in a:
                result = True
                break
    elif test == 'containAtLeastOneDifferentFrom':
        if type(b) != list:
            b = [b]
        if type(a) != list:
            a = [a]
        for c in b:
            if c is not None and c != '' and c not in a:
                result = True
                break
    elif test == 'containNoneOf':
        result = True
        if type(b) != list:
            b = [b]
        if type(a) != list:
            a = [a]
        for c in b:
            if c in a:
                result = False
                break

    # Regex tests
    elif test == 'match':
        if type(a) != list:
            a = [a]
        b = str(b)
        for c in a:
            if re.match(c, b) is not None:
                result = True
                break
    elif test == 'notMatch':
        result = not pass_condition(b, 'match', a)

    # Date tests (naive datetimes; timezone info is stripped before comparing)
    elif test == 'priorToDate':
        b = dateutil.parser.parse(str(b)).replace(tzinfo=None)
        a = dateutil.parser.parse(str(a)).replace(tzinfo=None)
        result = (b < a)
    elif test == 'olderThan':
        age, threshold = __prepare_age_test(a, b)
        result = (age > threshold)
    elif test == 'newerThan':
        age, threshold = __prepare_age_test(a, b)
        result = (age < threshold)

    # CIDR tests
    elif test == 'inSubnets':
        grant = netaddr.IPNetwork(b)
        if type(a) != list:
            a = [a]
        for c in a:
            if grant in netaddr.IPNetwork(c):
                result = True
                break
    elif test == 'notInSubnets':
        result = not pass_condition(b, 'inSubnets', a)

    # Policy statement tests
    elif test == 'containAction':
        if type(b) != dict:
            b = json.loads(b)
        statement_actions = get_actions_from_statement(b)
        for action in _expand_wildcard_action(a):
            if action.lower() in statement_actions:
                result = True
                break
    elif test == 'notContainAction':
        result = not pass_condition(b, 'containAction', a)
    elif test == 'containAtLeastOneAction':
        if type(b) != dict:
            b = json.loads(b)
        if type(a) != list:
            a = [a]
        actions = get_actions_from_statement(b)
        for c in a:
            if c.lower() in actions:
                result = True
                break

    # Policy principal tests
    elif test == 'isCrossAccount':
        if type(b) != list:
            b = [b]
        for c in b:
            if c != a and not re.match(r'arn:aws:iam:.*?:%s:.*' % a, c):
                result = True
                break
    elif test == 'isSameAccount':
        if type(b) != list:
            b = [b]
        for c in b:
            if c == a or re.match(r'arn:aws:iam:.*?:%s:.*' % a, c):
                result = True
                break

    # Unknown test case
    else:
        printError('Error: unknown test case %s' % test)
        raise Exception('unknown test case %s' % test)

    return result
def function[pass_condition, parameter[b, test, a]]: constant[ Generic test function used by Scout2 / AWS recipes . :param b: Value to be tested against :param test: Name of the test case to run :param a: Value to be tested :return: True of condition is met, False otherwise ] variable[result] assign[=] constant[False] if compare[name[test] equal[==] constant[equal]] begin[:] variable[a] assign[=] call[name[str], parameter[name[a]]] variable[b] assign[=] call[name[str], parameter[name[b]]] variable[result] assign[=] compare[name[a] equal[==] name[b]] return[name[result]]
keyword[def] identifier[pass_condition] ( identifier[b] , identifier[test] , identifier[a] ): literal[string] identifier[result] = keyword[False] keyword[if] identifier[test] == literal[string] : identifier[a] = identifier[str] ( identifier[a] ) identifier[b] = identifier[str] ( identifier[b] ) identifier[result] =( identifier[a] == identifier[b] ) keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[pass_condition] ( identifier[b] , literal[string] , identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[int] ( identifier[b] )< identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[int] ( identifier[b] )<= identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[int] ( identifier[b] )> identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[int] ( identifier[b] )>= identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =(( identifier[type] ( identifier[b] )== identifier[dict] keyword[and] identifier[b] =={}) keyword[or] ( identifier[type] ( identifier[b] )== identifier[list] keyword[and] identifier[b] ==[]) keyword[or] ( identifier[type] ( identifier[b] )== identifier[list] keyword[and] identifier[b] ==[ keyword[None] ])) keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[pass_condition] ( identifier[b] , literal[string] , literal[string] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =(( identifier[b] == keyword[None] ) keyword[or] ( identifier[type] ( identifier[b] )== identifier[str] keyword[and] identifier[b] == literal[string] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[pass_condition] ( identifier[b] 
, literal[string] , identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[str] ( identifier[b] ). identifier[lower] ()== literal[string] ) keyword[elif] identifier[test] == literal[string] keyword[or] identifier[test] == literal[string] : identifier[result] =( identifier[str] ( identifier[b] ). identifier[lower] ()== literal[string] ) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[len] ( identifier[b] )< identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[len] ( identifier[b] )> identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[len] ( identifier[b] )== identifier[int] ( identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] =( identifier[a] keyword[in] identifier[b] ) keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[a] keyword[in] identifier[b] ) keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] keyword[if] keyword[not] identifier[type] ( identifier[b] )== identifier[list] : identifier[b] =[ identifier[b] ] keyword[if] keyword[not] identifier[type] ( identifier[a] )== identifier[list] : identifier[a] =[ identifier[a] ] keyword[for] identifier[c] keyword[in] identifier[b] : keyword[if] identifier[type] ( identifier[c] ): identifier[c] = identifier[str] ( identifier[c] ) keyword[if] identifier[c] keyword[in] identifier[a] : identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] keyword[if] keyword[not] identifier[type] ( identifier[b] )== identifier[list] : identifier[b] =[ identifier[b] ] keyword[if] keyword[not] identifier[type] ( identifier[a] )== identifier[list] : identifier[a] =[ identifier[a] ] keyword[for] identifier[c] keyword[in] 
identifier[b] : keyword[if] identifier[c] != keyword[None] keyword[and] identifier[c] != literal[string] keyword[and] identifier[c] keyword[not] keyword[in] identifier[a] : identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[True] keyword[if] keyword[not] identifier[type] ( identifier[b] )== identifier[list] : identifier[b] =[ identifier[b] ] keyword[if] keyword[not] identifier[type] ( identifier[a] )== identifier[list] : identifier[a] =[ identifier[a] ] keyword[for] identifier[c] keyword[in] identifier[b] : keyword[if] identifier[c] keyword[in] identifier[a] : identifier[result] = keyword[False] keyword[break] keyword[elif] identifier[test] == literal[string] : keyword[if] identifier[type] ( identifier[a] )!= identifier[list] : identifier[a] =[ identifier[a] ] identifier[b] = identifier[str] ( identifier[b] ) keyword[for] identifier[c] keyword[in] identifier[a] : keyword[if] identifier[re] . identifier[match] ( identifier[c] , identifier[b] )!= keyword[None] : identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[pass_condition] ( identifier[b] , literal[string] , identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[b] = identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[str] ( identifier[b] )). identifier[replace] ( identifier[tzinfo] = keyword[None] ) identifier[a] = identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[str] ( identifier[a] )). 
identifier[replace] ( identifier[tzinfo] = keyword[None] ) identifier[result] =( identifier[b] < identifier[a] ) keyword[elif] identifier[test] == literal[string] : identifier[age] , identifier[threshold] = identifier[__prepare_age_test] ( identifier[a] , identifier[b] ) identifier[result] =( identifier[age] > identifier[threshold] ) keyword[elif] identifier[test] == literal[string] : identifier[age] , identifier[threshold] = identifier[__prepare_age_test] ( identifier[a] , identifier[b] ) identifier[result] =( identifier[age] < identifier[threshold] ) keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] identifier[grant] = identifier[netaddr] . identifier[IPNetwork] ( identifier[b] ) keyword[if] identifier[type] ( identifier[a] )!= identifier[list] : identifier[a] =[ identifier[a] ] keyword[for] identifier[c] keyword[in] identifier[a] : identifier[known_subnet] = identifier[netaddr] . identifier[IPNetwork] ( identifier[c] ) keyword[if] identifier[grant] keyword[in] identifier[known_subnet] : identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[pass_condition] ( identifier[b] , literal[string] , identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] keyword[if] identifier[type] ( identifier[b] )!= identifier[dict] : identifier[b] = identifier[json] . identifier[loads] ( identifier[b] ) identifier[statement_actions] = identifier[get_actions_from_statement] ( identifier[b] ) identifier[rule_actions] = identifier[_expand_wildcard_action] ( identifier[a] ) keyword[for] identifier[action] keyword[in] identifier[rule_actions] : keyword[if] identifier[action] . 
identifier[lower] () keyword[in] identifier[statement_actions] : identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] =( keyword[not] identifier[pass_condition] ( identifier[b] , literal[string] , identifier[a] )) keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] keyword[if] identifier[type] ( identifier[b] )!= identifier[dict] : identifier[b] = identifier[json] . identifier[loads] ( identifier[b] ) keyword[if] identifier[type] ( identifier[a] )!= identifier[list] : identifier[a] =[ identifier[a] ] identifier[actions] = identifier[get_actions_from_statement] ( identifier[b] ) keyword[for] identifier[c] keyword[in] identifier[a] : keyword[if] identifier[c] . identifier[lower] () keyword[in] identifier[actions] : identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] keyword[if] identifier[type] ( identifier[b] )!= identifier[list] : identifier[b] =[ identifier[b] ] keyword[for] identifier[c] keyword[in] identifier[b] : keyword[if] identifier[c] != identifier[a] keyword[and] keyword[not] identifier[re] . identifier[match] ( literal[string] % identifier[a] , identifier[c] ): identifier[result] = keyword[True] keyword[break] keyword[elif] identifier[test] == literal[string] : identifier[result] = keyword[False] keyword[if] identifier[type] ( identifier[b] )!= identifier[list] : identifier[b] =[ identifier[b] ] keyword[for] identifier[c] keyword[in] identifier[b] : keyword[if] identifier[c] == identifier[a] keyword[or] identifier[re] . identifier[match] ( literal[string] % identifier[a] , identifier[c] ): identifier[result] = keyword[True] keyword[break] keyword[else] : identifier[printError] ( literal[string] % identifier[test] ) keyword[raise] identifier[Exception] keyword[return] identifier[result]
def pass_condition(b, test, a): """ Generic test function used by Scout2 / AWS recipes . :param b: Value to be tested against :param test: Name of the test case to run :param a: Value to be tested :return: True of condition is met, False otherwise """ # Return false by default result = False # Equality tests if test == 'equal': a = str(a) b = str(b) result = a == b # depends on [control=['if'], data=[]] elif test == 'notEqual': result = not pass_condition(b, 'equal', a) # depends on [control=['if'], data=[]] # More/Less tests elif test == 'lessThan': result = int(b) < int(a) # depends on [control=['if'], data=[]] elif test == 'lessOrEqual': result = int(b) <= int(a) # depends on [control=['if'], data=[]] elif test == 'moreThan': result = int(b) > int(a) # depends on [control=['if'], data=[]] elif test == 'moreOrEqual': result = int(b) >= int(a) # depends on [control=['if'], data=[]] # Empty tests elif test == 'empty': result = type(b) == dict and b == {} or (type(b) == list and b == []) or (type(b) == list and b == [None]) # depends on [control=['if'], data=[]] elif test == 'notEmpty': result = not pass_condition(b, 'empty', 'a') # depends on [control=['if'], data=[]] elif test == 'null': result = b == None or (type(b) == str and b == 'None') # depends on [control=['if'], data=[]] elif test == 'notNull': result = not pass_condition(b, 'null', a) # depends on [control=['if'], data=[]] # Boolean tests elif test == 'true': result = str(b).lower() == 'true' # depends on [control=['if'], data=[]] elif test == 'notTrue' or test == 'false': result = str(b).lower() == 'false' # depends on [control=['if'], data=[]] # Object length tests elif test == 'lengthLessThan': result = len(b) < int(a) # depends on [control=['if'], data=[]] elif test == 'lengthMoreThan': result = len(b) > int(a) # depends on [control=['if'], data=[]] elif test == 'lengthEqual': result = len(b) == int(a) # depends on [control=['if'], data=[]] # Dictionary keys tests elif test == 'withKey': result = a 
in b # depends on [control=['if'], data=[]] elif test == 'withoutKey': result = not a in b # depends on [control=['if'], data=[]] # List tests elif test == 'containAtLeastOneOf': result = False if not type(b) == list: b = [b] # depends on [control=['if'], data=[]] if not type(a) == list: a = [a] # depends on [control=['if'], data=[]] for c in b: if type(c): c = str(c) # depends on [control=['if'], data=[]] if c in a: result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] elif test == 'containAtLeastOneDifferentFrom': result = False if not type(b) == list: b = [b] # depends on [control=['if'], data=[]] if not type(a) == list: a = [a] # depends on [control=['if'], data=[]] for c in b: if c != None and c != '' and (c not in a): result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] elif test == 'containNoneOf': result = True if not type(b) == list: b = [b] # depends on [control=['if'], data=[]] if not type(a) == list: a = [a] # depends on [control=['if'], data=[]] for c in b: if c in a: result = False break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] # Regex tests elif test == 'match': if type(a) != list: a = [a] # depends on [control=['if'], data=[]] b = str(b) for c in a: if re.match(c, b) != None: result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] elif test == 'notMatch': result = not pass_condition(b, 'match', a) # depends on [control=['if'], data=[]] # Date tests elif test == 'priorToDate': b = dateutil.parser.parse(str(b)).replace(tzinfo=None) a = dateutil.parser.parse(str(a)).replace(tzinfo=None) result = b < a # depends on [control=['if'], data=[]] elif test == 'olderThan': (age, threshold) = __prepare_age_test(a, b) result = 
age > threshold # depends on [control=['if'], data=[]] elif test == 'newerThan': (age, threshold) = __prepare_age_test(a, b) result = age < threshold # depends on [control=['if'], data=[]] # CIDR tests elif test == 'inSubnets': result = False grant = netaddr.IPNetwork(b) if type(a) != list: a = [a] # depends on [control=['if'], data=[]] for c in a: known_subnet = netaddr.IPNetwork(c) if grant in known_subnet: result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] elif test == 'notInSubnets': result = not pass_condition(b, 'inSubnets', a) # depends on [control=['if'], data=[]] # Policy statement tests elif test == 'containAction': result = False if type(b) != dict: b = json.loads(b) # depends on [control=['if'], data=[]] statement_actions = get_actions_from_statement(b) rule_actions = _expand_wildcard_action(a) for action in rule_actions: if action.lower() in statement_actions: result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['action']] # depends on [control=['if'], data=[]] elif test == 'notContainAction': result = not pass_condition(b, 'containAction', a) # depends on [control=['if'], data=[]] elif test == 'containAtLeastOneAction': result = False if type(b) != dict: b = json.loads(b) # depends on [control=['if'], data=[]] if type(a) != list: a = [a] # depends on [control=['if'], data=[]] actions = get_actions_from_statement(b) for c in a: if c.lower() in actions: result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] # Policy principal tests elif test == 'isCrossAccount': result = False if type(b) != list: b = [b] # depends on [control=['if'], data=[]] for c in b: if c != a and (not re.match('arn:aws:iam:.*?:%s:.*' % a, c)): result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on 
[control=['if'], data=[]] elif test == 'isSameAccount': result = False if type(b) != list: b = [b] # depends on [control=['if'], data=[]] for c in b: if c == a or re.match('arn:aws:iam:.*?:%s:.*' % a, c): result = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] else: # Unknown test case printError('Error: unknown test case %s' % test) raise Exception return result
def calcOffset(self, x, y):
    """Calculate offset into data array. Only uses to test correctness of the formula."""
    # The data is laid out row-major, top row first: for a grid of
    # self.size x self.size samples, latitude index y counts upward from
    # the bottom, so row (size - 1 - y) of the array holds latitude y.
    #
    # Sample for size 1201x1201:
    #   ( 0/1200) ... (1200/1200)   -> offsets 0 .. 1200
    #   ( 0/1199) ... (1200/1199)   -> offsets 1201 .. 2401
    #   ...
    #   ( 0/   0) ... (1200/   0)   -> offsets 1201*1200 .. 1201*1201-1
    row_from_top = self.size - 1 - y
    return row_from_top * self.size + x
def function[calcOffset, parameter[self, x, y]]: constant[Calculate offset into data array. Only uses to test correctness of the formula.] return[binary_operation[name[x] + binary_operation[name[self].size * binary_operation[binary_operation[name[self].size - name[y]] - constant[1]]]]]
keyword[def] identifier[calcOffset] ( identifier[self] , identifier[x] , identifier[y] ): literal[string] keyword[return] identifier[x] + identifier[self] . identifier[size] *( identifier[self] . identifier[size] - identifier[y] - literal[int] )
def calcOffset(self, x, y): """Calculate offset into data array. Only uses to test correctness of the formula.""" # Datalayout # X = longitude # Y = latitude # Sample for size 1201x1201 # ( 0/1200) ( 1/1200) ... (1199/1200) (1200/1200) # ( 0/1199) ( 1/1199) ... (1199/1199) (1200/1199) # ... ... ... ... # ( 0/ 1) ( 1/ 1) ... (1199/ 1) (1200/ 1) # ( 0/ 0) ( 1/ 0) ... (1199/ 0) (1200/ 0) # Some offsets: # (0/1200) 0 # (1200/1200) 1200 # (0/1199) 1201 # (1200/1199) 2401 # (0/0) 1201*1200 # (1200/0) 1201*1201-1 return x + self.size * (self.size - y - 1)
def cmd_legend(self, args):
    '''setup legend for graphs'''
    if not args:
        # No arguments: list every known legend mapping.
        for key in self.legend.keys():
            print(f"{key} -> {self.legend[key]}")
    elif len(args) == 1:
        # One argument: drop that legend entry if it exists.
        key = args[0]
        if key in self.legend:
            print(f"Removing legend {key}")
            del self.legend[key]
    else:
        # Two or more arguments: map the first name to the second.
        source, target = args[0], args[1]
        print(f"Adding legend {source} -> {target}")
        self.legend[source] = target
def function[cmd_legend, parameter[self, args]]: constant[setup legend for graphs] if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] for taget[name[leg]] in starred[call[name[self].legend.keys, parameter[]]] begin[:] call[name[print], parameter[binary_operation[constant[%s -> %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1721450>, <ast.Subscript object at 0x7da1b17213c0>]]]]]
keyword[def] identifier[cmd_legend] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[for] identifier[leg] keyword[in] identifier[self] . identifier[legend] . identifier[keys] (): identifier[print] ( literal[string] %( identifier[leg] , identifier[self] . identifier[legend] [ identifier[leg] ])) keyword[elif] identifier[len] ( identifier[args] )== literal[int] : identifier[leg] = identifier[args] [ literal[int] ] keyword[if] identifier[leg] keyword[in] identifier[self] . identifier[legend] : identifier[print] ( literal[string] % identifier[leg] ) identifier[self] . identifier[legend] . identifier[pop] ( identifier[leg] ) keyword[elif] identifier[len] ( identifier[args] )>= literal[int] : identifier[leg] = identifier[args] [ literal[int] ] identifier[leg2] = identifier[args] [ literal[int] ] identifier[print] ( literal[string] %( identifier[leg] , identifier[leg2] )) identifier[self] . identifier[legend] [ identifier[leg] ]= identifier[leg2]
def cmd_legend(self, args): """setup legend for graphs""" if len(args) == 0: for leg in self.legend.keys(): print('%s -> %s' % (leg, self.legend[leg])) # depends on [control=['for'], data=['leg']] # depends on [control=['if'], data=[]] elif len(args) == 1: leg = args[0] if leg in self.legend: print('Removing legend %s' % leg) self.legend.pop(leg) # depends on [control=['if'], data=['leg']] # depends on [control=['if'], data=[]] elif len(args) >= 2: leg = args[0] leg2 = args[1] print('Adding legend %s -> %s' % (leg, leg2)) self.legend[leg] = leg2 # depends on [control=['if'], data=[]]
def setxattr(self, req, ino, name, value, flags):
    """
    Set an extended attribute.

    Not implemented here: the request is always answered with ENOSYS so
    the kernel knows the operation is unsupported.

    Valid replies:
        reply_err
    """
    self.reply_err(req, errno.ENOSYS)
def function[setxattr, parameter[self, req, ino, name, value, flags]]: constant[ Set an extended attribute Valid replies: reply_err ] call[name[self].reply_err, parameter[name[req], name[errno].ENOSYS]]
keyword[def] identifier[setxattr] ( identifier[self] , identifier[req] , identifier[ino] , identifier[name] , identifier[value] , identifier[flags] ): literal[string] identifier[self] . identifier[reply_err] ( identifier[req] , identifier[errno] . identifier[ENOSYS] )
def setxattr(self, req, ino, name, value, flags): """ Set an extended attribute Valid replies: reply_err """ self.reply_err(req, errno.ENOSYS)
def get_file_lines(file_name):
    """Return a list of non-empty lines from `file_path`."""
    # Resolve the name relative to this module's directory, not the CWD.
    here = path.dirname(path.abspath(__file__))
    with open(path.join(here, file_name)) as handle:
        raw = handle.read()
    # Keep only truthy lines: blank lines are dropped, whitespace-only
    # lines are kept (they are non-empty strings).
    return [ln for ln in raw.splitlines() if ln]
def function[get_file_lines, parameter[file_name]]: constant[Return a list of non-empty lines from `file_path`.] variable[file_path] assign[=] call[name[path].join, parameter[call[name[path].dirname, parameter[call[name[path].abspath, parameter[name[__file__]]]]], name[file_name]]] with call[name[open], parameter[name[file_path]]] begin[:] return[<ast.ListComp object at 0x7da20e9569e0>]
keyword[def] identifier[get_file_lines] ( identifier[file_name] ): literal[string] identifier[file_path] = identifier[path] . identifier[join] ( identifier[path] . identifier[dirname] ( identifier[path] . identifier[abspath] ( identifier[__file__] )), identifier[file_name] ) keyword[with] identifier[open] ( identifier[file_path] ) keyword[as] identifier[file_obj] : keyword[return] [ identifier[line] keyword[for] identifier[line] keyword[in] identifier[file_obj] . identifier[read] (). identifier[splitlines] () keyword[if] identifier[line] ]
def get_file_lines(file_name): """Return a list of non-empty lines from `file_path`.""" file_path = path.join(path.dirname(path.abspath(__file__)), file_name) with open(file_path) as file_obj: return [line for line in file_obj.read().splitlines() if line] # depends on [control=['with'], data=['file_obj']]
def get_unused_list_annotation_values(graph) -> Mapping[str, Set[str]]:
    """Get all of the unused values for list annotations.

    :param pybel.BELGraph graph: A BEL graph
    :return: A dictionary of {str annotation: set of str values that aren't used}
    """
    unused = {}
    for name, allowed in graph.annotation_list.items():
        seen = get_annotation_values(graph, name)
        # Only record annotations with at least one value never used.
        if len(seen) != len(allowed):
            unused[name] = set(allowed) - seen
    return unused
def function[get_unused_list_annotation_values, parameter[graph]]: constant[Get all of the unused values for list annotations. :param pybel.BELGraph graph: A BEL graph :return: A dictionary of {str annotation: set of str values that aren't used} ] variable[result] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da204622590>, <ast.Name object at 0x7da204620d00>]]] in starred[call[name[graph].annotation_list.items, parameter[]]] begin[:] variable[used_values] assign[=] call[name[get_annotation_values], parameter[name[graph], name[annotation]]] if compare[call[name[len], parameter[name[used_values]]] equal[==] call[name[len], parameter[name[values]]]] begin[:] continue call[name[result]][name[annotation]] assign[=] binary_operation[call[name[set], parameter[name[values]]] - name[used_values]] return[name[result]]
keyword[def] identifier[get_unused_list_annotation_values] ( identifier[graph] )-> identifier[Mapping] [ identifier[str] , identifier[Set] [ identifier[str] ]]: literal[string] identifier[result] ={} keyword[for] identifier[annotation] , identifier[values] keyword[in] identifier[graph] . identifier[annotation_list] . identifier[items] (): identifier[used_values] = identifier[get_annotation_values] ( identifier[graph] , identifier[annotation] ) keyword[if] identifier[len] ( identifier[used_values] )== identifier[len] ( identifier[values] ): keyword[continue] identifier[result] [ identifier[annotation] ]= identifier[set] ( identifier[values] )- identifier[used_values] keyword[return] identifier[result]
def get_unused_list_annotation_values(graph) -> Mapping[str, Set[str]]: """Get all of the unused values for list annotations. :param pybel.BELGraph graph: A BEL graph :return: A dictionary of {str annotation: set of str values that aren't used} """ result = {} for (annotation, values) in graph.annotation_list.items(): used_values = get_annotation_values(graph, annotation) if len(used_values) == len(values): # all values have been used continue # depends on [control=['if'], data=[]] result[annotation] = set(values) - used_values # depends on [control=['for'], data=[]] return result
def decode_function_result(self, function_name, data):
    """
    Return the function call result decoded.

    Args:
        function_name (str): One of the existing functions described in the
            contract interface.
        data (bin): The encoded result from calling `function_name`.

    Return:
        List[object]: The values returned by the call to `function_name`.
    """
    decode_types = self.function_data[function_name]['decode_types']
    return decode_abi(decode_types, data)
def function[decode_function_result, parameter[self, function_name, data]]: constant[ Return the function call result decoded. Args: function_name (str): One of the existing functions described in the contract interface. data (bin): The encoded result from calling `function_name`. Return: List[object]: The values returned by the call to `function_name`. ] variable[description] assign[=] call[name[self].function_data][name[function_name]] variable[arguments] assign[=] call[name[decode_abi], parameter[call[name[description]][constant[decode_types]], name[data]]] return[name[arguments]]
keyword[def] identifier[decode_function_result] ( identifier[self] , identifier[function_name] , identifier[data] ): literal[string] identifier[description] = identifier[self] . identifier[function_data] [ identifier[function_name] ] identifier[arguments] = identifier[decode_abi] ( identifier[description] [ literal[string] ], identifier[data] ) keyword[return] identifier[arguments]
def decode_function_result(self, function_name, data): """ Return the function call result decoded. Args: function_name (str): One of the existing functions described in the contract interface. data (bin): The encoded result from calling `function_name`. Return: List[object]: The values returned by the call to `function_name`. """ description = self.function_data[function_name] arguments = decode_abi(description['decode_types'], data) return arguments
def _get_tweets(self, tweets_key, max_cnt_tweets): """Get at most `max_cnt_tweets` tweets from the Redis list `tweets_key`. Parameters ---------- tweets_key: str The key of the Redis list which stores the tweets. max_cnt_tweets: int The maximum number of tweets included in the returned list. If it is set to -1, then all the available tweets will be included. Returns ------- tweets A list of tweets """ tweets = [] if max_cnt_tweets == 0: return tweets elif max_cnt_tweets == -1: # Return all the tweets in the timeline. last_tweet_index = -1 else: # Return at most max_cnt_tweets tweets. last_tweet_index = max_cnt_tweets - 1 # Get the post IDs of the tweets. post_ids = self._rc.lrange(tweets_key, 0, last_tweet_index) if not post_ids: return tweets with self._rc.pipeline() as pipe: # Get the tweets with their user IDs and UNIX timestamps. pipe.multi() for post_id in post_ids: post_id_key = pytwis_constants.TWEET_KEY_FORMAT.format(post_id) pipe.hgetall(post_id_key) tweets = pipe.execute() # Get the userid-to-username mappings for all the user IDs associated with the tweets. userid_set = {tweet[pytwis_constants.TWEET_USERID_KEY] for tweet in tweets} userid_list = [] pipe.multi() for userid in userid_set: userid_list.append(userid) userid_key = pytwis_constants.USER_PROFILE_KEY_FORMAT.format(userid) pipe.hget(userid_key, pytwis_constants.USERNAME_KEY) username_list = pipe.execute() userid_to_username = {userid: username for userid, username in\ zip(userid_list, username_list)} # Add the username for the user ID of each tweet. for tweet in tweets: tweet[pytwis_constants.USERNAME_KEY] = \ userid_to_username[tweet[pytwis_constants.TWEET_USERID_KEY]] return tweets
def function[_get_tweets, parameter[self, tweets_key, max_cnt_tweets]]: constant[Get at most `max_cnt_tweets` tweets from the Redis list `tweets_key`. Parameters ---------- tweets_key: str The key of the Redis list which stores the tweets. max_cnt_tweets: int The maximum number of tweets included in the returned list. If it is set to -1, then all the available tweets will be included. Returns ------- tweets A list of tweets ] variable[tweets] assign[=] list[[]] if compare[name[max_cnt_tweets] equal[==] constant[0]] begin[:] return[name[tweets]] variable[post_ids] assign[=] call[name[self]._rc.lrange, parameter[name[tweets_key], constant[0], name[last_tweet_index]]] if <ast.UnaryOp object at 0x7da1b0a81180> begin[:] return[name[tweets]] with call[name[self]._rc.pipeline, parameter[]] begin[:] call[name[pipe].multi, parameter[]] for taget[name[post_id]] in starred[name[post_ids]] begin[:] variable[post_id_key] assign[=] call[name[pytwis_constants].TWEET_KEY_FORMAT.format, parameter[name[post_id]]] call[name[pipe].hgetall, parameter[name[post_id_key]]] variable[tweets] assign[=] call[name[pipe].execute, parameter[]] variable[userid_set] assign[=] <ast.SetComp object at 0x7da1b0a82d40> variable[userid_list] assign[=] list[[]] call[name[pipe].multi, parameter[]] for taget[name[userid]] in starred[name[userid_set]] begin[:] call[name[userid_list].append, parameter[name[userid]]] variable[userid_key] assign[=] call[name[pytwis_constants].USER_PROFILE_KEY_FORMAT.format, parameter[name[userid]]] call[name[pipe].hget, parameter[name[userid_key], name[pytwis_constants].USERNAME_KEY]] variable[username_list] assign[=] call[name[pipe].execute, parameter[]] variable[userid_to_username] assign[=] <ast.DictComp object at 0x7da1b0a80190> for taget[name[tweet]] in starred[name[tweets]] begin[:] call[name[tweet]][name[pytwis_constants].USERNAME_KEY] assign[=] call[name[userid_to_username]][call[name[tweet]][name[pytwis_constants].TWEET_USERID_KEY]] return[name[tweets]]
keyword[def] identifier[_get_tweets] ( identifier[self] , identifier[tweets_key] , identifier[max_cnt_tweets] ): literal[string] identifier[tweets] =[] keyword[if] identifier[max_cnt_tweets] == literal[int] : keyword[return] identifier[tweets] keyword[elif] identifier[max_cnt_tweets] ==- literal[int] : identifier[last_tweet_index] =- literal[int] keyword[else] : identifier[last_tweet_index] = identifier[max_cnt_tweets] - literal[int] identifier[post_ids] = identifier[self] . identifier[_rc] . identifier[lrange] ( identifier[tweets_key] , literal[int] , identifier[last_tweet_index] ) keyword[if] keyword[not] identifier[post_ids] : keyword[return] identifier[tweets] keyword[with] identifier[self] . identifier[_rc] . identifier[pipeline] () keyword[as] identifier[pipe] : identifier[pipe] . identifier[multi] () keyword[for] identifier[post_id] keyword[in] identifier[post_ids] : identifier[post_id_key] = identifier[pytwis_constants] . identifier[TWEET_KEY_FORMAT] . identifier[format] ( identifier[post_id] ) identifier[pipe] . identifier[hgetall] ( identifier[post_id_key] ) identifier[tweets] = identifier[pipe] . identifier[execute] () identifier[userid_set] ={ identifier[tweet] [ identifier[pytwis_constants] . identifier[TWEET_USERID_KEY] ] keyword[for] identifier[tweet] keyword[in] identifier[tweets] } identifier[userid_list] =[] identifier[pipe] . identifier[multi] () keyword[for] identifier[userid] keyword[in] identifier[userid_set] : identifier[userid_list] . identifier[append] ( identifier[userid] ) identifier[userid_key] = identifier[pytwis_constants] . identifier[USER_PROFILE_KEY_FORMAT] . identifier[format] ( identifier[userid] ) identifier[pipe] . identifier[hget] ( identifier[userid_key] , identifier[pytwis_constants] . identifier[USERNAME_KEY] ) identifier[username_list] = identifier[pipe] . 
identifier[execute] () identifier[userid_to_username] ={ identifier[userid] : identifier[username] keyword[for] identifier[userid] , identifier[username] keyword[in] identifier[zip] ( identifier[userid_list] , identifier[username_list] )} keyword[for] identifier[tweet] keyword[in] identifier[tweets] : identifier[tweet] [ identifier[pytwis_constants] . identifier[USERNAME_KEY] ]= identifier[userid_to_username] [ identifier[tweet] [ identifier[pytwis_constants] . identifier[TWEET_USERID_KEY] ]] keyword[return] identifier[tweets]
def _get_tweets(self, tweets_key, max_cnt_tweets): """Get at most `max_cnt_tweets` tweets from the Redis list `tweets_key`. Parameters ---------- tweets_key: str The key of the Redis list which stores the tweets. max_cnt_tweets: int The maximum number of tweets included in the returned list. If it is set to -1, then all the available tweets will be included. Returns ------- tweets A list of tweets """ tweets = [] if max_cnt_tweets == 0: return tweets # depends on [control=['if'], data=[]] elif max_cnt_tweets == -1: # Return all the tweets in the timeline. last_tweet_index = -1 # depends on [control=['if'], data=[]] else: # Return at most max_cnt_tweets tweets. last_tweet_index = max_cnt_tweets - 1 # Get the post IDs of the tweets. post_ids = self._rc.lrange(tweets_key, 0, last_tweet_index) if not post_ids: return tweets # depends on [control=['if'], data=[]] with self._rc.pipeline() as pipe: # Get the tweets with their user IDs and UNIX timestamps. pipe.multi() for post_id in post_ids: post_id_key = pytwis_constants.TWEET_KEY_FORMAT.format(post_id) pipe.hgetall(post_id_key) # depends on [control=['for'], data=['post_id']] tweets = pipe.execute() # Get the userid-to-username mappings for all the user IDs associated with the tweets. userid_set = {tweet[pytwis_constants.TWEET_USERID_KEY] for tweet in tweets} userid_list = [] pipe.multi() for userid in userid_set: userid_list.append(userid) userid_key = pytwis_constants.USER_PROFILE_KEY_FORMAT.format(userid) pipe.hget(userid_key, pytwis_constants.USERNAME_KEY) # depends on [control=['for'], data=['userid']] username_list = pipe.execute() # depends on [control=['with'], data=['pipe']] userid_to_username = {userid: username for (userid, username) in zip(userid_list, username_list)} # Add the username for the user ID of each tweet. for tweet in tweets: tweet[pytwis_constants.USERNAME_KEY] = userid_to_username[tweet[pytwis_constants.TWEET_USERID_KEY]] # depends on [control=['for'], data=['tweet']] return tweets
def request(self, method, url, *args, **kwargs): """Make a request to the Ansible Tower API, and return the response. """ # If the URL has the api/vX at the front strip it off # This is common to have if you are extracting a URL from an existing object. # For example, any of the 'related' fields of an object will have this import re url = re.sub("^/?api/v[0-9]+/", "", url) # Piece together the full URL. use_version = not url.startswith('/o/') url = '%s%s' % (self.get_prefix(use_version), url.lstrip('/')) # Ansible Tower expects authenticated requests; add the authentication # from settings if it's provided. kwargs.setdefault( 'auth', BasicTowerAuth( settings.username, settings.password, self ) ) # POST and PUT requests will send JSON by default; make this # the content_type by default. This makes it such that we don't have # to constantly write that in our code, which gets repetitive. headers = kwargs.get('headers', {}) if method.upper() in ('PATCH', 'POST', 'PUT'): headers.setdefault('Content-Type', 'application/json') kwargs['headers'] = headers # If debugging is on, print the URL and data being sent. debug.log('%s %s' % (method, url), fg='blue', bold=True) if method in ('POST', 'PUT', 'PATCH'): debug.log('Data: %s' % kwargs.get('data', {}), fg='blue', bold=True) if method == 'GET' or kwargs.get('params', None): debug.log('Params: %s' % kwargs.get('params', {}), fg='blue', bold=True) debug.log('') # If this is a JSON request, encode the data value. if headers.get('Content-Type', '') == 'application/json': kwargs['data'] = json.dumps(kwargs.get('data', {})) r = self._make_request(method, url, args, kwargs) # Sanity check: Did the server send back some kind of internal error? # If so, bubble this up. if r.status_code >= 500: raise exc.ServerError('The Tower server sent back a server error. ' 'Please try again later.') # Sanity check: Did we fail to authenticate properly? # If so, fail out now; this is always a failure. 
if r.status_code == 401: raise exc.AuthError('Invalid Tower authentication credentials (HTTP 401).') # Sanity check: Did we get a forbidden response, which means that # the user isn't allowed to do this? Report that. if r.status_code == 403: raise exc.Forbidden("You don't have permission to do that (HTTP 403).") # Sanity check: Did we get a 404 response? # Requests with primary keys will return a 404 if there is no response, # and we want to consistently trap these. if r.status_code == 404: raise exc.NotFound('The requested object could not be found.') # Sanity check: Did we get a 405 response? # A 405 means we used a method that isn't allowed. Usually this # is a bad request, but it requires special treatment because the # API sends it as a logic error in a few situations (e.g. trying to # cancel a job that isn't running). if r.status_code == 405: raise exc.MethodNotAllowed( "The Tower server says you can't make a request with the " "%s method to that URL (%s)." % (method, url), ) # Sanity check: Did we get some other kind of error? # If so, write an appropriate error message. if r.status_code >= 400: raise exc.BadRequest( 'The Tower server claims it was sent a bad request.\n\n' '%s %s\nParams: %s\nData: %s\n\nResponse: %s' % (method, url, kwargs.get('params', None), kwargs.get('data', None), r.content.decode('utf8')) ) # Django REST Framework intelligently prints API keys in the # order that they are defined in the models and serializer. # # We want to preserve this behavior when it is possible to do so # with minimal effort, because while the order has no explicit meaning, # we make some effort to order keys in a convenient manner. # # To this end, make this response into an APIResponse subclass # (defined below), which has a `json` method that doesn't lose key # order. r.__class__ = APIResponse # Return the response object. return r
def function[request, parameter[self, method, url]]: constant[Make a request to the Ansible Tower API, and return the response. ] import module[re] variable[url] assign[=] call[name[re].sub, parameter[constant[^/?api/v[0-9]+/], constant[], name[url]]] variable[use_version] assign[=] <ast.UnaryOp object at 0x7da204623fd0> variable[url] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da2046227d0>, <ast.Call object at 0x7da2046204c0>]]] call[name[kwargs].setdefault, parameter[constant[auth], call[name[BasicTowerAuth], parameter[name[settings].username, name[settings].password, name[self]]]]] variable[headers] assign[=] call[name[kwargs].get, parameter[constant[headers], dictionary[[], []]]] if compare[call[name[method].upper, parameter[]] in tuple[[<ast.Constant object at 0x7da204623250>, <ast.Constant object at 0x7da204622020>, <ast.Constant object at 0x7da204622500>]]] begin[:] call[name[headers].setdefault, parameter[constant[Content-Type], constant[application/json]]] call[name[kwargs]][constant[headers]] assign[=] name[headers] call[name[debug].log, parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2046200a0>, <ast.Name object at 0x7da204622860>]]]]] if compare[name[method] in tuple[[<ast.Constant object at 0x7da204620310>, <ast.Constant object at 0x7da2046222c0>, <ast.Constant object at 0x7da204622b60>]]] begin[:] call[name[debug].log, parameter[binary_operation[constant[Data: %s] <ast.Mod object at 0x7da2590d6920> call[name[kwargs].get, parameter[constant[data], dictionary[[], []]]]]]] if <ast.BoolOp object at 0x7da18ede7670> begin[:] call[name[debug].log, parameter[binary_operation[constant[Params: %s] <ast.Mod object at 0x7da2590d6920> call[name[kwargs].get, parameter[constant[params], dictionary[[], []]]]]]] call[name[debug].log, parameter[constant[]]] if compare[call[name[headers].get, parameter[constant[Content-Type], constant[]]] 
equal[==] constant[application/json]] begin[:] call[name[kwargs]][constant[data]] assign[=] call[name[json].dumps, parameter[call[name[kwargs].get, parameter[constant[data], dictionary[[], []]]]]] variable[r] assign[=] call[name[self]._make_request, parameter[name[method], name[url], name[args], name[kwargs]]] if compare[name[r].status_code greater_or_equal[>=] constant[500]] begin[:] <ast.Raise object at 0x7da207f01ab0> if compare[name[r].status_code equal[==] constant[401]] begin[:] <ast.Raise object at 0x7da207f03f10> if compare[name[r].status_code equal[==] constant[403]] begin[:] <ast.Raise object at 0x7da207f013f0> if compare[name[r].status_code equal[==] constant[404]] begin[:] <ast.Raise object at 0x7da1b00842b0> if compare[name[r].status_code equal[==] constant[405]] begin[:] <ast.Raise object at 0x7da1b0085b10> if compare[name[r].status_code greater_or_equal[>=] constant[400]] begin[:] <ast.Raise object at 0x7da1b0085c90> name[r].__class__ assign[=] name[APIResponse] return[name[r]]
keyword[def] identifier[request] ( identifier[self] , identifier[method] , identifier[url] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[import] identifier[re] identifier[url] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[url] ) identifier[use_version] = keyword[not] identifier[url] . identifier[startswith] ( literal[string] ) identifier[url] = literal[string] %( identifier[self] . identifier[get_prefix] ( identifier[use_version] ), identifier[url] . identifier[lstrip] ( literal[string] )) identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[BasicTowerAuth] ( identifier[settings] . identifier[username] , identifier[settings] . identifier[password] , identifier[self] ) ) identifier[headers] = identifier[kwargs] . identifier[get] ( literal[string] ,{}) keyword[if] identifier[method] . identifier[upper] () keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[headers] . identifier[setdefault] ( literal[string] , literal[string] ) identifier[kwargs] [ literal[string] ]= identifier[headers] identifier[debug] . identifier[log] ( literal[string] %( identifier[method] , identifier[url] ), identifier[fg] = literal[string] , identifier[bold] = keyword[True] ) keyword[if] identifier[method] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[debug] . identifier[log] ( literal[string] % identifier[kwargs] . identifier[get] ( literal[string] ,{}), identifier[fg] = literal[string] , identifier[bold] = keyword[True] ) keyword[if] identifier[method] == literal[string] keyword[or] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ): identifier[debug] . identifier[log] ( literal[string] % identifier[kwargs] . identifier[get] ( literal[string] ,{}), identifier[fg] = literal[string] , identifier[bold] = keyword[True] ) identifier[debug] . identifier[log] ( literal[string] ) keyword[if] identifier[headers] . 
identifier[get] ( literal[string] , literal[string] )== literal[string] : identifier[kwargs] [ literal[string] ]= identifier[json] . identifier[dumps] ( identifier[kwargs] . identifier[get] ( literal[string] ,{})) identifier[r] = identifier[self] . identifier[_make_request] ( identifier[method] , identifier[url] , identifier[args] , identifier[kwargs] ) keyword[if] identifier[r] . identifier[status_code] >= literal[int] : keyword[raise] identifier[exc] . identifier[ServerError] ( literal[string] literal[string] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[raise] identifier[exc] . identifier[AuthError] ( literal[string] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[raise] identifier[exc] . identifier[Forbidden] ( literal[string] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[raise] identifier[exc] . identifier[NotFound] ( literal[string] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[raise] identifier[exc] . identifier[MethodNotAllowed] ( literal[string] literal[string] %( identifier[method] , identifier[url] ), ) keyword[if] identifier[r] . identifier[status_code] >= literal[int] : keyword[raise] identifier[exc] . identifier[BadRequest] ( literal[string] literal[string] % ( identifier[method] , identifier[url] , identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ), identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ), identifier[r] . identifier[content] . identifier[decode] ( literal[string] )) ) identifier[r] . identifier[__class__] = identifier[APIResponse] keyword[return] identifier[r]
def request(self, method, url, *args, **kwargs): """Make a request to the Ansible Tower API, and return the response. """ # If the URL has the api/vX at the front strip it off # This is common to have if you are extracting a URL from an existing object. # For example, any of the 'related' fields of an object will have this import re url = re.sub('^/?api/v[0-9]+/', '', url) # Piece together the full URL. use_version = not url.startswith('/o/') url = '%s%s' % (self.get_prefix(use_version), url.lstrip('/')) # Ansible Tower expects authenticated requests; add the authentication # from settings if it's provided. kwargs.setdefault('auth', BasicTowerAuth(settings.username, settings.password, self)) # POST and PUT requests will send JSON by default; make this # the content_type by default. This makes it such that we don't have # to constantly write that in our code, which gets repetitive. headers = kwargs.get('headers', {}) if method.upper() in ('PATCH', 'POST', 'PUT'): headers.setdefault('Content-Type', 'application/json') kwargs['headers'] = headers # depends on [control=['if'], data=[]] # If debugging is on, print the URL and data being sent. debug.log('%s %s' % (method, url), fg='blue', bold=True) if method in ('POST', 'PUT', 'PATCH'): debug.log('Data: %s' % kwargs.get('data', {}), fg='blue', bold=True) # depends on [control=['if'], data=[]] if method == 'GET' or kwargs.get('params', None): debug.log('Params: %s' % kwargs.get('params', {}), fg='blue', bold=True) # depends on [control=['if'], data=[]] debug.log('') # If this is a JSON request, encode the data value. if headers.get('Content-Type', '') == 'application/json': kwargs['data'] = json.dumps(kwargs.get('data', {})) # depends on [control=['if'], data=[]] r = self._make_request(method, url, args, kwargs) # Sanity check: Did the server send back some kind of internal error? # If so, bubble this up. if r.status_code >= 500: raise exc.ServerError('The Tower server sent back a server error. 
Please try again later.') # depends on [control=['if'], data=[]] # Sanity check: Did we fail to authenticate properly? # If so, fail out now; this is always a failure. if r.status_code == 401: raise exc.AuthError('Invalid Tower authentication credentials (HTTP 401).') # depends on [control=['if'], data=[]] # Sanity check: Did we get a forbidden response, which means that # the user isn't allowed to do this? Report that. if r.status_code == 403: raise exc.Forbidden("You don't have permission to do that (HTTP 403).") # depends on [control=['if'], data=[]] # Sanity check: Did we get a 404 response? # Requests with primary keys will return a 404 if there is no response, # and we want to consistently trap these. if r.status_code == 404: raise exc.NotFound('The requested object could not be found.') # depends on [control=['if'], data=[]] # Sanity check: Did we get a 405 response? # A 405 means we used a method that isn't allowed. Usually this # is a bad request, but it requires special treatment because the # API sends it as a logic error in a few situations (e.g. trying to # cancel a job that isn't running). if r.status_code == 405: raise exc.MethodNotAllowed("The Tower server says you can't make a request with the %s method to that URL (%s)." % (method, url)) # depends on [control=['if'], data=[]] # Sanity check: Did we get some other kind of error? # If so, write an appropriate error message. if r.status_code >= 400: raise exc.BadRequest('The Tower server claims it was sent a bad request.\n\n%s %s\nParams: %s\nData: %s\n\nResponse: %s' % (method, url, kwargs.get('params', None), kwargs.get('data', None), r.content.decode('utf8'))) # depends on [control=['if'], data=[]] # Django REST Framework intelligently prints API keys in the # order that they are defined in the models and serializer. 
# # We want to preserve this behavior when it is possible to do so # with minimal effort, because while the order has no explicit meaning, # we make some effort to order keys in a convenient manner. # # To this end, make this response into an APIResponse subclass # (defined below), which has a `json` method that doesn't lose key # order. r.__class__ = APIResponse # Return the response object. return r
def _set_system_monitor_mail(self, v, load=False): """ Setter method for system_monitor_mail, mapped from YANG variable /system_monitor_mail (container) If this variable is read-only (config: false) in the source YANG file, then _set_system_monitor_mail is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_system_monitor_mail() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=system_monitor_mail.system_monitor_mail, is_container='container', presence=False, yang_name="system-monitor-mail", rest_name="system-monitor-mail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure FRU mail setting', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """system_monitor_mail must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=system_monitor_mail.system_monitor_mail, is_container='container', presence=False, yang_name="system-monitor-mail", rest_name="system-monitor-mail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure FRU mail setting', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)""", }) self.__system_monitor_mail = t if hasattr(self, '_set'): self._set()
def function[_set_system_monitor_mail, parameter[self, v, load]]: constant[ Setter method for system_monitor_mail, mapped from YANG variable /system_monitor_mail (container) If this variable is read-only (config: false) in the source YANG file, then _set_system_monitor_mail is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_system_monitor_mail() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da207f01570> name[self].__system_monitor_mail assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_system_monitor_mail] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[system_monitor_mail] . identifier[system_monitor_mail] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__system_monitor_mail] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_system_monitor_mail(self, v, load=False): """ Setter method for system_monitor_mail, mapped from YANG variable /system_monitor_mail (container) If this variable is read-only (config: false) in the source YANG file, then _set_system_monitor_mail is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_system_monitor_mail() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=system_monitor_mail.system_monitor_mail, is_container='container', presence=False, yang_name='system-monitor-mail', rest_name='system-monitor-mail', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure FRU mail setting', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'system_monitor_mail must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=system_monitor_mail.system_monitor_mail, is_container=\'container\', presence=False, yang_name="system-monitor-mail", rest_name="system-monitor-mail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure FRU mail setting\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-system-monitor\', defining_module=\'brocade-system-monitor\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__system_monitor_mail = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def elementTypeName(self): """ String representation of the element type. """ fieldName = self.nodeName return str(self._ncVar.dtype.fields[fieldName][0])
def function[elementTypeName, parameter[self]]: constant[ String representation of the element type. ] variable[fieldName] assign[=] name[self].nodeName return[call[name[str], parameter[call[call[name[self]._ncVar.dtype.fields][name[fieldName]]][constant[0]]]]]
keyword[def] identifier[elementTypeName] ( identifier[self] ): literal[string] identifier[fieldName] = identifier[self] . identifier[nodeName] keyword[return] identifier[str] ( identifier[self] . identifier[_ncVar] . identifier[dtype] . identifier[fields] [ identifier[fieldName] ][ literal[int] ])
def elementTypeName(self): """ String representation of the element type. """ fieldName = self.nodeName return str(self._ncVar.dtype.fields[fieldName][0])
def _createL2456Column(network, networkConfig, suffix=""):
  """
  Build one L2456 cortical column inside ``network``, naming every region
  with the given suffix.

  Adds three sensors (location, coarse, fine), two pooler layers (L2, L5)
  and two apical-TM layers (L4, L6), wires them together, and assigns
  execution phases.  Returns the same ``network`` instance.
  """
  location_name = "locationInput" + suffix
  sensor_name = "sensorInput" + suffix
  coarse_name = "coarseSensorInput" + suffix
  l2_name = "L2Column" + suffix
  l4_name = "L4Column" + suffix
  l5_name = "L5Column" + suffix
  l6_name = "L6Column" + suffix

  # TODO: Convert the location input to a coordinate sensor region once ready.
  network.addRegion(
      location_name, "py.CoordinateSensorRegion",
      json.dumps(networkConfig["locationParams"]))
  network.addRegion(
      coarse_name, "py.RawSensor",
      json.dumps(networkConfig["coarseSensorParams"]))
  network.addRegion(
      sensor_name, "py.RawSensor",
      json.dumps(networkConfig["sensorParams"]))

  # Pooler layers.
  network.addRegion(
      l2_name, "py.ColumnPoolerRegion",
      json.dumps(networkConfig["L2Params"]))
  network.addRegion(
      l5_name, "py.ColumnPoolerRegion",
      json.dumps(networkConfig["L5Params"]))

  # L6 receives basal input from the location sensor and apical input from L5.
  l6_params = copy.deepcopy(networkConfig["L6Params"])
  l6_params["basalInputWidth"] = networkConfig["locationParams"]["outputWidth"]
  l6_params["apicalInputWidth"] = networkConfig["L5Params"]["cellCount"]
  network.addRegion(l6_name, "py.ApicalTMPairRegion", json.dumps(l6_params))

  # L4 receives basal input from every L6 cell and apical input from L2.
  l4_params = copy.deepcopy(networkConfig["L4Params"])
  l4_params["basalInputWidth"] = (
      l6_params["columnCount"] * l6_params["cellsPerColumn"])
  l4_params["apicalInputWidth"] = networkConfig["L2Params"]["cellCount"]
  network.addRegion(l4_name, "py.ApicalTMPairRegion", json.dumps(l4_params))

  # Sanity-check that sensor widths line up with the TM column counts.
  assert (network.regions[l6_name].getParameter("columnCount") ==
          network.regions[coarse_name].getParameter("outputWidth")), \
    "L6 column count must equal coarse sensor width"
  assert (network.regions[l4_name].getParameter("columnCount") ==
          network.regions[sensor_name].getParameter("outputWidth")), \
    "L4 column count must equal sensor width"

  # Feed the sensors into L6 / L4.
  network.link(location_name, l6_name, "UniformLink", "",
               srcOutput="dataOut", destInput="basalInput")
  network.link(coarse_name, l6_name, "UniformLink", "",
               srcOutput="dataOut", destInput="activeColumns")
  network.link(sensor_name, l4_name, "UniformLink", "",
               srcOutput="dataOut", destInput="activeColumns")

  # L6 supplies the basal (location) context for L4.
  network.link(l6_name, l4_name, "UniformLink", "",
               srcOutput="activeCells", destInput="basalInput")

  # Pool L4 into L2, and L6 into L5.
  network.link(l4_name, l2_name, "UniformLink", "",
               srcOutput="activeCells", destInput="feedforwardInput")
  network.link(l4_name, l2_name, "UniformLink", "",
               srcOutput="predictedActiveCells",
               destInput="feedforwardGrowthCandidates")
  network.link(l6_name, l5_name, "UniformLink", "",
               srcOutput="activeCells", destInput="feedforwardInput")
  network.link(l6_name, l5_name, "UniformLink", "",
               srcOutput="predictedActiveCells",
               destInput="feedforwardGrowthCandidates")

  # Feedback paths (L2 -> L4, L5 -> L6), delayed by one time step.
  network.link(l2_name, l4_name, "UniformLink", "",
               srcOutput="feedForwardOutput", destInput="apicalInput",
               propagationDelay=1)
  network.link(l5_name, l6_name, "UniformLink", "",
               srcOutput="feedForwardOutput", destInput="apicalInput",
               propagationDelay=1)

  # Resets need explicit wiring only into the pooler layers (L5 and L2);
  # for L6 and L4 an empty input is already a sufficient reset.
  network.link(sensor_name, l5_name, "UniformLink", "",
               srcOutput="resetOut", destInput="resetIn")
  network.link(sensor_name, l2_name, "UniformLink", "",
               srcOutput="resetOut", destInput="resetIn")

  # Fix the execution order explicitly: sensors (0), L6 (2), L5 (3),
  # L4 (4), L2 (5).  Required when multiple columns are created, because
  # execution order otherwise need not match creation order.  Phase 1 is
  # reserved for optional spatial poolers on top of the sensors.
  for region_name, phase in ((location_name, 0),
                             (sensor_name, 0),
                             (coarse_name, 0),
                             (l6_name, 2),
                             (l5_name, 3),
                             (l4_name, 4),
                             (l2_name, 5)):
    network.setPhases(region_name, [phase])

  return network
def function[_createL2456Column, parameter[network, networkConfig, suffix]]: constant[ Create a single L2456 column with appropriate suffix on the name. ] variable[locationInputName] assign[=] binary_operation[constant[locationInput] + name[suffix]] variable[sensorInputName] assign[=] binary_operation[constant[sensorInput] + name[suffix]] variable[coarseSensorInputName] assign[=] binary_operation[constant[coarseSensorInput] + name[suffix]] variable[L2ColumnName] assign[=] binary_operation[constant[L2Column] + name[suffix]] variable[L4ColumnName] assign[=] binary_operation[constant[L4Column] + name[suffix]] variable[L5ColumnName] assign[=] binary_operation[constant[L5Column] + name[suffix]] variable[L6ColumnName] assign[=] binary_operation[constant[L6Column] + name[suffix]] call[name[network].addRegion, parameter[name[locationInputName], constant[py.CoordinateSensorRegion], call[name[json].dumps, parameter[call[name[networkConfig]][constant[locationParams]]]]]] call[name[network].addRegion, parameter[name[coarseSensorInputName], constant[py.RawSensor], call[name[json].dumps, parameter[call[name[networkConfig]][constant[coarseSensorParams]]]]]] call[name[network].addRegion, parameter[name[sensorInputName], constant[py.RawSensor], call[name[json].dumps, parameter[call[name[networkConfig]][constant[sensorParams]]]]]] call[name[network].addRegion, parameter[name[L2ColumnName], constant[py.ColumnPoolerRegion], call[name[json].dumps, parameter[call[name[networkConfig]][constant[L2Params]]]]]] call[name[network].addRegion, parameter[name[L5ColumnName], constant[py.ColumnPoolerRegion], call[name[json].dumps, parameter[call[name[networkConfig]][constant[L5Params]]]]]] variable[L6Params] assign[=] call[name[copy].deepcopy, parameter[call[name[networkConfig]][constant[L6Params]]]] call[name[L6Params]][constant[basalInputWidth]] assign[=] call[call[name[networkConfig]][constant[locationParams]]][constant[outputWidth]] call[name[L6Params]][constant[apicalInputWidth]] assign[=] 
call[call[name[networkConfig]][constant[L5Params]]][constant[cellCount]] call[name[network].addRegion, parameter[name[L6ColumnName], constant[py.ApicalTMPairRegion], call[name[json].dumps, parameter[name[L6Params]]]]] variable[L4Params] assign[=] call[name[copy].deepcopy, parameter[call[name[networkConfig]][constant[L4Params]]]] call[name[L4Params]][constant[basalInputWidth]] assign[=] binary_operation[call[name[L6Params]][constant[columnCount]] * call[name[L6Params]][constant[cellsPerColumn]]] call[name[L4Params]][constant[apicalInputWidth]] assign[=] call[call[name[networkConfig]][constant[L2Params]]][constant[cellCount]] call[name[network].addRegion, parameter[name[L4ColumnName], constant[py.ApicalTMPairRegion], call[name[json].dumps, parameter[name[L4Params]]]]] assert[compare[call[call[name[network].regions][name[L6ColumnName]].getParameter, parameter[constant[columnCount]]] equal[==] call[call[name[network].regions][name[coarseSensorInputName]].getParameter, parameter[constant[outputWidth]]]]] assert[compare[call[call[name[network].regions][name[L4ColumnName]].getParameter, parameter[constant[columnCount]]] equal[==] call[call[name[network].regions][name[sensorInputName]].getParameter, parameter[constant[outputWidth]]]]] call[name[network].link, parameter[name[locationInputName], name[L6ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[coarseSensorInputName], name[L6ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[sensorInputName], name[L4ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L6ColumnName], name[L4ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L4ColumnName], name[L2ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L4ColumnName], name[L2ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L6ColumnName], 
name[L5ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L6ColumnName], name[L5ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L2ColumnName], name[L4ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[L5ColumnName], name[L6ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[sensorInputName], name[L5ColumnName], constant[UniformLink], constant[]]] call[name[network].link, parameter[name[sensorInputName], name[L2ColumnName], constant[UniformLink], constant[]]] call[name[network].setPhases, parameter[name[locationInputName], list[[<ast.Constant object at 0x7da1b0902f80>]]]] call[name[network].setPhases, parameter[name[sensorInputName], list[[<ast.Constant object at 0x7da1b0902b60>]]]] call[name[network].setPhases, parameter[name[coarseSensorInputName], list[[<ast.Constant object at 0x7da1b0902da0>]]]] call[name[network].setPhases, parameter[name[L6ColumnName], list[[<ast.Constant object at 0x7da1b0901d50>]]]] call[name[network].setPhases, parameter[name[L5ColumnName], list[[<ast.Constant object at 0x7da1b0900730>]]]] call[name[network].setPhases, parameter[name[L4ColumnName], list[[<ast.Constant object at 0x7da1b0902920>]]]] call[name[network].setPhases, parameter[name[L2ColumnName], list[[<ast.Constant object at 0x7da1b0903f10>]]]] return[name[network]]
keyword[def] identifier[_createL2456Column] ( identifier[network] , identifier[networkConfig] , identifier[suffix] = literal[string] ): literal[string] identifier[locationInputName] = literal[string] + identifier[suffix] identifier[sensorInputName] = literal[string] + identifier[suffix] identifier[coarseSensorInputName] = literal[string] + identifier[suffix] identifier[L2ColumnName] = literal[string] + identifier[suffix] identifier[L4ColumnName] = literal[string] + identifier[suffix] identifier[L5ColumnName] = literal[string] + identifier[suffix] identifier[L6ColumnName] = literal[string] + identifier[suffix] identifier[network] . identifier[addRegion] ( identifier[locationInputName] , literal[string] , identifier[json] . identifier[dumps] ( identifier[networkConfig] [ literal[string] ])) identifier[network] . identifier[addRegion] ( identifier[coarseSensorInputName] , literal[string] , identifier[json] . identifier[dumps] ( identifier[networkConfig] [ literal[string] ])) identifier[network] . identifier[addRegion] ( identifier[sensorInputName] , literal[string] , identifier[json] . identifier[dumps] ( identifier[networkConfig] [ literal[string] ])) identifier[network] . identifier[addRegion] ( identifier[L2ColumnName] , literal[string] , identifier[json] . identifier[dumps] ( identifier[networkConfig] [ literal[string] ])) identifier[network] . identifier[addRegion] ( identifier[L5ColumnName] , literal[string] , identifier[json] . identifier[dumps] ( identifier[networkConfig] [ literal[string] ])) identifier[L6Params] = identifier[copy] . identifier[deepcopy] ( identifier[networkConfig] [ literal[string] ]) identifier[L6Params] [ literal[string] ]= identifier[networkConfig] [ literal[string] ][ literal[string] ] identifier[L6Params] [ literal[string] ]= identifier[networkConfig] [ literal[string] ][ literal[string] ] identifier[network] . identifier[addRegion] ( identifier[L6ColumnName] , literal[string] , identifier[json] . 
identifier[dumps] ( identifier[L6Params] )) identifier[L4Params] = identifier[copy] . identifier[deepcopy] ( identifier[networkConfig] [ literal[string] ]) identifier[L4Params] [ literal[string] ]=( identifier[L6Params] [ literal[string] ]* identifier[L6Params] [ literal[string] ]) identifier[L4Params] [ literal[string] ]= identifier[networkConfig] [ literal[string] ][ literal[string] ] identifier[network] . identifier[addRegion] ( identifier[L4ColumnName] , literal[string] , identifier[json] . identifier[dumps] ( identifier[L4Params] )) keyword[assert] ( identifier[network] . identifier[regions] [ identifier[L6ColumnName] ]. identifier[getParameter] ( literal[string] )== identifier[network] . identifier[regions] [ identifier[coarseSensorInputName] ]. identifier[getParameter] ( literal[string] )), literal[string] keyword[assert] ( identifier[network] . identifier[regions] [ identifier[L4ColumnName] ]. identifier[getParameter] ( literal[string] )== identifier[network] . identifier[regions] [ identifier[sensorInputName] ]. identifier[getParameter] ( literal[string] )), literal[string] identifier[network] . identifier[link] ( identifier[locationInputName] , identifier[L6ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[coarseSensorInputName] , identifier[L6ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[sensorInputName] , identifier[L4ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[L6ColumnName] , identifier[L4ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . 
identifier[link] ( identifier[L4ColumnName] , identifier[L2ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[L4ColumnName] , identifier[L2ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[L6ColumnName] , identifier[L5ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[L6ColumnName] , identifier[L5ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[L2ColumnName] , identifier[L4ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] , identifier[propagationDelay] = literal[int] ) identifier[network] . identifier[link] ( identifier[L5ColumnName] , identifier[L6ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] , identifier[propagationDelay] = literal[int] ) identifier[network] . identifier[link] ( identifier[sensorInputName] , identifier[L5ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[link] ( identifier[sensorInputName] , identifier[L2ColumnName] , literal[string] , literal[string] , identifier[srcOutput] = literal[string] , identifier[destInput] = literal[string] ) identifier[network] . identifier[setPhases] ( identifier[locationInputName] ,[ literal[int] ]) identifier[network] . 
identifier[setPhases] ( identifier[sensorInputName] ,[ literal[int] ]) identifier[network] . identifier[setPhases] ( identifier[coarseSensorInputName] ,[ literal[int] ]) identifier[network] . identifier[setPhases] ( identifier[L6ColumnName] ,[ literal[int] ]) identifier[network] . identifier[setPhases] ( identifier[L5ColumnName] ,[ literal[int] ]) identifier[network] . identifier[setPhases] ( identifier[L4ColumnName] ,[ literal[int] ]) identifier[network] . identifier[setPhases] ( identifier[L2ColumnName] ,[ literal[int] ]) keyword[return] identifier[network]
def _createL2456Column(network, networkConfig, suffix=''): """ Create a single L2456 column with appropriate suffix on the name. """ locationInputName = 'locationInput' + suffix sensorInputName = 'sensorInput' + suffix coarseSensorInputName = 'coarseSensorInput' + suffix L2ColumnName = 'L2Column' + suffix L4ColumnName = 'L4Column' + suffix L5ColumnName = 'L5Column' + suffix L6ColumnName = 'L6Column' + suffix # TODO: Convert locationInput to a coordinate sensor region once its ready # Add the three sensors to network. network.addRegion(locationInputName, 'py.CoordinateSensorRegion', json.dumps(networkConfig['locationParams'])) network.addRegion(coarseSensorInputName, 'py.RawSensor', json.dumps(networkConfig['coarseSensorParams'])) network.addRegion(sensorInputName, 'py.RawSensor', json.dumps(networkConfig['sensorParams'])) # Add L2/L5 column pooler regions network.addRegion(L2ColumnName, 'py.ColumnPoolerRegion', json.dumps(networkConfig['L2Params'])) network.addRegion(L5ColumnName, 'py.ColumnPoolerRegion', json.dumps(networkConfig['L5Params'])) # Add L4/L6 extended temporal memory regions L6Params = copy.deepcopy(networkConfig['L6Params']) L6Params['basalInputWidth'] = networkConfig['locationParams']['outputWidth'] L6Params['apicalInputWidth'] = networkConfig['L5Params']['cellCount'] network.addRegion(L6ColumnName, 'py.ApicalTMPairRegion', json.dumps(L6Params)) L4Params = copy.deepcopy(networkConfig['L4Params']) L4Params['basalInputWidth'] = L6Params['columnCount'] * L6Params['cellsPerColumn'] L4Params['apicalInputWidth'] = networkConfig['L2Params']['cellCount'] network.addRegion(L4ColumnName, 'py.ApicalTMPairRegion', json.dumps(L4Params)) # Once regions are created, ensure inputs match column counts assert network.regions[L6ColumnName].getParameter('columnCount') == network.regions[coarseSensorInputName].getParameter('outputWidth'), 'L6 column count must equal coarse sensor width' assert network.regions[L4ColumnName].getParameter('columnCount') == 
network.regions[sensorInputName].getParameter('outputWidth'), 'L4 column count must equal sensor width' # Link up the sensors network.link(locationInputName, L6ColumnName, 'UniformLink', '', srcOutput='dataOut', destInput='basalInput') network.link(coarseSensorInputName, L6ColumnName, 'UniformLink', '', srcOutput='dataOut', destInput='activeColumns') network.link(sensorInputName, L4ColumnName, 'UniformLink', '', srcOutput='dataOut', destInput='activeColumns') # Link L6 to L4 network.link(L6ColumnName, L4ColumnName, 'UniformLink', '', srcOutput='activeCells', destInput='basalInput') # Link L4 to L2, L6 to L5 network.link(L4ColumnName, L2ColumnName, 'UniformLink', '', srcOutput='activeCells', destInput='feedforwardInput') network.link(L4ColumnName, L2ColumnName, 'UniformLink', '', srcOutput='predictedActiveCells', destInput='feedforwardGrowthCandidates') network.link(L6ColumnName, L5ColumnName, 'UniformLink', '', srcOutput='activeCells', destInput='feedforwardInput') network.link(L6ColumnName, L5ColumnName, 'UniformLink', '', srcOutput='predictedActiveCells', destInput='feedforwardGrowthCandidates') # Link L2 feedback to L4, L5 to L6 network.link(L2ColumnName, L4ColumnName, 'UniformLink', '', srcOutput='feedForwardOutput', destInput='apicalInput', propagationDelay=1) network.link(L5ColumnName, L6ColumnName, 'UniformLink', '', srcOutput='feedForwardOutput', destInput='apicalInput', propagationDelay=1) # Link reset outputs to L5 and L2. For L6 and L4, an empty input is sufficient # for a reset. network.link(sensorInputName, L5ColumnName, 'UniformLink', '', srcOutput='resetOut', destInput='resetIn') network.link(sensorInputName, L2ColumnName, 'UniformLink', '', srcOutput='resetOut', destInput='resetIn') # Set phases appropriately so regions are executed in the proper sequence. # This is required particularly when we create multiple columns - the order of # execution is not the same as the order of region creation. 
# All sensors have phase 0 # All L6's have phase 2 # All L5's have phase 3 # All L4's have phase 4 # All L2's have phase 5 # Note: we skip phase 1 in case we add spatial poolers on top of the sensors. network.setPhases(locationInputName, [0]) network.setPhases(sensorInputName, [0]) network.setPhases(coarseSensorInputName, [0]) network.setPhases(L6ColumnName, [2]) network.setPhases(L5ColumnName, [3]) network.setPhases(L4ColumnName, [4]) network.setPhases(L2ColumnName, [5]) return network
def column_query(self, sql, param=None):
    """
    RETURN RESULTS AS A [column][row_num] GRID

    Runs ``sql`` (after template expansion with ``param``, if given) and
    transposes the fetched rows so the result is indexed column-first.
    """
    self._execute_backlog()
    try:
        previous_cursor = self.cursor
        if not previous_cursor:
            # No open transaction: create a throwaway cursor so
            # non-transactional reads still work.
            self.cursor = self.db.cursor()
            self.cursor.execute("SET TIME_ZONE='+00:00'")
            self.cursor.close()
            self.cursor = self.db.cursor()

        if param:
            sql = expand_template(sql, quote_param(param))
        sql = self.preamble + outdent(sql)
        if self.debug:
            Log.note("Execute SQL:\n{{sql}}", sql=indent(sql))

        self.cursor.execute(sql)
        rows = [[utf8_to_unicode(value) for value in record] for record in self.cursor]
        # columns = [utf8_to_unicode(d[0]) for d in coalesce(self.cursor.description, [])]
        columns = transpose(*rows)

        if not previous_cursor:
            # Tear down the cursor created for the non-transactional read.
            self.cursor.close()
            self.cursor = None

        return columns
    except Exception as e:
        # NOTE(review): relies on e.message (project exception type);
        # plain Python 3 exceptions have no .message attribute - confirm.
        if isinstance(e, InterfaceError) or e.message.find("InterfaceError") >= 0:
            Log.error("Did you close the db connection?", e)
        Log.error("Problem executing SQL:\n{{sql|indent}}", sql=sql, cause=e, stack_depth=1)
def diff(self, *args):
    """Run forward mode and return only the derivative, dropping the value."""
    parsed = self._parse_args_forward_mode(*args)
    _, derivative = self._forward_mode(*parsed)
    return derivative
def delta_E(self):
    """
    The change in system energy if this jump were accepted.

    Args:
        None

    Returns:
        (Float): delta E
    """
    delta = self.final_site.energy - self.initial_site.energy
    if self.nearest_neighbour_energy:
        delta += self.nearest_neighbour_delta_E()
    if self.coordination_number_energy:
        delta += self.coordination_number_delta_E()
    return delta
def compileFST(fst):
    u"""
    convert FST to byte array representing arcs

    Each transition arc is packed as: a 1-byte flag, a 1-byte label,
    an optional 4-byte output length plus the output bytes, and a
    4-byte relative offset to the target state.  Final states emit an
    extra FLAG_FINAL_ARC record carrying their final outputs.  The arc
    list is built backwards and reversed at the end, so every target
    state already has a known address when it is referenced.
    """
    arcs = []      # serialized arcs, accumulated in reverse order
    address = {}   # state id -> byte offset just past that state's arcs
    pos = 0        # running byte offset over everything emitted so far
    for (num, s) in enumerate(fst.dictionary.values()):  # num is unused
        # Transitions are visited in reverse label order, so i == 0 is
        # the arc that gets tagged as this state's last arc.
        for i, (c, v) in enumerate(sorted(s.trans_map.items(), reverse=True)):
            bary = bytearray()
            flag = 0
            output_size, output = 0, bytes()
            if i == 0:
                flag += FLAG_LAST_ARC
            if v['output']:
                flag += FLAG_ARC_HAS_OUTPUT
                output_size = len(v['output'])
                output = v['output']
            # encode flag, label, output_size, output, relative target address
            bary += pack('b', flag)
            if PY3:
                bary += pack('B', c)  # label is an int on Python 3
            else:
                bary += pack('c', c)  # label is a 1-char str on Python 2
            if output_size > 0:
                bary += pack('I', output_size)
                bary += output
            # Target state must already be compiled; encode the jump as a
            # positive offset relative to the end of this arc (+4 for the
            # address field itself).
            next_addr = address.get(v['state'].id)
            assert next_addr is not None
            target = (pos + len(bary) + 4) - next_addr
            assert target > 0
            bary += pack('I', target)
            # add the arc represented in bytes
            if PY3:
                arcs.append(bytes(bary))
            else:
                arcs.append(b''.join(chr(b) for b in bary))
            # address count up
            pos += len(bary)
        if s.is_final():
            bary = bytearray()
            # final state
            flag = FLAG_FINAL_ARC
            output_count = 0
            if s.final_output and any(len(e) > 0 for e in s.final_output):
                # the arc has final output
                flag += FLAG_ARC_HAS_FINAL_OUTPUT
                output_count = len(s.final_output)
            if not s.trans_map:
                # no ordinary transitions: this final record is also the last
                flag += FLAG_LAST_ARC
            # encode flag, output size, output
            bary += pack('b', flag)
            if output_count:
                bary += pack('I', output_count)
                for out in s.final_output:
                    output_size = len(out)
                    bary += pack('I', output_size)
                    if output_size:
                        bary += out
            # add the arc represented in bytes
            if PY3:
                arcs.append(bytes(bary))
            else:
                arcs.append(b''.join(chr(b) for b in bary))
            # address count up
            pos += len(bary)
        address[s.id] = pos

    logger.debug('compiled arcs size: %d' % len(arcs))
    arcs.reverse()
    return b''.join(arcs)
def function[compileFST, parameter[fst]]: constant[ convert FST to byte array representing arcs ] variable[arcs] assign[=] list[[]] variable[address] assign[=] dictionary[[], []] variable[pos] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da20e9b16c0>, <ast.Name object at 0x7da20e9b33d0>]]] in starred[call[name[enumerate], parameter[call[name[fst].dictionary.values, parameter[]]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20e9b33a0>, <ast.Tuple object at 0x7da20e9b2590>]]] in starred[call[name[enumerate], parameter[call[name[sorted], parameter[call[name[s].trans_map.items, parameter[]]]]]]] begin[:] variable[bary] assign[=] call[name[bytearray], parameter[]] variable[flag] assign[=] constant[0] <ast.Tuple object at 0x7da20e9b3a60> assign[=] tuple[[<ast.Constant object at 0x7da20e9b3310>, <ast.Call object at 0x7da20e9b28f0>]] if compare[name[i] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da20e9b3130> if call[name[v]][constant[output]] begin[:] <ast.AugAssign object at 0x7da20e9b2fb0> variable[output_size] assign[=] call[name[len], parameter[call[name[v]][constant[output]]]] variable[output] assign[=] call[name[v]][constant[output]] <ast.AugAssign object at 0x7da20e9b0670> if name[PY3] begin[:] <ast.AugAssign object at 0x7da20eb29cc0> if compare[name[output_size] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da2054a4370> <ast.AugAssign object at 0x7da2054a4670> variable[next_addr] assign[=] call[name[address].get, parameter[call[name[v]][constant[state]].id]] assert[compare[name[next_addr] is_not constant[None]]] variable[target] assign[=] binary_operation[binary_operation[binary_operation[name[pos] + call[name[len], parameter[name[bary]]]] + constant[4]] - name[next_addr]] assert[compare[name[target] greater[>] constant[0]]] <ast.AugAssign object at 0x7da2054a79d0> if name[PY3] begin[:] call[name[arcs].append, parameter[call[name[bytes], parameter[name[bary]]]]] <ast.AugAssign object at 0x7da20c991270> if 
call[name[s].is_final, parameter[]] begin[:] variable[bary] assign[=] call[name[bytearray], parameter[]] variable[flag] assign[=] name[FLAG_FINAL_ARC] variable[output_count] assign[=] constant[0] if <ast.BoolOp object at 0x7da1b08cbd90> begin[:] <ast.AugAssign object at 0x7da204962b00> variable[output_count] assign[=] call[name[len], parameter[name[s].final_output]] if <ast.UnaryOp object at 0x7da204960df0> begin[:] <ast.AugAssign object at 0x7da2049601c0> <ast.AugAssign object at 0x7da204963e20> if name[output_count] begin[:] <ast.AugAssign object at 0x7da204963d30> for taget[name[out]] in starred[name[s].final_output] begin[:] variable[output_size] assign[=] call[name[len], parameter[name[out]]] <ast.AugAssign object at 0x7da2049629b0> if name[output_size] begin[:] <ast.AugAssign object at 0x7da2049603d0> if name[PY3] begin[:] call[name[arcs].append, parameter[call[name[bytes], parameter[name[bary]]]]] <ast.AugAssign object at 0x7da2049625c0> call[name[address]][name[s].id] assign[=] name[pos] call[name[logger].debug, parameter[binary_operation[constant[compiled arcs size: %d] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[arcs]]]]]] call[name[arcs].reverse, parameter[]] return[call[constant[b''].join, parameter[name[arcs]]]]
keyword[def] identifier[compileFST] ( identifier[fst] ): literal[string] identifier[arcs] =[] identifier[address] ={} identifier[pos] = literal[int] keyword[for] ( identifier[num] , identifier[s] ) keyword[in] identifier[enumerate] ( identifier[fst] . identifier[dictionary] . identifier[values] ()): keyword[for] identifier[i] ,( identifier[c] , identifier[v] ) keyword[in] identifier[enumerate] ( identifier[sorted] ( identifier[s] . identifier[trans_map] . identifier[items] (), identifier[reverse] = keyword[True] )): identifier[bary] = identifier[bytearray] () identifier[flag] = literal[int] identifier[output_size] , identifier[output] = literal[int] , identifier[bytes] () keyword[if] identifier[i] == literal[int] : identifier[flag] += identifier[FLAG_LAST_ARC] keyword[if] identifier[v] [ literal[string] ]: identifier[flag] += identifier[FLAG_ARC_HAS_OUTPUT] identifier[output_size] = identifier[len] ( identifier[v] [ literal[string] ]) identifier[output] = identifier[v] [ literal[string] ] identifier[bary] += identifier[pack] ( literal[string] , identifier[flag] ) keyword[if] identifier[PY3] : identifier[bary] += identifier[pack] ( literal[string] , identifier[c] ) keyword[else] : identifier[bary] += identifier[pack] ( literal[string] , identifier[c] ) keyword[if] identifier[output_size] > literal[int] : identifier[bary] += identifier[pack] ( literal[string] , identifier[output_size] ) identifier[bary] += identifier[output] identifier[next_addr] = identifier[address] . identifier[get] ( identifier[v] [ literal[string] ]. identifier[id] ) keyword[assert] identifier[next_addr] keyword[is] keyword[not] keyword[None] identifier[target] =( identifier[pos] + identifier[len] ( identifier[bary] )+ literal[int] )- identifier[next_addr] keyword[assert] identifier[target] > literal[int] identifier[bary] += identifier[pack] ( literal[string] , identifier[target] ) keyword[if] identifier[PY3] : identifier[arcs] . 
identifier[append] ( identifier[bytes] ( identifier[bary] )) keyword[else] : identifier[arcs] . identifier[append] ( literal[string] . identifier[join] ( identifier[chr] ( identifier[b] ) keyword[for] identifier[b] keyword[in] identifier[bary] )) identifier[pos] += identifier[len] ( identifier[bary] ) keyword[if] identifier[s] . identifier[is_final] (): identifier[bary] = identifier[bytearray] () identifier[flag] = identifier[FLAG_FINAL_ARC] identifier[output_count] = literal[int] keyword[if] identifier[s] . identifier[final_output] keyword[and] identifier[any] ( identifier[len] ( identifier[e] )> literal[int] keyword[for] identifier[e] keyword[in] identifier[s] . identifier[final_output] ): identifier[flag] += identifier[FLAG_ARC_HAS_FINAL_OUTPUT] identifier[output_count] = identifier[len] ( identifier[s] . identifier[final_output] ) keyword[if] keyword[not] identifier[s] . identifier[trans_map] : identifier[flag] += identifier[FLAG_LAST_ARC] identifier[bary] += identifier[pack] ( literal[string] , identifier[flag] ) keyword[if] identifier[output_count] : identifier[bary] += identifier[pack] ( literal[string] , identifier[output_count] ) keyword[for] identifier[out] keyword[in] identifier[s] . identifier[final_output] : identifier[output_size] = identifier[len] ( identifier[out] ) identifier[bary] += identifier[pack] ( literal[string] , identifier[output_size] ) keyword[if] identifier[output_size] : identifier[bary] += identifier[out] keyword[if] identifier[PY3] : identifier[arcs] . identifier[append] ( identifier[bytes] ( identifier[bary] )) keyword[else] : identifier[arcs] . identifier[append] ( literal[string] . identifier[join] ( identifier[chr] ( identifier[b] ) keyword[for] identifier[b] keyword[in] identifier[bary] )) identifier[pos] += identifier[len] ( identifier[bary] ) identifier[address] [ identifier[s] . identifier[id] ]= identifier[pos] identifier[logger] . identifier[debug] ( literal[string] % identifier[len] ( identifier[arcs] )) identifier[arcs] . 
identifier[reverse] () keyword[return] literal[string] . identifier[join] ( identifier[arcs] )
def compileFST(fst): u""" convert FST to byte array representing arcs """ arcs = [] address = {} pos = 0 for (num, s) in enumerate(fst.dictionary.values()): for (i, (c, v)) in enumerate(sorted(s.trans_map.items(), reverse=True)): bary = bytearray() flag = 0 (output_size, output) = (0, bytes()) if i == 0: flag += FLAG_LAST_ARC # depends on [control=['if'], data=[]] if v['output']: flag += FLAG_ARC_HAS_OUTPUT output_size = len(v['output']) output = v['output'] # depends on [control=['if'], data=[]] # encode flag, label, output_size, output, relative target address bary += pack('b', flag) if PY3: bary += pack('B', c) # depends on [control=['if'], data=[]] else: bary += pack('c', c) if output_size > 0: bary += pack('I', output_size) bary += output # depends on [control=['if'], data=['output_size']] next_addr = address.get(v['state'].id) assert next_addr is not None target = pos + len(bary) + 4 - next_addr assert target > 0 bary += pack('I', target) # add the arc represented in bytes if PY3: arcs.append(bytes(bary)) # depends on [control=['if'], data=[]] else: arcs.append(b''.join((chr(b) for b in bary))) # address count up pos += len(bary) # depends on [control=['for'], data=[]] if s.is_final(): bary = bytearray() # final state flag = FLAG_FINAL_ARC output_count = 0 if s.final_output and any((len(e) > 0 for e in s.final_output)): # the arc has final output flag += FLAG_ARC_HAS_FINAL_OUTPUT output_count = len(s.final_output) # depends on [control=['if'], data=[]] if not s.trans_map: flag += FLAG_LAST_ARC # depends on [control=['if'], data=[]] # encode flag, output size, output bary += pack('b', flag) if output_count: bary += pack('I', output_count) for out in s.final_output: output_size = len(out) bary += pack('I', output_size) if output_size: bary += out # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['out']] # depends on [control=['if'], data=[]] # add the arc represented in bytes if PY3: arcs.append(bytes(bary)) # depends on 
[control=['if'], data=[]] else: arcs.append(b''.join((chr(b) for b in bary))) # address count up pos += len(bary) # depends on [control=['if'], data=[]] address[s.id] = pos # depends on [control=['for'], data=[]] logger.debug('compiled arcs size: %d' % len(arcs)) arcs.reverse() return b''.join(arcs)
def save(self, eopatch, use_tmp=True): """ Method which does the saving :param eopatch: EOPatch containing the data which will be saved :type eopatch: EOPatch :param use_tmp: If `True` data will be saved to temporary file, otherwise it will be saved to intended (i.e. final) location :type use_tmp: bool """ filename = self.tmp_filename if use_tmp else self.final_filename if self.feature_name is None: data = eopatch[self.feature_type] if self.feature_type.has_dict(): data = data.get_dict() if self.feature_type is FeatureType.BBOX: data = tuple(data) + (int(data.crs.value),) else: data = eopatch[self.feature_type][self.feature_name] file_dir = os.path.dirname(filename) os.makedirs(file_dir, exist_ok=True) if self.compress_level: file_handle = gzip.GzipFile(filename, 'w', self.compress_level) else: file_handle = open(filename, 'wb') with file_handle as outfile: LOGGER.debug("Saving (%s, %s) to %s", str(self.feature_type), str(self.feature_name), filename) if self.file_format is FileFormat.NPY: np.save(outfile, data) elif self.file_format is FileFormat.PICKLE: pickle.dump(data, outfile) else: ValueError('File {} was not saved because saving in file format {} is currently not ' 'supported'.format(filename, self.file_format))
def function[save, parameter[self, eopatch, use_tmp]]: constant[ Method which does the saving :param eopatch: EOPatch containing the data which will be saved :type eopatch: EOPatch :param use_tmp: If `True` data will be saved to temporary file, otherwise it will be saved to intended (i.e. final) location :type use_tmp: bool ] variable[filename] assign[=] <ast.IfExp object at 0x7da20c993190> if compare[name[self].feature_name is constant[None]] begin[:] variable[data] assign[=] call[name[eopatch]][name[self].feature_type] if call[name[self].feature_type.has_dict, parameter[]] begin[:] variable[data] assign[=] call[name[data].get_dict, parameter[]] if compare[name[self].feature_type is name[FeatureType].BBOX] begin[:] variable[data] assign[=] binary_operation[call[name[tuple], parameter[name[data]]] + tuple[[<ast.Call object at 0x7da20c990f70>]]] variable[file_dir] assign[=] call[name[os].path.dirname, parameter[name[filename]]] call[name[os].makedirs, parameter[name[file_dir]]] if name[self].compress_level begin[:] variable[file_handle] assign[=] call[name[gzip].GzipFile, parameter[name[filename], constant[w], name[self].compress_level]] with name[file_handle] begin[:] call[name[LOGGER].debug, parameter[constant[Saving (%s, %s) to %s], call[name[str], parameter[name[self].feature_type]], call[name[str], parameter[name[self].feature_name]], name[filename]]] if compare[name[self].file_format is name[FileFormat].NPY] begin[:] call[name[np].save, parameter[name[outfile], name[data]]]
keyword[def] identifier[save] ( identifier[self] , identifier[eopatch] , identifier[use_tmp] = keyword[True] ): literal[string] identifier[filename] = identifier[self] . identifier[tmp_filename] keyword[if] identifier[use_tmp] keyword[else] identifier[self] . identifier[final_filename] keyword[if] identifier[self] . identifier[feature_name] keyword[is] keyword[None] : identifier[data] = identifier[eopatch] [ identifier[self] . identifier[feature_type] ] keyword[if] identifier[self] . identifier[feature_type] . identifier[has_dict] (): identifier[data] = identifier[data] . identifier[get_dict] () keyword[if] identifier[self] . identifier[feature_type] keyword[is] identifier[FeatureType] . identifier[BBOX] : identifier[data] = identifier[tuple] ( identifier[data] )+( identifier[int] ( identifier[data] . identifier[crs] . identifier[value] ),) keyword[else] : identifier[data] = identifier[eopatch] [ identifier[self] . identifier[feature_type] ][ identifier[self] . identifier[feature_name] ] identifier[file_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] ) identifier[os] . identifier[makedirs] ( identifier[file_dir] , identifier[exist_ok] = keyword[True] ) keyword[if] identifier[self] . identifier[compress_level] : identifier[file_handle] = identifier[gzip] . identifier[GzipFile] ( identifier[filename] , literal[string] , identifier[self] . identifier[compress_level] ) keyword[else] : identifier[file_handle] = identifier[open] ( identifier[filename] , literal[string] ) keyword[with] identifier[file_handle] keyword[as] identifier[outfile] : identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[str] ( identifier[self] . identifier[feature_type] ), identifier[str] ( identifier[self] . identifier[feature_name] ), identifier[filename] ) keyword[if] identifier[self] . identifier[file_format] keyword[is] identifier[FileFormat] . identifier[NPY] : identifier[np] . 
identifier[save] ( identifier[outfile] , identifier[data] ) keyword[elif] identifier[self] . identifier[file_format] keyword[is] identifier[FileFormat] . identifier[PICKLE] : identifier[pickle] . identifier[dump] ( identifier[data] , identifier[outfile] ) keyword[else] : identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[filename] , identifier[self] . identifier[file_format] ))
def save(self, eopatch, use_tmp=True): """ Method which does the saving :param eopatch: EOPatch containing the data which will be saved :type eopatch: EOPatch :param use_tmp: If `True` data will be saved to temporary file, otherwise it will be saved to intended (i.e. final) location :type use_tmp: bool """ filename = self.tmp_filename if use_tmp else self.final_filename if self.feature_name is None: data = eopatch[self.feature_type] if self.feature_type.has_dict(): data = data.get_dict() # depends on [control=['if'], data=[]] if self.feature_type is FeatureType.BBOX: data = tuple(data) + (int(data.crs.value),) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: data = eopatch[self.feature_type][self.feature_name] file_dir = os.path.dirname(filename) os.makedirs(file_dir, exist_ok=True) if self.compress_level: file_handle = gzip.GzipFile(filename, 'w', self.compress_level) # depends on [control=['if'], data=[]] else: file_handle = open(filename, 'wb') with file_handle as outfile: LOGGER.debug('Saving (%s, %s) to %s', str(self.feature_type), str(self.feature_name), filename) if self.file_format is FileFormat.NPY: np.save(outfile, data) # depends on [control=['if'], data=[]] elif self.file_format is FileFormat.PICKLE: pickle.dump(data, outfile) # depends on [control=['if'], data=[]] else: ValueError('File {} was not saved because saving in file format {} is currently not supported'.format(filename, self.file_format)) # depends on [control=['with'], data=['outfile']]
def _get_obj_ct(self, obj): """ Look up and return object's content type and cache for reuse """ if not hasattr(obj, '_wfct'): # Use polymorpic content type if available if hasattr(obj, 'polymorphic_ctype'): obj._wfct = obj.polymorphic_ctype else: obj._wfct = ContentType.objects.get_for_model(obj) return obj._wfct
def function[_get_obj_ct, parameter[self, obj]]: constant[ Look up and return object's content type and cache for reuse ] if <ast.UnaryOp object at 0x7da204564970> begin[:] if call[name[hasattr], parameter[name[obj], constant[polymorphic_ctype]]] begin[:] name[obj]._wfct assign[=] name[obj].polymorphic_ctype return[name[obj]._wfct]
keyword[def] identifier[_get_obj_ct] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , literal[string] ): keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ): identifier[obj] . identifier[_wfct] = identifier[obj] . identifier[polymorphic_ctype] keyword[else] : identifier[obj] . identifier[_wfct] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[obj] ) keyword[return] identifier[obj] . identifier[_wfct]
def _get_obj_ct(self, obj): """ Look up and return object's content type and cache for reuse """ if not hasattr(obj, '_wfct'): # Use polymorpic content type if available if hasattr(obj, 'polymorphic_ctype'): obj._wfct = obj.polymorphic_ctype # depends on [control=['if'], data=[]] else: obj._wfct = ContentType.objects.get_for_model(obj) # depends on [control=['if'], data=[]] return obj._wfct
async def create_checkpoint_if_not_exists_async(self, partition_id): """ Create the given partition checkpoint if it doesn't exist.Do nothing if it does exist. The offset/sequenceNumber for a freshly-created checkpoint should be set to StartOfStream/0. :param partition_id: The partition ID. :type partition_id: str :return: The checkpoint for the given partition, whether newly created or already existing. :rtype: ~azure.eventprocessorhost.checkpoint.Checkpoint """ checkpoint = await self.get_checkpoint_async(partition_id) if not checkpoint: await self.create_lease_if_not_exists_async(partition_id) checkpoint = Checkpoint(partition_id) return checkpoint
<ast.AsyncFunctionDef object at 0x7da20c991fc0>
keyword[async] keyword[def] identifier[create_checkpoint_if_not_exists_async] ( identifier[self] , identifier[partition_id] ): literal[string] identifier[checkpoint] = keyword[await] identifier[self] . identifier[get_checkpoint_async] ( identifier[partition_id] ) keyword[if] keyword[not] identifier[checkpoint] : keyword[await] identifier[self] . identifier[create_lease_if_not_exists_async] ( identifier[partition_id] ) identifier[checkpoint] = identifier[Checkpoint] ( identifier[partition_id] ) keyword[return] identifier[checkpoint]
async def create_checkpoint_if_not_exists_async(self, partition_id): """ Create the given partition checkpoint if it doesn't exist.Do nothing if it does exist. The offset/sequenceNumber for a freshly-created checkpoint should be set to StartOfStream/0. :param partition_id: The partition ID. :type partition_id: str :return: The checkpoint for the given partition, whether newly created or already existing. :rtype: ~azure.eventprocessorhost.checkpoint.Checkpoint """ checkpoint = await self.get_checkpoint_async(partition_id) if not checkpoint: await self.create_lease_if_not_exists_async(partition_id) checkpoint = Checkpoint(partition_id) # depends on [control=['if'], data=[]] return checkpoint
def relabel(self, qubits: Qubits) -> 'Density': """Return a copy of this state with new qubits""" return Density(self.vec.tensor, qubits, self._memory)
def function[relabel, parameter[self, qubits]]: constant[Return a copy of this state with new qubits] return[call[name[Density], parameter[name[self].vec.tensor, name[qubits], name[self]._memory]]]
keyword[def] identifier[relabel] ( identifier[self] , identifier[qubits] : identifier[Qubits] )-> literal[string] : literal[string] keyword[return] identifier[Density] ( identifier[self] . identifier[vec] . identifier[tensor] , identifier[qubits] , identifier[self] . identifier[_memory] )
def relabel(self, qubits: Qubits) -> 'Density': """Return a copy of this state with new qubits""" return Density(self.vec.tensor, qubits, self._memory)
def _build_hline(self, is_header=False): """Return a string used to separated rows or separate header from rows """ horiz = self._char_horiz if (is_header): horiz = self._char_header # compute cell separator s = "%s%s%s" % (horiz, [horiz, self._char_corner][self._has_vlines()], horiz) # build the line l = s.join([horiz * n for n in self._width]) # add border if needed if self._has_border(): l = "%s%s%s%s%s\n" % (self._char_corner, horiz, l, horiz, self._char_corner) else: l += "\n" return l
def function[_build_hline, parameter[self, is_header]]: constant[Return a string used to separated rows or separate header from rows ] variable[horiz] assign[=] name[self]._char_horiz if name[is_header] begin[:] variable[horiz] assign[=] name[self]._char_header variable[s] assign[=] binary_operation[constant[%s%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede6500>, <ast.Subscript object at 0x7da18ede6890>, <ast.Name object at 0x7da18ede6bc0>]]] variable[l] assign[=] call[name[s].join, parameter[<ast.ListComp object at 0x7da18ede4af0>]] if call[name[self]._has_border, parameter[]] begin[:] variable[l] assign[=] binary_operation[constant[%s%s%s%s%s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f09d990>, <ast.Name object at 0x7da18f09cee0>, <ast.Name object at 0x7da18f09e5c0>, <ast.Name object at 0x7da18f09db40>, <ast.Attribute object at 0x7da18f09d6f0>]]] return[name[l]]
keyword[def] identifier[_build_hline] ( identifier[self] , identifier[is_header] = keyword[False] ): literal[string] identifier[horiz] = identifier[self] . identifier[_char_horiz] keyword[if] ( identifier[is_header] ): identifier[horiz] = identifier[self] . identifier[_char_header] identifier[s] = literal[string] %( identifier[horiz] ,[ identifier[horiz] , identifier[self] . identifier[_char_corner] ][ identifier[self] . identifier[_has_vlines] ()], identifier[horiz] ) identifier[l] = identifier[s] . identifier[join] ([ identifier[horiz] * identifier[n] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[_width] ]) keyword[if] identifier[self] . identifier[_has_border] (): identifier[l] = literal[string] %( identifier[self] . identifier[_char_corner] , identifier[horiz] , identifier[l] , identifier[horiz] , identifier[self] . identifier[_char_corner] ) keyword[else] : identifier[l] += literal[string] keyword[return] identifier[l]
def _build_hline(self, is_header=False): """Return a string used to separated rows or separate header from rows """ horiz = self._char_horiz if is_header: horiz = self._char_header # depends on [control=['if'], data=[]] # compute cell separator s = '%s%s%s' % (horiz, [horiz, self._char_corner][self._has_vlines()], horiz) # build the line l = s.join([horiz * n for n in self._width]) # add border if needed if self._has_border(): l = '%s%s%s%s%s\n' % (self._char_corner, horiz, l, horiz, self._char_corner) # depends on [control=['if'], data=[]] else: l += '\n' return l
def cancel(self): '''Cancel any request.''' if self._body: self._body._cancel = True else: self._cancel = True
def function[cancel, parameter[self]]: constant[Cancel any request.] if name[self]._body begin[:] name[self]._body._cancel assign[=] constant[True]
keyword[def] identifier[cancel] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_body] : identifier[self] . identifier[_body] . identifier[_cancel] = keyword[True] keyword[else] : identifier[self] . identifier[_cancel] = keyword[True]
def cancel(self): """Cancel any request.""" if self._body: self._body._cancel = True # depends on [control=['if'], data=[]] else: self._cancel = True
def is_model_admin_subclass(node): """Checks that node is derivative of ModelAdmin class.""" if node.name[-5:] != 'Admin' or isinstance(node.parent, ClassDef): return False return node_is_subclass(node, 'django.contrib.admin.options.ModelAdmin')
def function[is_model_admin_subclass, parameter[node]]: constant[Checks that node is derivative of ModelAdmin class.] if <ast.BoolOp object at 0x7da18bccbbe0> begin[:] return[constant[False]] return[call[name[node_is_subclass], parameter[name[node], constant[django.contrib.admin.options.ModelAdmin]]]]
keyword[def] identifier[is_model_admin_subclass] ( identifier[node] ): literal[string] keyword[if] identifier[node] . identifier[name] [- literal[int] :]!= literal[string] keyword[or] identifier[isinstance] ( identifier[node] . identifier[parent] , identifier[ClassDef] ): keyword[return] keyword[False] keyword[return] identifier[node_is_subclass] ( identifier[node] , literal[string] )
def is_model_admin_subclass(node): """Checks that node is derivative of ModelAdmin class.""" if node.name[-5:] != 'Admin' or isinstance(node.parent, ClassDef): return False # depends on [control=['if'], data=[]] return node_is_subclass(node, 'django.contrib.admin.options.ModelAdmin')
def profileFromPNG(inp): """ Extract profile from PNG file. Return (*profile*, *name*) pair. """ r = png.Reader(file=inp) _, chunk = r.chunk('iCCP') i = chunk.index(b'\x00') name = chunk[: i] compression = chunk[i + 1] assert compression == 0 profile = zlib.decompress(chunk[i + 2:]) return profile, name
def function[profileFromPNG, parameter[inp]]: constant[ Extract profile from PNG file. Return (*profile*, *name*) pair. ] variable[r] assign[=] call[name[png].Reader, parameter[]] <ast.Tuple object at 0x7da1b050bcd0> assign[=] call[name[r].chunk, parameter[constant[iCCP]]] variable[i] assign[=] call[name[chunk].index, parameter[constant[b'\x00']]] variable[name] assign[=] call[name[chunk]][<ast.Slice object at 0x7da1b0508880>] variable[compression] assign[=] call[name[chunk]][binary_operation[name[i] + constant[1]]] assert[compare[name[compression] equal[==] constant[0]]] variable[profile] assign[=] call[name[zlib].decompress, parameter[call[name[chunk]][<ast.Slice object at 0x7da1b050b820>]]] return[tuple[[<ast.Name object at 0x7da1b0508f10>, <ast.Name object at 0x7da1b0508c40>]]]
keyword[def] identifier[profileFromPNG] ( identifier[inp] ): literal[string] identifier[r] = identifier[png] . identifier[Reader] ( identifier[file] = identifier[inp] ) identifier[_] , identifier[chunk] = identifier[r] . identifier[chunk] ( literal[string] ) identifier[i] = identifier[chunk] . identifier[index] ( literal[string] ) identifier[name] = identifier[chunk] [: identifier[i] ] identifier[compression] = identifier[chunk] [ identifier[i] + literal[int] ] keyword[assert] identifier[compression] == literal[int] identifier[profile] = identifier[zlib] . identifier[decompress] ( identifier[chunk] [ identifier[i] + literal[int] :]) keyword[return] identifier[profile] , identifier[name]
def profileFromPNG(inp): """ Extract profile from PNG file. Return (*profile*, *name*) pair. """ r = png.Reader(file=inp) (_, chunk) = r.chunk('iCCP') i = chunk.index(b'\x00') name = chunk[:i] compression = chunk[i + 1] assert compression == 0 profile = zlib.decompress(chunk[i + 2:]) return (profile, name)
def glitter_startbody(context): """ Template tag which renders the glitter overlay and sidebar. This is only shown to users with permission to edit the page. """ user = context.get('user') path_body = 'glitter/include/startbody.html' path_plus = 'glitter/include/startbody_%s_%s.html' rendered = '' if user is not None and user.is_staff: templates = [path_body] # We've got a page with a glitter object: # - May need a different startbody template # - Check if user has permission to add glitter = context.get('glitter') if glitter is not None: opts = glitter.obj._meta.app_label, glitter.obj._meta.model_name template_path = path_plus % opts templates.insert(0, template_path) template = context.template.engine.select_template(templates) rendered = template.render(context) return rendered
def function[glitter_startbody, parameter[context]]: constant[ Template tag which renders the glitter overlay and sidebar. This is only shown to users with permission to edit the page. ] variable[user] assign[=] call[name[context].get, parameter[constant[user]]] variable[path_body] assign[=] constant[glitter/include/startbody.html] variable[path_plus] assign[=] constant[glitter/include/startbody_%s_%s.html] variable[rendered] assign[=] constant[] if <ast.BoolOp object at 0x7da1b1121990> begin[:] variable[templates] assign[=] list[[<ast.Name object at 0x7da1b1120460>]] variable[glitter] assign[=] call[name[context].get, parameter[constant[glitter]]] if compare[name[glitter] is_not constant[None]] begin[:] variable[opts] assign[=] tuple[[<ast.Attribute object at 0x7da1b1120f10>, <ast.Attribute object at 0x7da1b1121b10>]] variable[template_path] assign[=] binary_operation[name[path_plus] <ast.Mod object at 0x7da2590d6920> name[opts]] call[name[templates].insert, parameter[constant[0], name[template_path]]] variable[template] assign[=] call[name[context].template.engine.select_template, parameter[name[templates]]] variable[rendered] assign[=] call[name[template].render, parameter[name[context]]] return[name[rendered]]
keyword[def] identifier[glitter_startbody] ( identifier[context] ): literal[string] identifier[user] = identifier[context] . identifier[get] ( literal[string] ) identifier[path_body] = literal[string] identifier[path_plus] = literal[string] identifier[rendered] = literal[string] keyword[if] identifier[user] keyword[is] keyword[not] keyword[None] keyword[and] identifier[user] . identifier[is_staff] : identifier[templates] =[ identifier[path_body] ] identifier[glitter] = identifier[context] . identifier[get] ( literal[string] ) keyword[if] identifier[glitter] keyword[is] keyword[not] keyword[None] : identifier[opts] = identifier[glitter] . identifier[obj] . identifier[_meta] . identifier[app_label] , identifier[glitter] . identifier[obj] . identifier[_meta] . identifier[model_name] identifier[template_path] = identifier[path_plus] % identifier[opts] identifier[templates] . identifier[insert] ( literal[int] , identifier[template_path] ) identifier[template] = identifier[context] . identifier[template] . identifier[engine] . identifier[select_template] ( identifier[templates] ) identifier[rendered] = identifier[template] . identifier[render] ( identifier[context] ) keyword[return] identifier[rendered]
def glitter_startbody(context): """ Template tag which renders the glitter overlay and sidebar. This is only shown to users with permission to edit the page. """ user = context.get('user') path_body = 'glitter/include/startbody.html' path_plus = 'glitter/include/startbody_%s_%s.html' rendered = '' if user is not None and user.is_staff: templates = [path_body] # We've got a page with a glitter object: # - May need a different startbody template # - Check if user has permission to add glitter = context.get('glitter') if glitter is not None: opts = (glitter.obj._meta.app_label, glitter.obj._meta.model_name) template_path = path_plus % opts templates.insert(0, template_path) # depends on [control=['if'], data=['glitter']] template = context.template.engine.select_template(templates) rendered = template.render(context) # depends on [control=['if'], data=[]] return rendered
def convert_tokens_to_ids(self, tokens): """Converts a sequence of tokens into ids using the vocab.""" ids = [] for token in tokens: ids.append(self.vocab[token]) if len(ids) > self.max_len: logger.warning( "Token indices sequence length is longer than the specified maximum " " sequence length for this BERT model ({} > {}). Running this" " sequence through BERT will result in indexing errors".format(len(ids), self.max_len) ) return ids
def function[convert_tokens_to_ids, parameter[self, tokens]]: constant[Converts a sequence of tokens into ids using the vocab.] variable[ids] assign[=] list[[]] for taget[name[token]] in starred[name[tokens]] begin[:] call[name[ids].append, parameter[call[name[self].vocab][name[token]]]] if compare[call[name[len], parameter[name[ids]]] greater[>] name[self].max_len] begin[:] call[name[logger].warning, parameter[call[constant[Token indices sequence length is longer than the specified maximum sequence length for this BERT model ({} > {}). Running this sequence through BERT will result in indexing errors].format, parameter[call[name[len], parameter[name[ids]]], name[self].max_len]]]] return[name[ids]]
keyword[def] identifier[convert_tokens_to_ids] ( identifier[self] , identifier[tokens] ): literal[string] identifier[ids] =[] keyword[for] identifier[token] keyword[in] identifier[tokens] : identifier[ids] . identifier[append] ( identifier[self] . identifier[vocab] [ identifier[token] ]) keyword[if] identifier[len] ( identifier[ids] )> identifier[self] . identifier[max_len] : identifier[logger] . identifier[warning] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[len] ( identifier[ids] ), identifier[self] . identifier[max_len] ) ) keyword[return] identifier[ids]
def convert_tokens_to_ids(self, tokens): """Converts a sequence of tokens into ids using the vocab.""" ids = [] for token in tokens: ids.append(self.vocab[token]) # depends on [control=['for'], data=['token']] if len(ids) > self.max_len: logger.warning('Token indices sequence length is longer than the specified maximum sequence length for this BERT model ({} > {}). Running this sequence through BERT will result in indexing errors'.format(len(ids), self.max_len)) # depends on [control=['if'], data=[]] return ids