Dataset columns (all strings; min and max value lengths as reported): code (75 to 104k), code_sememe (47 to 309k), token_type (215 to 214k), code_dependency (75 to 155k). Each row below gives a Python function in four parallel forms: the raw source (code), an AST-style sememe rendering (code_sememe), a token-type abstraction (token_type), and a control-dependency-annotated rewrite (code_dependency).
def region_est_ellipsoid(self, level=0.95, tol=0.0001, modelparam_slice=None): r""" Estimates a credible region over models by finding the minimum volume enclosing ellipsoid (MVEE) of a credible subset of particles. :param float level: The desired credibility level (see :meth:`SMCUpdater.est_credible_region`). :param float tol: The allowed error tolerance in the MVEE optimization (see :meth:`~qinfer.utils.mvee`). :param slice modelparam_slice: Slice over which model parameters to consider. :return: A tuple ``(A, c)`` where ``A`` is the covariance matrix of the ellipsoid and ``c`` is the center. A point :math:`\vec{x}` is in the ellipsoid whenever :math:`(\vec{x}-\vec{c})^{T}A^{-1}(\vec{x}-\vec{c})\leq 1`. :rtype: ``A`` is ``np.ndarray`` of shape ``(n_mps,n_mps)`` and ``centroid`` is ``np.ndarray`` of shape ``(n_mps)``. ``n_mps`` corresponds to the size of ``param_slice``. """ _, vertices = self.region_est_hull(level=level, modelparam_slice=modelparam_slice) A, centroid = u.mvee(vertices, tol) return A, centroid
def function[region_est_ellipsoid, parameter[self, level, tol, modelparam_slice]]: constant[ Estimates a credible region over models by finding the minimum volume enclosing ellipsoid (MVEE) of a credible subset of particles. :param float level: The desired credibility level (see :meth:`SMCUpdater.est_credible_region`). :param float tol: The allowed error tolerance in the MVEE optimization (see :meth:`~qinfer.utils.mvee`). :param slice modelparam_slice: Slice over which model parameters to consider. :return: A tuple ``(A, c)`` where ``A`` is the covariance matrix of the ellipsoid and ``c`` is the center. A point :math:`\vec{x}` is in the ellipsoid whenever :math:`(\vec{x}-\vec{c})^{T}A^{-1}(\vec{x}-\vec{c})\leq 1`. :rtype: ``A`` is ``np.ndarray`` of shape ``(n_mps,n_mps)`` and ``centroid`` is ``np.ndarray`` of shape ``(n_mps)``. ``n_mps`` corresponds to the size of ``param_slice``. ] <ast.Tuple object at 0x7da20e955f90> assign[=] call[name[self].region_est_hull, parameter[]] <ast.Tuple object at 0x7da20e955e40> assign[=] call[name[u].mvee, parameter[name[vertices], name[tol]]] return[tuple[[<ast.Name object at 0x7da20c6c7700>, <ast.Name object at 0x7da20c6c5ed0>]]]
keyword[def] identifier[region_est_ellipsoid] ( identifier[self] , identifier[level] = literal[int] , identifier[tol] = literal[int] , identifier[modelparam_slice] = keyword[None] ): literal[string] identifier[_] , identifier[vertices] = identifier[self] . identifier[region_est_hull] ( identifier[level] = identifier[level] , identifier[modelparam_slice] = identifier[modelparam_slice] ) identifier[A] , identifier[centroid] = identifier[u] . identifier[mvee] ( identifier[vertices] , identifier[tol] ) keyword[return] identifier[A] , identifier[centroid]
def region_est_ellipsoid(self, level=0.95, tol=0.0001, modelparam_slice=None): """ Estimates a credible region over models by finding the minimum volume enclosing ellipsoid (MVEE) of a credible subset of particles. :param float level: The desired credibility level (see :meth:`SMCUpdater.est_credible_region`). :param float tol: The allowed error tolerance in the MVEE optimization (see :meth:`~qinfer.utils.mvee`). :param slice modelparam_slice: Slice over which model parameters to consider. :return: A tuple ``(A, c)`` where ``A`` is the covariance matrix of the ellipsoid and ``c`` is the center. A point :math:`\\vec{x}` is in the ellipsoid whenever :math:`(\\vec{x}-\\vec{c})^{T}A^{-1}(\\vec{x}-\\vec{c})\\leq 1`. :rtype: ``A`` is ``np.ndarray`` of shape ``(n_mps,n_mps)`` and ``centroid`` is ``np.ndarray`` of shape ``(n_mps)``. ``n_mps`` corresponds to the size of ``param_slice``. """ (_, vertices) = self.region_est_hull(level=level, modelparam_slice=modelparam_slice) (A, centroid) = u.mvee(vertices, tol) return (A, centroid)
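The row above returns an ellipsoid as a pair (A, c), and the docstring gives the membership test (x - c)^T A^{-1} (x - c) <= 1. A minimal NumPy sketch of that check follows; the matrix A, center c, and test points are made-up illustrative values, not outputs of region_est_ellipsoid:

    import numpy as np

    # Hypothetical ellipsoid standing in for the (A, centroid) pair
    # that region_est_ellipsoid returns.
    A = np.array([[2.0, 0.3], [0.3, 1.0]])
    c = np.array([0.5, -0.2])

    def in_ellipsoid(x, A, c):
        # Membership test from the docstring: (x-c)^T A^{-1} (x-c) <= 1.
        d = x - c
        return float(d @ np.linalg.solve(A, d)) <= 1.0

    print(in_ellipsoid(np.array([0.6, 0.0]), A, c))  # near the center -> True
    print(in_ellipsoid(np.array([5.0, 5.0]), A, c))  # far outside -> False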
def add_wf(self,wf_obj): """ Adds a token object to the text layer @type wf_obj: L{Cwf} @param wf_obj: token object """ if wf_obj.get_id() in self.idx: raise ValueError("Text node (wf) with id {} already exists!" .format(wf_obj.get_id())) self.node.append(wf_obj.get_node()) self.idx[wf_obj.get_id()] = wf_obj
def function[add_wf, parameter[self, wf_obj]]: constant[ Adds a token object to the text layer @type wf_obj: L{Cwf} @param wf_obj: token object ] if compare[call[name[wf_obj].get_id, parameter[]] in name[self].idx] begin[:] <ast.Raise object at 0x7da1b2585bd0> call[name[self].node.append, parameter[call[name[wf_obj].get_node, parameter[]]]] call[name[self].idx][call[name[wf_obj].get_id, parameter[]]] assign[=] name[wf_obj]
keyword[def] identifier[add_wf] ( identifier[self] , identifier[wf_obj] ): literal[string] keyword[if] identifier[wf_obj] . identifier[get_id] () keyword[in] identifier[self] . identifier[idx] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[wf_obj] . identifier[get_id] ())) identifier[self] . identifier[node] . identifier[append] ( identifier[wf_obj] . identifier[get_node] ()) identifier[self] . identifier[idx] [ identifier[wf_obj] . identifier[get_id] ()]= identifier[wf_obj]
def add_wf(self, wf_obj): """ Adds a token object to the text layer @type wf_obj: L{Cwf} @param wf_obj: token object """ if wf_obj.get_id() in self.idx: raise ValueError('Text node (wf) with id {} already exists!'.format(wf_obj.get_id())) # depends on [control=['if'], data=[]] self.node.append(wf_obj.get_node()) self.idx[wf_obj.get_id()] = wf_obj
def get_version_text(self): """Return the version information from Unix host.""" version_text = None try: version_text = self.device.send('uname -sr', timeout=10) except CommandError as exc: exc.command = 'show version' raise exc return version_text
def function[get_version_text, parameter[self]]: constant[Return the version information from Unix host.] variable[version_text] assign[=] constant[None] <ast.Try object at 0x7da1b25ef220> return[name[version_text]]
keyword[def] identifier[get_version_text] ( identifier[self] ): literal[string] identifier[version_text] = keyword[None] keyword[try] : identifier[version_text] = identifier[self] . identifier[device] . identifier[send] ( literal[string] , identifier[timeout] = literal[int] ) keyword[except] identifier[CommandError] keyword[as] identifier[exc] : identifier[exc] . identifier[command] = literal[string] keyword[raise] identifier[exc] keyword[return] identifier[version_text]
def get_version_text(self): """Return the version information from Unix host.""" version_text = None try: version_text = self.device.send('uname -sr', timeout=10) # depends on [control=['try'], data=[]] except CommandError as exc: exc.command = 'show version' raise exc # depends on [control=['except'], data=['exc']] return version_text
def get_last_modified_date(filename): """ Get the last modified date of a given file :param filename: string: pathname of a file :return: Date """ if os.path.isfile(filename): t = os.path.getmtime(filename) return datetime.date.fromtimestamp(t).strftime('%d/%m/%Y') return None
def function[get_last_modified_date, parameter[filename]]: constant[ Get the last modified date of a given file :param filename: string: pathname of a file :return: Date ] if call[name[os].path.isfile, parameter[name[filename]]] begin[:] variable[t] assign[=] call[name[os].path.getmtime, parameter[name[filename]]] return[call[call[name[datetime].date.fromtimestamp, parameter[name[t]]].strftime, parameter[constant[%d/%m/%Y]]]] return[constant[None]]
keyword[def] identifier[get_last_modified_date] ( identifier[filename] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ): identifier[t] = identifier[os] . identifier[path] . identifier[getmtime] ( identifier[filename] ) keyword[return] identifier[datetime] . identifier[date] . identifier[fromtimestamp] ( identifier[t] ). identifier[strftime] ( literal[string] ) keyword[return] keyword[None]
def get_last_modified_date(filename): """ Get the last modified date of a given file :param filename: string: pathname of a file :return: Date """ if os.path.isfile(filename): t = os.path.getmtime(filename) return datetime.date.fromtimestamp(t).strftime('%d/%m/%Y') # depends on [control=['if'], data=[]] return None
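A self-contained usage sketch for the row above; the paths are illustrative placeholders:

    import datetime
    import os

    def get_last_modified_date(filename):
        # As in the row above: mtime formatted as dd/mm/YYYY, or None if missing.
        if os.path.isfile(filename):
            t = os.path.getmtime(filename)
            return datetime.date.fromtimestamp(t).strftime('%d/%m/%Y')
        return None

    print(get_last_modified_date('/etc/hosts'))     # e.g. '17/03/2024' on most Unix systems
    print(get_last_modified_date('/no/such/file'))  # None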
def plot_samples(self, plot_limits=None, fixed_inputs=None, resolution=None, plot_raw=True, apply_link=False, visible_dims=None, which_data_ycols='all', samples=3, projection='2d', label='gp_samples', predict_kw=None, **kwargs): """ Plot the mean of the GP. You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits :type plot_limits: np.array :param fixed_inputs: a list of tuples [(i,v), (i,v)...], specifying that input dimension i should be set to value v. :type fixed_inputs: a list of tuples :param int resolution: The resolution of the prediction [defaults are 1D:200, 2D:50] :param bool plot_raw: plot the latent function (usually denoted f) only? This is usually what you want! :param bool apply_link: whether to apply the link function of the GP to the raw prediction. :param array-like visible_dims: which columns of the input X (!) to plot (array-like or list of ints) :param array-like which_data_ycols: which columns of y to plot (array-like or list of ints) :param dict predict_kw: the keyword arguments for the prediction. If you want to plot a specific kernel give dict(kern=<specific kernel>) in here :param int levels: for 2D plotting, the number of contour levels to use """ canvas, kwargs = pl().new_canvas(projection=projection, **kwargs) ycols = get_which_data_ycols(self, which_data_ycols) X = get_x_y_var(self)[0] helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, None, ycols, predict_kw, samples) plots = _plot_samples(self, canvas, helper_data, helper_prediction, projection, label, **kwargs) return pl().add_to_canvas(canvas, plots)
def function[plot_samples, parameter[self, plot_limits, fixed_inputs, resolution, plot_raw, apply_link, visible_dims, which_data_ycols, samples, projection, label, predict_kw]]: constant[ Plot the mean of the GP. You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits :type plot_limits: np.array :param fixed_inputs: a list of tuples [(i,v), (i,v)...], specifying that input dimension i should be set to value v. :type fixed_inputs: a list of tuples :param int resolution: The resolution of the prediction [defaults are 1D:200, 2D:50] :param bool plot_raw: plot the latent function (usually denoted f) only? This is usually what you want! :param bool apply_link: whether to apply the link function of the GP to the raw prediction. :param array-like visible_dims: which columns of the input X (!) to plot (array-like or list of ints) :param array-like which_data_ycols: which columns of y to plot (array-like or list of ints) :param dict predict_kw: the keyword arguments for the prediction. If you want to plot a specific kernel give dict(kern=<specific kernel>) in here :param int levels: for 2D plotting, the number of contour levels to use ] <ast.Tuple object at 0x7da1b26aead0> assign[=] call[call[name[pl], parameter[]].new_canvas, parameter[]] variable[ycols] assign[=] call[name[get_which_data_ycols], parameter[name[self], name[which_data_ycols]]] variable[X] assign[=] call[call[name[get_x_y_var], parameter[name[self]]]][constant[0]] variable[helper_data] assign[=] call[name[helper_for_plot_data], parameter[name[self], name[X], name[plot_limits], name[visible_dims], name[fixed_inputs], name[resolution]]] variable[helper_prediction] assign[=] call[name[helper_predict_with_model], parameter[name[self], call[name[helper_data]][constant[2]], name[plot_raw], name[apply_link], constant[None], name[ycols], name[predict_kw], name[samples]]] variable[plots] assign[=] call[name[_plot_samples], parameter[name[self], name[canvas], name[helper_data], name[helper_prediction], name[projection], name[label]]] return[call[call[name[pl], parameter[]].add_to_canvas, parameter[name[canvas], name[plots]]]]
keyword[def] identifier[plot_samples] ( identifier[self] , identifier[plot_limits] = keyword[None] , identifier[fixed_inputs] = keyword[None] , identifier[resolution] = keyword[None] , identifier[plot_raw] = keyword[True] , identifier[apply_link] = keyword[False] , identifier[visible_dims] = keyword[None] , identifier[which_data_ycols] = literal[string] , identifier[samples] = literal[int] , identifier[projection] = literal[string] , identifier[label] = literal[string] , identifier[predict_kw] = keyword[None] , ** identifier[kwargs] ): literal[string] identifier[canvas] , identifier[kwargs] = identifier[pl] (). identifier[new_canvas] ( identifier[projection] = identifier[projection] ,** identifier[kwargs] ) identifier[ycols] = identifier[get_which_data_ycols] ( identifier[self] , identifier[which_data_ycols] ) identifier[X] = identifier[get_x_y_var] ( identifier[self] )[ literal[int] ] identifier[helper_data] = identifier[helper_for_plot_data] ( identifier[self] , identifier[X] , identifier[plot_limits] , identifier[visible_dims] , identifier[fixed_inputs] , identifier[resolution] ) identifier[helper_prediction] = identifier[helper_predict_with_model] ( identifier[self] , identifier[helper_data] [ literal[int] ], identifier[plot_raw] , identifier[apply_link] , keyword[None] , identifier[ycols] , identifier[predict_kw] , identifier[samples] ) identifier[plots] = identifier[_plot_samples] ( identifier[self] , identifier[canvas] , identifier[helper_data] , identifier[helper_prediction] , identifier[projection] , identifier[label] ,** identifier[kwargs] ) keyword[return] identifier[pl] (). identifier[add_to_canvas] ( identifier[canvas] , identifier[plots] )
def plot_samples(self, plot_limits=None, fixed_inputs=None, resolution=None, plot_raw=True, apply_link=False, visible_dims=None, which_data_ycols='all', samples=3, projection='2d', label='gp_samples', predict_kw=None, **kwargs): """ Plot the mean of the GP. You can deactivate the legend for this one plot by supplying None to label. Give the Y_metadata in the predict_kw if you need it. :param plot_limits: The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits :type plot_limits: np.array :param fixed_inputs: a list of tuples [(i,v), (i,v)...], specifying that input dimension i should be set to value v. :type fixed_inputs: a list of tuples :param int resolution: The resolution of the prediction [defaults are 1D:200, 2D:50] :param bool plot_raw: plot the latent function (usually denoted f) only? This is usually what you want! :param bool apply_link: whether to apply the link function of the GP to the raw prediction. :param array-like visible_dims: which columns of the input X (!) to plot (array-like or list of ints) :param array-like which_data_ycols: which columns of y to plot (array-like or list of ints) :param dict predict_kw: the keyword arguments for the prediction. If you want to plot a specific kernel give dict(kern=<specific kernel>) in here :param int levels: for 2D plotting, the number of contour levels to use """ (canvas, kwargs) = pl().new_canvas(projection=projection, **kwargs) ycols = get_which_data_ycols(self, which_data_ycols) X = get_x_y_var(self)[0] helper_data = helper_for_plot_data(self, X, plot_limits, visible_dims, fixed_inputs, resolution) helper_prediction = helper_predict_with_model(self, helper_data[2], plot_raw, apply_link, None, ycols, predict_kw, samples) plots = _plot_samples(self, canvas, helper_data, helper_prediction, projection, label, **kwargs) return pl().add_to_canvas(canvas, plots)
def search_function(cls, encoding): """Search function to find 'rotunicode' codec.""" if encoding == cls._codec_name: return codecs.CodecInfo( name=cls._codec_name, encode=cls.encode, decode=cls.decode, ) return None
def function[search_function, parameter[cls, encoding]]: constant[Search function to find 'rotunicode' codec.] if compare[name[encoding] equal[==] name[cls]._codec_name] begin[:] return[call[name[codecs].CodecInfo, parameter[]]] return[constant[None]]
keyword[def] identifier[search_function] ( identifier[cls] , identifier[encoding] ): literal[string] keyword[if] identifier[encoding] == identifier[cls] . identifier[_codec_name] : keyword[return] identifier[codecs] . identifier[CodecInfo] ( identifier[name] = identifier[cls] . identifier[_codec_name] , identifier[encode] = identifier[cls] . identifier[encode] , identifier[decode] = identifier[cls] . identifier[decode] , ) keyword[return] keyword[None]
def search_function(cls, encoding): """Search function to find 'rotunicode' codec.""" if encoding == cls._codec_name: return codecs.CodecInfo(name=cls._codec_name, encode=cls.encode, decode=cls.decode) # depends on [control=['if'], data=[]] return None
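The search function above follows the standard codecs registry protocol: given an encoding name, return a CodecInfo or None. A hedged, self-contained sketch of how such a function is typically registered; the RotUnicode class and its identity encode/decode placeholders are assumptions for illustration, not the library's real implementation:

    import codecs

    class RotUnicode:
        # Hypothetical host class for search_function; encode/decode are
        # identity placeholders, only the registry plumbing is shown.
        _codec_name = 'rotunicode'

        @staticmethod
        def encode(input, errors='strict'):
            return input, len(input)

        @staticmethod
        def decode(input, errors='strict'):
            return input, len(input)

        @classmethod
        def search_function(cls, encoding):
            if encoding == cls._codec_name:
                return codecs.CodecInfo(name=cls._codec_name,
                                        encode=cls.encode, decode=cls.decode)
            return None

    codecs.register(RotUnicode.search_function)
    print(codecs.lookup('rotunicode').name)  # 'rotunicode'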
def download_url(url, root, filename=None, md5=None): """Download a file from a url and place it in root. Args: url (str): URL to download file from root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the basename of the URL md5 (str, optional): MD5 checksum of the download. If None, do not check """ from six.moves import urllib root = os.path.expanduser(root) if not filename: filename = os.path.basename(url) fpath = os.path.join(root, filename) makedir_exist_ok(root) # downloads file if os.path.isfile(fpath) and check_integrity(fpath, md5): print('Using downloaded and verified file: ' + fpath) else: try: print('Downloading ' + url + ' to ' + fpath) urllib.request.urlretrieve( url, fpath, reporthook=gen_bar_updater() ) except OSError: if url[:5] == 'https': url = url.replace('https:', 'http:') print('Failed download. Trying https -> http instead.' ' Downloading ' + url + ' to ' + fpath) urllib.request.urlretrieve( url, fpath, reporthook=gen_bar_updater() )
def function[download_url, parameter[url, root, filename, md5]]: constant[Download a file from a url and place it in root. Args: url (str): URL to download file from root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the basename of the URL md5 (str, optional): MD5 checksum of the download. If None, do not check ] from relative_module[six.moves] import module[urllib] variable[root] assign[=] call[name[os].path.expanduser, parameter[name[root]]] if <ast.UnaryOp object at 0x7da1b03f93f0> begin[:] variable[filename] assign[=] call[name[os].path.basename, parameter[name[url]]] variable[fpath] assign[=] call[name[os].path.join, parameter[name[root], name[filename]]] call[name[makedir_exist_ok], parameter[name[root]]] if <ast.BoolOp object at 0x7da1b03f8130> begin[:] call[name[print], parameter[binary_operation[constant[Using downloaded and verified file: ] + name[fpath]]]]
keyword[def] identifier[download_url] ( identifier[url] , identifier[root] , identifier[filename] = keyword[None] , identifier[md5] = keyword[None] ): literal[string] keyword[from] identifier[six] . identifier[moves] keyword[import] identifier[urllib] identifier[root] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[root] ) keyword[if] keyword[not] identifier[filename] : identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[url] ) identifier[fpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[filename] ) identifier[makedir_exist_ok] ( identifier[root] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fpath] ) keyword[and] identifier[check_integrity] ( identifier[fpath] , identifier[md5] ): identifier[print] ( literal[string] + identifier[fpath] ) keyword[else] : keyword[try] : identifier[print] ( literal[string] + identifier[url] + literal[string] + identifier[fpath] ) identifier[urllib] . identifier[request] . identifier[urlretrieve] ( identifier[url] , identifier[fpath] , identifier[reporthook] = identifier[gen_bar_updater] () ) keyword[except] identifier[OSError] : keyword[if] identifier[url] [: literal[int] ]== literal[string] : identifier[url] = identifier[url] . identifier[replace] ( literal[string] , literal[string] ) identifier[print] ( literal[string] literal[string] + identifier[url] + literal[string] + identifier[fpath] ) identifier[urllib] . identifier[request] . identifier[urlretrieve] ( identifier[url] , identifier[fpath] , identifier[reporthook] = identifier[gen_bar_updater] () )
def download_url(url, root, filename=None, md5=None): """Download a file from a url and place it in root. Args: url (str): URL to download file from root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the basename of the URL md5 (str, optional): MD5 checksum of the download. If None, do not check """ from six.moves import urllib root = os.path.expanduser(root) if not filename: filename = os.path.basename(url) # depends on [control=['if'], data=[]] fpath = os.path.join(root, filename) makedir_exist_ok(root) # downloads file if os.path.isfile(fpath) and check_integrity(fpath, md5): print('Using downloaded and verified file: ' + fpath) # depends on [control=['if'], data=[]] else: try: print('Downloading ' + url + ' to ' + fpath) urllib.request.urlretrieve(url, fpath, reporthook=gen_bar_updater()) # depends on [control=['try'], data=[]] except OSError: if url[:5] == 'https': url = url.replace('https:', 'http:') print('Failed download. Trying https -> http instead. Downloading ' + url + ' to ' + fpath) urllib.request.urlretrieve(url, fpath, reporthook=gen_bar_updater()) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
def textForSaving(self): """Get text with correct EOL symbols. Use this method for saving a file to storage """ lines = self.text.splitlines() if self.text.endswith('\n'): # splitlines ignores last \n lines.append('') return self.eol.join(lines) + self.eol
def function[textForSaving, parameter[self]]: constant[Get text with correct EOL symbols. Use this method for saving a file to storage ] variable[lines] assign[=] call[name[self].text.splitlines, parameter[]] if call[name[self].text.endswith, parameter[constant[ ]]] begin[:] call[name[lines].append, parameter[constant[]]] return[binary_operation[call[name[self].eol.join, parameter[name[lines]]] + name[self].eol]]
keyword[def] identifier[textForSaving] ( identifier[self] ): literal[string] identifier[lines] = identifier[self] . identifier[text] . identifier[splitlines] () keyword[if] identifier[self] . identifier[text] . identifier[endswith] ( literal[string] ): identifier[lines] . identifier[append] ( literal[string] ) keyword[return] identifier[self] . identifier[eol] . identifier[join] ( identifier[lines] )+ identifier[self] . identifier[eol]
def textForSaving(self): """Get text with correct EOL symbols. Use this method for saving a file to storage """ lines = self.text.splitlines() if self.text.endswith('\n'): # splitlines ignores last \n lines.append('') # depends on [control=['if'], data=[]] return self.eol.join(lines) + self.eol
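A short worked trace of the EOL conversion in the row above, with illustrative values: splitlines() drops the final newline, which is why the function re-appends an empty entry when the text ends in one, and the trailing + eol guarantees the saved file ends with a line break:

    text, eol = 'a\nb', '\r\n'       # illustrative document text and target EOL

    lines = text.splitlines()        # ['a', 'b']
    if text.endswith('\n'):          # False here, so nothing is appended
        lines.append('')
    print(repr(eol.join(lines) + eol))  # 'a\r\nb\r\n' -- Windows EOLs, final break added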
def exec_command( client, container, command, interactive=True, stdout=None, stderr=None, stdin=None): """ Run provided command via exec API in provided container. This is just a wrapper for PseudoTerminal(client, container).exec_command() """ exec_id = exec_create(client, container, command, interactive=interactive) operation = ExecOperation(client, exec_id, interactive=interactive, stdout=stdout, stderr=stderr, stdin=stdin) PseudoTerminal(client, operation).start()
def function[exec_command, parameter[client, container, command, interactive, stdout, stderr, stdin]]: constant[ Run provided command via exec API in provided container. This is just a wrapper for PseudoTerminal(client, container).exec_command() ] variable[exec_id] assign[=] call[name[exec_create], parameter[name[client], name[container], name[command]]] variable[operation] assign[=] call[name[ExecOperation], parameter[name[client], name[exec_id]]] call[call[name[PseudoTerminal], parameter[name[client], name[operation]]].start, parameter[]]
keyword[def] identifier[exec_command] ( identifier[client] , identifier[container] , identifier[command] , identifier[interactive] = keyword[True] , identifier[stdout] = keyword[None] , identifier[stderr] = keyword[None] , identifier[stdin] = keyword[None] ): literal[string] identifier[exec_id] = identifier[exec_create] ( identifier[client] , identifier[container] , identifier[command] , identifier[interactive] = identifier[interactive] ) identifier[operation] = identifier[ExecOperation] ( identifier[client] , identifier[exec_id] , identifier[interactive] = identifier[interactive] , identifier[stdout] = identifier[stdout] , identifier[stderr] = identifier[stderr] , identifier[stdin] = identifier[stdin] ) identifier[PseudoTerminal] ( identifier[client] , identifier[operation] ). identifier[start] ()
def exec_command(client, container, command, interactive=True, stdout=None, stderr=None, stdin=None): """ Run provided command via exec API in provided container. This is just a wrapper for PseudoTerminal(client, container).exec_command() """ exec_id = exec_create(client, container, command, interactive=interactive) operation = ExecOperation(client, exec_id, interactive=interactive, stdout=stdout, stderr=stderr, stdin=stdin) PseudoTerminal(client, operation).start()
def get_long_description(): """Convert the README file into the long description. """ with open(path.join(root_path, 'README.md'), encoding='utf-8') as f: long_description = f.read() return long_description
def function[get_long_description, parameter[]]: constant[Convert the README file into the long description. ] with call[name[open], parameter[call[name[path].join, parameter[name[root_path], constant[README.md]]]]] begin[:] variable[long_description] assign[=] call[name[f].read, parameter[]] return[name[long_description]]
keyword[def] identifier[get_long_description] (): literal[string] keyword[with] identifier[open] ( identifier[path] . identifier[join] ( identifier[root_path] , literal[string] ), identifier[encoding] = literal[string] ) keyword[as] identifier[f] : identifier[long_description] = identifier[f] . identifier[read] () keyword[return] identifier[long_description]
def get_long_description(): """Convert the README file into the long description. """ with open(path.join(root_path, 'README.md'), encoding='utf-8') as f: long_description = f.read() # depends on [control=['with'], data=['f']] return long_description
def page(self, status=values.unset, phone_number=values.unset, incoming_phone_number_sid=values.unset, friendly_name=values.unset, unique_name=values.unset, page_token=values.unset, page_number=values.unset, page_size=values.unset): """ Retrieve a single page of HostedNumberOrderInstance records from the API. Request is executed immediately :param HostedNumberOrderInstance.Status status: The Status of this HostedNumberOrder. :param unicode phone_number: An E164 formatted phone number. :param unicode incoming_phone_number_sid: IncomingPhoneNumber sid. :param unicode friendly_name: A human readable description of this resource. :param unicode unique_name: A unique, developer assigned name of this HostedNumberOrder. :param str page_token: PageToken provided by the API :param int page_number: Page Number, this value is simply for client state :param int page_size: Number of records to return, defaults to 50 :returns: Page of HostedNumberOrderInstance :rtype: twilio.rest.preview.hosted_numbers.hosted_number_order.HostedNumberOrderPage """ params = values.of({ 'Status': status, 'PhoneNumber': phone_number, 'IncomingPhoneNumberSid': incoming_phone_number_sid, 'FriendlyName': friendly_name, 'UniqueName': unique_name, 'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, }) response = self._version.page( 'GET', self._uri, params=params, ) return HostedNumberOrderPage(self._version, response, self._solution)
def function[page, parameter[self, status, phone_number, incoming_phone_number_sid, friendly_name, unique_name, page_token, page_number, page_size]]: constant[ Retrieve a single page of HostedNumberOrderInstance records from the API. Request is executed immediately :param HostedNumberOrderInstance.Status status: The Status of this HostedNumberOrder. :param unicode phone_number: An E164 formatted phone number. :param unicode incoming_phone_number_sid: IncomingPhoneNumber sid. :param unicode friendly_name: A human readable description of this resource. :param unicode unique_name: A unique, developer assigned name of this HostedNumberOrder. :param str page_token: PageToken provided by the API :param int page_number: Page Number, this value is simply for client state :param int page_size: Number of records to return, defaults to 50 :returns: Page of HostedNumberOrderInstance :rtype: twilio.rest.preview.hosted_numbers.hosted_number_order.HostedNumberOrderPage ] variable[params] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da20e74b790>, <ast.Constant object at 0x7da20e7486d0>, <ast.Constant object at 0x7da20e74bb80>, <ast.Constant object at 0x7da20e74bf40>, <ast.Constant object at 0x7da20e74b1c0>, <ast.Constant object at 0x7da20e74ad40>, <ast.Constant object at 0x7da20e74b070>, <ast.Constant object at 0x7da20e74b370>], [<ast.Name object at 0x7da20e74b700>, <ast.Name object at 0x7da20e7489a0>, <ast.Name object at 0x7da18dc043a0>, <ast.Name object at 0x7da18dc055d0>, <ast.Name object at 0x7da18dc070a0>, <ast.Name object at 0x7da18dc05330>, <ast.Name object at 0x7da18dc07820>, <ast.Name object at 0x7da18dc07460>]]]] variable[response] assign[=] call[name[self]._version.page, parameter[constant[GET], name[self]._uri]] return[call[name[HostedNumberOrderPage], parameter[name[self]._version, name[response], name[self]._solution]]]
keyword[def] identifier[page] ( identifier[self] , identifier[status] = identifier[values] . identifier[unset] , identifier[phone_number] = identifier[values] . identifier[unset] , identifier[incoming_phone_number_sid] = identifier[values] . identifier[unset] , identifier[friendly_name] = identifier[values] . identifier[unset] , identifier[unique_name] = identifier[values] . identifier[unset] , identifier[page_token] = identifier[values] . identifier[unset] , identifier[page_number] = identifier[values] . identifier[unset] , identifier[page_size] = identifier[values] . identifier[unset] ): literal[string] identifier[params] = identifier[values] . identifier[of] ({ literal[string] : identifier[status] , literal[string] : identifier[phone_number] , literal[string] : identifier[incoming_phone_number_sid] , literal[string] : identifier[friendly_name] , literal[string] : identifier[unique_name] , literal[string] : identifier[page_token] , literal[string] : identifier[page_number] , literal[string] : identifier[page_size] , }) identifier[response] = identifier[self] . identifier[_version] . identifier[page] ( literal[string] , identifier[self] . identifier[_uri] , identifier[params] = identifier[params] , ) keyword[return] identifier[HostedNumberOrderPage] ( identifier[self] . identifier[_version] , identifier[response] , identifier[self] . identifier[_solution] )
def page(self, status=values.unset, phone_number=values.unset, incoming_phone_number_sid=values.unset, friendly_name=values.unset, unique_name=values.unset, page_token=values.unset, page_number=values.unset, page_size=values.unset): """ Retrieve a single page of HostedNumberOrderInstance records from the API. Request is executed immediately :param HostedNumberOrderInstance.Status status: The Status of this HostedNumberOrder. :param unicode phone_number: An E164 formatted phone number. :param unicode incoming_phone_number_sid: IncomingPhoneNumber sid. :param unicode friendly_name: A human readable description of this resource. :param unicode unique_name: A unique, developer assigned name of this HostedNumberOrder. :param str page_token: PageToken provided by the API :param int page_number: Page Number, this value is simply for client state :param int page_size: Number of records to return, defaults to 50 :returns: Page of HostedNumberOrderInstance :rtype: twilio.rest.preview.hosted_numbers.hosted_number_order.HostedNumberOrderPage """ params = values.of({'Status': status, 'PhoneNumber': phone_number, 'IncomingPhoneNumberSid': incoming_phone_number_sid, 'FriendlyName': friendly_name, 'UniqueName': unique_name, 'PageToken': page_token, 'Page': page_number, 'PageSize': page_size}) response = self._version.page('GET', self._uri, params=params) return HostedNumberOrderPage(self._version, response, self._solution)
def random_tracing(): """ Create new Tracing() tuple with random IDs. """ new_id = _uniq_id() return Tracing( span_id=new_id, parent_id=0, trace_id=new_id, traceflags=0)
def function[random_tracing, parameter[]]: constant[ Create new Tracing() tuple with random IDs. ] variable[new_id] assign[=] call[name[_uniq_id], parameter[]] return[call[name[Tracing], parameter[]]]
keyword[def] identifier[random_tracing] (): literal[string] identifier[new_id] = identifier[_uniq_id] () keyword[return] identifier[Tracing] ( identifier[span_id] = identifier[new_id] , identifier[parent_id] = literal[int] , identifier[trace_id] = identifier[new_id] , identifier[traceflags] = literal[int] )
def random_tracing(): """ Create new Tracing() tuple with random IDs. """ new_id = _uniq_id() return Tracing(span_id=new_id, parent_id=0, trace_id=new_id, traceflags=0)
def run_subprocess(command): """ command is the command to run, as a string. runs a subprocess, returns stdout and stderr from the subprocess as strings. """ x = Popen(command, shell=True, stdout=PIPE, stderr=PIPE) out, err = x.communicate() out = out.decode('utf-8') err = err.decode('utf-8') return out, err
def function[run_subprocess, parameter[command]]: constant[ command is the command to run, as a string. runs a subprocess, returns stdout and stderr from the subprocess as strings. ] variable[x] assign[=] call[name[Popen], parameter[name[command]]] <ast.Tuple object at 0x7da1b1908c10> assign[=] call[name[x].communicate, parameter[]] variable[out] assign[=] call[name[out].decode, parameter[constant[utf-8]]] variable[err] assign[=] call[name[err].decode, parameter[constant[utf-8]]] return[tuple[[<ast.Name object at 0x7da1b1909360>, <ast.Name object at 0x7da1b1909ff0>]]]
keyword[def] identifier[run_subprocess] ( identifier[command] ): literal[string] identifier[x] = identifier[Popen] ( identifier[command] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] ) identifier[out] , identifier[err] = identifier[x] . identifier[communicate] () identifier[out] = identifier[out] . identifier[decode] ( literal[string] ) identifier[err] = identifier[err] . identifier[decode] ( literal[string] ) keyword[return] identifier[out] , identifier[err]
def run_subprocess(command): """ command is the command to run, as a string. runs a subprocess, returns stdout and stderr from the subprocess as strings. """ x = Popen(command, shell=True, stdout=PIPE, stderr=PIPE) (out, err) = x.communicate() out = out.decode('utf-8') err = err.decode('utf-8') return (out, err)
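A self-contained usage sketch for the row above; the echo command is an illustrative placeholder:

    from subprocess import Popen, PIPE

    def run_subprocess(command):
        # As in the row above: run a shell command, capture and decode both streams.
        x = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        out, err = x.communicate()
        return out.decode('utf-8'), err.decode('utf-8')

    out, err = run_subprocess('echo hello')
    print(out.strip())  # hello
    print(err == '')    # True when nothing was written to stderr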
def _compute_weight_std(self, C, mag): """ Common part of equations 8 and 9, page 971. """ if mag < 6.0: return C['a1'] elif mag >= 6.0 and mag < 6.5: return C['a1'] + (C['a2'] - C['a1']) * ((mag - 6.0) / 0.5) else: return C['a2']
def function[_compute_weight_std, parameter[self, C, mag]]: constant[ Common part of equations 8 and 9, page 971. ] if compare[name[mag] less[<] constant[6.0]] begin[:] return[call[name[C]][constant[a1]]]
keyword[def] identifier[_compute_weight_std] ( identifier[self] , identifier[C] , identifier[mag] ): literal[string] keyword[if] identifier[mag] < literal[int] : keyword[return] identifier[C] [ literal[string] ] keyword[elif] identifier[mag] >= literal[int] keyword[and] identifier[mag] < literal[int] : keyword[return] identifier[C] [ literal[string] ]+( identifier[C] [ literal[string] ]- identifier[C] [ literal[string] ])*(( identifier[mag] - literal[int] )/ literal[int] ) keyword[else] : keyword[return] identifier[C] [ literal[string] ]
def _compute_weight_std(self, C, mag): """ Common part of equations 8 and 9, page 971. """ if mag < 6.0: return C['a1'] # depends on [control=['if'], data=[]] elif mag >= 6.0 and mag < 6.5: return C['a1'] + (C['a2'] - C['a1']) * ((mag - 6.0) / 0.5) # depends on [control=['if'], data=[]] else: return C['a2']
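The function above ramps linearly from C['a1'] at magnitude 6.0 to C['a2'] at magnitude 6.5 and is constant outside that band. A standalone numeric check follows; the coefficients a1=0.6 and a2=0.8 are made up for illustration, not values from the cited paper:

    def weight(a1, a2, mag):
        # Same piecewise rule as _compute_weight_std, inlined for a quick check.
        if mag < 6.0:
            return a1
        elif mag < 6.5:
            return a1 + (a2 - a1) * ((mag - 6.0) / 0.5)
        return a2

    print(weight(0.6, 0.8, 5.5))   # 0.6 (below the ramp)
    print(weight(0.6, 0.8, 6.25))  # 0.7 (halfway up the ramp)
    print(weight(0.6, 0.8, 7.0))   # 0.8 (above the ramp)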
def answer_callback_query(self, callback_query_id, text=None, show_alert=None, url=None, cache_time=None): """ Use this method to send answers to callback queries sent from inline keyboards. The answer will be displayed to the user as a notification at the top of the chat screen or as an alert. :param callback_query_id: :param text: :param show_alert: :return: """ return apihelper.answer_callback_query(self.token, callback_query_id, text, show_alert, url, cache_time)
def function[answer_callback_query, parameter[self, callback_query_id, text, show_alert, url, cache_time]]: constant[ Use this method to send answers to callback queries sent from inline keyboards. The answer will be displayed to the user as a notification at the top of the chat screen or as an alert. :param callback_query_id: :param text: :param show_alert: :return: ] return[call[name[apihelper].answer_callback_query, parameter[name[self].token, name[callback_query_id], name[text], name[show_alert], name[url], name[cache_time]]]]
keyword[def] identifier[answer_callback_query] ( identifier[self] , identifier[callback_query_id] , identifier[text] = keyword[None] , identifier[show_alert] = keyword[None] , identifier[url] = keyword[None] , identifier[cache_time] = keyword[None] ): literal[string] keyword[return] identifier[apihelper] . identifier[answer_callback_query] ( identifier[self] . identifier[token] , identifier[callback_query_id] , identifier[text] , identifier[show_alert] , identifier[url] , identifier[cache_time] )
def answer_callback_query(self, callback_query_id, text=None, show_alert=None, url=None, cache_time=None): """ Use this method to send answers to callback queries sent from inline keyboards. The answer will be displayed to the user as a notification at the top of the chat screen or as an alert. :param callback_query_id: :param text: :param show_alert: :return: """ return apihelper.answer_callback_query(self.token, callback_query_id, text, show_alert, url, cache_time)
def concatenate(self, other): """Return a new name which is the concatenation of self and other. @rtype: dns.name.Name object @raises AbsoluteConcatenation: self is absolute and other is not the empty name """ if self.is_absolute() and len(other) > 0: raise AbsoluteConcatenation labels = list(self.labels) labels.extend(list(other.labels)) return Name(labels)
def function[concatenate, parameter[self, other]]: constant[Return a new name which is the concatenation of self and other. @rtype: dns.name.Name object @raises AbsoluteConcatenation: self is absolute and other is not the empty name ] if <ast.BoolOp object at 0x7da18fe921d0> begin[:] <ast.Raise object at 0x7da18fe93100> variable[labels] assign[=] call[name[list], parameter[name[self].labels]] call[name[labels].extend, parameter[call[name[list], parameter[name[other].labels]]]] return[call[name[Name], parameter[name[labels]]]]
keyword[def] identifier[concatenate] ( identifier[self] , identifier[other] ): literal[string] keyword[if] identifier[self] . identifier[is_absolute] () keyword[and] identifier[len] ( identifier[other] )> literal[int] : keyword[raise] identifier[AbsoluteConcatenation] identifier[labels] = identifier[list] ( identifier[self] . identifier[labels] ) identifier[labels] . identifier[extend] ( identifier[list] ( identifier[other] . identifier[labels] )) keyword[return] identifier[Name] ( identifier[labels] )
def concatenate(self, other): """Return a new name which is the concatenation of self and other. @rtype: dns.name.Name object @raises AbsoluteConcatenation: self is absolute and other is not the empty name """ if self.is_absolute() and len(other) > 0: raise AbsoluteConcatenation # depends on [control=['if'], data=[]] labels = list(self.labels) labels.extend(list(other.labels)) return Name(labels)
def run(self): """ Queues all services to be polled for metrics. Should be run via beat. """ services = Service.objects.all() for service in services: service_metric_sync.apply_async( kwargs={"service_id": str(service.id)}) key = "services.downtime.%s.sum" % ( utils.normalise_string(service.name)) check = WidgetData.objects.filter(service=None, key=key) if not check.exists(): WidgetData.objects.create( key=key, title="TEMP - Pending update" ) return "Queued <%s> Service(s) for Metric Sync" % services.count()
def function[run, parameter[self]]: constant[ Queues all services to be polled for metrics. Should be run via beat. ] variable[services] assign[=] call[name[Service].objects.all, parameter[]] for taget[name[service]] in starred[name[services]] begin[:] call[name[service_metric_sync].apply_async, parameter[]] variable[key] assign[=] binary_operation[constant[services.downtime.%s.sum] <ast.Mod object at 0x7da2590d6920> call[name[utils].normalise_string, parameter[name[service].name]]] variable[check] assign[=] call[name[WidgetData].objects.filter, parameter[]] if <ast.UnaryOp object at 0x7da18bcca6b0> begin[:] call[name[WidgetData].objects.create, parameter[]] return[binary_operation[constant[Queued <%s> Service(s) for Metric Sync] <ast.Mod object at 0x7da2590d6920> call[name[services].count, parameter[]]]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[services] = identifier[Service] . identifier[objects] . identifier[all] () keyword[for] identifier[service] keyword[in] identifier[services] : identifier[service_metric_sync] . identifier[apply_async] ( identifier[kwargs] ={ literal[string] : identifier[str] ( identifier[service] . identifier[id] )}) identifier[key] = literal[string] %( identifier[utils] . identifier[normalise_string] ( identifier[service] . identifier[name] )) identifier[check] = identifier[WidgetData] . identifier[objects] . identifier[filter] ( identifier[service] = keyword[None] , identifier[key] = identifier[key] ) keyword[if] keyword[not] identifier[check] . identifier[exists] (): identifier[WidgetData] . identifier[objects] . identifier[create] ( identifier[key] = identifier[key] , identifier[title] = literal[string] ) keyword[return] literal[string] % identifier[services] . identifier[count] ()
def run(self): """ Queues all services to be polled for metrics. Should be run via beat. """ services = Service.objects.all() for service in services: service_metric_sync.apply_async(kwargs={'service_id': str(service.id)}) key = 'services.downtime.%s.sum' % utils.normalise_string(service.name) check = WidgetData.objects.filter(service=None, key=key) if not check.exists(): WidgetData.objects.create(key=key, title='TEMP - Pending update') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service']] return 'Queued <%s> Service(s) for Metric Sync' % services.count()
def allocate_ids(self, incomplete_key, num_ids): """Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. """ if not incomplete_key.is_partial: raise ValueError(("Key is not partial.", incomplete_key)) incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids response_pb = self._datastore_api.allocate_ids( incomplete_key.project, incomplete_key_pbs ) allocated_ids = [ allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys ] return [ incomplete_key.completed_key(allocated_id) for allocated_id in allocated_ids ]
def function[allocate_ids, parameter[self, incomplete_key, num_ids]]: constant[Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. ] if <ast.UnaryOp object at 0x7da204344a60> begin[:] <ast.Raise object at 0x7da204345600> variable[incomplete_key_pb] assign[=] call[name[incomplete_key].to_protobuf, parameter[]] variable[incomplete_key_pbs] assign[=] binary_operation[list[[<ast.Name object at 0x7da204347bb0>]] * name[num_ids]] variable[response_pb] assign[=] call[name[self]._datastore_api.allocate_ids, parameter[name[incomplete_key].project, name[incomplete_key_pbs]]] variable[allocated_ids] assign[=] <ast.ListComp object at 0x7da204344160> return[<ast.ListComp object at 0x7da2043478e0>]
keyword[def] identifier[allocate_ids] ( identifier[self] , identifier[incomplete_key] , identifier[num_ids] ): literal[string] keyword[if] keyword[not] identifier[incomplete_key] . identifier[is_partial] : keyword[raise] identifier[ValueError] (( literal[string] , identifier[incomplete_key] )) identifier[incomplete_key_pb] = identifier[incomplete_key] . identifier[to_protobuf] () identifier[incomplete_key_pbs] =[ identifier[incomplete_key_pb] ]* identifier[num_ids] identifier[response_pb] = identifier[self] . identifier[_datastore_api] . identifier[allocate_ids] ( identifier[incomplete_key] . identifier[project] , identifier[incomplete_key_pbs] ) identifier[allocated_ids] =[ identifier[allocated_key_pb] . identifier[path] [- literal[int] ]. identifier[id] keyword[for] identifier[allocated_key_pb] keyword[in] identifier[response_pb] . identifier[keys] ] keyword[return] [ identifier[incomplete_key] . identifier[completed_key] ( identifier[allocated_id] ) keyword[for] identifier[allocated_id] keyword[in] identifier[allocated_ids] ]
def allocate_ids(self, incomplete_key, num_ids): """Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. """ if not incomplete_key.is_partial: raise ValueError(('Key is not partial.', incomplete_key)) # depends on [control=['if'], data=[]] incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids response_pb = self._datastore_api.allocate_ids(incomplete_key.project, incomplete_key_pbs) allocated_ids = [allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys] return [incomplete_key.completed_key(allocated_id) for allocated_id in allocated_ids]
def _write_var_attrs(self, f, varNum, var_attrs, zVar): ''' Writes ADRs and AEDRs for variables Parameters: f : file The open CDF file varNum : int The variable number for adding attributes var_attrs : dict A dictionary object full of variable attributes zVar : bool True if varNum is referencing a z variable Returns: None ''' if (not isinstance(var_attrs, dict)): raise TypeError('Variable attribute(s) should be in dictionary form.') for attr, entry in var_attrs.items(): if (attr in self.gattrs): print('Attribute: ', attr, ' already defined as a global attribute... Skip') continue if not (attr in self.attrs): attrNum, offset = self._write_adr(f, False, attr) if (len(self.attrs) == 0): # GDR's ADRhead self._update_offset_value(self.grd_offset+28, 8, offset) else: attrNum = self.attrs.index(attr) offset = self.attrsinfo[attrNum][2] if (entry is None): continue # Check if dataType was provided dataType = 0 if (isinstance(entry, list) or isinstance(entry, tuple)): items = len(entry) if (items == 2): dataType = CDF._datatype_token(entry[1]) if (dataType > 0): # CDF data type defined in entry data = entry[0] if (CDF._checklistofNums(data)): # All are numbers if (isinstance(data, list) or isinstance(data, tuple)): numElems = len(data) else: numElems = 1 else: # Then string(s) -- either in CDF_type or epoch in string(s) if (dataType == CDF.CDF_CHAR or dataType == CDF.CDF_UCHAR): if isinstance(data, (list, tuple)): items = len(data) odata = data data = str('') for x in range(0, items): if (x > 0): data += str('\\N ') data += odata[x] else: data = odata[x] numElems = len(data) elif (dataType == CDF.CDF_EPOCH or dataType == CDF.CDF_EPOCH16 or dataType == CDF.CDF_TIME_TT2000): cvalue = [] if isinstance(data, (list, tuple)): numElems = len(data) for x in range(0, numElems): cvalue.append(cdfepoch.CDFepoch.parse(data[x])) data = cvalue else: data = cdfepoch.CDFepoch.parse(data) numElems = 1 else: # No data type defined... data = entry if isinstance(entry, (list, tuple)): numElems, dataType = CDF._datatype_define(entry[0]) if (dataType == CDF.CDF_CHAR or dataType == CDF.CDF_UCHAR): data = str('') for x in range(0, len(entry)): if (x > 0): data += str('\\N ') data += entry[x] else: data = entry[x] numElems = len(data) else: numElems, dataType = CDF._datatype_define(entry) offset = self._write_aedr(f, False, attrNum, varNum, data, dataType, numElems, zVar) self._update_aedr_link(f, attrNum, zVar, varNum, offset)
def function[_write_var_attrs, parameter[self, f, varNum, var_attrs, zVar]]: constant[ Writes ADRs and AEDRs for variables Parameters: f : file The open CDF file varNum : int The variable number for adding attributes var_attrs : dict A dictionary object full of variable attributes zVar : bool True if varNum is referencing a z variable Returns: None ] if <ast.UnaryOp object at 0x7da1b06ad6c0> begin[:] <ast.Raise object at 0x7da1b06ac580> for taget[tuple[[<ast.Name object at 0x7da1b06ac910>, <ast.Name object at 0x7da1b06ac880>]]] in starred[call[name[var_attrs].items, parameter[]]] begin[:] if compare[name[attr] in name[self].gattrs] begin[:] call[name[print], parameter[constant[Attribute: ], name[attr], constant[ already defined as a global attribute... Skip]]] continue if <ast.UnaryOp object at 0x7da1b06ac370> begin[:] <ast.Tuple object at 0x7da1b06ac250> assign[=] call[name[self]._write_adr, parameter[name[f], constant[False], name[attr]]] if compare[call[name[len], parameter[name[self].attrs]] equal[==] constant[0]] begin[:] call[name[self]._update_offset_value, parameter[binary_operation[name[self].grd_offset + constant[28]], constant[8], name[offset]]] if compare[name[entry] is constant[None]] begin[:] continue variable[dataType] assign[=] constant[0] if <ast.BoolOp object at 0x7da1b06ad7e0> begin[:] variable[items] assign[=] call[name[len], parameter[name[entry]]] if compare[name[items] equal[==] constant[2]] begin[:] variable[dataType] assign[=] call[name[CDF]._datatype_token, parameter[call[name[entry]][constant[1]]]] if compare[name[dataType] greater[>] constant[0]] begin[:] variable[data] assign[=] call[name[entry]][constant[0]] if call[name[CDF]._checklistofNums, parameter[name[data]]] begin[:] if <ast.BoolOp object at 0x7da1b06597b0> begin[:] variable[numElems] assign[=] call[name[len], parameter[name[data]]] variable[offset] assign[=] call[name[self]._write_aedr, parameter[name[f], constant[False], name[attrNum], name[varNum], name[data], name[dataType], name[numElems], name[zVar]]] call[name[self]._update_aedr_link, parameter[name[f], name[attrNum], name[zVar], name[varNum], name[offset]]]
keyword[def] identifier[_write_var_attrs] ( identifier[self] , identifier[f] , identifier[varNum] , identifier[var_attrs] , identifier[zVar] ): literal[string] keyword[if] ( keyword[not] identifier[isinstance] ( identifier[var_attrs] , identifier[dict] )): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[for] identifier[attr] , identifier[entry] keyword[in] identifier[var_attrs] . identifier[items] (): keyword[if] ( identifier[attr] keyword[in] identifier[self] . identifier[gattrs] ): identifier[print] ( literal[string] , identifier[attr] , literal[string] ) keyword[continue] keyword[if] keyword[not] ( identifier[attr] keyword[in] identifier[self] . identifier[attrs] ): identifier[attrNum] , identifier[offset] = identifier[self] . identifier[_write_adr] ( identifier[f] , keyword[False] , identifier[attr] ) keyword[if] ( identifier[len] ( identifier[self] . identifier[attrs] )== literal[int] ): identifier[self] . identifier[_update_offset_value] ( identifier[self] . identifier[grd_offset] + literal[int] , literal[int] , identifier[offset] ) keyword[else] : identifier[attrNum] = identifier[self] . identifier[attrs] . identifier[index] ( identifier[attr] ) identifier[offset] = identifier[self] . identifier[attrsinfo] [ identifier[attrNum] ][ literal[int] ] keyword[if] ( identifier[entry] keyword[is] keyword[None] ): keyword[continue] identifier[dataType] = literal[int] keyword[if] ( identifier[isinstance] ( identifier[entry] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[entry] , identifier[tuple] )): identifier[items] = identifier[len] ( identifier[entry] ) keyword[if] ( identifier[items] == literal[int] ): identifier[dataType] = identifier[CDF] . identifier[_datatype_token] ( identifier[entry] [ literal[int] ]) keyword[if] ( identifier[dataType] > literal[int] ): identifier[data] = identifier[entry] [ literal[int] ] keyword[if] ( identifier[CDF] . identifier[_checklistofNums] ( identifier[data] )): keyword[if] ( identifier[isinstance] ( identifier[data] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[data] , identifier[tuple] )): identifier[numElems] = identifier[len] ( identifier[data] ) keyword[else] : identifier[numElems] = literal[int] keyword[else] : keyword[if] ( identifier[dataType] == identifier[CDF] . identifier[CDF_CHAR] keyword[or] identifier[dataType] == identifier[CDF] . identifier[CDF_UCHAR] ): keyword[if] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] )): identifier[items] = identifier[len] ( identifier[data] ) identifier[odata] = identifier[data] identifier[data] = identifier[str] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[items] ): keyword[if] ( identifier[x] > literal[int] ): identifier[data] += identifier[str] ( literal[string] ) identifier[data] += identifier[odata] [ identifier[x] ] keyword[else] : identifier[data] = identifier[odata] [ identifier[x] ] identifier[numElems] = identifier[len] ( identifier[data] ) keyword[elif] ( identifier[dataType] == identifier[CDF] . identifier[CDF_EPOCH] keyword[or] identifier[dataType] == identifier[CDF] . identifier[CDF_EPOCH16] keyword[or] identifier[dataType] == identifier[CDF] . identifier[CDF_TIME_TT2000] ): identifier[cvalue] =[] keyword[if] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] )): identifier[numElems] = identifier[len] ( identifier[data] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[numElems] ): identifier[cvalue] . identifier[append] ( identifier[cdfepoch] . identifier[CDFepoch] . identifier[parse] ( identifier[data] [ identifier[x] ])) identifier[data] = identifier[cvalue] keyword[else] : identifier[data] = identifier[cdfepoch] . identifier[CDFepoch] . identifier[parse] ( identifier[data] ) identifier[numElems] = literal[int] keyword[else] : identifier[data] = identifier[entry] keyword[if] identifier[isinstance] ( identifier[entry] ,( identifier[list] , identifier[tuple] )): identifier[numElems] , identifier[dataType] = identifier[CDF] . identifier[_datatype_define] ( identifier[entry] [ literal[int] ]) keyword[if] ( identifier[dataType] == identifier[CDF] . identifier[CDF_CHAR] keyword[or] identifier[dataType] == identifier[CDF] . identifier[CDF_UCHAR] ): identifier[data] = identifier[str] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[entry] )): keyword[if] ( identifier[x] > literal[int] ): identifier[data] += identifier[str] ( literal[string] ) identifier[data] += identifier[entry] [ identifier[x] ] keyword[else] : identifier[data] = identifier[entry] [ identifier[x] ] identifier[numElems] = identifier[len] ( identifier[data] ) keyword[else] : identifier[numElems] , identifier[dataType] = identifier[CDF] . identifier[_datatype_define] ( identifier[entry] ) identifier[offset] = identifier[self] . identifier[_write_aedr] ( identifier[f] , keyword[False] , identifier[attrNum] , identifier[varNum] , identifier[data] , identifier[dataType] , identifier[numElems] , identifier[zVar] ) identifier[self] . identifier[_update_aedr_link] ( identifier[f] , identifier[attrNum] , identifier[zVar] , identifier[varNum] , identifier[offset] )
def _write_var_attrs(self, f, varNum, var_attrs, zVar): """ Writes ADRs and AEDRs for variables Parameters: f : file The open CDF file varNum : int The variable number for adding attributes var_attrs : dict A dictionary object full of variable attributes zVar : bool True if varNum is referencing a z variable Returns: None """ if not isinstance(var_attrs, dict): raise TypeError('Variable attribute(s) should be in dictionary form.') # depends on [control=['if'], data=[]] for (attr, entry) in var_attrs.items(): if attr in self.gattrs: print('Attribute: ', attr, ' already defined as a global attribute... Skip') continue # depends on [control=['if'], data=['attr']] if not attr in self.attrs: (attrNum, offset) = self._write_adr(f, False, attr) if len(self.attrs) == 0: # GDR's ADRhead self._update_offset_value(self.grd_offset + 28, 8, offset) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: attrNum = self.attrs.index(attr) offset = self.attrsinfo[attrNum][2] if entry is None: continue # depends on [control=['if'], data=[]] # Check if dataType was provided dataType = 0 if isinstance(entry, list) or isinstance(entry, tuple): items = len(entry) if items == 2: dataType = CDF._datatype_token(entry[1]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if dataType > 0: # CDF data type defined in entry data = entry[0] if CDF._checklistofNums(data): # All are numbers if isinstance(data, list) or isinstance(data, tuple): numElems = len(data) # depends on [control=['if'], data=[]] else: numElems = 1 # depends on [control=['if'], data=[]] # Then string(s) -- either in CDF_type or epoch in string(s) elif dataType == CDF.CDF_CHAR or dataType == CDF.CDF_UCHAR: if isinstance(data, (list, tuple)): items = len(data) odata = data data = str('') for x in range(0, items): if x > 0: data += str('\\N ') data += odata[x] # depends on [control=['if'], data=['x']] else: data = odata[x] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]] numElems = len(data) # depends on [control=['if'], data=[]] elif dataType == CDF.CDF_EPOCH or dataType == CDF.CDF_EPOCH16 or dataType == CDF.CDF_TIME_TT2000: cvalue = [] if isinstance(data, (list, tuple)): numElems = len(data) for x in range(0, numElems): cvalue.append(cdfepoch.CDFepoch.parse(data[x])) # depends on [control=['for'], data=['x']] data = cvalue # depends on [control=['if'], data=[]] else: data = cdfepoch.CDFepoch.parse(data) numElems = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dataType']] else: # No data type defined... data = entry if isinstance(entry, (list, tuple)): (numElems, dataType) = CDF._datatype_define(entry[0]) if dataType == CDF.CDF_CHAR or dataType == CDF.CDF_UCHAR: data = str('') for x in range(0, len(entry)): if x > 0: data += str('\\N ') data += entry[x] # depends on [control=['if'], data=['x']] else: data = entry[x] # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]] numElems = len(data) # depends on [control=['if'], data=[]] else: (numElems, dataType) = CDF._datatype_define(entry) offset = self._write_aedr(f, False, attrNum, varNum, data, dataType, numElems, zVar) self._update_aedr_link(f, attrNum, zVar, varNum, offset) # depends on [control=['for'], data=[]]
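A hedged usage sketch for _write_var_attrs above. The attribute names, values, and CDF type tokens are illustrative only; the call is shown as it would appear inside the CDF writer that owns this method.

# Entries may be a bare value (type inferred via _datatype_define) or a
# two-item [value, CDF_type] pair (type resolved via _datatype_token).
var_attrs = {
    'FIELDNAM': 'Magnetic field',          # bare value, type inferred
    'VALIDMIN': [-1000.0, 'CDF_DOUBLE'],   # explicit type token (assumed name)
    'FILLVAL': [-1e31, 'CDF_REAL8'],       # explicit type token (assumed name)
}
# Inside the writer, with f the open CDF file and 0 the zVariable number:
# self._write_var_attrs(f, 0, var_attrs, zVar=True)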
def response(self):
        """
        Dictionary of public and private hostnames and IPs.

        :rtype: dict
        """
        describe_request_params = {}

        if self.filter is not None:
            if type(self.filter) is not dict:
                try:
                    filters = json.loads(self.filter)
                except TypeError:
                    filters = self._parse_cli_filters(self.filter)
            else:
                filters = self.filter
            describe_request_params['Filters'] = filters

        if self.vpc_ids is not None:
            if 'Filters' not in describe_request_params:
                describe_request_params['Filters'] = []

            describe_request_params['Filters'].append({
                'Name': 'vpc-id',
                'Values': self.vpc_ids.split(',')
            })

        reservations = self.session().client('ec2').describe_instances(**describe_request_params)

        return self._process_reservations(reservations)
def function[response, parameter[self]]: constant[ Dictionary of public and private hostnames and IPs. :rtype: dict ] variable[describe_request_params] assign[=] dictionary[[], []] if compare[name[self].filter is_not constant[None]] begin[:] if compare[call[name[type], parameter[name[self].filter]] is_not name[dict]] begin[:] <ast.Try object at 0x7da18f811f30> call[name[describe_request_params]][constant[Filters]] assign[=] name[filters] if compare[name[self].vpc_ids is_not constant[None]] begin[:] if compare[constant[Filters] <ast.NotIn object at 0x7da2590d7190> name[describe_request_params]] begin[:] call[name[describe_request_params]][constant[Filters]] assign[=] list[[]] call[call[name[describe_request_params]][constant[Filters]].append, parameter[dictionary[[<ast.Constant object at 0x7da18f811ed0>, <ast.Constant object at 0x7da18f810cd0>], [<ast.Constant object at 0x7da18f810c70>, <ast.Call object at 0x7da18f813580>]]]] variable[reservations] assign[=] call[call[call[name[self].session, parameter[]].client, parameter[constant[ec2]]].describe_instances, parameter[]] return[call[name[self]._process_reservations, parameter[name[reservations]]]]
keyword[def] identifier[response] ( identifier[self] ): literal[string] identifier[describe_request_params] ={} keyword[if] identifier[self] . identifier[filter] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[type] ( identifier[self] . identifier[filter] ) keyword[is] keyword[not] identifier[dict] : keyword[try] : identifier[filters] = identifier[json] . identifier[loads] ( identifier[self] . identifier[filter] ) keyword[except] identifier[TypeError] : identifier[filters] = identifier[self] . identifier[_parse_cli_filters] ( identifier[self] . identifier[filter] ) keyword[else] : identifier[filters] = identifier[self] . identifier[filter] identifier[describe_request_params] [ literal[string] ]= identifier[filters] keyword[if] identifier[self] . identifier[vpc_ids] keyword[is] keyword[not] keyword[None] : keyword[if] literal[string] keyword[not] keyword[in] identifier[describe_request_params] : identifier[describe_request_params] [ literal[string] ]=[] identifier[describe_request_params] [ literal[string] ]. identifier[append] ({ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[vpc_ids] . identifier[split] ( literal[string] ) }) identifier[reservations] = identifier[self] . identifier[session] (). identifier[client] ( literal[string] ). identifier[describe_instances] (** identifier[describe_request_params] ) keyword[return] identifier[self] . identifier[_process_reservations] ( identifier[reservations] )
def response(self):
    """
    Dictionary of public and private hostnames and IPs.

    :rtype: dict
    """
    describe_request_params = {}
    if self.filter is not None:
        if type(self.filter) is not dict:
            try:
                filters = json.loads(self.filter) # depends on [control=['try'], data=[]]
            except TypeError:
                filters = self._parse_cli_filters(self.filter) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
        else:
            filters = self.filter
        describe_request_params['Filters'] = filters # depends on [control=['if'], data=[]]
    if self.vpc_ids is not None:
        if 'Filters' not in describe_request_params:
            describe_request_params['Filters'] = [] # depends on [control=['if'], data=['describe_request_params']]
        describe_request_params['Filters'].append({'Name': 'vpc-id', 'Values': self.vpc_ids.split(',')}) # depends on [control=['if'], data=[]]
    reservations = self.session().client('ec2').describe_instances(**describe_request_params)
    return self._process_reservations(reservations)
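A minimal sketch of driving response() above. The host class name is hypothetical; only the filter shape follows what the method accepts (a dict, a JSON string, or a CLI-style string handled by _parse_cli_filters).

# EC2-style filters: a list of {'Name': ..., 'Values': [...]} dicts.
filters = [{'Name': 'tag:Environment', 'Values': ['production']}]

# Hypothetical host object exposing filter, vpc_ids and session().
lister = InstanceLister(filter=filters, vpc_ids='vpc-0abc1234,vpc-0def5678')
hosts = lister.response()  # dict of public/private hostnames and IPs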
def thread_exception(self, raised_exception): """ :meth:`.WThreadTask.thread_exception` implementation. Register (if required) unhandled exception event by a tracker storage :param raised_exception: unhandled exception :return: None """ tracker = self.tracker_storage() if tracker is not None: try: if self.track_exception() is True: details = self.event_details(WTrackerEvents.exception) tracker.register_exception( self, raised_exception, traceback.format_exc(), event_details=details ) except Exception as e: self.thread_tracker_exception(e)
def function[thread_exception, parameter[self, raised_exception]]: constant[ :meth:`.WThreadTask.thread_exception` implementation. Register (if required) unhandled exception event by a tracker storage :param raised_exception: unhandled exception :return: None ] variable[tracker] assign[=] call[name[self].tracker_storage, parameter[]] if compare[name[tracker] is_not constant[None]] begin[:] <ast.Try object at 0x7da20c992500>
keyword[def] identifier[thread_exception] ( identifier[self] , identifier[raised_exception] ): literal[string] identifier[tracker] = identifier[self] . identifier[tracker_storage] () keyword[if] identifier[tracker] keyword[is] keyword[not] keyword[None] : keyword[try] : keyword[if] identifier[self] . identifier[track_exception] () keyword[is] keyword[True] : identifier[details] = identifier[self] . identifier[event_details] ( identifier[WTrackerEvents] . identifier[exception] ) identifier[tracker] . identifier[register_exception] ( identifier[self] , identifier[raised_exception] , identifier[traceback] . identifier[format_exc] (), identifier[event_details] = identifier[details] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[thread_tracker_exception] ( identifier[e] )
def thread_exception(self, raised_exception): """ :meth:`.WThreadTask.thread_exception` implementation. Register (if required) unhandled exception event by a tracker storage :param raised_exception: unhandled exception :return: None """ tracker = self.tracker_storage() if tracker is not None: try: if self.track_exception() is True: details = self.event_details(WTrackerEvents.exception) tracker.register_exception(self, raised_exception, traceback.format_exc(), event_details=details) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: self.thread_tracker_exception(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=['tracker']]
def find_values_added_to_enums( old_schema: GraphQLSchema, new_schema: GraphQLSchema ) -> List[DangerousChange]: """Find values added to enums. Given two schemas, returns a list containing descriptions of any dangerous changes in the new_schema related to adding values to an enum type. """ old_type_map = old_schema.type_map new_type_map = new_schema.type_map values_added_to_enums = [] for type_name, old_type in old_type_map.items(): new_type = new_type_map.get(type_name) if not (is_enum_type(old_type) and is_enum_type(new_type)): continue old_type = cast(GraphQLEnumType, old_type) new_type = cast(GraphQLEnumType, new_type) values_in_old_enum = old_type.values for value_name in new_type.values: if value_name not in values_in_old_enum: values_added_to_enums.append( DangerousChange( DangerousChangeType.VALUE_ADDED_TO_ENUM, f"{value_name} was added to enum type {type_name}.", ) ) return values_added_to_enums
def function[find_values_added_to_enums, parameter[old_schema, new_schema]]: constant[Find values added to enums. Given two schemas, returns a list containing descriptions of any dangerous changes in the new_schema related to adding values to an enum type. ] variable[old_type_map] assign[=] name[old_schema].type_map variable[new_type_map] assign[=] name[new_schema].type_map variable[values_added_to_enums] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b2260eb0>, <ast.Name object at 0x7da1b2260370>]]] in starred[call[name[old_type_map].items, parameter[]]] begin[:] variable[new_type] assign[=] call[name[new_type_map].get, parameter[name[type_name]]] if <ast.UnaryOp object at 0x7da1b2260f40> begin[:] continue variable[old_type] assign[=] call[name[cast], parameter[name[GraphQLEnumType], name[old_type]]] variable[new_type] assign[=] call[name[cast], parameter[name[GraphQLEnumType], name[new_type]]] variable[values_in_old_enum] assign[=] name[old_type].values for taget[name[value_name]] in starred[name[new_type].values] begin[:] if compare[name[value_name] <ast.NotIn object at 0x7da2590d7190> name[values_in_old_enum]] begin[:] call[name[values_added_to_enums].append, parameter[call[name[DangerousChange], parameter[name[DangerousChangeType].VALUE_ADDED_TO_ENUM, <ast.JoinedStr object at 0x7da1b22ac430>]]]] return[name[values_added_to_enums]]
keyword[def] identifier[find_values_added_to_enums] ( identifier[old_schema] : identifier[GraphQLSchema] , identifier[new_schema] : identifier[GraphQLSchema] )-> identifier[List] [ identifier[DangerousChange] ]: literal[string] identifier[old_type_map] = identifier[old_schema] . identifier[type_map] identifier[new_type_map] = identifier[new_schema] . identifier[type_map] identifier[values_added_to_enums] =[] keyword[for] identifier[type_name] , identifier[old_type] keyword[in] identifier[old_type_map] . identifier[items] (): identifier[new_type] = identifier[new_type_map] . identifier[get] ( identifier[type_name] ) keyword[if] keyword[not] ( identifier[is_enum_type] ( identifier[old_type] ) keyword[and] identifier[is_enum_type] ( identifier[new_type] )): keyword[continue] identifier[old_type] = identifier[cast] ( identifier[GraphQLEnumType] , identifier[old_type] ) identifier[new_type] = identifier[cast] ( identifier[GraphQLEnumType] , identifier[new_type] ) identifier[values_in_old_enum] = identifier[old_type] . identifier[values] keyword[for] identifier[value_name] keyword[in] identifier[new_type] . identifier[values] : keyword[if] identifier[value_name] keyword[not] keyword[in] identifier[values_in_old_enum] : identifier[values_added_to_enums] . identifier[append] ( identifier[DangerousChange] ( identifier[DangerousChangeType] . identifier[VALUE_ADDED_TO_ENUM] , literal[string] , ) ) keyword[return] identifier[values_added_to_enums]
def find_values_added_to_enums(old_schema: GraphQLSchema, new_schema: GraphQLSchema) -> List[DangerousChange]: """Find values added to enums. Given two schemas, returns a list containing descriptions of any dangerous changes in the new_schema related to adding values to an enum type. """ old_type_map = old_schema.type_map new_type_map = new_schema.type_map values_added_to_enums = [] for (type_name, old_type) in old_type_map.items(): new_type = new_type_map.get(type_name) if not (is_enum_type(old_type) and is_enum_type(new_type)): continue # depends on [control=['if'], data=[]] old_type = cast(GraphQLEnumType, old_type) new_type = cast(GraphQLEnumType, new_type) values_in_old_enum = old_type.values for value_name in new_type.values: if value_name not in values_in_old_enum: values_added_to_enums.append(DangerousChange(DangerousChangeType.VALUE_ADDED_TO_ENUM, f'{value_name} was added to enum type {type_name}.')) # depends on [control=['if'], data=['value_name']] # depends on [control=['for'], data=['value_name']] # depends on [control=['for'], data=[]] return values_added_to_enums
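A small runnable sketch, assuming graphql-core is installed and find_values_added_to_enums is in scope:

from graphql import build_schema

old_schema = build_schema('enum Color { RED GREEN }')
new_schema = build_schema('enum Color { RED GREEN BLUE }')

# Expect one DangerousChange: "BLUE was added to enum type Color."
for change in find_values_added_to_enums(old_schema, new_schema):
    print(change.type, change.description)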
def variants(self, case_id, skip=0, count=1000, filters=None):
        """Return count variants for a case.

        This function needs to have different behaviours based on what is
        asked for. It should always try to give minimal information back to
        improve on speed. For example, if consequences are not asked for we
        will not build all transcripts. If the variants are not SVs we will
        not build SV coordinates.

        So the minimal case is to just show what is asked for in the variants
        interface.

        Args:
            case_id (str): A gemini db
            skip (int): Skip first variants
            count (int): The number of variants to return
            filters (dict): A dictionary with filters. Currently this will
            look like: {
                gene_ids: [] (list of hgnc ids),
                frequency: None (float),
                cadd: None (float),
                consequence: [] (list of consequences),
                impact_severities: [] (list of consequences),
                genetic_models: [] (list of genetic models)
            }

        Returns:
            puzzle.constants.Results : Named tuple with variants and
                                       nr_of_variants
        """
        filters = filters or {}
        logger.debug("Looking for variants in {0}".format(case_id))
        limit = count + skip

        gemini_query = filters.get('gemini_query') or "SELECT * from variants v"

        any_filter = False

        if filters.get('frequency'):
            frequency = filters['frequency']
            extra_info = "(v.max_aaf_all < {0} or v.max_aaf_all is"\
                         " Null)".format(frequency)
            gemini_query = self.build_gemini_query(gemini_query, extra_info)

        if filters.get('cadd'):
            cadd_score = filters['cadd']
            extra_info = "(v.cadd_scaled > {0})".format(cadd_score)
            gemini_query = self.build_gemini_query(gemini_query, extra_info)

        if filters.get('gene_ids'):
            gene_list = [gene_id.strip() for gene_id in filters['gene_ids']]

            gene_string = "v.gene in ("
            for index, gene_id in enumerate(gene_list):
                if index == 0:
                    gene_string += "'{0}'".format(gene_id)
                else:
                    gene_string += ", '{0}'".format(gene_id)
            gene_string += ")"

            gemini_query = self.build_gemini_query(gemini_query, gene_string)

        if filters.get('range'):
            chrom = filters['range']['chromosome']
            if not chrom.startswith('chr'):
                chrom = "chr{0}".format(chrom)

            range_string = "v.chrom = '{0}' AND "\
                           "((v.start BETWEEN {1} AND {2}) OR "\
                           "(v.end BETWEEN {1} AND {2}))".format(
                               chrom,
                               filters['range']['start'],
                               filters['range']['end']
                           )

            gemini_query = self.build_gemini_query(gemini_query, range_string)

        filtered_variants = self._variants(
            case_id=case_id,
            gemini_query=gemini_query,
        )

        if filters.get('consequence'):
            consequences = set(filters['consequence'])
            filtered_variants = (variant for variant in filtered_variants if
                                 set(variant.consequences).intersection(consequences))

        if filters.get('impact_severities'):
            severities = set([severity.strip()
                              for severity in filters['impact_severities']])
            new_filtered_variants = []
            filtered_variants = (variant for variant in filtered_variants if
                                 set([variant.impact_severity]).intersection(severities))

        if filters.get('sv_len'):
            sv_len = int(filters['sv_len'])
            filtered_variants = (variant for variant in filtered_variants if
                                 variant.sv_len >= sv_len)

        variants = []
        for index, variant_obj in enumerate(filtered_variants):
            if index >= skip:
                if index < limit:
                    variants.append(variant_obj)
                else:
                    break

        return Results(variants, len(variants))
def function[variants, parameter[self, case_id, skip, count, filters]]: constant[Return count variants for a case. This function needs to have different behaviours based on what is asked for. It should always try to give minimal information back to improve on speed. For example, if consequences are not asked for we will not build all transcripts. If the variants are not SVs we will not build SV coordinates. So the minimal case is to just show what is asked for in the variants interface. Args: case_id (str): A gemini db skip (int): Skip first variants count (int): The number of variants to return filters (dict): A dictionary with filters. Currently this will look like: { gene_ids: [] (list of hgnc ids), frequency: None (float), cadd: None (float), consequence: [] (list of consequences), impact_severities: [] (list of consequences), genetic_models: [] (list of genetic models) } Returns: puzzle.constants.Results : Named tuple with variants and nr_of_variants ] variable[filters] assign[=] <ast.BoolOp object at 0x7da18eb57400> call[name[logger].debug, parameter[call[constant[Looking for variants in {0}].format, parameter[name[case_id]]]]] variable[limit] assign[=] binary_operation[name[count] + name[skip]] variable[gemini_query] assign[=] <ast.BoolOp object at 0x7da18eb57370> variable[any_filter] assign[=] constant[False] if call[name[filters].get, parameter[constant[frequency]]] begin[:] variable[frequency] assign[=] call[name[filters]][constant[frequency]] variable[extra_info] assign[=] call[constant[(v.max_aaf_all < {0} or v.max_aaf_all is Null)].format, parameter[name[frequency]]] variable[gemini_query] assign[=] call[name[self].build_gemini_query, parameter[name[gemini_query], name[extra_info]]] if call[name[filters].get, parameter[constant[cadd]]] begin[:] variable[cadd_score] assign[=] call[name[filters]][constant[cadd]] variable[extra_info] assign[=] call[constant[(v.cadd_scaled > {0})].format, parameter[name[cadd_score]]] variable[gemini_query] assign[=] call[name[self].build_gemini_query, parameter[name[gemini_query], name[extra_info]]] if call[name[filters].get, parameter[constant[gene_ids]]] begin[:] variable[gene_list] assign[=] <ast.ListComp object at 0x7da18eb54dc0> variable[gene_string] assign[=] constant[v.gene in (] for taget[tuple[[<ast.Name object at 0x7da18eb56ec0>, <ast.Name object at 0x7da18eb57820>]]] in starred[call[name[enumerate], parameter[name[gene_list]]]] begin[:] if compare[name[index] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da18eb579d0> <ast.AugAssign object at 0x7da18eb54100> variable[gene_string] assign[=] name[gene_string] variable[gemini_query] assign[=] call[name[self].build_gemini_query, parameter[name[gemini_query], name[gene_string]]] if call[name[filters].get, parameter[constant[range]]] begin[:] variable[chrom] assign[=] call[call[name[filters]][constant[range]]][constant[chromosome]] if <ast.UnaryOp object at 0x7da18eb572e0> begin[:] variable[chrom] assign[=] call[constant[chr{0}].format, parameter[name[chrom]]] variable[range_string] assign[=] call[constant[v.chrom = '{0}' AND ((v.start BETWEEN {1} AND {2}) OR (v.end BETWEEN {1} AND {2}))].format, parameter[name[chrom], call[call[name[filters]][constant[range]]][constant[start]], call[call[name[filters]][constant[range]]][constant[end]]]] variable[gemini_query] assign[=] call[name[self].build_gemini_query, parameter[name[gemini_query], name[range_string]]] variable[filtered_variants] assign[=] call[name[self]._variants, parameter[]] if call[name[filters].get, parameter[constant[consequence]]] begin[:] variable[consequences] assign[=] call[name[set], parameter[call[name[filters]][constant[consequence]]]] variable[filtered_variants] assign[=] <ast.GeneratorExp object at 0x7da18eb55b10> if call[name[filters].get, parameter[constant[impact_severities]]] begin[:] variable[severities] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da18eb56da0>]] variable[new_filtered_variants] assign[=] list[[]] variable[filtered_variants] assign[=] <ast.GeneratorExp object at 0x7da18f00e560> if call[name[filters].get, parameter[constant[sv_len]]] begin[:] variable[sv_len] assign[=] call[name[int], parameter[call[name[filters]][constant[sv_len]]]] variable[filtered_variants] assign[=] <ast.GeneratorExp object at 0x7da18f00fcd0> variable[variants] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f00ca90>, <ast.Name object at 0x7da18f00e4d0>]]] in starred[call[name[enumerate], parameter[name[filtered_variants]]]] begin[:] if compare[name[index] greater_or_equal[>=] name[skip]] begin[:] if compare[name[index] less[<] name[limit]] begin[:] call[name[variants].append, parameter[name[variant_obj]]] return[call[name[Results], parameter[name[variants], call[name[len], parameter[name[variants]]]]]]
keyword[def] identifier[variants] ( identifier[self] , identifier[case_id] , identifier[skip] = literal[int] , identifier[count] = literal[int] , identifier[filters] = keyword[None] ): literal[string] identifier[filters] = identifier[filters] keyword[or] {} identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[case_id] )) identifier[limit] = identifier[count] + identifier[skip] identifier[gemini_query] = identifier[filters] . identifier[get] ( literal[string] ) keyword[or] literal[string] identifier[any_filter] = keyword[False] keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[frequency] = identifier[filters] [ literal[string] ] identifier[extra_info] = literal[string] literal[string] . identifier[format] ( identifier[frequency] ) identifier[gemini_query] = identifier[self] . identifier[build_gemini_query] ( identifier[gemini_query] , identifier[extra_info] ) keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[cadd_score] = identifier[filters] [ literal[string] ] identifier[extra_info] = literal[string] . identifier[format] ( identifier[cadd_score] ) identifier[gemini_query] = identifier[self] . identifier[build_gemini_query] ( identifier[gemini_query] , identifier[extra_info] ) keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[gene_list] =[ identifier[gene_id] . identifier[strip] () keyword[for] identifier[gene_id] keyword[in] identifier[filters] [ literal[string] ]] identifier[gene_string] = literal[string] keyword[for] identifier[index] , identifier[gene_id] keyword[in] identifier[enumerate] ( identifier[gene_list] ): keyword[if] identifier[index] == literal[int] : identifier[gene_string] += literal[string] . identifier[format] ( identifier[gene_id] ) keyword[else] : identifier[gene_string] += literal[string] . identifier[format] ( identifier[gene_id] ) identifier[gene_string] += literal[string] identifier[gemini_query] = identifier[self] . identifier[build_gemini_query] ( identifier[gemini_query] , identifier[gene_string] ) keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[chrom] = identifier[filters] [ literal[string] ][ literal[string] ] keyword[if] keyword[not] identifier[chrom] . identifier[startswith] ( literal[string] ): identifier[chrom] = literal[string] . identifier[format] ( identifier[chrom] ) identifier[range_string] = literal[string] literal[string] literal[string] . identifier[format] ( identifier[chrom] , identifier[filters] [ literal[string] ][ literal[string] ], identifier[filters] [ literal[string] ][ literal[string] ] ) identifier[gemini_query] = identifier[self] . identifier[build_gemini_query] ( identifier[gemini_query] , identifier[range_string] ) identifier[filtered_variants] = identifier[self] . identifier[_variants] ( identifier[case_id] = identifier[case_id] , identifier[gemini_query] = identifier[gemini_query] , ) keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[consequences] = identifier[set] ( identifier[filters] [ literal[string] ]) identifier[filtered_variants] =( identifier[variant] keyword[for] identifier[variant] keyword[in] identifier[filtered_variants] keyword[if] identifier[set] ( identifier[variant] . identifier[consequences] ). identifier[intersection] ( identifier[consequences] )) keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[severities] = identifier[set] ([ identifier[severity] . identifier[strip] () keyword[for] identifier[severity] keyword[in] identifier[filters] [ literal[string] ]]) identifier[new_filtered_variants] =[] identifier[filtered_variants] =( identifier[variant] keyword[for] identifier[variant] keyword[in] identifier[filtered_variants] keyword[if] identifier[set] ([ identifier[variant] . identifier[impact_severity] ]). identifier[intersection] ( identifier[severities] )) keyword[if] identifier[filters] . identifier[get] ( literal[string] ): identifier[sv_len] = identifier[int] ( identifier[filters] [ literal[string] ]) identifier[filtered_variants] =( identifier[variant] keyword[for] identifier[variant] keyword[in] identifier[filtered_variants] keyword[if] identifier[variant] . identifier[sv_len] >= identifier[sv_len] ) identifier[variants] =[] keyword[for] identifier[index] , identifier[variant_obj] keyword[in] identifier[enumerate] ( identifier[filtered_variants] ): keyword[if] identifier[index] >= identifier[skip] : keyword[if] identifier[index] < identifier[limit] : identifier[variants] . identifier[append] ( identifier[variant_obj] ) keyword[else] : keyword[break] keyword[return] identifier[Results] ( identifier[variants] , identifier[len] ( identifier[variants] ))
def variants(self, case_id, skip=0, count=1000, filters=None):
    """Return count variants for a case.

    This function needs to have different behaviours based on what is asked
    for. It should always try to give minimal information back to improve on
    speed. For example, if consequences are not asked for we will not build
    all transcripts. If the variants are not SVs we will not build SV
    coordinates.

    So the minimal case is to just show what is asked for in the variants
    interface.

    Args:
        case_id (str): A gemini db
        skip (int): Skip first variants
        count (int): The number of variants to return
        filters (dict): A dictionary with filters. Currently this will
        look like: {
            gene_ids: [] (list of hgnc ids),
            frequency: None (float),
            cadd: None (float),
            consequence: [] (list of consequences),
            impact_severities: [] (list of consequences),
            genetic_models: [] (list of genetic models)
        }

    Returns:
        puzzle.constants.Results : Named tuple with variants and
                                   nr_of_variants
    """
    filters = filters or {}
    logger.debug('Looking for variants in {0}'.format(case_id))
    limit = count + skip
    gemini_query = filters.get('gemini_query') or 'SELECT * from variants v'
    any_filter = False
    if filters.get('frequency'):
        frequency = filters['frequency']
        extra_info = '(v.max_aaf_all < {0} or v.max_aaf_all is Null)'.format(frequency)
        gemini_query = self.build_gemini_query(gemini_query, extra_info) # depends on [control=['if'], data=[]]
    if filters.get('cadd'):
        cadd_score = filters['cadd']
        extra_info = '(v.cadd_scaled > {0})'.format(cadd_score)
        gemini_query = self.build_gemini_query(gemini_query, extra_info) # depends on [control=['if'], data=[]]
    if filters.get('gene_ids'):
        gene_list = [gene_id.strip() for gene_id in filters['gene_ids']]
        gene_string = 'v.gene in ('
        for (index, gene_id) in enumerate(gene_list):
            if index == 0:
                gene_string += "'{0}'".format(gene_id) # depends on [control=['if'], data=[]]
            else:
                gene_string += ", '{0}'".format(gene_id) # depends on [control=['for'], data=[]]
        gene_string += ')'
        gemini_query = self.build_gemini_query(gemini_query, gene_string) # depends on [control=['if'], data=[]]
    if filters.get('range'):
        chrom = filters['range']['chromosome']
        if not chrom.startswith('chr'):
            chrom = 'chr{0}'.format(chrom) # depends on [control=['if'], data=[]]
        range_string = "v.chrom = '{0}' AND ((v.start BETWEEN {1} AND {2}) OR (v.end BETWEEN {1} AND {2}))".format(chrom, filters['range']['start'], filters['range']['end'])
        gemini_query = self.build_gemini_query(gemini_query, range_string) # depends on [control=['if'], data=[]]
    filtered_variants = self._variants(case_id=case_id, gemini_query=gemini_query)
    if filters.get('consequence'):
        consequences = set(filters['consequence'])
        filtered_variants = (variant for variant in filtered_variants if set(variant.consequences).intersection(consequences)) # depends on [control=['if'], data=[]]
    if filters.get('impact_severities'):
        severities = set([severity.strip() for severity in filters['impact_severities']])
        new_filtered_variants = []
        filtered_variants = (variant for variant in filtered_variants if set([variant.impact_severity]).intersection(severities)) # depends on [control=['if'], data=[]]
    if filters.get('sv_len'):
        sv_len = int(filters['sv_len'])
        filtered_variants = (variant for variant in filtered_variants if variant.sv_len >= sv_len) # depends on [control=['if'], data=[]]
    variants = []
    for (index, variant_obj) in enumerate(filtered_variants):
        if index >= skip:
            if index < limit:
                variants.append(variant_obj) # depends on [control=['if'], data=[]]
            else:
                break # depends on [control=['if'], data=['index']] # depends on [control=['for'], data=[]]
    return Results(variants, len(variants))
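A hedged sketch of a filters dict using the keys the method actually reads; the plugin object and database name are hypothetical.

filters = {
    'frequency': 0.01,       # becomes a v.max_aaf_all cutoff
    'cadd': 20,              # becomes a v.cadd_scaled cutoff
    'gene_ids': ['ADK'],     # becomes a v.gene in (...) clause
    'range': {'chromosome': '1', 'start': 156084708, 'end': 156109880},
    'impact_severities': ['HIGH'],  # applied after the gemini query
}
# result = plugin.variants('my_case.db', skip=0, count=100, filters=filters)
# result.variants, result.nr_of_variants  # the Results named tuple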
def images_grouped_by_type(self):
        """
        :return: A generator yielding 2-tuples of (type, [images]) where
            adjacent images that share the same type are grouped together.
        """
        type = -1
        images = []
        for wc in self:
            if wc.type != type:
                if images:
                    yield (type, images)
                type = wc.type
                images = []
            images.append(wc.image)
        if images:
            yield (type, images)
def function[images_grouped_by_type, parameter[self]]: constant[ :return: A generator yielding 2-tuples of (type, [images]) where adjacent images that share the same type are grouped together. ] variable[type] assign[=] <ast.UnaryOp object at 0x7da1b0ebce80> variable[images] assign[=] list[[]] for taget[name[wc]] in starred[name[self]] begin[:] if compare[name[wc].type not_equal[!=] name[type]] begin[:] if name[images] begin[:] <ast.Yield object at 0x7da1b0ebce50> variable[type] assign[=] name[wc].type variable[images] assign[=] list[[]] call[name[images].append, parameter[name[wc].image]] if name[images] begin[:] <ast.Yield object at 0x7da1b0ebdcc0>
keyword[def] identifier[images_grouped_by_type] ( identifier[self] ): literal[string] identifier[type] =- literal[int] identifier[images] =[] keyword[for] identifier[wc] keyword[in] identifier[self] : keyword[if] identifier[wc] . identifier[type] != identifier[type] : keyword[if] identifier[images] : keyword[yield] ( identifier[type] , identifier[images] ) identifier[type] = identifier[wc] . identifier[type] identifier[images] =[] identifier[images] . identifier[append] ( identifier[wc] . identifier[image] ) keyword[if] identifier[images] : keyword[yield] ( identifier[type] , identifier[images] )
def images_grouped_by_type(self):
    """
    :return: A generator yielding 2-tuples of (type, [images]) where
        adjacent images that share the same type are grouped together.
    """
    type = -1
    images = []
    for wc in self:
        if wc.type != type:
            if images:
                yield (type, images) # depends on [control=['if'], data=[]]
            type = wc.type
            images = [] # depends on [control=['if'], data=['type']]
        images.append(wc.image) # depends on [control=['for'], data=['wc']]
    if images:
        yield (type, images) # depends on [control=['if'], data=[]]
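A tiny sketch of the grouping behaviour. Since the method only iterates self, it can be exercised over a plain list of objects that carry type and image attributes.

from collections import namedtuple

WC = namedtuple('WC', 'type image')
items = [WC(1, 'a.png'), WC(1, 'b.png'), WC(2, 'c.png')]

# list(images_grouped_by_type(items))
# -> [(1, ['a.png', 'b.png']), (2, ['c.png'])]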
def DVSFile(ID, season, cadence='lc'): ''' Returns the name of the DVS PDF for a given target. :param ID: The target ID :param int season: The target season number :param str cadence: The cadence type. Default `lc` ''' if cadence == 'sc': strcadence = '_sc' else: strcadence = '' return 'hlsp_everest_k2_llc_%d-c%02d_kepler_v%s_dvs%s.pdf' \ % (ID, season, EVEREST_MAJOR_MINOR, strcadence)
def function[DVSFile, parameter[ID, season, cadence]]: constant[ Returns the name of the DVS PDF for a given target. :param ID: The target ID :param int season: The target season number :param str cadence: The cadence type. Default `lc` ] if compare[name[cadence] equal[==] constant[sc]] begin[:] variable[strcadence] assign[=] constant[_sc] return[binary_operation[constant[hlsp_everest_k2_llc_%d-c%02d_kepler_v%s_dvs%s.pdf] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e8bf40>, <ast.Name object at 0x7da1b0e8bfd0>, <ast.Name object at 0x7da1b0e8ab90>, <ast.Name object at 0x7da1b0e8b7f0>]]]]
keyword[def] identifier[DVSFile] ( identifier[ID] , identifier[season] , identifier[cadence] = literal[string] ): literal[string] keyword[if] identifier[cadence] == literal[string] : identifier[strcadence] = literal[string] keyword[else] : identifier[strcadence] = literal[string] keyword[return] literal[string] %( identifier[ID] , identifier[season] , identifier[EVEREST_MAJOR_MINOR] , identifier[strcadence] )
def DVSFile(ID, season, cadence='lc'): """ Returns the name of the DVS PDF for a given target. :param ID: The target ID :param int season: The target season number :param str cadence: The cadence type. Default `lc` """ if cadence == 'sc': strcadence = '_sc' # depends on [control=['if'], data=[]] else: strcadence = '' return 'hlsp_everest_k2_llc_%d-c%02d_kepler_v%s_dvs%s.pdf' % (ID, season, EVEREST_MAJOR_MINOR, strcadence)
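For example, assuming the module-level EVEREST_MAJOR_MINOR constant is '2.0' (an assumption; the real value comes from the package):

# DVSFile(201367065, 1)
#   -> 'hlsp_everest_k2_llc_201367065-c01_kepler_v2.0_dvs.pdf'
# DVSFile(201367065, 1, cadence='sc')
#   -> 'hlsp_everest_k2_llc_201367065-c01_kepler_v2.0_dvs_sc.pdf'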
def gain_chart(df, col_true=None, col_pred=None, col_scores=None, pos_label=1): r""" Compute positive proportion, true positive rate (TPR) and threshold from predicted DataFrame. The trace can be plotted as a cumulative gain chart Note that this method will trigger the defined flow to execute. :param df: predicted data frame :type df: DataFrame :param pos_label: positive label :type pos_label: str :param col_true: true column :type col_true: str :param col_pred: predicted column, 'prediction_result' if absent. :type col_pred: str :param col_scores: score column, 'prediction_score' if absent. :type col_scores: str :return: positive proportion, true positive rate and threshold, in numpy array format. :Example: >>> import matplotlib.pyplot as plt >>> depth, tpr, thresh = gain_chart(predicted) >>> plt.plot(depth, tpr) """ if not col_pred: col_pred = get_field_name_by_role(df, FieldRole.PREDICTED_CLASS) if not col_scores: col_scores = get_field_name_by_role(df, FieldRole.PREDICTED_SCORE) thresh, tp, fn, tn, fp = _run_roc_node(df, pos_label, col_true, col_pred, col_scores) depth = (tp + fp) * 1.0 / (tp + fp + tn + fn) tpr = tp * 1.0 / (tp + fn) gain_result = namedtuple('GainChartResult', 'depth tpr thresh') return gain_result(depth=depth, tpr=tpr, thresh=thresh)
def function[gain_chart, parameter[df, col_true, col_pred, col_scores, pos_label]]: constant[ Compute positive proportion, true positive rate (TPR) and threshold from predicted DataFrame. The trace can be plotted as a cumulative gain chart Note that this method will trigger the defined flow to execute. :param df: predicted data frame :type df: DataFrame :param pos_label: positive label :type pos_label: str :param col_true: true column :type col_true: str :param col_pred: predicted column, 'prediction_result' if absent. :type col_pred: str :param col_scores: score column, 'prediction_score' if absent. :type col_scores: str :return: positive proportion, true positive rate and threshold, in numpy array format. :Example: >>> import matplotlib.pyplot as plt >>> depth, tpr, thresh = gain_chart(predicted) >>> plt.plot(depth, tpr) ] if <ast.UnaryOp object at 0x7da20e74bb80> begin[:] variable[col_pred] assign[=] call[name[get_field_name_by_role], parameter[name[df], name[FieldRole].PREDICTED_CLASS]] if <ast.UnaryOp object at 0x7da20e74afe0> begin[:] variable[col_scores] assign[=] call[name[get_field_name_by_role], parameter[name[df], name[FieldRole].PREDICTED_SCORE]] <ast.Tuple object at 0x7da20cabfcd0> assign[=] call[name[_run_roc_node], parameter[name[df], name[pos_label], name[col_true], name[col_pred], name[col_scores]]] variable[depth] assign[=] binary_operation[binary_operation[binary_operation[name[tp] + name[fp]] * constant[1.0]] / binary_operation[binary_operation[binary_operation[name[tp] + name[fp]] + name[tn]] + name[fn]]] variable[tpr] assign[=] binary_operation[binary_operation[name[tp] * constant[1.0]] / binary_operation[name[tp] + name[fn]]] variable[gain_result] assign[=] call[name[namedtuple], parameter[constant[GainChartResult], constant[depth tpr thresh]]] return[call[name[gain_result], parameter[]]]
keyword[def] identifier[gain_chart] ( identifier[df] , identifier[col_true] = keyword[None] , identifier[col_pred] = keyword[None] , identifier[col_scores] = keyword[None] , identifier[pos_label] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[col_pred] : identifier[col_pred] = identifier[get_field_name_by_role] ( identifier[df] , identifier[FieldRole] . identifier[PREDICTED_CLASS] ) keyword[if] keyword[not] identifier[col_scores] : identifier[col_scores] = identifier[get_field_name_by_role] ( identifier[df] , identifier[FieldRole] . identifier[PREDICTED_SCORE] ) identifier[thresh] , identifier[tp] , identifier[fn] , identifier[tn] , identifier[fp] = identifier[_run_roc_node] ( identifier[df] , identifier[pos_label] , identifier[col_true] , identifier[col_pred] , identifier[col_scores] ) identifier[depth] =( identifier[tp] + identifier[fp] )* literal[int] /( identifier[tp] + identifier[fp] + identifier[tn] + identifier[fn] ) identifier[tpr] = identifier[tp] * literal[int] /( identifier[tp] + identifier[fn] ) identifier[gain_result] = identifier[namedtuple] ( literal[string] , literal[string] ) keyword[return] identifier[gain_result] ( identifier[depth] = identifier[depth] , identifier[tpr] = identifier[tpr] , identifier[thresh] = identifier[thresh] )
def gain_chart(df, col_true=None, col_pred=None, col_scores=None, pos_label=1): """ Compute positive proportion, true positive rate (TPR) and threshold from predicted DataFrame. The trace can be plotted as a cumulative gain chart Note that this method will trigger the defined flow to execute. :param df: predicted data frame :type df: DataFrame :param pos_label: positive label :type pos_label: str :param col_true: true column :type col_true: str :param col_pred: predicted column, 'prediction_result' if absent. :type col_pred: str :param col_scores: score column, 'prediction_score' if absent. :type col_scores: str :return: positive proportion, true positive rate and threshold, in numpy array format. :Example: >>> import matplotlib.pyplot as plt >>> depth, tpr, thresh = gain_chart(predicted) >>> plt.plot(depth, tpr) """ if not col_pred: col_pred = get_field_name_by_role(df, FieldRole.PREDICTED_CLASS) # depends on [control=['if'], data=[]] if not col_scores: col_scores = get_field_name_by_role(df, FieldRole.PREDICTED_SCORE) # depends on [control=['if'], data=[]] (thresh, tp, fn, tn, fp) = _run_roc_node(df, pos_label, col_true, col_pred, col_scores) depth = (tp + fp) * 1.0 / (tp + fp + tn + fn) tpr = tp * 1.0 / (tp + fn) gain_result = namedtuple('GainChartResult', 'depth tpr thresh') return gain_result(depth=depth, tpr=tpr, thresh=thresh)
def noise2d(self, x, y): """ Generate 2D OpenSimplex noise from X,Y coordinates. """ # Place input coordinates onto grid. stretch_offset = (x + y) * STRETCH_CONSTANT_2D xs = x + stretch_offset ys = y + stretch_offset # Floor to get grid coordinates of rhombus (stretched square) super-cell origin. xsb = floor(xs) ysb = floor(ys) # Skew out to get actual coordinates of rhombus origin. We'll need these later. squish_offset = (xsb + ysb) * SQUISH_CONSTANT_2D xb = xsb + squish_offset yb = ysb + squish_offset # Compute grid coordinates relative to rhombus origin. xins = xs - xsb yins = ys - ysb # Sum those together to get a value that determines which region we're in. in_sum = xins + yins # Positions relative to origin point. dx0 = x - xb dy0 = y - yb value = 0 # Contribution (1,0) dx1 = dx0 - 1 - SQUISH_CONSTANT_2D dy1 = dy0 - 0 - SQUISH_CONSTANT_2D attn1 = 2 - dx1 * dx1 - dy1 * dy1 extrapolate = self._extrapolate2d if attn1 > 0: attn1 *= attn1 value += attn1 * attn1 * extrapolate(xsb + 1, ysb + 0, dx1, dy1) # Contribution (0,1) dx2 = dx0 - 0 - SQUISH_CONSTANT_2D dy2 = dy0 - 1 - SQUISH_CONSTANT_2D attn2 = 2 - dx2 * dx2 - dy2 * dy2 if attn2 > 0: attn2 *= attn2 value += attn2 * attn2 * extrapolate(xsb + 0, ysb + 1, dx2, dy2) if in_sum <= 1: # We're inside the triangle (2-Simplex) at (0,0) zins = 1 - in_sum if zins > xins or zins > yins: # (0,0) is one of the closest two triangular vertices if xins > yins: xsv_ext = xsb + 1 ysv_ext = ysb - 1 dx_ext = dx0 - 1 dy_ext = dy0 + 1 else: xsv_ext = xsb - 1 ysv_ext = ysb + 1 dx_ext = dx0 + 1 dy_ext = dy0 - 1 else: # (1,0) and (0,1) are the closest two vertices. xsv_ext = xsb + 1 ysv_ext = ysb + 1 dx_ext = dx0 - 1 - 2 * SQUISH_CONSTANT_2D dy_ext = dy0 - 1 - 2 * SQUISH_CONSTANT_2D else: # We're inside the triangle (2-Simplex) at (1,1) zins = 2 - in_sum if zins < xins or zins < yins: # (0,0) is one of the closest two triangular vertices if xins > yins: xsv_ext = xsb + 2 ysv_ext = ysb + 0 dx_ext = dx0 - 2 - 2 * SQUISH_CONSTANT_2D dy_ext = dy0 + 0 - 2 * SQUISH_CONSTANT_2D else: xsv_ext = xsb + 0 ysv_ext = ysb + 2 dx_ext = dx0 + 0 - 2 * SQUISH_CONSTANT_2D dy_ext = dy0 - 2 - 2 * SQUISH_CONSTANT_2D else: # (1,0) and (0,1) are the closest two vertices. dx_ext = dx0 dy_ext = dy0 xsv_ext = xsb ysv_ext = ysb xsb += 1 ysb += 1 dx0 = dx0 - 1 - 2 * SQUISH_CONSTANT_2D dy0 = dy0 - 1 - 2 * SQUISH_CONSTANT_2D # Contribution (0,0) or (1,1) attn0 = 2 - dx0 * dx0 - dy0 * dy0 if attn0 > 0: attn0 *= attn0 value += attn0 * attn0 * extrapolate(xsb, ysb, dx0, dy0) # Extra Vertex attn_ext = 2 - dx_ext * dx_ext - dy_ext * dy_ext if attn_ext > 0: attn_ext *= attn_ext value += attn_ext * attn_ext * extrapolate(xsv_ext, ysv_ext, dx_ext, dy_ext) return value / NORM_CONSTANT_2D
def function[noise2d, parameter[self, x, y]]: constant[ Generate 2D OpenSimplex noise from X,Y coordinates. ] variable[stretch_offset] assign[=] binary_operation[binary_operation[name[x] + name[y]] * name[STRETCH_CONSTANT_2D]] variable[xs] assign[=] binary_operation[name[x] + name[stretch_offset]] variable[ys] assign[=] binary_operation[name[y] + name[stretch_offset]] variable[xsb] assign[=] call[name[floor], parameter[name[xs]]] variable[ysb] assign[=] call[name[floor], parameter[name[ys]]] variable[squish_offset] assign[=] binary_operation[binary_operation[name[xsb] + name[ysb]] * name[SQUISH_CONSTANT_2D]] variable[xb] assign[=] binary_operation[name[xsb] + name[squish_offset]] variable[yb] assign[=] binary_operation[name[ysb] + name[squish_offset]] variable[xins] assign[=] binary_operation[name[xs] - name[xsb]] variable[yins] assign[=] binary_operation[name[ys] - name[ysb]] variable[in_sum] assign[=] binary_operation[name[xins] + name[yins]] variable[dx0] assign[=] binary_operation[name[x] - name[xb]] variable[dy0] assign[=] binary_operation[name[y] - name[yb]] variable[value] assign[=] constant[0] variable[dx1] assign[=] binary_operation[binary_operation[name[dx0] - constant[1]] - name[SQUISH_CONSTANT_2D]] variable[dy1] assign[=] binary_operation[binary_operation[name[dy0] - constant[0]] - name[SQUISH_CONSTANT_2D]] variable[attn1] assign[=] binary_operation[binary_operation[constant[2] - binary_operation[name[dx1] * name[dx1]]] - binary_operation[name[dy1] * name[dy1]]] variable[extrapolate] assign[=] name[self]._extrapolate2d if compare[name[attn1] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b26af640> <ast.AugAssign object at 0x7da1b26ac3a0> variable[dx2] assign[=] binary_operation[binary_operation[name[dx0] - constant[0]] - name[SQUISH_CONSTANT_2D]] variable[dy2] assign[=] binary_operation[binary_operation[name[dy0] - constant[1]] - name[SQUISH_CONSTANT_2D]] variable[attn2] assign[=] binary_operation[binary_operation[constant[2] - binary_operation[name[dx2] * name[dx2]]] - binary_operation[name[dy2] * name[dy2]]] if compare[name[attn2] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da18c4cc100> <ast.AugAssign object at 0x7da18c4ce9e0> if compare[name[in_sum] less_or_equal[<=] constant[1]] begin[:] variable[zins] assign[=] binary_operation[constant[1] - name[in_sum]] if <ast.BoolOp object at 0x7da18c4cc880> begin[:] if compare[name[xins] greater[>] name[yins]] begin[:] variable[xsv_ext] assign[=] binary_operation[name[xsb] + constant[1]] variable[ysv_ext] assign[=] binary_operation[name[ysb] - constant[1]] variable[dx_ext] assign[=] binary_operation[name[dx0] - constant[1]] variable[dy_ext] assign[=] binary_operation[name[dy0] + constant[1]] variable[attn0] assign[=] binary_operation[binary_operation[constant[2] - binary_operation[name[dx0] * name[dx0]]] - binary_operation[name[dy0] * name[dy0]]] if compare[name[attn0] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da20e9b39a0> <ast.AugAssign object at 0x7da20e9b08e0> variable[attn_ext] assign[=] binary_operation[binary_operation[constant[2] - binary_operation[name[dx_ext] * name[dx_ext]]] - binary_operation[name[dy_ext] * name[dy_ext]]] if compare[name[attn_ext] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da20e9b2950> <ast.AugAssign object at 0x7da20e9b37c0> return[binary_operation[name[value] / name[NORM_CONSTANT_2D]]]
keyword[def] identifier[noise2d] ( identifier[self] , identifier[x] , identifier[y] ): literal[string] identifier[stretch_offset] =( identifier[x] + identifier[y] )* identifier[STRETCH_CONSTANT_2D] identifier[xs] = identifier[x] + identifier[stretch_offset] identifier[ys] = identifier[y] + identifier[stretch_offset] identifier[xsb] = identifier[floor] ( identifier[xs] ) identifier[ysb] = identifier[floor] ( identifier[ys] ) identifier[squish_offset] =( identifier[xsb] + identifier[ysb] )* identifier[SQUISH_CONSTANT_2D] identifier[xb] = identifier[xsb] + identifier[squish_offset] identifier[yb] = identifier[ysb] + identifier[squish_offset] identifier[xins] = identifier[xs] - identifier[xsb] identifier[yins] = identifier[ys] - identifier[ysb] identifier[in_sum] = identifier[xins] + identifier[yins] identifier[dx0] = identifier[x] - identifier[xb] identifier[dy0] = identifier[y] - identifier[yb] identifier[value] = literal[int] identifier[dx1] = identifier[dx0] - literal[int] - identifier[SQUISH_CONSTANT_2D] identifier[dy1] = identifier[dy0] - literal[int] - identifier[SQUISH_CONSTANT_2D] identifier[attn1] = literal[int] - identifier[dx1] * identifier[dx1] - identifier[dy1] * identifier[dy1] identifier[extrapolate] = identifier[self] . identifier[_extrapolate2d] keyword[if] identifier[attn1] > literal[int] : identifier[attn1] *= identifier[attn1] identifier[value] += identifier[attn1] * identifier[attn1] * identifier[extrapolate] ( identifier[xsb] + literal[int] , identifier[ysb] + literal[int] , identifier[dx1] , identifier[dy1] ) identifier[dx2] = identifier[dx0] - literal[int] - identifier[SQUISH_CONSTANT_2D] identifier[dy2] = identifier[dy0] - literal[int] - identifier[SQUISH_CONSTANT_2D] identifier[attn2] = literal[int] - identifier[dx2] * identifier[dx2] - identifier[dy2] * identifier[dy2] keyword[if] identifier[attn2] > literal[int] : identifier[attn2] *= identifier[attn2] identifier[value] += identifier[attn2] * identifier[attn2] * identifier[extrapolate] ( identifier[xsb] + literal[int] , identifier[ysb] + literal[int] , identifier[dx2] , identifier[dy2] ) keyword[if] identifier[in_sum] <= literal[int] : identifier[zins] = literal[int] - identifier[in_sum] keyword[if] identifier[zins] > identifier[xins] keyword[or] identifier[zins] > identifier[yins] : keyword[if] identifier[xins] > identifier[yins] : identifier[xsv_ext] = identifier[xsb] + literal[int] identifier[ysv_ext] = identifier[ysb] - literal[int] identifier[dx_ext] = identifier[dx0] - literal[int] identifier[dy_ext] = identifier[dy0] + literal[int] keyword[else] : identifier[xsv_ext] = identifier[xsb] - literal[int] identifier[ysv_ext] = identifier[ysb] + literal[int] identifier[dx_ext] = identifier[dx0] + literal[int] identifier[dy_ext] = identifier[dy0] - literal[int] keyword[else] : identifier[xsv_ext] = identifier[xsb] + literal[int] identifier[ysv_ext] = identifier[ysb] + literal[int] identifier[dx_ext] = identifier[dx0] - literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] identifier[dy_ext] = identifier[dy0] - literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] keyword[else] : identifier[zins] = literal[int] - identifier[in_sum] keyword[if] identifier[zins] < identifier[xins] keyword[or] identifier[zins] < identifier[yins] : keyword[if] identifier[xins] > identifier[yins] : identifier[xsv_ext] = identifier[xsb] + literal[int] identifier[ysv_ext] = identifier[ysb] + literal[int] identifier[dx_ext] = identifier[dx0] - literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] identifier[dy_ext] = identifier[dy0] + literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] keyword[else] : identifier[xsv_ext] = identifier[xsb] + literal[int] identifier[ysv_ext] = identifier[ysb] + literal[int] identifier[dx_ext] = identifier[dx0] + literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] identifier[dy_ext] = identifier[dy0] - literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] keyword[else] : identifier[dx_ext] = identifier[dx0] identifier[dy_ext] = identifier[dy0] identifier[xsv_ext] = identifier[xsb] identifier[ysv_ext] = identifier[ysb] identifier[xsb] += literal[int] identifier[ysb] += literal[int] identifier[dx0] = identifier[dx0] - literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] identifier[dy0] = identifier[dy0] - literal[int] - literal[int] * identifier[SQUISH_CONSTANT_2D] identifier[attn0] = literal[int] - identifier[dx0] * identifier[dx0] - identifier[dy0] * identifier[dy0] keyword[if] identifier[attn0] > literal[int] : identifier[attn0] *= identifier[attn0] identifier[value] += identifier[attn0] * identifier[attn0] * identifier[extrapolate] ( identifier[xsb] , identifier[ysb] , identifier[dx0] , identifier[dy0] ) identifier[attn_ext] = literal[int] - identifier[dx_ext] * identifier[dx_ext] - identifier[dy_ext] * identifier[dy_ext] keyword[if] identifier[attn_ext] > literal[int] : identifier[attn_ext] *= identifier[attn_ext] identifier[value] += identifier[attn_ext] * identifier[attn_ext] * identifier[extrapolate] ( identifier[xsv_ext] , identifier[ysv_ext] , identifier[dx_ext] , identifier[dy_ext] ) keyword[return] identifier[value] / identifier[NORM_CONSTANT_2D]
def noise2d(self, x, y): """ Generate 2D OpenSimplex noise from X,Y coordinates. """ # Place input coordinates onto grid. stretch_offset = (x + y) * STRETCH_CONSTANT_2D xs = x + stretch_offset ys = y + stretch_offset # Floor to get grid coordinates of rhombus (stretched square) super-cell origin. xsb = floor(xs) ysb = floor(ys) # Skew out to get actual coordinates of rhombus origin. We'll need these later. squish_offset = (xsb + ysb) * SQUISH_CONSTANT_2D xb = xsb + squish_offset yb = ysb + squish_offset # Compute grid coordinates relative to rhombus origin. xins = xs - xsb yins = ys - ysb # Sum those together to get a value that determines which region we're in. in_sum = xins + yins # Positions relative to origin point. dx0 = x - xb dy0 = y - yb value = 0 # Contribution (1,0) dx1 = dx0 - 1 - SQUISH_CONSTANT_2D dy1 = dy0 - 0 - SQUISH_CONSTANT_2D attn1 = 2 - dx1 * dx1 - dy1 * dy1 extrapolate = self._extrapolate2d if attn1 > 0: attn1 *= attn1 value += attn1 * attn1 * extrapolate(xsb + 1, ysb + 0, dx1, dy1) # depends on [control=['if'], data=['attn1']] # Contribution (0,1) dx2 = dx0 - 0 - SQUISH_CONSTANT_2D dy2 = dy0 - 1 - SQUISH_CONSTANT_2D attn2 = 2 - dx2 * dx2 - dy2 * dy2 if attn2 > 0: attn2 *= attn2 value += attn2 * attn2 * extrapolate(xsb + 0, ysb + 1, dx2, dy2) # depends on [control=['if'], data=['attn2']] if in_sum <= 1: # We're inside the triangle (2-Simplex) at (0,0) zins = 1 - in_sum if zins > xins or zins > yins: # (0,0) is one of the closest two triangular vertices if xins > yins: xsv_ext = xsb + 1 ysv_ext = ysb - 1 dx_ext = dx0 - 1 dy_ext = dy0 + 1 # depends on [control=['if'], data=[]] else: xsv_ext = xsb - 1 ysv_ext = ysb + 1 dx_ext = dx0 + 1 dy_ext = dy0 - 1 # depends on [control=['if'], data=[]] else: # (1,0) and (0,1) are the closest two vertices. xsv_ext = xsb + 1 ysv_ext = ysb + 1 dx_ext = dx0 - 1 - 2 * SQUISH_CONSTANT_2D dy_ext = dy0 - 1 - 2 * SQUISH_CONSTANT_2D # depends on [control=['if'], data=['in_sum']] else: # We're inside the triangle (2-Simplex) at (1,1) zins = 2 - in_sum if zins < xins or zins < yins: # (0,0) is one of the closest two triangular vertices if xins > yins: xsv_ext = xsb + 2 ysv_ext = ysb + 0 dx_ext = dx0 - 2 - 2 * SQUISH_CONSTANT_2D dy_ext = dy0 + 0 - 2 * SQUISH_CONSTANT_2D # depends on [control=['if'], data=[]] else: xsv_ext = xsb + 0 ysv_ext = ysb + 2 dx_ext = dx0 + 0 - 2 * SQUISH_CONSTANT_2D dy_ext = dy0 - 2 - 2 * SQUISH_CONSTANT_2D # depends on [control=['if'], data=[]] else: # (1,0) and (0,1) are the closest two vertices. dx_ext = dx0 dy_ext = dy0 xsv_ext = xsb ysv_ext = ysb xsb += 1 ysb += 1 dx0 = dx0 - 1 - 2 * SQUISH_CONSTANT_2D dy0 = dy0 - 1 - 2 * SQUISH_CONSTANT_2D # Contribution (0,0) or (1,1) attn0 = 2 - dx0 * dx0 - dy0 * dy0 if attn0 > 0: attn0 *= attn0 value += attn0 * attn0 * extrapolate(xsb, ysb, dx0, dy0) # depends on [control=['if'], data=['attn0']] # Extra Vertex attn_ext = 2 - dx_ext * dx_ext - dy_ext * dy_ext if attn_ext > 0: attn_ext *= attn_ext value += attn_ext * attn_ext * extrapolate(xsv_ext, ysv_ext, dx_ext, dy_ext) # depends on [control=['if'], data=['attn_ext']] return value / NORM_CONSTANT_2D
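A short sketch of calling noise2d above, assuming the enclosing class is constructed with a seed the way the opensimplex package does (the constructor shown is an assumption):

gen = OpenSimplex(seed=42)  # hypothetical seeded generator owning noise2d

# Deterministic for a given seed; values fall roughly in [-1, 1].
samples = [gen.noise2d(x * 0.1, 0.0) for x in range(5)]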
def flatten_fft(scale=1.0): """ Produces a nicer graph, I'm not sure if this is correct """ _len = len(audio.spectrogram) for i, v in enumerate(audio.spectrogram): yield scale * (i * v) / _len
def function[flatten_fft, parameter[scale]]: constant[ Produces a nicer graph, I'm not sure if this is correct ] variable[_len] assign[=] call[name[len], parameter[name[audio].spectrogram]] for taget[tuple[[<ast.Name object at 0x7da18dc049d0>, <ast.Name object at 0x7da18dc07ca0>]]] in starred[call[name[enumerate], parameter[name[audio].spectrogram]]] begin[:] <ast.Yield object at 0x7da18dc05a50>
keyword[def] identifier[flatten_fft] ( identifier[scale] = literal[int] ): literal[string] identifier[_len] = identifier[len] ( identifier[audio] . identifier[spectrogram] ) keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[audio] . identifier[spectrogram] ): keyword[yield] identifier[scale] *( identifier[i] * identifier[v] )/ identifier[_len]
def flatten_fft(scale=1.0): """ Produces a nicer graph, I'm not sure if this is correct """ _len = len(audio.spectrogram) for (i, v) in enumerate(audio.spectrogram): yield (scale * (i * v) / _len) # depends on [control=['for'], data=[]]
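A heavily hedged sketch: flatten_fft reads a module-level audio object with a spectrogram attribute, so a stand-in is injected here purely for illustration.

class _FakeAudio:  # stand-in for the module-level dependency
    spectrogram = [0.5, 0.25, 0.125, 0.0625]

audio = _FakeAudio()
flattened = list(flatten_fft(scale=2.0))  # each bin weighted by its index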
def proxy_for(self, obj): """Returns the ``Proxy`` for the target object, creating it if necessary. :param object obj: The object that will be doubled. :return: The mapped ``Proxy``. :rtype: Proxy """ obj_id = id(obj) if obj_id not in self._proxies: self._proxies[obj_id] = Proxy(obj) return self._proxies[obj_id]
def function[proxy_for, parameter[self, obj]]: constant[Returns the ``Proxy`` for the target object, creating it if necessary. :param object obj: The object that will be doubled. :return: The mapped ``Proxy``. :rtype: Proxy ] variable[obj_id] assign[=] call[name[id], parameter[name[obj]]] if compare[name[obj_id] <ast.NotIn object at 0x7da2590d7190> name[self]._proxies] begin[:] call[name[self]._proxies][name[obj_id]] assign[=] call[name[Proxy], parameter[name[obj]]] return[call[name[self]._proxies][name[obj_id]]]
keyword[def] identifier[proxy_for] ( identifier[self] , identifier[obj] ): literal[string] identifier[obj_id] = identifier[id] ( identifier[obj] ) keyword[if] identifier[obj_id] keyword[not] keyword[in] identifier[self] . identifier[_proxies] : identifier[self] . identifier[_proxies] [ identifier[obj_id] ]= identifier[Proxy] ( identifier[obj] ) keyword[return] identifier[self] . identifier[_proxies] [ identifier[obj_id] ]
def proxy_for(self, obj): """Returns the ``Proxy`` for the target object, creating it if necessary. :param object obj: The object that will be doubled. :return: The mapped ``Proxy``. :rtype: Proxy """ obj_id = id(obj) if obj_id not in self._proxies: self._proxies[obj_id] = Proxy(obj) # depends on [control=['if'], data=['obj_id']] return self._proxies[obj_id]
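The method memoizes one ``Proxy`` per target object, keyed by ``id()``. A minimal sketch of that behaviour, with ``Proxy`` stubbed (the real class belongs to the doubles library). Note that ``id()`` keys are only safe while the target stays alive, since ids can be reused after garbage collection.

# Proxy is stubbed here; only the id()-keyed memoization is being demonstrated.
class Proxy:
    def __init__(self, obj):
        self.obj = obj

class Space:
    def __init__(self):
        self._proxies = {}

    def proxy_for(self, obj):
        obj_id = id(obj)
        if obj_id not in self._proxies:
            self._proxies[obj_id] = Proxy(obj)
        return self._proxies[obj_id]

space = Space()
target = object()
assert space.proxy_for(target) is space.proxy_for(target)  # one Proxy per target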
def get_roles(server_context, container_path=None): """ Gets the set of permissions and roles available from the server :param server_context: A LabKey server context. See utils.create_server_context. :param container_path: :return: """ url = server_context.build_url(security_controller, 'getRoles.api', container_path=container_path) return server_context.make_request(url, None)
def function[get_roles, parameter[server_context, container_path]]: constant[ Gets the set of permissions and roles available from the server :param server_context: A LabKey server context. See utils.create_server_context. :param container_path: :return: ] variable[url] assign[=] call[name[server_context].build_url, parameter[name[security_controller], constant[getRoles.api]]] return[call[name[server_context].make_request, parameter[name[url], constant[None]]]]
keyword[def] identifier[get_roles] ( identifier[server_context] , identifier[container_path] = keyword[None] ): literal[string] identifier[url] = identifier[server_context] . identifier[build_url] ( identifier[security_controller] , literal[string] , identifier[container_path] = identifier[container_path] ) keyword[return] identifier[server_context] . identifier[make_request] ( identifier[url] , keyword[None] )
def get_roles(server_context, container_path=None): """ Gets the set of permissions and roles available from the server :param server_context: A LabKey server context. See utils.create_server_context. :param container_path: :return: """ url = server_context.build_url(security_controller, 'getRoles.api', container_path=container_path) return server_context.make_request(url, None)
def stopAll(self, timeout=10, stop=False):
        """
        Stop all registered Workers. This method assumes that the Worker
        has already received a stop message somehow, and simply joins the
        Process until it dies, as follows:

        1. The Worker is retrieved.
        2. The Worker is joined, and will wait until the Worker exits.
        3. The Worker is unregistered.
        4. If $stop = True, the main process is killed.
        """
        self.logger.info("Stopping all workers...")
        for worker in self.getWorkers():
            process = self.getWorker(worker)
            self.logger.debug("Stopping {0}".format(process.name))
            if process.is_alive():
                process.join(timeout)
                if process.is_alive():
                    self.logger.warning("Failed to stop {0}, terminating".format(process.name))
                    process.terminate()
            self.unregisterWorker(worker)
        self.logger.info("Stopped all workers")
        if stop:
            self.logger.fatal("Committing suicide")
            os._exit(0)
def function[stopAll, parameter[self, timeout, stop]]:
    constant[
        Stop all registered Workers. This method assumes that the Worker has already received a stop message somehow, and simply joins the Process until it dies, as follows:

        1. The Worker is retrieved.
        2. The Worker is joined, and will wait until the Worker exits.
        3. The Worker is unregistered.
        4. If $stop = True, the main process is killed.
        ]
    call[name[self].logger.info, parameter[constant[Stopping all workers...]]]
    for taget[name[worker]] in starred[call[name[self].getWorkers, parameter[]]] begin[:]
        variable[process] assign[=] call[name[self].getWorker, parameter[name[worker]]]
        call[name[self].logger.debug, parameter[call[constant[Stopping {0}].format, parameter[name[process].name]]]]
        if call[name[process].is_alive, parameter[]] begin[:]
            call[name[process].join, parameter[name[timeout]]]
            if call[name[process].is_alive, parameter[]] begin[:]
                call[name[self].logger.warning, parameter[call[constant[Failed to stop {0}, terminating].format, parameter[name[process].name]]]]
                call[name[process].terminate, parameter[]]
        call[name[self].unregisterWorker, parameter[name[worker]]]
    call[name[self].logger.info, parameter[constant[Stopped all workers]]]
    if name[stop] begin[:]
        call[name[self].logger.fatal, parameter[constant[Committing suicide]]]
        call[name[os]._exit, parameter[constant[0]]]
keyword[def] identifier[stopAll] ( identifier[self] , identifier[timeout] = literal[int] , identifier[stop] = keyword[False] ): literal[string] identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) keyword[for] identifier[worker] keyword[in] identifier[self] . identifier[getWorkers] (): identifier[process] = identifier[self] . identifier[getWorker] ( identifier[worker] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[process] . identifier[name] )) keyword[if] identifier[process] . identifier[is_alive] (): identifier[process] . identifier[join] ( identifier[timeout] ) keyword[if] identifier[process] . identifier[is_alive] (): identifier[self] . identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[process] . identifier[name] )) identifier[process] . identifier[terminate] () identifier[self] . identifier[unregisterWorker] ( identifier[worker] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) keyword[if] identifier[stop] : identifier[self] . identifier[logger] . identifier[fatal] ( literal[string] ) identifier[os] . identifier[_exit] ( literal[int] )
def stopAll(self, timeout=10, stop=False):
    """
        Stop all registered Workers. This method assumes that the Worker
        has already received a stop message somehow, and simply joins the
        Process until it dies, as follows:

        1. The Worker is retrieved.
        2. The Worker is joined, and will wait until the Worker exits.
        3. The Worker is unregistered.
        4. If $stop = True, the main process is killed.
        """
    self.logger.info('Stopping all workers...')
    for worker in self.getWorkers():
        process = self.getWorker(worker)
        self.logger.debug('Stopping {0}'.format(process.name))
        if process.is_alive():
            process.join(timeout)
            if process.is_alive():
                self.logger.warning('Failed to stop {0}, terminating'.format(process.name))
                process.terminate() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        self.unregisterWorker(worker) # depends on [control=['for'], data=['worker']]
    self.logger.info('Stopped all workers')
    if stop:
        self.logger.fatal('Committing suicide')
        os._exit(0) # depends on [control=['if'], data=[]]
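The shutdown loop is the standard join-with-timeout-then-terminate pattern from ``multiprocessing``. A self-contained sketch of just that pattern, with a deliberately stubborn worker:

# join() with a grace period, then terminate() if the process is still alive.
import multiprocessing
import time

def stubborn_worker():
    while True:  # ignores any polite stop request
        time.sleep(0.1)

if __name__ == "__main__":
    p = multiprocessing.Process(target=stubborn_worker, name="worker-0")
    p.start()
    p.join(timeout=0.5)
    if p.is_alive():
        print("Failed to stop {0}, terminating".format(p.name))
        p.terminate()
    p.join()  # reap the terminated process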
def cancel_queue(self): """ Cancel all requests in the queue so we can exit. """ q = list(self.queue) self.queue = [] log.debug("Canceling requests: {}".format(q)) for req in q: req.response = APIServerNotRunningErrorResponse() for req in q: req.signal()
def function[cancel_queue, parameter[self]]: constant[ Cancel all requests in the queue so we can exit. ] variable[q] assign[=] call[name[list], parameter[name[self].queue]] name[self].queue assign[=] list[[]] call[name[log].debug, parameter[call[constant[Canceling requests: {}].format, parameter[name[q]]]]] for taget[name[req]] in starred[name[q]] begin[:] name[req].response assign[=] call[name[APIServerNotRunningErrorResponse], parameter[]] for taget[name[req]] in starred[name[q]] begin[:] call[name[req].signal, parameter[]]
keyword[def] identifier[cancel_queue] ( identifier[self] ): literal[string] identifier[q] = identifier[list] ( identifier[self] . identifier[queue] ) identifier[self] . identifier[queue] =[] identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[q] )) keyword[for] identifier[req] keyword[in] identifier[q] : identifier[req] . identifier[response] = identifier[APIServerNotRunningErrorResponse] () keyword[for] identifier[req] keyword[in] identifier[q] : identifier[req] . identifier[signal] ()
def cancel_queue(self): """ Cancel all requests in the queue so we can exit. """ q = list(self.queue) self.queue = [] log.debug('Canceling requests: {}'.format(q)) for req in q: req.response = APIServerNotRunningErrorResponse() # depends on [control=['for'], data=['req']] for req in q: req.signal() # depends on [control=['for'], data=['req']]
def connection_from_url(self, url): """ Similar to :func:`urllib3.connectionpool.connection_from_url` but doesn't pass any additional parameters to the :class:`urllib3.connectionpool.ConnectionPool` constructor. Additional parameters are taken from the :class:`.PoolManager` constructor. """ scheme, host, port = get_host(url) port = port or port_by_scheme.get(scheme, 80) return self.connection_from_host(host, port=port, scheme=scheme)
def function[connection_from_url, parameter[self, url]]: constant[ Similar to :func:`urllib3.connectionpool.connection_from_url` but doesn't pass any additional parameters to the :class:`urllib3.connectionpool.ConnectionPool` constructor. Additional parameters are taken from the :class:`.PoolManager` constructor. ] <ast.Tuple object at 0x7da1b25d0b80> assign[=] call[name[get_host], parameter[name[url]]] variable[port] assign[=] <ast.BoolOp object at 0x7da1b25d2ce0> return[call[name[self].connection_from_host, parameter[name[host]]]]
keyword[def] identifier[connection_from_url] ( identifier[self] , identifier[url] ): literal[string] identifier[scheme] , identifier[host] , identifier[port] = identifier[get_host] ( identifier[url] ) identifier[port] = identifier[port] keyword[or] identifier[port_by_scheme] . identifier[get] ( identifier[scheme] , literal[int] ) keyword[return] identifier[self] . identifier[connection_from_host] ( identifier[host] , identifier[port] = identifier[port] , identifier[scheme] = identifier[scheme] )
def connection_from_url(self, url): """ Similar to :func:`urllib3.connectionpool.connection_from_url` but doesn't pass any additional parameters to the :class:`urllib3.connectionpool.ConnectionPool` constructor. Additional parameters are taken from the :class:`.PoolManager` constructor. """ (scheme, host, port) = get_host(url) port = port or port_by_scheme.get(scheme, 80) return self.connection_from_host(host, port=port, scheme=scheme)
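The helper splits a URL and falls back to a scheme default port. A rough standard-library equivalent, where ``port_by_scheme`` is a hand-rolled stand-in for urllib3's table and ``host_and_port`` stands in for its ``get_host``:

# Stand-in for urllib3's get_host() and its scheme -> default-port table.
from urllib.parse import urlsplit

port_by_scheme = {"http": 80, "https": 443}

def host_and_port(url):
    parts = urlsplit(url)
    port = parts.port or port_by_scheme.get(parts.scheme, 80)
    return parts.scheme, parts.hostname, port

print(host_and_port("https://example.com/path"))   # ('https', 'example.com', 443)
print(host_and_port("http://example.com:8080/x"))  # ('http', 'example.com', 8080)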
def _create_examples(self, lines, set_type): """Creates examples for the training and dev sets.""" examples = [] for (i, line) in enumerate(lines): if i == 0: continue guid = "%s-%s" % (set_type, i) text_a = line[3] text_b = line[4] label = line[0] examples.append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) return examples
def function[_create_examples, parameter[self, lines, set_type]]: constant[Creates examples for the training and dev sets.] variable[examples] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18fe934f0>, <ast.Name object at 0x7da18fe92c50>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:] if compare[name[i] equal[==] constant[0]] begin[:] continue variable[guid] assign[=] binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18fe93f10>, <ast.Name object at 0x7da18fe92350>]]] variable[text_a] assign[=] call[name[line]][constant[3]] variable[text_b] assign[=] call[name[line]][constant[4]] variable[label] assign[=] call[name[line]][constant[0]] call[name[examples].append, parameter[call[name[InputExample], parameter[]]]] return[name[examples]]
keyword[def] identifier[_create_examples] ( identifier[self] , identifier[lines] , identifier[set_type] ): literal[string] identifier[examples] =[] keyword[for] ( identifier[i] , identifier[line] ) keyword[in] identifier[enumerate] ( identifier[lines] ): keyword[if] identifier[i] == literal[int] : keyword[continue] identifier[guid] = literal[string] %( identifier[set_type] , identifier[i] ) identifier[text_a] = identifier[line] [ literal[int] ] identifier[text_b] = identifier[line] [ literal[int] ] identifier[label] = identifier[line] [ literal[int] ] identifier[examples] . identifier[append] ( identifier[InputExample] ( identifier[guid] = identifier[guid] , identifier[text_a] = identifier[text_a] , identifier[text_b] = identifier[text_b] , identifier[label] = identifier[label] )) keyword[return] identifier[examples]
def _create_examples(self, lines, set_type): """Creates examples for the training and dev sets.""" examples = [] for (i, line) in enumerate(lines): if i == 0: continue # depends on [control=['if'], data=[]] guid = '%s-%s' % (set_type, i) text_a = line[3] text_b = line[4] label = line[0] examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) # depends on [control=['for'], data=[]] return examples
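A runnable sketch of the same loop on toy TSV-style rows; ``InputExample`` is stubbed as a namedtuple, since the real class lives in the BERT fine-tuning codebase.

# InputExample stubbed as a namedtuple; rows mimic a parsed TSV with a header line.
from collections import namedtuple

InputExample = namedtuple("InputExample", ["guid", "text_a", "text_b", "label"])

lines = [
    ["label", "id1", "id2", "sentence1", "sentence2"],  # header row, skipped
    ["1", "x1", "x2", "A man is eating.", "Someone is eating."],
]

examples = []
for i, line in enumerate(lines):
    if i == 0:
        continue
    examples.append(InputExample("dev-%s" % i, line[3], line[4], line[0]))

print(examples[0].guid, examples[0].label)  # dev-1 1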
def save_as_png(self, filename, width=300, height=250, render_time=1):
        """Open the saved HTML file in a virtual browser and save a screenshot in PNG format."""
        self.driver.set_window_size(width, height)
        self.driver.get('file://{path}/{filename}'.format(
            path=os.getcwd(), filename=filename + ".html"))
        time.sleep(render_time)
        self.driver.save_screenshot(filename + ".png")
def function[save_as_png, parameter[self, filename, width, height, render_time]]:
    constant[Open the saved HTML file in a virtual browser and save a screenshot in PNG format.]
    call[name[self].driver.set_window_size, parameter[name[width], name[height]]]
    call[name[self].driver.get, parameter[call[constant[file://{path}/{filename}].format, parameter[]]]]
    call[name[time].sleep, parameter[name[render_time]]]
    call[name[self].driver.save_screenshot, parameter[binary_operation[name[filename] + constant[.png]]]]
keyword[def] identifier[save_as_png] ( identifier[self] , identifier[filename] , identifier[width] = literal[int] , identifier[height] = literal[int] , identifier[render_time] = literal[int] ): literal[string] identifier[self] . identifier[driver] . identifier[set_window_size] ( identifier[width] , identifier[height] ) identifier[self] . identifier[driver] . identifier[get] ( literal[string] . identifier[format] ( identifier[path] = identifier[os] . identifier[getcwd] (), identifier[filename] = identifier[filename] + literal[string] )) identifier[time] . identifier[sleep] ( identifier[render_time] ) identifier[self] . identifier[driver] . identifier[save_screenshot] ( identifier[filename] + literal[string] )
def save_as_png(self, filename, width=300, height=250, render_time=1):
    """Open the saved HTML file in a virtual browser and save a screenshot in PNG format."""
    self.driver.set_window_size(width, height)
    self.driver.get('file://{path}/{filename}'.format(path=os.getcwd(), filename=filename + '.html'))
    time.sleep(render_time)
    self.driver.save_screenshot(filename + '.png')
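A hedged end-to-end sketch of driving the same Selenium calls (``set_window_size``, ``get``, and ``save_screenshot`` are real WebDriver methods; the headless-Chrome setup is an assumption about the environment, not part of the original class):

# Assumes chromedriver is installed; the headless setup is illustrative only.
import os
import time
from selenium import webdriver

options = webdriver.ChromeOptions()
options.add_argument("--headless")
driver = webdriver.Chrome(options=options)

driver.set_window_size(300, 250)
driver.get("file://{path}/{filename}".format(path=os.getcwd(), filename="chart.html"))
time.sleep(1)  # crude wait for client-side rendering
driver.save_screenshot("chart.png")
driver.quit()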
def invalidate(self, key): """Remove the given data item along with all items that depend on it in the graph.""" if key not in self.data: return del self.data[key] # Find all components that used it and invalidate their results for cname in self.components: if key in self.depends[cname]: for downstream_key in self.provides[cname]: self.invalidate(downstream_key)
def function[invalidate, parameter[self, key]]: constant[Remove the given data item along with all items that depend on it in the graph.] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self].data] begin[:] return[None] <ast.Delete object at 0x7da2054a7400> for taget[name[cname]] in starred[name[self].components] begin[:] if compare[name[key] in call[name[self].depends][name[cname]]] begin[:] for taget[name[downstream_key]] in starred[call[name[self].provides][name[cname]]] begin[:] call[name[self].invalidate, parameter[name[downstream_key]]]
keyword[def] identifier[invalidate] ( identifier[self] , identifier[key] ): literal[string] keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[data] : keyword[return] keyword[del] identifier[self] . identifier[data] [ identifier[key] ] keyword[for] identifier[cname] keyword[in] identifier[self] . identifier[components] : keyword[if] identifier[key] keyword[in] identifier[self] . identifier[depends] [ identifier[cname] ]: keyword[for] identifier[downstream_key] keyword[in] identifier[self] . identifier[provides] [ identifier[cname] ]: identifier[self] . identifier[invalidate] ( identifier[downstream_key] )
def invalidate(self, key): """Remove the given data item along with all items that depend on it in the graph.""" if key not in self.data: return # depends on [control=['if'], data=[]] del self.data[key] # Find all components that used it and invalidate their results for cname in self.components: if key in self.depends[cname]: for downstream_key in self.provides[cname]: self.invalidate(downstream_key) # depends on [control=['for'], data=['downstream_key']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cname']]
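A self-contained sketch of the cascading invalidation, with the three dicts (``data``, ``depends``, ``provides``) filled in by hand for a two-stage pipeline:

# Two components: "tok" consumes raw text, "ner" consumes tokens.
class Graph:
    def __init__(self):
        self.data = {"text": "Ada Lovelace", "tokens": ["Ada", "Lovelace"], "entities": ["PERSON"]}
        self.components = ["tok", "ner"]
        self.depends = {"tok": ["text"], "ner": ["tokens"]}
        self.provides = {"tok": ["tokens"], "ner": ["entities"]}

    def invalidate(self, key):
        if key not in self.data:
            return
        del self.data[key]
        for cname in self.components:
            if key in self.depends[cname]:
                for downstream_key in self.provides[cname]:
                    self.invalidate(downstream_key)

g = Graph()
g.invalidate("text")  # removes "text", then "tokens", then "entities"
print(g.data)         # {}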
def parse_range_pairs(s, range_separator = '-', convert_to_tuple = True): ''' Based on parse_range but instead returns a list of lists with the ranges. A single index n is returned as a range (n, n) whereas a range m-n is returned as (m, n) if m <= n, else (n, m). ''' result = map(sorted, map(lambda r: (int(r.split(range_separator)[0]), int(r.split(range_separator)[1])) if range_separator in r else (int(r), int(r)), s.split(','))) if convert_to_tuple: return tuple(map(tuple, result)) return result
def function[parse_range_pairs, parameter[s, range_separator, convert_to_tuple]]: constant[ Based on parse_range but instead returns a list of lists with the ranges. A single index n is returned as a range (n, n) whereas a range m-n is returned as (m, n) if m <= n, else (n, m). ] variable[result] assign[=] call[name[map], parameter[name[sorted], call[name[map], parameter[<ast.Lambda object at 0x7da20c794100>, call[name[s].split, parameter[constant[,]]]]]]] if name[convert_to_tuple] begin[:] return[call[name[tuple], parameter[call[name[map], parameter[name[tuple], name[result]]]]]] return[name[result]]
keyword[def] identifier[parse_range_pairs] ( identifier[s] , identifier[range_separator] = literal[string] , identifier[convert_to_tuple] = keyword[True] ): literal[string] identifier[result] = identifier[map] ( identifier[sorted] , identifier[map] ( keyword[lambda] identifier[r] : ( identifier[int] ( identifier[r] . identifier[split] ( identifier[range_separator] )[ literal[int] ]), identifier[int] ( identifier[r] . identifier[split] ( identifier[range_separator] )[ literal[int] ])) keyword[if] identifier[range_separator] keyword[in] identifier[r] keyword[else] ( identifier[int] ( identifier[r] ), identifier[int] ( identifier[r] )), identifier[s] . identifier[split] ( literal[string] ))) keyword[if] identifier[convert_to_tuple] : keyword[return] identifier[tuple] ( identifier[map] ( identifier[tuple] , identifier[result] )) keyword[return] identifier[result]
def parse_range_pairs(s, range_separator='-', convert_to_tuple=True): """ Based on parse_range but instead returns a list of lists with the ranges. A single index n is returned as a range (n, n) whereas a range m-n is returned as (m, n) if m <= n, else (n, m). """ result = map(sorted, map(lambda r: (int(r.split(range_separator)[0]), int(r.split(range_separator)[1])) if range_separator in r else (int(r), int(r)), s.split(','))) if convert_to_tuple: return tuple(map(tuple, result)) # depends on [control=['if'], data=[]] return result
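A worked example; each pair comes back sorted low-to-high, and with ``convert_to_tuple=False`` the Python 3 result is a lazy ``map`` object rather than a list:

print(parse_range_pairs("3,5-7,9-2"))
# ((3, 3), (5, 7), (2, 9))  -- single index n becomes (n, n); reversed ranges are sorted

pairs = parse_range_pairs("1-4", convert_to_tuple=False)
print(list(pairs))  # [[1, 4]]  (materialize the map object explicitly)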
def from_timestamp(ts): """ Convert a numeric timestamp to a timezone-aware datetime. A client may override this function to change the default behavior, such as to use local time or timezone-naïve times. """ return datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=pytz.utc)
def function[from_timestamp, parameter[ts]]: constant[ Convert a numeric timestamp to a timezone-aware datetime. A client may override this function to change the default behavior, such as to use local time or timezone-naïve times. ] return[call[call[name[datetime].datetime.utcfromtimestamp, parameter[name[ts]]].replace, parameter[]]]
keyword[def] identifier[from_timestamp] ( identifier[ts] ): literal[string] keyword[return] identifier[datetime] . identifier[datetime] . identifier[utcfromtimestamp] ( identifier[ts] ). identifier[replace] ( identifier[tzinfo] = identifier[pytz] . identifier[utc] )
def from_timestamp(ts): """ Convert a numeric timestamp to a timezone-aware datetime. A client may override this function to change the default behavior, such as to use local time or timezone-naïve times. """ return datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=pytz.utc)
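A quick check of the default behaviour; note that ``datetime.utcfromtimestamp`` is deprecated as of Python 3.12, with ``datetime.fromtimestamp(ts, tz=...)`` as the modern equivalent:

print(from_timestamp(0))  # 1970-01-01 00:00:00+00:00

# Equivalent on modern Python, avoiding the deprecated utcfromtimestamp:
import datetime
print(datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc))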
def download_parallel_gui(root, urls, directory, min_file_size, max_file_size, no_redirects):
    """ Called when parallel downloading is true """
    global parallel

    # create directory to save files
    if not os.path.exists(directory):
        os.makedirs(directory)

    parallel = True
    app = progress_class(root, urls, directory, min_file_size, max_file_size, no_redirects)
def function[download_parallel_gui, parameter[root, urls, directory, min_file_size, max_file_size, no_redirects]]:
    constant[ Called when parallel downloading is true ]
    <ast.Global object at 0x7da1b0fd6050>
    if <ast.UnaryOp object at 0x7da1b0fd6b90> begin[:]
        call[name[os].makedirs, parameter[name[directory]]]
    variable[parallel] assign[=] constant[True]
    variable[app] assign[=] call[name[progress_class], parameter[name[root], name[urls], name[directory], name[min_file_size], name[max_file_size], name[no_redirects]]]
keyword[def] identifier[download_parallel_gui] ( identifier[root] , identifier[urls] , identifier[directory] , identifier[min_file_size] , identifier[max_file_size] , identifier[no_redirects] ): literal[string] keyword[global] identifier[parallel] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ): identifier[os] . identifier[makedirs] ( identifier[directory] ) identifier[parallel] = keyword[True] identifier[app] = identifier[progress_class] ( identifier[root] , identifier[urls] , identifier[directory] , identifier[min_file_size] , identifier[max_file_size] , identifier[no_redirects] )
def download_parallel_gui(root, urls, directory, min_file_size, max_file_size, no_redirects):
    """ Called when parallel downloading is true """
    global parallel
    # create directory to save files
    if not os.path.exists(directory):
        os.makedirs(directory) # depends on [control=['if'], data=[]]
    parallel = True
    app = progress_class(root, urls, directory, min_file_size, max_file_size, no_redirects)
def _set_member_entry(self, v, load=False): """ Setter method for member_entry, mapped from YANG variable /rbridge_id/secpolicy/defined_policy/policies/member_entry (list) If this variable is read-only (config: false) in the source YANG file, then _set_member_entry is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_member_entry() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("member",member_entry.member_entry, yang_name="member-entry", rest_name="member-entry", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='member', extensions={u'tailf-common': {u'info': u'List of defined members', u'cli-suppress-list-no': None, u'callpoint': u'secpolicy_defined_policy_member', u'cli-suppress-key-abbreviation': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="member-entry", rest_name="member-entry", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'List of defined members', u'cli-suppress-list-no': None, u'callpoint': u'secpolicy_defined_policy_member', u'cli-suppress-key-abbreviation': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-fc-auth', defining_module='brocade-fc-auth', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """member_entry must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("member",member_entry.member_entry, yang_name="member-entry", rest_name="member-entry", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='member', extensions={u'tailf-common': {u'info': u'List of defined members', u'cli-suppress-list-no': None, u'callpoint': u'secpolicy_defined_policy_member', u'cli-suppress-key-abbreviation': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="member-entry", rest_name="member-entry", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'List of defined members', u'cli-suppress-list-no': None, u'callpoint': u'secpolicy_defined_policy_member', u'cli-suppress-key-abbreviation': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-fc-auth', defining_module='brocade-fc-auth', yang_type='list', is_config=True)""", }) self.__member_entry = t if hasattr(self, '_set'): self._set()
def function[_set_member_entry, parameter[self, v, load]]: constant[ Setter method for member_entry, mapped from YANG variable /rbridge_id/secpolicy/defined_policy/policies/member_entry (list) If this variable is read-only (config: false) in the source YANG file, then _set_member_entry is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_member_entry() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c794cd0> name[self].__member_entry assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_member_entry] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[member_entry] . identifier[member_entry] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__member_entry] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_member_entry(self, v, load=False): """ Setter method for member_entry, mapped from YANG variable /rbridge_id/secpolicy/defined_policy/policies/member_entry (list) If this variable is read-only (config: false) in the source YANG file, then _set_member_entry is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_member_entry() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('member', member_entry.member_entry, yang_name='member-entry', rest_name='member-entry', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='member', extensions={u'tailf-common': {u'info': u'List of defined members', u'cli-suppress-list-no': None, u'callpoint': u'secpolicy_defined_policy_member', u'cli-suppress-key-abbreviation': None, u'cli-suppress-mode': None}}), is_container='list', yang_name='member-entry', rest_name='member-entry', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'List of defined members', u'cli-suppress-list-no': None, u'callpoint': u'secpolicy_defined_policy_member', u'cli-suppress-key-abbreviation': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-fc-auth', defining_module='brocade-fc-auth', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'member_entry must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("member",member_entry.member_entry, yang_name="member-entry", rest_name="member-entry", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'member\', extensions={u\'tailf-common\': {u\'info\': u\'List of defined members\', u\'cli-suppress-list-no\': None, u\'callpoint\': u\'secpolicy_defined_policy_member\', u\'cli-suppress-key-abbreviation\': None, u\'cli-suppress-mode\': None}}), is_container=\'list\', yang_name="member-entry", rest_name="member-entry", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'List of defined members\', u\'cli-suppress-list-no\': None, u\'callpoint\': u\'secpolicy_defined_policy_member\', u\'cli-suppress-key-abbreviation\': None, u\'cli-suppress-mode\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-fc-auth\', defining_module=\'brocade-fc-auth\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__member_entry = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
async def send_file(
        filename: FilePath,
        mimetype: Optional[str]=None,
        as_attachment: bool=False,
        attachment_filename: Optional[str]=None,
        add_etags: bool=True,
        cache_timeout: Optional[int]=None,
        conditional: bool=False,
        last_modified: Optional[datetime]=None,
) -> Response:
    """Return a Response to send the filename given.

    Arguments:
        filename: The filename (path) to send, remember to use :func:`safe_join`.
        mimetype: Mimetype to use, by default it will be guessed or
            revert to the DEFAULT_MIMETYPE.
        as_attachment: If true, use the attachment filename in a
            Content-Disposition attachment header.
        attachment_filename: Name to use for the attachment, if it
            differs from the filename.
        add_etags: Set etags based on the filename, size and
            modification time.
        last_modified: Used to override the last modified value.
        cache_timeout: Time in seconds for the response to be cached.

    """
    file_path = file_path_to_path(filename)
    if attachment_filename is None:
        attachment_filename = file_path.name
    if mimetype is None:
        mimetype = mimetypes.guess_type(attachment_filename)[0] or DEFAULT_MIMETYPE

    file_body = current_app.response_class.file_body_class(file_path)
    response = current_app.response_class(file_body, mimetype=mimetype)

    if as_attachment:
        response.headers.add('Content-Disposition', 'attachment', filename=attachment_filename)

    if last_modified is not None:
        response.last_modified = last_modified
    else:
        response.last_modified = datetime.fromtimestamp(file_path.stat().st_mtime)

    response.cache_control.public = True
    cache_timeout = cache_timeout or current_app.get_send_file_max_age(file_path)
    if cache_timeout is not None:
        response.cache_control.max_age = cache_timeout
        response.expires = datetime.utcnow() + timedelta(seconds=cache_timeout)

    if add_etags:
        response.set_etag(
            '{}-{}-{}'.format(
                file_path.stat().st_mtime,
                file_path.stat().st_size,
                adler32(bytes(file_path)),
            ),
        )

    if conditional:
        await response.make_conditional(request.range)
    return response
<ast.AsyncFunctionDef object at 0x7da204963bb0>
keyword[async] keyword[def] identifier[send_file] ( identifier[filename] : identifier[FilePath] , identifier[mimetype] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[as_attachment] : identifier[bool] = keyword[False] , identifier[attachment_filename] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[add_etags] : identifier[bool] = keyword[True] , identifier[cache_timeout] : identifier[Optional] [ identifier[int] ]= keyword[None] , identifier[conditional] : identifier[bool] = keyword[False] , identifier[last_modified] : identifier[Optional] [ identifier[datetime] ]= keyword[None] , )-> identifier[Response] : literal[string] identifier[file_path] = identifier[file_path_to_path] ( identifier[filename] ) keyword[if] identifier[attachment_filename] keyword[is] keyword[None] : identifier[attachment_filename] = identifier[file_path] . identifier[name] keyword[if] identifier[mimetype] keyword[is] keyword[None] : identifier[mimetype] = identifier[mimetypes] . identifier[guess_type] ( identifier[attachment_filename] )[ literal[int] ] keyword[or] identifier[DEFAULT_MIMETYPE] identifier[file_body] = identifier[current_app] . identifier[response_class] . identifier[file_body_class] ( identifier[file_path] ) identifier[response] = identifier[current_app] . identifier[response_class] ( identifier[file_body] , identifier[mimetype] = identifier[mimetype] ) keyword[if] identifier[as_attachment] : identifier[response] . identifier[headers] . identifier[add] ( literal[string] , literal[string] , identifier[filename] = identifier[attachment_filename] ) keyword[if] identifier[last_modified] keyword[is] keyword[not] keyword[None] : identifier[response] . identifier[last_modified] = identifier[last_modified] keyword[else] : identifier[response] . identifier[last_modified] = identifier[datetime] . identifier[fromtimestamp] ( identifier[file_path] . identifier[stat] (). identifier[st_mtime] ) identifier[response] . identifier[cache_control] . identifier[public] = keyword[True] identifier[cache_timeout] = identifier[cache_timeout] keyword[or] identifier[current_app] . identifier[get_send_file_max_age] ( identifier[file_path] ) keyword[if] identifier[cache_timeout] keyword[is] keyword[not] keyword[None] : identifier[response] . identifier[cache_control] . identifier[max_age] = identifier[cache_timeout] identifier[response] . identifier[expires] = identifier[datetime] . identifier[utcnow] ()+ identifier[timedelta] ( identifier[seconds] = identifier[cache_timeout] ) keyword[if] identifier[add_etags] : identifier[response] . identifier[set_etag] ( literal[string] . identifier[format] ( identifier[file_path] . identifier[stat] (). identifier[st_mtime] , identifier[file_path] . identifier[stat] (). identifier[st_size] , identifier[adler32] ( identifier[bytes] ( identifier[file_path] )), ), ) keyword[if] identifier[conditional] : keyword[await] identifier[response] . identifier[make_conditional] ( identifier[request] . identifier[range] ) keyword[return] identifier[response]
async def send_file(filename: FilePath, mimetype: Optional[str]=None, as_attachment: bool=False, attachment_filename: Optional[str]=None, add_etags: bool=True, cache_timeout: Optional[int]=None, conditional: bool=False, last_modified: Optional[datetime]=None) -> Response:
    """Return a Response to send the filename given.

    Arguments:
        filename: The filename (path) to send, remember to use :func:`safe_join`.
        mimetype: Mimetype to use, by default it will be guessed or
            revert to the DEFAULT_MIMETYPE.
        as_attachment: If true, use the attachment filename in a
            Content-Disposition attachment header.
        attachment_filename: Name to use for the attachment, if it
            differs from the filename.
        add_etags: Set etags based on the filename, size and
            modification time.
        last_modified: Used to override the last modified value.
        cache_timeout: Time in seconds for the response to be cached.

    """
    file_path = file_path_to_path(filename)
    if attachment_filename is None:
        attachment_filename = file_path.name # depends on [control=['if'], data=['attachment_filename']]
    if mimetype is None:
        mimetype = mimetypes.guess_type(attachment_filename)[0] or DEFAULT_MIMETYPE # depends on [control=['if'], data=['mimetype']]
    file_body = current_app.response_class.file_body_class(file_path)
    response = current_app.response_class(file_body, mimetype=mimetype)
    if as_attachment:
        response.headers.add('Content-Disposition', 'attachment', filename=attachment_filename) # depends on [control=['if'], data=[]]
    if last_modified is not None:
        response.last_modified = last_modified # depends on [control=['if'], data=['last_modified']]
    else:
        response.last_modified = datetime.fromtimestamp(file_path.stat().st_mtime)
    response.cache_control.public = True
    cache_timeout = cache_timeout or current_app.get_send_file_max_age(file_path)
    if cache_timeout is not None:
        response.cache_control.max_age = cache_timeout
        response.expires = datetime.utcnow() + timedelta(seconds=cache_timeout) # depends on [control=['if'], data=['cache_timeout']]
    if add_etags:
        response.set_etag('{}-{}-{}'.format(file_path.stat().st_mtime, file_path.stat().st_size, adler32(bytes(file_path)))) # depends on [control=['if'], data=[]]
    if conditional:
        await response.make_conditional(request.range) # depends on [control=['if'], data=[]]
    return response
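The ETag above concatenates mtime, size, and an Adler-32 checksum of the encoded path (``pathlib.PurePath.__bytes__`` makes ``bytes(file_path)`` the fsencoded path). A standalone sketch of just that fingerprint:

# Recompute the same weak fingerprint the handler uses for its ETag.
import pathlib
from zlib import adler32

file_path = pathlib.Path(__file__)  # any existing file works
st = file_path.stat()
etag = "{}-{}-{}".format(st.st_mtime, st.st_size, adler32(bytes(file_path)))
print(etag)  # e.g. "1700000000.0-5321-123456789"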
def copy_constant_memory_args(self, cmem_args): """adds constant memory arguments to the most recently compiled module :param cmem_args: A dictionary containing the data to be passed to the device constant memory. The format to be used is as follows: A string key is used to name the constant memory symbol to which the value needs to be copied. Similar to regular arguments, these need to be numpy objects, such as numpy.ndarray or numpy.int32, and so on. :type cmem_args: dict( string: numpy.ndarray, ... ) """ logging.debug('copy_constant_memory_args called') logging.debug('current module: ' + str(self.current_module)) for k, v in cmem_args.items(): symbol = self.current_module.get_global(k)[0] logging.debug('copying to symbol: ' + str(symbol)) logging.debug('array to be copied: ') logging.debug(v.nbytes) logging.debug(v.dtype) logging.debug(v.flags) drv.memcpy_htod(symbol, v)
def function[copy_constant_memory_args, parameter[self, cmem_args]]: constant[adds constant memory arguments to the most recently compiled module :param cmem_args: A dictionary containing the data to be passed to the device constant memory. The format to be used is as follows: A string key is used to name the constant memory symbol to which the value needs to be copied. Similar to regular arguments, these need to be numpy objects, such as numpy.ndarray or numpy.int32, and so on. :type cmem_args: dict( string: numpy.ndarray, ... ) ] call[name[logging].debug, parameter[constant[copy_constant_memory_args called]]] call[name[logging].debug, parameter[binary_operation[constant[current module: ] + call[name[str], parameter[name[self].current_module]]]]] for taget[tuple[[<ast.Name object at 0x7da1b04efca0>, <ast.Name object at 0x7da1b0404520>]]] in starred[call[name[cmem_args].items, parameter[]]] begin[:] variable[symbol] assign[=] call[call[name[self].current_module.get_global, parameter[name[k]]]][constant[0]] call[name[logging].debug, parameter[binary_operation[constant[copying to symbol: ] + call[name[str], parameter[name[symbol]]]]]] call[name[logging].debug, parameter[constant[array to be copied: ]]] call[name[logging].debug, parameter[name[v].nbytes]] call[name[logging].debug, parameter[name[v].dtype]] call[name[logging].debug, parameter[name[v].flags]] call[name[drv].memcpy_htod, parameter[name[symbol], name[v]]]
keyword[def] identifier[copy_constant_memory_args] ( identifier[self] , identifier[cmem_args] ): literal[string] identifier[logging] . identifier[debug] ( literal[string] ) identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[self] . identifier[current_module] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[cmem_args] . identifier[items] (): identifier[symbol] = identifier[self] . identifier[current_module] . identifier[get_global] ( identifier[k] )[ literal[int] ] identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[symbol] )) identifier[logging] . identifier[debug] ( literal[string] ) identifier[logging] . identifier[debug] ( identifier[v] . identifier[nbytes] ) identifier[logging] . identifier[debug] ( identifier[v] . identifier[dtype] ) identifier[logging] . identifier[debug] ( identifier[v] . identifier[flags] ) identifier[drv] . identifier[memcpy_htod] ( identifier[symbol] , identifier[v] )
def copy_constant_memory_args(self, cmem_args): """adds constant memory arguments to the most recently compiled module :param cmem_args: A dictionary containing the data to be passed to the device constant memory. The format to be used is as follows: A string key is used to name the constant memory symbol to which the value needs to be copied. Similar to regular arguments, these need to be numpy objects, such as numpy.ndarray or numpy.int32, and so on. :type cmem_args: dict( string: numpy.ndarray, ... ) """ logging.debug('copy_constant_memory_args called') logging.debug('current module: ' + str(self.current_module)) for (k, v) in cmem_args.items(): symbol = self.current_module.get_global(k)[0] logging.debug('copying to symbol: ' + str(symbol)) logging.debug('array to be copied: ') logging.debug(v.nbytes) logging.debug(v.dtype) logging.debug(v.flags) drv.memcpy_htod(symbol, v) # depends on [control=['for'], data=[]]
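A hedged PyCUDA sketch of the same upload (requires a CUDA-capable device; ``get_global`` returns a ``(device_ptr, size_in_bytes)`` pair, and ``memcpy_htod`` copies host memory to it):

# Requires PyCUDA and a CUDA GPU; illustrative only.
import numpy
import pycuda.autoinit  # noqa: F401  (creates a CUDA context)
import pycuda.driver as drv
from pycuda.compiler import SourceModule

module = SourceModule("__constant__ float coeffs[4];")
symbol = module.get_global("coeffs")[0]  # device pointer to the symbol
drv.memcpy_htod(symbol, numpy.arange(4, dtype=numpy.float32))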
def apply_iter(cls, rows, mapping, resolver, scope=None):
        """ Given an iterable ``rows`` that yields data records, and a
        ``mapping`` which is to be applied to them, yield the mapped
        ``data`` (the generated object graph) for each row; validation
        errors returned by ``apply`` are discarded. """
        mapper = cls(mapping, resolver, scope=scope)
        for row in rows:
            _, data = mapper.apply(row)
            yield data
def function[apply_iter, parameter[cls, rows, mapping, resolver, scope]]:
    constant[ Given an iterable ``rows`` that yields data records, and a
        ``mapping`` which is to be applied to them, yield the mapped
        ``data`` (the generated object graph) for each row; validation
        errors returned by ``apply`` are discarded. ]
    variable[mapper] assign[=] call[name[cls], parameter[name[mapping], name[resolver]]]
    for taget[name[row]] in starred[name[rows]] begin[:]
        <ast.Tuple object at 0x7da18f00f3a0> assign[=] call[name[mapper].apply, parameter[name[row]]]
        <ast.Yield object at 0x7da18f00e1a0>
keyword[def] identifier[apply_iter] ( identifier[cls] , identifier[rows] , identifier[mapping] , identifier[resolver] , identifier[scope] = keyword[None] ): literal[string] identifier[mapper] = identifier[cls] ( identifier[mapping] , identifier[resolver] , identifier[scope] = identifier[scope] ) keyword[for] identifier[row] keyword[in] identifier[rows] : identifier[_] , identifier[data] = identifier[mapper] . identifier[apply] ( identifier[row] ) keyword[yield] identifier[data]
def apply_iter(cls, rows, mapping, resolver, scope=None):
    """ Given an iterable ``rows`` that yields data records, and a
        ``mapping`` which is to be applied to them, yield the mapped
        ``data`` (the generated object graph) for each row; validation
        errors returned by ``apply`` are discarded. """
    mapper = cls(mapping, resolver, scope=scope)
    for row in rows:
        (_, data) = mapper.apply(row)
        yield data # depends on [control=['for'], data=['row']]
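A toy run of the generator with a stub mapper shaped like the class above (its ``apply`` returns an ``(err, data)`` pair; the stub never errors):

# Stub mapper: apply() returns (err, data) like the real one; err is always None here.
class UpperMapper:
    def __init__(self, mapping, resolver, scope=None):
        self.field = mapping

    def apply(self, row):
        return None, {self.field: row[self.field].upper()}

    @classmethod
    def apply_iter(cls, rows, mapping, resolver, scope=None):
        mapper = cls(mapping, resolver, scope=scope)
        for row in rows:
            _, data = mapper.apply(row)
            yield data

rows = [{"name": "ada"}, {"name": "grace"}]
print(list(UpperMapper.apply_iter(rows, "name", resolver=None)))
# [{'name': 'ADA'}, {'name': 'GRACE'}]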
def disconnect(self, code): """Called when WebSocket connection is closed.""" Subscriber.objects.filter(session_id=self.session_id).delete()
def function[disconnect, parameter[self, code]]: constant[Called when WebSocket connection is closed.] call[call[name[Subscriber].objects.filter, parameter[]].delete, parameter[]]
keyword[def] identifier[disconnect] ( identifier[self] , identifier[code] ): literal[string] identifier[Subscriber] . identifier[objects] . identifier[filter] ( identifier[session_id] = identifier[self] . identifier[session_id] ). identifier[delete] ()
def disconnect(self, code): """Called when WebSocket connection is closed.""" Subscriber.objects.filter(session_id=self.session_id).delete()
def _handle_func_def(self, node, scope, ctxt, stream): """Handle FuncDef nodes :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog("handling function definition") func = self._handle_node(node.decl, scope, ctxt, stream) func.body = node.body
def function[_handle_func_def, parameter[self, node, scope, ctxt, stream]]: constant[Handle FuncDef nodes :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO ] call[name[self]._dlog, parameter[constant[handling function definition]]] variable[func] assign[=] call[name[self]._handle_node, parameter[name[node].decl, name[scope], name[ctxt], name[stream]]] name[func].body assign[=] name[node].body
keyword[def] identifier[_handle_func_def] ( identifier[self] , identifier[node] , identifier[scope] , identifier[ctxt] , identifier[stream] ): literal[string] identifier[self] . identifier[_dlog] ( literal[string] ) identifier[func] = identifier[self] . identifier[_handle_node] ( identifier[node] . identifier[decl] , identifier[scope] , identifier[ctxt] , identifier[stream] ) identifier[func] . identifier[body] = identifier[node] . identifier[body]
def _handle_func_def(self, node, scope, ctxt, stream): """Handle FuncDef nodes :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog('handling function definition') func = self._handle_node(node.decl, scope, ctxt, stream) func.body = node.body
def get_key_from_request(self):
        '''Return a key for the current request url.

        :return: The storage key for the current url
        :rtype: string
        '''
        path = "result:%s" % self.context.request.url

        if self.is_auto_webp():
            path += '/webp'

        return path
def function[get_key_from_request, parameter[self]]:
    constant[Return a key for the current request url.

        :return: The storage key for the current url
        :rtype: string
        ]
    variable[path] assign[=] binary_operation[constant[result:%s] <ast.Mod object at 0x7da2590d6920> name[self].context.request.url]
    if call[name[self].is_auto_webp, parameter[]] begin[:]
        <ast.AugAssign object at 0x7da207f038e0>
    return[name[path]]
keyword[def] identifier[get_key_from_request] ( identifier[self] ): literal[string] identifier[path] = literal[string] % identifier[self] . identifier[context] . identifier[request] . identifier[url] keyword[if] identifier[self] . identifier[is_auto_webp] (): identifier[path] += literal[string] keyword[return] identifier[path]
def get_key_from_request(self):
    """Return a key for the current request url.

        :return: The storage key for the current url
        :rtype: string
        """
    path = 'result:%s' % self.context.request.url
    if self.is_auto_webp():
        path += '/webp' # depends on [control=['if'], data=[]]
    return path
def latitude_from_cross_section(cross): """Calculate the latitude of points in a cross-section. Parameters ---------- cross : `xarray.DataArray` The input DataArray of a cross-section from which to obtain latitudes. Returns ------- latitude : `xarray.DataArray` Latitude of points """ y = cross.metpy.y if CFConventionHandler.check_axis(y, 'lat'): return y else: import cartopy.crs as ccrs latitude = ccrs.Geodetic().transform_points(cross.metpy.cartopy_crs, cross.metpy.x.values, y.values)[..., 1] latitude = xr.DataArray(latitude, coords=y.coords, dims=y.dims, attrs={'units': 'degrees_north'}) return latitude
def function[latitude_from_cross_section, parameter[cross]]: constant[Calculate the latitude of points in a cross-section. Parameters ---------- cross : `xarray.DataArray` The input DataArray of a cross-section from which to obtain latitudes. Returns ------- latitude : `xarray.DataArray` Latitude of points ] variable[y] assign[=] name[cross].metpy.y if call[name[CFConventionHandler].check_axis, parameter[name[y], constant[lat]]] begin[:] return[name[y]]
keyword[def] identifier[latitude_from_cross_section] ( identifier[cross] ): literal[string] identifier[y] = identifier[cross] . identifier[metpy] . identifier[y] keyword[if] identifier[CFConventionHandler] . identifier[check_axis] ( identifier[y] , literal[string] ): keyword[return] identifier[y] keyword[else] : keyword[import] identifier[cartopy] . identifier[crs] keyword[as] identifier[ccrs] identifier[latitude] = identifier[ccrs] . identifier[Geodetic] (). identifier[transform_points] ( identifier[cross] . identifier[metpy] . identifier[cartopy_crs] , identifier[cross] . identifier[metpy] . identifier[x] . identifier[values] , identifier[y] . identifier[values] )[..., literal[int] ] identifier[latitude] = identifier[xr] . identifier[DataArray] ( identifier[latitude] , identifier[coords] = identifier[y] . identifier[coords] , identifier[dims] = identifier[y] . identifier[dims] , identifier[attrs] ={ literal[string] : literal[string] }) keyword[return] identifier[latitude]
def latitude_from_cross_section(cross): """Calculate the latitude of points in a cross-section. Parameters ---------- cross : `xarray.DataArray` The input DataArray of a cross-section from which to obtain latitudes. Returns ------- latitude : `xarray.DataArray` Latitude of points """ y = cross.metpy.y if CFConventionHandler.check_axis(y, 'lat'): return y # depends on [control=['if'], data=[]] else: import cartopy.crs as ccrs latitude = ccrs.Geodetic().transform_points(cross.metpy.cartopy_crs, cross.metpy.x.values, y.values)[..., 1] latitude = xr.DataArray(latitude, coords=y.coords, dims=y.dims, attrs={'units': 'degrees_north'}) return latitude
def validate(self, api_key=None): """ The original contents of the Event message must be confirmed by refetching it and comparing the fetched data with the original data. This function makes an API call to Stripe to redownload the Event data and returns whether or not it matches the WebhookEventTrigger data. """ local_data = self.json_body if "id" not in local_data or "livemode" not in local_data: return False if self.is_test_event: logger.info("Test webhook received: {}".format(local_data)) return False if djstripe_settings.WEBHOOK_VALIDATION is None: # validation disabled return True elif ( djstripe_settings.WEBHOOK_VALIDATION == "verify_signature" and djstripe_settings.WEBHOOK_SECRET ): try: stripe.WebhookSignature.verify_header( self.body, self.headers.get("stripe-signature"), djstripe_settings.WEBHOOK_SECRET, djstripe_settings.WEBHOOK_TOLERANCE, ) except stripe.error.SignatureVerificationError: return False else: return True livemode = local_data["livemode"] api_key = api_key or djstripe_settings.get_default_api_key(livemode) # Retrieve the event using the api_version specified in itself with stripe_temporary_api_version(local_data["api_version"], validate=False): remote_data = Event.stripe_class.retrieve(id=local_data["id"], api_key=api_key) return local_data["data"] == remote_data["data"]
def function[validate, parameter[self, api_key]]: constant[ The original contents of the Event message must be confirmed by refetching it and comparing the fetched data with the original data. This function makes an API call to Stripe to redownload the Event data and returns whether or not it matches the WebhookEventTrigger data. ] variable[local_data] assign[=] name[self].json_body if <ast.BoolOp object at 0x7da207f02c50> begin[:] return[constant[False]] if name[self].is_test_event begin[:] call[name[logger].info, parameter[call[constant[Test webhook received: {}].format, parameter[name[local_data]]]]] return[constant[False]] if compare[name[djstripe_settings].WEBHOOK_VALIDATION is constant[None]] begin[:] return[constant[True]] variable[livemode] assign[=] call[name[local_data]][constant[livemode]] variable[api_key] assign[=] <ast.BoolOp object at 0x7da207f032e0> with call[name[stripe_temporary_api_version], parameter[call[name[local_data]][constant[api_version]]]] begin[:] variable[remote_data] assign[=] call[name[Event].stripe_class.retrieve, parameter[]] return[compare[call[name[local_data]][constant[data]] equal[==] call[name[remote_data]][constant[data]]]]
keyword[def] identifier[validate] ( identifier[self] , identifier[api_key] = keyword[None] ): literal[string] identifier[local_data] = identifier[self] . identifier[json_body] keyword[if] literal[string] keyword[not] keyword[in] identifier[local_data] keyword[or] literal[string] keyword[not] keyword[in] identifier[local_data] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[is_test_event] : identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[local_data] )) keyword[return] keyword[False] keyword[if] identifier[djstripe_settings] . identifier[WEBHOOK_VALIDATION] keyword[is] keyword[None] : keyword[return] keyword[True] keyword[elif] ( identifier[djstripe_settings] . identifier[WEBHOOK_VALIDATION] == literal[string] keyword[and] identifier[djstripe_settings] . identifier[WEBHOOK_SECRET] ): keyword[try] : identifier[stripe] . identifier[WebhookSignature] . identifier[verify_header] ( identifier[self] . identifier[body] , identifier[self] . identifier[headers] . identifier[get] ( literal[string] ), identifier[djstripe_settings] . identifier[WEBHOOK_SECRET] , identifier[djstripe_settings] . identifier[WEBHOOK_TOLERANCE] , ) keyword[except] identifier[stripe] . identifier[error] . identifier[SignatureVerificationError] : keyword[return] keyword[False] keyword[else] : keyword[return] keyword[True] identifier[livemode] = identifier[local_data] [ literal[string] ] identifier[api_key] = identifier[api_key] keyword[or] identifier[djstripe_settings] . identifier[get_default_api_key] ( identifier[livemode] ) keyword[with] identifier[stripe_temporary_api_version] ( identifier[local_data] [ literal[string] ], identifier[validate] = keyword[False] ): identifier[remote_data] = identifier[Event] . identifier[stripe_class] . identifier[retrieve] ( identifier[id] = identifier[local_data] [ literal[string] ], identifier[api_key] = identifier[api_key] ) keyword[return] identifier[local_data] [ literal[string] ]== identifier[remote_data] [ literal[string] ]
def validate(self, api_key=None): """ The original contents of the Event message must be confirmed by refetching it and comparing the fetched data with the original data. This function makes an API call to Stripe to redownload the Event data and returns whether or not it matches the WebhookEventTrigger data. """ local_data = self.json_body if 'id' not in local_data or 'livemode' not in local_data: return False # depends on [control=['if'], data=[]] if self.is_test_event: logger.info('Test webhook received: {}'.format(local_data)) return False # depends on [control=['if'], data=[]] if djstripe_settings.WEBHOOK_VALIDATION is None: # validation disabled return True # depends on [control=['if'], data=[]] elif djstripe_settings.WEBHOOK_VALIDATION == 'verify_signature' and djstripe_settings.WEBHOOK_SECRET: try: stripe.WebhookSignature.verify_header(self.body, self.headers.get('stripe-signature'), djstripe_settings.WEBHOOK_SECRET, djstripe_settings.WEBHOOK_TOLERANCE) # depends on [control=['try'], data=[]] except stripe.error.SignatureVerificationError: return False # depends on [control=['except'], data=[]] else: return True # depends on [control=['if'], data=[]] livemode = local_data['livemode'] api_key = api_key or djstripe_settings.get_default_api_key(livemode) # Retrieve the event using the api_version specified in itself with stripe_temporary_api_version(local_data['api_version'], validate=False): remote_data = Event.stripe_class.retrieve(id=local_data['id'], api_key=api_key) # depends on [control=['with'], data=[]] return local_data['data'] == remote_data['data']
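The ``verify_signature`` branch delegates to the Stripe SDK. Conceptually, the ``Stripe-Signature`` header carries a timestamp ``t`` and an HMAC-SHA256 ``v1`` digest of ``"{t}.{payload}"``; a simplified from-scratch sketch of that check (real headers may carry several ``v1`` entries, which this parsing ignores):

# Simplified re-implementation of Stripe's v1 webhook signature scheme.
import hashlib
import hmac
import time

def verify_stripe_signature(payload, header, secret, tolerance=300):
    parts = dict(item.split("=", 1) for item in header.split(","))
    timestamp, candidate = parts["t"], parts["v1"]
    signed = "{}.{}".format(timestamp, payload.decode("utf-8")).encode("utf-8")
    expected = hmac.new(secret.encode("utf-8"), signed, hashlib.sha256).hexdigest()
    if not hmac.compare_digest(expected, candidate):
        return False
    return abs(time.time() - int(timestamp)) <= tolerance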
def _points(self, x_pos):
    """
    Convert given data values into drawable points (x, y)
    and interpolated points if the interpolate option is specified.
    """
    for series_group in (self.series, self.secondary_series):
        accumulation = [0] * self._len
        for serie in series_group[::-1 if self.stack_from_top else 1]:
            accumulation = list(map(sum, zip(accumulation, serie.values)))
            serie.points = [(x_pos[i], v) for i, v in enumerate(accumulation)]
            if serie.points and self.interpolate:
                serie.interpolated = self._interpolate(x_pos, accumulation)
            else:
                serie.interpolated = []
def function[_points, parameter[self, x_pos]]: constant[ Convert given data values into drawable points (x, y) and interpolated points if interpolate option is specified ] for taget[name[series_group]] in starred[tuple[[<ast.Attribute object at 0x7da18f811330>, <ast.Attribute object at 0x7da18f810520>]]] begin[:] variable[accumulation] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18f813be0>]] * name[self]._len] for taget[name[serie]] in starred[call[name[series_group]][<ast.Slice object at 0x7da18f8137f0>]] begin[:] variable[accumulation] assign[=] call[name[list], parameter[call[name[map], parameter[name[sum], call[name[zip], parameter[name[accumulation], name[serie].values]]]]]] name[serie].points assign[=] <ast.ListComp object at 0x7da18f8122c0> if <ast.BoolOp object at 0x7da18f813ee0> begin[:] name[serie].interpolated assign[=] call[name[self]._interpolate, parameter[name[x_pos], name[accumulation]]]
keyword[def] identifier[_points] ( identifier[self] , identifier[x_pos] ): literal[string] keyword[for] identifier[series_group] keyword[in] ( identifier[self] . identifier[series] , identifier[self] . identifier[secondary_series] ): identifier[accumulation] =[ literal[int] ]* identifier[self] . identifier[_len] keyword[for] identifier[serie] keyword[in] identifier[series_group] [::- literal[int] keyword[if] identifier[self] . identifier[stack_from_top] keyword[else] literal[int] ]: identifier[accumulation] = identifier[list] ( identifier[map] ( identifier[sum] , identifier[zip] ( identifier[accumulation] , identifier[serie] . identifier[values] ))) identifier[serie] . identifier[points] =[( identifier[x_pos] [ identifier[i] ], identifier[v] ) keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[accumulation] )] keyword[if] identifier[serie] . identifier[points] keyword[and] identifier[self] . identifier[interpolate] : identifier[serie] . identifier[interpolated] = identifier[self] . identifier[_interpolate] ( identifier[x_pos] , identifier[accumulation] ) keyword[else] : identifier[serie] . identifier[interpolated] =[]
def _points(self, x_pos): """ Convert given data values into drawable points (x, y) and interpolated points if interpolate option is specified """ for series_group in (self.series, self.secondary_series): accumulation = [0] * self._len for serie in series_group[::-1 if self.stack_from_top else 1]: accumulation = list(map(sum, zip(accumulation, serie.values))) serie.points = [(x_pos[i], v) for (i, v) in enumerate(accumulation)] if serie.points and self.interpolate: serie.interpolated = self._interpolate(x_pos, accumulation) # depends on [control=['if'], data=[]] else: serie.interpolated = [] # depends on [control=['for'], data=['serie']] # depends on [control=['for'], data=['series_group']]
def bytes_to_long(s): """bytes_to_long(string) : long Convert a byte string to a long integer. This is (essentially) the inverse of long_to_bytes(). """ if isinstance(s, int): # On Python 2, indexing into a bytearray returns a byte string; on Python 3, an int. return s acc = 0 if USING_PYTHON2: acc = long(acc) # noqa unpack = struct.unpack length = len(s) if length % 4: extra = (4 - length % 4) s = b'\000' * extra + s length = length + extra for i in range(0, length, 4): acc = (acc << 32) + unpack(b'>I', s[i:i + 4])[0] return acc
def function[bytes_to_long, parameter[s]]: constant[bytes_to_long(string) : long Convert a byte string to a long integer. This is (essentially) the inverse of long_to_bytes(). ] if call[name[isinstance], parameter[name[s], name[int]]] begin[:] return[name[s]] variable[acc] assign[=] constant[0] if name[USING_PYTHON2] begin[:] variable[acc] assign[=] call[name[long], parameter[name[acc]]] variable[unpack] assign[=] name[struct].unpack variable[length] assign[=] call[name[len], parameter[name[s]]] if binary_operation[name[length] <ast.Mod object at 0x7da2590d6920> constant[4]] begin[:] variable[extra] assign[=] binary_operation[constant[4] - binary_operation[name[length] <ast.Mod object at 0x7da2590d6920> constant[4]]] variable[s] assign[=] binary_operation[binary_operation[constant[b'\x00'] * name[extra]] + name[s]] variable[length] assign[=] binary_operation[name[length] + name[extra]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[length], constant[4]]]] begin[:] variable[acc] assign[=] binary_operation[binary_operation[name[acc] <ast.LShift object at 0x7da2590d69e0> constant[32]] + call[call[name[unpack], parameter[constant[b'>I'], call[name[s]][<ast.Slice object at 0x7da1b157c2b0>]]]][constant[0]]] return[name[acc]]
keyword[def] identifier[bytes_to_long] ( identifier[s] ): literal[string] keyword[if] identifier[isinstance] ( identifier[s] , identifier[int] ): keyword[return] identifier[s] identifier[acc] = literal[int] keyword[if] identifier[USING_PYTHON2] : identifier[acc] = identifier[long] ( identifier[acc] ) identifier[unpack] = identifier[struct] . identifier[unpack] identifier[length] = identifier[len] ( identifier[s] ) keyword[if] identifier[length] % literal[int] : identifier[extra] =( literal[int] - identifier[length] % literal[int] ) identifier[s] = literal[string] * identifier[extra] + identifier[s] identifier[length] = identifier[length] + identifier[extra] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] , literal[int] ): identifier[acc] =( identifier[acc] << literal[int] )+ identifier[unpack] ( literal[string] , identifier[s] [ identifier[i] : identifier[i] + literal[int] ])[ literal[int] ] keyword[return] identifier[acc]
def bytes_to_long(s): """bytes_to_long(string) : long Convert a byte string to a long integer. This is (essentially) the inverse of long_to_bytes(). """ if isinstance(s, int): # On Python 2, indexing into a bytearray returns a byte string; on Python 3, an int. return s # depends on [control=['if'], data=[]] acc = 0 if USING_PYTHON2: acc = long(acc) # noqa # depends on [control=['if'], data=[]] unpack = struct.unpack length = len(s) if length % 4: extra = 4 - length % 4 s = b'\x00' * extra + s length = length + extra # depends on [control=['if'], data=[]] for i in range(0, length, 4): acc = (acc << 32) + unpack(b'>I', s[i:i + 4])[0] # depends on [control=['for'], data=['i']] return acc
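A quick sanity check for bytes_to_long above: because the input is left-padded with zero bytes to a multiple of four and accumulated as big-endian 32-bit words, the result agrees with a plain big-endian integer interpretation. The assertions below follow directly from that logic (Python 3).

# bytes_to_long interprets the byte string as a big-endian integer.
assert bytes_to_long(b'\x01') == 1
assert bytes_to_long(b'\x01\x00') == 256
# Equivalent to int.from_bytes for any length, zero-padding included:
assert bytes_to_long(b'\x00\x01\x02\x03\x04') == int.from_bytes(b'\x00\x01\x02\x03\x04', 'big')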
def close(self): """Close the socket without sending quit message.""" stream = self._stream if stream is None: return self._stream = None stream.close()
def function[close, parameter[self]]: constant[Close the socket without sending quit message.] variable[stream] assign[=] name[self]._stream if compare[name[stream] is constant[None]] begin[:] return[None] name[self]._stream assign[=] constant[None] call[name[stream].close, parameter[]]
keyword[def] identifier[close] ( identifier[self] ): literal[string] identifier[stream] = identifier[self] . identifier[_stream] keyword[if] identifier[stream] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[_stream] = keyword[None] identifier[stream] . identifier[close] ()
def close(self): """Close the socket without sending quit message.""" stream = self._stream if stream is None: return # depends on [control=['if'], data=[]] self._stream = None stream.close()
def _check_min_max_range(self, var, test_ctx):
    """
    Checks that either both valid_min and valid_max exist, or valid_range
    exists.
    """
    if 'valid_range' in var.ncattrs():
        test_ctx.assert_true(
            var.valid_range.dtype == var.dtype and
            len(var.valid_range) == 2 and
            var.valid_range[0] <= var.valid_range[1],
            "valid_range must be a two element vector of min followed by "
            "max with the same data type as {}".format(var.name)
        )
    else:
        for bound in ('valid_min', 'valid_max'):
            v_bound = getattr(var, bound, '')
            warn_msg = ('{} attribute should exist, have the same type as '
                        '{}, and not be empty or valid_range should be '
                        'defined'.format(bound, var.name))
            # need to special case str attributes since they aren't directly
            # comparable to numpy dtypes
            if isinstance(v_bound, six.string_types):
                test_ctx.assert_true(v_bound != '' and var.dtype.char == 'S',
                                     warn_msg)
            # otherwise compare the numpy types directly
            else:
                test_ctx.assert_true(v_bound.dtype == var.dtype, warn_msg)
    return test_ctx
def function[_check_min_max_range, parameter[self, var, test_ctx]]: constant[ Checks that either both valid_min and valid_max exist, or valid_range exists. ] if compare[constant[valid_range] in call[name[var].ncattrs, parameter[]]] begin[:] call[name[test_ctx].assert_true, parameter[<ast.BoolOp object at 0x7da2041da350>, call[constant[valid_range must be a two element vector of min followed by max with the same data type as {}].format, parameter[name[var].name]]]] return[name[test_ctx]]
keyword[def] identifier[_check_min_max_range] ( identifier[self] , identifier[var] , identifier[test_ctx] ): literal[string] keyword[if] literal[string] keyword[in] identifier[var] . identifier[ncattrs] (): identifier[test_ctx] . identifier[assert_true] ( identifier[var] . identifier[valid_range] . identifier[dtype] == identifier[var] . identifier[dtype] keyword[and] identifier[len] ( identifier[var] . identifier[valid_range] )== literal[int] keyword[and] identifier[var] . identifier[valid_range] [ literal[int] ]<= identifier[var] . identifier[valid_range] [ literal[int] ], literal[string] . identifier[format] ( identifier[var] . identifier[name] ) ) keyword[else] : keyword[for] identifier[bound] keyword[in] ( literal[string] , literal[string] ): identifier[v_bound] = identifier[getattr] ( identifier[var] , identifier[bound] , literal[string] ) identifier[warn_msg] = literal[string] . identifier[format] ( identifier[bound] , identifier[var] . identifier[name] ) keyword[if] identifier[isinstance] ( identifier[v_bound] , identifier[six] . identifier[string_types] ): identifier[test_ctx] . identifier[assert_true] ( identifier[v_bound] != literal[string] keyword[and] identifier[var] . identifier[dtype] . identifier[char] == literal[string] , identifier[warn_msg] ) keyword[else] : identifier[test_ctx] . identifier[assert_true] ( identifier[v_bound] . identifier[dtype] == identifier[var] . identifier[dtype] , identifier[warn_msg] ) keyword[return] identifier[test_ctx]
def _check_min_max_range(self, var, test_ctx): """ Checks that either both valid_min and valid_max exist, or valid_range exists. """ if 'valid_range' in var.ncattrs(): test_ctx.assert_true(var.valid_range.dtype == var.dtype and len(var.valid_range) == 2 and (var.valid_range[0] <= var.valid_range[1]), 'valid_range must be a two element vector of min followed by max with the same data type as {}'.format(var.name)) # depends on [control=['if'], data=[]] else: for bound in ('valid_min', 'valid_max'): v_bound = getattr(var, bound, '') warn_msg = '{} attribute should exist, have the same type as {}, and not be empty or valid_range should be defined'.format(bound, var.name) # need to special case str attributes since they aren't directly # comparable to numpy dtypes if isinstance(v_bound, six.string_types): test_ctx.assert_true(v_bound != '' and var.dtype.char == 'S', warn_msg) # depends on [control=['if'], data=[]] else: # otherwise compare the numpy types directly test_ctx.assert_true(v_bound.dtype == var.dtype, warn_msg) # depends on [control=['for'], data=['bound']] return test_ctx
def wash_html_id(dirty):
    """Strip all non-word characters from a given string.

    The result can be used as an HTML element ID (also with jQuery and
    in all browsers).

    :param dirty: the string to wash
    :returns: the HTML ID ready string
    """
    import re
    if not dirty[0].isalpha():
        # make sure that the first character is a letter
        dirty = 'i' + dirty
    non_word = re.compile(r'[^\w]+')
    return non_word.sub('', dirty)
def function[wash_html_id, parameter[dirty]]: constant[Strip non-alphabetic or newline characters from a given string. It can be used as a HTML element ID (also with jQuery and in all browsers). :param dirty: the string to wash :returns: the HTML ID ready string ] import module[re] if <ast.UnaryOp object at 0x7da18dc99d50> begin[:] variable[dirty] assign[=] binary_operation[constant[i] + name[dirty]] variable[non_word] assign[=] call[name[re].compile, parameter[constant[[^\w]+]]] return[call[name[non_word].sub, parameter[constant[], name[dirty]]]]
keyword[def] identifier[wash_html_id] ( identifier[dirty] ): literal[string] keyword[import] identifier[re] keyword[if] keyword[not] identifier[dirty] [ literal[int] ]. identifier[isalpha] (): identifier[dirty] = literal[string] + identifier[dirty] identifier[non_word] = identifier[re] . identifier[compile] ( literal[string] ) keyword[return] identifier[non_word] . identifier[sub] ( literal[string] , identifier[dirty] )
def wash_html_id(dirty): """Strip non-alphabetic or newline characters from a given string. It can be used as a HTML element ID (also with jQuery and in all browsers). :param dirty: the string to wash :returns: the HTML ID ready string """ import re if not dirty[0].isalpha(): # we make sure that the first character is a lowercase letter dirty = 'i' + dirty # depends on [control=['if'], data=[]] non_word = re.compile('[^\\w]+') return non_word.sub('', dirty)
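A minimal usage sketch for wash_html_id; both results follow from the regex and the leading-letter guard above.

wash_html_id('widget #42')   # -> 'widget42' (non-word characters removed)
wash_html_id('42-widget')    # -> 'i42widget' (prefixed with 'i', then washed)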
def from_array(array): """ Deserialize a new Location from a given dictionary. :return: new Location instance. :rtype: Location """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['longitude'] = float(array.get('longitude')) data['latitude'] = float(array.get('latitude')) data['_raw'] = array return Location(**data)
def function[from_array, parameter[array]]: constant[ Deserialize a new Location from a given dictionary. :return: new Location instance. :rtype: Location ] if <ast.BoolOp object at 0x7da1b0349120> begin[:] return[constant[None]] call[name[assert_type_or_raise], parameter[name[array], name[dict]]] variable[data] assign[=] dictionary[[], []] call[name[data]][constant[longitude]] assign[=] call[name[float], parameter[call[name[array].get, parameter[constant[longitude]]]]] call[name[data]][constant[latitude]] assign[=] call[name[float], parameter[call[name[array].get, parameter[constant[latitude]]]]] call[name[data]][constant[_raw]] assign[=] name[array] return[call[name[Location], parameter[]]]
keyword[def] identifier[from_array] ( identifier[array] ): literal[string] keyword[if] identifier[array] keyword[is] keyword[None] keyword[or] keyword[not] identifier[array] : keyword[return] keyword[None] identifier[assert_type_or_raise] ( identifier[array] , identifier[dict] , identifier[parameter_name] = literal[string] ) identifier[data] ={} identifier[data] [ literal[string] ]= identifier[float] ( identifier[array] . identifier[get] ( literal[string] )) identifier[data] [ literal[string] ]= identifier[float] ( identifier[array] . identifier[get] ( literal[string] )) identifier[data] [ literal[string] ]= identifier[array] keyword[return] identifier[Location] (** identifier[data] )
def from_array(array): """ Deserialize a new Location from a given dictionary. :return: new Location instance. :rtype: Location """ if array is None or not array: return None # depends on [control=['if'], data=[]] # end if assert_type_or_raise(array, dict, parameter_name='array') data = {} data['longitude'] = float(array.get('longitude')) data['latitude'] = float(array.get('latitude')) data['_raw'] = array return Location(**data)
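A hedged usage sketch for from_array above. Where exactly the helper is bound (module level or as a static method on Location) is not shown in this excerpt, so the call form is an assumption; the field handling follows from the body.

payload = {'longitude': 4.3517, 'latitude': 50.8503}
loc = from_array(payload)   # assumed call form; may be Location.from_array(payload)
# loc is built as Location(longitude=4.3517, latitude=50.8503, _raw=payload),
# so the original dict is preserved alongside the parsed floats.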
def get_png_data_url(blob: Optional[bytes]) -> str: """ Converts a PNG blob into a local URL encapsulating the PNG. """ return BASE64_PNG_URL_PREFIX + base64.b64encode(blob).decode('ascii')
def function[get_png_data_url, parameter[blob]]: constant[ Converts a PNG blob into a local URL encapsulating the PNG. ] return[binary_operation[name[BASE64_PNG_URL_PREFIX] + call[call[name[base64].b64encode, parameter[name[blob]]].decode, parameter[constant[ascii]]]]]
keyword[def] identifier[get_png_data_url] ( identifier[blob] : identifier[Optional] [ identifier[bytes] ])-> identifier[str] : literal[string] keyword[return] identifier[BASE64_PNG_URL_PREFIX] + identifier[base64] . identifier[b64encode] ( identifier[blob] ). identifier[decode] ( literal[string] )
def get_png_data_url(blob: Optional[bytes]) -> str: """ Converts a PNG blob into a local URL encapsulating the PNG. """ return BASE64_PNG_URL_PREFIX + base64.b64encode(blob).decode('ascii')
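A short usage sketch for get_png_data_url. BASE64_PNG_URL_PREFIX is not defined in this excerpt; it is presumably something like 'data:image/png;base64,', which is an assumption here.

with open('icon.png', 'rb') as f:   # any PNG file
    url = get_png_data_url(f.read())
# url == BASE64_PNG_URL_PREFIX + '<base64 payload>', e.g.
# 'data:image/png;base64,iVBORw0KGgo...' under the assumed prefix,
# and can be used directly as an <img src=...> value.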
def check_url(url):
    """Check whether the given URL is dead or alive.
    Returns a dict with four keys:
    "url": The URL that was checked (string)
    "alive": Whether the URL was working, True or False
    "status": The HTTP status code of the response from the URL,
        e.g. 200, 401, 500 (int)
    "reason": The reason for the success or failure of the check,
        e.g. "OK", "Unauthorized", "Internal Server Error" (string)
    The "status" may be None if we did not get a valid HTTP response,
    e.g. in the event of a timeout, DNS failure or invalid HTTP response.
    The "reason" will always be a string, but may be a requests library
    exception string rather than an HTTP reason string if we did not get a
    valid HTTP response.
    """
    result = {"url": url}
    try:
        response = requests.get(url)
        result["status"] = response.status_code
        result["reason"] = response.reason
        response.raise_for_status()  # Raise if status_code is not OK.
        result["alive"] = True
    except AttributeError as err:
        # Exceptions have no .message attribute on Python 3, so compare
        # against str(err) instead.
        if str(err) == "'NoneType' object has no attribute 'encode'":
            # requests seems to throw these for some invalid URLs.
            result["alive"] = False
            result["reason"] = "Invalid URL"
            result["status"] = None
        else:
            raise
    except requests.exceptions.RequestException as err:
        result["alive"] = False
        if "reason" not in result:
            result["reason"] = str(err)
    if "status" not in result:
        # This can happen if the response is invalid HTTP, if we get a DNS
        # failure, or a timeout, etc.
        result["status"] = None
    # We should always have these four fields in the result.
    assert "url" in result
    assert result.get("alive") in (True, False)
    assert "status" in result
    assert "reason" in result
    return result
def function[check_url, parameter[url]]: constant[Check whether the given URL is dead or alive. Returns a dict with four keys: "url": The URL that was checked (string) "alive": Whether the URL was working, True or False "status": The HTTP status code of the response from the URL, e.g. 200, 401, 500 (int) "reason": The reason for the success or failure of the check, e.g. "OK", "Unauthorized", "Internal Server Error" (string) The "status" may be None if we did not get a valid HTTP response, e.g. in the event of a timeout, DNS failure or invalid HTTP response. The "reason" will always be a string, but may be a requests library exception string rather than an HTTP reason string if we did not get a valid HTTP response. ] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ef46a0>], [<ast.Name object at 0x7da1b0ef4640>]] <ast.Try object at 0x7da1b0ef4820> assert[compare[constant[url] in name[result]]] assert[compare[call[name[result].get, parameter[constant[alive]]] in tuple[[<ast.Constant object at 0x7da1b0ef4760>, <ast.Constant object at 0x7da1b0ef44c0>]]]] assert[compare[constant[status] in name[result]]] assert[compare[constant[reason] in name[result]]] return[name[result]]
keyword[def] identifier[check_url] ( identifier[url] ): literal[string] identifier[result] ={ literal[string] : identifier[url] } keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[url] ) identifier[result] [ literal[string] ]= identifier[response] . identifier[status_code] identifier[result] [ literal[string] ]= identifier[response] . identifier[reason] identifier[response] . identifier[raise_for_status] () identifier[result] [ literal[string] ]= keyword[True] keyword[except] identifier[AttributeError] keyword[as] identifier[err] : keyword[if] identifier[err] . identifier[message] == literal[string] : identifier[result] [ literal[string] ]= keyword[False] identifier[result] [ literal[string] ]= literal[string] identifier[result] [ literal[string] ]= keyword[None] keyword[else] : keyword[raise] keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[err] : identifier[result] [ literal[string] ]= keyword[False] keyword[if] literal[string] keyword[not] keyword[in] identifier[result] : identifier[result] [ literal[string] ]= identifier[str] ( identifier[err] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[result] : identifier[result] [ literal[string] ]= keyword[None] keyword[assert] literal[string] keyword[in] identifier[result] keyword[assert] identifier[result] . identifier[get] ( literal[string] ) keyword[in] ( keyword[True] , keyword[False] ) keyword[assert] literal[string] keyword[in] identifier[result] keyword[assert] literal[string] keyword[in] identifier[result] keyword[return] identifier[result]
def check_url(url): """Check whether the given URL is dead or alive. Returns a dict with four keys: "url": The URL that was checked (string) "alive": Whether the URL was working, True or False "status": The HTTP status code of the response from the URL, e.g. 200, 401, 500 (int) "reason": The reason for the success or failure of the check, e.g. "OK", "Unauthorized", "Internal Server Error" (string) The "status" may be None if we did not get a valid HTTP response, e.g. in the event of a timeout, DNS failure or invalid HTTP response. The "reason" will always be a string, but may be a requests library exception string rather than an HTTP reason string if we did not get a valid HTTP response. """ result = {'url': url} try: response = requests.get(url) result['status'] = response.status_code result['reason'] = response.reason response.raise_for_status() # Raise if status_code is not OK. result['alive'] = True # depends on [control=['try'], data=[]] except AttributeError as err: if err.message == "'NoneType' object has no attribute 'encode'": # requests seems to throw these for some invalid URLs. result['alive'] = False result['reason'] = 'Invalid URL' result['status'] = None # depends on [control=['if'], data=[]] else: raise # depends on [control=['except'], data=['err']] except requests.exceptions.RequestException as err: result['alive'] = False if 'reason' not in result: result['reason'] = str(err) # depends on [control=['if'], data=['result']] if 'status' not in result: # This can happen if the response is invalid HTTP, if we get a DNS # failure, or a timeout, etc. result['status'] = None # depends on [control=['if'], data=['result']] # depends on [control=['except'], data=['err']] # We should always have these four fields in the result. assert 'url' in result assert result.get('alive') in (True, False) assert 'status' in result assert 'reason' in result return result
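A usage sketch for check_url; the keys shown follow from the function's contract above.

result = check_url('https://example.com/')
if result['alive']:
    print(result['status'], result['reason'])   # e.g. 200 OK
else:
    print('dead:', result['reason'])            # e.g. a timeout message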
def sample_without_replacement(n, k, num_trials=None, random_state=None): """ Randomly choose k integers without replacement from 0, ..., n-1. Parameters ---------- n : scalar(int) Number of integers, 0, ..., n-1, to sample from. k : scalar(int) Number of integers to sample. num_trials : scalar(int), optional(default=None) Number of trials. random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- result : ndarray(int, ndim=1 or 2) Array of shape (k,) if num_trials is None, or of shape (num_trials, k) otherwise, (each row of) which contains k unique random elements chosen from 0, ..., n-1. Examples -------- >>> qe.random.sample_without_replacement(5, 3, random_state=1234) array([0, 2, 1]) >>> qe.random.sample_without_replacement(5, 3, num_trials=4, ... random_state=1234) array([[0, 2, 1], [3, 4, 0], [1, 3, 2], [4, 1, 3]]) """ if n <= 0: raise ValueError('n must be greater than 0') if k > n: raise ValueError('k must be smaller than or equal to n') size = k if num_trials is None else (num_trials, k) random_state = check_random_state(random_state) r = random_state.random_sample(size=size) result = _sample_without_replacement(n, r) return result
def function[sample_without_replacement, parameter[n, k, num_trials, random_state]]: constant[ Randomly choose k integers without replacement from 0, ..., n-1. Parameters ---------- n : scalar(int) Number of integers, 0, ..., n-1, to sample from. k : scalar(int) Number of integers to sample. num_trials : scalar(int), optional(default=None) Number of trials. random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- result : ndarray(int, ndim=1 or 2) Array of shape (k,) if num_trials is None, or of shape (num_trials, k) otherwise, (each row of) which contains k unique random elements chosen from 0, ..., n-1. Examples -------- >>> qe.random.sample_without_replacement(5, 3, random_state=1234) array([0, 2, 1]) >>> qe.random.sample_without_replacement(5, 3, num_trials=4, ... random_state=1234) array([[0, 2, 1], [3, 4, 0], [1, 3, 2], [4, 1, 3]]) ] if compare[name[n] less_or_equal[<=] constant[0]] begin[:] <ast.Raise object at 0x7da18dc99bd0> if compare[name[k] greater[>] name[n]] begin[:] <ast.Raise object at 0x7da18dc998a0> variable[size] assign[=] <ast.IfExp object at 0x7da18dc9abf0> variable[random_state] assign[=] call[name[check_random_state], parameter[name[random_state]]] variable[r] assign[=] call[name[random_state].random_sample, parameter[]] variable[result] assign[=] call[name[_sample_without_replacement], parameter[name[n], name[r]]] return[name[result]]
keyword[def] identifier[sample_without_replacement] ( identifier[n] , identifier[k] , identifier[num_trials] = keyword[None] , identifier[random_state] = keyword[None] ): literal[string] keyword[if] identifier[n] <= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[k] > identifier[n] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[size] = identifier[k] keyword[if] identifier[num_trials] keyword[is] keyword[None] keyword[else] ( identifier[num_trials] , identifier[k] ) identifier[random_state] = identifier[check_random_state] ( identifier[random_state] ) identifier[r] = identifier[random_state] . identifier[random_sample] ( identifier[size] = identifier[size] ) identifier[result] = identifier[_sample_without_replacement] ( identifier[n] , identifier[r] ) keyword[return] identifier[result]
def sample_without_replacement(n, k, num_trials=None, random_state=None): """ Randomly choose k integers without replacement from 0, ..., n-1. Parameters ---------- n : scalar(int) Number of integers, 0, ..., n-1, to sample from. k : scalar(int) Number of integers to sample. num_trials : scalar(int), optional(default=None) Number of trials. random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- result : ndarray(int, ndim=1 or 2) Array of shape (k,) if num_trials is None, or of shape (num_trials, k) otherwise, (each row of) which contains k unique random elements chosen from 0, ..., n-1. Examples -------- >>> qe.random.sample_without_replacement(5, 3, random_state=1234) array([0, 2, 1]) >>> qe.random.sample_without_replacement(5, 3, num_trials=4, ... random_state=1234) array([[0, 2, 1], [3, 4, 0], [1, 3, 2], [4, 1, 3]]) """ if n <= 0: raise ValueError('n must be greater than 0') # depends on [control=['if'], data=[]] if k > n: raise ValueError('k must be smaller than or equal to n') # depends on [control=['if'], data=[]] size = k if num_trials is None else (num_trials, k) random_state = check_random_state(random_state) r = random_state.random_sample(size=size) result = _sample_without_replacement(n, r) return result
def minkowski_distance(x, y, p=2):
    """ Calculates the Minkowski distance between two points.

    :param x: the first point
    :param y: the second point
    :param p: the order of the Minkowski algorithm. If *p=1* it is equal
        to the Manhattan distance, if *p=2* it is equal to the Euclidean
        distance. The higher the order, the closer it converges to the
        Chebyshev distance, which has *p=infinity*.
    """
    from math import pow
    assert len(y) == len(x)
    assert len(x) >= 1
    total = 0  # renamed from `sum` to avoid shadowing the built-in
    for i in range(len(x)):
        total += abs(x[i] - y[i]) ** p
    return pow(total, 1.0 / float(p))
def function[minkowski_distance, parameter[x, y, p]]: constant[ Calculates the minkowski distance between two points. :param x: the first point :param y: the second point :param p: the order of the minkowski algorithm. If *p=1* it is equal to the manhatten distance, if *p=2* it is equal to the euclidian distance. The higher the order, the closer it converges to the Chebyshev distance, which has *p=infinity*. ] from relative_module[math] import module[pow] assert[compare[call[name[len], parameter[name[y]]] equal[==] call[name[len], parameter[name[x]]]]] assert[compare[call[name[len], parameter[name[x]]] greater_or_equal[>=] constant[1]]] variable[sum] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[x]]]]]] begin[:] <ast.AugAssign object at 0x7da20e9b17e0> return[call[name[pow], parameter[name[sum], binary_operation[constant[1.0] / call[name[float], parameter[name[p]]]]]]]
keyword[def] identifier[minkowski_distance] ( identifier[x] , identifier[y] , identifier[p] = literal[int] ): literal[string] keyword[from] identifier[math] keyword[import] identifier[pow] keyword[assert] identifier[len] ( identifier[y] )== identifier[len] ( identifier[x] ) keyword[assert] identifier[len] ( identifier[x] )>= literal[int] identifier[sum] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[x] )): identifier[sum] += identifier[abs] ( identifier[x] [ identifier[i] ]- identifier[y] [ identifier[i] ])** identifier[p] keyword[return] identifier[pow] ( identifier[sum] , literal[int] / identifier[float] ( identifier[p] ))
def minkowski_distance(x, y, p=2): """ Calculates the minkowski distance between two points. :param x: the first point :param y: the second point :param p: the order of the minkowski algorithm. If *p=1* it is equal to the manhatten distance, if *p=2* it is equal to the euclidian distance. The higher the order, the closer it converges to the Chebyshev distance, which has *p=infinity*. """ from math import pow assert len(y) == len(x) assert len(x) >= 1 sum = 0 for i in range(len(x)): sum += abs(x[i] - y[i]) ** p # depends on [control=['for'], data=['i']] return pow(sum, 1.0 / float(p))
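Two worked values for minkowski_distance, checkable by hand: with x = [1, 2] and y = [4, 6] the absolute coordinate differences are 3 and 4.

minkowski_distance([1, 2], [4, 6], p=1)   # 3 + 4 = 7.0 (Manhattan)
minkowski_distance([1, 2], [4, 6], p=2)   # sqrt(9 + 16) = 5.0 (Euclidean)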
async def start(self): """Start the websocket server. When this method returns, the websocket server will be running and the port property of this class will have its assigned port number. This method should be called only once in the lifetime of the server and must be paired with a call to stop() to cleanly release the server's resources. """ if self._server_task is not None: self._logger.debug("AsyncValidatingWSServer.start() called twice, ignoring") return started_signal = self._loop.create_future() self._server_task = self._loop.add_task(self._run_server_task(started_signal)) await started_signal if self.port is None: self.port = started_signal.result()
<ast.AsyncFunctionDef object at 0x7da18f00cdc0>
keyword[async] keyword[def] identifier[start] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_server_task] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] ) keyword[return] identifier[started_signal] = identifier[self] . identifier[_loop] . identifier[create_future] () identifier[self] . identifier[_server_task] = identifier[self] . identifier[_loop] . identifier[add_task] ( identifier[self] . identifier[_run_server_task] ( identifier[started_signal] )) keyword[await] identifier[started_signal] keyword[if] identifier[self] . identifier[port] keyword[is] keyword[None] : identifier[self] . identifier[port] = identifier[started_signal] . identifier[result] ()
async def start(self): """Start the websocket server. When this method returns, the websocket server will be running and the port property of this class will have its assigned port number. This method should be called only once in the lifetime of the server and must be paired with a call to stop() to cleanly release the server's resources. """ if self._server_task is not None: self._logger.debug('AsyncValidatingWSServer.start() called twice, ignoring') return # depends on [control=['if'], data=[]] started_signal = self._loop.create_future() self._server_task = self._loop.add_task(self._run_server_task(started_signal)) await started_signal if self.port is None: self.port = started_signal.result() # depends on [control=['if'], data=[]]
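A hedged usage sketch for the start coroutine above. The constructor arguments and the stop() counterpart are not shown in this excerpt, so both are assumptions based on the docstring.

async def serve():
    server = AsyncValidatingWSServer(...)     # constructor args not shown here
    await server.start()
    print('listening on port', server.port)   # port is assigned once start() returns
    ...
    await server.stop()                       # assumed counterpart named in the docstring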
def getCheck(self, checkid): """Returns a detailed description of a specified check.""" check = PingdomCheck(self, {'id': checkid}) check.getDetails() return check
def function[getCheck, parameter[self, checkid]]: constant[Returns a detailed description of a specified check.] variable[check] assign[=] call[name[PingdomCheck], parameter[name[self], dictionary[[<ast.Constant object at 0x7da1b0d1d330>], [<ast.Name object at 0x7da1b0d1dff0>]]]] call[name[check].getDetails, parameter[]] return[name[check]]
keyword[def] identifier[getCheck] ( identifier[self] , identifier[checkid] ): literal[string] identifier[check] = identifier[PingdomCheck] ( identifier[self] ,{ literal[string] : identifier[checkid] }) identifier[check] . identifier[getDetails] () keyword[return] identifier[check]
def getCheck(self, checkid): """Returns a detailed description of a specified check.""" check = PingdomCheck(self, {'id': checkid}) check.getDetails() return check
def invalidate(self): """Mark the data invalidated. Clients will refetch the volume.""" with self._mesh_generator_lock: self._mesh_generator_pending = None self._mesh_generator = None self._dispatch_changed_callbacks()
def function[invalidate, parameter[self]]: constant[Mark the data invalidated. Clients will refetch the volume.] with name[self]._mesh_generator_lock begin[:] name[self]._mesh_generator_pending assign[=] constant[None] name[self]._mesh_generator assign[=] constant[None] call[name[self]._dispatch_changed_callbacks, parameter[]]
keyword[def] identifier[invalidate] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[_mesh_generator_lock] : identifier[self] . identifier[_mesh_generator_pending] = keyword[None] identifier[self] . identifier[_mesh_generator] = keyword[None] identifier[self] . identifier[_dispatch_changed_callbacks] ()
def invalidate(self): """Mark the data invalidated. Clients will refetch the volume.""" with self._mesh_generator_lock: self._mesh_generator_pending = None self._mesh_generator = None # depends on [control=['with'], data=[]] self._dispatch_changed_callbacks()
def references(self): """ Return (tail, head) pairs for each edge in the graph. """ return [ (tail, head) for tail in self.vertices for head in self.children(tail) ]
def function[references, parameter[self]]: constant[ Return (tail, head) pairs for each edge in the graph. ] return[<ast.ListComp object at 0x7da18ede4c10>]
keyword[def] identifier[references] ( identifier[self] ): literal[string] keyword[return] [ ( identifier[tail] , identifier[head] ) keyword[for] identifier[tail] keyword[in] identifier[self] . identifier[vertices] keyword[for] identifier[head] keyword[in] identifier[self] . identifier[children] ( identifier[tail] ) ]
def references(self): """ Return (tail, head) pairs for each edge in the graph. """ return [(tail, head) for tail in self.vertices for head in self.children(tail)]
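references() only assumes a `vertices` iterable and a `children(tail)` method, so a tiny stand-in graph is enough to exercise it. The class below is hypothetical and reuses the module-level function as a method.

class TinyGraph:
    vertices = (1, 2, 3)
    def children(self, tail):
        return {1: [2, 3], 2: [3], 3: []}[tail]
    references = references   # bind the function above as a method

TinyGraph().references()   # -> [(1, 2), (1, 3), (2, 3)]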
def add_domain(session, organization, domain_name, is_top_domain=False):
    """Add a domain to the session.

    This function adds a new domain to the session using `domain_name` as
    its identifier. The new domain will also be linked to the organization
    object of `organization`.

    Values assigned to `domain_name` cannot be `None` or empty. The parameter
    `is_top_domain` only accepts `bool` values.

    As a result, the function returns a new `Domain` object.

    :param session: database session
    :param organization: links the new domain to this organization object
    :param domain_name: name of the domain
    :param is_top_domain: set this domain as a top domain
    :return: a new domain
    :raises ValueError: raised when `domain_name` is `None` or an empty
        string; when `is_top_domain` does not have a `bool` value.
    """
    if domain_name is None:
        raise ValueError("'domain_name' cannot be None")
    if domain_name == '':
        raise ValueError("'domain_name' cannot be an empty string")
    if not isinstance(is_top_domain, bool):
        raise ValueError("'is_top_domain' must have a boolean value")

    dom = Domain(domain=domain_name, is_top_domain=is_top_domain)
    dom.organization = organization
    session.add(dom)

    return dom
def function[add_domain, parameter[session, organization, domain_name, is_top_domain]]: constant[Add a domain to the session. This function adds a new domain to the session using `domain_name` as its identifier. The new domain will also be linked to the organization object of `organization`. Values assigned to `domain_name` cannot be `None` or empty. The parameter `is_top_domain` only accepts `bool` values. As a result, the function returns a new `Domain` object. :param session: database session :param organization: links the new domain to this organization object :param domain_name: name of the domain :param is_top_domain: set this domain as a top domain :return: a new domain :raises ValueError: raised when `domain_name` is `None` or an empty; when `is_top_domain` does not have a `bool` value. ] if compare[name[domain_name] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0e9cee0> if compare[name[domain_name] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da1b0e26740> if <ast.UnaryOp object at 0x7da1b0e143a0> begin[:] <ast.Raise object at 0x7da1b0e15570> variable[dom] assign[=] call[name[Domain], parameter[]] name[dom].organization assign[=] name[organization] call[name[session].add, parameter[name[dom]]] return[name[dom]]
keyword[def] identifier[add_domain] ( identifier[session] , identifier[organization] , identifier[domain_name] , identifier[is_top_domain] = keyword[False] ): literal[string] keyword[if] identifier[domain_name] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[domain_name] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[is_top_domain] , identifier[bool] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[dom] = identifier[Domain] ( identifier[domain] = identifier[domain_name] , identifier[is_top_domain] = identifier[is_top_domain] ) identifier[dom] . identifier[organization] = identifier[organization] identifier[session] . identifier[add] ( identifier[dom] ) keyword[return] identifier[dom]
def add_domain(session, organization, domain_name, is_top_domain=False): """Add a domain to the session. This function adds a new domain to the session using `domain_name` as its identifier. The new domain will also be linked to the organization object of `organization`. Values assigned to `domain_name` cannot be `None` or empty. The parameter `is_top_domain` only accepts `bool` values. As a result, the function returns a new `Domain` object. :param session: database session :param organization: links the new domain to this organization object :param domain_name: name of the domain :param is_top_domain: set this domain as a top domain :return: a new domain :raises ValueError: raised when `domain_name` is `None` or an empty; when `is_top_domain` does not have a `bool` value. """ if domain_name is None: raise ValueError("'domain_name' cannot be None") # depends on [control=['if'], data=[]] if domain_name == '': raise ValueError("'domain_name' cannot be an empty string") # depends on [control=['if'], data=[]] if not isinstance(is_top_domain, bool): raise ValueError("'is_top_domain' must have a boolean value") # depends on [control=['if'], data=[]] dom = Domain(domain=domain_name, is_top_domain=is_top_domain) dom.organization = organization session.add(dom) return dom
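A hedged usage sketch for add_domain, assuming the SQLAlchemy-style session and an Organization model implied by the body; the query and model name are assumptions, not shown in this excerpt.

org = session.query(Organization).filter_by(name='Example Inc.').first()
dom = add_domain(session, org, 'example.com', is_top_domain=True)
session.commit()
# Invalid input raises before anything is added, per the guards above:
# add_domain(session, org, '')  ->  ValueError("'domain_name' cannot be an empty string")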
def grid_angle_to_profile(self, grid_thetas): """The angle between each angle theta on the grid and the profile, in radians. Parameters ----------- grid_thetas : ndarray The angle theta counter-clockwise from the positive x-axis to each coordinate in radians. """ theta_coordinate_to_profile = np.add(grid_thetas, - self.phi_radians) return np.cos(theta_coordinate_to_profile), np.sin(theta_coordinate_to_profile)
def function[grid_angle_to_profile, parameter[self, grid_thetas]]: constant[The angle between each angle theta on the grid and the profile, in radians. Parameters ----------- grid_thetas : ndarray The angle theta counter-clockwise from the positive x-axis to each coordinate in radians. ] variable[theta_coordinate_to_profile] assign[=] call[name[np].add, parameter[name[grid_thetas], <ast.UnaryOp object at 0x7da18f00d570>]] return[tuple[[<ast.Call object at 0x7da18f00e110>, <ast.Call object at 0x7da18f00fb80>]]]
keyword[def] identifier[grid_angle_to_profile] ( identifier[self] , identifier[grid_thetas] ): literal[string] identifier[theta_coordinate_to_profile] = identifier[np] . identifier[add] ( identifier[grid_thetas] ,- identifier[self] . identifier[phi_radians] ) keyword[return] identifier[np] . identifier[cos] ( identifier[theta_coordinate_to_profile] ), identifier[np] . identifier[sin] ( identifier[theta_coordinate_to_profile] )
def grid_angle_to_profile(self, grid_thetas): """The angle between each angle theta on the grid and the profile, in radians. Parameters ----------- grid_thetas : ndarray The angle theta counter-clockwise from the positive x-axis to each coordinate in radians. """ theta_coordinate_to_profile = np.add(grid_thetas, -self.phi_radians) return (np.cos(theta_coordinate_to_profile), np.sin(theta_coordinate_to_profile))
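A worked value for grid_angle_to_profile. With a profile whose phi_radians is pi/4, a grid angle of pi/2 lies pi/4 counter-clockwise of the profile, so both returned components are cos(pi/4) = sin(pi/4) ~ 0.7071. The profile instance itself is assumed here.

import numpy as np
thetas = np.array([np.pi / 4, np.pi / 2])
# With profile.phi_radians == np.pi / 4:
# profile.grid_angle_to_profile(thetas)
# -> (array([1.0, 0.7071...]), array([0.0, 0.7071...]))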
def _fiss_agent_header(headers=None): """ Return request headers for fiss. Inserts FISS as the User-Agent. Initializes __SESSION if it hasn't been set. Args: headers (dict): Include additional headers as key-value pairs """ _set_session() fiss_headers = {"User-Agent" : FISS_USER_AGENT} if headers is not None: fiss_headers.update(headers) return fiss_headers
def function[_fiss_agent_header, parameter[headers]]: constant[ Return request headers for fiss. Inserts FISS as the User-Agent. Initializes __SESSION if it hasn't been set. Args: headers (dict): Include additional headers as key-value pairs ] call[name[_set_session], parameter[]] variable[fiss_headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bc0ca0>], [<ast.Name object at 0x7da1b1bc23b0>]] if compare[name[headers] is_not constant[None]] begin[:] call[name[fiss_headers].update, parameter[name[headers]]] return[name[fiss_headers]]
keyword[def] identifier[_fiss_agent_header] ( identifier[headers] = keyword[None] ): literal[string] identifier[_set_session] () identifier[fiss_headers] ={ literal[string] : identifier[FISS_USER_AGENT] } keyword[if] identifier[headers] keyword[is] keyword[not] keyword[None] : identifier[fiss_headers] . identifier[update] ( identifier[headers] ) keyword[return] identifier[fiss_headers]
def _fiss_agent_header(headers=None): """ Return request headers for fiss. Inserts FISS as the User-Agent. Initializes __SESSION if it hasn't been set. Args: headers (dict): Include additional headers as key-value pairs """ _set_session() fiss_headers = {'User-Agent': FISS_USER_AGENT} if headers is not None: fiss_headers.update(headers) # depends on [control=['if'], data=['headers']] return fiss_headers
def visibility(vis: Number, unit: str = 'm') -> str: """ Format visibility details into a spoken word string """ if not vis: return 'Visibility unknown' if vis.value is None or '/' in vis.repr: ret_vis = vis.spoken else: ret_vis = translate.visibility(vis, unit=unit) if unit == 'm': unit = 'km' ret_vis = ret_vis[:ret_vis.find(' (')].lower().replace(unit, '').strip() ret_vis = core.spoken_number(core.remove_leading_zeros(ret_vis)) ret = 'Visibility ' + ret_vis if unit in SPOKEN_UNITS: if '/' in vis.repr and 'half' not in ret: ret += ' of a' ret += ' ' + SPOKEN_UNITS[unit] if not (('one half' in ret and ' and ' not in ret) or 'of a' in ret): ret += 's' else: ret += unit return ret
def function[visibility, parameter[vis, unit]]: constant[ Format visibility details into a spoken word string ] if <ast.UnaryOp object at 0x7da20c76e920> begin[:] return[constant[Visibility unknown]] if <ast.BoolOp object at 0x7da20c76d6c0> begin[:] variable[ret_vis] assign[=] name[vis].spoken variable[ret] assign[=] binary_operation[constant[Visibility ] + name[ret_vis]] if compare[name[unit] in name[SPOKEN_UNITS]] begin[:] if <ast.BoolOp object at 0x7da1b1528b80> begin[:] <ast.AugAssign object at 0x7da1b1529690> <ast.AugAssign object at 0x7da1b1529b40> if <ast.UnaryOp object at 0x7da1b15286d0> begin[:] <ast.AugAssign object at 0x7da1b1529270> return[name[ret]]
keyword[def] identifier[visibility] ( identifier[vis] : identifier[Number] , identifier[unit] : identifier[str] = literal[string] )-> identifier[str] : literal[string] keyword[if] keyword[not] identifier[vis] : keyword[return] literal[string] keyword[if] identifier[vis] . identifier[value] keyword[is] keyword[None] keyword[or] literal[string] keyword[in] identifier[vis] . identifier[repr] : identifier[ret_vis] = identifier[vis] . identifier[spoken] keyword[else] : identifier[ret_vis] = identifier[translate] . identifier[visibility] ( identifier[vis] , identifier[unit] = identifier[unit] ) keyword[if] identifier[unit] == literal[string] : identifier[unit] = literal[string] identifier[ret_vis] = identifier[ret_vis] [: identifier[ret_vis] . identifier[find] ( literal[string] )]. identifier[lower] (). identifier[replace] ( identifier[unit] , literal[string] ). identifier[strip] () identifier[ret_vis] = identifier[core] . identifier[spoken_number] ( identifier[core] . identifier[remove_leading_zeros] ( identifier[ret_vis] )) identifier[ret] = literal[string] + identifier[ret_vis] keyword[if] identifier[unit] keyword[in] identifier[SPOKEN_UNITS] : keyword[if] literal[string] keyword[in] identifier[vis] . identifier[repr] keyword[and] literal[string] keyword[not] keyword[in] identifier[ret] : identifier[ret] += literal[string] identifier[ret] += literal[string] + identifier[SPOKEN_UNITS] [ identifier[unit] ] keyword[if] keyword[not] (( literal[string] keyword[in] identifier[ret] keyword[and] literal[string] keyword[not] keyword[in] identifier[ret] ) keyword[or] literal[string] keyword[in] identifier[ret] ): identifier[ret] += literal[string] keyword[else] : identifier[ret] += identifier[unit] keyword[return] identifier[ret]
def visibility(vis: Number, unit: str='m') -> str: """ Format visibility details into a spoken word string """ if not vis: return 'Visibility unknown' # depends on [control=['if'], data=[]] if vis.value is None or '/' in vis.repr: ret_vis = vis.spoken # depends on [control=['if'], data=[]] else: ret_vis = translate.visibility(vis, unit=unit) if unit == 'm': unit = 'km' # depends on [control=['if'], data=['unit']] ret_vis = ret_vis[:ret_vis.find(' (')].lower().replace(unit, '').strip() ret_vis = core.spoken_number(core.remove_leading_zeros(ret_vis)) ret = 'Visibility ' + ret_vis if unit in SPOKEN_UNITS: if '/' in vis.repr and 'half' not in ret: ret += ' of a' # depends on [control=['if'], data=[]] ret += ' ' + SPOKEN_UNITS[unit] if not ('one half' in ret and ' and ' not in ret or 'of a' in ret): ret += 's' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['unit', 'SPOKEN_UNITS']] else: ret += unit return ret
def rotate_ryb(self, angle=180): """ Returns a color rotated on the artistic RYB color wheel. An artistic color wheel has slightly different opposites (e.g. purple-yellow instead of purple-lime). It is mathematically incorrect but generally assumed to provide better complementary colors. http://en.wikipedia.org/wiki/RYB_color_model """ h = self.h * 360 angle = angle % 360 # Approximation of Itten's RYB color wheel. # In HSB, colors hues range from 0-360. # However, on the artistic color wheel these are not evenly distributed. # The second tuple value contains the actual distribution. wheel = [ (0, 0), (15, 8), (30, 17), (45, 26), (60, 34), (75, 41), (90, 48), (105, 54), (120, 60), (135, 81), (150, 103), (165, 123), (180, 138), (195, 155), (210, 171), (225, 187), (240, 204), (255, 219), (270, 234), (285, 251), (300, 267), (315, 282), (330, 298), (345, 329), (360, 0) ] # Given a hue, find out under what angle it is # located on the artistic color wheel. for i in _range(len(wheel) - 1): x0, y0 = wheel[i] x1, y1 = wheel[i + 1] if y1 < y0: y1 += 360 if y0 <= h <= y1: a = 1.0 * x0 + (x1 - x0) * (h - y0) / (y1 - y0) break # And the user-given angle (e.g. complement). a = (a + angle) % 360 # For the given angle, find out what hue is # located there on the artistic color wheel. for i in _range(len(wheel) - 1): x0, y0 = wheel[i] x1, y1 = wheel[i + 1] if y1 < y0: y1 += 360 if x0 <= a <= x1: h = 1.0 * y0 + (y1 - y0) * (a - x0) / (x1 - x0) break h = h % 360 return Color(h / 360, self.s, self.brightness, self.a, mode="hsb", name="")
def function[rotate_ryb, parameter[self, angle]]: constant[ Returns a color rotated on the artistic RYB color wheel. An artistic color wheel has slightly different opposites (e.g. purple-yellow instead of purple-lime). It is mathematically incorrect but generally assumed to provide better complementary colors. http://en.wikipedia.org/wiki/RYB_color_model ] variable[h] assign[=] binary_operation[name[self].h * constant[360]] variable[angle] assign[=] binary_operation[name[angle] <ast.Mod object at 0x7da2590d6920> constant[360]] variable[wheel] assign[=] list[[<ast.Tuple object at 0x7da1afff6ec0>, <ast.Tuple object at 0x7da1afff4a90>, <ast.Tuple object at 0x7da1afff4dc0>, <ast.Tuple object at 0x7da1afff64a0>, <ast.Tuple object at 0x7da1afff6500>, <ast.Tuple object at 0x7da1afff63b0>, <ast.Tuple object at 0x7da1afff6410>, <ast.Tuple object at 0x7da1afff61d0>, <ast.Tuple object at 0x7da1afff6230>, <ast.Tuple object at 0x7da1afff4fa0>, <ast.Tuple object at 0x7da1afff69b0>, <ast.Tuple object at 0x7da1afff6890>, <ast.Tuple object at 0x7da1afff6830>, <ast.Tuple object at 0x7da1afff68c0>, <ast.Tuple object at 0x7da1afff6740>, <ast.Tuple object at 0x7da1afff7bb0>, <ast.Tuple object at 0x7da1afff7fa0>, <ast.Tuple object at 0x7da1afff70a0>, <ast.Tuple object at 0x7da1afff7d00>, <ast.Tuple object at 0x7da1afff71f0>, <ast.Tuple object at 0x7da1afff7130>, <ast.Tuple object at 0x7da1afff7250>, <ast.Tuple object at 0x7da1afff6dd0>, <ast.Tuple object at 0x7da1afff6e30>, <ast.Tuple object at 0x7da1afff6bf0>]] for taget[name[i]] in starred[call[name[_range], parameter[binary_operation[call[name[len], parameter[name[wheel]]] - constant[1]]]]] begin[:] <ast.Tuple object at 0x7da1afff6560> assign[=] call[name[wheel]][name[i]] <ast.Tuple object at 0x7da1afff4c10> assign[=] call[name[wheel]][binary_operation[name[i] + constant[1]]] if compare[name[y1] less[<] name[y0]] begin[:] <ast.AugAssign object at 0x7da1afff7c10> if compare[name[y0] less_or_equal[<=] name[h]] begin[:] variable[a] assign[=] binary_operation[binary_operation[constant[1.0] * name[x0]] + binary_operation[binary_operation[binary_operation[name[x1] - name[x0]] * binary_operation[name[h] - name[y0]]] / binary_operation[name[y1] - name[y0]]]] break variable[a] assign[=] binary_operation[binary_operation[name[a] + name[angle]] <ast.Mod object at 0x7da2590d6920> constant[360]] for taget[name[i]] in starred[call[name[_range], parameter[binary_operation[call[name[len], parameter[name[wheel]]] - constant[1]]]]] begin[:] <ast.Tuple object at 0x7da1afff4a00> assign[=] call[name[wheel]][name[i]] <ast.Tuple object at 0x7da1afff4f40> assign[=] call[name[wheel]][binary_operation[name[i] + constant[1]]] if compare[name[y1] less[<] name[y0]] begin[:] <ast.AugAssign object at 0x7da1b00f7b80> if compare[name[x0] less_or_equal[<=] name[a]] begin[:] variable[h] assign[=] binary_operation[binary_operation[constant[1.0] * name[y0]] + binary_operation[binary_operation[binary_operation[name[y1] - name[y0]] * binary_operation[name[a] - name[x0]]] / binary_operation[name[x1] - name[x0]]]] break variable[h] assign[=] binary_operation[name[h] <ast.Mod object at 0x7da2590d6920> constant[360]] return[call[name[Color], parameter[binary_operation[name[h] / constant[360]], name[self].s, name[self].brightness, name[self].a]]]
keyword[def] identifier[rotate_ryb] ( identifier[self] , identifier[angle] = literal[int] ): literal[string] identifier[h] = identifier[self] . identifier[h] * literal[int] identifier[angle] = identifier[angle] % literal[int] identifier[wheel] =[ ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ),( literal[int] , literal[int] ), ( literal[int] , literal[int] ) ] keyword[for] identifier[i] keyword[in] identifier[_range] ( identifier[len] ( identifier[wheel] )- literal[int] ): identifier[x0] , identifier[y0] = identifier[wheel] [ identifier[i] ] identifier[x1] , identifier[y1] = identifier[wheel] [ identifier[i] + literal[int] ] keyword[if] identifier[y1] < identifier[y0] : identifier[y1] += literal[int] keyword[if] identifier[y0] <= identifier[h] <= identifier[y1] : identifier[a] = literal[int] * identifier[x0] +( identifier[x1] - identifier[x0] )*( identifier[h] - identifier[y0] )/( identifier[y1] - identifier[y0] ) keyword[break] identifier[a] =( identifier[a] + identifier[angle] )% literal[int] keyword[for] identifier[i] keyword[in] identifier[_range] ( identifier[len] ( identifier[wheel] )- literal[int] ): identifier[x0] , identifier[y0] = identifier[wheel] [ identifier[i] ] identifier[x1] , identifier[y1] = identifier[wheel] [ identifier[i] + literal[int] ] keyword[if] identifier[y1] < identifier[y0] : identifier[y1] += literal[int] keyword[if] identifier[x0] <= identifier[a] <= identifier[x1] : identifier[h] = literal[int] * identifier[y0] +( identifier[y1] - identifier[y0] )*( identifier[a] - identifier[x0] )/( identifier[x1] - identifier[x0] ) keyword[break] identifier[h] = identifier[h] % literal[int] keyword[return] identifier[Color] ( identifier[h] / literal[int] , identifier[self] . identifier[s] , identifier[self] . identifier[brightness] , identifier[self] . identifier[a] , identifier[mode] = literal[string] , identifier[name] = literal[string] )
def rotate_ryb(self, angle=180): """ Returns a color rotated on the artistic RYB color wheel. An artistic color wheel has slightly different opposites (e.g. purple-yellow instead of purple-lime). It is mathematically incorrect but generally assumed to provide better complementary colors. http://en.wikipedia.org/wiki/RYB_color_model """ h = self.h * 360 angle = angle % 360 # Approximation of Itten's RYB color wheel. # In HSB, colors hues range from 0-360. # However, on the artistic color wheel these are not evenly distributed. # The second tuple value contains the actual distribution. wheel = [(0, 0), (15, 8), (30, 17), (45, 26), (60, 34), (75, 41), (90, 48), (105, 54), (120, 60), (135, 81), (150, 103), (165, 123), (180, 138), (195, 155), (210, 171), (225, 187), (240, 204), (255, 219), (270, 234), (285, 251), (300, 267), (315, 282), (330, 298), (345, 329), (360, 0)] # Given a hue, find out under what angle it is # located on the artistic color wheel. for i in _range(len(wheel) - 1): (x0, y0) = wheel[i] (x1, y1) = wheel[i + 1] if y1 < y0: y1 += 360 # depends on [control=['if'], data=['y1']] if y0 <= h <= y1: a = 1.0 * x0 + (x1 - x0) * (h - y0) / (y1 - y0) break # depends on [control=['if'], data=['y0', 'h']] # depends on [control=['for'], data=['i']] # And the user-given angle (e.g. complement). a = (a + angle) % 360 # For the given angle, find out what hue is # located there on the artistic color wheel. for i in _range(len(wheel) - 1): (x0, y0) = wheel[i] (x1, y1) = wheel[i + 1] if y1 < y0: y1 += 360 # depends on [control=['if'], data=['y1']] if x0 <= a <= x1: h = 1.0 * y0 + (y1 - y0) * (a - x0) / (x1 - x0) break # depends on [control=['if'], data=['x0', 'a']] # depends on [control=['for'], data=['i']] h = h % 360 return Color(h / 360, self.s, self.brightness, self.a, mode='hsb', name='')
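The wheel table in rotate_ryb is used twice as a piecewise-linear map (hue to wheel angle, then wheel angle back to hue). A minimal standalone sketch of that interpolation, needing no Color class, might look like this; `wheel` stands for the same (even_angle, ryb_angle) list defined above.

def interp_wheel(wheel, x):
    # Piecewise-linear y(x) over the wheel's breakpoints, unwrapping the
    # final segment where the y values cross 360 back to 0.
    for (x0, y0), (x1, y1) in zip(wheel, wheel[1:]):
        if y1 < y0:
            y1 += 360
        if x0 <= x <= x1:
            return (y0 + (y1 - y0) * (x - x0) / (x1 - x0)) % 360
    return x % 360

interp_wheel(wheel, 180)   # -> 138.0, matching the (180, 138) breakpoint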
def FlashFromFile(self, partition, source_file, source_len=0, info_cb=DEFAULT_MESSAGE_CALLBACK, progress_callback=None): """Flashes a partition from the file on disk. Args: partition: Partition name to flash to. source_file: Filename to download to the device. source_len: Optional length of source_file, uses os.stat if not provided. info_cb: See Download. progress_callback: See Download. Returns: Download and flash responses, normally nothing. """ if source_len == 0: # Fall back to stat. source_len = os.stat(source_file).st_size download_response = self.Download( source_file, source_len=source_len, info_cb=info_cb, progress_callback=progress_callback) flash_response = self.Flash(partition, info_cb=info_cb) return download_response + flash_response
def function[FlashFromFile, parameter[self, partition, source_file, source_len, info_cb, progress_callback]]: constant[Flashes a partition from the file on disk. Args: partition: Partition name to flash to. source_file: Filename to download to the device. source_len: Optional length of source_file, uses os.stat if not provided. info_cb: See Download. progress_callback: See Download. Returns: Download and flash responses, normally nothing. ] if compare[name[source_len] equal[==] constant[0]] begin[:] variable[source_len] assign[=] call[name[os].stat, parameter[name[source_file]]].st_size variable[download_response] assign[=] call[name[self].Download, parameter[name[source_file]]] variable[flash_response] assign[=] call[name[self].Flash, parameter[name[partition]]] return[binary_operation[name[download_response] + name[flash_response]]]
keyword[def] identifier[FlashFromFile] ( identifier[self] , identifier[partition] , identifier[source_file] , identifier[source_len] = literal[int] , identifier[info_cb] = identifier[DEFAULT_MESSAGE_CALLBACK] , identifier[progress_callback] = keyword[None] ): literal[string] keyword[if] identifier[source_len] == literal[int] : identifier[source_len] = identifier[os] . identifier[stat] ( identifier[source_file] ). identifier[st_size] identifier[download_response] = identifier[self] . identifier[Download] ( identifier[source_file] , identifier[source_len] = identifier[source_len] , identifier[info_cb] = identifier[info_cb] , identifier[progress_callback] = identifier[progress_callback] ) identifier[flash_response] = identifier[self] . identifier[Flash] ( identifier[partition] , identifier[info_cb] = identifier[info_cb] ) keyword[return] identifier[download_response] + identifier[flash_response]
def FlashFromFile(self, partition, source_file, source_len=0, info_cb=DEFAULT_MESSAGE_CALLBACK, progress_callback=None): """Flashes a partition from the file on disk. Args: partition: Partition name to flash to. source_file: Filename to download to the device. source_len: Optional length of source_file, uses os.stat if not provided. info_cb: See Download. progress_callback: See Download. Returns: Download and flash responses, normally nothing. """ if source_len == 0: # Fall back to stat. source_len = os.stat(source_file).st_size # depends on [control=['if'], data=['source_len']] download_response = self.Download(source_file, source_len=source_len, info_cb=info_cb, progress_callback=progress_callback) flash_response = self.Flash(partition, info_cb=info_cb) return download_response + flash_response
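A hedged usage sketch: `dev` stands in for an instance of the (unshown) fastboot-style class this method belongs to, and the partition and image names are placeholders:

import os

image = "boot.img"                                   # placeholder path
responses = dev.FlashFromFile("boot", image,
                              source_len=os.stat(image).st_size)  # optional; computed anyway if left 0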
def ttotdev(data, rate=1.0, data_type="phase", taus=None): """ Time Total Deviation modified total variance scaled by tau^2 / 3 NIST SP 1065 eqn (28) page 26 <--- formula should have tau squared !?! """ (taus, mtotdevs, mde, ns) = mtotdev(data, data_type=data_type, rate=rate, taus=taus) td = taus*mtotdevs / np.sqrt(3.0) tde = td / np.sqrt(ns) return taus, td, tde, ns
def function[ttotdev, parameter[data, rate, data_type, taus]]: constant[ Time Total Deviation modified total variance scaled by tau^2 / 3 NIST SP 1065 eqn (28) page 26 <--- formula should have tau squared !?! ] <ast.Tuple object at 0x7da1b155eec0> assign[=] call[name[mtotdev], parameter[name[data]]] variable[td] assign[=] binary_operation[binary_operation[name[taus] * name[mtotdevs]] / call[name[np].sqrt, parameter[constant[3.0]]]] variable[tde] assign[=] binary_operation[name[td] / call[name[np].sqrt, parameter[name[ns]]]] return[tuple[[<ast.Name object at 0x7da1b155dde0>, <ast.Name object at 0x7da1b155ec20>, <ast.Name object at 0x7da1b155ebf0>, <ast.Name object at 0x7da1b155f610>]]]
keyword[def] identifier[ttotdev] ( identifier[data] , identifier[rate] = literal[int] , identifier[data_type] = literal[string] , identifier[taus] = keyword[None] ): literal[string] ( identifier[taus] , identifier[mtotdevs] , identifier[mde] , identifier[ns] )= identifier[mtotdev] ( identifier[data] , identifier[data_type] = identifier[data_type] , identifier[rate] = identifier[rate] , identifier[taus] = identifier[taus] ) identifier[td] = identifier[taus] * identifier[mtotdevs] / identifier[np] . identifier[sqrt] ( literal[int] ) identifier[tde] = identifier[td] / identifier[np] . identifier[sqrt] ( identifier[ns] ) keyword[return] identifier[taus] , identifier[td] , identifier[tde] , identifier[ns]
def ttotdev(data, rate=1.0, data_type='phase', taus=None): """ Time Total Deviation modified total variance scaled by tau^2 / 3 NIST SP 1065 eqn (28) page 26 <--- formula should have tau squared !?! """ (taus, mtotdevs, mde, ns) = mtotdev(data, data_type=data_type, rate=rate, taus=taus) td = taus * mtotdevs / np.sqrt(3.0) tde = td / np.sqrt(ns) return (taus, td, tde, ns)
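Restating the scaling the code applies (td = taus * mtotdevs / sqrt(3), tde = td / sqrt(ns)) as equations, with the tau-squared factor that the docstring's note is questioning:

\sigma^2_{\mathrm{TTOT}}(\tau) = \frac{\tau^2}{3}\,\sigma^2_{\mathrm{MTOT}}(\tau)
\qquad\Longrightarrow\qquad
\mathrm{TTOTDEV}(\tau) = \frac{\tau}{\sqrt{3}}\,\mathrm{MTOTDEV}(\tau),
\qquad
\mathrm{err}(\tau) = \frac{\mathrm{TTOTDEV}(\tau)}{\sqrt{n_s}}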
def write(self, data): ''' This could be a bit less clumsy. ''' if data == '\n': # print does this return self.stream.write(data) else: bytes_ = 0 for line in data.splitlines(True): nl = '' if line.endswith('\n'): # mv nl to end: line = line[:-1] nl = '\n' bytes_ += self.stream.write( f'{self.start}{line}{self.default}{nl}' ) or 0 # in case None returned (on Windows) return bytes_
def function[write, parameter[self, data]]: constant[ This could be a bit less clumsy. ] if compare[name[data] equal[==] constant[ ]] begin[:] return[call[name[self].stream.write, parameter[name[data]]]]
keyword[def] identifier[write] ( identifier[self] , identifier[data] ): literal[string] keyword[if] identifier[data] == literal[string] : keyword[return] identifier[self] . identifier[stream] . identifier[write] ( identifier[data] ) keyword[else] : identifier[bytes_] = literal[int] keyword[for] identifier[line] keyword[in] identifier[data] . identifier[splitlines] ( keyword[True] ): identifier[nl] = literal[string] keyword[if] identifier[line] . identifier[endswith] ( literal[string] ): identifier[line] = identifier[line] [:- literal[int] ] identifier[nl] = literal[string] identifier[bytes_] += identifier[self] . identifier[stream] . identifier[write] ( literal[string] ) keyword[or] literal[int] keyword[return] identifier[bytes_]
def write(self, data): """ This could be a bit less clumsy. """ if data == '\n': # print does this return self.stream.write(data) # depends on [control=['if'], data=['data']] else: bytes_ = 0 for line in data.splitlines(True): nl = '' if line.endswith('\n'): # mv nl to end: line = line[:-1] nl = '\n' # depends on [control=['if'], data=[]] bytes_ += self.stream.write(f'{self.start}{line}{self.default}{nl}') or 0 # in case None returned (on Windows) # depends on [control=['for'], data=['line']] return bytes_
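The per-line trick above (peel a trailing newline off, emit it after the reset code) in isolation; this sketch assumes nothing beyond standard ANSI escapes:

start, default = "\x1b[36m", "\x1b[0m"            # cyan on, attributes off
for line in "first\nsecond\n".splitlines(True):   # True keeps each newline
    nl = ""
    if line.endswith("\n"):
        line, nl = line[:-1], "\n"                # reset comes before the newline
    print(f"{start}{line}{default}{nl}", end="")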
def DefaultAdapter(self): '''Retrieve the default adapter ''' default_adapter = None for obj in mockobject.objects.keys(): if obj.startswith('/org/bluez/') and 'dev_' not in obj: default_adapter = obj if default_adapter: return dbus.ObjectPath(default_adapter, variant_level=1) else: raise dbus.exceptions.DBusException( 'No such adapter.', name='org.bluez.Error.NoSuchAdapter')
def function[DefaultAdapter, parameter[self]]: constant[Retrieve the default adapter ] variable[default_adapter] assign[=] constant[None] for taget[name[obj]] in starred[call[name[mockobject].objects.keys, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da20c992200> begin[:] variable[default_adapter] assign[=] name[obj] if name[default_adapter] begin[:] return[call[name[dbus].ObjectPath, parameter[name[default_adapter]]]]
keyword[def] identifier[DefaultAdapter] ( identifier[self] ): literal[string] identifier[default_adapter] = keyword[None] keyword[for] identifier[obj] keyword[in] identifier[mockobject] . identifier[objects] . identifier[keys] (): keyword[if] identifier[obj] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[not] keyword[in] identifier[obj] : identifier[default_adapter] = identifier[obj] keyword[if] identifier[default_adapter] : keyword[return] identifier[dbus] . identifier[ObjectPath] ( identifier[default_adapter] , identifier[variant_level] = literal[int] ) keyword[else] : keyword[raise] identifier[dbus] . identifier[exceptions] . identifier[DBusException] ( literal[string] , identifier[name] = literal[string] )
def DefaultAdapter(self): """Retrieve the default adapter """ default_adapter = None for obj in mockobject.objects.keys(): if obj.startswith('/org/bluez/') and 'dev_' not in obj: default_adapter = obj # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['obj']] if default_adapter: return dbus.ObjectPath(default_adapter, variant_level=1) # depends on [control=['if'], data=[]] else: raise dbus.exceptions.DBusException('No such adapter.', name='org.bluez.Error.NoSuchAdapter')
def fetch_uri(self, directory, uri): """ Use ``urllib.urlretrieve`` to download package to file in sandbox dir. @param directory: directory to download to @type directory: string @param uri: uri to download @type uri: string @returns: 0 = success or 1 for failed download """ filename = os.path.basename(urlparse(uri)[2]) if os.path.exists(filename): self.logger.error("ERROR: File exists: " + filename) return 1 try: downloaded_filename, headers = urlretrieve(uri, filename) self.logger.info("Downloaded ./" + filename) except IOError as err_msg: self.logger.error("Error downloading package %s from URL %s" \ % (filename, uri)) self.logger.error(str(err_msg)) return 1 if headers.gettype() in ["text/html"]: dfile = open(downloaded_filename) if re.search("404 Not Found", "".join(dfile.readlines())): dfile.close() self.logger.error("'404 Not Found' error") return 1 dfile.close() return 0
def function[fetch_uri, parameter[self, directory, uri]]: constant[ Use ``urllib.urlretrieve`` to download package to file in sandbox dir. @param directory: directory to download to @type directory: string @param uri: uri to download @type uri: string @returns: 0 = success or 1 for failed download ] variable[filename] assign[=] call[name[os].path.basename, parameter[call[call[name[urlparse], parameter[name[uri]]]][constant[2]]]] if call[name[os].path.exists, parameter[name[filename]]] begin[:] call[name[self].logger.error, parameter[binary_operation[constant[ERROR: File exists: ] + name[filename]]]] return[constant[1]] <ast.Try object at 0x7da18dc99c90> if compare[call[name[headers].gettype, parameter[]] in list[[<ast.Constant object at 0x7da18dc990f0>]]] begin[:] variable[dfile] assign[=] call[name[open], parameter[name[downloaded_filename]]] if call[name[re].search, parameter[constant[404 Not Found], call[constant[].join, parameter[call[name[dfile].readlines, parameter[]]]]]] begin[:] call[name[dfile].close, parameter[]] call[name[self].logger.error, parameter[constant['404 Not Found' error]]] return[constant[1]] call[name[dfile].close, parameter[]] return[constant[0]]
keyword[def] identifier[fetch_uri] ( identifier[self] , identifier[directory] , identifier[uri] ): literal[string] identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[urlparse] ( identifier[uri] )[ literal[int] ]) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ): identifier[self] . identifier[logger] . identifier[error] ( literal[string] + identifier[filename] ) keyword[return] literal[int] keyword[try] : identifier[downloaded_filename] , identifier[headers] = identifier[urlretrieve] ( identifier[uri] , identifier[filename] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] + identifier[filename] ) keyword[except] identifier[IOError] keyword[as] identifier[err_msg] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] %( identifier[filename] , identifier[uri] )) identifier[self] . identifier[logger] . identifier[error] ( identifier[str] ( identifier[err_msg] )) keyword[return] literal[int] keyword[if] identifier[headers] . identifier[gettype] () keyword[in] [ literal[string] ]: identifier[dfile] = identifier[open] ( identifier[downloaded_filename] ) keyword[if] identifier[re] . identifier[search] ( literal[string] , literal[string] . identifier[join] ( identifier[dfile] . identifier[readlines] ())): identifier[dfile] . identifier[close] () identifier[self] . identifier[logger] . identifier[error] ( literal[string] ) keyword[return] literal[int] identifier[dfile] . identifier[close] () keyword[return] literal[int]
def fetch_uri(self, directory, uri): """ Use ``urllib.urlretrieve`` to download package to file in sandbox dir. @param directory: directory to download to @type directory: string @param uri: uri to download @type uri: string @returns: 0 = success or 1 for failed download """ filename = os.path.basename(urlparse(uri)[2]) if os.path.exists(filename): self.logger.error('ERROR: File exists: ' + filename) return 1 # depends on [control=['if'], data=[]] try: (downloaded_filename, headers) = urlretrieve(uri, filename) self.logger.info('Downloaded ./' + filename) # depends on [control=['try'], data=[]] except IOError as err_msg: self.logger.error('Error downloading package %s from URL %s' % (filename, uri)) self.logger.error(str(err_msg)) return 1 # depends on [control=['except'], data=['err_msg']] if headers.gettype() in ['text/html']: dfile = open(downloaded_filename) if re.search('404 Not Found', ''.join(dfile.readlines())): dfile.close() self.logger.error("'404 Not Found' error") return 1 # depends on [control=['if'], data=[]] dfile.close() # depends on [control=['if'], data=[]] return 0
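A usage sketch (Python 2 vintage, given urlretrieve and headers.gettype); note that the body above saves by URI basename into the current directory, so the directory argument is accepted but never used:

# `fetcher` and `sandbox_dir` are placeholders for an instance of the
# owning class and its download directory
if fetcher.fetch_uri(sandbox_dir, "http://example.com/pkg-1.0.tar.gz"):
    fetcher.logger.error("download failed")       # returns 1 on any failure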
def is_decomposed(P):
    """
    Check if a polynomial (array) is in component form.

    Args:
        P (Poly):
            Input data.

    Returns:
        (bool):
            True if all polynomials in ``P`` are in component form.

    Examples:
        >>> x,y = cp.variable(2)
        >>> print(cp.is_decomposed(cp.Poly([1,x,x*y])))
        True
        >>> print(cp.is_decomposed(cp.Poly([x+1,x*y])))
        False
    """
    if P.shape:
        return min([is_decomposed(poly) for poly in P])
    return len(P.keys) <= 1
def function[is_decomposed, parameter[P]]:
    constant[
    Check if a polynomial (array) is in component form.

    Args:
        P (Poly):
            Input data.

    Returns:
        (bool):
            True if all polynomials in ``P`` are in component form.

    Examples:
        >>> x,y = cp.variable(2)
        >>> print(cp.is_decomposed(cp.Poly([1,x,x*y])))
        True
        >>> print(cp.is_decomposed(cp.Poly([x+1,x*y])))
        False
    ]
    if name[P].shape begin[:]
        return[call[name[min], parameter[<ast.ListComp object at 0x7da2045644f0>]]]
    return[compare[call[name[len], parameter[name[P].keys]] less_or_equal[<=] constant[1]]]
keyword[def] identifier[is_decomposed] ( identifier[P] ): literal[string] keyword[if] identifier[P] . identifier[shape] : keyword[return] identifier[min] ([ identifier[is_decomposed] ( identifier[poly] ) keyword[for] identifier[poly] keyword[in] identifier[P] ]) keyword[return] identifier[len] ( identifier[P] . identifier[keys] )<= literal[int]
def is_decomposed(P):
    """
    Check if a polynomial (array) is in component form.

    Args:
        P (Poly):
            Input data.

    Returns:
        (bool):
            True if all polynomials in ``P`` are in component form.

    Examples:
        >>> x,y = cp.variable(2)
        >>> print(cp.is_decomposed(cp.Poly([1,x,x*y])))
        True
        >>> print(cp.is_decomposed(cp.Poly([x+1,x*y])))
        False
    """
    if P.shape:
        return min([is_decomposed(poly) for poly in P]) # depends on [control=['if'], data=[]]
    return len(P.keys) <= 1
def batch_update(self, command, rows):
    """
    For bulk insertion of moderate volumes, 1-5k records per call

    :param command: SQL insert or update
    :param rows: list of dict
    :return: dict
    """
    request = {
        "database": {
            "alias": self.__options['dbAlias']
        },
        "batchUpdate": {
            "command": command,
            "rows": rows,
            "shardKey": self.__options.get('shardKey'),
        }
    }

    dr = self.__app.native_api_call('db', 'batch-update', request, self.__options, False)
    return json.loads(dr.text)
def function[batch_update, parameter[self, command, rows]]:
    constant[
    For bulk insertion of moderate volumes, 1-5k records per call

    :param command: SQL insert or update
    :param rows: list of dict
    :return: dict
    ]
    variable[request] assign[=] dictionary[[<ast.Constant object at 0x7da18f09e0b0>, <ast.Constant object at 0x7da18f09e170>], [<ast.Dict object at 0x7da18f09c520>, <ast.Dict object at 0x7da2054a4e50>]]
    variable[dr] assign[=] call[name[self].__app.native_api_call, parameter[constant[db], constant[batch-update], name[request], name[self].__options, constant[False]]]
    return[call[name[json].loads, parameter[name[dr].text]]]
keyword[def] identifier[batch_update] ( identifier[self] , identifier[command] , identifier[rows] ): literal[string] identifier[request] ={ literal[string] :{ literal[string] : identifier[self] . identifier[__options] [ literal[string] ] }, literal[string] :{ literal[string] : identifier[command] , literal[string] : identifier[rows] , literal[string] : identifier[self] . identifier[__options] . identifier[get] ( literal[string] ), } } identifier[dr] = identifier[self] . identifier[__app] . identifier[native_api_call] ( literal[string] , literal[string] , identifier[request] , identifier[self] . identifier[__options] , keyword[False] ) keyword[return] identifier[json] . identifier[loads] ( identifier[dr] . identifier[text] )
def batch_update(self, command, rows):
    """
    For bulk insertion of moderate volumes, 1-5k records per call

    :param command: SQL insert or update
    :param rows: list of dict
    :return: dict
    """
    request = {'database': {'alias': self.__options['dbAlias']}, 'batchUpdate': {'command': command, 'rows': rows, 'shardKey': self.__options.get('shardKey')}}
    dr = self.__app.native_api_call('db', 'batch-update', request, self.__options, False)
    return json.loads(dr.text)
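A hedged usage sketch; `db` is assumed to be an instance of the wrapper class this method lives on, and the SQL and its parameter syntax are placeholders, since the backing native_api_call API is not shown:

rows = [{"id": 1, "name": "alpha"},
        {"id": 2, "name": "beta"}]
result = db.batch_update("INSERT INTO items (id, name) VALUES (:id, :name)",
                         rows)   # returns the parsed JSON response as a dict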
def _colorspace(self, image, colorspace):
    """
    Sets the image's colorspace. This is typically 'RGB' or 'GRAY', but
    may be other things, depending on your choice of Engine.

    :param PIL.Image image: The image whose colorspace to adjust.
    :param str colorspace: One of either 'RGB' or 'GRAY'.
    :rtype: PIL.Image
    :returns: The colorspace-adjusted image.
    """
    if colorspace == 'RGB':
        if image.mode == 'RGBA':
            # RGBA is just RGB + Alpha
            return image
        if image.mode == 'P' and 'transparency' in image.info:
            return image.convert('RGBA')
        return image.convert('RGB')
    if colorspace == 'GRAY':
        return image.convert('L')
    return image
def function[_colorspace, parameter[self, image, colorspace]]:
    constant[
    Sets the image's colorspace. This is typically 'RGB' or 'GRAY', but
    may be other things, depending on your choice of Engine.

    :param PIL.Image image: The image whose colorspace to adjust.
    :param str colorspace: One of either 'RGB' or 'GRAY'.
    :rtype: PIL.Image
    :returns: The colorspace-adjusted image.
    ]
    if compare[name[colorspace] equal[==] constant[RGB]] begin[:]
        if compare[name[image].mode equal[==] constant[RGBA]] begin[:]
            return[name[image]]
        if <ast.BoolOp object at 0x7da204961630> begin[:]
            return[call[name[image].convert, parameter[constant[RGBA]]]]
        return[call[name[image].convert, parameter[constant[RGB]]]]
    if compare[name[colorspace] equal[==] constant[GRAY]] begin[:]
        return[call[name[image].convert, parameter[constant[L]]]]
    return[name[image]]
keyword[def] identifier[_colorspace] ( identifier[self] , identifier[image] , identifier[colorspace] ): literal[string] keyword[if] identifier[colorspace] == literal[string] : keyword[if] identifier[image] . identifier[mode] == literal[string] : keyword[return] identifier[image] keyword[if] identifier[image] . identifier[mode] == literal[string] keyword[and] literal[string] keyword[in] identifier[image] . identifier[info] : keyword[return] identifier[image] . identifier[convert] ( literal[string] ) keyword[return] identifier[image] . identifier[convert] ( literal[string] ) keyword[if] identifier[colorspace] == literal[string] : keyword[return] identifier[image] . identifier[convert] ( literal[string] ) keyword[return] identifier[image]
def _colorspace(self, image, colorspace):
    """
    Sets the image's colorspace. This is typically 'RGB' or 'GRAY', but
    may be other things, depending on your choice of Engine.

    :param PIL.Image image: The image whose colorspace to adjust.
    :param str colorspace: One of either 'RGB' or 'GRAY'.
    :rtype: PIL.Image
    :returns: The colorspace-adjusted image.
    """
    if colorspace == 'RGB':
        if image.mode == 'RGBA':
            # RGBA is just RGB + Alpha
            return image # depends on [control=['if'], data=[]]
        if image.mode == 'P' and 'transparency' in image.info:
            return image.convert('RGBA') # depends on [control=['if'], data=[]]
        return image.convert('RGB') # depends on [control=['if'], data=[]]
    if colorspace == 'GRAY':
        return image.convert('L') # depends on [control=['if'], data=[]]
    return image
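The palette-with-transparency branch is the subtle one: converting such an image straight to 'RGB' would discard the transparent index, so it is routed to 'RGBA' instead. A runnable check of that behavior, assuming Pillow is installed:

from PIL import Image

img = Image.new("P", (4, 4))
img.info["transparency"] = 0       # palette index 0 is transparent
if img.mode == "P" and "transparency" in img.info:
    out = img.convert("RGBA")      # keeps the alpha channel
else:
    out = img.convert("RGB")
print(out.mode)                    # -> RGBA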
def _on_disconnect(self):
    """Callback when a device is disconnected unexpectedly."""

    self._logger.info("Connection to device %s was interrupted", self.connection_string)
    self.connection_interrupted = True
def function[_on_disconnect, parameter[self]]:
    constant[Callback when a device is disconnected unexpectedly.]
    call[name[self]._logger.info, parameter[constant[Connection to device %s was interrupted], name[self].connection_string]]
    name[self].connection_interrupted assign[=] constant[True]
keyword[def] identifier[_on_disconnect] ( identifier[self] ): literal[string] identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[self] . identifier[connection_string] ) identifier[self] . identifier[connection_interrupted] = keyword[True]
def _on_disconnect(self):
    """Callback when a device is disconnected unexpectedly."""
    self._logger.info('Connection to device %s was interrupted', self.connection_string)
    self.connection_interrupted = True
def _log_level_changed(self, name, old, new): """Adjust the log level when log_level is set.""" if isinstance(new, basestring): new = getattr(logging, new) self.log_level = new self.log.setLevel(new)
def function[_log_level_changed, parameter[self, name, old, new]]: constant[Adjust the log level when log_level is set.] if call[name[isinstance], parameter[name[new], name[basestring]]] begin[:] variable[new] assign[=] call[name[getattr], parameter[name[logging], name[new]]] name[self].log_level assign[=] name[new] call[name[self].log.setLevel, parameter[name[new]]]
keyword[def] identifier[_log_level_changed] ( identifier[self] , identifier[name] , identifier[old] , identifier[new] ): literal[string] keyword[if] identifier[isinstance] ( identifier[new] , identifier[basestring] ): identifier[new] = identifier[getattr] ( identifier[logging] , identifier[new] ) identifier[self] . identifier[log_level] = identifier[new] identifier[self] . identifier[log] . identifier[setLevel] ( identifier[new] )
def _log_level_changed(self, name, old, new): """Adjust the log level when log_level is set.""" if isinstance(new, basestring): new = getattr(logging, new) self.log_level = new # depends on [control=['if'], data=[]] self.log.setLevel(new)
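The string branch works because logging exposes its level names as module attributes; a standalone check (the original's basestring marks it as Python 2 code, so str is used here instead):

import logging

for new in ("DEBUG", 20, "WARNING"):
    level = getattr(logging, new) if isinstance(new, str) else new
    print(new, "->", level)        # DEBUG -> 10, 20 -> 20, WARNING -> 30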
def absent(name, **kwargs): ''' Ensures that the mediatype does not exist, eventually deletes the mediatype. :param name: name of the mediatype :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) .. code-block:: yaml delete_mediatype: zabbix_mediatype.absent: - name: 'Email' ''' connection_args = {} if '_connection_user' in kwargs: connection_args['_connection_user'] = kwargs['_connection_user'] if '_connection_password' in kwargs: connection_args['_connection_password'] = kwargs['_connection_password'] if '_connection_url' in kwargs: connection_args['_connection_url'] = kwargs['_connection_url'] ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} # Comment and change messages comment_mediatype_deleted = 'Mediatype {0} deleted.'.format(name) comment_mediatype_notdeleted = 'Unable to delete mediatype: {0}. '.format(name) comment_mediatype_notexists = 'Mediatype {0} does not exist.'.format(name) changes_mediatype_deleted = {name: {'old': 'Mediatype {0} exists.'.format(name), 'new': 'Mediatype {0} deleted.'.format(name), } } mediatype_exists = __salt__['zabbix.mediatype_get'](name, **connection_args) # Dry run, test=true mode if __opts__['test']: if not mediatype_exists: ret['result'] = True ret['comment'] = comment_mediatype_notexists else: ret['result'] = None ret['comment'] = comment_mediatype_deleted return ret if not mediatype_exists: ret['result'] = True ret['comment'] = comment_mediatype_notexists else: try: mediatypeid = mediatype_exists[0]['mediatypeid'] mediatype_delete = __salt__['zabbix.mediatype_delete'](mediatypeid, **connection_args) except KeyError: mediatype_delete = False if mediatype_delete and 'error' not in mediatype_delete: ret['result'] = True ret['comment'] = comment_mediatype_deleted ret['changes'] = changes_mediatype_deleted else: ret['result'] = False ret['comment'] = comment_mediatype_notdeleted + six.text_type(mediatype_delete['error']) return ret
def function[absent, parameter[name]]: constant[ Ensures that the mediatype does not exist, eventually deletes the mediatype. :param name: name of the mediatype :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) .. code-block:: yaml delete_mediatype: zabbix_mediatype.absent: - name: 'Email' ] variable[connection_args] assign[=] dictionary[[], []] if compare[constant[_connection_user] in name[kwargs]] begin[:] call[name[connection_args]][constant[_connection_user]] assign[=] call[name[kwargs]][constant[_connection_user]] if compare[constant[_connection_password] in name[kwargs]] begin[:] call[name[connection_args]][constant[_connection_password]] assign[=] call[name[kwargs]][constant[_connection_password]] if compare[constant[_connection_url] in name[kwargs]] begin[:] call[name[connection_args]][constant[_connection_url]] assign[=] call[name[kwargs]][constant[_connection_url]] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f6f790>, <ast.Constant object at 0x7da1b1f6f760>, <ast.Constant object at 0x7da1b1f6f730>, <ast.Constant object at 0x7da1b1f6f700>], [<ast.Name object at 0x7da1b1f6f6d0>, <ast.Dict object at 0x7da1b1f6f6a0>, <ast.Constant object at 0x7da1b1f6f670>, <ast.Constant object at 0x7da1b1f6f640>]] variable[comment_mediatype_deleted] assign[=] call[constant[Mediatype {0} deleted.].format, parameter[name[name]]] variable[comment_mediatype_notdeleted] assign[=] call[constant[Unable to delete mediatype: {0}. ].format, parameter[name[name]]] variable[comment_mediatype_notexists] assign[=] call[constant[Mediatype {0} does not exist.].format, parameter[name[name]]] variable[changes_mediatype_deleted] assign[=] dictionary[[<ast.Name object at 0x7da1b1f6f220>], [<ast.Dict object at 0x7da1b1f6f1f0>]] variable[mediatype_exists] assign[=] call[call[name[__salt__]][constant[zabbix.mediatype_get]], parameter[name[name]]] if call[name[__opts__]][constant[test]] begin[:] if <ast.UnaryOp object at 0x7da1b1f6ed40> begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] name[comment_mediatype_notexists] return[name[ret]] if <ast.UnaryOp object at 0x7da1b1f6e860> begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] name[comment_mediatype_notexists] return[name[ret]]
keyword[def] identifier[absent] ( identifier[name] ,** identifier[kwargs] ): literal[string] identifier[connection_args] ={} keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[connection_args] [ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[connection_args] [ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[connection_args] [ literal[string] ]= identifier[kwargs] [ literal[string] ] identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[False] , literal[string] : literal[string] } identifier[comment_mediatype_deleted] = literal[string] . identifier[format] ( identifier[name] ) identifier[comment_mediatype_notdeleted] = literal[string] . identifier[format] ( identifier[name] ) identifier[comment_mediatype_notexists] = literal[string] . identifier[format] ( identifier[name] ) identifier[changes_mediatype_deleted] ={ identifier[name] :{ literal[string] : literal[string] . identifier[format] ( identifier[name] ), literal[string] : literal[string] . identifier[format] ( identifier[name] ), } } identifier[mediatype_exists] = identifier[__salt__] [ literal[string] ]( identifier[name] ,** identifier[connection_args] ) keyword[if] identifier[__opts__] [ literal[string] ]: keyword[if] keyword[not] identifier[mediatype_exists] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_mediatype_notexists] keyword[else] : identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= identifier[comment_mediatype_deleted] keyword[return] identifier[ret] keyword[if] keyword[not] identifier[mediatype_exists] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_mediatype_notexists] keyword[else] : keyword[try] : identifier[mediatypeid] = identifier[mediatype_exists] [ literal[int] ][ literal[string] ] identifier[mediatype_delete] = identifier[__salt__] [ literal[string] ]( identifier[mediatypeid] ,** identifier[connection_args] ) keyword[except] identifier[KeyError] : identifier[mediatype_delete] = keyword[False] keyword[if] identifier[mediatype_delete] keyword[and] literal[string] keyword[not] keyword[in] identifier[mediatype_delete] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_mediatype_deleted] identifier[ret] [ literal[string] ]= identifier[changes_mediatype_deleted] keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= identifier[comment_mediatype_notdeleted] + identifier[six] . identifier[text_type] ( identifier[mediatype_delete] [ literal[string] ]) keyword[return] identifier[ret]
def absent(name, **kwargs): """ Ensures that the mediatype does not exist, eventually deletes the mediatype. :param name: name of the mediatype :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) .. code-block:: yaml delete_mediatype: zabbix_mediatype.absent: - name: 'Email' """ connection_args = {} if '_connection_user' in kwargs: connection_args['_connection_user'] = kwargs['_connection_user'] # depends on [control=['if'], data=['kwargs']] if '_connection_password' in kwargs: connection_args['_connection_password'] = kwargs['_connection_password'] # depends on [control=['if'], data=['kwargs']] if '_connection_url' in kwargs: connection_args['_connection_url'] = kwargs['_connection_url'] # depends on [control=['if'], data=['kwargs']] ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} # Comment and change messages comment_mediatype_deleted = 'Mediatype {0} deleted.'.format(name) comment_mediatype_notdeleted = 'Unable to delete mediatype: {0}. '.format(name) comment_mediatype_notexists = 'Mediatype {0} does not exist.'.format(name) changes_mediatype_deleted = {name: {'old': 'Mediatype {0} exists.'.format(name), 'new': 'Mediatype {0} deleted.'.format(name)}} mediatype_exists = __salt__['zabbix.mediatype_get'](name, **connection_args) # Dry run, test=true mode if __opts__['test']: if not mediatype_exists: ret['result'] = True ret['comment'] = comment_mediatype_notexists # depends on [control=['if'], data=[]] else: ret['result'] = None ret['comment'] = comment_mediatype_deleted return ret # depends on [control=['if'], data=[]] if not mediatype_exists: ret['result'] = True ret['comment'] = comment_mediatype_notexists # depends on [control=['if'], data=[]] else: try: mediatypeid = mediatype_exists[0]['mediatypeid'] mediatype_delete = __salt__['zabbix.mediatype_delete'](mediatypeid, **connection_args) # depends on [control=['try'], data=[]] except KeyError: mediatype_delete = False # depends on [control=['except'], data=[]] if mediatype_delete and 'error' not in mediatype_delete: ret['result'] = True ret['comment'] = comment_mediatype_deleted ret['changes'] = changes_mediatype_deleted # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] = comment_mediatype_notdeleted + six.text_type(mediatype_delete['error']) return ret
def handle(self, *args, **options):
    """This function is called by the Django API to specify how this
    object will be saved to the database.
    """
    taxonomy_id = options['taxonomy_id']

    # Remove leading and trailing blank characters in "common_name"
    # and "scientific_name"
    common_name = options['common_name'].strip()
    scientific_name = options['scientific_name'].strip()

    if common_name and scientific_name:
        # A 'slug' is a label for an object in django, which only contains
        # letters, numbers, underscores, and hyphens, thus making it URL-
        # usable. The slugify method in django takes any string and
        # converts it to this format. For more information, see:
        # http://stackoverflow.com/questions/427102/what-is-a-slug-in-django
        slug = slugify(scientific_name)
        logger.info("Slug generated: %s", slug)

        # If organism exists, update with passed parameters
        try:
            org = Organism.objects.get(taxonomy_id=taxonomy_id)
            org.common_name = common_name
            org.scientific_name = scientific_name
            org.slug = slug

        # If organism doesn't exist, construct an organism object
        # (see organisms/models.py).
        except Organism.DoesNotExist:
            org = Organism(taxonomy_id=taxonomy_id,
                           common_name=common_name,
                           scientific_name=scientific_name,
                           slug=slug
                           )

        org.save()  # Save to the database.
    else:
        # Report an error if the user did not fill out all fields.
        logger.error(
            "Failed to add or update organism. "
            "Please check that all fields are filled correctly."
        )
def function[handle, parameter[self]]: constant[This function is called by the Django API to specify how this object will be saved to the database. ] variable[taxonomy_id] assign[=] call[name[options]][constant[taxonomy_id]] variable[common_name] assign[=] call[call[name[options]][constant[common_name]].strip, parameter[]] variable[scientific_name] assign[=] call[call[name[options]][constant[scientific_name]].strip, parameter[]] if <ast.BoolOp object at 0x7da1b10d5e40> begin[:] variable[slug] assign[=] call[name[slugify], parameter[name[scientific_name]]] call[name[logger].info, parameter[constant[Slug generated: %s], name[slug]]] <ast.Try object at 0x7da204347670> call[name[org].save, parameter[]]
keyword[def] identifier[handle] ( identifier[self] ,* identifier[args] ,** identifier[options] ): literal[string] identifier[taxonomy_id] = identifier[options] [ literal[string] ] identifier[common_name] = identifier[options] [ literal[string] ]. identifier[strip] () identifier[scientific_name] = identifier[options] [ literal[string] ]. identifier[strip] () keyword[if] identifier[common_name] keyword[and] identifier[scientific_name] : identifier[slug] = identifier[slugify] ( identifier[scientific_name] ) identifier[logger] . identifier[info] ( literal[string] , identifier[slug] ) keyword[try] : identifier[org] = identifier[Organism] . identifier[objects] . identifier[get] ( identifier[taxonomy_id] = identifier[taxonomy_id] ) identifier[org] . identifier[common_name] = identifier[common_name] identifier[org] . identifier[scientific_name] = identifier[scientific_name] identifier[org] . identifier[slug] = identifier[slug] keyword[except] identifier[Organism] . identifier[DoesNotExist] : identifier[org] = identifier[Organism] ( identifier[taxonomy_id] = identifier[taxonomy_id] , identifier[common_name] = identifier[common_name] , identifier[scientific_name] = identifier[scientific_name] , identifier[slug] = identifier[slug] ) identifier[org] . identifier[save] () keyword[else] : identifier[logger] . identifier[error] ( literal[string] literal[string] )
def handle(self, *args, **options):
    """This function is called by the Django API to specify how this
    object will be saved to the database.
    """
    taxonomy_id = options['taxonomy_id']
    # Remove leading and trailing blank characters in "common_name"
    # and "scientific_name"
    common_name = options['common_name'].strip()
    scientific_name = options['scientific_name'].strip()
    if common_name and scientific_name:
        # A 'slug' is a label for an object in django, which only contains
        # letters, numbers, underscores, and hyphens, thus making it URL-
        # usable. The slugify method in django takes any string and
        # converts it to this format. For more information, see:
        # http://stackoverflow.com/questions/427102/what-is-a-slug-in-django
        slug = slugify(scientific_name)
        logger.info('Slug generated: %s', slug)
        # If organism exists, update with passed parameters
        try:
            org = Organism.objects.get(taxonomy_id=taxonomy_id)
            org.common_name = common_name
            org.scientific_name = scientific_name
            org.slug = slug # depends on [control=['try'], data=[]]
        # If organism doesn't exist, construct an organism object
        # (see organisms/models.py).
        except Organism.DoesNotExist:
            org = Organism(taxonomy_id=taxonomy_id, common_name=common_name, scientific_name=scientific_name, slug=slug) # depends on [control=['except'], data=[]]
        org.save() # Save to the database. # depends on [control=['if'], data=[]]
    else:
        # Report an error if the user did not fill out all fields.
        logger.error('Failed to add or update organism. Please check that all fields are filled correctly.')
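The slug step is the one non-obvious transformation: django's slugify lowercases and hyphenates, so a scientific name becomes URL-safe (the import path is assumed, since the command module's imports are not shown):

from django.utils.text import slugify

print(slugify("Pseudomonas aeruginosa"))   # -> pseudomonas-aeruginosa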
def _JoinKeyPath(self, path_segments): """Joins the path segments into key path. Args: path_segments (list[str]): Windows Registry key path segments. Returns: str: key path. """ # This is an optimized way to combine the path segments into a single path # and combine multiple successive path separators to one. # Split all the path segments based on the path (segment) separator. path_segments = [ segment.split(definitions.KEY_PATH_SEPARATOR) for segment in path_segments] # Flatten the sublists into one list. path_segments = [ element for sublist in path_segments for element in sublist] # Remove empty path segments. path_segments = filter(None, path_segments) return definitions.KEY_PATH_SEPARATOR.join(path_segments)
def function[_JoinKeyPath, parameter[self, path_segments]]: constant[Joins the path segments into key path. Args: path_segments (list[str]): Windows Registry key path segments. Returns: str: key path. ] variable[path_segments] assign[=] <ast.ListComp object at 0x7da18dc042b0> variable[path_segments] assign[=] <ast.ListComp object at 0x7da18dc070d0> variable[path_segments] assign[=] call[name[filter], parameter[constant[None], name[path_segments]]] return[call[name[definitions].KEY_PATH_SEPARATOR.join, parameter[name[path_segments]]]]
keyword[def] identifier[_JoinKeyPath] ( identifier[self] , identifier[path_segments] ): literal[string] identifier[path_segments] =[ identifier[segment] . identifier[split] ( identifier[definitions] . identifier[KEY_PATH_SEPARATOR] ) keyword[for] identifier[segment] keyword[in] identifier[path_segments] ] identifier[path_segments] =[ identifier[element] keyword[for] identifier[sublist] keyword[in] identifier[path_segments] keyword[for] identifier[element] keyword[in] identifier[sublist] ] identifier[path_segments] = identifier[filter] ( keyword[None] , identifier[path_segments] ) keyword[return] identifier[definitions] . identifier[KEY_PATH_SEPARATOR] . identifier[join] ( identifier[path_segments] )
def _JoinKeyPath(self, path_segments): """Joins the path segments into key path. Args: path_segments (list[str]): Windows Registry key path segments. Returns: str: key path. """ # This is an optimized way to combine the path segments into a single path # and combine multiple successive path separators to one. # Split all the path segments based on the path (segment) separator. path_segments = [segment.split(definitions.KEY_PATH_SEPARATOR) for segment in path_segments] # Flatten the sublists into one list. path_segments = [element for sublist in path_segments for element in sublist] # Remove empty path segments. path_segments = filter(None, path_segments) return definitions.KEY_PATH_SEPARATOR.join(path_segments)
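The same flatten-and-filter idiom in isolation, with a literal backslash standing in for definitions.KEY_PATH_SEPARATOR (an assumption; the real constant is defined elsewhere):

SEP = "\\"  # stand-in for definitions.KEY_PATH_SEPARATOR
segments = ["HKEY_LOCAL_MACHINE\\Software", "", "Microsoft\\Windows"]
parts = [segment.split(SEP) for segment in segments]          # split each segment
flat = [element for sublist in parts for element in sublist]  # flatten sublists
print(SEP.join(filter(None, flat)))
# -> HKEY_LOCAL_MACHINE\Software\Microsoft\Windows (empty segments dropped)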
def bed(args): """ %prog bed binfile fastafile Write bed files where the bases have at least certain depth. """ p = OptionParser(bed.__doc__) p.add_option("-o", dest="output", default="stdout", help="Output file name [default: %default]") p.add_option("--cutoff", dest="cutoff", default=10, type="int", help="Minimum read depth to report intervals [default: %default]") opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) binfile, fastafile = args fw = must_open(opts.output, "w") cutoff = opts.cutoff assert cutoff >= 0, "Need non-negative cutoff" b = BinFile(binfile) ar = b.array fastasize, sizes, offsets = get_offsets(fastafile) s = Sizes(fastafile) for ctg, ctglen in s.iter_sizes(): offset = offsets[ctg] subarray = ar[offset:offset + ctglen] key = lambda x: x[1] >= cutoff for tf, array_elements in groupby(enumerate(subarray), key=key): array_elements = list(array_elements) if not tf: continue # 0-based system => 1-based system start = array_elements[0][0] + 1 end = array_elements[-1][0] + 1 mean_depth = sum([x[1] for x in array_elements]) / \ len(array_elements) mean_depth = int(mean_depth) name = "na" print("\t".join(str(x) for x in (ctg, \ start - 1, end, name, mean_depth)), file=fw)
def function[bed, parameter[args]]: constant[ %prog bed binfile fastafile Write bed files where the bases have at least certain depth. ] variable[p] assign[=] call[name[OptionParser], parameter[name[bed].__doc__]] call[name[p].add_option, parameter[constant[-o]]] call[name[p].add_option, parameter[constant[--cutoff]]] <ast.Tuple object at 0x7da2041d9900> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da2041d8f70>]] <ast.Tuple object at 0x7da2041d9690> assign[=] name[args] variable[fw] assign[=] call[name[must_open], parameter[name[opts].output, constant[w]]] variable[cutoff] assign[=] name[opts].cutoff assert[compare[name[cutoff] greater_or_equal[>=] constant[0]]] variable[b] assign[=] call[name[BinFile], parameter[name[binfile]]] variable[ar] assign[=] name[b].array <ast.Tuple object at 0x7da2041d9c90> assign[=] call[name[get_offsets], parameter[name[fastafile]]] variable[s] assign[=] call[name[Sizes], parameter[name[fastafile]]] for taget[tuple[[<ast.Name object at 0x7da2041d80a0>, <ast.Name object at 0x7da2041d8520>]]] in starred[call[name[s].iter_sizes, parameter[]]] begin[:] variable[offset] assign[=] call[name[offsets]][name[ctg]] variable[subarray] assign[=] call[name[ar]][<ast.Slice object at 0x7da2041dae30>] variable[key] assign[=] <ast.Lambda object at 0x7da2041d8ee0> for taget[tuple[[<ast.Name object at 0x7da2041d9c60>, <ast.Name object at 0x7da2041dba30>]]] in starred[call[name[groupby], parameter[call[name[enumerate], parameter[name[subarray]]]]]] begin[:] variable[array_elements] assign[=] call[name[list], parameter[name[array_elements]]] if <ast.UnaryOp object at 0x7da2041d8d60> begin[:] continue variable[start] assign[=] binary_operation[call[call[name[array_elements]][constant[0]]][constant[0]] + constant[1]] variable[end] assign[=] binary_operation[call[call[name[array_elements]][<ast.UnaryOp object at 0x7da2041d87c0>]][constant[0]] + constant[1]] variable[mean_depth] assign[=] binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da2041dace0>]] / call[name[len], parameter[name[array_elements]]]] variable[mean_depth] assign[=] call[name[int], parameter[name[mean_depth]]] variable[name] assign[=] constant[na] call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20c76e290>]]]]
keyword[def] identifier[bed] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[bed] . identifier[__doc__] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = literal[int] , identifier[type] = literal[string] , identifier[help] = literal[string] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[binfile] , identifier[fastafile] = identifier[args] identifier[fw] = identifier[must_open] ( identifier[opts] . identifier[output] , literal[string] ) identifier[cutoff] = identifier[opts] . identifier[cutoff] keyword[assert] identifier[cutoff] >= literal[int] , literal[string] identifier[b] = identifier[BinFile] ( identifier[binfile] ) identifier[ar] = identifier[b] . identifier[array] identifier[fastasize] , identifier[sizes] , identifier[offsets] = identifier[get_offsets] ( identifier[fastafile] ) identifier[s] = identifier[Sizes] ( identifier[fastafile] ) keyword[for] identifier[ctg] , identifier[ctglen] keyword[in] identifier[s] . identifier[iter_sizes] (): identifier[offset] = identifier[offsets] [ identifier[ctg] ] identifier[subarray] = identifier[ar] [ identifier[offset] : identifier[offset] + identifier[ctglen] ] identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]>= identifier[cutoff] keyword[for] identifier[tf] , identifier[array_elements] keyword[in] identifier[groupby] ( identifier[enumerate] ( identifier[subarray] ), identifier[key] = identifier[key] ): identifier[array_elements] = identifier[list] ( identifier[array_elements] ) keyword[if] keyword[not] identifier[tf] : keyword[continue] identifier[start] = identifier[array_elements] [ literal[int] ][ literal[int] ]+ literal[int] identifier[end] = identifier[array_elements] [- literal[int] ][ literal[int] ]+ literal[int] identifier[mean_depth] = identifier[sum] ([ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[array_elements] ])/ identifier[len] ( identifier[array_elements] ) identifier[mean_depth] = identifier[int] ( identifier[mean_depth] ) identifier[name] = literal[string] identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] ( identifier[ctg] , identifier[start] - literal[int] , identifier[end] , identifier[name] , identifier[mean_depth] )), identifier[file] = identifier[fw] )
def bed(args): """ %prog bed binfile fastafile Write bed files where the bases have at least certain depth. """ p = OptionParser(bed.__doc__) p.add_option('-o', dest='output', default='stdout', help='Output file name [default: %default]') p.add_option('--cutoff', dest='cutoff', default=10, type='int', help='Minimum read depth to report intervals [default: %default]') (opts, args) = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (binfile, fastafile) = args fw = must_open(opts.output, 'w') cutoff = opts.cutoff assert cutoff >= 0, 'Need non-negative cutoff' b = BinFile(binfile) ar = b.array (fastasize, sizes, offsets) = get_offsets(fastafile) s = Sizes(fastafile) for (ctg, ctglen) in s.iter_sizes(): offset = offsets[ctg] subarray = ar[offset:offset + ctglen] key = lambda x: x[1] >= cutoff for (tf, array_elements) in groupby(enumerate(subarray), key=key): array_elements = list(array_elements) if not tf: continue # depends on [control=['if'], data=[]] # 0-based system => 1-based system start = array_elements[0][0] + 1 end = array_elements[-1][0] + 1 mean_depth = sum([x[1] for x in array_elements]) / len(array_elements) mean_depth = int(mean_depth) name = 'na' print('\t'.join((str(x) for x in (ctg, start - 1, end, name, mean_depth))), file=fw) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
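The interval detection hinges on itertools.groupby over (index, depth) pairs keyed on the cutoff test, which yields maximal runs of bases at or above cutoff; the same reduction on a toy depth array:

from itertools import groupby

depths = [0, 0, 12, 15, 11, 3, 20, 20]
cutoff = 10
for above, run in groupby(enumerate(depths), key=lambda x: x[1] >= cutoff):
    run = list(run)
    if not above:
        continue
    start, end = run[0][0] + 1, run[-1][0] + 1      # 1-based, as above
    mean_depth = sum(d for _, d in run) // len(run)
    print(start - 1, end, mean_depth)               # BED-style half-open
# prints: "2 5 12" then "6 8 20"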
def show_xref(self, f_a): """ Display where this field is read or written """ if f_a: bytecode._PrintSubBanner("XREF Read") xrefs_from = f_a.get_xref_read() for ref_class, ref_method in xrefs_from: bytecode._PrintDefault(ref_method.get_name()) bytecode._PrintDefault('\n') bytecode._PrintDefault('\n') bytecode._PrintSubBanner("XREF Write") xrefs_to = f_a.get_xref_write() for ref_class, ref_method in xrefs_to: bytecode._PrintDefault(ref_method.get_name()) bytecode._PrintDefault('\n')
def function[show_xref, parameter[self, f_a]]: constant[ Display where this field is read or written ] if name[f_a] begin[:] call[name[bytecode]._PrintSubBanner, parameter[constant[XREF Read]]] variable[xrefs_from] assign[=] call[name[f_a].get_xref_read, parameter[]] for taget[tuple[[<ast.Name object at 0x7da2046233a0>, <ast.Name object at 0x7da204622380>]]] in starred[name[xrefs_from]] begin[:] call[name[bytecode]._PrintDefault, parameter[call[name[ref_method].get_name, parameter[]]]] call[name[bytecode]._PrintDefault, parameter[constant[ ]]] call[name[bytecode]._PrintDefault, parameter[constant[ ]]] call[name[bytecode]._PrintSubBanner, parameter[constant[XREF Write]]] variable[xrefs_to] assign[=] call[name[f_a].get_xref_write, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0a4b010>, <ast.Name object at 0x7da1b0a4a410>]]] in starred[name[xrefs_to]] begin[:] call[name[bytecode]._PrintDefault, parameter[call[name[ref_method].get_name, parameter[]]]] call[name[bytecode]._PrintDefault, parameter[constant[ ]]]
keyword[def] identifier[show_xref] ( identifier[self] , identifier[f_a] ): literal[string] keyword[if] identifier[f_a] : identifier[bytecode] . identifier[_PrintSubBanner] ( literal[string] ) identifier[xrefs_from] = identifier[f_a] . identifier[get_xref_read] () keyword[for] identifier[ref_class] , identifier[ref_method] keyword[in] identifier[xrefs_from] : identifier[bytecode] . identifier[_PrintDefault] ( identifier[ref_method] . identifier[get_name] ()) identifier[bytecode] . identifier[_PrintDefault] ( literal[string] ) identifier[bytecode] . identifier[_PrintDefault] ( literal[string] ) identifier[bytecode] . identifier[_PrintSubBanner] ( literal[string] ) identifier[xrefs_to] = identifier[f_a] . identifier[get_xref_write] () keyword[for] identifier[ref_class] , identifier[ref_method] keyword[in] identifier[xrefs_to] : identifier[bytecode] . identifier[_PrintDefault] ( identifier[ref_method] . identifier[get_name] ()) identifier[bytecode] . identifier[_PrintDefault] ( literal[string] )
def show_xref(self, f_a): """ Display where this field is read or written """ if f_a: bytecode._PrintSubBanner('XREF Read') xrefs_from = f_a.get_xref_read() for (ref_class, ref_method) in xrefs_from: bytecode._PrintDefault(ref_method.get_name()) bytecode._PrintDefault('\n') # depends on [control=['for'], data=[]] bytecode._PrintDefault('\n') bytecode._PrintSubBanner('XREF Write') xrefs_to = f_a.get_xref_write() for (ref_class, ref_method) in xrefs_to: bytecode._PrintDefault(ref_method.get_name()) bytecode._PrintDefault('\n') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
def _update_mean_coords(self, dig, N, centers_sum, **paircoords): """ Update the mean coordinate sums """ if N is None or centers_sum is None: return N.flat[:] += utils.bincount(dig, 1., minlength=N.size) for i, dim in enumerate(self.dims): size = centers_sum[i].size centers_sum[i].flat[:] += utils.bincount(dig, paircoords[dim], minlength=size)
def function[_update_mean_coords, parameter[self, dig, N, centers_sum]]: constant[ Update the mean coordinate sums ] if <ast.BoolOp object at 0x7da18bcc9bd0> begin[:] return[None] <ast.AugAssign object at 0x7da18bccb5b0> for taget[tuple[[<ast.Name object at 0x7da20e956aa0>, <ast.Name object at 0x7da20e955e40>]]] in starred[call[name[enumerate], parameter[name[self].dims]]] begin[:] variable[size] assign[=] call[name[centers_sum]][name[i]].size <ast.AugAssign object at 0x7da20e9547c0>
keyword[def] identifier[_update_mean_coords] ( identifier[self] , identifier[dig] , identifier[N] , identifier[centers_sum] ,** identifier[paircoords] ): literal[string] keyword[if] identifier[N] keyword[is] keyword[None] keyword[or] identifier[centers_sum] keyword[is] keyword[None] : keyword[return] identifier[N] . identifier[flat] [:]+= identifier[utils] . identifier[bincount] ( identifier[dig] , literal[int] , identifier[minlength] = identifier[N] . identifier[size] ) keyword[for] identifier[i] , identifier[dim] keyword[in] identifier[enumerate] ( identifier[self] . identifier[dims] ): identifier[size] = identifier[centers_sum] [ identifier[i] ]. identifier[size] identifier[centers_sum] [ identifier[i] ]. identifier[flat] [:]+= identifier[utils] . identifier[bincount] ( identifier[dig] , identifier[paircoords] [ identifier[dim] ], identifier[minlength] = identifier[size] )
def _update_mean_coords(self, dig, N, centers_sum, **paircoords): """ Update the mean coordinate sums """ if N is None or centers_sum is None: return # depends on [control=['if'], data=[]] N.flat[:] += utils.bincount(dig, 1.0, minlength=N.size) for (i, dim) in enumerate(self.dims): size = centers_sum[i].size centers_sum[i].flat[:] += utils.bincount(dig, paircoords[dim], minlength=size) # depends on [control=['for'], data=[]]
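utils.bincount here presumably wraps numpy's weighted bincount (an assumption); the accumulation pattern in isolation, with dig holding each pair's flattened bin index:

import numpy as np

dig = np.array([0, 2, 2, 1])                 # bin index per pair
x = np.array([1.0, 2.0, 3.0, 4.0])           # one coordinate per pair
counts = np.bincount(dig, weights=np.ones_like(x), minlength=4)
sums = np.bincount(dig, weights=x, minlength=4)
print(counts)   # [1. 1. 2. 0.]
print(sums)     # [1. 4. 5. 0.]; per-bin mean coordinate is sums / counts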
def parse_tasks_file_header(header, input_file_param_util, output_file_param_util): """Parse the header from the tasks file into env, input, output definitions. Elements are formatted similar to their equivalent command-line arguments, but with associated values coming from the data rows. Environment variables columns are headered as "--env <name>" Inputs columns are headered as "--input <name>" with the name optional. Outputs columns are headered as "--output <name>" with the name optional. For historical reasons, bareword column headers (such as "JOB_ID") are equivalent to "--env var_name". Args: header: Array of header fields input_file_param_util: Utility for producing InputFileParam objects. output_file_param_util: Utility for producing OutputFileParam objects. Returns: job_params: A list of EnvParams and FileParams for the environment variables, LabelParams, input file parameters, and output file parameters. Raises: ValueError: If a header contains a ":" and the prefix is not supported. """ job_params = [] for col in header: # Reserve the "-" and "--" namespace. # If the column has no leading "-", treat it as an environment variable col_type = '--env' col_value = col if col.startswith('-'): col_type, col_value = split_pair(col, ' ', 1) if col_type == '--env': job_params.append(job_model.EnvParam(col_value)) elif col_type == '--label': job_params.append(job_model.LabelParam(col_value)) elif col_type == '--input' or col_type == '--input-recursive': name = input_file_param_util.get_variable_name(col_value) job_params.append( job_model.InputFileParam( name, recursive=(col_type.endswith('recursive')))) elif col_type == '--output' or col_type == '--output-recursive': name = output_file_param_util.get_variable_name(col_value) job_params.append( job_model.OutputFileParam( name, recursive=(col_type.endswith('recursive')))) else: raise ValueError('Unrecognized column header: %s' % col) return job_params
def function[parse_tasks_file_header, parameter[header, input_file_param_util, output_file_param_util]]: constant[Parse the header from the tasks file into env, input, output definitions. Elements are formatted similar to their equivalent command-line arguments, but with associated values coming from the data rows. Environment variables columns are headered as "--env <name>" Inputs columns are headered as "--input <name>" with the name optional. Outputs columns are headered as "--output <name>" with the name optional. For historical reasons, bareword column headers (such as "JOB_ID") are equivalent to "--env var_name". Args: header: Array of header fields input_file_param_util: Utility for producing InputFileParam objects. output_file_param_util: Utility for producing OutputFileParam objects. Returns: job_params: A list of EnvParams and FileParams for the environment variables, LabelParams, input file parameters, and output file parameters. Raises: ValueError: If a header contains a ":" and the prefix is not supported. ] variable[job_params] assign[=] list[[]] for taget[name[col]] in starred[name[header]] begin[:] variable[col_type] assign[=] constant[--env] variable[col_value] assign[=] name[col] if call[name[col].startswith, parameter[constant[-]]] begin[:] <ast.Tuple object at 0x7da1b010b670> assign[=] call[name[split_pair], parameter[name[col], constant[ ], constant[1]]] if compare[name[col_type] equal[==] constant[--env]] begin[:] call[name[job_params].append, parameter[call[name[job_model].EnvParam, parameter[name[col_value]]]]] return[name[job_params]]
keyword[def] identifier[parse_tasks_file_header] ( identifier[header] , identifier[input_file_param_util] , identifier[output_file_param_util] ): literal[string] identifier[job_params] =[] keyword[for] identifier[col] keyword[in] identifier[header] : identifier[col_type] = literal[string] identifier[col_value] = identifier[col] keyword[if] identifier[col] . identifier[startswith] ( literal[string] ): identifier[col_type] , identifier[col_value] = identifier[split_pair] ( identifier[col] , literal[string] , literal[int] ) keyword[if] identifier[col_type] == literal[string] : identifier[job_params] . identifier[append] ( identifier[job_model] . identifier[EnvParam] ( identifier[col_value] )) keyword[elif] identifier[col_type] == literal[string] : identifier[job_params] . identifier[append] ( identifier[job_model] . identifier[LabelParam] ( identifier[col_value] )) keyword[elif] identifier[col_type] == literal[string] keyword[or] identifier[col_type] == literal[string] : identifier[name] = identifier[input_file_param_util] . identifier[get_variable_name] ( identifier[col_value] ) identifier[job_params] . identifier[append] ( identifier[job_model] . identifier[InputFileParam] ( identifier[name] , identifier[recursive] =( identifier[col_type] . identifier[endswith] ( literal[string] )))) keyword[elif] identifier[col_type] == literal[string] keyword[or] identifier[col_type] == literal[string] : identifier[name] = identifier[output_file_param_util] . identifier[get_variable_name] ( identifier[col_value] ) identifier[job_params] . identifier[append] ( identifier[job_model] . identifier[OutputFileParam] ( identifier[name] , identifier[recursive] =( identifier[col_type] . identifier[endswith] ( literal[string] )))) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[col] ) keyword[return] identifier[job_params]
def parse_tasks_file_header(header, input_file_param_util, output_file_param_util): """Parse the header from the tasks file into env, input, output definitions. Elements are formatted similar to their equivalent command-line arguments, but with associated values coming from the data rows. Environment variables columns are headered as "--env <name>" Inputs columns are headered as "--input <name>" with the name optional. Outputs columns are headered as "--output <name>" with the name optional. For historical reasons, bareword column headers (such as "JOB_ID") are equivalent to "--env var_name". Args: header: Array of header fields input_file_param_util: Utility for producing InputFileParam objects. output_file_param_util: Utility for producing OutputFileParam objects. Returns: job_params: A list of EnvParams and FileParams for the environment variables, LabelParams, input file parameters, and output file parameters. Raises: ValueError: If a header contains a ":" and the prefix is not supported. """ job_params = [] for col in header: # Reserve the "-" and "--" namespace. # If the column has no leading "-", treat it as an environment variable col_type = '--env' col_value = col if col.startswith('-'): (col_type, col_value) = split_pair(col, ' ', 1) # depends on [control=['if'], data=[]] if col_type == '--env': job_params.append(job_model.EnvParam(col_value)) # depends on [control=['if'], data=[]] elif col_type == '--label': job_params.append(job_model.LabelParam(col_value)) # depends on [control=['if'], data=[]] elif col_type == '--input' or col_type == '--input-recursive': name = input_file_param_util.get_variable_name(col_value) job_params.append(job_model.InputFileParam(name, recursive=col_type.endswith('recursive'))) # depends on [control=['if'], data=[]] elif col_type == '--output' or col_type == '--output-recursive': name = output_file_param_util.get_variable_name(col_value) job_params.append(job_model.OutputFileParam(name, recursive=col_type.endswith('recursive'))) # depends on [control=['if'], data=[]] else: raise ValueError('Unrecognized column header: %s' % col) # depends on [control=['for'], data=['col']] return job_params
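A hypothetical call illustrating how header columns map to parameter objects; the param-util instances come from the surrounding dsub codebase and are assumed to be constructed elsewhere:

header = ['sample_id', '--label batch', '--input BAM', '--output-recursive OUT']
job_params = parse_tasks_file_header(
    header, input_file_param_util, output_file_param_util)
# Expected roughly: [EnvParam('sample_id'), LabelParam('batch'),
#                    InputFileParam('BAM'), OutputFileParam('OUT', recursive=True)]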
def run(self):
    '''
    Execute the batch run
    '''
    args = [[],
            self.opts['fun'],
            self.opts['arg'],
            self.opts['timeout'],
            'list',
            ]
    bnum = self.get_bnum()
    # No targets to run
    if not self.minions:
        return
    to_run = copy.deepcopy(self.minions)
    active = []
    ret = {}
    iters = []
    # wait the specified time before deciding a job is actually done
    bwait = self.opts.get('batch_wait', 0)
    wait = []

    if self.options:
        show_jid = self.options.show_jid
        show_verbose = self.options.verbose
    else:
        show_jid = False
        show_verbose = False

    # the minion tracker keeps track of responses and iterators
    # - it removes finished iterators from iters[]
    # - if a previously detected minion does not respond, it's
    #   added with an empty answer to ret{} once the timeout is reached
    # - unresponsive minions are removed from active[] to make sure
    #   that the main while loop finishes even with unresponsive minions
    minion_tracker = {}

    if not self.quiet:
        # We already know some minions didn't respond to the ping, so inform
        # the user we won't be attempting to run a job on them
        for down_minion in self.down_minions:
            salt.utils.stringutils.print_cli('Minion {0} did not respond. No job will be sent.'.format(down_minion))

    # Iterate while we still have things to execute
    while len(ret) < len(self.minions):
        next_ = []
        if bwait and wait:
            self.__update_wait(wait)
        if len(to_run) <= bnum - len(wait) and not active:
            # last bit of them, add them all to next iterator
            while to_run:
                next_.append(to_run.pop())
        else:
            for i in range(bnum - len(active) - len(wait)):
                if to_run:
                    minion_id = to_run.pop()
                    if isinstance(minion_id, dict):
                        # list() for Python 3 compatibility
                        next_.append(list(minion_id.keys())[0])
                    else:
                        next_.append(minion_id)

        active += next_
        args[0] = next_

        if next_:
            if not self.quiet:
                salt.utils.stringutils.print_cli('\nExecuting run on {0}\n'.format(sorted(next_)))
            # create a new iterator for this batch of minions
            new_iter = self.local.cmd_iter_no_block(
                            *args,
                            raw=self.opts.get('raw', False),
                            ret=self.opts.get('return', ''),
                            show_jid=show_jid,
                            verbose=show_verbose,
                            gather_job_timeout=self.opts['gather_job_timeout'],
                            **self.eauth)
            # add it to our iterators and to the minion_tracker
            iters.append(new_iter)
            minion_tracker[new_iter] = {}
            # every iterator added is 'active' and has its set of minions
            minion_tracker[new_iter]['minions'] = next_
            minion_tracker[new_iter]['active'] = True
        else:
            time.sleep(0.02)

        parts = {}

        # see if we found more minions
        for ping_ret in self.ping_gen:
            if ping_ret is None:
                break
            m = next(six.iterkeys(ping_ret))
            if m not in self.minions:
                self.minions.append(m)
                to_run.append(m)

        for queue in iters:
            try:
                # Gather returns until we get to the bottom
                ncnt = 0
                while True:
                    part = next(queue)
                    if part is None:
                        time.sleep(0.01)
                        ncnt += 1
                        if ncnt > 5:
                            break
                        continue
                    if self.opts.get('raw'):
                        parts.update({part['data']['id']: part})
                        if part['data']['id'] in minion_tracker[queue]['minions']:
                            minion_tracker[queue]['minions'].remove(part['data']['id'])
                        else:
                            salt.utils.stringutils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(part['id']))
                    else:
                        parts.update(part)
                        for id in part:
                            if id in minion_tracker[queue]['minions']:
                                minion_tracker[queue]['minions'].remove(id)
                            else:
                                salt.utils.stringutils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(id))
            except StopIteration:
                # if an iterator is done:
                # - set it to inactive
                # - add minions that have not responded to parts{}

                # check if the tracker contains the iterator
                if queue in minion_tracker:
                    minion_tracker[queue]['active'] = False

                    # add all minions that belong to this iterator and
                    # that have not responded to parts{} with an empty response
                    for minion in minion_tracker[queue]['minions']:
                        if minion not in parts:
                            parts[minion] = {}
                            parts[minion]['ret'] = {}

        for minion, data in six.iteritems(parts):
            if minion in active:
                active.remove(minion)
                if bwait:
                    wait.append(datetime.now() + timedelta(seconds=bwait))
            # Munge retcode into return data
            failhard = False
            if 'retcode' in data and isinstance(data['ret'], dict) and 'retcode' not in data['ret']:
                data['ret']['retcode'] = data['retcode']
                if self.opts.get('failhard') and data['ret']['retcode'] > 0:
                    failhard = True

            if self.opts.get('raw'):
                ret[minion] = data
                yield data
            else:
                ret[minion] = data['ret']
                yield {minion: data['ret']}
            if not self.quiet:
                ret[minion] = data['ret']
                data[minion] = data.pop('ret')
                if 'out' in data:
                    out = data.pop('out')
                else:
                    out = None
                salt.output.display_output(
                    data,
                    out,
                    self.opts)
            if failhard:
                log.error(
                    'Minion %s returned with non-zero exit code. '
                    'Batch run stopped due to failhard',
                    minion
                )
                # end the generator; raising StopIteration inside a generator
                # becomes a RuntimeError under PEP 479 (Python 3.7+)
                return

        # remove inactive iterators from the iters list
        for queue in minion_tracker:
            # only remove inactive queues
            if not minion_tracker[queue]['active'] and queue in iters:
                iters.remove(queue)
                # also remove the iterator's minions from the active list
                for minion in minion_tracker[queue]['minions']:
                    if minion in active:
                        active.remove(minion)
                        if bwait:
                            wait.append(datetime.now() + timedelta(seconds=bwait))
def function[run, parameter[self]]: constant[ Execute the batch run ] variable[args] assign[=] list[[<ast.List object at 0x7da1b208fac0>, <ast.Subscript object at 0x7da1b208f9d0>, <ast.Subscript object at 0x7da1b208da20>, <ast.Subscript object at 0x7da1b208d150>, <ast.Constant object at 0x7da1b208eb00>]] variable[bnum] assign[=] call[name[self].get_bnum, parameter[]] if <ast.UnaryOp object at 0x7da1b208ed10> begin[:] return[None] variable[to_run] assign[=] call[name[copy].deepcopy, parameter[name[self].minions]] variable[active] assign[=] list[[]] variable[ret] assign[=] dictionary[[], []] variable[iters] assign[=] list[[]] variable[bwait] assign[=] call[name[self].opts.get, parameter[constant[batch_wait], constant[0]]] variable[wait] assign[=] list[[]] if name[self].options begin[:] variable[show_jid] assign[=] name[self].options.show_jid variable[show_verbose] assign[=] name[self].options.verbose variable[minion_tracker] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da1b208fb50> begin[:] for taget[name[down_minion]] in starred[name[self].down_minions] begin[:] call[name[salt].utils.stringutils.print_cli, parameter[call[constant[Minion {0} did not respond. No job will be sent.].format, parameter[name[down_minion]]]]] while compare[call[name[len], parameter[name[ret]]] less[<] call[name[len], parameter[name[self].minions]]] begin[:] variable[next_] assign[=] list[[]] if <ast.BoolOp object at 0x7da1b208c550> begin[:] call[name[self].__update_wait, parameter[name[wait]]] if <ast.BoolOp object at 0x7da1b208f940> begin[:] while name[to_run] begin[:] call[name[next_].append, parameter[call[name[to_run].pop, parameter[]]]] <ast.AugAssign object at 0x7da1b208c100> call[name[args]][constant[0]] assign[=] name[next_] if name[next_] begin[:] if <ast.UnaryOp object at 0x7da1b208f070> begin[:] call[name[salt].utils.stringutils.print_cli, parameter[call[constant[ Executing run on {0} ].format, parameter[call[name[sorted], parameter[name[next_]]]]]]] variable[new_iter] assign[=] call[name[self].local.cmd_iter_no_block, parameter[<ast.Starred object at 0x7da1b2022b30>]] call[name[iters].append, parameter[name[new_iter]]] call[name[minion_tracker]][name[new_iter]] assign[=] dictionary[[], []] call[call[name[minion_tracker]][name[new_iter]]][constant[minions]] assign[=] name[next_] call[call[name[minion_tracker]][name[new_iter]]][constant[active]] assign[=] constant[True] variable[parts] assign[=] dictionary[[], []] for taget[name[ping_ret]] in starred[name[self].ping_gen] begin[:] if compare[name[ping_ret] is constant[None]] begin[:] break variable[m] assign[=] call[name[next], parameter[call[name[six].iterkeys, parameter[name[ping_ret]]]]] if compare[name[m] <ast.NotIn object at 0x7da2590d7190> name[self].minions] begin[:] call[name[self].minions.append, parameter[name[m]]] call[name[to_run].append, parameter[name[m]]] for taget[name[queue]] in starred[name[iters]] begin[:] <ast.Try object at 0x7da1b2023fa0> for taget[tuple[[<ast.Name object at 0x7da1b2184070>, <ast.Name object at 0x7da1b21855a0>]]] in starred[call[name[six].iteritems, parameter[name[parts]]]] begin[:] if compare[name[minion] in name[active]] begin[:] call[name[active].remove, parameter[name[minion]]] if name[bwait] begin[:] call[name[wait].append, parameter[binary_operation[call[name[datetime].now, parameter[]] + call[name[timedelta], parameter[]]]]] variable[failhard] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b21846d0> begin[:] call[call[name[data]][constant[ret]]][constant[retcode]] assign[=] 
call[name[data]][constant[retcode]] if <ast.BoolOp object at 0x7da1b2184eb0> begin[:] variable[failhard] assign[=] constant[True] if call[name[self].opts.get, parameter[constant[raw]]] begin[:] call[name[ret]][name[minion]] assign[=] name[data] <ast.Yield object at 0x7da1b2187490> if <ast.UnaryOp object at 0x7da1b2184ac0> begin[:] call[name[ret]][name[minion]] assign[=] call[name[data]][constant[ret]] call[name[data]][name[minion]] assign[=] call[name[data].pop, parameter[constant[ret]]] if compare[constant[out] in name[data]] begin[:] variable[out] assign[=] call[name[data].pop, parameter[constant[out]]] call[name[salt].output.display_output, parameter[name[data], name[out], name[self].opts]] if name[failhard] begin[:] call[name[log].error, parameter[constant[Minion %s returned with non-zero exit code. Batch run stopped due to failhard], name[minion]]] <ast.Raise object at 0x7da18f810760> for taget[name[queue]] in starred[name[minion_tracker]] begin[:] if <ast.BoolOp object at 0x7da18f811660> begin[:] call[name[iters].remove, parameter[name[queue]]] for taget[name[minion]] in starred[call[call[name[minion_tracker]][name[queue]]][constant[minions]]] begin[:] if compare[name[minion] in name[active]] begin[:] call[name[active].remove, parameter[name[minion]]] if name[bwait] begin[:] call[name[wait].append, parameter[binary_operation[call[name[datetime].now, parameter[]] + call[name[timedelta], parameter[]]]]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[args] =[[], identifier[self] . identifier[opts] [ literal[string] ], identifier[self] . identifier[opts] [ literal[string] ], identifier[self] . identifier[opts] [ literal[string] ], literal[string] , ] identifier[bnum] = identifier[self] . identifier[get_bnum] () keyword[if] keyword[not] identifier[self] . identifier[minions] : keyword[return] identifier[to_run] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[minions] ) identifier[active] =[] identifier[ret] ={} identifier[iters] =[] identifier[bwait] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] , literal[int] ) identifier[wait] =[] keyword[if] identifier[self] . identifier[options] : identifier[show_jid] = identifier[self] . identifier[options] . identifier[show_jid] identifier[show_verbose] = identifier[self] . identifier[options] . identifier[verbose] keyword[else] : identifier[show_jid] = keyword[False] identifier[show_verbose] = keyword[False] identifier[minion_tracker] ={} keyword[if] keyword[not] identifier[self] . identifier[quiet] : keyword[for] identifier[down_minion] keyword[in] identifier[self] . identifier[down_minions] : identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[print_cli] ( literal[string] . identifier[format] ( identifier[down_minion] )) keyword[while] identifier[len] ( identifier[ret] )< identifier[len] ( identifier[self] . identifier[minions] ): identifier[next_] =[] keyword[if] identifier[bwait] keyword[and] identifier[wait] : identifier[self] . identifier[__update_wait] ( identifier[wait] ) keyword[if] identifier[len] ( identifier[to_run] )<= identifier[bnum] - identifier[len] ( identifier[wait] ) keyword[and] keyword[not] identifier[active] : keyword[while] identifier[to_run] : identifier[next_] . identifier[append] ( identifier[to_run] . identifier[pop] ()) keyword[else] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[bnum] - identifier[len] ( identifier[active] )- identifier[len] ( identifier[wait] )): keyword[if] identifier[to_run] : identifier[minion_id] = identifier[to_run] . identifier[pop] () keyword[if] identifier[isinstance] ( identifier[minion_id] , identifier[dict] ): identifier[next_] . identifier[append] ( identifier[minion_id] . identifier[keys] ()[ literal[int] ]) keyword[else] : identifier[next_] . identifier[append] ( identifier[minion_id] ) identifier[active] += identifier[next_] identifier[args] [ literal[int] ]= identifier[next_] keyword[if] identifier[next_] : keyword[if] keyword[not] identifier[self] . identifier[quiet] : identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[print_cli] ( literal[string] . identifier[format] ( identifier[sorted] ( identifier[next_] ))) identifier[new_iter] = identifier[self] . identifier[local] . identifier[cmd_iter_no_block] ( * identifier[args] , identifier[raw] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] , keyword[False] ), identifier[ret] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] , literal[string] ), identifier[show_jid] = identifier[show_jid] , identifier[verbose] = identifier[show_verbose] , identifier[gather_job_timeout] = identifier[self] . identifier[opts] [ literal[string] ], ** identifier[self] . identifier[eauth] ) identifier[iters] . 
identifier[append] ( identifier[new_iter] ) identifier[minion_tracker] [ identifier[new_iter] ]={} identifier[minion_tracker] [ identifier[new_iter] ][ literal[string] ]= identifier[next_] identifier[minion_tracker] [ identifier[new_iter] ][ literal[string] ]= keyword[True] keyword[else] : identifier[time] . identifier[sleep] ( literal[int] ) identifier[parts] ={} keyword[for] identifier[ping_ret] keyword[in] identifier[self] . identifier[ping_gen] : keyword[if] identifier[ping_ret] keyword[is] keyword[None] : keyword[break] identifier[m] = identifier[next] ( identifier[six] . identifier[iterkeys] ( identifier[ping_ret] )) keyword[if] identifier[m] keyword[not] keyword[in] identifier[self] . identifier[minions] : identifier[self] . identifier[minions] . identifier[append] ( identifier[m] ) identifier[to_run] . identifier[append] ( identifier[m] ) keyword[for] identifier[queue] keyword[in] identifier[iters] : keyword[try] : identifier[ncnt] = literal[int] keyword[while] keyword[True] : identifier[part] = identifier[next] ( identifier[queue] ) keyword[if] identifier[part] keyword[is] keyword[None] : identifier[time] . identifier[sleep] ( literal[int] ) identifier[ncnt] += literal[int] keyword[if] identifier[ncnt] > literal[int] : keyword[break] keyword[continue] keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] ): identifier[parts] . identifier[update] ({ identifier[part] [ literal[string] ][ literal[string] ]: identifier[part] }) keyword[if] identifier[part] [ literal[string] ][ literal[string] ] keyword[in] identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]: identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]. identifier[remove] ( identifier[part] [ literal[string] ][ literal[string] ]) keyword[else] : identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[print_cli] ( literal[string] . identifier[format] ( identifier[part] [ literal[string] ])) keyword[else] : identifier[parts] . identifier[update] ( identifier[part] ) keyword[for] identifier[id] keyword[in] identifier[part] : keyword[if] identifier[id] keyword[in] identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]: identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]. identifier[remove] ( identifier[id] ) keyword[else] : identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[print_cli] ( literal[string] . identifier[format] ( identifier[id] )) keyword[except] identifier[StopIteration] : keyword[if] identifier[queue] keyword[in] identifier[minion_tracker] : identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]= keyword[False] keyword[for] identifier[minion] keyword[in] identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]: keyword[if] identifier[minion] keyword[not] keyword[in] identifier[parts] : identifier[parts] [ identifier[minion] ]={} identifier[parts] [ identifier[minion] ][ literal[string] ]={} keyword[for] identifier[minion] , identifier[data] keyword[in] identifier[six] . identifier[iteritems] ( identifier[parts] ): keyword[if] identifier[minion] keyword[in] identifier[active] : identifier[active] . identifier[remove] ( identifier[minion] ) keyword[if] identifier[bwait] : identifier[wait] . identifier[append] ( identifier[datetime] . 
identifier[now] ()+ identifier[timedelta] ( identifier[seconds] = identifier[bwait] )) identifier[failhard] = keyword[False] keyword[if] literal[string] keyword[in] identifier[data] keyword[and] identifier[isinstance] ( identifier[data] [ literal[string] ], identifier[dict] ) keyword[and] literal[string] keyword[not] keyword[in] identifier[data] [ literal[string] ]: identifier[data] [ literal[string] ][ literal[string] ]= identifier[data] [ literal[string] ] keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] ) keyword[and] identifier[data] [ literal[string] ][ literal[string] ]> literal[int] : identifier[failhard] = keyword[True] keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] ): identifier[ret] [ identifier[minion] ]= identifier[data] keyword[yield] identifier[data] keyword[else] : identifier[ret] [ identifier[minion] ]= identifier[data] [ literal[string] ] keyword[yield] { identifier[minion] : identifier[data] [ literal[string] ]} keyword[if] keyword[not] identifier[self] . identifier[quiet] : identifier[ret] [ identifier[minion] ]= identifier[data] [ literal[string] ] identifier[data] [ identifier[minion] ]= identifier[data] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[data] : identifier[out] = identifier[data] . identifier[pop] ( literal[string] ) keyword[else] : identifier[out] = keyword[None] identifier[salt] . identifier[output] . identifier[display_output] ( identifier[data] , identifier[out] , identifier[self] . identifier[opts] ) keyword[if] identifier[failhard] : identifier[log] . identifier[error] ( literal[string] literal[string] , identifier[minion] ) keyword[raise] identifier[StopIteration] keyword[for] identifier[queue] keyword[in] identifier[minion_tracker] : keyword[if] keyword[not] identifier[minion_tracker] [ identifier[queue] ][ literal[string] ] keyword[and] identifier[queue] keyword[in] identifier[iters] : identifier[iters] . identifier[remove] ( identifier[queue] ) keyword[for] identifier[minion] keyword[in] identifier[minion_tracker] [ identifier[queue] ][ literal[string] ]: keyword[if] identifier[minion] keyword[in] identifier[active] : identifier[active] . identifier[remove] ( identifier[minion] ) keyword[if] identifier[bwait] : identifier[wait] . identifier[append] ( identifier[datetime] . identifier[now] ()+ identifier[timedelta] ( identifier[seconds] = identifier[bwait] ))
def run(self): """ Execute the batch run """ args = [[], self.opts['fun'], self.opts['arg'], self.opts['timeout'], 'list'] bnum = self.get_bnum() # No targets to run if not self.minions: return # depends on [control=['if'], data=[]] to_run = copy.deepcopy(self.minions) active = [] ret = {} iters = [] # wait the specified time before decide a job is actually done bwait = self.opts.get('batch_wait', 0) wait = [] if self.options: show_jid = self.options.show_jid show_verbose = self.options.verbose # depends on [control=['if'], data=[]] else: show_jid = False show_verbose = False # the minion tracker keeps track of responses and iterators # - it removes finished iterators from iters[] # - if a previously detected minion does not respond, its # added with an empty answer to ret{} once the timeout is reached # - unresponsive minions are removed from active[] to make # sure that the main while loop finishes even with unresp minions minion_tracker = {} if not self.quiet: # We already know some minions didn't respond to the ping, so inform # the user we won't be attempting to run a job on them for down_minion in self.down_minions: salt.utils.stringutils.print_cli('Minion {0} did not respond. No job will be sent.'.format(down_minion)) # depends on [control=['for'], data=['down_minion']] # depends on [control=['if'], data=[]] # Iterate while we still have things to execute while len(ret) < len(self.minions): next_ = [] if bwait and wait: self.__update_wait(wait) # depends on [control=['if'], data=[]] if len(to_run) <= bnum - len(wait) and (not active): # last bit of them, add them all to next iterator while to_run: next_.append(to_run.pop()) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] else: for i in range(bnum - len(active) - len(wait)): if to_run: minion_id = to_run.pop() if isinstance(minion_id, dict): next_.append(minion_id.keys()[0]) # depends on [control=['if'], data=[]] else: next_.append(minion_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] active += next_ args[0] = next_ if next_: if not self.quiet: salt.utils.stringutils.print_cli('\nExecuting run on {0}\n'.format(sorted(next_))) # depends on [control=['if'], data=[]] # create a new iterator for this batch of minions new_iter = self.local.cmd_iter_no_block(*args, raw=self.opts.get('raw', False), ret=self.opts.get('return', ''), show_jid=show_jid, verbose=show_verbose, gather_job_timeout=self.opts['gather_job_timeout'], **self.eauth) # add it to our iterators and to the minion_tracker iters.append(new_iter) minion_tracker[new_iter] = {} # every iterator added is 'active' and has its set of minions minion_tracker[new_iter]['minions'] = next_ minion_tracker[new_iter]['active'] = True # depends on [control=['if'], data=[]] else: time.sleep(0.02) parts = {} # see if we found more minions for ping_ret in self.ping_gen: if ping_ret is None: break # depends on [control=['if'], data=[]] m = next(six.iterkeys(ping_ret)) if m not in self.minions: self.minions.append(m) to_run.append(m) # depends on [control=['if'], data=['m']] # depends on [control=['for'], data=['ping_ret']] for queue in iters: try: # Gather returns until we get to the bottom ncnt = 0 while True: part = next(queue) if part is None: time.sleep(0.01) ncnt += 1 if ncnt > 5: break # depends on [control=['if'], data=[]] continue # depends on [control=['if'], data=[]] if self.opts.get('raw'): parts.update({part['data']['id']: part}) if part['data']['id'] in minion_tracker[queue]['minions']: 
minion_tracker[queue]['minions'].remove(part['data']['id']) # depends on [control=['if'], data=[]] else: salt.utils.stringutils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(part['id'])) # depends on [control=['if'], data=[]] else: parts.update(part) for id in part: if id in minion_tracker[queue]['minions']: minion_tracker[queue]['minions'].remove(id) # depends on [control=['if'], data=['id']] else: salt.utils.stringutils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(id)) # depends on [control=['for'], data=['id']] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except StopIteration: # if a iterator is done: # - set it to inactive # - add minions that have not responded to parts{} # check if the tracker contains the iterator if queue in minion_tracker: minion_tracker[queue]['active'] = False # add all minions that belong to this iterator and # that have not responded to parts{} with an empty response for minion in minion_tracker[queue]['minions']: if minion not in parts: parts[minion] = {} parts[minion]['ret'] = {} # depends on [control=['if'], data=['minion', 'parts']] # depends on [control=['for'], data=['minion']] # depends on [control=['if'], data=['queue', 'minion_tracker']] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['queue']] for (minion, data) in six.iteritems(parts): if minion in active: active.remove(minion) if bwait: wait.append(datetime.now() + timedelta(seconds=bwait)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['minion', 'active']] # Munge retcode into return data failhard = False if 'retcode' in data and isinstance(data['ret'], dict) and ('retcode' not in data['ret']): data['ret']['retcode'] = data['retcode'] if self.opts.get('failhard') and data['ret']['retcode'] > 0: failhard = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.opts.get('raw'): ret[minion] = data yield data # depends on [control=['if'], data=[]] else: ret[minion] = data['ret'] yield {minion: data['ret']} if not self.quiet: ret[minion] = data['ret'] data[minion] = data.pop('ret') if 'out' in data: out = data.pop('out') # depends on [control=['if'], data=['data']] else: out = None salt.output.display_output(data, out, self.opts) # depends on [control=['if'], data=[]] if failhard: log.error('Minion %s returned with non-zero exit code. Batch run stopped due to failhard', minion) raise StopIteration # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # remove inactive iterators from the iters list for queue in minion_tracker: # only remove inactive queues if not minion_tracker[queue]['active'] and queue in iters: iters.remove(queue) # also remove the iterator's minions from the active list for minion in minion_tracker[queue]['minions']: if minion in active: active.remove(minion) if bwait: wait.append(datetime.now() + timedelta(seconds=bwait)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['minion', 'active']] # depends on [control=['for'], data=['minion']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['queue']] # depends on [control=['while'], data=[]]
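A hypothetical driver loop for the generator above; the Batch constructor arguments are an assumption, and each yielded item maps a minion id to its return data (or is the raw event payload when raw is set):

batch = Batch(opts, eauth=eauth, quiet=True)   # hypothetical construction
for part_ret in batch.run():
    for minion, data in part_ret.items():
        print('{0} -> {1}'.format(minion, data))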
def main():
    """
    NAME
        aarm_magic.py

    DESCRIPTION
        Converts AARM data to best-fit tensor (6 elements plus sigma)
        Original program ARMcrunch written to accommodate ARM anisotropy data
        collected from 6 axial directions (+X,+Y,+Z,-X,-Y,-Z) using the
        off-axis remanence terms to construct the tensor. A better way to
        do the anisotropy of ARMs is to use 9,12 or 15 measurements in
        the Hext rotational scheme.

    SYNTAX
        aarm_magic.py [-h][command line options]

    OPTIONS
        -h prints help message and quits
        -f FILE: specify input file, default is aarm_measurements.txt
        -crd [s,g,t] specify coordinate system, requires samples file
        -fsa FILE: specify er_samples.txt file, default is er_samples.txt (2.5) or samples.txt (3.0)
        -Fa FILE: specify anisotropy output file, default is arm_anisotropy.txt (MagIC 2.5 only)
        -Fr FILE: specify results output file, default is aarm_results.txt (MagIC 2.5 only)
        -Fsi FILE: specify output file, default is specimens.txt (MagIC 3 only)
        -DM DATA_MODEL: specify MagIC 2 or MagIC 3, default is 3

    INPUT
        Input for the present program is a series of baseline, ARM pairs.
        The baseline should be the AF demagnetized state (3 axis demag is
        preferable) for the following ARM acquisition. The order of the
        measurements is:

            positions 1,2,3, 6,7,8, 11,12,13 (for 9 positions)
            positions 1,2,3,4, 6,7,8,9, 11,12,13,14 (for 12 positions)
            positions 1-15 (for 15 positions)
    """
    # initialize some parameters
    args = sys.argv
    if "-h" in args:
        print(main.__doc__)
        sys.exit()

    #meas_file = "aarm_measurements.txt"
    #rmag_anis = "arm_anisotropy.txt"
    #rmag_res = "aarm_results.txt"

    #
    # get name of file from command line
    #
    data_model_num = int(pmag.get_named_arg("-DM", 3))
    spec_file = pmag.get_named_arg("-Fsi", "specimens.txt")
    if data_model_num == 3:
        samp_file = pmag.get_named_arg("-fsa", "samples.txt")
    else:
        samp_file = pmag.get_named_arg("-fsa", "er_samples.txt")
    dir_path = pmag.get_named_arg('-WD', '.')
    input_dir_path = pmag.get_named_arg('-ID', '')
    infile = pmag.get_named_arg('-f', reqd=True)
    coord = pmag.get_named_arg('-crd', '-1')
    #if "-Fa" in args:
    #    ind = args.index("-Fa")
    #    rmag_anis = args[ind + 1]
    #if "-Fr" in args:
    #    ind = args.index("-Fr")
    #    rmag_res = args[ind + 1]

    ipmag.aarm_magic(infile, dir_path, input_dir_path, spec_file, samp_file,
                     data_model_num, coord)
def function[main, parameter[]]: constant[ NAME aarm_magic.py DESCRIPTION Converts AARM data to best-fit tensor (6 elements plus sigma) Original program ARMcrunch written to accomodate ARM anisotropy data collected from 6 axial directions (+X,+Y,+Z,-X,-Y,-Z) using the off-axis remanence terms to construct the tensor. A better way to do the anisotropy of ARMs is to use 9,12 or 15 measurements in the Hext rotational scheme. SYNTAX aarm_magic.py [-h][command line options] OPTIONS -h prints help message and quits -f FILE: specify input file, default is aarm_measurements.txt -crd [s,g,t] specify coordinate system, requires samples file -fsa FILE: specify er_samples.txt file, default is er_samples.txt (2.5) or samples.txt (3.0) -Fa FILE: specify anisotropy output file, default is arm_anisotropy.txt (MagIC 2.5 only) -Fr FILE: specify results output file, default is aarm_results.txt (MagIC 2.5 only) -Fsi FILE: specify output file, default is specimens.txt (MagIC 3 only) -DM DATA_MODEL: specify MagIC 2 or MagIC 3, default is 3 INPUT Input for the present program is a series of baseline, ARM pairs. The baseline should be the AF demagnetized state (3 axis demag is preferable) for the following ARM acquisition. The order of the measurements is: positions 1,2,3, 6,7,8, 11,12,13 (for 9 positions) positions 1,2,3,4, 6,7,8,9, 11,12,13,14 (for 12 positions) positions 1-15 (for 15 positions) ] variable[args] assign[=] name[sys].argv if compare[constant[-h] in name[args]] begin[:] call[name[print], parameter[name[main].__doc__]] call[name[sys].exit, parameter[]] variable[data_model_num] assign[=] call[name[int], parameter[call[name[pmag].get_named_arg, parameter[constant[-DM], constant[3]]]]] variable[spec_file] assign[=] call[name[pmag].get_named_arg, parameter[constant[-Fsi], constant[specimens.txt]]] if compare[name[data_model_num] equal[==] constant[3]] begin[:] variable[samp_file] assign[=] call[name[pmag].get_named_arg, parameter[constant[-fsa], constant[samples.txt]]] variable[dir_path] assign[=] call[name[pmag].get_named_arg, parameter[constant[-WD], constant[.]]] variable[input_dir_path] assign[=] call[name[pmag].get_named_arg, parameter[constant[-ID], constant[]]] variable[infile] assign[=] call[name[pmag].get_named_arg, parameter[constant[-f]]] variable[coord] assign[=] call[name[pmag].get_named_arg, parameter[constant[-crd], constant[-1]]] call[name[ipmag].aarm_magic, parameter[name[infile], name[dir_path], name[input_dir_path], name[spec_file], name[samp_file], name[data_model_num], name[coord]]]
keyword[def] identifier[main] (): literal[string] identifier[args] = identifier[sys] . identifier[argv] keyword[if] literal[string] keyword[in] identifier[args] : identifier[print] ( identifier[main] . identifier[__doc__] ) identifier[sys] . identifier[exit] () identifier[data_model_num] = identifier[int] ( identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[int] )) identifier[spec_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) keyword[if] identifier[data_model_num] == literal[int] : identifier[samp_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) keyword[else] : identifier[samp_file] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[dir_path] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[input_dir_path] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[infile] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , identifier[reqd] = keyword[True] ) identifier[coord] = identifier[pmag] . identifier[get_named_arg] ( literal[string] , literal[string] ) identifier[ipmag] . identifier[aarm_magic] ( identifier[infile] , identifier[dir_path] , identifier[input_dir_path] , identifier[spec_file] , identifier[samp_file] , identifier[data_model_num] , identifier[coord] )
def main(): """ NAME aarm_magic.py DESCRIPTION Converts AARM data to best-fit tensor (6 elements plus sigma) Original program ARMcrunch written to accomodate ARM anisotropy data collected from 6 axial directions (+X,+Y,+Z,-X,-Y,-Z) using the off-axis remanence terms to construct the tensor. A better way to do the anisotropy of ARMs is to use 9,12 or 15 measurements in the Hext rotational scheme. SYNTAX aarm_magic.py [-h][command line options] OPTIONS -h prints help message and quits -f FILE: specify input file, default is aarm_measurements.txt -crd [s,g,t] specify coordinate system, requires samples file -fsa FILE: specify er_samples.txt file, default is er_samples.txt (2.5) or samples.txt (3.0) -Fa FILE: specify anisotropy output file, default is arm_anisotropy.txt (MagIC 2.5 only) -Fr FILE: specify results output file, default is aarm_results.txt (MagIC 2.5 only) -Fsi FILE: specify output file, default is specimens.txt (MagIC 3 only) -DM DATA_MODEL: specify MagIC 2 or MagIC 3, default is 3 INPUT Input for the present program is a series of baseline, ARM pairs. The baseline should be the AF demagnetized state (3 axis demag is preferable) for the following ARM acquisition. The order of the measurements is: positions 1,2,3, 6,7,8, 11,12,13 (for 9 positions) positions 1,2,3,4, 6,7,8,9, 11,12,13,14 (for 12 positions) positions 1-15 (for 15 positions) """ # initialize some parameters args = sys.argv if '-h' in args: print(main.__doc__) sys.exit() # depends on [control=['if'], data=[]] #meas_file = "aarm_measurements.txt" #rmag_anis = "arm_anisotropy.txt" #rmag_res = "aarm_results.txt" # # get name of file from command line # data_model_num = int(pmag.get_named_arg('-DM', 3)) spec_file = pmag.get_named_arg('-Fsi', 'specimens.txt') if data_model_num == 3: samp_file = pmag.get_named_arg('-fsa', 'samples.txt') # depends on [control=['if'], data=[]] else: samp_file = pmag.get_named_arg('-fsa', 'er_samples.txt') dir_path = pmag.get_named_arg('-WD', '.') input_dir_path = pmag.get_named_arg('-ID', '') infile = pmag.get_named_arg('-f', reqd=True) coord = pmag.get_named_arg('-crd', '-1') #if "-Fa" in args: # ind = args.index("-Fa") # rmag_anis = args[ind + 1] #if "-Fr" in args: # ind = args.index("-Fr") # rmag_res = args[ind + 1] ipmag.aarm_magic(infile, dir_path, input_dir_path, spec_file, samp_file, data_model_num, coord)
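A hypothetical programmatic invocation, assuming PmagPy's pmag/ipmag modules are importable; the arguments mirror the CLI options documented in the docstring:

import sys
sys.argv = ['aarm_magic.py', '-f', 'aarm_measurements.txt',
            '-crd', 's', '-fsa', 'samples.txt', '-DM', '3']
main()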
def flattened(self, pred=flattened_pred_default):
    """Flattens nodes by hoisting children up to ancestor nodes.

    A node is hoisted (replaced by its children) if pred(node, parent)
    returns True.
    """
    if self.is_value:
        return self

    new_children = []
    for child in self.children:
        if child.is_empty:
            continue
        new_child = child.flattened(pred)
        if pred(new_child, self):
            new_children.extend(new_child.children)
        else:
            new_children.append(new_child)
    return ParseNode(self.node_type, children=new_children,
                     consumed=self.consumed, position=self.position,
                     ignored=self.ignored)
def function[flattened, parameter[self, pred]]: constant[Flattens nodes by hoisting children up to ancestor nodes. A node is hoisted if pred(node) returns True. ] if name[self].is_value begin[:] return[name[self]] variable[new_children] assign[=] list[[]] for taget[name[child]] in starred[name[self].children] begin[:] if name[child].is_empty begin[:] continue variable[new_child] assign[=] call[name[child].flattened, parameter[name[pred]]] if call[name[pred], parameter[name[new_child], name[self]]] begin[:] call[name[new_children].extend, parameter[name[new_child].children]] return[call[name[ParseNode], parameter[name[self].node_type]]]
keyword[def] identifier[flattened] ( identifier[self] , identifier[pred] = identifier[flattened_pred_default] ): literal[string] keyword[if] identifier[self] . identifier[is_value] : keyword[return] identifier[self] identifier[new_children] =[] keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] : keyword[if] identifier[child] . identifier[is_empty] : keyword[continue] identifier[new_child] = identifier[child] . identifier[flattened] ( identifier[pred] ) keyword[if] identifier[pred] ( identifier[new_child] , identifier[self] ): identifier[new_children] . identifier[extend] ( identifier[new_child] . identifier[children] ) keyword[else] : identifier[new_children] . identifier[append] ( identifier[new_child] ) keyword[return] identifier[ParseNode] ( identifier[self] . identifier[node_type] , identifier[children] = identifier[new_children] , identifier[consumed] = identifier[self] . identifier[consumed] , identifier[position] = identifier[self] . identifier[position] , identifier[ignored] = identifier[self] . identifier[ignored] )
def flattened(self, pred=flattened_pred_default): """Flattens nodes by hoisting children up to ancestor nodes. A node is hoisted if pred(node) returns True. """ if self.is_value: return self # depends on [control=['if'], data=[]] new_children = [] for child in self.children: if child.is_empty: continue # depends on [control=['if'], data=[]] new_child = child.flattened(pred) if pred(new_child, self): new_children.extend(new_child.children) # depends on [control=['if'], data=[]] else: new_children.append(new_child) # depends on [control=['for'], data=['child']] return ParseNode(self.node_type, children=new_children, consumed=self.consumed, position=self.position, ignored=self.ignored)
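A hypothetical predicate and call, assuming a ParseNode tree built by the surrounding parser; here intermediate single-child nodes are collapsed into their parents:

def hoist_single_child(node, parent):
    # hoist any non-leaf node that wraps exactly one child
    return not node.is_value and len(node.children) == 1

flat_tree = tree.flattened(pred=hoist_single_child)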
def getMovie(): """ Returns the movie instance for the loader widget. :return <QMovie> """ if not XLoaderWidget.MOVIE: filename = projexui.resources.find('img/ajax_loader.gif') XLoaderWidget.MOVIE = QMovie() XLoaderWidget.MOVIE.setFileName(filename) XLoaderWidget.MOVIE.start() return XLoaderWidget.MOVIE
def function[getMovie, parameter[]]: constant[ Returns the movie instance for the loader widget. :return <QMovie> ] if <ast.UnaryOp object at 0x7da2043445b0> begin[:] variable[filename] assign[=] call[name[projexui].resources.find, parameter[constant[img/ajax_loader.gif]]] name[XLoaderWidget].MOVIE assign[=] call[name[QMovie], parameter[]] call[name[XLoaderWidget].MOVIE.setFileName, parameter[name[filename]]] call[name[XLoaderWidget].MOVIE.start, parameter[]] return[name[XLoaderWidget].MOVIE]
keyword[def] identifier[getMovie] (): literal[string] keyword[if] keyword[not] identifier[XLoaderWidget] . identifier[MOVIE] : identifier[filename] = identifier[projexui] . identifier[resources] . identifier[find] ( literal[string] ) identifier[XLoaderWidget] . identifier[MOVIE] = identifier[QMovie] () identifier[XLoaderWidget] . identifier[MOVIE] . identifier[setFileName] ( identifier[filename] ) identifier[XLoaderWidget] . identifier[MOVIE] . identifier[start] () keyword[return] identifier[XLoaderWidget] . identifier[MOVIE]
def getMovie(): """ Returns the movie instance for the loader widget. :return <QMovie> """ if not XLoaderWidget.MOVIE: filename = projexui.resources.find('img/ajax_loader.gif') XLoaderWidget.MOVIE = QMovie() XLoaderWidget.MOVIE.setFileName(filename) XLoaderWidget.MOVIE.start() # depends on [control=['if'], data=[]] return XLoaderWidget.MOVIE
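A minimal usage sketch, assuming a running Qt application; the QLabel import below is an assumption about the Qt binding in use, since the shared QMovie can be attached to any label:

from PyQt4.QtGui import QLabel     # assumed Qt4-era binding

movie = XLoaderWidget.getMovie()   # shared, already-started QMovie instance
label = QLabel()
label.setMovie(movie)              # every loader widget reuses one animation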
def add_resource(self, descriptor): """https://github.com/frictionlessdata/datapackage-py#package """ self.__current_descriptor.setdefault('resources', []) self.__current_descriptor['resources'].append(descriptor) self.__build() return self.__resources[-1]
def function[add_resource, parameter[self, descriptor]]: constant[https://github.com/frictionlessdata/datapackage-py#package ] call[name[self].__current_descriptor.setdefault, parameter[constant[resources], list[[]]]] call[call[name[self].__current_descriptor][constant[resources]].append, parameter[name[descriptor]]] call[name[self].__build, parameter[]] return[call[name[self].__resources][<ast.UnaryOp object at 0x7da1b0008460>]]
keyword[def] identifier[add_resource] ( identifier[self] , identifier[descriptor] ): literal[string] identifier[self] . identifier[__current_descriptor] . identifier[setdefault] ( literal[string] ,[]) identifier[self] . identifier[__current_descriptor] [ literal[string] ]. identifier[append] ( identifier[descriptor] ) identifier[self] . identifier[__build] () keyword[return] identifier[self] . identifier[__resources] [- literal[int] ]
def add_resource(self, descriptor): """https://github.com/frictionlessdata/datapackage-py#package """ self.__current_descriptor.setdefault('resources', []) self.__current_descriptor['resources'].append(descriptor) self.__build() return self.__resources[-1]
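A short usage sketch against the public datapackage-py API that this method backs; the descriptor fields are illustrative:

from datapackage import Package

package = Package()
resource = package.add_resource({'name': 'data', 'path': 'data.csv'})
print(package.descriptor['resources'][0]['name'])  # -> 'data'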
def find_instances(instance_id=None, name=None, tags=None, region=None, key=None, keyid=None, profile=None, return_objs=False, in_states=None, filters=None): ''' Given instance properties, find and return matching instance ids CLI Examples: .. code-block:: bash salt myminion boto_ec2.find_instances # Lists all instances salt myminion boto_ec2.find_instances name=myinstance salt myminion boto_ec2.find_instances tags='{"mytag": "value"}' salt myminion boto_ec2.find_instances filters='{"vpc-id": "vpc-12345678"}' ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: filter_parameters = {'filters': {}} if instance_id: filter_parameters['instance_ids'] = [instance_id] if name: filter_parameters['filters']['tag:Name'] = name if tags: for tag_name, tag_value in six.iteritems(tags): filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value if filters: filter_parameters['filters'].update(filters) reservations = conn.get_all_reservations(**filter_parameters) instances = [i for r in reservations for i in r.instances] log.debug('The filters criteria %s matched the following ' 'instances:%s', filter_parameters, instances) if in_states: instances = [i for i in instances if i.state in in_states] log.debug( 'Limiting instance matches to those in the requested states: %s', instances ) if instances: if return_objs: return instances return [instance.id for instance in instances] else: return [] except boto.exception.BotoServerError as exc: log.error(exc) return []
def function[find_instances, parameter[instance_id, name, tags, region, key, keyid, profile, return_objs, in_states, filters]]: constant[ Given instance properties, find and return matching instance ids CLI Examples: .. code-block:: bash salt myminion boto_ec2.find_instances # Lists all instances salt myminion boto_ec2.find_instances name=myinstance salt myminion boto_ec2.find_instances tags='{"mytag": "value"}' salt myminion boto_ec2.find_instances filters='{"vpc-id": "vpc-12345678"}' ] variable[conn] assign[=] call[name[_get_conn], parameter[]] <ast.Try object at 0x7da1b2344370>
keyword[def] identifier[find_instances] ( identifier[instance_id] = keyword[None] , identifier[name] = keyword[None] , identifier[tags] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] , identifier[return_objs] = keyword[False] , identifier[in_states] = keyword[None] , identifier[filters] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[try] : identifier[filter_parameters] ={ literal[string] :{}} keyword[if] identifier[instance_id] : identifier[filter_parameters] [ literal[string] ]=[ identifier[instance_id] ] keyword[if] identifier[name] : identifier[filter_parameters] [ literal[string] ][ literal[string] ]= identifier[name] keyword[if] identifier[tags] : keyword[for] identifier[tag_name] , identifier[tag_value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[tags] ): identifier[filter_parameters] [ literal[string] ][ literal[string] . identifier[format] ( identifier[tag_name] )]= identifier[tag_value] keyword[if] identifier[filters] : identifier[filter_parameters] [ literal[string] ]. identifier[update] ( identifier[filters] ) identifier[reservations] = identifier[conn] . identifier[get_all_reservations] (** identifier[filter_parameters] ) identifier[instances] =[ identifier[i] keyword[for] identifier[r] keyword[in] identifier[reservations] keyword[for] identifier[i] keyword[in] identifier[r] . identifier[instances] ] identifier[log] . identifier[debug] ( literal[string] literal[string] , identifier[filter_parameters] , identifier[instances] ) keyword[if] identifier[in_states] : identifier[instances] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[instances] keyword[if] identifier[i] . identifier[state] keyword[in] identifier[in_states] ] identifier[log] . identifier[debug] ( literal[string] , identifier[instances] ) keyword[if] identifier[instances] : keyword[if] identifier[return_objs] : keyword[return] identifier[instances] keyword[return] [ identifier[instance] . identifier[id] keyword[for] identifier[instance] keyword[in] identifier[instances] ] keyword[else] : keyword[return] [] keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[exc] : identifier[log] . identifier[error] ( identifier[exc] ) keyword[return] []
def find_instances(instance_id=None, name=None, tags=None, region=None, key=None, keyid=None, profile=None, return_objs=False, in_states=None, filters=None): """ Given instance properties, find and return matching instance ids CLI Examples: .. code-block:: bash salt myminion boto_ec2.find_instances # Lists all instances salt myminion boto_ec2.find_instances name=myinstance salt myminion boto_ec2.find_instances tags='{"mytag": "value"}' salt myminion boto_ec2.find_instances filters='{"vpc-id": "vpc-12345678"}' """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: filter_parameters = {'filters': {}} if instance_id: filter_parameters['instance_ids'] = [instance_id] # depends on [control=['if'], data=[]] if name: filter_parameters['filters']['tag:Name'] = name # depends on [control=['if'], data=[]] if tags: for (tag_name, tag_value) in six.iteritems(tags): filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if filters: filter_parameters['filters'].update(filters) # depends on [control=['if'], data=[]] reservations = conn.get_all_reservations(**filter_parameters) instances = [i for r in reservations for i in r.instances] log.debug('The filters criteria %s matched the following instances:%s', filter_parameters, instances) if in_states: instances = [i for i in instances if i.state in in_states] log.debug('Limiting instance matches to those in the requested states: %s', instances) # depends on [control=['if'], data=[]] if instances: if return_objs: return instances # depends on [control=['if'], data=[]] return [instance.id for instance in instances] # depends on [control=['if'], data=[]] else: return [] # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as exc: log.error(exc) return [] # depends on [control=['except'], data=['exc']]
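A hypothetical call through Salt's loader dunder, which exists only inside a running Salt module context; the tag values are illustrative:

ids = __salt__['boto_ec2.find_instances'](
    tags={'Role': 'web'},
    in_states=['running'],
    region='us-east-1',
)
# -> e.g. ['i-0123456789abcdef0', ...], or [] on a BotoServerError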
def create(**kwargs):
    """
    Create and return a specialized contract based on the given secType,
    or a general Contract if secType is not given.
    """
    secType = kwargs.get('secType', '')
    cls = {
        '': Contract,
        'STK': Stock,
        'OPT': Option,
        'FUT': Future,
        'CONTFUT': ContFuture,
        'CASH': Forex,
        'IND': Index,
        'CFD': CFD,
        'BOND': Bond,
        'CMDTY': Commodity,
        'FOP': FuturesOption,
        'FUND': MutualFund,
        'WAR': Warrant,
        'IOPT': Warrant,
        'BAG': Bag,
        'NEWS': Contract
    }.get(secType, Contract)
    if cls is not Contract:
        kwargs.pop('secType', '')
    return cls(**kwargs)
def function[create, parameter[]]: constant[ Create and a return a specialized contract based on the given secType, or a general Contract if secType is not given. ] variable[secType] assign[=] call[name[kwargs].get, parameter[constant[secType], constant[]]] variable[cls] assign[=] call[dictionary[[<ast.Constant object at 0x7da18bcc8970>, <ast.Constant object at 0x7da18bcc8d90>, <ast.Constant object at 0x7da18bccbca0>, <ast.Constant object at 0x7da18bccbdc0>, <ast.Constant object at 0x7da18bcc88b0>, <ast.Constant object at 0x7da18bccbbe0>, <ast.Constant object at 0x7da18bccbf10>, <ast.Constant object at 0x7da18bccb250>, <ast.Constant object at 0x7da18bccaf20>, <ast.Constant object at 0x7da18bcc80d0>, <ast.Constant object at 0x7da18bcc8ca0>, <ast.Constant object at 0x7da18bcc9f00>, <ast.Constant object at 0x7da18bccab30>, <ast.Constant object at 0x7da18bcca8c0>, <ast.Constant object at 0x7da18bccbfd0>, <ast.Constant object at 0x7da18bccba30>], [<ast.Name object at 0x7da18bccbf70>, <ast.Name object at 0x7da18bcc97e0>, <ast.Name object at 0x7da18bccb700>, <ast.Name object at 0x7da18bcc8f10>, <ast.Name object at 0x7da18bcca6b0>, <ast.Name object at 0x7da18bcc9750>, <ast.Name object at 0x7da18bcc92d0>, <ast.Name object at 0x7da18bccb790>, <ast.Name object at 0x7da18bccbbb0>, <ast.Name object at 0x7da18bcc84f0>, <ast.Name object at 0x7da18bccab00>, <ast.Name object at 0x7da18bcca320>, <ast.Name object at 0x7da18bcc8640>, <ast.Name object at 0x7da18bccb7f0>, <ast.Name object at 0x7da18bcc8820>, <ast.Name object at 0x7da18bccbc10>]].get, parameter[name[secType], name[Contract]]] if compare[name[cls] is_not name[Contract]] begin[:] call[name[kwargs].pop, parameter[constant[secType], constant[]]] return[call[name[cls], parameter[]]]
keyword[def] identifier[create] (** identifier[kwargs] ): literal[string] identifier[secType] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ) identifier[cls] ={ literal[string] : identifier[Contract] , literal[string] : identifier[Stock] , literal[string] : identifier[Option] , literal[string] : identifier[Future] , literal[string] : identifier[ContFuture] , literal[string] : identifier[Forex] , literal[string] : identifier[Index] , literal[string] : identifier[CFD] , literal[string] : identifier[Bond] , literal[string] : identifier[Commodity] , literal[string] : identifier[FuturesOption] , literal[string] : identifier[MutualFund] , literal[string] : identifier[Warrant] , literal[string] : identifier[Warrant] , literal[string] : identifier[Bag] , literal[string] : identifier[Contract] }. identifier[get] ( identifier[secType] , identifier[Contract] ) keyword[if] identifier[cls] keyword[is] keyword[not] identifier[Contract] : identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] ) keyword[return] identifier[cls] (** identifier[kwargs] )
def create(**kwargs): """ Create and a return a specialized contract based on the given secType, or a general Contract if secType is not given. """ secType = kwargs.get('secType', '') cls = {'': Contract, 'STK': Stock, 'OPT': Option, 'FUT': Future, 'CONTFUT': ContFuture, 'CASH': Forex, 'IND': Index, 'CFD': CFD, 'BOND': Bond, 'CMDTY': Commodity, 'FOP': FuturesOption, 'FUND': MutualFund, 'WAR': Warrant, 'IOPT': Warrant, 'BAG': Bag, 'NEWS': Contract}.get(secType, Contract) if cls is not Contract: kwargs.pop('secType', '') # depends on [control=['if'], data=[]] return cls(**kwargs)
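A few illustrative calls, assuming the ib_insync-style contract classes referenced in the mapping; the keyword arguments are assumptions about those constructors:

stock = create(secType='STK', symbol='AAPL', exchange='SMART', currency='USD')
forex = create(secType='CASH', pair='EURUSD')
plain = create(conId=8314)   # no secType -> generic Contract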
def processHierarchical(self):
    """Main process for hierarchical segmentation.

    Returns
    -------
    est_idxs : list
        List with np.arrays for each layer of segmentation containing
        the estimated indices for the segment boundaries.
    est_labels : list
        List with np.arrays containing the labels for each layer of the
        hierarchical segmentation.
    """
    # Preprocess and stack features before clustering
    F = self._preprocess()
    F = librosa.util.normalize(F, axis=0)
    F = librosa.feature.stack_memory(F.T).T
    self.config["hier"] = True
    est_idxs, est_labels, F = main.scluster_segment(F, self.config,
                                                    self.in_bound_idxs)

    for layer in range(len(est_idxs)):
        assert est_idxs[layer][0] == 0 and \
            est_idxs[layer][-1] == F.shape[1] - 1
        est_idxs[layer], est_labels[layer] = \
            self._postprocess(est_idxs[layer], est_labels[layer])

    return est_idxs, est_labels
def function[processHierarchical, parameter[self]]: constant[Main process for hierarchical segmentation. Returns ------- est_idxs : list List with np.arrays for each layer of segmentation containing the estimated indices for the segment boundaries. est_labels : list List with np.arrays containing the labels for each layer of the hierarchical segmentation. ] variable[F] assign[=] call[name[self]._preprocess, parameter[]] variable[F] assign[=] call[name[librosa].util.normalize, parameter[name[F]]] variable[F] assign[=] call[name[librosa].feature.stack_memory, parameter[name[F].T]].T call[name[self].config][constant[hier]] assign[=] constant[True] <ast.Tuple object at 0x7da1b02a4040> assign[=] call[name[main].scluster_segment, parameter[name[F], name[self].config, name[self].in_bound_idxs]] for taget[name[layer]] in starred[call[name[range], parameter[call[name[len], parameter[name[est_idxs]]]]]] begin[:] assert[<ast.BoolOp object at 0x7da1b02a4ca0>] <ast.Tuple object at 0x7da1b02a5ba0> assign[=] call[name[self]._postprocess, parameter[call[name[est_idxs]][name[layer]], call[name[est_labels]][name[layer]]]] return[tuple[[<ast.Name object at 0x7da1b02a72b0>, <ast.Name object at 0x7da1b02a7310>]]]
keyword[def] identifier[processHierarchical] ( identifier[self] ): literal[string] identifier[F] = identifier[self] . identifier[_preprocess] () identifier[F] = identifier[librosa] . identifier[util] . identifier[normalize] ( identifier[F] , identifier[axis] = literal[int] ) identifier[F] = identifier[librosa] . identifier[feature] . identifier[stack_memory] ( identifier[F] . identifier[T] ). identifier[T] identifier[self] . identifier[config] [ literal[string] ]= keyword[True] identifier[est_idxs] , identifier[est_labels] , identifier[F] = identifier[main] . identifier[scluster_segment] ( identifier[F] , identifier[self] . identifier[config] , identifier[self] . identifier[in_bound_idxs] ) keyword[for] identifier[layer] keyword[in] identifier[range] ( identifier[len] ( identifier[est_idxs] )): keyword[assert] identifier[est_idxs] [ identifier[layer] ][ literal[int] ]== literal[int] keyword[and] identifier[est_idxs] [ identifier[layer] ][- literal[int] ]== identifier[F] . identifier[shape] [ literal[int] ]- literal[int] identifier[est_idxs] [ identifier[layer] ], identifier[est_labels] [ identifier[layer] ]= identifier[self] . identifier[_postprocess] ( identifier[est_idxs] [ identifier[layer] ], identifier[est_labels] [ identifier[layer] ]) keyword[return] identifier[est_idxs] , identifier[est_labels]
def processHierarchical(self):
    """Main process for hierarchical segmentation.

    Returns
    -------
    est_idxs : list
        List with np.arrays for each layer of segmentation containing
        the estimated indices for the segment boundaries.
    est_labels : list
        List with np.arrays containing the labels for each layer
        of the hierarchical segmentation.
    """
    F = self._preprocess()
    F = librosa.util.normalize(F, axis=0)
    F = librosa.feature.stack_memory(F.T).T
    self.config['hier'] = True
    (est_idxs, est_labels, F) = main.scluster_segment(F, self.config, self.in_bound_idxs)
    for layer in range(len(est_idxs)):
        assert est_idxs[layer][0] == 0 and est_idxs[layer][-1] == F.shape[1] - 1
        (est_idxs[layer], est_labels[layer]) = self._postprocess(est_idxs[layer], est_labels[layer]) # depends on [control=['for'], data=['layer']]
    return (est_idxs, est_labels)
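The preprocessing in processHierarchical() normalizes the feature matrix and then stacks time-lagged copies of it, so each frame also carries its recent history. A small self-contained sketch of those two librosa calls on toy random data (the real features come from self._preprocess()):

import numpy as np
import librosa

F = np.random.rand(12, 100)            # toy (n_features, n_frames) matrix
F = librosa.util.normalize(F, axis=0)  # scale each frame's feature vector
F = librosa.feature.stack_memory(F)    # append time-lagged copies of F
print(F.shape)                         # (24, 100) with the default 2 steps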
def xccdf(params): ''' Run ``oscap xccdf`` commands on minions. It uses cp.push_dir to upload the generated files to the salt master in the master's minion files cachedir (defaults to ``/var/cache/salt/master/minions/minion-id/files``) It needs ``file_recv`` set to ``True`` in the master configuration file. CLI Example: .. code-block:: bash salt '*' openscap.xccdf "eval --profile Default /usr/share/openscap/scap-yast2sec-xccdf.xml" ''' params = shlex.split(params) policy = params[-1] success = True error = None upload_dir = None action = None returncode = None try: parser = _ArgumentParser() action = parser.parse_known_args(params)[0].action args, argv = _ArgumentParser(action=action).parse_known_args(args=params) except Exception as err: success = False error = six.text_type(err) if success: cmd = _XCCDF_MAP[action]['cmd_pattern'].format(args.profile, policy) tempdir = tempfile.mkdtemp() proc = Popen( shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir) (stdoutdata, error) = proc.communicate() success = _OSCAP_EXIT_CODES_MAP[proc.returncode] returncode = proc.returncode if success: __salt__['cp.push_dir'](tempdir) shutil.rmtree(tempdir, ignore_errors=True) upload_dir = tempdir return dict( success=success, upload_dir=upload_dir, error=error, returncode=returncode)
def function[xccdf, parameter[params]]: constant[ Run ``oscap xccdf`` commands on minions. It uses cp.push_dir to upload the generated files to the salt master in the master's minion files cachedir (defaults to ``/var/cache/salt/master/minions/minion-id/files``) It needs ``file_recv`` set to ``True`` in the master configuration file. CLI Example: .. code-block:: bash salt '*' openscap.xccdf "eval --profile Default /usr/share/openscap/scap-yast2sec-xccdf.xml" ] variable[params] assign[=] call[name[shlex].split, parameter[name[params]]] variable[policy] assign[=] call[name[params]][<ast.UnaryOp object at 0x7da20c76f520>] variable[success] assign[=] constant[True] variable[error] assign[=] constant[None] variable[upload_dir] assign[=] constant[None] variable[action] assign[=] constant[None] variable[returncode] assign[=] constant[None] <ast.Try object at 0x7da20c76e350> if name[success] begin[:] variable[cmd] assign[=] call[call[call[name[_XCCDF_MAP]][name[action]]][constant[cmd_pattern]].format, parameter[name[args].profile, name[policy]]] variable[tempdir] assign[=] call[name[tempfile].mkdtemp, parameter[]] variable[proc] assign[=] call[name[Popen], parameter[call[name[shlex].split, parameter[name[cmd]]]]] <ast.Tuple object at 0x7da20c794070> assign[=] call[name[proc].communicate, parameter[]] variable[success] assign[=] call[name[_OSCAP_EXIT_CODES_MAP]][name[proc].returncode] variable[returncode] assign[=] name[proc].returncode if name[success] begin[:] call[call[name[__salt__]][constant[cp.push_dir]], parameter[name[tempdir]]] call[name[shutil].rmtree, parameter[name[tempdir]]] variable[upload_dir] assign[=] name[tempdir] return[call[name[dict], parameter[]]]
keyword[def] identifier[xccdf] ( identifier[params] ): literal[string] identifier[params] = identifier[shlex] . identifier[split] ( identifier[params] ) identifier[policy] = identifier[params] [- literal[int] ] identifier[success] = keyword[True] identifier[error] = keyword[None] identifier[upload_dir] = keyword[None] identifier[action] = keyword[None] identifier[returncode] = keyword[None] keyword[try] : identifier[parser] = identifier[_ArgumentParser] () identifier[action] = identifier[parser] . identifier[parse_known_args] ( identifier[params] )[ literal[int] ]. identifier[action] identifier[args] , identifier[argv] = identifier[_ArgumentParser] ( identifier[action] = identifier[action] ). identifier[parse_known_args] ( identifier[args] = identifier[params] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : identifier[success] = keyword[False] identifier[error] = identifier[six] . identifier[text_type] ( identifier[err] ) keyword[if] identifier[success] : identifier[cmd] = identifier[_XCCDF_MAP] [ identifier[action] ][ literal[string] ]. identifier[format] ( identifier[args] . identifier[profile] , identifier[policy] ) identifier[tempdir] = identifier[tempfile] . identifier[mkdtemp] () identifier[proc] = identifier[Popen] ( identifier[shlex] . identifier[split] ( identifier[cmd] ), identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] , identifier[cwd] = identifier[tempdir] ) ( identifier[stdoutdata] , identifier[error] )= identifier[proc] . identifier[communicate] () identifier[success] = identifier[_OSCAP_EXIT_CODES_MAP] [ identifier[proc] . identifier[returncode] ] identifier[returncode] = identifier[proc] . identifier[returncode] keyword[if] identifier[success] : identifier[__salt__] [ literal[string] ]( identifier[tempdir] ) identifier[shutil] . identifier[rmtree] ( identifier[tempdir] , identifier[ignore_errors] = keyword[True] ) identifier[upload_dir] = identifier[tempdir] keyword[return] identifier[dict] ( identifier[success] = identifier[success] , identifier[upload_dir] = identifier[upload_dir] , identifier[error] = identifier[error] , identifier[returncode] = identifier[returncode] )
def xccdf(params): """ Run ``oscap xccdf`` commands on minions. It uses cp.push_dir to upload the generated files to the salt master in the master's minion files cachedir (defaults to ``/var/cache/salt/master/minions/minion-id/files``) It needs ``file_recv`` set to ``True`` in the master configuration file. CLI Example: .. code-block:: bash salt '*' openscap.xccdf "eval --profile Default /usr/share/openscap/scap-yast2sec-xccdf.xml" """ params = shlex.split(params) policy = params[-1] success = True error = None upload_dir = None action = None returncode = None try: parser = _ArgumentParser() action = parser.parse_known_args(params)[0].action (args, argv) = _ArgumentParser(action=action).parse_known_args(args=params) # depends on [control=['try'], data=[]] except Exception as err: success = False error = six.text_type(err) # depends on [control=['except'], data=['err']] if success: cmd = _XCCDF_MAP[action]['cmd_pattern'].format(args.profile, policy) tempdir = tempfile.mkdtemp() proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir) (stdoutdata, error) = proc.communicate() success = _OSCAP_EXIT_CODES_MAP[proc.returncode] returncode = proc.returncode if success: __salt__['cp.push_dir'](tempdir) shutil.rmtree(tempdir, ignore_errors=True) upload_dir = tempdir # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return dict(success=success, upload_dir=upload_dir, error=error, returncode=returncode)
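The core of xccdf() is a common pattern: tokenize a command string with shlex, run it in a scratch directory, and branch on the return code. A minimal sketch with a harmless command standing in for the real oscap invocation (assumes a POSIX environment where echo is available):

import shlex
import tempfile
from subprocess import PIPE, Popen

cmd = 'echo hello'                     # stand-in for 'oscap xccdf eval ...'
tempdir = tempfile.mkdtemp()           # scratch dir for generated files
proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir)
stdoutdata, error = proc.communicate()
print(proc.returncode, stdoutdata.strip())  # 0 b'hello'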
def ascii_encoding(self): """ :return: str: Returns the ASCII-encoded string Thorn (Þ, þ) and Ash(Æ, æ) are substituted by the digraphs 'th' and 'ae' respectively. Wynn(Ƿ, ƿ) and Eth(Ð, ð) are replaced by 'w' and 'd'. Examples: >>> Word('ġelǣd').ascii_encoding() 'gelaed' >>> Word('ƿeorðunga').ascii_encoding() 'weordunga' """ w = self.remove_diacritics() for k, val in zip(Normalize.keys(), Normalize.values()): w = w.replace(k, val) return w
def function[ascii_encoding, parameter[self]]: constant[ :return: str: Returns the ASCII-encoded string Thorn (Þ, þ) and Ash(Æ, æ) are substituted by the digraphs 'th' and 'ae' respectively. Wynn(Ƿ, ƿ) and Eth(Ð, ð) are replaced by 'w' and 'd'. Examples: >>> Word('ġelǣd').ascii_encoding() 'gelaed' >>> Word('ƿeorðunga').ascii_encoding() 'weordunga' ] variable[w] assign[=] call[name[self].remove_diacritics, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b26ad0c0>, <ast.Name object at 0x7da1b26ad4e0>]]] in starred[call[name[zip], parameter[call[name[Normalize].keys, parameter[]], call[name[Normalize].values, parameter[]]]]] begin[:] variable[w] assign[=] call[name[w].replace, parameter[name[k], name[val]]] return[name[w]]
keyword[def] identifier[ascii_encoding] ( identifier[self] ): literal[string] identifier[w] = identifier[self] . identifier[remove_diacritics] () keyword[for] identifier[k] , identifier[val] keyword[in] identifier[zip] ( identifier[Normalize] . identifier[keys] (), identifier[Normalize] . identifier[values] ()): identifier[w] = identifier[w] . identifier[replace] ( identifier[k] , identifier[val] ) keyword[return] identifier[w]
def ascii_encoding(self): """ :return: str: Returns the ASCII-encoded string Thorn (Þ, þ) and Ash(Æ, æ) are substituted by the digraphs 'th' and 'ae' respectively. Wynn(Ƿ, ƿ) and Eth(Ð, ð) are replaced by 'w' and 'd'. Examples: >>> Word('ġelǣd').ascii_encoding() 'gelaed' >>> Word('ƿeorðunga').ascii_encoding() 'weordunga' """ w = self.remove_diacritics() for (k, val) in zip(Normalize.keys(), Normalize.values()): w = w.replace(k, val) # depends on [control=['for'], data=[]] return w
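ascii_encoding() relies on a module-level Normalize mapping that is not shown above. A sketch of the same substitution loop with a hypothetical mapping standing in for the real one:

Normalize = {'þ': 'th', 'ð': 'd', 'æ': 'ae', 'ƿ': 'w'}  # hypothetical subset

def ascii_encoding(word):
    # Replace each special character with its digraph or ASCII letter.
    for k, val in Normalize.items():
        word = word.replace(k, val)
    return word

print(ascii_encoding('ƿeorðunga'))  # -> weordunga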
def _fit_RSA_UV(self, X, Y, X_base,
                scan_onsets=None, coords=None, inten=None):
    """ The major utility of fitting Bayesian RSA.

        Note that there is a naming change of variable. X in fit()
        is changed to Y here, and design in fit() is changed to X here.
        This is because we follow the tradition that X expresses the
        variable defined (controlled) by the experimenter, i.e.,
        the time course of experimental conditions convolved by an HRF,
        and Y expresses data.
        However, in wrapper function fit(), we follow the naming
        routine of scikit-learn.
    """
    GP_inten = self.GP_inten
    GP_space = self.GP_space
    rank = self.rank
    n_V = np.size(Y, axis=1)
    n_T = np.size(Y, axis=0)
    n_C = np.size(X, axis=1)
    l_idx, rank = self._chol_idx(n_C, rank)
    n_l = np.size(l_idx[0])  # the number of parameters for L
    t_start = time.time()

    D, F, run_TRs, n_run = self._prepare_DF(
        n_T, scan_onsets=scan_onsets)
    XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, \
        XTDX, XTFX = self._prepare_data_XY(X, Y, D, F)

    X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
        X0TY, X0TDY, X0TFY, X0, X_base, n_X0, idx_DC = \
        self._prepare_data_XYX0(
            X, Y, X_base, None, D, F, run_TRs, no_DC=False)
    # Prepare the data for fitting. These pre-calculated matrices
    # will be re-used a lot in evaluating likelihood function and
    # gradient.
    # DC component will be added to the nuisance regressors.
    # In later steps, we do not need to add DC components again

    dist2, inten_diff2, space_smooth_range, inten_smooth_range, \
        n_smooth = self._calc_dist2_GP(
            coords=coords, inten=inten,
            GP_space=GP_space, GP_inten=GP_inten)
    # Calculating the distance between voxel locations and between
    # voxel intensities. These are used if a Gaussian Process prior
    # is requested to regularize log(SNR^2)

    idx_param_sing, idx_param_fitU, idx_param_fitV = \
        self._build_index_param(n_l, n_V, n_smooth)
    # Indexes to find each parameter in a combined parameter vector.

    current_GP = np.zeros(n_smooth)

    # We will perform the fitting in 2~3 steps:
    # (1) A preliminary fitting assuming all voxels share
    # exactly the same temporal covariance matrix for their noise.
    # SNR is assumed to be 1 for all voxels in this fitting.
    # Therefore, there are only n_l+2 free parameters.
    # (2) (optional) A fitting which allows each voxel to have their
    # own pseudo-SNR and AR(1) coefficients. But no Gaussian Process
    # prior is imposed on log(SNR). This step is neglected if GP
    # prior is not requested. This step allows the SNR parameters to
    # move closer to their correct values before GP is introduced.
    # This step alternately fits the shared covariance and voxel-
    # specific variance. It fits for init_iter steps and the
    # tolerance is also increased by a factor of 5 to speed up
    # fitting.
    # (3) Final fitting. If GP prior is requested, it will be
    # introduced in this step. Otherwise, just fit as the previous
    # step, but using un-altered tolerance setting, and n_iter
    # as the number of iterations.

    # Step 1 fitting, with a simplified model
    current_vec_U_chlsk_l, current_a1, current_logSigma2 = \
        self._initial_fit_singpara(
            XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag,
            XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0,
            XTFX0, X0TY, X0TDY, X0TFY, X, Y, X0, idx_param_sing,
            l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank)
    current_logSNR2 = -current_logSigma2
    norm_factor = np.mean(current_logSNR2)
    current_logSNR2 = current_logSNR2 - norm_factor
    X_res = None

    # Step 2 fitting, which only happens if
    # GP prior is requested
    if GP_space:
        current_vec_U_chlsk_l, current_a1, current_logSNR2, X_res \
            = self._fit_diagV_noGP(
                XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
                XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
                current_vec_U_chlsk_l, current_a1, current_logSNR2,
                idx_param_fitU, idx_param_fitV,
                l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank)

        current_GP[0] = np.log(np.min(
            dist2[np.tril_indices_from(dist2, k=-1)]))
        # We start fitting the model with GP prior with a small
        # length scale: the size of voxels.
        # Alternatively, initialize with a large distance.
        # Further testing of initial parameters needs to be done.
        # current_GP[0] = np.log(np.max(dist2)/4.0)
        logger.debug('current GP[0]:{}'.format(current_GP[0]))
        if GP_inten:
            current_GP[1] = np.log(np.maximum(
                np.percentile(inten_diff2[np.tril_indices_from(
                    inten_diff2, k=-1)], 2), 0.5))
            logger.debug(
                'current GP[1]:{}'.format(current_GP[1]))
            # We start the length scale for intensity with
            # a small value. A heuristic is 2 percentile of
            # all the square differences. But it should not be
            # smaller than 0.5. This limit is set in case
            # many voxels have close to equal intensities,
            # which might render 2 percentile to 0.

    # Step 3 fitting. GP prior is imposed if requested.
    # In this step, unless auto_nuisance is set to False, X_res
    # will be re-estimated from the residuals after each step
    # of fitting. And X0 will be concatenation of X_base and X_res
    logger.debug('indexing:{}'.format(idx_param_fitV))
    logger.debug('initial GP parameters:{}'.format(current_GP))
    current_vec_U_chlsk_l, current_a1, current_logSNR2, \
        current_GP, X_res = self._fit_diagV_GP(
            XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
            XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
            current_vec_U_chlsk_l, current_a1, current_logSNR2,
            current_GP, n_smooth, idx_param_fitU, idx_param_fitV,
            l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank,
            GP_space, GP_inten, dist2, inten_diff2,
            space_smooth_range, inten_smooth_range)

    estU_chlsk_l_AR1_UV = np.zeros([n_C, rank])
    estU_chlsk_l_AR1_UV[l_idx] = current_vec_U_chlsk_l
    est_cov_AR1_UV = np.dot(estU_chlsk_l_AR1_UV, estU_chlsk_l_AR1_UV.T)

    est_rho1_AR1_UV = 2 / np.pi * np.arctan(current_a1)
    est_SNR_AR1_UV = np.exp(current_logSNR2 / 2.0)

    # Calculating est_sigma_AR1_UV,
    # est_beta_AR1_UV and est_beta0_AR1_UV
    X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
        X0TY, X0TDY, X0TFY, X0, X_base, n_X0, _ \
        = self._prepare_data_XYX0(
            X, Y, X_base, X_res, D, F, run_TRs, no_DC=True)

    X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
        XTAcorrX, XTAcorrY, YTAcorrY, LTXTAcorrY, XTAcorrXL, LTXTAcorrXL \
        = self._precompute_ar1_quad_forms(XTY, XTDY, XTFY,
                                          YTY_diag, YTDY_diag, YTFY_diag,
                                          XTX, XTDX, XTFX, X0TX0, X0TDX0,
                                          X0TFX0, XTX0, XTDX0, XTFX0,
                                          X0TY, X0TDY, X0TFY,
                                          estU_chlsk_l_AR1_UV,
                                          est_rho1_AR1_UV, n_V, n_X0)
    LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, sigma2 \
        = self._calc_LL(est_rho1_AR1_UV, LTXTAcorrXL, LTXTAcorrY,
                        YTAcorrY, X0TAX0, est_SNR_AR1_UV**2,
                        n_V, n_T, n_run, rank, n_X0)
    est_sigma_AR1_UV = sigma2**0.5
    est_beta_AR1_UV = est_SNR_AR1_UV**2 \
        * np.dot(estU_chlsk_l_AR1_UV, YTAcorrXL_LAMBDA.T)
    est_beta_AR1_UV_latent = \
        est_SNR_AR1_UV**2 * YTAcorrXL_LAMBDA.T
    # the latent term means that X*L multiplied by this term
    # is the same as X*beta. This will be used for decoding
    # and cross-validating, in case L is low-rank
    est_beta0_AR1_UV = np.einsum(
        'ijk,ki->ji', X0TAX0_i,
        (X0TAY - np.einsum('ikj,ki->ji', XTAX0, est_beta_AR1_UV)))

    # Now we want to collapse all beta0 corresponding to DC components
    # of different runs to a single map, and preserve only one DC component
    # across runs. This is because they should express the same component
    # and the new data to transform do not necessarily have the same
    # numbers of runs as the training data.
    if idx_DC.size > 1:
        collapsed_DC = np.sum(X0[:, idx_DC], axis=1)
        X0 = np.insert(np.delete(X0, idx_DC, axis=1), 0,
                       collapsed_DC, axis=1)
        collapsed_beta0 = np.mean(est_beta0_AR1_UV[idx_DC, :], axis=0)
        est_beta0_AR1_UV = np.insert(
            np.delete(est_beta0_AR1_UV, idx_DC, axis=0), 0,
            collapsed_beta0, axis=0)

    t_finish = time.time()
    logger.info(
        'total time of fitting: {} seconds'.format(t_finish - t_start))
    logger.debug('final GP parameters:{}'.format(current_GP))
    if GP_space:
        est_space_smooth_r = np.exp(current_GP[0] / 2.0)
        if GP_inten:
            est_intensity_kernel_r = np.exp(current_GP[1] / 2.0)
            K_major = np.exp(- (dist2 / est_space_smooth_r**2
                                + inten_diff2 / est_intensity_kernel_r**2)
                             / 2.0)
        else:
            est_intensity_kernel_r = None
            K_major = np.exp(- dist2 / est_space_smooth_r**2 / 2.0)
        K = K_major + np.diag(np.ones(n_V) * self.eta)
        invK_tilde_log_SNR = np.linalg.solve(K, current_logSNR2) / 2
        log_SNR_invK_tilde_log_SNR = np.dot(current_logSNR2,
                                            invK_tilde_log_SNR) / 2
        tau2, _ = self.tau2_prior(log_SNR_invK_tilde_log_SNR, n_V,
                                  self.tau_range)
        est_std_log_SNR = tau2 ** 0.5
    else:
        est_space_smooth_r = None
        est_intensity_kernel_r = None
        est_std_log_SNR = None
    return est_cov_AR1_UV, estU_chlsk_l_AR1_UV, est_SNR_AR1_UV, \
        est_beta_AR1_UV, est_beta0_AR1_UV, est_beta_AR1_UV_latent, \
        est_sigma_AR1_UV, est_rho1_AR1_UV, est_space_smooth_r, \
        est_std_log_SNR, est_intensity_kernel_r, X0
def function[_fit_RSA_UV, parameter[self, X, Y, X_base, scan_onsets, coords, inten]]: constant[ The major utility of fitting Bayesian RSA. Note that there is a naming change of variable. X in fit() is changed to Y here, and design in fit() is changed to X here. This is because we follow the tradition that X expresses the variable defined (controlled) by the experimenter, i.e., the time course of experimental conditions convolved by an HRF, and Y expresses data. However, in wrapper function fit(), we follow the naming routine of scikit-learn. ] variable[GP_inten] assign[=] name[self].GP_inten variable[GP_space] assign[=] name[self].GP_space variable[rank] assign[=] name[self].rank variable[n_V] assign[=] call[name[np].size, parameter[name[Y]]] variable[n_T] assign[=] call[name[np].size, parameter[name[Y]]] variable[n_C] assign[=] call[name[np].size, parameter[name[X]]] <ast.Tuple object at 0x7da1b0890fd0> assign[=] call[name[self]._chol_idx, parameter[name[n_C], name[rank]]] variable[n_l] assign[=] call[name[np].size, parameter[call[name[l_idx]][constant[0]]]] variable[t_start] assign[=] call[name[time].time, parameter[]] <ast.Tuple object at 0x7da1b0891480> assign[=] call[name[self]._prepare_DF, parameter[name[n_T]]] <ast.Tuple object at 0x7da1b08922c0> assign[=] call[name[self]._prepare_data_XY, parameter[name[X], name[Y], name[D], name[F]]] <ast.Tuple object at 0x7da1b0891f60> assign[=] call[name[self]._prepare_data_XYX0, parameter[name[X], name[Y], name[X_base], constant[None], name[D], name[F], name[run_TRs]]] <ast.Tuple object at 0x7da1b0891bd0> assign[=] call[name[self]._calc_dist2_GP, parameter[]] <ast.Tuple object at 0x7da1b08918a0> assign[=] call[name[self]._build_index_param, parameter[name[n_l], name[n_V], name[n_smooth]]] variable[current_GP] assign[=] call[name[np].zeros, parameter[name[n_smooth]]] <ast.Tuple object at 0x7da1b08934c0> assign[=] call[name[self]._initial_fit_singpara, parameter[name[XTX], name[XTDX], name[XTFX], name[YTY_diag], name[YTDY_diag], name[YTFY_diag], name[XTY], name[XTDY], name[XTFY], name[X0TX0], name[X0TDX0], name[X0TFX0], name[XTX0], name[XTDX0], name[XTFX0], name[X0TY], name[X0TDY], name[X0TFY], name[X], name[Y], name[X0], name[idx_param_sing], name[l_idx], name[n_C], name[n_T], name[n_V], name[n_l], name[n_run], name[n_X0], name[rank]]] variable[current_logSNR2] assign[=] <ast.UnaryOp object at 0x7da1b0892650> variable[norm_factor] assign[=] call[name[np].mean, parameter[name[current_logSNR2]]] variable[current_logSNR2] assign[=] binary_operation[name[current_logSNR2] - name[norm_factor]] variable[X_res] assign[=] constant[None] if name[GP_space] begin[:] <ast.Tuple object at 0x7da20c6e78b0> assign[=] call[name[self]._fit_diagV_noGP, parameter[name[XTY], name[XTDY], name[XTFY], name[YTY_diag], name[YTDY_diag], name[YTFY_diag], name[XTX], name[XTDX], name[XTFX], name[X], name[Y], name[X_base], name[X_res], name[D], name[F], name[run_TRs], name[current_vec_U_chlsk_l], name[current_a1], name[current_logSNR2], name[idx_param_fitU], name[idx_param_fitV], name[l_idx], name[n_C], name[n_T], name[n_V], name[n_l], name[n_run], name[n_X0], name[rank]]] call[name[current_GP]][constant[0]] assign[=] call[name[np].log, parameter[call[name[np].min, parameter[call[name[dist2]][call[name[np].tril_indices_from, parameter[name[dist2]]]]]]]] call[name[logger].debug, parameter[call[constant[current GP[0]:{}].format, parameter[call[name[current_GP]][constant[0]]]]]] if name[GP_inten] begin[:] call[name[current_GP]][constant[1]] assign[=] call[name[np].log, parameter[call[name[np].maximum, parameter[call[name[np].percentile, parameter[call[name[inten_diff2]][call[name[np].tril_indices_from, parameter[name[inten_diff2]]]], constant[2]]], constant[0.5]]]]] call[name[logger].debug, parameter[call[constant[current GP[1]:{}].format, parameter[call[name[current_GP]][constant[1]]]]]] call[name[logger].debug, parameter[call[constant[indexing:{}].format, parameter[name[idx_param_fitV]]]]] call[name[logger].debug, parameter[call[constant[initial GP parameters:{}].format, parameter[name[current_GP]]]]] <ast.Tuple object at 0x7da1b074e140> assign[=] call[name[self]._fit_diagV_GP, parameter[name[XTY], name[XTDY], name[XTFY], name[YTY_diag], name[YTDY_diag], name[YTFY_diag], name[XTX], name[XTDX], name[XTFX], name[X], name[Y], name[X_base], name[X_res], name[D], name[F], name[run_TRs], name[current_vec_U_chlsk_l], name[current_a1], name[current_logSNR2], name[current_GP], name[n_smooth], name[idx_param_fitU], name[idx_param_fitV], name[l_idx], name[n_C], name[n_T], name[n_V], name[n_l], name[n_run], name[n_X0], name[rank], name[GP_space], name[GP_inten], name[dist2], name[inten_diff2], name[space_smooth_range], name[inten_smooth_range]]] variable[estU_chlsk_l_AR1_UV] assign[=] call[name[np].zeros, parameter[list[[<ast.Name object at 0x7da1b074f340>, <ast.Name object at 0x7da1b074c610>]]]] call[name[estU_chlsk_l_AR1_UV]][name[l_idx]] assign[=] name[current_vec_U_chlsk_l] variable[est_cov_AR1_UV] assign[=] call[name[np].dot, parameter[name[estU_chlsk_l_AR1_UV], name[estU_chlsk_l_AR1_UV].T]] variable[est_rho1_AR1_UV] assign[=] binary_operation[binary_operation[constant[2] / name[np].pi] * call[name[np].arctan, parameter[name[current_a1]]]] variable[est_SNR_AR1_UV] assign[=] call[name[np].exp, parameter[binary_operation[name[current_logSNR2] / constant[2.0]]]] <ast.Tuple object at 0x7da1b074d690> assign[=] call[name[self]._prepare_data_XYX0, parameter[name[X], name[Y], name[X_base], name[X_res], name[D], name[F], name[run_TRs]]] <ast.Tuple object at 0x7da1b074e890> assign[=] call[name[self]._precompute_ar1_quad_forms, parameter[name[XTY], name[XTDY], name[XTFY], name[YTY_diag], name[YTDY_diag], name[YTFY_diag], name[XTX], name[XTDX], name[XTFX], name[X0TX0], name[X0TDX0], name[X0TFX0], name[XTX0], name[XTDX0], name[XTFX0], name[X0TY], name[X0TDY], name[X0TFY], name[estU_chlsk_l_AR1_UV], name[est_rho1_AR1_UV], name[n_V], name[n_X0]]] <ast.Tuple object at 0x7da1b074d2a0> assign[=] call[name[self]._calc_LL, parameter[name[est_rho1_AR1_UV], name[LTXTAcorrXL], name[LTXTAcorrY], name[YTAcorrY], name[X0TAX0], binary_operation[name[est_SNR_AR1_UV] ** constant[2]], name[n_V], name[n_T], name[n_run], name[rank], name[n_X0]]] variable[est_sigma_AR1_UV] assign[=] binary_operation[name[sigma2] ** constant[0.5]] variable[est_beta_AR1_UV] assign[=] binary_operation[binary_operation[name[est_SNR_AR1_UV] ** constant[2]] * call[name[np].dot, parameter[name[estU_chlsk_l_AR1_UV], name[YTAcorrXL_LAMBDA].T]]] variable[est_beta_AR1_UV_latent] assign[=] binary_operation[binary_operation[name[est_SNR_AR1_UV] ** constant[2]] * name[YTAcorrXL_LAMBDA].T] variable[est_beta0_AR1_UV] assign[=] call[name[np].einsum, parameter[constant[ijk,ki->ji], name[X0TAX0_i], binary_operation[name[X0TAY] - call[name[np].einsum, parameter[constant[ikj,ki->ji], name[XTAX0], name[est_beta_AR1_UV]]]]]] if compare[name[idx_DC].size greater[>] constant[1]] begin[:] variable[collapsed_DC] assign[=] call[name[np].sum, parameter[call[name[X0]][tuple[[<ast.Slice object at 0x7da1b07a0e50>, <ast.Name object at 0x7da1b07a0be0>]]]]] variable[X0] assign[=] call[name[np].insert, parameter[call[name[np].delete, parameter[name[X0], name[idx_DC]]], constant[0], name[collapsed_DC]]] variable[collapsed_beta0] assign[=] call[name[np].mean, parameter[call[name[est_beta0_AR1_UV]][tuple[[<ast.Name object at 0x7da1b07a2110>, <ast.Slice object at 0x7da1b07a2170>]]]]] variable[est_beta0_AR1_UV] assign[=] call[name[np].insert, parameter[call[name[np].delete, parameter[name[est_beta0_AR1_UV], name[idx_DC]]], constant[0], name[collapsed_beta0]]] variable[t_finish] assign[=] call[name[time].time, parameter[]] call[name[logger].info, parameter[call[constant[total time of fitting: {} seconds].format, parameter[binary_operation[name[t_finish] - name[t_start]]]]]] call[name[logger].debug, parameter[call[constant[final GP parameters:{}].format, parameter[name[current_GP]]]]] if name[GP_space] begin[:] variable[est_space_smooth_r] assign[=] call[name[np].exp, parameter[binary_operation[call[name[current_GP]][constant[0]] / constant[2.0]]]] if name[GP_inten] begin[:] variable[est_intensity_kernel_r] assign[=] call[name[np].exp, parameter[binary_operation[call[name[current_GP]][constant[1]] / constant[2.0]]]] variable[K_major] assign[=] call[name[np].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b07a0b20> / constant[2.0]]]] variable[K] assign[=] binary_operation[name[K_major] + call[name[np].diag, parameter[binary_operation[call[name[np].ones, parameter[name[n_V]]] * name[self].eta]]]] variable[invK_tilde_log_SNR] assign[=] binary_operation[call[name[np].linalg.solve, parameter[name[K], name[current_logSNR2]]] / constant[2]] variable[log_SNR_invK_tilde_log_SNR] assign[=] binary_operation[call[name[np].dot, parameter[name[current_logSNR2], name[invK_tilde_log_SNR]]] / constant[2]] <ast.Tuple object at 0x7da1b07a0d60> assign[=] call[name[self].tau2_prior, parameter[name[log_SNR_invK_tilde_log_SNR], name[n_V], name[self].tau_range]] variable[est_std_log_SNR] assign[=] binary_operation[name[tau2] ** constant[0.5]] return[tuple[[<ast.Name object at 0x7da18bccbb50>, <ast.Name object at 0x7da18bcca830>, <ast.Name object at 0x7da18bcc91e0>, <ast.Name object at 0x7da18bcca560>, <ast.Name object at 0x7da18bcc8d00>, <ast.Name object at 0x7da18bcca500>, <ast.Name object at 0x7da18bcc9f30>, <ast.Name object at 0x7da18bcc95d0>, <ast.Name object at 0x7da18bcc9270>, <ast.Name object at 0x7da18bcca4a0>, <ast.Name object at 0x7da18bcca5c0>, <ast.Name object at 0x7da18bcca5f0>]]]
keyword[def] identifier[_fit_RSA_UV] ( identifier[self] , identifier[X] , identifier[Y] , identifier[X_base] , identifier[scan_onsets] = keyword[None] , identifier[coords] = keyword[None] , identifier[inten] = keyword[None] ): literal[string] identifier[GP_inten] = identifier[self] . identifier[GP_inten] identifier[GP_space] = identifier[self] . identifier[GP_space] identifier[rank] = identifier[self] . identifier[rank] identifier[n_V] = identifier[np] . identifier[size] ( identifier[Y] , identifier[axis] = literal[int] ) identifier[n_T] = identifier[np] . identifier[size] ( identifier[Y] , identifier[axis] = literal[int] ) identifier[n_C] = identifier[np] . identifier[size] ( identifier[X] , identifier[axis] = literal[int] ) identifier[l_idx] , identifier[rank] = identifier[self] . identifier[_chol_idx] ( identifier[n_C] , identifier[rank] ) identifier[n_l] = identifier[np] . identifier[size] ( identifier[l_idx] [ literal[int] ]) identifier[t_start] = identifier[time] . identifier[time] () identifier[D] , identifier[F] , identifier[run_TRs] , identifier[n_run] = identifier[self] . identifier[_prepare_DF] ( identifier[n_T] , identifier[scan_onsets] = identifier[scan_onsets] ) identifier[XTY] , identifier[XTDY] , identifier[XTFY] , identifier[YTY_diag] , identifier[YTDY_diag] , identifier[YTFY_diag] , identifier[XTX] , identifier[XTDX] , identifier[XTFX] = identifier[self] . identifier[_prepare_data_XY] ( identifier[X] , identifier[Y] , identifier[D] , identifier[F] ) identifier[X0TX0] , identifier[X0TDX0] , identifier[X0TFX0] , identifier[XTX0] , identifier[XTDX0] , identifier[XTFX0] , identifier[X0TY] , identifier[X0TDY] , identifier[X0TFY] , identifier[X0] , identifier[X_base] , identifier[n_X0] , identifier[idx_DC] = identifier[self] . identifier[_prepare_data_XYX0] ( identifier[X] , identifier[Y] , identifier[X_base] , keyword[None] , identifier[D] , identifier[F] , identifier[run_TRs] , identifier[no_DC] = keyword[False] ) identifier[dist2] , identifier[inten_diff2] , identifier[space_smooth_range] , identifier[inten_smooth_range] , identifier[n_smooth] = identifier[self] . identifier[_calc_dist2_GP] ( identifier[coords] = identifier[coords] , identifier[inten] = identifier[inten] , identifier[GP_space] = identifier[GP_space] , identifier[GP_inten] = identifier[GP_inten] ) identifier[idx_param_sing] , identifier[idx_param_fitU] , identifier[idx_param_fitV] = identifier[self] . identifier[_build_index_param] ( identifier[n_l] , identifier[n_V] , identifier[n_smooth] ) identifier[current_GP] = identifier[np] . identifier[zeros] ( identifier[n_smooth] ) identifier[current_vec_U_chlsk_l] , identifier[current_a1] , identifier[current_logSigma2] = identifier[self] . identifier[_initial_fit_singpara] ( identifier[XTX] , identifier[XTDX] , identifier[XTFX] , identifier[YTY_diag] , identifier[YTDY_diag] , identifier[YTFY_diag] , identifier[XTY] , identifier[XTDY] , identifier[XTFY] , identifier[X0TX0] , identifier[X0TDX0] , identifier[X0TFX0] , identifier[XTX0] , identifier[XTDX0] , identifier[XTFX0] , identifier[X0TY] , identifier[X0TDY] , identifier[X0TFY] , identifier[X] , identifier[Y] , identifier[X0] , identifier[idx_param_sing] , identifier[l_idx] , identifier[n_C] , identifier[n_T] , identifier[n_V] , identifier[n_l] , identifier[n_run] , identifier[n_X0] , identifier[rank] ) identifier[current_logSNR2] =- identifier[current_logSigma2] identifier[norm_factor] = identifier[np] . identifier[mean] ( identifier[current_logSNR2] ) identifier[current_logSNR2] = identifier[current_logSNR2] - identifier[norm_factor] identifier[X_res] = keyword[None] keyword[if] identifier[GP_space] : identifier[current_vec_U_chlsk_l] , identifier[current_a1] , identifier[current_logSNR2] , identifier[X_res] = identifier[self] . identifier[_fit_diagV_noGP] ( identifier[XTY] , identifier[XTDY] , identifier[XTFY] , identifier[YTY_diag] , identifier[YTDY_diag] , identifier[YTFY_diag] , identifier[XTX] , identifier[XTDX] , identifier[XTFX] , identifier[X] , identifier[Y] , identifier[X_base] , identifier[X_res] , identifier[D] , identifier[F] , identifier[run_TRs] , identifier[current_vec_U_chlsk_l] , identifier[current_a1] , identifier[current_logSNR2] , identifier[idx_param_fitU] , identifier[idx_param_fitV] , identifier[l_idx] , identifier[n_C] , identifier[n_T] , identifier[n_V] , identifier[n_l] , identifier[n_run] , identifier[n_X0] , identifier[rank] ) identifier[current_GP] [ literal[int] ]= identifier[np] . identifier[log] ( identifier[np] . identifier[min] ( identifier[dist2] [ identifier[np] . identifier[tril_indices_from] ( identifier[dist2] , identifier[k] =- literal[int] )])) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[current_GP] [ literal[int] ])) keyword[if] identifier[GP_inten] : identifier[current_GP] [ literal[int] ]= identifier[np] . identifier[log] ( identifier[np] . identifier[maximum] ( identifier[np] . identifier[percentile] ( identifier[inten_diff2] [ identifier[np] . identifier[tril_indices_from] ( identifier[inten_diff2] , identifier[k] =- literal[int] )], literal[int] ), literal[int] )) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[current_GP] [ literal[int] ])) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[idx_param_fitV] )) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[current_GP] )) identifier[current_vec_U_chlsk_l] , identifier[current_a1] , identifier[current_logSNR2] , identifier[current_GP] , identifier[X_res] = identifier[self] . identifier[_fit_diagV_GP] ( identifier[XTY] , identifier[XTDY] , identifier[XTFY] , identifier[YTY_diag] , identifier[YTDY_diag] , identifier[YTFY_diag] , identifier[XTX] , identifier[XTDX] , identifier[XTFX] , identifier[X] , identifier[Y] , identifier[X_base] , identifier[X_res] , identifier[D] , identifier[F] , identifier[run_TRs] , identifier[current_vec_U_chlsk_l] , identifier[current_a1] , identifier[current_logSNR2] , identifier[current_GP] , identifier[n_smooth] , identifier[idx_param_fitU] , identifier[idx_param_fitV] , identifier[l_idx] , identifier[n_C] , identifier[n_T] , identifier[n_V] , identifier[n_l] , identifier[n_run] , identifier[n_X0] , identifier[rank] , identifier[GP_space] , identifier[GP_inten] , identifier[dist2] , identifier[inten_diff2] , identifier[space_smooth_range] , identifier[inten_smooth_range] ) identifier[estU_chlsk_l_AR1_UV] = identifier[np] . identifier[zeros] ([ identifier[n_C] , identifier[rank] ]) identifier[estU_chlsk_l_AR1_UV] [ identifier[l_idx] ]= identifier[current_vec_U_chlsk_l] identifier[est_cov_AR1_UV] = identifier[np] . identifier[dot] ( identifier[estU_chlsk_l_AR1_UV] , identifier[estU_chlsk_l_AR1_UV] . identifier[T] ) identifier[est_rho1_AR1_UV] = literal[int] / identifier[np] . identifier[pi] * identifier[np] . identifier[arctan] ( identifier[current_a1] ) identifier[est_SNR_AR1_UV] = identifier[np] . identifier[exp] ( identifier[current_logSNR2] / literal[int] ) identifier[X0TX0] , identifier[X0TDX0] , identifier[X0TFX0] , identifier[XTX0] , identifier[XTDX0] , identifier[XTFX0] , identifier[X0TY] , identifier[X0TDY] , identifier[X0TFY] , identifier[X0] , identifier[X_base] , identifier[n_X0] , identifier[_] = identifier[self] . identifier[_prepare_data_XYX0] ( identifier[X] , identifier[Y] , identifier[X_base] , identifier[X_res] , identifier[D] , identifier[F] , identifier[run_TRs] , identifier[no_DC] = keyword[True] ) identifier[X0TAX0] , identifier[XTAX0] , identifier[X0TAY] , identifier[X0TAX0_i] , identifier[XTAcorrX] , identifier[XTAcorrY] , identifier[YTAcorrY] , identifier[LTXTAcorrY] , identifier[XTAcorrXL] , identifier[LTXTAcorrXL] = identifier[self] . identifier[_precompute_ar1_quad_forms] ( identifier[XTY] , identifier[XTDY] , identifier[XTFY] , identifier[YTY_diag] , identifier[YTDY_diag] , identifier[YTFY_diag] , identifier[XTX] , identifier[XTDX] , identifier[XTFX] , identifier[X0TX0] , identifier[X0TDX0] , identifier[X0TFX0] , identifier[XTX0] , identifier[XTDX0] , identifier[XTFX0] , identifier[X0TY] , identifier[X0TDY] , identifier[X0TFY] , identifier[estU_chlsk_l_AR1_UV] , identifier[est_rho1_AR1_UV] , identifier[n_V] , identifier[n_X0] ) identifier[LL] , identifier[LAMBDA_i] , identifier[LAMBDA] , identifier[YTAcorrXL_LAMBDA] , identifier[sigma2] = identifier[self] . identifier[_calc_LL] ( identifier[est_rho1_AR1_UV] , identifier[LTXTAcorrXL] , identifier[LTXTAcorrY] , identifier[YTAcorrY] , identifier[X0TAX0] , identifier[est_SNR_AR1_UV] ** literal[int] , identifier[n_V] , identifier[n_T] , identifier[n_run] , identifier[rank] , identifier[n_X0] ) identifier[est_sigma_AR1_UV] = identifier[sigma2] ** literal[int] identifier[est_beta_AR1_UV] = identifier[est_SNR_AR1_UV] ** literal[int] * identifier[np] . identifier[dot] ( identifier[estU_chlsk_l_AR1_UV] , identifier[YTAcorrXL_LAMBDA] . identifier[T] ) identifier[est_beta_AR1_UV_latent] = identifier[est_SNR_AR1_UV] ** literal[int] * identifier[YTAcorrXL_LAMBDA] . identifier[T] identifier[est_beta0_AR1_UV] = identifier[np] . identifier[einsum] ( literal[string] , identifier[X0TAX0_i] , ( identifier[X0TAY] - identifier[np] . identifier[einsum] ( literal[string] , identifier[XTAX0] , identifier[est_beta_AR1_UV] ))) keyword[if] identifier[idx_DC] . identifier[size] > literal[int] : identifier[collapsed_DC] = identifier[np] . identifier[sum] ( identifier[X0] [:, identifier[idx_DC] ], identifier[axis] = literal[int] ) identifier[X0] = identifier[np] . identifier[insert] ( identifier[np] . identifier[delete] ( identifier[X0] , identifier[idx_DC] , identifier[axis] = literal[int] ), literal[int] , identifier[collapsed_DC] , identifier[axis] = literal[int] ) identifier[collapsed_beta0] = identifier[np] . identifier[mean] ( identifier[est_beta0_AR1_UV] [ identifier[idx_DC] ,:], identifier[axis] = literal[int] ) identifier[est_beta0_AR1_UV] = identifier[np] . identifier[insert] ( identifier[np] . identifier[delete] ( identifier[est_beta0_AR1_UV] , identifier[idx_DC] , identifier[axis] = literal[int] ), literal[int] , identifier[collapsed_beta0] , identifier[axis] = literal[int] ) identifier[t_finish] = identifier[time] . identifier[time] () identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[t_finish] - identifier[t_start] )) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[current_GP] )) keyword[if] identifier[GP_space] : identifier[est_space_smooth_r] = identifier[np] . identifier[exp] ( identifier[current_GP] [ literal[int] ]/ literal[int] ) keyword[if] identifier[GP_inten] : identifier[est_intensity_kernel_r] = identifier[np] . identifier[exp] ( identifier[current_GP] [ literal[int] ]/ literal[int] ) identifier[K_major] = identifier[np] . identifier[exp] (-( identifier[dist2] / identifier[est_space_smooth_r] ** literal[int] + identifier[inten_diff2] / identifier[est_intensity_kernel_r] ** literal[int] ) / literal[int] ) keyword[else] : identifier[est_intensity_kernel_r] = keyword[None] identifier[K_major] = identifier[np] . identifier[exp] (- identifier[dist2] / identifier[est_space_smooth_r] ** literal[int] / literal[int] ) identifier[K] = identifier[K_major] + identifier[np] . identifier[diag] ( identifier[np] . identifier[ones] ( identifier[n_V] )* identifier[self] . identifier[eta] ) identifier[invK_tilde_log_SNR] = identifier[np] . identifier[linalg] . identifier[solve] ( identifier[K] , identifier[current_logSNR2] )/ literal[int] identifier[log_SNR_invK_tilde_log_SNR] = identifier[np] . identifier[dot] ( identifier[current_logSNR2] , identifier[invK_tilde_log_SNR] )/ literal[int] identifier[tau2] , identifier[_] = identifier[self] . identifier[tau2_prior] ( identifier[log_SNR_invK_tilde_log_SNR] , identifier[n_V] , identifier[self] . identifier[tau_range] ) identifier[est_std_log_SNR] = identifier[tau2] ** literal[int] keyword[else] : identifier[est_space_smooth_r] = keyword[None] identifier[est_intensity_kernel_r] = keyword[None] identifier[est_std_log_SNR] = keyword[None] keyword[return] identifier[est_cov_AR1_UV] , identifier[estU_chlsk_l_AR1_UV] , identifier[est_SNR_AR1_UV] , identifier[est_beta_AR1_UV] , identifier[est_beta0_AR1_UV] , identifier[est_beta_AR1_UV_latent] , identifier[est_sigma_AR1_UV] , identifier[est_rho1_AR1_UV] , identifier[est_space_smooth_r] , identifier[est_std_log_SNR] , identifier[est_intensity_kernel_r] , identifier[X0]
def _fit_RSA_UV(self, X, Y, X_base, scan_onsets=None, coords=None, inten=None):
    """ The major utility of fitting Bayesian RSA.

        Note that there is a naming change of variable. X in fit()
        is changed to Y here, and design in fit() is changed to X here.
        This is because we follow the tradition that X expresses the
        variable defined (controlled) by the experimenter, i.e.,
        the time course of experimental conditions convolved by an HRF,
        and Y expresses data.
        However, in wrapper function fit(), we follow the naming
        routine of scikit-learn.
    """
    GP_inten = self.GP_inten
    GP_space = self.GP_space
    rank = self.rank
    n_V = np.size(Y, axis=1)
    n_T = np.size(Y, axis=0)
    n_C = np.size(X, axis=1)
    (l_idx, rank) = self._chol_idx(n_C, rank)
    n_l = np.size(l_idx[0]) # the number of parameters for L
    t_start = time.time()
    (D, F, run_TRs, n_run) = self._prepare_DF(n_T, scan_onsets=scan_onsets)
    (XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, XTDX, XTFX) = self._prepare_data_XY(X, Y, D, F)
    (X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY, X0, X_base, n_X0, idx_DC) = self._prepare_data_XYX0(X, Y, X_base, None, D, F, run_TRs, no_DC=False)
    # Prepare the data for fitting. These pre-calculated matrices
    # will be re-used a lot in evaluating likelihood function and
    # gradient.
    # DC component will be added to the nuisance regressors.
    # In later steps, we do not need to add DC components again
    (dist2, inten_diff2, space_smooth_range, inten_smooth_range, n_smooth) = self._calc_dist2_GP(coords=coords, inten=inten, GP_space=GP_space, GP_inten=GP_inten)
    # Calculating the distance between voxel locations and between
    # voxel intensities. These are used if a Gaussian Process prior
    # is requested to regularize log(SNR^2)
    (idx_param_sing, idx_param_fitU, idx_param_fitV) = self._build_index_param(n_l, n_V, n_smooth)
    # Indexes to find each parameter in a combined parameter vector.
    current_GP = np.zeros(n_smooth)
    # We will perform the fitting in 2~3 steps:
    # (1) A preliminary fitting assuming all voxels share
    # exactly the same temporal covariance matrix for their noise.
    # SNR is assumed to be 1 for all voxels in this fitting.
    # Therefore, there are only n_l+2 free parameters.
    # (2) (optional) A fitting which allows each voxel to have their
    # own pseudo-SNR and AR(1) coefficients. But no Gaussian Process
    # prior is imposed on log(SNR). This step is neglected if GP
    # prior is not requested. This step allows the SNR parameters to
    # move closer to their correct values before GP is introduced.
    # This step alternately fits the shared covariance and voxel-
    # specific variance. It fits for init_iter steps and the
    # tolerance is also increased by a factor of 5 to speed up
    # fitting.
    # (3) Final fitting. If GP prior is requested, it will be
    # introduced in this step. Otherwise, just fit as the previous
    # step, but using un-altered tolerance setting, and n_iter
    # as the number of iterations.
    # Step 1 fitting, with a simplified model
    (current_vec_U_chlsk_l, current_a1, current_logSigma2) = self._initial_fit_singpara(XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag, XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY, X, Y, X0, idx_param_sing, l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank)
    current_logSNR2 = -current_logSigma2
    norm_factor = np.mean(current_logSNR2)
    current_logSNR2 = current_logSNR2 - norm_factor
    X_res = None
    # Step 2 fitting, which only happens if
    # GP prior is requested
    if GP_space:
        (current_vec_U_chlsk_l, current_a1, current_logSNR2, X_res) = self._fit_diagV_noGP(XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs, current_vec_U_chlsk_l, current_a1, current_logSNR2, idx_param_fitU, idx_param_fitV, l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank)
        current_GP[0] = np.log(np.min(dist2[np.tril_indices_from(dist2, k=-1)]))
        # We start fitting the model with GP prior with a small
        # length scale: the size of voxels.
        # Alternatively, initialize with a large distance.
        # Further testing of initial parameters needs to be done.
        # current_GP[0] = np.log(np.max(dist2)/4.0)
        logger.debug('current GP[0]:{}'.format(current_GP[0]))
        if GP_inten:
            current_GP[1] = np.log(np.maximum(np.percentile(inten_diff2[np.tril_indices_from(inten_diff2, k=-1)], 2), 0.5))
            logger.debug('current GP[1]:{}'.format(current_GP[1])) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        # We start the length scale for intensity with
        # a small value. A heuristic is 2 percentile of
        # all the square differences. But it should not be
        # smaller than 0.5. This limit is set in case
        # many voxels have close to equal intensities,
        # which might render 2 percentile to 0.
    # Step 3 fitting. GP prior is imposed if requested.
    # In this step, unless auto_nuisance is set to False, X_res
    # will be re-estimated from the residuals after each step
    # of fitting. And X0 will be concatenation of X_base and X_res
    logger.debug('indexing:{}'.format(idx_param_fitV))
    logger.debug('initial GP parameters:{}'.format(current_GP))
    (current_vec_U_chlsk_l, current_a1, current_logSNR2, current_GP, X_res) = self._fit_diagV_GP(XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs, current_vec_U_chlsk_l, current_a1, current_logSNR2, current_GP, n_smooth, idx_param_fitU, idx_param_fitV, l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank, GP_space, GP_inten, dist2, inten_diff2, space_smooth_range, inten_smooth_range)
    estU_chlsk_l_AR1_UV = np.zeros([n_C, rank])
    estU_chlsk_l_AR1_UV[l_idx] = current_vec_U_chlsk_l
    est_cov_AR1_UV = np.dot(estU_chlsk_l_AR1_UV, estU_chlsk_l_AR1_UV.T)
    est_rho1_AR1_UV = 2 / np.pi * np.arctan(current_a1)
    est_SNR_AR1_UV = np.exp(current_logSNR2 / 2.0)
    # Calculating est_sigma_AR1_UV,
    # est_beta_AR1_UV and est_beta0_AR1_UV
    (X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY, X0, X_base, n_X0, _) = self._prepare_data_XYX0(X, Y, X_base, X_res, D, F, run_TRs, no_DC=True)
    (X0TAX0, XTAX0, X0TAY, X0TAX0_i, XTAcorrX, XTAcorrY, YTAcorrY, LTXTAcorrY, XTAcorrXL, LTXTAcorrXL) = self._precompute_ar1_quad_forms(XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, XTDX, XTFX, X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY, estU_chlsk_l_AR1_UV, est_rho1_AR1_UV, n_V, n_X0)
    (LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, sigma2) = self._calc_LL(est_rho1_AR1_UV, LTXTAcorrXL, LTXTAcorrY, YTAcorrY, X0TAX0, est_SNR_AR1_UV ** 2, n_V, n_T, n_run, rank, n_X0)
    est_sigma_AR1_UV = sigma2 ** 0.5
    est_beta_AR1_UV = est_SNR_AR1_UV ** 2 * np.dot(estU_chlsk_l_AR1_UV, YTAcorrXL_LAMBDA.T)
    est_beta_AR1_UV_latent = est_SNR_AR1_UV ** 2 * YTAcorrXL_LAMBDA.T
    # the latent term means that X*L multiplied by this term
    # is the same as X*beta. This will be used for decoding
    # and cross-validating, in case L is low-rank
    est_beta0_AR1_UV = np.einsum('ijk,ki->ji', X0TAX0_i, X0TAY - np.einsum('ikj,ki->ji', XTAX0, est_beta_AR1_UV))
    # Now we want to collapse all beta0 corresponding to DC components
    # of different runs to a single map, and preserve only one DC component
    # across runs. This is because they should express the same component
    # and the new data to transform do not necessarily have the same
    # numbers of runs as the training data.
    if idx_DC.size > 1:
        collapsed_DC = np.sum(X0[:, idx_DC], axis=1)
        X0 = np.insert(np.delete(X0, idx_DC, axis=1), 0, collapsed_DC, axis=1)
        collapsed_beta0 = np.mean(est_beta0_AR1_UV[idx_DC, :], axis=0)
        est_beta0_AR1_UV = np.insert(np.delete(est_beta0_AR1_UV, idx_DC, axis=0), 0, collapsed_beta0, axis=0) # depends on [control=['if'], data=[]]
    t_finish = time.time()
    logger.info('total time of fitting: {} seconds'.format(t_finish - t_start))
    logger.debug('final GP parameters:{}'.format(current_GP))
    if GP_space:
        est_space_smooth_r = np.exp(current_GP[0] / 2.0)
        if GP_inten:
            est_intensity_kernel_r = np.exp(current_GP[1] / 2.0)
            K_major = np.exp(-(dist2 / est_space_smooth_r ** 2 + inten_diff2 / est_intensity_kernel_r ** 2) / 2.0) # depends on [control=['if'], data=[]]
        else:
            est_intensity_kernel_r = None
            K_major = np.exp(-dist2 / est_space_smooth_r ** 2 / 2.0)
        K = K_major + np.diag(np.ones(n_V) * self.eta)
        invK_tilde_log_SNR = np.linalg.solve(K, current_logSNR2) / 2
        log_SNR_invK_tilde_log_SNR = np.dot(current_logSNR2, invK_tilde_log_SNR) / 2
        (tau2, _) = self.tau2_prior(log_SNR_invK_tilde_log_SNR, n_V, self.tau_range)
        est_std_log_SNR = tau2 ** 0.5 # depends on [control=['if'], data=[]]
    else:
        est_space_smooth_r = None
        est_intensity_kernel_r = None
        est_std_log_SNR = None
    return (est_cov_AR1_UV, estU_chlsk_l_AR1_UV, est_SNR_AR1_UV, est_beta_AR1_UV, est_beta0_AR1_UV, est_beta_AR1_UV_latent, est_sigma_AR1_UV, est_rho1_AR1_UV, est_space_smooth_r, est_std_log_SNR, est_intensity_kernel_r, X0)
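Two small transforms inside _fit_RSA_UV() are easy to isolate: the unconstrained AR(1) parameter a1 is squashed into (-1, 1) via rho = 2/pi * arctan(a1), and the shared covariance is rebuilt from a lower-triangular Cholesky factor stored as a flat vector. A toy sketch of both (np.tril_indices stands in for the indices returned by self._chol_idx):

import numpy as np

a1 = np.array([-5.0, 0.0, 5.0])
rho = 2 / np.pi * np.arctan(a1)     # AR(1) coefficients in (-1, 1)
print(rho)

n_C = 3
l_idx = np.tril_indices(n_C)        # free entries of the Cholesky factor
L = np.zeros((n_C, n_C))
L[l_idx] = np.arange(1, len(l_idx[0]) + 1)  # toy parameter vector
cov = np.dot(L, L.T)                # positive semi-definite by construction
print(np.allclose(cov, cov.T))      # True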
def submit_reading(basename, pmid_list_filename, readers, start_ix=None, end_ix=None, pmids_per_job=3000, num_tries=2, force_read=False, force_fulltext=False, project_name=None): """Submit an old-style pmid-centered no-database s3 only reading job. This function is provided for the sake of backward compatibility. It is preferred that you use the object-oriented PmidSubmitter and the submit_reading job going forward. """ sub = PmidSubmitter(basename, readers, project_name) sub.set_options(force_read, force_fulltext) sub.submit_reading(pmid_list_filename, start_ix, end_ix, pmids_per_job, num_tries) return sub.job_list
def function[submit_reading, parameter[basename, pmid_list_filename, readers, start_ix, end_ix, pmids_per_job, num_tries, force_read, force_fulltext, project_name]]: constant[Submit an old-style pmid-centered no-database s3 only reading job. This function is provided for the sake of backward compatibility. It is preferred that you use the object-oriented PmidSubmitter and the submit_reading job going forward. ] variable[sub] assign[=] call[name[PmidSubmitter], parameter[name[basename], name[readers], name[project_name]]] call[name[sub].set_options, parameter[name[force_read], name[force_fulltext]]] call[name[sub].submit_reading, parameter[name[pmid_list_filename], name[start_ix], name[end_ix], name[pmids_per_job], name[num_tries]]] return[name[sub].job_list]
keyword[def] identifier[submit_reading] ( identifier[basename] , identifier[pmid_list_filename] , identifier[readers] , identifier[start_ix] = keyword[None] , identifier[end_ix] = keyword[None] , identifier[pmids_per_job] = literal[int] , identifier[num_tries] = literal[int] , identifier[force_read] = keyword[False] , identifier[force_fulltext] = keyword[False] , identifier[project_name] = keyword[None] ): literal[string] identifier[sub] = identifier[PmidSubmitter] ( identifier[basename] , identifier[readers] , identifier[project_name] ) identifier[sub] . identifier[set_options] ( identifier[force_read] , identifier[force_fulltext] ) identifier[sub] . identifier[submit_reading] ( identifier[pmid_list_filename] , identifier[start_ix] , identifier[end_ix] , identifier[pmids_per_job] , identifier[num_tries] ) keyword[return] identifier[sub] . identifier[job_list]
def submit_reading(basename, pmid_list_filename, readers, start_ix=None, end_ix=None, pmids_per_job=3000, num_tries=2, force_read=False, force_fulltext=False, project_name=None): """Submit an old-style pmid-centered no-database s3 only reading job. This function is provided for the sake of backward compatibility. It is preferred that you use the object-oriented PmidSubmitter and the submit_reading job going forward. """ sub = PmidSubmitter(basename, readers, project_name) sub.set_options(force_read, force_fulltext) sub.submit_reading(pmid_list_filename, start_ix, end_ix, pmids_per_job, num_tries) return sub.job_list
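submit_reading() is a thin backward-compatibility wrapper: it builds the object-oriented submitter, forwards its arguments, and returns the collected job list. A sketch of the same delegation pattern with a stand-in class (MiniSubmitter is hypothetical, not the real PmidSubmitter):

class MiniSubmitter:
    def __init__(self, basename):
        self.basename = basename
        self.job_list = []

    def submit(self, ids):
        # Record one job name per id, as a real submitter would queue jobs.
        self.job_list.extend('%s_job_%d' % (self.basename, i) for i in ids)

def submit_legacy(basename, ids):
    sub = MiniSubmitter(basename)
    sub.submit(ids)
    return sub.job_list

print(submit_legacy('run1', [0, 1]))  # ['run1_job_0', 'run1_job_1']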
def chunks(sliceable, n):
    """ returns a list of chunks of any sliceable object, each of max length n

    :Parameters:
        - sliceable: (string|list|tuple) any sliceable object
        - n: max elements of each chunk

    :Example:

    >>> chunks([1,2,3,4,5,6,7,8,9,'x'], 4)
    [[1, 2, 3, 4], [5, 6, 7, 8], [9, 'x']]

    >>> chunks('123456789X', 3)
    ['123', '456', '789', 'X']

    """
    return [sliceable[i:i+n] for i in range(0, len(sliceable), n)]
def function[chunks, parameter[sliceable, n]]: constant[ returns a list of chunks of any sliceable object, each of max length n :Parameters: - sliceable: (string|list|tuple) any sliceable object - n: max elements of each chunk :Example: >>> chunks([1,2,3,4,5,6,7,8,9,'x'], 4) [[1, 2, 3, 4], [5, 6, 7, 8], [9, 'x']] >>> chunks('123456789X', 3) ['123', '456', '789', 'X'] ] return[<ast.ListComp object at 0x7da20cabe2c0>]
keyword[def] identifier[chunks] ( identifier[sliceable] , identifier[n] ): literal[string] keyword[return] [ identifier[sliceable] [ identifier[i] : identifier[i] + identifier[n] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[sliceable] ), identifier[n] )]
def chunks(sliceable, n):
    """ returns a list of chunks of any sliceable object, each of max length n

    :Parameters:
        - sliceable: (string|list|tuple) any sliceable object
        - n: max elements of each chunk

    :Example:

    >>> chunks([1,2,3,4,5,6,7,8,9,'x'], 4)
    [[1, 2, 3, 4], [5, 6, 7, 8], [9, 'x']]

    >>> chunks('123456789X', 3)
    ['123', '456', '789', 'X']

    """
    return [sliceable[i:i + n] for i in range(0, len(sliceable), n)]
def rmon_alarm_entry_alarm_falling_threshold(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") rmon = ET.SubElement(config, "rmon", xmlns="urn:brocade.com:mgmt:brocade-rmon") alarm_entry = ET.SubElement(rmon, "alarm-entry") alarm_index_key = ET.SubElement(alarm_entry, "alarm-index") alarm_index_key.text = kwargs.pop('alarm_index') alarm_falling_threshold = ET.SubElement(alarm_entry, "alarm-falling-threshold") alarm_falling_threshold.text = kwargs.pop('alarm_falling_threshold') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[rmon_alarm_entry_alarm_falling_threshold, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[rmon] assign[=] call[name[ET].SubElement, parameter[name[config], constant[rmon]]] variable[alarm_entry] assign[=] call[name[ET].SubElement, parameter[name[rmon], constant[alarm-entry]]] variable[alarm_index_key] assign[=] call[name[ET].SubElement, parameter[name[alarm_entry], constant[alarm-index]]] name[alarm_index_key].text assign[=] call[name[kwargs].pop, parameter[constant[alarm_index]]] variable[alarm_falling_threshold] assign[=] call[name[ET].SubElement, parameter[name[alarm_entry], constant[alarm-falling-threshold]]] name[alarm_falling_threshold].text assign[=] call[name[kwargs].pop, parameter[constant[alarm_falling_threshold]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[rmon_alarm_entry_alarm_falling_threshold] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[rmon] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[alarm_entry] = identifier[ET] . identifier[SubElement] ( identifier[rmon] , literal[string] ) identifier[alarm_index_key] = identifier[ET] . identifier[SubElement] ( identifier[alarm_entry] , literal[string] ) identifier[alarm_index_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[alarm_falling_threshold] = identifier[ET] . identifier[SubElement] ( identifier[alarm_entry] , literal[string] ) identifier[alarm_falling_threshold] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def rmon_alarm_entry_alarm_falling_threshold(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') rmon = ET.SubElement(config, 'rmon', xmlns='urn:brocade.com:mgmt:brocade-rmon') alarm_entry = ET.SubElement(rmon, 'alarm-entry') alarm_index_key = ET.SubElement(alarm_entry, 'alarm-index') alarm_index_key.text = kwargs.pop('alarm_index') alarm_falling_threshold = ET.SubElement(alarm_entry, 'alarm-falling-threshold') alarm_falling_threshold.text = kwargs.pop('alarm_falling_threshold') callback = kwargs.pop('callback', self._callback) return callback(config)
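For reference, a sketch of the XML this method builds. Passing ET.tostring as the callback captures the serialized config instead of sending it; `dev` here is a hypothetical object exposing the method above, and the real default callback, self._callback, is what normally ships the config to the device:

import xml.etree.ElementTree as ET

# Illustrative only: serialize the generated config to bytes.
xml_bytes = dev.rmon_alarm_entry_alarm_falling_threshold(
    alarm_index='1',
    alarm_falling_threshold='10',
    callback=ET.tostring,
)
# The serialized tree is roughly:
# <config>
#   <rmon xmlns="urn:brocade.com:mgmt:brocade-rmon">
#     <alarm-entry>
#       <alarm-index>1</alarm-index>
#       <alarm-falling-threshold>10</alarm-falling-threshold>
#     </alarm-entry>
#   </rmon>
# </config>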
def modified(self): 'return datetime.datetime' return dateutil.parser.parse(str(self.f.currentRevision.modified))
def function[modified, parameter[self]]: constant[return datetime.datetime] return[call[name[dateutil].parser.parse, parameter[call[name[str], parameter[name[self].f.currentRevision.modified]]]]]
keyword[def] identifier[modified] ( identifier[self] ): literal[string] keyword[return] identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[str] ( identifier[self] . identifier[f] . identifier[currentRevision] . identifier[modified] ))
def modified(self): """return datetime.datetime""" return dateutil.parser.parse(str(self.f.currentRevision.modified))
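A small sketch of the parsing step modified() relies on; the timestamp string below is made up, since the exact format comes from whatever backend populates currentRevision.modified:

import dateutil.parser

# dateutil's parser accepts most common timestamp formats and returns a
# timezone-aware datetime when the string carries zone information.
dt = dateutil.parser.parse('2019-04-01T12:30:00Z')
assert (dt.year, dt.month, dt.day) == (2019, 4, 1)
assert dt.tzinfo is not None  # 'Z' is parsed as UTC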
def assign(self, expr): """Give *expr* a name.""" name = self.variable() self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) return ast.Name(name, ast.Load())
def function[assign, parameter[self, expr]]: constant[Give *expr* a name.] variable[name] assign[=] call[name[self].variable, parameter[]] call[name[self].statements.append, parameter[call[name[ast].Assign, parameter[list[[<ast.Call object at 0x7da1b1649390>]], name[expr]]]]] return[call[name[ast].Name, parameter[name[name], call[name[ast].Load, parameter[]]]]]
keyword[def] identifier[assign] ( identifier[self] , identifier[expr] ): literal[string] identifier[name] = identifier[self] . identifier[variable] () identifier[self] . identifier[statements] . identifier[append] ( identifier[ast] . identifier[Assign] ([ identifier[ast] . identifier[Name] ( identifier[name] , identifier[ast] . identifier[Store] ())], identifier[expr] )) keyword[return] identifier[ast] . identifier[Name] ( identifier[name] , identifier[ast] . identifier[Load] ())
def assign(self, expr): """Give *expr* a name.""" name = self.variable() self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) return ast.Name(name, ast.Load())
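A minimal self-contained sketch of the pattern assign() implements: bind an expression to a temporary name so later code can reuse it without re-evaluating it. The fixed name '_tmp0' stands in for what self.variable() would generate, and the module wrapper plays the role of self.statements:

import ast

expr = ast.BinOp(ast.Constant(1), ast.Add(), ast.Constant(2))
# Equivalent of appending the Assign statement, then loading the name.
stmt = ast.Assign([ast.Name('_tmp0', ast.Store())], expr)
module = ast.Module(body=[stmt], type_ignores=[])
ast.fix_missing_locations(module)  # fill in lineno/col_offset for compile()
ns = {}
exec(compile(module, '<ast>', 'exec'), ns)
assert ns['_tmp0'] == 3  # the temporary now holds the evaluated expression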