code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def write(cls, output_file, samples, labels, delimiter=None):
    """Write samples to a delimited text file.

    Parameters
    ----------
    output_file : str
        The path of the file to write.
    samples : FieldArray
        Samples to write to file.
    labels : list
        A list of strings to include as header in TXT file.
    delimiter : str, optional
        Delimiter to use in TXT file; defaults to ``cls.delimiter``.
    """
    if delimiter is None:
        delimiter = cls.delimiter
    # The header row is the joined labels; numpy prefixes it with the
    # class's comment marker.
    numpy.savetxt(output_file, samples,
                  comments=cls.comments,
                  header=delimiter.join(labels),
                  delimiter=delimiter)
def function[write, parameter[cls, output_file, samples, labels, delimiter]]: constant[ Writes a text file with samples. Parameters ----------- output_file : str The path of the file to write. samples : FieldArray Samples to write to file. labels : list A list of strings to include as header in TXT file. delimiter : str Delimiter to use in TXT file. ] variable[delimiter] assign[=] <ast.IfExp object at 0x7da2041d86d0> variable[header] assign[=] call[name[delimiter].join, parameter[name[labels]]] call[name[numpy].savetxt, parameter[name[output_file], name[samples]]]
keyword[def] identifier[write] ( identifier[cls] , identifier[output_file] , identifier[samples] , identifier[labels] , identifier[delimiter] = keyword[None] ): literal[string] identifier[delimiter] = identifier[delimiter] keyword[if] identifier[delimiter] keyword[is] keyword[not] keyword[None] keyword[else] identifier[cls] . identifier[delimiter] identifier[header] = identifier[delimiter] . identifier[join] ( identifier[labels] ) identifier[numpy] . identifier[savetxt] ( identifier[output_file] , identifier[samples] , identifier[comments] = identifier[cls] . identifier[comments] , identifier[header] = identifier[header] , identifier[delimiter] = identifier[delimiter] )
def write(cls, output_file, samples, labels, delimiter=None): """ Writes a text file with samples. Parameters ----------- output_file : str The path of the file to write. samples : FieldArray Samples to write to file. labels : list A list of strings to include as header in TXT file. delimiter : str Delimiter to use in TXT file. """ delimiter = delimiter if delimiter is not None else cls.delimiter header = delimiter.join(labels) numpy.savetxt(output_file, samples, comments=cls.comments, header=header, delimiter=delimiter)
def safe_makedirs(fdir):
    """Create a directory (with parents), tolerating prior existence.

    Safe to call when the directory already exists, including when
    another process creates it between the existence check and the
    ``makedirs`` call.

    :param fdir: Directory path to make.
    :return: True (the directory exists on return).
    """
    if not os.path.isdir(fdir):
        try:
            os.makedirs(fdir)
        except FileExistsError:
            # Lost a creation race. The original code matched the Windows
            # error-message text against a WindowsError, which raised
            # NameError on non-Windows platforms where WindowsError is
            # undefined; FileExistsError covers both platforms.
            log.debug('relevant dir already exists')
    return True
def function[safe_makedirs, parameter[fdir]]: constant[ Make an arbitrary directory. This is safe to call for Python 2 users. :param fdir: Directory path to make. :return: ] if call[name[os].path.isdir, parameter[name[fdir]]] begin[:] pass return[constant[True]]
keyword[def] identifier[safe_makedirs] ( identifier[fdir] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[fdir] ): keyword[pass] keyword[else] : keyword[try] : identifier[os] . identifier[makedirs] ( identifier[fdir] ) keyword[except] identifier[WindowsError] keyword[as] identifier[e] : keyword[if] literal[string] keyword[in] identifier[e] : identifier[log] . identifier[debug] ( literal[string] ) keyword[else] : keyword[raise] identifier[WindowsError] ( identifier[e] ) keyword[return] keyword[True]
def safe_makedirs(fdir): """ Make an arbitrary directory. This is safe to call for Python 2 users. :param fdir: Directory path to make. :return: """ if os.path.isdir(fdir): pass # depends on [control=['if'], data=[]] else: # print 'dir already exists: %s' % str(dir) try: os.makedirs(fdir) # depends on [control=['try'], data=[]] except WindowsError as e: if 'Cannot create a file when that file already exists' in e: log.debug('relevant dir already exists') # depends on [control=['if'], data=[]] else: raise WindowsError(e) # depends on [control=['except'], data=['e']] return True
def _set_response_headers(self, response): """Applies all configured headers to the given response.""" options = self._get_local_options() self._set_feature_headers(response.headers, options) self._set_frame_options_headers(response.headers, options) self._set_content_security_policy_headers(response.headers, options) self._set_hsts_headers(response.headers) self._set_referrer_policy_headers(response.headers) return response
def function[_set_response_headers, parameter[self, response]]: constant[Applies all configured headers to the given response.] variable[options] assign[=] call[name[self]._get_local_options, parameter[]] call[name[self]._set_feature_headers, parameter[name[response].headers, name[options]]] call[name[self]._set_frame_options_headers, parameter[name[response].headers, name[options]]] call[name[self]._set_content_security_policy_headers, parameter[name[response].headers, name[options]]] call[name[self]._set_hsts_headers, parameter[name[response].headers]] call[name[self]._set_referrer_policy_headers, parameter[name[response].headers]] return[name[response]]
keyword[def] identifier[_set_response_headers] ( identifier[self] , identifier[response] ): literal[string] identifier[options] = identifier[self] . identifier[_get_local_options] () identifier[self] . identifier[_set_feature_headers] ( identifier[response] . identifier[headers] , identifier[options] ) identifier[self] . identifier[_set_frame_options_headers] ( identifier[response] . identifier[headers] , identifier[options] ) identifier[self] . identifier[_set_content_security_policy_headers] ( identifier[response] . identifier[headers] , identifier[options] ) identifier[self] . identifier[_set_hsts_headers] ( identifier[response] . identifier[headers] ) identifier[self] . identifier[_set_referrer_policy_headers] ( identifier[response] . identifier[headers] ) keyword[return] identifier[response]
def _set_response_headers(self, response): """Applies all configured headers to the given response.""" options = self._get_local_options() self._set_feature_headers(response.headers, options) self._set_frame_options_headers(response.headers, options) self._set_content_security_policy_headers(response.headers, options) self._set_hsts_headers(response.headers) self._set_referrer_policy_headers(response.headers) return response
def _get_args(self, node, keywords):
    """
    Intercept calls to get template and return our own node-specific
    template
    """
    base_args = super(ArcanaSlurmGraphPlugin, self)._get_args(
        node, keywords)
    # Swap the 'template' argument for the node-specific SLURM template;
    # all other arguments pass through untouched.
    return tuple(
        self._processor.slurm_template(node) if kw == 'template' else value
        for kw, value in zip(keywords, base_args))
def function[_get_args, parameter[self, node, keywords]]: constant[ Intercept calls to get template and return our own node-specific template ] variable[args] assign[=] call[call[name[super], parameter[name[ArcanaSlurmGraphPlugin], name[self]]]._get_args, parameter[name[node], name[keywords]]] variable[new_args] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f58de40>, <ast.Name object at 0x7da18f58e0b0>]]] in starred[call[name[zip], parameter[name[keywords], name[args]]]] begin[:] if compare[name[name] equal[==] constant[template]] begin[:] call[name[new_args].append, parameter[call[name[self]._processor.slurm_template, parameter[name[node]]]]] return[call[name[tuple], parameter[name[new_args]]]]
keyword[def] identifier[_get_args] ( identifier[self] , identifier[node] , identifier[keywords] ): literal[string] identifier[args] = identifier[super] ( identifier[ArcanaSlurmGraphPlugin] , identifier[self] ). identifier[_get_args] ( identifier[node] , identifier[keywords] ) identifier[new_args] =[] keyword[for] identifier[name] , identifier[arg] keyword[in] identifier[zip] ( identifier[keywords] , identifier[args] ): keyword[if] identifier[name] == literal[string] : identifier[new_args] . identifier[append] ( identifier[self] . identifier[_processor] . identifier[slurm_template] ( identifier[node] )) keyword[else] : identifier[new_args] . identifier[append] ( identifier[arg] ) keyword[return] identifier[tuple] ( identifier[new_args] )
def _get_args(self, node, keywords): """ Intercept calls to get template and return our own node-specific template """ args = super(ArcanaSlurmGraphPlugin, self)._get_args(node, keywords) # Substitute the template arg with the node-specific one new_args = [] for (name, arg) in zip(keywords, args): if name == 'template': new_args.append(self._processor.slurm_template(node)) # depends on [control=['if'], data=[]] else: new_args.append(arg) # depends on [control=['for'], data=[]] return tuple(new_args)
def _get_result_paths(self, data):
    """Build the dict of result filepaths.

    Keys are 'log' plus the suffix of each database file found on disk
    (e.g. 'psd' or '01.psd'); values are ResultPath objects.
    """
    # Access data through self.Parameters so we know it's been cast
    # to a FilePath.
    workdir = self.WorkingDir
    db_name = self.Parameters['-n'].Value
    result = {
        'log': ResultPath(Path=workdir + self.Parameters['-l'].Value,
                          IsWritten=True),
    }
    # '-p F' selects the nucleotide extension set; anything else the
    # protein set. Both share the same five suffixes.
    prefix = 'n' if self.Parameters['-p'].Value == 'F' else 'p'
    for suffix in ('hr', 'in', 'sq', 'sd', 'si'):
        pattern = workdir + (db_name + '*' + prefix + suffix)
        for file_path in glob(pattern):
            # This will match e.g. nr.01.psd and nr.psd.
            key = file_path.split(db_name + '.')[1]
            result[key] = ResultPath(Path=file_path, IsWritten=True)
    return result
def function[_get_result_paths, parameter[self, data]]: constant[ Build the dict of result filepaths ] variable[wd] assign[=] name[self].WorkingDir variable[db_name] assign[=] call[name[self].Parameters][constant[-n]].Value variable[log_name] assign[=] call[name[self].Parameters][constant[-l]].Value variable[result] assign[=] dictionary[[], []] call[name[result]][constant[log]] assign[=] call[name[ResultPath], parameter[]] if compare[call[name[self].Parameters][constant[-p]].Value equal[==] constant[F]] begin[:] variable[extensions] assign[=] list[[<ast.Constant object at 0x7da1b0b70a60>, <ast.Constant object at 0x7da1b0b73a60>, <ast.Constant object at 0x7da1b0b71240>, <ast.Constant object at 0x7da1b0b70df0>, <ast.Constant object at 0x7da1b0b70e50>]] for taget[name[extension]] in starred[name[extensions]] begin[:] for taget[name[file_path]] in starred[call[name[glob], parameter[binary_operation[name[wd] + binary_operation[binary_operation[name[db_name] + constant[*]] + name[extension]]]]]] begin[:] variable[key] assign[=] call[call[name[file_path].split, parameter[binary_operation[name[db_name] + constant[.]]]]][constant[1]] variable[result_path] assign[=] call[name[ResultPath], parameter[]] call[name[result]][name[key]] assign[=] name[result_path] return[name[result]]
keyword[def] identifier[_get_result_paths] ( identifier[self] , identifier[data] ): literal[string] identifier[wd] = identifier[self] . identifier[WorkingDir] identifier[db_name] = identifier[self] . identifier[Parameters] [ literal[string] ]. identifier[Value] identifier[log_name] = identifier[self] . identifier[Parameters] [ literal[string] ]. identifier[Value] identifier[result] ={} identifier[result] [ literal[string] ]= identifier[ResultPath] ( identifier[Path] = identifier[wd] + identifier[log_name] , identifier[IsWritten] = keyword[True] ) keyword[if] identifier[self] . identifier[Parameters] [ literal[string] ]. identifier[Value] == literal[string] : identifier[extensions] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[else] : identifier[extensions] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[extension] keyword[in] identifier[extensions] : keyword[for] identifier[file_path] keyword[in] identifier[glob] ( identifier[wd] +( identifier[db_name] + literal[string] + identifier[extension] )): identifier[key] = identifier[file_path] . identifier[split] ( identifier[db_name] + literal[string] )[ literal[int] ] identifier[result_path] = identifier[ResultPath] ( identifier[Path] = identifier[file_path] , identifier[IsWritten] = keyword[True] ) identifier[result] [ identifier[key] ]= identifier[result_path] keyword[return] identifier[result]
def _get_result_paths(self, data): """ Build the dict of result filepaths """ # access data through self.Parameters so we know it's been cast # to a FilePath wd = self.WorkingDir db_name = self.Parameters['-n'].Value log_name = self.Parameters['-l'].Value result = {} result['log'] = ResultPath(Path=wd + log_name, IsWritten=True) if self.Parameters['-p'].Value == 'F': extensions = ['nhr', 'nin', 'nsq', 'nsd', 'nsi'] # depends on [control=['if'], data=[]] else: extensions = ['phr', 'pin', 'psq', 'psd', 'psi'] for extension in extensions: for file_path in glob(wd + (db_name + '*' + extension)): # this will match e.g. nr.01.psd and nr.psd key = file_path.split(db_name + '.')[1] result_path = ResultPath(Path=file_path, IsWritten=True) result[key] = result_path # depends on [control=['for'], data=['file_path']] # depends on [control=['for'], data=['extension']] return result
def draw(self, can=None):
    """Draw the chart and all attached plots onto *can*.

    :param can: Canvas to draw on; defaults to the module-level
        default canvas when None.
    """
    # NOTE(review): '== None' and 'type(...) != ListType' below are
    # Python 2-era idioms ('is None', isinstance); left byte-identical.
    if can == None:
        can = canvas.default_canvas()
    # Validate the chart and every plot before touching the canvas.
    assert self.check_integrity()
    for plot in self.__plots:
        plot.check_integrity()
    # Resolve axis ranges and grid intervals for both axes.
    self.x_range, self.x_grid_interval = \
        self.__get_data_range(self.x_range, 'X',
                              self.x_coord,
                              self.x_grid_interval)
    self.y_range, self.y_grid_interval = \
        self.__get_data_range(self.y_range, 'Y',
                              self.y_coord,
                              self.y_grid_interval)
    # Paint the chart background/border rectangle at self.loc sized
    # self.size.
    can.rectangle(self.border_line_style, self.bg_style,
                  self.loc[0], self.loc[1],
                  self.loc[0] + self.size[0], self.loc[1] + self.size[1])
    # Grids drawn here end up *under* the plots; the *_grid_over_plot
    # flags defer that drawing until after the plots are rendered.
    if not self.x_grid_over_plot:
        self.__draw_x_grid_and_axis(can)
    if not self.y_grid_over_plot:
        self.__draw_y_grid_and_axis(can)
    # Clip plot drawing to the (theme-adjusted) chart bounding box.
    clipbox = theme.adjust_bounding_box([self.loc[0], self.loc[1],
                                         self.loc[0] + self.size[0],
                                         self.loc[1] + self.size[1]])
    can.clip(clipbox[0], clipbox[1], clipbox[2], clipbox[3])
    for plot in self.__plots:
        plot.draw(self, can)
    can.endclip()
    if self.x_grid_over_plot:
        self.__draw_x_grid_and_axis(can)
    if self.y_grid_over_plot:
        self.__draw_y_grid_and_axis(can)
    # _dummy_legend looks like a "not explicitly configured" sentinel
    # that is replaced by the default legend.T(); an explicitly falsy
    # legend suppresses legend drawing entirely — confirm against the
    # class definition.
    if self.legend == _dummy_legend:
        self.legend = legend.T()
    if self.legend:
        legends = []
        for plot in self.__plots:
            entry = plot.get_legend_entry()
            if entry == None:
                pass
            elif type(entry) != ListType:
                legends.append(entry)
            else:
                # A plot may contribute several legend entries at once.
                for e in entry:
                    legends.append(e)
        self.legend.draw(self, legends, can)
def function[draw, parameter[self, can]]: constant[Draw the charts.] if compare[name[can] equal[==] constant[None]] begin[:] variable[can] assign[=] call[name[canvas].default_canvas, parameter[]] assert[call[name[self].check_integrity, parameter[]]] for taget[name[plot]] in starred[name[self].__plots] begin[:] call[name[plot].check_integrity, parameter[]] <ast.Tuple object at 0x7da204620730> assign[=] call[name[self].__get_data_range, parameter[name[self].x_range, constant[X], name[self].x_coord, name[self].x_grid_interval]] <ast.Tuple object at 0x7da18fe92a10> assign[=] call[name[self].__get_data_range, parameter[name[self].y_range, constant[Y], name[self].y_coord, name[self].y_grid_interval]] call[name[can].rectangle, parameter[name[self].border_line_style, name[self].bg_style, call[name[self].loc][constant[0]], call[name[self].loc][constant[1]], binary_operation[call[name[self].loc][constant[0]] + call[name[self].size][constant[0]]], binary_operation[call[name[self].loc][constant[1]] + call[name[self].size][constant[1]]]]] if <ast.UnaryOp object at 0x7da18fe91120> begin[:] call[name[self].__draw_x_grid_and_axis, parameter[name[can]]] if <ast.UnaryOp object at 0x7da18fe909a0> begin[:] call[name[self].__draw_y_grid_and_axis, parameter[name[can]]] variable[clipbox] assign[=] call[name[theme].adjust_bounding_box, parameter[list[[<ast.Subscript object at 0x7da1b0ae36d0>, <ast.Subscript object at 0x7da1b0ae1d20>, <ast.BinOp object at 0x7da1b0ae07f0>, <ast.BinOp object at 0x7da1b0ae2290>]]]] call[name[can].clip, parameter[call[name[clipbox]][constant[0]], call[name[clipbox]][constant[1]], call[name[clipbox]][constant[2]], call[name[clipbox]][constant[3]]]] for taget[name[plot]] in starred[name[self].__plots] begin[:] call[name[plot].draw, parameter[name[self], name[can]]] call[name[can].endclip, parameter[]] if name[self].x_grid_over_plot begin[:] call[name[self].__draw_x_grid_and_axis, parameter[name[can]]] if name[self].y_grid_over_plot begin[:] 
call[name[self].__draw_y_grid_and_axis, parameter[name[can]]] if compare[name[self].legend equal[==] name[_dummy_legend]] begin[:] name[self].legend assign[=] call[name[legend].T, parameter[]] if name[self].legend begin[:] variable[legends] assign[=] list[[]] for taget[name[plot]] in starred[name[self].__plots] begin[:] variable[entry] assign[=] call[name[plot].get_legend_entry, parameter[]] if compare[name[entry] equal[==] constant[None]] begin[:] pass call[name[self].legend.draw, parameter[name[self], name[legends], name[can]]]
keyword[def] identifier[draw] ( identifier[self] , identifier[can] = keyword[None] ): literal[string] keyword[if] identifier[can] == keyword[None] : identifier[can] = identifier[canvas] . identifier[default_canvas] () keyword[assert] identifier[self] . identifier[check_integrity] () keyword[for] identifier[plot] keyword[in] identifier[self] . identifier[__plots] : identifier[plot] . identifier[check_integrity] () identifier[self] . identifier[x_range] , identifier[self] . identifier[x_grid_interval] = identifier[self] . identifier[__get_data_range] ( identifier[self] . identifier[x_range] , literal[string] , identifier[self] . identifier[x_coord] , identifier[self] . identifier[x_grid_interval] ) identifier[self] . identifier[y_range] , identifier[self] . identifier[y_grid_interval] = identifier[self] . identifier[__get_data_range] ( identifier[self] . identifier[y_range] , literal[string] , identifier[self] . identifier[y_coord] , identifier[self] . identifier[y_grid_interval] ) identifier[can] . identifier[rectangle] ( identifier[self] . identifier[border_line_style] , identifier[self] . identifier[bg_style] , identifier[self] . identifier[loc] [ literal[int] ], identifier[self] . identifier[loc] [ literal[int] ], identifier[self] . identifier[loc] [ literal[int] ]+ identifier[self] . identifier[size] [ literal[int] ], identifier[self] . identifier[loc] [ literal[int] ]+ identifier[self] . identifier[size] [ literal[int] ]) keyword[if] keyword[not] identifier[self] . identifier[x_grid_over_plot] : identifier[self] . identifier[__draw_x_grid_and_axis] ( identifier[can] ) keyword[if] keyword[not] identifier[self] . identifier[y_grid_over_plot] : identifier[self] . identifier[__draw_y_grid_and_axis] ( identifier[can] ) identifier[clipbox] = identifier[theme] . identifier[adjust_bounding_box] ([ identifier[self] . identifier[loc] [ literal[int] ], identifier[self] . identifier[loc] [ literal[int] ], identifier[self] . 
identifier[loc] [ literal[int] ]+ identifier[self] . identifier[size] [ literal[int] ], identifier[self] . identifier[loc] [ literal[int] ]+ identifier[self] . identifier[size] [ literal[int] ]]) identifier[can] . identifier[clip] ( identifier[clipbox] [ literal[int] ], identifier[clipbox] [ literal[int] ], identifier[clipbox] [ literal[int] ], identifier[clipbox] [ literal[int] ]) keyword[for] identifier[plot] keyword[in] identifier[self] . identifier[__plots] : identifier[plot] . identifier[draw] ( identifier[self] , identifier[can] ) identifier[can] . identifier[endclip] () keyword[if] identifier[self] . identifier[x_grid_over_plot] : identifier[self] . identifier[__draw_x_grid_and_axis] ( identifier[can] ) keyword[if] identifier[self] . identifier[y_grid_over_plot] : identifier[self] . identifier[__draw_y_grid_and_axis] ( identifier[can] ) keyword[if] identifier[self] . identifier[legend] == identifier[_dummy_legend] : identifier[self] . identifier[legend] = identifier[legend] . identifier[T] () keyword[if] identifier[self] . identifier[legend] : identifier[legends] =[] keyword[for] identifier[plot] keyword[in] identifier[self] . identifier[__plots] : identifier[entry] = identifier[plot] . identifier[get_legend_entry] () keyword[if] identifier[entry] == keyword[None] : keyword[pass] keyword[elif] identifier[type] ( identifier[entry] )!= identifier[ListType] : identifier[legends] . identifier[append] ( identifier[entry] ) keyword[else] : keyword[for] identifier[e] keyword[in] identifier[entry] : identifier[legends] . identifier[append] ( identifier[e] ) identifier[self] . identifier[legend] . identifier[draw] ( identifier[self] , identifier[legends] , identifier[can] )
def draw(self, can=None): """Draw the charts.""" if can == None: can = canvas.default_canvas() # depends on [control=['if'], data=['can']] assert self.check_integrity() for plot in self.__plots: plot.check_integrity() # depends on [control=['for'], data=['plot']] (self.x_range, self.x_grid_interval) = self.__get_data_range(self.x_range, 'X', self.x_coord, self.x_grid_interval) (self.y_range, self.y_grid_interval) = self.__get_data_range(self.y_range, 'Y', self.y_coord, self.y_grid_interval) can.rectangle(self.border_line_style, self.bg_style, self.loc[0], self.loc[1], self.loc[0] + self.size[0], self.loc[1] + self.size[1]) if not self.x_grid_over_plot: self.__draw_x_grid_and_axis(can) # depends on [control=['if'], data=[]] if not self.y_grid_over_plot: self.__draw_y_grid_and_axis(can) # depends on [control=['if'], data=[]] clipbox = theme.adjust_bounding_box([self.loc[0], self.loc[1], self.loc[0] + self.size[0], self.loc[1] + self.size[1]]) can.clip(clipbox[0], clipbox[1], clipbox[2], clipbox[3]) for plot in self.__plots: plot.draw(self, can) # depends on [control=['for'], data=['plot']] can.endclip() if self.x_grid_over_plot: self.__draw_x_grid_and_axis(can) # depends on [control=['if'], data=[]] if self.y_grid_over_plot: self.__draw_y_grid_and_axis(can) # depends on [control=['if'], data=[]] if self.legend == _dummy_legend: self.legend = legend.T() # depends on [control=['if'], data=[]] if self.legend: legends = [] for plot in self.__plots: entry = plot.get_legend_entry() if entry == None: pass # depends on [control=['if'], data=[]] elif type(entry) != ListType: legends.append(entry) # depends on [control=['if'], data=[]] else: for e in entry: legends.append(e) # depends on [control=['for'], data=['e']] # depends on [control=['for'], data=['plot']] self.legend.draw(self, legends, can) # depends on [control=['if'], data=[]]
def visit_BinOp(self, node):
    """
    Return type depends on both operands of the binary operation.
    """
    left_types = self.visit(node.left)
    right_types = self.visit(node.right)
    # Each candidate type of the result is the union of one candidate
    # from each operand; deduplicate via the set before listing.
    merged = {lhs | rhs for lhs in left_types for rhs in right_types}
    return list(merged)
def function[visit_BinOp, parameter[self, node]]: constant[ Return type depend from both operand of the binary operation. ] variable[args] assign[=] <ast.ListComp object at 0x7da2045654e0> return[call[name[list], parameter[<ast.SetComp object at 0x7da204567ca0>]]]
keyword[def] identifier[visit_BinOp] ( identifier[self] , identifier[node] ): literal[string] identifier[args] =[ identifier[self] . identifier[visit] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] ( identifier[node] . identifier[left] , identifier[node] . identifier[right] )] keyword[return] identifier[list] ({ identifier[frozenset] . identifier[union] (* identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[itertools] . identifier[product] (* identifier[args] )})
def visit_BinOp(self, node): """ Return type depend from both operand of the binary operation. """ args = [self.visit(arg) for arg in (node.left, node.right)] return list({frozenset.union(*x) for x in itertools.product(*args)})
def reject(self, pn_condition=None):
    """See Link Reject, AMQP1.0 spec.

    :param pn_condition: optional condition describing why the link is
        being rejected; forwarded unchanged to the superclass.
    """
    # Reset the terminus type to UNSPECIFIED before delegating the
    # rejection to the base Link class (per the AMQP 1.0 link-reject
    # behaviour the docstring points at).
    self._pn_link.target.type = proton.Terminus.UNSPECIFIED
    super(ReceiverLink, self).reject(pn_condition)
def function[reject, parameter[self, pn_condition]]: constant[See Link Reject, AMQP1.0 spec.] name[self]._pn_link.target.type assign[=] name[proton].Terminus.UNSPECIFIED call[call[name[super], parameter[name[ReceiverLink], name[self]]].reject, parameter[name[pn_condition]]]
keyword[def] identifier[reject] ( identifier[self] , identifier[pn_condition] = keyword[None] ): literal[string] identifier[self] . identifier[_pn_link] . identifier[target] . identifier[type] = identifier[proton] . identifier[Terminus] . identifier[UNSPECIFIED] identifier[super] ( identifier[ReceiverLink] , identifier[self] ). identifier[reject] ( identifier[pn_condition] )
def reject(self, pn_condition=None): """See Link Reject, AMQP1.0 spec.""" self._pn_link.target.type = proton.Terminus.UNSPECIFIED super(ReceiverLink, self).reject(pn_condition)
def _set_fc_port(self, v, load=False):
    """
    Setter method for fc_port, mapped from YANG variable
    /interface/fc_port (list).

    If this variable is read-only (config: false) in the source YANG
    file, then _set_fc_port is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_fc_port() directly.

    YANG Description: The list of fibrechannel interfaces in the managed
    device. Each row represents a fibrechannel interface. The list
    provides a way to discover all the fibrechannel interfaces in a
    managed device.
    """
    # Unwrap values that carry their own YANG type-coercion helper.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the supplied value into the generated YANG list type.
        # The keyword arguments below are emitted by pyangbind from the
        # YANG model and should not be edited by hand.
        t = YANGDynClass(v,base=YANGListType("name",fc_port.fc_port, yang_name="fc-port", rest_name="FibreChannel", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'The list of fibrechannel interfaces.', u'cli-no-key-completion': None, u'alt-name': u'FibreChannel', u'sort-priority': u'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL', u'cli-suppress-no': None, u'cli-suppress-show-path': None, u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-custom-range-actionpoint': u'FcRangeCliActionpoint', u'cli-custom-range-enumerator': u'FcRangeCliActionpoint', u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-incomplete-no': None, u'callpoint': u'interface_fcport', u'cli-mode-name': u'conf-if-fi-$(name)'}}), is_container='list', yang_name="fc-port", rest_name="FibreChannel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The list of fibrechannel interfaces.', u'cli-no-key-completion': None, u'alt-name': u'FibreChannel', u'sort-priority': u'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL', u'cli-suppress-no': None, u'cli-suppress-show-path': None, u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-custom-range-actionpoint': u'FcRangeCliActionpoint', u'cli-custom-range-enumerator': u'FcRangeCliActionpoint', u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-incomplete-no': None, u'callpoint': u'interface_fcport', u'cli-mode-name': u'conf-if-fi-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with a structured payload describing the expected
        # YANG type and the generating expression, for diagnostics.
        raise ValueError({
            'error-string': """fc_port must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("name",fc_port.fc_port, yang_name="fc-port", rest_name="FibreChannel", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'The list of fibrechannel interfaces.', u'cli-no-key-completion': None, u'alt-name': u'FibreChannel', u'sort-priority': u'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL', u'cli-suppress-no': None, u'cli-suppress-show-path': None, u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-custom-range-actionpoint': u'FcRangeCliActionpoint', u'cli-custom-range-enumerator': u'FcRangeCliActionpoint', u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-incomplete-no': None, u'callpoint': u'interface_fcport', u'cli-mode-name': u'conf-if-fi-$(name)'}}), is_container='list', yang_name="fc-port", rest_name="FibreChannel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The list of fibrechannel interfaces.', u'cli-no-key-completion': None, u'alt-name': u'FibreChannel', u'sort-priority': u'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL', u'cli-suppress-no': None, u'cli-suppress-show-path': None, u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-custom-range-actionpoint': u'FcRangeCliActionpoint', u'cli-custom-range-enumerator': u'FcRangeCliActionpoint', u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-incomplete-no': None, u'callpoint': u'interface_fcport', u'cli-mode-name': u'conf-if-fi-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)""",
          })

    # Store the coerced value and run the registered change hook, if any.
    self.__fc_port = t
    if hasattr(self, '_set'):
        self._set()
def function[_set_fc_port, parameter[self, v, load]]: constant[ Setter method for fc_port, mapped from YANG variable /interface/fc_port (list) If this variable is read-only (config: false) in the source YANG file, then _set_fc_port is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fc_port() directly. YANG Description: The list of fibrechannel interfaces in the managed device. Each row represents a fibrechannel interface. The list provides a way to discover all the fibrechannel interfaces in a managed device. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b24a0ca0> name[self].__fc_port assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_fc_port] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[fc_port] . identifier[fc_port] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[True] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . 
identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__fc_port] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_fc_port(self, v, load=False): """ Setter method for fc_port, mapped from YANG variable /interface/fc_port (list) If this variable is read-only (config: false) in the source YANG file, then _set_fc_port is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fc_port() directly. YANG Description: The list of fibrechannel interfaces in the managed device. Each row represents a fibrechannel interface. The list provides a way to discover all the fibrechannel interfaces in a managed device. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('name', fc_port.fc_port, yang_name='fc-port', rest_name='FibreChannel', parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'info': u'The list of fibrechannel interfaces.', u'cli-no-key-completion': None, u'alt-name': u'FibreChannel', u'sort-priority': u'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL', u'cli-suppress-no': None, u'cli-suppress-show-path': None, u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-custom-range-actionpoint': u'FcRangeCliActionpoint', u'cli-custom-range-enumerator': u'FcRangeCliActionpoint', u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-incomplete-no': None, u'callpoint': u'interface_fcport', u'cli-mode-name': u'conf-if-fi-$(name)'}}), is_container='list', yang_name='fc-port', rest_name='FibreChannel', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The list of fibrechannel interfaces.', u'cli-no-key-completion': None, u'alt-name': u'FibreChannel', u'sort-priority': u'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL', u'cli-suppress-no': None, u'cli-suppress-show-path': None, u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-custom-range-actionpoint': 
u'FcRangeCliActionpoint', u'cli-custom-range-enumerator': u'FcRangeCliActionpoint', u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-incomplete-no': None, u'callpoint': u'interface_fcport', u'cli-mode-name': u'conf-if-fi-$(name)'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'fc_port must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("name",fc_port.fc_port, yang_name="fc-port", rest_name="FibreChannel", parent=self, is_container=\'list\', user_ordered=True, path_helper=self._path_helper, yang_keys=\'name\', extensions={u\'tailf-common\': {u\'info\': u\'The list of fibrechannel interfaces.\', u\'cli-no-key-completion\': None, u\'alt-name\': u\'FibreChannel\', u\'sort-priority\': u\'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL\', u\'cli-suppress-no\': None, u\'cli-suppress-show-path\': None, u\'display-when\': u\'/vcsmode/vcs-mode = "true"\', u\'cli-custom-range-actionpoint\': u\'FcRangeCliActionpoint\', u\'cli-custom-range-enumerator\': u\'FcRangeCliActionpoint\', u\'cli-suppress-key-abbreviation\': None, u\'cli-no-match-completion\': None, u\'cli-incomplete-no\': None, u\'callpoint\': u\'interface_fcport\', u\'cli-mode-name\': u\'conf-if-fi-$(name)\'}}), is_container=\'list\', yang_name="fc-port", rest_name="FibreChannel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'The list of fibrechannel interfaces.\', u\'cli-no-key-completion\': None, u\'alt-name\': u\'FibreChannel\', u\'sort-priority\': u\'RUNNCFG_LEVEL_INTERFACE_TYPE_PORT_CHANNEL\', u\'cli-suppress-no\': None, u\'cli-suppress-show-path\': None, u\'display-when\': u\'/vcsmode/vcs-mode = "true"\', u\'cli-custom-range-actionpoint\': 
u\'FcRangeCliActionpoint\', u\'cli-custom-range-enumerator\': u\'FcRangeCliActionpoint\', u\'cli-suppress-key-abbreviation\': None, u\'cli-no-match-completion\': None, u\'cli-incomplete-no\': None, u\'callpoint\': u\'interface_fcport\', u\'cli-mode-name\': u\'conf-if-fi-$(name)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-interface\', defining_module=\'brocade-interface\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__fc_port = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def model_delta(old_model, new_model): """ Provides delta/difference between two models :param old: The old state of the model instance. :type old: Model :param new: The new state of the model instance. :type new: Model :return: A dictionary with the names of the changed fields as keys and a two tuple of the old and new field values as value. :rtype: dict """ delta = {} fields = new_model._meta.fields for field in fields: old_value = get_field_value(old_model, field) new_value = get_field_value(new_model, field) if old_value != new_value: delta[field.name] = [smart_text(old_value), smart_text(new_value)] if len(delta) == 0: delta = None return delta
def function[model_delta, parameter[old_model, new_model]]: constant[ Provides delta/difference between two models :param old: The old state of the model instance. :type old: Model :param new: The new state of the model instance. :type new: Model :return: A dictionary with the names of the changed fields as keys and a two tuple of the old and new field values as value. :rtype: dict ] variable[delta] assign[=] dictionary[[], []] variable[fields] assign[=] name[new_model]._meta.fields for taget[name[field]] in starred[name[fields]] begin[:] variable[old_value] assign[=] call[name[get_field_value], parameter[name[old_model], name[field]]] variable[new_value] assign[=] call[name[get_field_value], parameter[name[new_model], name[field]]] if compare[name[old_value] not_equal[!=] name[new_value]] begin[:] call[name[delta]][name[field].name] assign[=] list[[<ast.Call object at 0x7da1b16bd060>, <ast.Call object at 0x7da1b16bee60>]] if compare[call[name[len], parameter[name[delta]]] equal[==] constant[0]] begin[:] variable[delta] assign[=] constant[None] return[name[delta]]
keyword[def] identifier[model_delta] ( identifier[old_model] , identifier[new_model] ): literal[string] identifier[delta] ={} identifier[fields] = identifier[new_model] . identifier[_meta] . identifier[fields] keyword[for] identifier[field] keyword[in] identifier[fields] : identifier[old_value] = identifier[get_field_value] ( identifier[old_model] , identifier[field] ) identifier[new_value] = identifier[get_field_value] ( identifier[new_model] , identifier[field] ) keyword[if] identifier[old_value] != identifier[new_value] : identifier[delta] [ identifier[field] . identifier[name] ]=[ identifier[smart_text] ( identifier[old_value] ), identifier[smart_text] ( identifier[new_value] )] keyword[if] identifier[len] ( identifier[delta] )== literal[int] : identifier[delta] = keyword[None] keyword[return] identifier[delta]
def model_delta(old_model, new_model): """ Provides delta/difference between two models :param old: The old state of the model instance. :type old: Model :param new: The new state of the model instance. :type new: Model :return: A dictionary with the names of the changed fields as keys and a two tuple of the old and new field values as value. :rtype: dict """ delta = {} fields = new_model._meta.fields for field in fields: old_value = get_field_value(old_model, field) new_value = get_field_value(new_model, field) if old_value != new_value: delta[field.name] = [smart_text(old_value), smart_text(new_value)] # depends on [control=['if'], data=['old_value', 'new_value']] # depends on [control=['for'], data=['field']] if len(delta) == 0: delta = None # depends on [control=['if'], data=[]] return delta
def _print_verbose(*args): """Print diagnostic message.""" try: return '{0} {1} ({2})'.format(args[0], args[1], ctime()) except IndexError: return '{0} ({1})'.format(args[0], ctime())
def function[_print_verbose, parameter[]]: constant[Print diagnostic message.] <ast.Try object at 0x7da1b0673820>
keyword[def] identifier[_print_verbose] (* identifier[args] ): literal[string] keyword[try] : keyword[return] literal[string] . identifier[format] ( identifier[args] [ literal[int] ], identifier[args] [ literal[int] ], identifier[ctime] ()) keyword[except] identifier[IndexError] : keyword[return] literal[string] . identifier[format] ( identifier[args] [ literal[int] ], identifier[ctime] ())
def _print_verbose(*args): """Print diagnostic message.""" try: return '{0} {1} ({2})'.format(args[0], args[1], ctime()) # depends on [control=['try'], data=[]] except IndexError: return '{0} ({1})'.format(args[0], ctime()) # depends on [control=['except'], data=[]]
def getWidth(self): ''' Gets the width. ''' if self.useUiAutomator: return self.map['bounds'][1][0] - self.map['bounds'][0][0] else: try: return int(self.map[self.widthProperty]) except: return 0
def function[getWidth, parameter[self]]: constant[ Gets the width. ] if name[self].useUiAutomator begin[:] return[binary_operation[call[call[call[name[self].map][constant[bounds]]][constant[1]]][constant[0]] - call[call[call[name[self].map][constant[bounds]]][constant[0]]][constant[0]]]]
keyword[def] identifier[getWidth] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[useUiAutomator] : keyword[return] identifier[self] . identifier[map] [ literal[string] ][ literal[int] ][ literal[int] ]- identifier[self] . identifier[map] [ literal[string] ][ literal[int] ][ literal[int] ] keyword[else] : keyword[try] : keyword[return] identifier[int] ( identifier[self] . identifier[map] [ identifier[self] . identifier[widthProperty] ]) keyword[except] : keyword[return] literal[int]
def getWidth(self): """ Gets the width. """ if self.useUiAutomator: return self.map['bounds'][1][0] - self.map['bounds'][0][0] # depends on [control=['if'], data=[]] else: try: return int(self.map[self.widthProperty]) # depends on [control=['try'], data=[]] except: return 0 # depends on [control=['except'], data=[]]
async def triggerHook(self, *args, **kwargs): """ Trigger a hook This endpoint will trigger the creation of a task from a hook definition. The HTTP payload must match the hooks `triggerSchema`. If it does, it is provided as the `payload` property of the JSON-e context used to render the task template. This method takes input: ``v1/trigger-hook.json#`` This method gives output: ``v1/trigger-hook-response.json#`` This method is ``stable`` """ return await self._makeApiCall(self.funcinfo["triggerHook"], *args, **kwargs)
<ast.AsyncFunctionDef object at 0x7da204344550>
keyword[async] keyword[def] identifier[triggerHook] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] )
async def triggerHook(self, *args, **kwargs): """ Trigger a hook This endpoint will trigger the creation of a task from a hook definition. The HTTP payload must match the hooks `triggerSchema`. If it does, it is provided as the `payload` property of the JSON-e context used to render the task template. This method takes input: ``v1/trigger-hook.json#`` This method gives output: ``v1/trigger-hook-response.json#`` This method is ``stable`` """ return await self._makeApiCall(self.funcinfo['triggerHook'], *args, **kwargs)
def _uniq(self): """ Create a list of all the pixels that cover this region. This list contains overlapping pixels of different orders. Returns ------- pix : list A list of HEALPix pixel numbers. """ pd = [] for d in range(1, self.maxdepth): pd.extend(map(lambda x: int(4**(d+1) + x), self.pixeldict[d])) return sorted(pd)
def function[_uniq, parameter[self]]: constant[ Create a list of all the pixels that cover this region. This list contains overlapping pixels of different orders. Returns ------- pix : list A list of HEALPix pixel numbers. ] variable[pd] assign[=] list[[]] for taget[name[d]] in starred[call[name[range], parameter[constant[1], name[self].maxdepth]]] begin[:] call[name[pd].extend, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da20c794f70>, call[name[self].pixeldict][name[d]]]]]] return[call[name[sorted], parameter[name[pd]]]]
keyword[def] identifier[_uniq] ( identifier[self] ): literal[string] identifier[pd] =[] keyword[for] identifier[d] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[maxdepth] ): identifier[pd] . identifier[extend] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[int] ( literal[int] **( identifier[d] + literal[int] )+ identifier[x] ), identifier[self] . identifier[pixeldict] [ identifier[d] ])) keyword[return] identifier[sorted] ( identifier[pd] )
def _uniq(self): """ Create a list of all the pixels that cover this region. This list contains overlapping pixels of different orders. Returns ------- pix : list A list of HEALPix pixel numbers. """ pd = [] for d in range(1, self.maxdepth): pd.extend(map(lambda x: int(4 ** (d + 1) + x), self.pixeldict[d])) # depends on [control=['for'], data=['d']] return sorted(pd)
def arg(*args, **kwargs): """Annotate a function by adding the args/kwargs to the meta-data. This appends an Argparse "argument" to the function's ``ARGPARSE_ARGS_LIST`` attribute, creating ``ARGPARSE_ARGS_LIST`` if it does not already exist. Aside from that, it returns the decorated function unmodified, and unwrapped. The "arguments" are simply ``(args, kwargs)`` tuples which will be passed to the Argparse parser created from the function as ``parser.add_argument(*args, **kwargs)``. `argparse.ArgumentParser.add_argument <https://docs.python.org/2/library/argparse.html#the-add-argument-method>`_ should be consulted for up-to-date documentation on the accepted arguments. For convenience, a list has been included here. Args ---- name/flags : str or list Either a name or a list of (positional) option strings, e.g. ('foo') or ('-f', '--foo'). action : str The basic type of action to be taken when this argument is encountered at the command line. nargs : str The number of command-line arguments that should be consumed. const A constant value required by some action and nargs selections. default The value produced if the argument is absent from the command line. type : type The type to which the command-line argument should be converted. choices A container of the allowable values for the argument. required : bool Whether or not the command-line option may be omitted (optionals only). help : str A brief description of what the argument does. metavar : str A name for the argument in usage messages. dest : str The name of the attribute to be added to the object returned by parse_args(). Example ------- .. testsetup:: mycommand = Command(name='mycommand') .. testcode:: @command(name='echo') @arg('-n', '--num', type=int, default=42) @arg('-s', '--some-switch', action='store_false') @arg('foo') def echo(foo, num, some_switch): print foo, num .. 
doctest:: >>> echo_subcommand = mycommand.add_subcommand(echo) >>> mycommand.init() >>> mycommand.parse_args(['echo', 'hi', '-n', '42']) hi 42 See also -------- `argparse.ArgumentParser.add_argument <https://docs.python.org/2/library/argparse.html#the-add-argument-method>`_ """ def annotate(func): # Get the list of argparse args already added to func (if any). argparse_args_list = getattr(func, 'ARGPARSE_ARGS_LIST', []) # Since we're only annotating (not wrapping) the function, appending # the argument to the list would result in the decorators being applied # in reverse order. To prevent that, we simply add to the beginning. argparse_args_list.insert(0, (args, kwargs)) setattr(func, 'ARGPARSE_ARGS_LIST', argparse_args_list) return func return annotate
def function[arg, parameter[]]: constant[Annotate a function by adding the args/kwargs to the meta-data. This appends an Argparse "argument" to the function's ``ARGPARSE_ARGS_LIST`` attribute, creating ``ARGPARSE_ARGS_LIST`` if it does not already exist. Aside from that, it returns the decorated function unmodified, and unwrapped. The "arguments" are simply ``(args, kwargs)`` tuples which will be passed to the Argparse parser created from the function as ``parser.add_argument(*args, **kwargs)``. `argparse.ArgumentParser.add_argument <https://docs.python.org/2/library/argparse.html#the-add-argument-method>`_ should be consulted for up-to-date documentation on the accepted arguments. For convenience, a list has been included here. Args ---- name/flags : str or list Either a name or a list of (positional) option strings, e.g. ('foo') or ('-f', '--foo'). action : str The basic type of action to be taken when this argument is encountered at the command line. nargs : str The number of command-line arguments that should be consumed. const A constant value required by some action and nargs selections. default The value produced if the argument is absent from the command line. type : type The type to which the command-line argument should be converted. choices A container of the allowable values for the argument. required : bool Whether or not the command-line option may be omitted (optionals only). help : str A brief description of what the argument does. metavar : str A name for the argument in usage messages. dest : str The name of the attribute to be added to the object returned by parse_args(). Example ------- .. testsetup:: mycommand = Command(name='mycommand') .. testcode:: @command(name='echo') @arg('-n', '--num', type=int, default=42) @arg('-s', '--some-switch', action='store_false') @arg('foo') def echo(foo, num, some_switch): print foo, num .. 
doctest:: >>> echo_subcommand = mycommand.add_subcommand(echo) >>> mycommand.init() >>> mycommand.parse_args(['echo', 'hi', '-n', '42']) hi 42 See also -------- `argparse.ArgumentParser.add_argument <https://docs.python.org/2/library/argparse.html#the-add-argument-method>`_ ] def function[annotate, parameter[func]]: variable[argparse_args_list] assign[=] call[name[getattr], parameter[name[func], constant[ARGPARSE_ARGS_LIST], list[[]]]] call[name[argparse_args_list].insert, parameter[constant[0], tuple[[<ast.Name object at 0x7da1b157abf0>, <ast.Name object at 0x7da1b1578520>]]]] call[name[setattr], parameter[name[func], constant[ARGPARSE_ARGS_LIST], name[argparse_args_list]]] return[name[func]] return[name[annotate]]
keyword[def] identifier[arg] (* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[annotate] ( identifier[func] ): identifier[argparse_args_list] = identifier[getattr] ( identifier[func] , literal[string] ,[]) identifier[argparse_args_list] . identifier[insert] ( literal[int] ,( identifier[args] , identifier[kwargs] )) identifier[setattr] ( identifier[func] , literal[string] , identifier[argparse_args_list] ) keyword[return] identifier[func] keyword[return] identifier[annotate]
def arg(*args, **kwargs): """Annotate a function by adding the args/kwargs to the meta-data. This appends an Argparse "argument" to the function's ``ARGPARSE_ARGS_LIST`` attribute, creating ``ARGPARSE_ARGS_LIST`` if it does not already exist. Aside from that, it returns the decorated function unmodified, and unwrapped. The "arguments" are simply ``(args, kwargs)`` tuples which will be passed to the Argparse parser created from the function as ``parser.add_argument(*args, **kwargs)``. `argparse.ArgumentParser.add_argument <https://docs.python.org/2/library/argparse.html#the-add-argument-method>`_ should be consulted for up-to-date documentation on the accepted arguments. For convenience, a list has been included here. Args ---- name/flags : str or list Either a name or a list of (positional) option strings, e.g. ('foo') or ('-f', '--foo'). action : str The basic type of action to be taken when this argument is encountered at the command line. nargs : str The number of command-line arguments that should be consumed. const A constant value required by some action and nargs selections. default The value produced if the argument is absent from the command line. type : type The type to which the command-line argument should be converted. choices A container of the allowable values for the argument. required : bool Whether or not the command-line option may be omitted (optionals only). help : str A brief description of what the argument does. metavar : str A name for the argument in usage messages. dest : str The name of the attribute to be added to the object returned by parse_args(). Example ------- .. testsetup:: mycommand = Command(name='mycommand') .. testcode:: @command(name='echo') @arg('-n', '--num', type=int, default=42) @arg('-s', '--some-switch', action='store_false') @arg('foo') def echo(foo, num, some_switch): print foo, num .. 
doctest:: >>> echo_subcommand = mycommand.add_subcommand(echo) >>> mycommand.init() >>> mycommand.parse_args(['echo', 'hi', '-n', '42']) hi 42 See also -------- `argparse.ArgumentParser.add_argument <https://docs.python.org/2/library/argparse.html#the-add-argument-method>`_ """ def annotate(func): # Get the list of argparse args already added to func (if any). argparse_args_list = getattr(func, 'ARGPARSE_ARGS_LIST', []) # Since we're only annotating (not wrapping) the function, appending # the argument to the list would result in the decorators being applied # in reverse order. To prevent that, we simply add to the beginning. argparse_args_list.insert(0, (args, kwargs)) setattr(func, 'ARGPARSE_ARGS_LIST', argparse_args_list) return func return annotate
def _drop_definition(self): """Remove footer definition (footer part) associated with this section.""" rId = self._sectPr.remove_footerReference(self._hdrftr_index) self._document_part.drop_rel(rId)
def function[_drop_definition, parameter[self]]: constant[Remove footer definition (footer part) associated with this section.] variable[rId] assign[=] call[name[self]._sectPr.remove_footerReference, parameter[name[self]._hdrftr_index]] call[name[self]._document_part.drop_rel, parameter[name[rId]]]
keyword[def] identifier[_drop_definition] ( identifier[self] ): literal[string] identifier[rId] = identifier[self] . identifier[_sectPr] . identifier[remove_footerReference] ( identifier[self] . identifier[_hdrftr_index] ) identifier[self] . identifier[_document_part] . identifier[drop_rel] ( identifier[rId] )
def _drop_definition(self): """Remove footer definition (footer part) associated with this section.""" rId = self._sectPr.remove_footerReference(self._hdrftr_index) self._document_part.drop_rel(rId)
def _solve_location_param(self): """ We're lazy here and simply iterate to find the location parameter such that growth_curve(0.5)=1. """ params = copy.copy(self.params) del params['loc'] f = lambda location: self.distr_f.ppf(0.5, loc=location, **params) - 1 return optimize.brentq(f, -10, 10)
def function[_solve_location_param, parameter[self]]: constant[ We're lazy here and simply iterate to find the location parameter such that growth_curve(0.5)=1. ] variable[params] assign[=] call[name[copy].copy, parameter[name[self].params]] <ast.Delete object at 0x7da20e9b3cd0> variable[f] assign[=] <ast.Lambda object at 0x7da20e9b1450> return[call[name[optimize].brentq, parameter[name[f], <ast.UnaryOp object at 0x7da20e9b2ef0>, constant[10]]]]
keyword[def] identifier[_solve_location_param] ( identifier[self] ): literal[string] identifier[params] = identifier[copy] . identifier[copy] ( identifier[self] . identifier[params] ) keyword[del] identifier[params] [ literal[string] ] identifier[f] = keyword[lambda] identifier[location] : identifier[self] . identifier[distr_f] . identifier[ppf] ( literal[int] , identifier[loc] = identifier[location] ,** identifier[params] )- literal[int] keyword[return] identifier[optimize] . identifier[brentq] ( identifier[f] ,- literal[int] , literal[int] )
def _solve_location_param(self): """ We're lazy here and simply iterate to find the location parameter such that growth_curve(0.5)=1. """ params = copy.copy(self.params) del params['loc'] f = lambda location: self.distr_f.ppf(0.5, loc=location, **params) - 1 return optimize.brentq(f, -10, 10)
def get_calculated_display_values(self, immediate: bool=False) -> DisplayValues: """Return the display values. Return the current (possibly uncalculated) display values unless 'immediate' is specified. If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values avoids calculation except in cases where the display values haven't already been calculated. """ if not immediate or not self.__is_master or not self.__last_display_values: if not self.__current_display_values and self.__data_item: self.__current_display_values = DisplayValues(self.__data_item.xdata, self.sequence_index, self.collection_index, self.slice_center, self.slice_width, self.display_limits, self.complex_display_type, self.__color_map_data) def finalize(display_values): self.__last_display_values = display_values self.display_values_changed_event.fire() self.__current_display_values.on_finalize = finalize return self.__current_display_values return self.__last_display_values
def function[get_calculated_display_values, parameter[self, immediate]]: constant[Return the display values. Return the current (possibly uncalculated) display values unless 'immediate' is specified. If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values avoids calculation except in cases where the display values haven't already been calculated. ] if <ast.BoolOp object at 0x7da1b0e3c5e0> begin[:] if <ast.BoolOp object at 0x7da1b0e3ea10> begin[:] name[self].__current_display_values assign[=] call[name[DisplayValues], parameter[name[self].__data_item.xdata, name[self].sequence_index, name[self].collection_index, name[self].slice_center, name[self].slice_width, name[self].display_limits, name[self].complex_display_type, name[self].__color_map_data]] def function[finalize, parameter[display_values]]: name[self].__last_display_values assign[=] name[display_values] call[name[self].display_values_changed_event.fire, parameter[]] name[self].__current_display_values.on_finalize assign[=] name[finalize] return[name[self].__current_display_values] return[name[self].__last_display_values]
keyword[def] identifier[get_calculated_display_values] ( identifier[self] , identifier[immediate] : identifier[bool] = keyword[False] )-> identifier[DisplayValues] : literal[string] keyword[if] keyword[not] identifier[immediate] keyword[or] keyword[not] identifier[self] . identifier[__is_master] keyword[or] keyword[not] identifier[self] . identifier[__last_display_values] : keyword[if] keyword[not] identifier[self] . identifier[__current_display_values] keyword[and] identifier[self] . identifier[__data_item] : identifier[self] . identifier[__current_display_values] = identifier[DisplayValues] ( identifier[self] . identifier[__data_item] . identifier[xdata] , identifier[self] . identifier[sequence_index] , identifier[self] . identifier[collection_index] , identifier[self] . identifier[slice_center] , identifier[self] . identifier[slice_width] , identifier[self] . identifier[display_limits] , identifier[self] . identifier[complex_display_type] , identifier[self] . identifier[__color_map_data] ) keyword[def] identifier[finalize] ( identifier[display_values] ): identifier[self] . identifier[__last_display_values] = identifier[display_values] identifier[self] . identifier[display_values_changed_event] . identifier[fire] () identifier[self] . identifier[__current_display_values] . identifier[on_finalize] = identifier[finalize] keyword[return] identifier[self] . identifier[__current_display_values] keyword[return] identifier[self] . identifier[__last_display_values]
def get_calculated_display_values(self, immediate: bool=False) -> DisplayValues: """Return the display values. Return the current (possibly uncalculated) display values unless 'immediate' is specified. If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values avoids calculation except in cases where the display values haven't already been calculated. """ if not immediate or not self.__is_master or (not self.__last_display_values): if not self.__current_display_values and self.__data_item: self.__current_display_values = DisplayValues(self.__data_item.xdata, self.sequence_index, self.collection_index, self.slice_center, self.slice_width, self.display_limits, self.complex_display_type, self.__color_map_data) def finalize(display_values): self.__last_display_values = display_values self.display_values_changed_event.fire() self.__current_display_values.on_finalize = finalize # depends on [control=['if'], data=[]] return self.__current_display_values # depends on [control=['if'], data=[]] return self.__last_display_values
def flatten_to(atom: Union[Tuple[Type[T]], Type[T]]): """ >>> from Redy.Collections import Traversal, Flow >>> lst: Iterable[int] = [[1, 2, 3]] >>> x = Flow(lst)[Traversal.flatten_to(int)] >>> assert isinstance(x.unbox, Generator) and list(x.unbox) == [1, 2, 3] """ def inner(nested: ActualIterable[Union[T, ActualIterable[T]]]) -> ActualIterable[T]: for each in nested: if isinstance(each, atom): yield each else: yield from inner(each) return inner
def function[flatten_to, parameter[atom]]: constant[ >>> from Redy.Collections import Traversal, Flow >>> lst: Iterable[int] = [[1, 2, 3]] >>> x = Flow(lst)[Traversal.flatten_to(int)] >>> assert isinstance(x.unbox, Generator) and list(x.unbox) == [1, 2, 3] ] def function[inner, parameter[nested]]: for taget[name[each]] in starred[name[nested]] begin[:] if call[name[isinstance], parameter[name[each], name[atom]]] begin[:] <ast.Yield object at 0x7da18eb57f70> return[name[inner]]
keyword[def] identifier[flatten_to] ( identifier[atom] : identifier[Union] [ identifier[Tuple] [ identifier[Type] [ identifier[T] ]], identifier[Type] [ identifier[T] ]]): literal[string] keyword[def] identifier[inner] ( identifier[nested] : identifier[ActualIterable] [ identifier[Union] [ identifier[T] , identifier[ActualIterable] [ identifier[T] ]]])-> identifier[ActualIterable] [ identifier[T] ]: keyword[for] identifier[each] keyword[in] identifier[nested] : keyword[if] identifier[isinstance] ( identifier[each] , identifier[atom] ): keyword[yield] identifier[each] keyword[else] : keyword[yield] keyword[from] identifier[inner] ( identifier[each] ) keyword[return] identifier[inner]
def flatten_to(atom: Union[Tuple[Type[T]], Type[T]]): """ >>> from Redy.Collections import Traversal, Flow >>> lst: Iterable[int] = [[1, 2, 3]] >>> x = Flow(lst)[Traversal.flatten_to(int)] >>> assert isinstance(x.unbox, Generator) and list(x.unbox) == [1, 2, 3] """ def inner(nested: ActualIterable[Union[T, ActualIterable[T]]]) -> ActualIterable[T]: for each in nested: if isinstance(each, atom): yield each # depends on [control=['if'], data=[]] else: yield from inner(each) # depends on [control=['for'], data=['each']] return inner
def ungroup_gen(grouped_items, groupxs, fill=None):
    """
    Ungroups items returning a generator.

    Note that this is much slower than the list version and is not
    guaranteed to have better memory usage.

    Args:
        grouped_items (list): lists of items, one list per group
        groupxs (list): lists of flat output indices, parallel to
            ``grouped_items``
        fill (object): value yielded for output positions that are not
            covered by any group index (default = None)

    Yields:
        object: the ungrouped items in flat-index order

    SeeAlso:
        vt.invert_apply_grouping

    CommandLine:
        python -m utool.util_alg ungroup_unique

    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_alg import *  # NOQA
        >>> import utool as ut
        >>> grouped_items = [[1.1, 1.2], [2.1, 2.2], [3.1, 3.2]]
        >>> groupxs = [[1, 2], [5, 6], [9, 3]]
        >>> ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs))
        >>> ungrouped_items2 = ungroup(grouped_items, groupxs)
        >>> assert ungrouped_items1 == ungrouped_items2
        >>> grouped_items = [[1.1, 1.2], [2.1, 2.2], [3.1, 3.2]]
        >>> groupxs = [[0, 2], [1, 5], [4, 3]]
        >>> ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs))
        >>> ungrouped_items2 = ungroup(grouped_items, groupxs)
        >>> assert ungrouped_items1 == ungrouped_items2
    """
    from itertools import chain
    # Smallest index targeted by any group.  Empty groups count as 0 so the
    # leading fill count can never be negative.
    min_per_group = [min(xs) if len(xs) else 0 for xs in groupxs]
    min_index = min(min_per_group) if len(min_per_group) else 0
    flat_groupx = [x for xs in groupxs for x in xs]
    groupx_sorted = sorted(flat_groupx)
    flat_items = chain.from_iterable(grouped_items)
    # num_fills_before[i] = number of fill values to emit immediately before
    # yielding the (i + 1)-th real item, in sorted flat-index order.  The
    # trailing 0 keeps the lookup valid after the final item.
    num_fills_before = ([min_index] +
                        [b - a - 1 for a, b in zip(groupx_sorted, groupx_sorted[1:])] +
                        [0])
    # Items whose target index has not been reached yet, keyed by index.
    pending = {}
    items_yielded = 0
    # Flat output index that is about to be yielded.
    current_index = 0
    # Emit any fills that come before the very first real item.
    # BUGFIX: the original yielded a hard-coded None here (and below),
    # silently ignoring the ``fill`` argument.
    for _ in range(num_fills_before[items_yielded]):
        yield fill
        current_index += 1
    for yield_at, item in zip(flat_groupx, flat_items):
        if yield_at > current_index:
            # Not its turn yet; buffer until the gap is closed.
            pending[yield_at] = item
        elif yield_at == current_index:
            yield item
            current_index += 1
            items_yielded += 1
            for _ in range(num_fills_before[items_yielded]):
                yield fill
                current_index += 1
            # Flush buffered items that have become contiguous.
            while current_index in pending:
                yield pending.pop(current_index)
                current_index += 1
                items_yielded += 1
                for _ in range(num_fills_before[items_yielded]):
                    yield fill
                    current_index += 1
def function[ungroup_gen, parameter[grouped_items, groupxs, fill]]: constant[ Ungroups items returning a generator. Note that this is much slower than the list version and is not gaurenteed to have better memory usage. Args: grouped_items (list): groupxs (list): maxval (int): (default = None) Returns: list: ungrouped_items SeeAlso: vt.invert_apply_grouping CommandLine: python -m utool.util_alg ungroup_unique Example: >>> # ENABLE_DOCTEST >>> from utool.util_alg import * # NOQA >>> import utool as ut >>> grouped_items = [[1.1, 1.2], [2.1, 2.2], [3.1, 3.2]] >>> groupxs = [[1, 2], [5, 6], [9, 3]] >>> ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs)) >>> ungrouped_items2 = ungroup(grouped_items, groupxs) >>> assert ungrouped_items1 == ungrouped_items2 >>> grouped_items = [[1.1, 1.2], [2.1, 2.2], [3.1, 3.2]] >>> groupxs = [[0, 2], [1, 5], [4, 3]] >>> ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs)) >>> ungrouped_items2 = ungroup(grouped_items, groupxs) >>> assert ungrouped_items1 == ungrouped_items2 Ignore: labels = np.random.randint(0, 64, 10000) unique_labels, groupxs = ut.group_indices(labels) grouped_items = ut.apply_grouping(np.arange(len(labels)), groupxs) ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs)) ungrouped_items2 = ungroup(grouped_items, groupxs) assert ungrouped_items2 == ungrouped_items1 %timeit list(ungroup_gen(grouped_items, groupxs)) %timeit ungroup(grouped_items, groupxs) ] import module[utool] as alias[ut] variable[minpergroup] assign[=] <ast.ListComp object at 0x7da1b2407b50> variable[minval] assign[=] <ast.IfExp object at 0x7da1b2407e50> variable[flat_groupx] assign[=] call[name[ut].flatten, parameter[name[groupxs]]] variable[sortx] assign[=] call[name[ut].argsort, parameter[name[flat_groupx]]] variable[groupx_sorted] assign[=] call[name[ut].take, parameter[name[flat_groupx], name[sortx]]] variable[flat_items] assign[=] call[name[ut].iflatten, parameter[name[grouped_items]]] variable[toyeild] assign[=] 
dictionary[[], []] variable[items_yeilded] assign[=] constant[0] variable[current_index] assign[=] constant[0] variable[num_fills_before] assign[=] binary_operation[binary_operation[list[[<ast.Name object at 0x7da1b24af6a0>]] + call[binary_operation[call[name[np].diff, parameter[name[groupx_sorted]]] - constant[1]].tolist, parameter[]]] + list[[<ast.Constant object at 0x7da1b24ac730>]]] variable[fills] assign[=] call[name[num_fills_before]][name[items_yeilded]] if compare[name[fills] greater[>] constant[0]] begin[:] for taget[name[_]] in starred[call[name[range], parameter[name[fills]]]] begin[:] <ast.Yield object at 0x7da1b24ad780> <ast.AugAssign object at 0x7da1b24ae0b0> for taget[tuple[[<ast.Name object at 0x7da1b24afb20>, <ast.Name object at 0x7da1b24aeaa0>]]] in starred[call[name[zip], parameter[name[flat_groupx], name[flat_items]]]] begin[:] if compare[name[yeild_at] greater[>] name[current_index]] begin[:] call[name[toyeild]][name[yeild_at]] assign[=] name[item]
keyword[def] identifier[ungroup_gen] ( identifier[grouped_items] , identifier[groupxs] , identifier[fill] = keyword[None] ): literal[string] keyword[import] identifier[utool] keyword[as] identifier[ut] identifier[minpergroup] =[ identifier[min] ( identifier[xs] ) keyword[if] identifier[len] ( identifier[xs] ) keyword[else] literal[int] keyword[for] identifier[xs] keyword[in] identifier[groupxs] ] identifier[minval] = identifier[min] ( identifier[minpergroup] ) keyword[if] identifier[len] ( identifier[minpergroup] ) keyword[else] literal[int] identifier[flat_groupx] = identifier[ut] . identifier[flatten] ( identifier[groupxs] ) identifier[sortx] = identifier[ut] . identifier[argsort] ( identifier[flat_groupx] ) identifier[groupx_sorted] = identifier[ut] . identifier[take] ( identifier[flat_groupx] , identifier[sortx] ) identifier[flat_items] = identifier[ut] . identifier[iflatten] ( identifier[grouped_items] ) identifier[toyeild] ={} identifier[items_yeilded] = literal[int] identifier[current_index] = literal[int] identifier[num_fills_before] =[ identifier[minval] ]+( identifier[np] . identifier[diff] ( identifier[groupx_sorted] )- literal[int] ). 
identifier[tolist] ()+[ literal[int] ] identifier[fills] = identifier[num_fills_before] [ identifier[items_yeilded] ] keyword[if] identifier[fills] > literal[int] : keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[fills] ): keyword[yield] keyword[None] identifier[current_index] += literal[int] keyword[for] identifier[yeild_at] , identifier[item] keyword[in] identifier[zip] ( identifier[flat_groupx] , identifier[flat_items] ): keyword[if] identifier[yeild_at] > identifier[current_index] : identifier[toyeild] [ identifier[yeild_at] ]= identifier[item] keyword[elif] identifier[yeild_at] == identifier[current_index] : keyword[yield] identifier[item] identifier[current_index] += literal[int] identifier[items_yeilded] += literal[int] identifier[fills] = identifier[num_fills_before] [ identifier[items_yeilded] ] keyword[if] identifier[fills] > literal[int] : keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[fills] ): keyword[yield] keyword[None] identifier[current_index] += literal[int] keyword[while] identifier[current_index] keyword[in] identifier[toyeild] : identifier[item] = identifier[toyeild] . identifier[pop] ( identifier[current_index] ) keyword[yield] identifier[item] identifier[current_index] += literal[int] identifier[items_yeilded] += literal[int] identifier[fills] = identifier[num_fills_before] [ identifier[items_yeilded] ] keyword[if] identifier[fills] > literal[int] : keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[fills] ): keyword[yield] keyword[None] identifier[current_index] += literal[int]
def ungroup_gen(grouped_items, groupxs, fill=None): """ Ungroups items returning a generator. Note that this is much slower than the list version and is not gaurenteed to have better memory usage. Args: grouped_items (list): groupxs (list): maxval (int): (default = None) Returns: list: ungrouped_items SeeAlso: vt.invert_apply_grouping CommandLine: python -m utool.util_alg ungroup_unique Example: >>> # ENABLE_DOCTEST >>> from utool.util_alg import * # NOQA >>> import utool as ut >>> grouped_items = [[1.1, 1.2], [2.1, 2.2], [3.1, 3.2]] >>> groupxs = [[1, 2], [5, 6], [9, 3]] >>> ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs)) >>> ungrouped_items2 = ungroup(grouped_items, groupxs) >>> assert ungrouped_items1 == ungrouped_items2 >>> grouped_items = [[1.1, 1.2], [2.1, 2.2], [3.1, 3.2]] >>> groupxs = [[0, 2], [1, 5], [4, 3]] >>> ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs)) >>> ungrouped_items2 = ungroup(grouped_items, groupxs) >>> assert ungrouped_items1 == ungrouped_items2 Ignore: labels = np.random.randint(0, 64, 10000) unique_labels, groupxs = ut.group_indices(labels) grouped_items = ut.apply_grouping(np.arange(len(labels)), groupxs) ungrouped_items1 = list(ungroup_gen(grouped_items, groupxs)) ungrouped_items2 = ungroup(grouped_items, groupxs) assert ungrouped_items2 == ungrouped_items1 %timeit list(ungroup_gen(grouped_items, groupxs)) %timeit ungroup(grouped_items, groupxs) """ import utool as ut # Determine the number of items if unknown #maxpergroup = [max(xs) if len(xs) else 0 for xs in groupxs] #maxval = max(maxpergroup) if len(maxpergroup) else 0 minpergroup = [min(xs) if len(xs) else 0 for xs in groupxs] minval = min(minpergroup) if len(minpergroup) else 0 flat_groupx = ut.flatten(groupxs) sortx = ut.argsort(flat_groupx) # Indicates the index being yeilded groupx_sorted = ut.take(flat_groupx, sortx) flat_items = ut.iflatten(grouped_items) # Storage for data weiting to be yeilded toyeild = {} items_yeilded = 0 # Indicates the index we 
are curently yeilding current_index = 0 # Determine where fills need to happen num_fills_before = [minval] + (np.diff(groupx_sorted) - 1).tolist() + [0] # Check if there are fills before the first item fills = num_fills_before[items_yeilded] if fills > 0: for _ in range(fills): yield None current_index += 1 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['fills']] # Yield items as possible for (yeild_at, item) in zip(flat_groupx, flat_items): if yeild_at > current_index: toyeild[yeild_at] = item # depends on [control=['if'], data=['yeild_at']] elif yeild_at == current_index: # When we find the next element to yeild yield item current_index += 1 items_yeilded += 1 # Check if there are fills before the next item fills = num_fills_before[items_yeilded] if fills > 0: for _ in range(fills): yield None current_index += 1 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['fills']] # Now yield everything that came before this while current_index in toyeild: item = toyeild.pop(current_index) yield item current_index += 1 items_yeilded += 1 # Check if there are fills before the next item fills = num_fills_before[items_yeilded] if fills > 0: for _ in range(fills): yield None current_index += 1 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['fills']] # depends on [control=['while'], data=['current_index', 'toyeild']] # depends on [control=['if'], data=['current_index']] # depends on [control=['for'], data=[]]
def _node_cost(self, y_true, cost_mat):
    """ Private function to calculate the cost of a node.

    Parameters
    ----------
    y_true : array indicator matrix
        Ground truth (correct) labels.

    cost_mat : array-like of shape = [n_samples, 4]
        Cost matrix of the classification problem
        Where the columns represents the costs of: false positives, false negatives,
        true positives and true negatives, for each example.

    Returns
    -------
    tuple(cost_loss : float, node prediction : int, node predicted probability : float)
    """
    n_samples = len(y_true)

    # Evaluates the cost by predicting the node as all-negative (index 0)
    # and all-positive (index 1).
    costs = np.zeros(2)
    costs[0] = cost_loss(y_true, np.zeros(y_true.shape), cost_mat)
    costs[1] = cost_loss(y_true, np.ones(y_true.shape), cost_mat)

    # Class priors: pi[0] = P(negative), pi[1] = P(positive).
    pi = np.array([1 - y_true.mean(), y_true.mean()])

    if self.criterion == 'direct_cost':
        pass  # use raw costs unchanged
    elif self.criterion == 'pi_cost':
        costs *= pi
    elif self.criterion == 'gini_cost':
        costs *= pi ** 2
    elif self.criterion == 'entropy_cost':
        # BUGFIX: the original used ``self.criterion in 'entropy_cost'``
        # (a substring test), which also matched unrelated criterion
        # strings such as 'cost' or 'entropy'.
        if pi[0] == 0 or pi[1] == 0:
            # Pure node: entropy is 0; avoid log(0).
            costs *= 0
        else:
            costs *= -np.log(pi)

    y_pred = np.argmin(costs)

    # Predicted probability of the node using Laplace correction.
    n_positives = y_true.sum()
    y_prob = (n_positives + 1.0) / (n_samples + 2.0)

    return costs[y_pred], y_pred, y_prob
def function[_node_cost, parameter[self, y_true, cost_mat]]: constant[ Private function to calculate the cost of a node. Parameters ---------- y_true : array indicator matrix Ground truth (correct) labels. cost_mat : array-like of shape = [n_samples, 4] Cost matrix of the classification problem Where the columns represents the costs of: false positives, false negatives, true positives and true negatives, for each example. Returns ------- tuple(cost_loss : float, node prediction : int, node predicted probability : float) ] variable[n_samples] assign[=] call[name[len], parameter[name[y_true]]] variable[costs] assign[=] call[name[np].zeros, parameter[constant[2]]] call[name[costs]][constant[0]] assign[=] call[name[cost_loss], parameter[name[y_true], call[name[np].zeros, parameter[name[y_true].shape]], name[cost_mat]]] call[name[costs]][constant[1]] assign[=] call[name[cost_loss], parameter[name[y_true], call[name[np].ones, parameter[name[y_true].shape]], name[cost_mat]]] variable[pi] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da1b138f3d0>, <ast.Call object at 0x7da1b138f160>]]]] if compare[name[self].criterion equal[==] constant[direct_cost]] begin[:] variable[costs] assign[=] name[costs] variable[y_pred] assign[=] call[name[np].argmin, parameter[name[costs]]] variable[n_positives] assign[=] call[name[y_true].sum, parameter[]] variable[y_prob] assign[=] binary_operation[binary_operation[name[n_positives] + constant[1.0]] / binary_operation[name[n_samples] + constant[2.0]]] return[tuple[[<ast.Subscript object at 0x7da1b122ad70>, <ast.Name object at 0x7da1b1229f00>, <ast.Name object at 0x7da1b122ace0>]]]
keyword[def] identifier[_node_cost] ( identifier[self] , identifier[y_true] , identifier[cost_mat] ): literal[string] identifier[n_samples] = identifier[len] ( identifier[y_true] ) identifier[costs] = identifier[np] . identifier[zeros] ( literal[int] ) identifier[costs] [ literal[int] ]= identifier[cost_loss] ( identifier[y_true] , identifier[np] . identifier[zeros] ( identifier[y_true] . identifier[shape] ), identifier[cost_mat] ) identifier[costs] [ literal[int] ]= identifier[cost_loss] ( identifier[y_true] , identifier[np] . identifier[ones] ( identifier[y_true] . identifier[shape] ), identifier[cost_mat] ) identifier[pi] = identifier[np] . identifier[array] ([ literal[int] - identifier[y_true] . identifier[mean] (), identifier[y_true] . identifier[mean] ()]) keyword[if] identifier[self] . identifier[criterion] == literal[string] : identifier[costs] = identifier[costs] keyword[elif] identifier[self] . identifier[criterion] == literal[string] : identifier[costs] *= identifier[pi] keyword[elif] identifier[self] . identifier[criterion] == literal[string] : identifier[costs] *= identifier[pi] ** literal[int] keyword[elif] identifier[self] . identifier[criterion] keyword[in] literal[string] : keyword[if] identifier[pi] [ literal[int] ]== literal[int] keyword[or] identifier[pi] [ literal[int] ]== literal[int] : identifier[costs] *= literal[int] keyword[else] : identifier[costs] *=- identifier[np] . identifier[log] ( identifier[pi] ) identifier[y_pred] = identifier[np] . identifier[argmin] ( identifier[costs] ) identifier[n_positives] = identifier[y_true] . identifier[sum] () identifier[y_prob] =( identifier[n_positives] + literal[int] )/( identifier[n_samples] + literal[int] ) keyword[return] identifier[costs] [ identifier[y_pred] ], identifier[y_pred] , identifier[y_prob]
def _node_cost(self, y_true, cost_mat): """ Private function to calculate the cost of a node. Parameters ---------- y_true : array indicator matrix Ground truth (correct) labels. cost_mat : array-like of shape = [n_samples, 4] Cost matrix of the classification problem Where the columns represents the costs of: false positives, false negatives, true positives and true negatives, for each example. Returns ------- tuple(cost_loss : float, node prediction : int, node predicted probability : float) """ n_samples = len(y_true) # Evaluates the cost by predicting the node as positive and negative costs = np.zeros(2) costs[0] = cost_loss(y_true, np.zeros(y_true.shape), cost_mat) costs[1] = cost_loss(y_true, np.ones(y_true.shape), cost_mat) pi = np.array([1 - y_true.mean(), y_true.mean()]) if self.criterion == 'direct_cost': costs = costs # depends on [control=['if'], data=[]] elif self.criterion == 'pi_cost': costs *= pi # depends on [control=['if'], data=[]] elif self.criterion == 'gini_cost': costs *= pi ** 2 # depends on [control=['if'], data=[]] elif self.criterion in 'entropy_cost': if pi[0] == 0 or pi[1] == 0: costs *= 0 # depends on [control=['if'], data=[]] else: costs *= -np.log(pi) # depends on [control=['if'], data=[]] y_pred = np.argmin(costs) # Calculate the predicted probability of a node using laplace correction. n_positives = y_true.sum() y_prob = (n_positives + 1.0) / (n_samples + 2.0) return (costs[y_pred], y_pred, y_prob)
def clear_dag_runs():
    """
    Remove any existing DAG runs for the perf test DAGs.
    """
    session = settings.Session()
    try:
        drs = session.query(DagRun).filter(
            DagRun.dag_id.in_(DAG_IDS),
        ).all()
        for dr in drs:
            logging.info('Deleting DagRun :: {}'.format(dr))
            session.delete(dr)
        # BUGFIX: without an explicit commit the deletions above are
        # discarded when the session goes away, leaving the DagRuns intact.
        session.commit()
    finally:
        # Always return the connection to the pool, even if the query or
        # commit raises.
        session.close()
def function[clear_dag_runs, parameter[]]: constant[ Remove any existing DAG runs for the perf test DAGs. ] variable[session] assign[=] call[name[settings].Session, parameter[]] variable[drs] assign[=] call[call[call[name[session].query, parameter[name[DagRun]]].filter, parameter[call[name[DagRun].dag_id.in_, parameter[name[DAG_IDS]]]]].all, parameter[]] for taget[name[dr]] in starred[name[drs]] begin[:] call[name[logging].info, parameter[call[constant[Deleting DagRun :: {}].format, parameter[name[dr]]]]] call[name[session].delete, parameter[name[dr]]]
keyword[def] identifier[clear_dag_runs] (): literal[string] identifier[session] = identifier[settings] . identifier[Session] () identifier[drs] = identifier[session] . identifier[query] ( identifier[DagRun] ). identifier[filter] ( identifier[DagRun] . identifier[dag_id] . identifier[in_] ( identifier[DAG_IDS] ), ). identifier[all] () keyword[for] identifier[dr] keyword[in] identifier[drs] : identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[dr] )) identifier[session] . identifier[delete] ( identifier[dr] )
def clear_dag_runs(): """ Remove any existing DAG runs for the perf test DAGs. """ session = settings.Session() drs = session.query(DagRun).filter(DagRun.dag_id.in_(DAG_IDS)).all() for dr in drs: logging.info('Deleting DagRun :: {}'.format(dr)) session.delete(dr) # depends on [control=['for'], data=['dr']]
def normalize(self, dt):
    '''Correct the timezone information on the given datetime.

    Date arithmetic that crosses a DST boundary does not magically adjust
    ``tzinfo``; this method re-derives the correct local time by round-
    tripping the wall-clock value through UTC and ``self.fromutc``.

    Raises ValueError if *dt* is naive (has no tzinfo attached).
    '''
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    # Shift the wall-clock value back to UTC using the (possibly stale)
    # offset recorded on the datetime, dropping the tzinfo in the process.
    utc_wall = dt.replace(tzinfo=None) - dt.tzinfo._utcoffset
    # Re-attach the correct zone information for that UTC instant.
    return self.fromutc(utc_wall)
def function[normalize, parameter[self, dt]]: constant[Correct the timezone information on the given datetime If date arithmetic crosses DST boundaries, the tzinfo is not magically adjusted. This method normalizes the tzinfo to the correct one. To test, first we need to do some setup >>> from pytz import timezone >>> utc = timezone('UTC') >>> eastern = timezone('US/Eastern') >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' We next create a datetime right on an end-of-DST transition point, the instant when the wallclocks are wound back one hour. >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) >>> loc_dt = utc_dt.astimezone(eastern) >>> loc_dt.strftime(fmt) '2002-10-27 01:00:00 EST (-0500)' Now, if we subtract a few minutes from it, note that the timezone information has not changed. >>> before = loc_dt - timedelta(minutes=10) >>> before.strftime(fmt) '2002-10-27 00:50:00 EST (-0500)' But we can fix that by calling the normalize method >>> before = eastern.normalize(before) >>> before.strftime(fmt) '2002-10-27 01:50:00 EDT (-0400)' The supported method of converting between timezones is to use datetime.astimezone(). Currently, normalize() also works: >>> th = timezone('Asia/Bangkok') >>> am = timezone('Europe/Amsterdam') >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3)) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> am.normalize(dt).strftime(fmt) '2011-05-06 20:02:03 CEST (+0200)' ] if compare[name[dt].tzinfo is constant[None]] begin[:] <ast.Raise object at 0x7da20c795090> variable[offset] assign[=] name[dt].tzinfo._utcoffset variable[dt] assign[=] call[name[dt].replace, parameter[]] variable[dt] assign[=] binary_operation[name[dt] - name[offset]] return[call[name[self].fromutc, parameter[name[dt]]]]
keyword[def] identifier[normalize] ( identifier[self] , identifier[dt] ): literal[string] keyword[if] identifier[dt] . identifier[tzinfo] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[offset] = identifier[dt] . identifier[tzinfo] . identifier[_utcoffset] identifier[dt] = identifier[dt] . identifier[replace] ( identifier[tzinfo] = keyword[None] ) identifier[dt] = identifier[dt] - identifier[offset] keyword[return] identifier[self] . identifier[fromutc] ( identifier[dt] )
def normalize(self, dt): """Correct the timezone information on the given datetime If date arithmetic crosses DST boundaries, the tzinfo is not magically adjusted. This method normalizes the tzinfo to the correct one. To test, first we need to do some setup >>> from pytz import timezone >>> utc = timezone('UTC') >>> eastern = timezone('US/Eastern') >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' We next create a datetime right on an end-of-DST transition point, the instant when the wallclocks are wound back one hour. >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) >>> loc_dt = utc_dt.astimezone(eastern) >>> loc_dt.strftime(fmt) '2002-10-27 01:00:00 EST (-0500)' Now, if we subtract a few minutes from it, note that the timezone information has not changed. >>> before = loc_dt - timedelta(minutes=10) >>> before.strftime(fmt) '2002-10-27 00:50:00 EST (-0500)' But we can fix that by calling the normalize method >>> before = eastern.normalize(before) >>> before.strftime(fmt) '2002-10-27 01:50:00 EDT (-0400)' The supported method of converting between timezones is to use datetime.astimezone(). Currently, normalize() also works: >>> th = timezone('Asia/Bangkok') >>> am = timezone('Europe/Amsterdam') >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3)) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> am.normalize(dt).strftime(fmt) '2011-05-06 20:02:03 CEST (+0200)' """ if dt.tzinfo is None: raise ValueError('Naive time - no tzinfo set') # depends on [control=['if'], data=[]] # Convert dt in localtime to UTC offset = dt.tzinfo._utcoffset dt = dt.replace(tzinfo=None) dt = dt - offset # convert it back, and return it return self.fromutc(dt)
def generate(self, delim='-', length=4, chars='0123456789'):
    '''Compose a robot name of the form "<descriptor><delim><noun><delim><token>".

    Loosely inspired by Haikunator.

    Parameters
    ==========
    delim: separator placed between the three name parts
    length: number of characters in the trailing token
    chars: alphabet the token characters are drawn from
    '''
    descriptor = self._select(self._descriptors)
    noun = self._select(self._nouns)
    token = ''.join(self._select(chars) for _ in range(length))
    return delim.join((descriptor, noun, token))
def function[generate, parameter[self, delim, length, chars]]: constant[ Generate a robot name. Inspiration from Haikunator, but much more poorly implemented ;) Parameters ========== delim: Delimiter length: TokenLength chars: TokenChars ] variable[descriptor] assign[=] call[name[self]._select, parameter[name[self]._descriptors]] variable[noun] assign[=] call[name[self]._select, parameter[name[self]._nouns]] variable[numbers] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b03fa2c0>]] return[call[name[delim].join, parameter[list[[<ast.Name object at 0x7da1b03f9e70>, <ast.Name object at 0x7da1b03bb850>, <ast.Name object at 0x7da1b03b8730>]]]]]
keyword[def] identifier[generate] ( identifier[self] , identifier[delim] = literal[string] , identifier[length] = literal[int] , identifier[chars] = literal[string] ): literal[string] identifier[descriptor] = identifier[self] . identifier[_select] ( identifier[self] . identifier[_descriptors] ) identifier[noun] = identifier[self] . identifier[_select] ( identifier[self] . identifier[_nouns] ) identifier[numbers] = literal[string] . identifier[join] (( identifier[self] . identifier[_select] ( identifier[chars] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[length] ))) keyword[return] identifier[delim] . identifier[join] ([ identifier[descriptor] , identifier[noun] , identifier[numbers] ])
def generate(self, delim='-', length=4, chars='0123456789'): """ Generate a robot name. Inspiration from Haikunator, but much more poorly implemented ;) Parameters ========== delim: Delimiter length: TokenLength chars: TokenChars """ descriptor = self._select(self._descriptors) noun = self._select(self._nouns) numbers = ''.join((self._select(chars) for _ in range(length))) return delim.join([descriptor, noun, numbers])
def _check_consumer(self): """ Validates the :attr:`.consumer`. """ # 'magic' using _kwarg method # pylint:disable=no-member if not self.consumer.key: raise ConfigError( 'Consumer key not specified for provider {0}!'.format( self.name)) if not self.consumer.secret: raise ConfigError( 'Consumer secret not specified for provider {0}!'.format( self.name))
def function[_check_consumer, parameter[self]]: constant[ Validates the :attr:`.consumer`. ] if <ast.UnaryOp object at 0x7da1b0383790> begin[:] <ast.Raise object at 0x7da1b0550820> if <ast.UnaryOp object at 0x7da1b0553b50> begin[:] <ast.Raise object at 0x7da1b0552590>
keyword[def] identifier[_check_consumer] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[consumer] . identifier[key] : keyword[raise] identifier[ConfigError] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] )) keyword[if] keyword[not] identifier[self] . identifier[consumer] . identifier[secret] : keyword[raise] identifier[ConfigError] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] ))
def _check_consumer(self): """ Validates the :attr:`.consumer`. """ # 'magic' using _kwarg method # pylint:disable=no-member if not self.consumer.key: raise ConfigError('Consumer key not specified for provider {0}!'.format(self.name)) # depends on [control=['if'], data=[]] if not self.consumer.secret: raise ConfigError('Consumer secret not specified for provider {0}!'.format(self.name)) # depends on [control=['if'], data=[]]
def list_dynamodb(region, filter_by_kwargs):
    """Return the DynamoDB table names in ``region``, filtered by the
    given keyword criteria via ``lookup``."""
    connection = boto.dynamodb.connect_to_region(region)
    table_names = connection.list_tables()
    return lookup(table_names, filter_by=filter_by_kwargs)
def function[list_dynamodb, parameter[region, filter_by_kwargs]]: constant[List all DynamoDB tables.] variable[conn] assign[=] call[name[boto].dynamodb.connect_to_region, parameter[name[region]]] variable[tables] assign[=] call[name[conn].list_tables, parameter[]] return[call[name[lookup], parameter[name[tables]]]]
keyword[def] identifier[list_dynamodb] ( identifier[region] , identifier[filter_by_kwargs] ): literal[string] identifier[conn] = identifier[boto] . identifier[dynamodb] . identifier[connect_to_region] ( identifier[region] ) identifier[tables] = identifier[conn] . identifier[list_tables] () keyword[return] identifier[lookup] ( identifier[tables] , identifier[filter_by] = identifier[filter_by_kwargs] )
def list_dynamodb(region, filter_by_kwargs): """List all DynamoDB tables.""" conn = boto.dynamodb.connect_to_region(region) tables = conn.list_tables() return lookup(tables, filter_by=filter_by_kwargs)
def uris(self):
    """Return the URI of every resource, in the order they were added."""
    return [resource.uri for resource in self]
def function[uris, parameter[self]]: constant[Extract list of all resource URIs (in the order added).] variable[uris] assign[=] list[[]] for taget[name[r]] in starred[name[self]] begin[:] call[name[uris].append, parameter[name[r].uri]] return[name[uris]]
keyword[def] identifier[uris] ( identifier[self] ): literal[string] identifier[uris] =[] keyword[for] identifier[r] keyword[in] identifier[self] : identifier[uris] . identifier[append] ( identifier[r] . identifier[uri] ) keyword[return] ( identifier[uris] )
def uris(self): """Extract list of all resource URIs (in the order added).""" uris = [] for r in self: uris.append(r.uri) # depends on [control=['for'], data=['r']] return uris
def hflip_detections(label, w):
    """
    Horizontally flip detections according to an image flip.

    Mirrors the center x of every detection in any ``detection*`` list of
    ``label``; 2.5d detections additionally get their orientation mirrored.

    :param label: The label dict containing all detection lists.
    :param w: The width of the image as a number.
    :return:
    """
    for key in label.keys():
        if not key.startswith("detection"):
            continue
        for det in label[key]:
            det.cx = w - det.cx
            if key == "detections_2.5d":
                # Mirror the yaw angle across the vertical axis.
                det.theta = math.pi - det.theta
def function[hflip_detections, parameter[label, w]]: constant[ Horizontally flip detections according to an image flip. :param label: The label dict containing all detection lists. :param w: The width of the image as a number. :return: ] for taget[name[k]] in starred[call[name[label].keys, parameter[]]] begin[:] if call[name[k].startswith, parameter[constant[detection]]] begin[:] variable[detections] assign[=] call[name[label]][name[k]] for taget[name[detection]] in starred[name[detections]] begin[:] name[detection].cx assign[=] binary_operation[name[w] - name[detection].cx] if compare[name[k] equal[==] constant[detections_2.5d]] begin[:] name[detection].theta assign[=] binary_operation[name[math].pi - name[detection].theta]
keyword[def] identifier[hflip_detections] ( identifier[label] , identifier[w] ): literal[string] keyword[for] identifier[k] keyword[in] identifier[label] . identifier[keys] (): keyword[if] identifier[k] . identifier[startswith] ( literal[string] ): identifier[detections] = identifier[label] [ identifier[k] ] keyword[for] identifier[detection] keyword[in] identifier[detections] : identifier[detection] . identifier[cx] = identifier[w] - identifier[detection] . identifier[cx] keyword[if] identifier[k] == literal[string] : identifier[detection] . identifier[theta] = identifier[math] . identifier[pi] - identifier[detection] . identifier[theta]
def hflip_detections(label, w): """ Horizontally flip detections according to an image flip. :param label: The label dict containing all detection lists. :param w: The width of the image as a number. :return: """ for k in label.keys(): if k.startswith('detection'): detections = label[k] for detection in detections: detection.cx = w - detection.cx if k == 'detections_2.5d': detection.theta = math.pi - detection.theta # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['detection']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
def taskinfo(self): """ Retrieve the Task Information """ task_input = {'taskName': 'QueryTask', 'inputParameters': {"Task_Name": self._name}} info = taskengine.execute(task_input, self._engine, cwd=self._cwd) task_def = info['outputParameters']['DEFINITION'] task_def['name'] = str(task_def.pop('NAME')) task_def['description'] = str(task_def.pop('DESCRIPTION')) task_def['displayName'] = str(task_def.pop('DISPLAY_NAME')) if 'COMMUTE_ON_SUBSET' in task_def: task_def['commute_on_subset'] = task_def.pop('COMMUTE_ON_SUBSET') if 'COMMUTE_ON_DOWNSAMPLE' in task_def: task_def['commute_on_downsample'] = task_def.pop('COMMUTE_ON_DOWNSAMPLE') # Convert PARAMETERS into a list instead of a dictionary # which matches the gsf side things task_def['parameters'] = \ [v for v in task_def['PARAMETERS'].values()] task_def.pop('PARAMETERS') parameters = task_def['parameters'] for parameter in parameters: parameter['name'] = str(parameter.pop('NAME')) parameter['description'] = str(parameter.pop('DESCRIPTION')) parameter['display_name'] = str(parameter.pop('DISPLAY_NAME')) parameter['required'] = bool(parameter.pop('REQUIRED')) if 'MIN' in parameter: parameter['min'] = parameter.pop('MIN') if 'MAX' in parameter: parameter['max'] = parameter.pop('MAX') if parameter['TYPE'].count('['): parameter['type'], parameter['dimensions'] = parameter.pop('TYPE').split('[') parameter['dimensions'] = '[' + parameter['dimensions'] parameter['type'] = str(parameter['type']) else: parameter['type'] = str(parameter.pop('TYPE').split('ARRAY')[0]) if 'DIMENSIONS' in parameter: parameter['dimensions'] = parameter.pop('DIMENSIONS') if 'DIRECTION' in parameter: parameter['direction'] = parameter.pop('DIRECTION').lower() if 'DEFAULT' in parameter: if parameter['DEFAULT'] is not None: parameter['default_value'] = parameter.pop('DEFAULT') else: parameter.pop('DEFAULT') if 'CHOICE_LIST' in parameter: if parameter['CHOICE_LIST'] is not None: parameter['choice_list'] = parameter.pop('CHOICE_LIST') else: 
parameter.pop('CHOICE_LIST') if 'FOLD_CASE' in parameter: parameter['fold_case'] = parameter.pop('FOLD_CASE') if 'AUTO_EXTENSION' in parameter: parameter['auto_extension'] = parameter.pop('AUTO_EXTENSION') if 'IS_TEMPORARY' in parameter: parameter['is_temporary'] = parameter.pop('IS_TEMPORARY') if 'IS_DIRECTORY' in parameter: parameter['is_directory'] = parameter.pop('IS_DIRECTORY') return task_def
def function[taskinfo, parameter[self]]: constant[ Retrieve the Task Information ] variable[task_input] assign[=] dictionary[[<ast.Constant object at 0x7da1b23e7df0>, <ast.Constant object at 0x7da1b23e7dc0>], [<ast.Constant object at 0x7da1b23e7d90>, <ast.Dict object at 0x7da1b23e7d60>]] variable[info] assign[=] call[name[taskengine].execute, parameter[name[task_input], name[self]._engine]] variable[task_def] assign[=] call[call[name[info]][constant[outputParameters]]][constant[DEFINITION]] call[name[task_def]][constant[name]] assign[=] call[name[str], parameter[call[name[task_def].pop, parameter[constant[NAME]]]]] call[name[task_def]][constant[description]] assign[=] call[name[str], parameter[call[name[task_def].pop, parameter[constant[DESCRIPTION]]]]] call[name[task_def]][constant[displayName]] assign[=] call[name[str], parameter[call[name[task_def].pop, parameter[constant[DISPLAY_NAME]]]]] if compare[constant[COMMUTE_ON_SUBSET] in name[task_def]] begin[:] call[name[task_def]][constant[commute_on_subset]] assign[=] call[name[task_def].pop, parameter[constant[COMMUTE_ON_SUBSET]]] if compare[constant[COMMUTE_ON_DOWNSAMPLE] in name[task_def]] begin[:] call[name[task_def]][constant[commute_on_downsample]] assign[=] call[name[task_def].pop, parameter[constant[COMMUTE_ON_DOWNSAMPLE]]] call[name[task_def]][constant[parameters]] assign[=] <ast.ListComp object at 0x7da1b25ef580> call[name[task_def].pop, parameter[constant[PARAMETERS]]] variable[parameters] assign[=] call[name[task_def]][constant[parameters]] for taget[name[parameter]] in starred[name[parameters]] begin[:] call[name[parameter]][constant[name]] assign[=] call[name[str], parameter[call[name[parameter].pop, parameter[constant[NAME]]]]] call[name[parameter]][constant[description]] assign[=] call[name[str], parameter[call[name[parameter].pop, parameter[constant[DESCRIPTION]]]]] call[name[parameter]][constant[display_name]] assign[=] call[name[str], parameter[call[name[parameter].pop, 
parameter[constant[DISPLAY_NAME]]]]] call[name[parameter]][constant[required]] assign[=] call[name[bool], parameter[call[name[parameter].pop, parameter[constant[REQUIRED]]]]] if compare[constant[MIN] in name[parameter]] begin[:] call[name[parameter]][constant[min]] assign[=] call[name[parameter].pop, parameter[constant[MIN]]] if compare[constant[MAX] in name[parameter]] begin[:] call[name[parameter]][constant[max]] assign[=] call[name[parameter].pop, parameter[constant[MAX]]] if call[call[name[parameter]][constant[TYPE]].count, parameter[constant[[]]] begin[:] <ast.Tuple object at 0x7da1b2555a50> assign[=] call[call[name[parameter].pop, parameter[constant[TYPE]]].split, parameter[constant[[]]] call[name[parameter]][constant[dimensions]] assign[=] binary_operation[constant[[] + call[name[parameter]][constant[dimensions]]] call[name[parameter]][constant[type]] assign[=] call[name[str], parameter[call[name[parameter]][constant[type]]]] if compare[constant[DIMENSIONS] in name[parameter]] begin[:] call[name[parameter]][constant[dimensions]] assign[=] call[name[parameter].pop, parameter[constant[DIMENSIONS]]] if compare[constant[DIRECTION] in name[parameter]] begin[:] call[name[parameter]][constant[direction]] assign[=] call[call[name[parameter].pop, parameter[constant[DIRECTION]]].lower, parameter[]] if compare[constant[DEFAULT] in name[parameter]] begin[:] if compare[call[name[parameter]][constant[DEFAULT]] is_not constant[None]] begin[:] call[name[parameter]][constant[default_value]] assign[=] call[name[parameter].pop, parameter[constant[DEFAULT]]] if compare[constant[CHOICE_LIST] in name[parameter]] begin[:] if compare[call[name[parameter]][constant[CHOICE_LIST]] is_not constant[None]] begin[:] call[name[parameter]][constant[choice_list]] assign[=] call[name[parameter].pop, parameter[constant[CHOICE_LIST]]] if compare[constant[FOLD_CASE] in name[parameter]] begin[:] call[name[parameter]][constant[fold_case]] assign[=] call[name[parameter].pop, 
parameter[constant[FOLD_CASE]]] if compare[constant[AUTO_EXTENSION] in name[parameter]] begin[:] call[name[parameter]][constant[auto_extension]] assign[=] call[name[parameter].pop, parameter[constant[AUTO_EXTENSION]]] if compare[constant[IS_TEMPORARY] in name[parameter]] begin[:] call[name[parameter]][constant[is_temporary]] assign[=] call[name[parameter].pop, parameter[constant[IS_TEMPORARY]]] if compare[constant[IS_DIRECTORY] in name[parameter]] begin[:] call[name[parameter]][constant[is_directory]] assign[=] call[name[parameter].pop, parameter[constant[IS_DIRECTORY]]] return[name[task_def]]
keyword[def] identifier[taskinfo] ( identifier[self] ): literal[string] identifier[task_input] ={ literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[self] . identifier[_name] }} identifier[info] = identifier[taskengine] . identifier[execute] ( identifier[task_input] , identifier[self] . identifier[_engine] , identifier[cwd] = identifier[self] . identifier[_cwd] ) identifier[task_def] = identifier[info] [ literal[string] ][ literal[string] ] identifier[task_def] [ literal[string] ]= identifier[str] ( identifier[task_def] . identifier[pop] ( literal[string] )) identifier[task_def] [ literal[string] ]= identifier[str] ( identifier[task_def] . identifier[pop] ( literal[string] )) identifier[task_def] [ literal[string] ]= identifier[str] ( identifier[task_def] . identifier[pop] ( literal[string] )) keyword[if] literal[string] keyword[in] identifier[task_def] : identifier[task_def] [ literal[string] ]= identifier[task_def] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[task_def] : identifier[task_def] [ literal[string] ]= identifier[task_def] . identifier[pop] ( literal[string] ) identifier[task_def] [ literal[string] ]=[ identifier[v] keyword[for] identifier[v] keyword[in] identifier[task_def] [ literal[string] ]. identifier[values] ()] identifier[task_def] . identifier[pop] ( literal[string] ) identifier[parameters] = identifier[task_def] [ literal[string] ] keyword[for] identifier[parameter] keyword[in] identifier[parameters] : identifier[parameter] [ literal[string] ]= identifier[str] ( identifier[parameter] . identifier[pop] ( literal[string] )) identifier[parameter] [ literal[string] ]= identifier[str] ( identifier[parameter] . identifier[pop] ( literal[string] )) identifier[parameter] [ literal[string] ]= identifier[str] ( identifier[parameter] . identifier[pop] ( literal[string] )) identifier[parameter] [ literal[string] ]= identifier[bool] ( identifier[parameter] . 
identifier[pop] ( literal[string] )) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] identifier[parameter] [ literal[string] ]. identifier[count] ( literal[string] ): identifier[parameter] [ literal[string] ], identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ). identifier[split] ( literal[string] ) identifier[parameter] [ literal[string] ]= literal[string] + identifier[parameter] [ literal[string] ] identifier[parameter] [ literal[string] ]= identifier[str] ( identifier[parameter] [ literal[string] ]) keyword[else] : identifier[parameter] [ literal[string] ]= identifier[str] ( identifier[parameter] . identifier[pop] ( literal[string] ). identifier[split] ( literal[string] )[ literal[int] ]) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ). identifier[lower] () keyword[if] literal[string] keyword[in] identifier[parameter] : keyword[if] identifier[parameter] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[else] : identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : keyword[if] identifier[parameter] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[parameter] [ literal[string] ]= identifier[parameter] . 
identifier[pop] ( literal[string] ) keyword[else] : identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[parameter] : identifier[parameter] [ literal[string] ]= identifier[parameter] . identifier[pop] ( literal[string] ) keyword[return] identifier[task_def]
def taskinfo(self): """ Retrieve the Task Information """ task_input = {'taskName': 'QueryTask', 'inputParameters': {'Task_Name': self._name}} info = taskengine.execute(task_input, self._engine, cwd=self._cwd) task_def = info['outputParameters']['DEFINITION'] task_def['name'] = str(task_def.pop('NAME')) task_def['description'] = str(task_def.pop('DESCRIPTION')) task_def['displayName'] = str(task_def.pop('DISPLAY_NAME')) if 'COMMUTE_ON_SUBSET' in task_def: task_def['commute_on_subset'] = task_def.pop('COMMUTE_ON_SUBSET') # depends on [control=['if'], data=['task_def']] if 'COMMUTE_ON_DOWNSAMPLE' in task_def: task_def['commute_on_downsample'] = task_def.pop('COMMUTE_ON_DOWNSAMPLE') # depends on [control=['if'], data=['task_def']] # Convert PARAMETERS into a list instead of a dictionary # which matches the gsf side things task_def['parameters'] = [v for v in task_def['PARAMETERS'].values()] task_def.pop('PARAMETERS') parameters = task_def['parameters'] for parameter in parameters: parameter['name'] = str(parameter.pop('NAME')) parameter['description'] = str(parameter.pop('DESCRIPTION')) parameter['display_name'] = str(parameter.pop('DISPLAY_NAME')) parameter['required'] = bool(parameter.pop('REQUIRED')) if 'MIN' in parameter: parameter['min'] = parameter.pop('MIN') # depends on [control=['if'], data=['parameter']] if 'MAX' in parameter: parameter['max'] = parameter.pop('MAX') # depends on [control=['if'], data=['parameter']] if parameter['TYPE'].count('['): (parameter['type'], parameter['dimensions']) = parameter.pop('TYPE').split('[') parameter['dimensions'] = '[' + parameter['dimensions'] parameter['type'] = str(parameter['type']) # depends on [control=['if'], data=[]] else: parameter['type'] = str(parameter.pop('TYPE').split('ARRAY')[0]) if 'DIMENSIONS' in parameter: parameter['dimensions'] = parameter.pop('DIMENSIONS') # depends on [control=['if'], data=['parameter']] if 'DIRECTION' in parameter: parameter['direction'] = parameter.pop('DIRECTION').lower() # 
depends on [control=['if'], data=['parameter']] if 'DEFAULT' in parameter: if parameter['DEFAULT'] is not None: parameter['default_value'] = parameter.pop('DEFAULT') # depends on [control=['if'], data=[]] else: parameter.pop('DEFAULT') # depends on [control=['if'], data=['parameter']] if 'CHOICE_LIST' in parameter: if parameter['CHOICE_LIST'] is not None: parameter['choice_list'] = parameter.pop('CHOICE_LIST') # depends on [control=['if'], data=[]] else: parameter.pop('CHOICE_LIST') # depends on [control=['if'], data=['parameter']] if 'FOLD_CASE' in parameter: parameter['fold_case'] = parameter.pop('FOLD_CASE') # depends on [control=['if'], data=['parameter']] if 'AUTO_EXTENSION' in parameter: parameter['auto_extension'] = parameter.pop('AUTO_EXTENSION') # depends on [control=['if'], data=['parameter']] if 'IS_TEMPORARY' in parameter: parameter['is_temporary'] = parameter.pop('IS_TEMPORARY') # depends on [control=['if'], data=['parameter']] if 'IS_DIRECTORY' in parameter: parameter['is_directory'] = parameter.pop('IS_DIRECTORY') # depends on [control=['if'], data=['parameter']] # depends on [control=['for'], data=['parameter']] return task_def
def create_credit_card_payment(cls, credit_card_payment, **kwargs): """Create CreditCardPayment Create a new CreditCardPayment This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_credit_card_payment(credit_card_payment, async=True) >>> result = thread.get() :param async bool :param CreditCardPayment credit_card_payment: Attributes of creditCardPayment to create (required) :return: CreditCardPayment If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._create_credit_card_payment_with_http_info(credit_card_payment, **kwargs) else: (data) = cls._create_credit_card_payment_with_http_info(credit_card_payment, **kwargs) return data
def function[create_credit_card_payment, parameter[cls, credit_card_payment]]: constant[Create CreditCardPayment Create a new CreditCardPayment This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_credit_card_payment(credit_card_payment, async=True) >>> result = thread.get() :param async bool :param CreditCardPayment credit_card_payment: Attributes of creditCardPayment to create (required) :return: CreditCardPayment If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async]]] begin[:] return[call[name[cls]._create_credit_card_payment_with_http_info, parameter[name[credit_card_payment]]]]
keyword[def] identifier[create_credit_card_payment] ( identifier[cls] , identifier[credit_card_payment] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[cls] . identifier[_create_credit_card_payment_with_http_info] ( identifier[credit_card_payment] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[cls] . identifier[_create_credit_card_payment_with_http_info] ( identifier[credit_card_payment] ,** identifier[kwargs] ) keyword[return] identifier[data]
def create_credit_card_payment(cls, credit_card_payment, **kwargs): """Create CreditCardPayment Create a new CreditCardPayment This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_credit_card_payment(credit_card_payment, async=True) >>> result = thread.get() :param async bool :param CreditCardPayment credit_card_payment: Attributes of creditCardPayment to create (required) :return: CreditCardPayment If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._create_credit_card_payment_with_http_info(credit_card_payment, **kwargs) # depends on [control=['if'], data=[]] else: data = cls._create_credit_card_payment_with_http_info(credit_card_payment, **kwargs) return data
def callRemote(self, methodName, *args, **kwargs): """ Calls the remote method and returns a Deferred instance to the result. DBus does not support passing keyword arguments over the wire. The keyword arguments accepted by this method alter the behavior of the remote call as described in the kwargs prameter description. @type methodName: C{string} @param methodName: Name of the method to call @param args: Positional arguments to be passed to the remote method @param kwargs: Three keyword parameters may be passed to alter the behavior of the remote method call. If \"expectReply=False\" is supplied, the returned Deferred will be immediately called back with the value None. If \"autoStart=False\" is supplied the DBus daemon will not attempt to auto-start a service to fulfill the call if the service is not yet running (defaults to True). If \"timeout=VALUE\" is supplied, the returned Deferred will be errbacked with a L{error.TimeOut} instance if the remote call does not return before the timeout elapses. If \"interface\" is specified, the remote call use the method of the named interface. @rtype: L{twisted.internet.defer.Deferred} @returns: a Deferred to the result of the remote call """ expectReply = kwargs.get('expectReply', True) autoStart = kwargs.get('autoStart', True) timeout = kwargs.get('timeout', None) interface = kwargs.get('interface', None) m = None for i in self.interfaces: if interface and not interface == i.name: continue m = i.methods.get(methodName, None) if m: break if m is None: raise AttributeError( 'Requested method "%s" is not a member of any of the ' 'supported interfaces' % (methodName,), ) if len(args) != m.nargs: raise TypeError( '%s.%s takes %d arguments (%d given)' % (i.name, methodName, m.nargs, len(args)), ) return self.objHandler.conn.callRemote( self.objectPath, methodName, interface=i.name, destination=self.busName, signature=m.sigIn, body=args, expectReply=expectReply, autoStart=autoStart, timeout=timeout, returnSignature=m.sigOut, )
def function[callRemote, parameter[self, methodName]]: constant[ Calls the remote method and returns a Deferred instance to the result. DBus does not support passing keyword arguments over the wire. The keyword arguments accepted by this method alter the behavior of the remote call as described in the kwargs prameter description. @type methodName: C{string} @param methodName: Name of the method to call @param args: Positional arguments to be passed to the remote method @param kwargs: Three keyword parameters may be passed to alter the behavior of the remote method call. If "expectReply=False" is supplied, the returned Deferred will be immediately called back with the value None. If "autoStart=False" is supplied the DBus daemon will not attempt to auto-start a service to fulfill the call if the service is not yet running (defaults to True). If "timeout=VALUE" is supplied, the returned Deferred will be errbacked with a L{error.TimeOut} instance if the remote call does not return before the timeout elapses. If "interface" is specified, the remote call use the method of the named interface. 
@rtype: L{twisted.internet.defer.Deferred} @returns: a Deferred to the result of the remote call ] variable[expectReply] assign[=] call[name[kwargs].get, parameter[constant[expectReply], constant[True]]] variable[autoStart] assign[=] call[name[kwargs].get, parameter[constant[autoStart], constant[True]]] variable[timeout] assign[=] call[name[kwargs].get, parameter[constant[timeout], constant[None]]] variable[interface] assign[=] call[name[kwargs].get, parameter[constant[interface], constant[None]]] variable[m] assign[=] constant[None] for taget[name[i]] in starred[name[self].interfaces] begin[:] if <ast.BoolOp object at 0x7da1b06cdb70> begin[:] continue variable[m] assign[=] call[name[i].methods.get, parameter[name[methodName], constant[None]]] if name[m] begin[:] break if compare[name[m] is constant[None]] begin[:] <ast.Raise object at 0x7da1b06ce4a0> if compare[call[name[len], parameter[name[args]]] not_equal[!=] name[m].nargs] begin[:] <ast.Raise object at 0x7da1b06ce560> return[call[name[self].objHandler.conn.callRemote, parameter[name[self].objectPath, name[methodName]]]]
keyword[def] identifier[callRemote] ( identifier[self] , identifier[methodName] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[expectReply] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ) identifier[autoStart] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ) identifier[timeout] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[interface] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[m] = keyword[None] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[interfaces] : keyword[if] identifier[interface] keyword[and] keyword[not] identifier[interface] == identifier[i] . identifier[name] : keyword[continue] identifier[m] = identifier[i] . identifier[methods] . identifier[get] ( identifier[methodName] , keyword[None] ) keyword[if] identifier[m] : keyword[break] keyword[if] identifier[m] keyword[is] keyword[None] : keyword[raise] identifier[AttributeError] ( literal[string] literal[string] %( identifier[methodName] ,), ) keyword[if] identifier[len] ( identifier[args] )!= identifier[m] . identifier[nargs] : keyword[raise] identifier[TypeError] ( literal[string] % ( identifier[i] . identifier[name] , identifier[methodName] , identifier[m] . identifier[nargs] , identifier[len] ( identifier[args] )), ) keyword[return] identifier[self] . identifier[objHandler] . identifier[conn] . identifier[callRemote] ( identifier[self] . identifier[objectPath] , identifier[methodName] , identifier[interface] = identifier[i] . identifier[name] , identifier[destination] = identifier[self] . identifier[busName] , identifier[signature] = identifier[m] . identifier[sigIn] , identifier[body] = identifier[args] , identifier[expectReply] = identifier[expectReply] , identifier[autoStart] = identifier[autoStart] , identifier[timeout] = identifier[timeout] , identifier[returnSignature] = identifier[m] . identifier[sigOut] , )
def callRemote(self, methodName, *args, **kwargs): """ Calls the remote method and returns a Deferred instance to the result. DBus does not support passing keyword arguments over the wire. The keyword arguments accepted by this method alter the behavior of the remote call as described in the kwargs prameter description. @type methodName: C{string} @param methodName: Name of the method to call @param args: Positional arguments to be passed to the remote method @param kwargs: Three keyword parameters may be passed to alter the behavior of the remote method call. If "expectReply=False" is supplied, the returned Deferred will be immediately called back with the value None. If "autoStart=False" is supplied the DBus daemon will not attempt to auto-start a service to fulfill the call if the service is not yet running (defaults to True). If "timeout=VALUE" is supplied, the returned Deferred will be errbacked with a L{error.TimeOut} instance if the remote call does not return before the timeout elapses. If "interface" is specified, the remote call use the method of the named interface. 
@rtype: L{twisted.internet.defer.Deferred} @returns: a Deferred to the result of the remote call """ expectReply = kwargs.get('expectReply', True) autoStart = kwargs.get('autoStart', True) timeout = kwargs.get('timeout', None) interface = kwargs.get('interface', None) m = None for i in self.interfaces: if interface and (not interface == i.name): continue # depends on [control=['if'], data=[]] m = i.methods.get(methodName, None) if m: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] if m is None: raise AttributeError('Requested method "%s" is not a member of any of the supported interfaces' % (methodName,)) # depends on [control=['if'], data=[]] if len(args) != m.nargs: raise TypeError('%s.%s takes %d arguments (%d given)' % (i.name, methodName, m.nargs, len(args))) # depends on [control=['if'], data=[]] return self.objHandler.conn.callRemote(self.objectPath, methodName, interface=i.name, destination=self.busName, signature=m.sigIn, body=args, expectReply=expectReply, autoStart=autoStart, timeout=timeout, returnSignature=m.sigOut)
def collect(self): """ Overrides the Collector.collect method """ # Handle collection time intervals correctly CollectTime = int(time.time()) time_delta = float(self.config['interval']) if not self.LastCollectTime: self.LastCollectTime = CollectTime - time_delta host = self.config['host'] port = self.config['port'] celerymon_url = "http://%s:%s/api/task/?since=%i" % ( host, port, self.LastCollectTime) response = urllib2.urlopen(celerymon_url) body = response.read() celery_data = json.loads(body) results = dict() total_messages = 0 for data in celery_data: name = str(data[1]['name']) if name not in results: results[name] = dict() state = str(data[1]['state']) if state not in results[name]: results[name][state] = 1 else: results[name][state] += 1 total_messages += 1 # Publish Metric self.publish('total_messages', total_messages) for result in results: for state in results[result]: metric_value = results[result][state] metric_name = "%s.%s" % (result, state) self.publish(metric_name, metric_value) self.LastCollectTime = CollectTime
def function[collect, parameter[self]]: constant[ Overrides the Collector.collect method ] variable[CollectTime] assign[=] call[name[int], parameter[call[name[time].time, parameter[]]]] variable[time_delta] assign[=] call[name[float], parameter[call[name[self].config][constant[interval]]]] if <ast.UnaryOp object at 0x7da18f723b20> begin[:] name[self].LastCollectTime assign[=] binary_operation[name[CollectTime] - name[time_delta]] variable[host] assign[=] call[name[self].config][constant[host]] variable[port] assign[=] call[name[self].config][constant[port]] variable[celerymon_url] assign[=] binary_operation[constant[http://%s:%s/api/task/?since=%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f722c80>, <ast.Name object at 0x7da18f723400>, <ast.Attribute object at 0x7da18f7207c0>]]] variable[response] assign[=] call[name[urllib2].urlopen, parameter[name[celerymon_url]]] variable[body] assign[=] call[name[response].read, parameter[]] variable[celery_data] assign[=] call[name[json].loads, parameter[name[body]]] variable[results] assign[=] call[name[dict], parameter[]] variable[total_messages] assign[=] constant[0] for taget[name[data]] in starred[name[celery_data]] begin[:] variable[name] assign[=] call[name[str], parameter[call[call[name[data]][constant[1]]][constant[name]]]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[results]] begin[:] call[name[results]][name[name]] assign[=] call[name[dict], parameter[]] variable[state] assign[=] call[name[str], parameter[call[call[name[data]][constant[1]]][constant[state]]]] if compare[name[state] <ast.NotIn object at 0x7da2590d7190> call[name[results]][name[name]]] begin[:] call[call[name[results]][name[name]]][name[state]] assign[=] constant[1] <ast.AugAssign object at 0x7da1b170c460> call[name[self].publish, parameter[constant[total_messages], name[total_messages]]] for taget[name[result]] in starred[name[results]] begin[:] for taget[name[state]] in 
starred[call[name[results]][name[result]]] begin[:] variable[metric_value] assign[=] call[call[name[results]][name[result]]][name[state]] variable[metric_name] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b170c760>, <ast.Name object at 0x7da1b170d210>]]] call[name[self].publish, parameter[name[metric_name], name[metric_value]]] name[self].LastCollectTime assign[=] name[CollectTime]
keyword[def] identifier[collect] ( identifier[self] ): literal[string] identifier[CollectTime] = identifier[int] ( identifier[time] . identifier[time] ()) identifier[time_delta] = identifier[float] ( identifier[self] . identifier[config] [ literal[string] ]) keyword[if] keyword[not] identifier[self] . identifier[LastCollectTime] : identifier[self] . identifier[LastCollectTime] = identifier[CollectTime] - identifier[time_delta] identifier[host] = identifier[self] . identifier[config] [ literal[string] ] identifier[port] = identifier[self] . identifier[config] [ literal[string] ] identifier[celerymon_url] = literal[string] %( identifier[host] , identifier[port] , identifier[self] . identifier[LastCollectTime] ) identifier[response] = identifier[urllib2] . identifier[urlopen] ( identifier[celerymon_url] ) identifier[body] = identifier[response] . identifier[read] () identifier[celery_data] = identifier[json] . identifier[loads] ( identifier[body] ) identifier[results] = identifier[dict] () identifier[total_messages] = literal[int] keyword[for] identifier[data] keyword[in] identifier[celery_data] : identifier[name] = identifier[str] ( identifier[data] [ literal[int] ][ literal[string] ]) keyword[if] identifier[name] keyword[not] keyword[in] identifier[results] : identifier[results] [ identifier[name] ]= identifier[dict] () identifier[state] = identifier[str] ( identifier[data] [ literal[int] ][ literal[string] ]) keyword[if] identifier[state] keyword[not] keyword[in] identifier[results] [ identifier[name] ]: identifier[results] [ identifier[name] ][ identifier[state] ]= literal[int] keyword[else] : identifier[results] [ identifier[name] ][ identifier[state] ]+= literal[int] identifier[total_messages] += literal[int] identifier[self] . 
identifier[publish] ( literal[string] , identifier[total_messages] ) keyword[for] identifier[result] keyword[in] identifier[results] : keyword[for] identifier[state] keyword[in] identifier[results] [ identifier[result] ]: identifier[metric_value] = identifier[results] [ identifier[result] ][ identifier[state] ] identifier[metric_name] = literal[string] %( identifier[result] , identifier[state] ) identifier[self] . identifier[publish] ( identifier[metric_name] , identifier[metric_value] ) identifier[self] . identifier[LastCollectTime] = identifier[CollectTime]
def collect(self): """ Overrides the Collector.collect method """ # Handle collection time intervals correctly CollectTime = int(time.time()) time_delta = float(self.config['interval']) if not self.LastCollectTime: self.LastCollectTime = CollectTime - time_delta # depends on [control=['if'], data=[]] host = self.config['host'] port = self.config['port'] celerymon_url = 'http://%s:%s/api/task/?since=%i' % (host, port, self.LastCollectTime) response = urllib2.urlopen(celerymon_url) body = response.read() celery_data = json.loads(body) results = dict() total_messages = 0 for data in celery_data: name = str(data[1]['name']) if name not in results: results[name] = dict() # depends on [control=['if'], data=['name', 'results']] state = str(data[1]['state']) if state not in results[name]: results[name][state] = 1 # depends on [control=['if'], data=['state']] else: results[name][state] += 1 total_messages += 1 # depends on [control=['for'], data=['data']] # Publish Metric self.publish('total_messages', total_messages) for result in results: for state in results[result]: metric_value = results[result][state] metric_name = '%s.%s' % (result, state) self.publish(metric_name, metric_value) # depends on [control=['for'], data=['state']] # depends on [control=['for'], data=['result']] self.LastCollectTime = CollectTime
def endure_multi(self, keys, persist_to=-1, replicate_to=-1, timeout=5.0, interval=0.010, check_removed=False): """Check durability requirements for multiple keys :param keys: The keys to check The type of keys may be one of the following: * Sequence of keys * A :class:`~couchbase.result.MultiResult` object * A ``dict`` with CAS values as the dictionary value * A sequence of :class:`~couchbase.result.Result` objects :return: A :class:`~.MultiResult` object of :class:`~.OperationResult` items. .. seealso:: :meth:`endure` """ return _Base.endure_multi(self, keys, persist_to=persist_to, replicate_to=replicate_to, timeout=timeout, interval=interval, check_removed=check_removed)
def function[endure_multi, parameter[self, keys, persist_to, replicate_to, timeout, interval, check_removed]]: constant[Check durability requirements for multiple keys :param keys: The keys to check The type of keys may be one of the following: * Sequence of keys * A :class:`~couchbase.result.MultiResult` object * A ``dict`` with CAS values as the dictionary value * A sequence of :class:`~couchbase.result.Result` objects :return: A :class:`~.MultiResult` object of :class:`~.OperationResult` items. .. seealso:: :meth:`endure` ] return[call[name[_Base].endure_multi, parameter[name[self], name[keys]]]]
keyword[def] identifier[endure_multi] ( identifier[self] , identifier[keys] , identifier[persist_to] =- literal[int] , identifier[replicate_to] =- literal[int] , identifier[timeout] = literal[int] , identifier[interval] = literal[int] , identifier[check_removed] = keyword[False] ): literal[string] keyword[return] identifier[_Base] . identifier[endure_multi] ( identifier[self] , identifier[keys] , identifier[persist_to] = identifier[persist_to] , identifier[replicate_to] = identifier[replicate_to] , identifier[timeout] = identifier[timeout] , identifier[interval] = identifier[interval] , identifier[check_removed] = identifier[check_removed] )
def endure_multi(self, keys, persist_to=-1, replicate_to=-1, timeout=5.0, interval=0.01, check_removed=False): """Check durability requirements for multiple keys :param keys: The keys to check The type of keys may be one of the following: * Sequence of keys * A :class:`~couchbase.result.MultiResult` object * A ``dict`` with CAS values as the dictionary value * A sequence of :class:`~couchbase.result.Result` objects :return: A :class:`~.MultiResult` object of :class:`~.OperationResult` items. .. seealso:: :meth:`endure` """ return _Base.endure_multi(self, keys, persist_to=persist_to, replicate_to=replicate_to, timeout=timeout, interval=interval, check_removed=check_removed)
def plot(self, grid=None, size=256, limits=None, square=False, center=None, weight=None, weight_stat="mean", figsize=None, aspect="auto", f="identity", axes=None, xlabel=None, ylabel=None, group_by=None, group_limits=None, group_colors='jet', group_labels=None, group_count=None, vmin=None, vmax=None, cmap="afmhot", **kwargs): """Plot the subspace using sane defaults to get a quick look at the data. :param grid: A 2d numpy array with the counts, if None it will be calculated using limits provided and Subspace.histogram :param size: Passed to Subspace.histogram :param limits: Limits for the subspace in the form [[xmin, xmax], [ymin, ymax]], if None it will be calculated using Subspace.limits_sigma :param square: argument passed to Subspace.limits_sigma :param Executor executor: responsible for executing the tasks :param figsize: (x, y) tuple passed to pylab.figure for setting the figure size :param aspect: Passed to matplotlib's axes.set_aspect :param xlabel: String for label on x axis (may contain latex) :param ylabel: Same for y axis :param kwargs: extra argument passed to axes.imshow, useful for setting the colormap for instance, e.g. 
cmap='afmhot' :return: matplotlib.image.AxesImage """ import pylab f = _parse_f(f) limits = self.limits(limits) if limits is None: limits = self.limits_sigma() # if grid is None: if group_limits is None and group_by: group_limits = tuple(self.df(group_by).minmax()[0]) + (group_count,) # grid = self.histogram(limits=limits, size=size, weight=weight, group_limits=group_limits, group_by=group_by) if figsize is not None: pylab.figure(num=None, figsize=figsize, dpi=80, facecolor='w', edgecolor='k') if axes is None: axes = pylab.gca() fig = pylab.gcf() # if xlabel: pylab.xlabel(xlabel or self.expressions[0]) # if ylabel: pylab.ylabel(ylabel or self.expressions[1]) # axes.set_aspect(aspect) rgba8 = self.image_rgba(grid=grid, size=size, limits=limits, square=square, center=center, weight=weight, weight_stat=weight_stat, f=f, axes=axes, group_by=group_by, group_limits=group_limits, group_colors=group_colors, group_count=group_count, vmin=vmin, vmax=vmax, cmap=cmap) import matplotlib if group_by: if isinstance(group_colors, six.string_types): group_colors = matplotlib.cm.get_cmap(group_colors) if isinstance(group_colors, matplotlib.colors.Colormap): group_count = group_limits[2] colors = [group_colors(k / float(group_count - 1.)) for k in range(group_count)] else: colors = [matplotlib.colors.colorConverter.to_rgba(k) for k in group_colors] colormap = matplotlib.colors.ListedColormap(colors) gmin, gmax, group_count = group_limits # [:2] delta = (gmax - gmin) / (group_count - 1.) 
norm = matplotlib.colors.Normalize(gmin - delta / 2, gmax + delta / 2) sm = matplotlib.cm.ScalarMappable(norm, colormap) sm.set_array(1) # make matplotlib happy (strange behavious) colorbar = fig.colorbar(sm) if group_labels: colorbar.set_ticks(np.arange(gmin, gmax + delta / 2, delta)) colorbar.set_ticklabels(group_labels) else: colorbar.set_ticks(np.arange(gmin, gmax + delta / 2, delta)) colorbar.set_ticklabels(map(lambda x: "%f" % x, np.arange(gmin, gmax + delta / 2, delta))) colorbar.ax.set_ylabel(group_by) # matplotlib.colorbar.ColorbarBase(axes, norm=norm, cmap=colormap) im = axes.imshow(rgba8, extent=np.array(limits).flatten(), origin="lower", aspect=aspect, **kwargs) else: norm = matplotlib.colors.Normalize(0, 23) sm = matplotlib.cm.ScalarMappable(norm, cmap) sm.set_array(1) # make matplotlib happy (strange behavious) colorbar = fig.colorbar(sm) im = axes.imshow(rgba8, extent=np.array(limits).flatten(), origin="lower", aspect=aspect, **kwargs) colorbar = None return im, colorbar
def function[plot, parameter[self, grid, size, limits, square, center, weight, weight_stat, figsize, aspect, f, axes, xlabel, ylabel, group_by, group_limits, group_colors, group_labels, group_count, vmin, vmax, cmap]]: constant[Plot the subspace using sane defaults to get a quick look at the data. :param grid: A 2d numpy array with the counts, if None it will be calculated using limits provided and Subspace.histogram :param size: Passed to Subspace.histogram :param limits: Limits for the subspace in the form [[xmin, xmax], [ymin, ymax]], if None it will be calculated using Subspace.limits_sigma :param square: argument passed to Subspace.limits_sigma :param Executor executor: responsible for executing the tasks :param figsize: (x, y) tuple passed to pylab.figure for setting the figure size :param aspect: Passed to matplotlib's axes.set_aspect :param xlabel: String for label on x axis (may contain latex) :param ylabel: Same for y axis :param kwargs: extra argument passed to axes.imshow, useful for setting the colormap for instance, e.g. 
cmap='afmhot' :return: matplotlib.image.AxesImage ] import module[pylab] variable[f] assign[=] call[name[_parse_f], parameter[name[f]]] variable[limits] assign[=] call[name[self].limits, parameter[name[limits]]] if compare[name[limits] is constant[None]] begin[:] variable[limits] assign[=] call[name[self].limits_sigma, parameter[]] if <ast.BoolOp object at 0x7da18f722860> begin[:] variable[group_limits] assign[=] binary_operation[call[name[tuple], parameter[call[call[call[name[self].df, parameter[name[group_by]]].minmax, parameter[]]][constant[0]]]] + tuple[[<ast.Name object at 0x7da18f7202e0>]]] if compare[name[figsize] is_not constant[None]] begin[:] call[name[pylab].figure, parameter[]] if compare[name[axes] is constant[None]] begin[:] variable[axes] assign[=] call[name[pylab].gca, parameter[]] variable[fig] assign[=] call[name[pylab].gcf, parameter[]] call[name[pylab].xlabel, parameter[<ast.BoolOp object at 0x7da18f813ac0>]] call[name[pylab].ylabel, parameter[<ast.BoolOp object at 0x7da18f812950>]] variable[rgba8] assign[=] call[name[self].image_rgba, parameter[]] import module[matplotlib] if name[group_by] begin[:] if call[name[isinstance], parameter[name[group_colors], name[six].string_types]] begin[:] variable[group_colors] assign[=] call[name[matplotlib].cm.get_cmap, parameter[name[group_colors]]] if call[name[isinstance], parameter[name[group_colors], name[matplotlib].colors.Colormap]] begin[:] variable[group_count] assign[=] call[name[group_limits]][constant[2]] variable[colors] assign[=] <ast.ListComp object at 0x7da18f812620> variable[colormap] assign[=] call[name[matplotlib].colors.ListedColormap, parameter[name[colors]]] <ast.Tuple object at 0x7da18f812020> assign[=] name[group_limits] variable[delta] assign[=] binary_operation[binary_operation[name[gmax] - name[gmin]] / binary_operation[name[group_count] - constant[1.0]]] variable[norm] assign[=] call[name[matplotlib].colors.Normalize, parameter[binary_operation[name[gmin] - 
binary_operation[name[delta] / constant[2]]], binary_operation[name[gmax] + binary_operation[name[delta] / constant[2]]]]] variable[sm] assign[=] call[name[matplotlib].cm.ScalarMappable, parameter[name[norm], name[colormap]]] call[name[sm].set_array, parameter[constant[1]]] variable[colorbar] assign[=] call[name[fig].colorbar, parameter[name[sm]]] if name[group_labels] begin[:] call[name[colorbar].set_ticks, parameter[call[name[np].arange, parameter[name[gmin], binary_operation[name[gmax] + binary_operation[name[delta] / constant[2]]], name[delta]]]]] call[name[colorbar].set_ticklabels, parameter[name[group_labels]]] call[name[colorbar].ax.set_ylabel, parameter[name[group_by]]] variable[im] assign[=] call[name[axes].imshow, parameter[name[rgba8]]] return[tuple[[<ast.Name object at 0x7da18dc06c80>, <ast.Name object at 0x7da18dc07f70>]]]
keyword[def] identifier[plot] ( identifier[self] , identifier[grid] = keyword[None] , identifier[size] = literal[int] , identifier[limits] = keyword[None] , identifier[square] = keyword[False] , identifier[center] = keyword[None] , identifier[weight] = keyword[None] , identifier[weight_stat] = literal[string] , identifier[figsize] = keyword[None] , identifier[aspect] = literal[string] , identifier[f] = literal[string] , identifier[axes] = keyword[None] , identifier[xlabel] = keyword[None] , identifier[ylabel] = keyword[None] , identifier[group_by] = keyword[None] , identifier[group_limits] = keyword[None] , identifier[group_colors] = literal[string] , identifier[group_labels] = keyword[None] , identifier[group_count] = keyword[None] , identifier[vmin] = keyword[None] , identifier[vmax] = keyword[None] , identifier[cmap] = literal[string] , ** identifier[kwargs] ): literal[string] keyword[import] identifier[pylab] identifier[f] = identifier[_parse_f] ( identifier[f] ) identifier[limits] = identifier[self] . identifier[limits] ( identifier[limits] ) keyword[if] identifier[limits] keyword[is] keyword[None] : identifier[limits] = identifier[self] . identifier[limits_sigma] () keyword[if] identifier[group_limits] keyword[is] keyword[None] keyword[and] identifier[group_by] : identifier[group_limits] = identifier[tuple] ( identifier[self] . identifier[df] ( identifier[group_by] ). identifier[minmax] ()[ literal[int] ])+( identifier[group_count] ,) keyword[if] identifier[figsize] keyword[is] keyword[not] keyword[None] : identifier[pylab] . identifier[figure] ( identifier[num] = keyword[None] , identifier[figsize] = identifier[figsize] , identifier[dpi] = literal[int] , identifier[facecolor] = literal[string] , identifier[edgecolor] = literal[string] ) keyword[if] identifier[axes] keyword[is] keyword[None] : identifier[axes] = identifier[pylab] . identifier[gca] () identifier[fig] = identifier[pylab] . identifier[gcf] () identifier[pylab] . 
identifier[xlabel] ( identifier[xlabel] keyword[or] identifier[self] . identifier[expressions] [ literal[int] ]) identifier[pylab] . identifier[ylabel] ( identifier[ylabel] keyword[or] identifier[self] . identifier[expressions] [ literal[int] ]) identifier[rgba8] = identifier[self] . identifier[image_rgba] ( identifier[grid] = identifier[grid] , identifier[size] = identifier[size] , identifier[limits] = identifier[limits] , identifier[square] = identifier[square] , identifier[center] = identifier[center] , identifier[weight] = identifier[weight] , identifier[weight_stat] = identifier[weight_stat] , identifier[f] = identifier[f] , identifier[axes] = identifier[axes] , identifier[group_by] = identifier[group_by] , identifier[group_limits] = identifier[group_limits] , identifier[group_colors] = identifier[group_colors] , identifier[group_count] = identifier[group_count] , identifier[vmin] = identifier[vmin] , identifier[vmax] = identifier[vmax] , identifier[cmap] = identifier[cmap] ) keyword[import] identifier[matplotlib] keyword[if] identifier[group_by] : keyword[if] identifier[isinstance] ( identifier[group_colors] , identifier[six] . identifier[string_types] ): identifier[group_colors] = identifier[matplotlib] . identifier[cm] . identifier[get_cmap] ( identifier[group_colors] ) keyword[if] identifier[isinstance] ( identifier[group_colors] , identifier[matplotlib] . identifier[colors] . identifier[Colormap] ): identifier[group_count] = identifier[group_limits] [ literal[int] ] identifier[colors] =[ identifier[group_colors] ( identifier[k] / identifier[float] ( identifier[group_count] - literal[int] )) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[group_count] )] keyword[else] : identifier[colors] =[ identifier[matplotlib] . identifier[colors] . identifier[colorConverter] . identifier[to_rgba] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[group_colors] ] identifier[colormap] = identifier[matplotlib] . identifier[colors] . 
identifier[ListedColormap] ( identifier[colors] ) identifier[gmin] , identifier[gmax] , identifier[group_count] = identifier[group_limits] identifier[delta] =( identifier[gmax] - identifier[gmin] )/( identifier[group_count] - literal[int] ) identifier[norm] = identifier[matplotlib] . identifier[colors] . identifier[Normalize] ( identifier[gmin] - identifier[delta] / literal[int] , identifier[gmax] + identifier[delta] / literal[int] ) identifier[sm] = identifier[matplotlib] . identifier[cm] . identifier[ScalarMappable] ( identifier[norm] , identifier[colormap] ) identifier[sm] . identifier[set_array] ( literal[int] ) identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] ) keyword[if] identifier[group_labels] : identifier[colorbar] . identifier[set_ticks] ( identifier[np] . identifier[arange] ( identifier[gmin] , identifier[gmax] + identifier[delta] / literal[int] , identifier[delta] )) identifier[colorbar] . identifier[set_ticklabels] ( identifier[group_labels] ) keyword[else] : identifier[colorbar] . identifier[set_ticks] ( identifier[np] . identifier[arange] ( identifier[gmin] , identifier[gmax] + identifier[delta] / literal[int] , identifier[delta] )) identifier[colorbar] . identifier[set_ticklabels] ( identifier[map] ( keyword[lambda] identifier[x] : literal[string] % identifier[x] , identifier[np] . identifier[arange] ( identifier[gmin] , identifier[gmax] + identifier[delta] / literal[int] , identifier[delta] ))) identifier[colorbar] . identifier[ax] . identifier[set_ylabel] ( identifier[group_by] ) identifier[im] = identifier[axes] . identifier[imshow] ( identifier[rgba8] , identifier[extent] = identifier[np] . identifier[array] ( identifier[limits] ). identifier[flatten] (), identifier[origin] = literal[string] , identifier[aspect] = identifier[aspect] ,** identifier[kwargs] ) keyword[else] : identifier[norm] = identifier[matplotlib] . identifier[colors] . 
identifier[Normalize] ( literal[int] , literal[int] ) identifier[sm] = identifier[matplotlib] . identifier[cm] . identifier[ScalarMappable] ( identifier[norm] , identifier[cmap] ) identifier[sm] . identifier[set_array] ( literal[int] ) identifier[colorbar] = identifier[fig] . identifier[colorbar] ( identifier[sm] ) identifier[im] = identifier[axes] . identifier[imshow] ( identifier[rgba8] , identifier[extent] = identifier[np] . identifier[array] ( identifier[limits] ). identifier[flatten] (), identifier[origin] = literal[string] , identifier[aspect] = identifier[aspect] ,** identifier[kwargs] ) identifier[colorbar] = keyword[None] keyword[return] identifier[im] , identifier[colorbar]
def plot(self, grid=None, size=256, limits=None, square=False, center=None, weight=None, weight_stat='mean', figsize=None, aspect='auto', f='identity', axes=None, xlabel=None, ylabel=None, group_by=None, group_limits=None, group_colors='jet', group_labels=None, group_count=None, vmin=None, vmax=None, cmap='afmhot', **kwargs): """Plot the subspace using sane defaults to get a quick look at the data. :param grid: A 2d numpy array with the counts, if None it will be calculated using limits provided and Subspace.histogram :param size: Passed to Subspace.histogram :param limits: Limits for the subspace in the form [[xmin, xmax], [ymin, ymax]], if None it will be calculated using Subspace.limits_sigma :param square: argument passed to Subspace.limits_sigma :param Executor executor: responsible for executing the tasks :param figsize: (x, y) tuple passed to pylab.figure for setting the figure size :param aspect: Passed to matplotlib's axes.set_aspect :param xlabel: String for label on x axis (may contain latex) :param ylabel: Same for y axis :param kwargs: extra argument passed to axes.imshow, useful for setting the colormap for instance, e.g. 
cmap='afmhot' :return: matplotlib.image.AxesImage """ import pylab f = _parse_f(f) limits = self.limits(limits) if limits is None: limits = self.limits_sigma() # depends on [control=['if'], data=['limits']] # if grid is None: if group_limits is None and group_by: group_limits = tuple(self.df(group_by).minmax()[0]) + (group_count,) # depends on [control=['if'], data=[]] # grid = self.histogram(limits=limits, size=size, weight=weight, group_limits=group_limits, group_by=group_by) if figsize is not None: pylab.figure(num=None, figsize=figsize, dpi=80, facecolor='w', edgecolor='k') # depends on [control=['if'], data=['figsize']] if axes is None: axes = pylab.gca() # depends on [control=['if'], data=['axes']] fig = pylab.gcf() # if xlabel: pylab.xlabel(xlabel or self.expressions[0]) # if ylabel: pylab.ylabel(ylabel or self.expressions[1]) # axes.set_aspect(aspect) rgba8 = self.image_rgba(grid=grid, size=size, limits=limits, square=square, center=center, weight=weight, weight_stat=weight_stat, f=f, axes=axes, group_by=group_by, group_limits=group_limits, group_colors=group_colors, group_count=group_count, vmin=vmin, vmax=vmax, cmap=cmap) import matplotlib if group_by: if isinstance(group_colors, six.string_types): group_colors = matplotlib.cm.get_cmap(group_colors) # depends on [control=['if'], data=[]] if isinstance(group_colors, matplotlib.colors.Colormap): group_count = group_limits[2] colors = [group_colors(k / float(group_count - 1.0)) for k in range(group_count)] # depends on [control=['if'], data=[]] else: colors = [matplotlib.colors.colorConverter.to_rgba(k) for k in group_colors] colormap = matplotlib.colors.ListedColormap(colors) (gmin, gmax, group_count) = group_limits # [:2] delta = (gmax - gmin) / (group_count - 1.0) norm = matplotlib.colors.Normalize(gmin - delta / 2, gmax + delta / 2) sm = matplotlib.cm.ScalarMappable(norm, colormap) sm.set_array(1) # make matplotlib happy (strange behavious) colorbar = fig.colorbar(sm) if group_labels: 
colorbar.set_ticks(np.arange(gmin, gmax + delta / 2, delta)) colorbar.set_ticklabels(group_labels) # depends on [control=['if'], data=[]] else: colorbar.set_ticks(np.arange(gmin, gmax + delta / 2, delta)) colorbar.set_ticklabels(map(lambda x: '%f' % x, np.arange(gmin, gmax + delta / 2, delta))) colorbar.ax.set_ylabel(group_by) # matplotlib.colorbar.ColorbarBase(axes, norm=norm, cmap=colormap) im = axes.imshow(rgba8, extent=np.array(limits).flatten(), origin='lower', aspect=aspect, **kwargs) # depends on [control=['if'], data=[]] else: norm = matplotlib.colors.Normalize(0, 23) sm = matplotlib.cm.ScalarMappable(norm, cmap) sm.set_array(1) # make matplotlib happy (strange behavious) colorbar = fig.colorbar(sm) im = axes.imshow(rgba8, extent=np.array(limits).flatten(), origin='lower', aspect=aspect, **kwargs) colorbar = None return (im, colorbar)
def stations(self, station, limit=10): """ Find stations for given queries Args: station (str): search query limit (int): limit number of results """ query = { 'start': 1, 'S': station + '?', 'REQ0JourneyStopsB': limit } rsp = requests.get('http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn', params=query) return parse_stations(rsp.text)
def function[stations, parameter[self, station, limit]]: constant[ Find stations for given queries Args: station (str): search query limit (int): limit number of results ] variable[query] assign[=] dictionary[[<ast.Constant object at 0x7da2054a5f00>, <ast.Constant object at 0x7da2054a67a0>, <ast.Constant object at 0x7da2054a5660>], [<ast.Constant object at 0x7da2054a6b60>, <ast.BinOp object at 0x7da2054a6cb0>, <ast.Name object at 0x7da2054a5300>]] variable[rsp] assign[=] call[name[requests].get, parameter[constant[http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn]]] return[call[name[parse_stations], parameter[name[rsp].text]]]
keyword[def] identifier[stations] ( identifier[self] , identifier[station] , identifier[limit] = literal[int] ): literal[string] identifier[query] ={ literal[string] : literal[int] , literal[string] : identifier[station] + literal[string] , literal[string] : identifier[limit] } identifier[rsp] = identifier[requests] . identifier[get] ( literal[string] , identifier[params] = identifier[query] ) keyword[return] identifier[parse_stations] ( identifier[rsp] . identifier[text] )
def stations(self, station, limit=10): """ Find stations for given queries Args: station (str): search query limit (int): limit number of results """ query = {'start': 1, 'S': station + '?', 'REQ0JourneyStopsB': limit} rsp = requests.get('http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn', params=query) return parse_stations(rsp.text)
def set_layer(self, layer, keywords=None): """Set layer and update UI accordingly. :param layer: A vector layer that has been already patched with metadata. :type layer: QgsVectorLayer :param keywords: Custom keyword for the layer. :type keywords: dict, None """ self.layer = layer if keywords is not None: self.metadata = keywords else: # Check if it has keywords if not hasattr(layer, 'keywords'): message = 'Layer {layer_name} does not have keywords.'.format( layer_name=layer.name()) raise KeywordNotFoundError(message) self.metadata = layer.keywords self.populate_parameter()
def function[set_layer, parameter[self, layer, keywords]]: constant[Set layer and update UI accordingly. :param layer: A vector layer that has been already patched with metadata. :type layer: QgsVectorLayer :param keywords: Custom keyword for the layer. :type keywords: dict, None ] name[self].layer assign[=] name[layer] if compare[name[keywords] is_not constant[None]] begin[:] name[self].metadata assign[=] name[keywords] call[name[self].populate_parameter, parameter[]]
keyword[def] identifier[set_layer] ( identifier[self] , identifier[layer] , identifier[keywords] = keyword[None] ): literal[string] identifier[self] . identifier[layer] = identifier[layer] keyword[if] identifier[keywords] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[metadata] = identifier[keywords] keyword[else] : keyword[if] keyword[not] identifier[hasattr] ( identifier[layer] , literal[string] ): identifier[message] = literal[string] . identifier[format] ( identifier[layer_name] = identifier[layer] . identifier[name] ()) keyword[raise] identifier[KeywordNotFoundError] ( identifier[message] ) identifier[self] . identifier[metadata] = identifier[layer] . identifier[keywords] identifier[self] . identifier[populate_parameter] ()
def set_layer(self, layer, keywords=None): """Set layer and update UI accordingly. :param layer: A vector layer that has been already patched with metadata. :type layer: QgsVectorLayer :param keywords: Custom keyword for the layer. :type keywords: dict, None """ self.layer = layer if keywords is not None: self.metadata = keywords # depends on [control=['if'], data=['keywords']] else: # Check if it has keywords if not hasattr(layer, 'keywords'): message = 'Layer {layer_name} does not have keywords.'.format(layer_name=layer.name()) raise KeywordNotFoundError(message) # depends on [control=['if'], data=[]] self.metadata = layer.keywords self.populate_parameter()
def add_element(self, e): r""" Appends a pipeline stage. Appends the given element to the end of the current chain. """ if not isinstance(e, Iterable): raise TypeError("given element {} is not iterable in terms of " "PyEMMAs coordinate pipeline.".format(e)) # only if we have more than one element if not e.is_reader and len(self._chain) >= 1: data_producer = self._chain[-1] # avoid calling the setter of StreamingTransformer.data_producer, since this # triggers a re-parametrization even on readers (where it makes not sense) e._data_producer = data_producer e.chunksize = self.chunksize self._chain.append(e)
def function[add_element, parameter[self, e]]: constant[ Appends a pipeline stage. Appends the given element to the end of the current chain. ] if <ast.UnaryOp object at 0x7da20e954f10> begin[:] <ast.Raise object at 0x7da20e954a30> if <ast.BoolOp object at 0x7da20e955cc0> begin[:] variable[data_producer] assign[=] call[name[self]._chain][<ast.UnaryOp object at 0x7da1b07e2d10>] name[e]._data_producer assign[=] name[data_producer] name[e].chunksize assign[=] name[self].chunksize call[name[self]._chain.append, parameter[name[e]]]
keyword[def] identifier[add_element] ( identifier[self] , identifier[e] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[e] , identifier[Iterable] ): keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[e] )) keyword[if] keyword[not] identifier[e] . identifier[is_reader] keyword[and] identifier[len] ( identifier[self] . identifier[_chain] )>= literal[int] : identifier[data_producer] = identifier[self] . identifier[_chain] [- literal[int] ] identifier[e] . identifier[_data_producer] = identifier[data_producer] identifier[e] . identifier[chunksize] = identifier[self] . identifier[chunksize] identifier[self] . identifier[_chain] . identifier[append] ( identifier[e] )
def add_element(self, e): """ Appends a pipeline stage. Appends the given element to the end of the current chain. """ if not isinstance(e, Iterable): raise TypeError('given element {} is not iterable in terms of PyEMMAs coordinate pipeline.'.format(e)) # depends on [control=['if'], data=[]] # only if we have more than one element if not e.is_reader and len(self._chain) >= 1: data_producer = self._chain[-1] # avoid calling the setter of StreamingTransformer.data_producer, since this # triggers a re-parametrization even on readers (where it makes not sense) e._data_producer = data_producer # depends on [control=['if'], data=[]] e.chunksize = self.chunksize self._chain.append(e)
def transform_using_this_method(original_sample): """ This function implements a log transformation on the data. """ # Copy the original sample new_sample = original_sample.copy() new_data = new_sample.data # Our transformation goes here new_data['Y2-A'] = log(new_data['Y2-A']) new_data = new_data.dropna() # Removes all NaN entries new_sample.data = new_data return new_sample
def function[transform_using_this_method, parameter[original_sample]]: constant[ This function implements a log transformation on the data. ] variable[new_sample] assign[=] call[name[original_sample].copy, parameter[]] variable[new_data] assign[=] name[new_sample].data call[name[new_data]][constant[Y2-A]] assign[=] call[name[log], parameter[call[name[new_data]][constant[Y2-A]]]] variable[new_data] assign[=] call[name[new_data].dropna, parameter[]] name[new_sample].data assign[=] name[new_data] return[name[new_sample]]
keyword[def] identifier[transform_using_this_method] ( identifier[original_sample] ): literal[string] identifier[new_sample] = identifier[original_sample] . identifier[copy] () identifier[new_data] = identifier[new_sample] . identifier[data] identifier[new_data] [ literal[string] ]= identifier[log] ( identifier[new_data] [ literal[string] ]) identifier[new_data] = identifier[new_data] . identifier[dropna] () identifier[new_sample] . identifier[data] = identifier[new_data] keyword[return] identifier[new_sample]
def transform_using_this_method(original_sample): """ This function implements a log transformation on the data. """ # Copy the original sample new_sample = original_sample.copy() new_data = new_sample.data # Our transformation goes here new_data['Y2-A'] = log(new_data['Y2-A']) new_data = new_data.dropna() # Removes all NaN entries new_sample.data = new_data return new_sample
def _iter_key_ranges(self): """Iterates over self._key_ranges, delegating to self._iter_key_range().""" while True: if self._current_key_range is None: if self._key_ranges: self._current_key_range = self._key_ranges.pop() # The most recently popped key_range may be None, so continue here # to find the next keyrange that's valid. continue else: break for key, o in self._iter_key_range( copy.deepcopy(self._current_key_range)): # The caller must consume yielded values so advancing the KeyRange # before yielding is safe. self._current_key_range.advance(key) yield o self._current_key_range = None
def function[_iter_key_ranges, parameter[self]]: constant[Iterates over self._key_ranges, delegating to self._iter_key_range().] while constant[True] begin[:] if compare[name[self]._current_key_range is constant[None]] begin[:] if name[self]._key_ranges begin[:] name[self]._current_key_range assign[=] call[name[self]._key_ranges.pop, parameter[]] continue for taget[tuple[[<ast.Name object at 0x7da18dc9a500>, <ast.Name object at 0x7da18dc98970>]]] in starred[call[name[self]._iter_key_range, parameter[call[name[copy].deepcopy, parameter[name[self]._current_key_range]]]]] begin[:] call[name[self]._current_key_range.advance, parameter[name[key]]] <ast.Yield object at 0x7da18dc980a0> name[self]._current_key_range assign[=] constant[None]
keyword[def] identifier[_iter_key_ranges] ( identifier[self] ): literal[string] keyword[while] keyword[True] : keyword[if] identifier[self] . identifier[_current_key_range] keyword[is] keyword[None] : keyword[if] identifier[self] . identifier[_key_ranges] : identifier[self] . identifier[_current_key_range] = identifier[self] . identifier[_key_ranges] . identifier[pop] () keyword[continue] keyword[else] : keyword[break] keyword[for] identifier[key] , identifier[o] keyword[in] identifier[self] . identifier[_iter_key_range] ( identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[_current_key_range] )): identifier[self] . identifier[_current_key_range] . identifier[advance] ( identifier[key] ) keyword[yield] identifier[o] identifier[self] . identifier[_current_key_range] = keyword[None]
def _iter_key_ranges(self): """Iterates over self._key_ranges, delegating to self._iter_key_range().""" while True: if self._current_key_range is None: if self._key_ranges: self._current_key_range = self._key_ranges.pop() # The most recently popped key_range may be None, so continue here # to find the next keyrange that's valid. continue # depends on [control=['if'], data=[]] else: break # depends on [control=['if'], data=[]] for (key, o) in self._iter_key_range(copy.deepcopy(self._current_key_range)): # The caller must consume yielded values so advancing the KeyRange # before yielding is safe. self._current_key_range.advance(key) yield o # depends on [control=['for'], data=[]] self._current_key_range = None # depends on [control=['while'], data=[]]
def _parse_example_spec(self): """Returns a `tf.Example` parsing spec as dict.""" height, width = image_util.get_expected_image_size(self.module_spec) input_shape = [height, width, 3] return {self.key: tf_v1.FixedLenFeature(input_shape, tf.float32)}
def function[_parse_example_spec, parameter[self]]: constant[Returns a `tf.Example` parsing spec as dict.] <ast.Tuple object at 0x7da1b2036ef0> assign[=] call[name[image_util].get_expected_image_size, parameter[name[self].module_spec]] variable[input_shape] assign[=] list[[<ast.Name object at 0x7da20c6a9ae0>, <ast.Name object at 0x7da20c6a87f0>, <ast.Constant object at 0x7da20c6ab160>]] return[dictionary[[<ast.Attribute object at 0x7da20c6abaf0>], [<ast.Call object at 0x7da20c6a9930>]]]
keyword[def] identifier[_parse_example_spec] ( identifier[self] ): literal[string] identifier[height] , identifier[width] = identifier[image_util] . identifier[get_expected_image_size] ( identifier[self] . identifier[module_spec] ) identifier[input_shape] =[ identifier[height] , identifier[width] , literal[int] ] keyword[return] { identifier[self] . identifier[key] : identifier[tf_v1] . identifier[FixedLenFeature] ( identifier[input_shape] , identifier[tf] . identifier[float32] )}
def _parse_example_spec(self): """Returns a `tf.Example` parsing spec as dict.""" (height, width) = image_util.get_expected_image_size(self.module_spec) input_shape = [height, width, 3] return {self.key: tf_v1.FixedLenFeature(input_shape, tf.float32)}
def delete_process_work_item_type(self, process_id, wit_ref_name): """DeleteProcessWorkItemType. [Preview API] Removes a work itewm type in the process. :param str process_id: The ID of the process. :param str wit_ref_name: The reference name of the work item type. """ route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') self._send(http_method='DELETE', location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7', version='5.0-preview.2', route_values=route_values)
def function[delete_process_work_item_type, parameter[self, process_id, wit_ref_name]]: constant[DeleteProcessWorkItemType. [Preview API] Removes a work itewm type in the process. :param str process_id: The ID of the process. :param str wit_ref_name: The reference name of the work item type. ] variable[route_values] assign[=] dictionary[[], []] if compare[name[process_id] is_not constant[None]] begin[:] call[name[route_values]][constant[processId]] assign[=] call[name[self]._serialize.url, parameter[constant[process_id], name[process_id], constant[str]]] if compare[name[wit_ref_name] is_not constant[None]] begin[:] call[name[route_values]][constant[witRefName]] assign[=] call[name[self]._serialize.url, parameter[constant[wit_ref_name], name[wit_ref_name], constant[str]]] call[name[self]._send, parameter[]]
keyword[def] identifier[delete_process_work_item_type] ( identifier[self] , identifier[process_id] , identifier[wit_ref_name] ): literal[string] identifier[route_values] ={} keyword[if] identifier[process_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[process_id] , literal[string] ) keyword[if] identifier[wit_ref_name] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[wit_ref_name] , literal[string] ) identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] )
def delete_process_work_item_type(self, process_id, wit_ref_name): """DeleteProcessWorkItemType. [Preview API] Removes a work itewm type in the process. :param str process_id: The ID of the process. :param str wit_ref_name: The reference name of the work item type. """ route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') # depends on [control=['if'], data=['process_id']] if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') # depends on [control=['if'], data=['wit_ref_name']] self._send(http_method='DELETE', location_id='e2e9d1a6-432d-4062-8870-bfcb8c324ad7', version='5.0-preview.2', route_values=route_values)
def add_nullable_boolean_argument(parser, name, default=None, help_=None): """Add a boolean argument to an ArgumentParser instance.""" group = parser.add_mutually_exclusive_group() group.add_argument( '--' + name, nargs='?', default=default, const=True, type=_str_to_bool, help=help_) group.add_argument('--no' + name, dest=name, action='store_false', help=help_)
def function[add_nullable_boolean_argument, parameter[parser, name, default, help_]]: constant[Add a boolean argument to an ArgumentParser instance.] variable[group] assign[=] call[name[parser].add_mutually_exclusive_group, parameter[]] call[name[group].add_argument, parameter[binary_operation[constant[--] + name[name]]]] call[name[group].add_argument, parameter[binary_operation[constant[--no] + name[name]]]]
keyword[def] identifier[add_nullable_boolean_argument] ( identifier[parser] , identifier[name] , identifier[default] = keyword[None] , identifier[help_] = keyword[None] ): literal[string] identifier[group] = identifier[parser] . identifier[add_mutually_exclusive_group] () identifier[group] . identifier[add_argument] ( literal[string] + identifier[name] , identifier[nargs] = literal[string] , identifier[default] = identifier[default] , identifier[const] = keyword[True] , identifier[type] = identifier[_str_to_bool] , identifier[help] = identifier[help_] ) identifier[group] . identifier[add_argument] ( literal[string] + identifier[name] , identifier[dest] = identifier[name] , identifier[action] = literal[string] , identifier[help] = identifier[help_] )
def add_nullable_boolean_argument(parser, name, default=None, help_=None): """Add a boolean argument to an ArgumentParser instance.""" group = parser.add_mutually_exclusive_group() group.add_argument('--' + name, nargs='?', default=default, const=True, type=_str_to_bool, help=help_) group.add_argument('--no' + name, dest=name, action='store_false', help=help_)
def _to_xml(self, xml, data, key=None): """ Recursively convert the data into xml. This function was originally copied from the `Piston project <https://bitbucket.org/jespern/django-piston/>`_ It has been modified since. :param xml: the xml document :type xml: SimplerXMLGenerator :param data: data to be formatted :param key: name of the parent element (for root this is ``None``) """ if isinstance(data, (list, tuple)): for item in data: elemname = self._list_item_element_name(key) xml.startElement(elemname, {}) self._to_xml(xml, item) xml.endElement(elemname) elif isinstance(data, dict): for key, value in data.iteritems(): xml.startElement(key, {}) self._to_xml(xml, value, key) xml.endElement(key) else: xml.characters(smart_unicode(data))
def function[_to_xml, parameter[self, xml, data, key]]: constant[ Recursively convert the data into xml. This function was originally copied from the `Piston project <https://bitbucket.org/jespern/django-piston/>`_ It has been modified since. :param xml: the xml document :type xml: SimplerXMLGenerator :param data: data to be formatted :param key: name of the parent element (for root this is ``None``) ] if call[name[isinstance], parameter[name[data], tuple[[<ast.Name object at 0x7da20c6a9600>, <ast.Name object at 0x7da20c6ab9a0>]]]] begin[:] for taget[name[item]] in starred[name[data]] begin[:] variable[elemname] assign[=] call[name[self]._list_item_element_name, parameter[name[key]]] call[name[xml].startElement, parameter[name[elemname], dictionary[[], []]]] call[name[self]._to_xml, parameter[name[xml], name[item]]] call[name[xml].endElement, parameter[name[elemname]]]
keyword[def] identifier[_to_xml] ( identifier[self] , identifier[xml] , identifier[data] , identifier[key] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] )): keyword[for] identifier[item] keyword[in] identifier[data] : identifier[elemname] = identifier[self] . identifier[_list_item_element_name] ( identifier[key] ) identifier[xml] . identifier[startElement] ( identifier[elemname] ,{}) identifier[self] . identifier[_to_xml] ( identifier[xml] , identifier[item] ) identifier[xml] . identifier[endElement] ( identifier[elemname] ) keyword[elif] identifier[isinstance] ( identifier[data] , identifier[dict] ): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data] . identifier[iteritems] (): identifier[xml] . identifier[startElement] ( identifier[key] ,{}) identifier[self] . identifier[_to_xml] ( identifier[xml] , identifier[value] , identifier[key] ) identifier[xml] . identifier[endElement] ( identifier[key] ) keyword[else] : identifier[xml] . identifier[characters] ( identifier[smart_unicode] ( identifier[data] ))
def _to_xml(self, xml, data, key=None): """ Recursively convert the data into xml. This function was originally copied from the `Piston project <https://bitbucket.org/jespern/django-piston/>`_ It has been modified since. :param xml: the xml document :type xml: SimplerXMLGenerator :param data: data to be formatted :param key: name of the parent element (for root this is ``None``) """ if isinstance(data, (list, tuple)): for item in data: elemname = self._list_item_element_name(key) xml.startElement(elemname, {}) self._to_xml(xml, item) xml.endElement(elemname) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] elif isinstance(data, dict): for (key, value) in data.iteritems(): xml.startElement(key, {}) self._to_xml(xml, value, key) xml.endElement(key) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: xml.characters(smart_unicode(data))
def _set_flow_rate( pipette_name, pipette, command_type, params, default_values): """ Set flow rate in uL/mm, to value obtained from command's params, or if unspecified in command params, then from protocol's "default-values". """ default_aspirate = default_values.get( 'aspirate-flow-rate', {}).get(pipette_name) default_dispense = default_values.get( 'dispense-flow-rate', {}).get(pipette_name) flow_rate_param = params.get('flow-rate') if flow_rate_param is not None: if command_type == 'aspirate': pipette.flow_rate = { 'aspirate': flow_rate_param, 'dispense': default_dispense } return if command_type == 'dispense': pipette.flow_rate = { 'aspirate': default_aspirate, 'dispense': flow_rate_param } return pipette.flow_rate = { 'aspirate': default_aspirate, 'dispense': default_dispense }
def function[_set_flow_rate, parameter[pipette_name, pipette, command_type, params, default_values]]: constant[ Set flow rate in uL/mm, to value obtained from command's params, or if unspecified in command params, then from protocol's "default-values". ] variable[default_aspirate] assign[=] call[call[name[default_values].get, parameter[constant[aspirate-flow-rate], dictionary[[], []]]].get, parameter[name[pipette_name]]] variable[default_dispense] assign[=] call[call[name[default_values].get, parameter[constant[dispense-flow-rate], dictionary[[], []]]].get, parameter[name[pipette_name]]] variable[flow_rate_param] assign[=] call[name[params].get, parameter[constant[flow-rate]]] if compare[name[flow_rate_param] is_not constant[None]] begin[:] if compare[name[command_type] equal[==] constant[aspirate]] begin[:] name[pipette].flow_rate assign[=] dictionary[[<ast.Constant object at 0x7da1b08a2110>, <ast.Constant object at 0x7da1b08a21d0>], [<ast.Name object at 0x7da1b08a21a0>, <ast.Name object at 0x7da1b08a2260>]] return[None] if compare[name[command_type] equal[==] constant[dispense]] begin[:] name[pipette].flow_rate assign[=] dictionary[[<ast.Constant object at 0x7da1b08a0ca0>, <ast.Constant object at 0x7da1b08a3b20>], [<ast.Name object at 0x7da1b08a19c0>, <ast.Name object at 0x7da1b08a1900>]] return[None] name[pipette].flow_rate assign[=] dictionary[[<ast.Constant object at 0x7da1b08a2980>, <ast.Constant object at 0x7da1b08a2ad0>], [<ast.Name object at 0x7da1b08a2f20>, <ast.Name object at 0x7da1b08a29b0>]]
keyword[def] identifier[_set_flow_rate] ( identifier[pipette_name] , identifier[pipette] , identifier[command_type] , identifier[params] , identifier[default_values] ): literal[string] identifier[default_aspirate] = identifier[default_values] . identifier[get] ( literal[string] ,{}). identifier[get] ( identifier[pipette_name] ) identifier[default_dispense] = identifier[default_values] . identifier[get] ( literal[string] ,{}). identifier[get] ( identifier[pipette_name] ) identifier[flow_rate_param] = identifier[params] . identifier[get] ( literal[string] ) keyword[if] identifier[flow_rate_param] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[command_type] == literal[string] : identifier[pipette] . identifier[flow_rate] ={ literal[string] : identifier[flow_rate_param] , literal[string] : identifier[default_dispense] } keyword[return] keyword[if] identifier[command_type] == literal[string] : identifier[pipette] . identifier[flow_rate] ={ literal[string] : identifier[default_aspirate] , literal[string] : identifier[flow_rate_param] } keyword[return] identifier[pipette] . identifier[flow_rate] ={ literal[string] : identifier[default_aspirate] , literal[string] : identifier[default_dispense] }
def _set_flow_rate(pipette_name, pipette, command_type, params, default_values): """ Set flow rate in uL/mm, to value obtained from command's params, or if unspecified in command params, then from protocol's "default-values". """ default_aspirate = default_values.get('aspirate-flow-rate', {}).get(pipette_name) default_dispense = default_values.get('dispense-flow-rate', {}).get(pipette_name) flow_rate_param = params.get('flow-rate') if flow_rate_param is not None: if command_type == 'aspirate': pipette.flow_rate = {'aspirate': flow_rate_param, 'dispense': default_dispense} return # depends on [control=['if'], data=[]] if command_type == 'dispense': pipette.flow_rate = {'aspirate': default_aspirate, 'dispense': flow_rate_param} return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['flow_rate_param']] pipette.flow_rate = {'aspirate': default_aspirate, 'dispense': default_dispense}
def fields_to_dict(obj, skip_fields=None): """ Generates a dictionary with the field values of the object passed in, where keys are the field names. Skips computed fields """ data = {} obj = api.get_object(obj) for field_name, field in api.get_fields(obj).items(): if skip_fields and field_name in skip_fields: continue if field.type == "computed": continue data[field_name] = field.get(obj) return data
def function[fields_to_dict, parameter[obj, skip_fields]]: constant[ Generates a dictionary with the field values of the object passed in, where keys are the field names. Skips computed fields ] variable[data] assign[=] dictionary[[], []] variable[obj] assign[=] call[name[api].get_object, parameter[name[obj]]] for taget[tuple[[<ast.Name object at 0x7da1b1d4b3d0>, <ast.Name object at 0x7da1b1d48dc0>]]] in starred[call[call[name[api].get_fields, parameter[name[obj]]].items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b1d4bd30> begin[:] continue if compare[name[field].type equal[==] constant[computed]] begin[:] continue call[name[data]][name[field_name]] assign[=] call[name[field].get, parameter[name[obj]]] return[name[data]]
keyword[def] identifier[fields_to_dict] ( identifier[obj] , identifier[skip_fields] = keyword[None] ): literal[string] identifier[data] ={} identifier[obj] = identifier[api] . identifier[get_object] ( identifier[obj] ) keyword[for] identifier[field_name] , identifier[field] keyword[in] identifier[api] . identifier[get_fields] ( identifier[obj] ). identifier[items] (): keyword[if] identifier[skip_fields] keyword[and] identifier[field_name] keyword[in] identifier[skip_fields] : keyword[continue] keyword[if] identifier[field] . identifier[type] == literal[string] : keyword[continue] identifier[data] [ identifier[field_name] ]= identifier[field] . identifier[get] ( identifier[obj] ) keyword[return] identifier[data]
def fields_to_dict(obj, skip_fields=None): """ Generates a dictionary with the field values of the object passed in, where keys are the field names. Skips computed fields """ data = {} obj = api.get_object(obj) for (field_name, field) in api.get_fields(obj).items(): if skip_fields and field_name in skip_fields: continue # depends on [control=['if'], data=[]] if field.type == 'computed': continue # depends on [control=['if'], data=[]] data[field_name] = field.get(obj) # depends on [control=['for'], data=[]] return data
def import_by_path(dotted_path, error_prefix=''): """ Import a dotted module path and return the attribute/class designated by the last name in the path. Raise ImproperlyConfigured if something goes wrong. This has come straight from Django 1.6 """ try: module_path, class_name = dotted_path.rsplit('.', 1) except ValueError: raise ImproperlyConfigured("%s%s doesn't look like a module path" % ( error_prefix, dotted_path)) try: module = import_module(module_path) except ImportError as e: raise ImproperlyConfigured('%sError importing module %s: "%s"' % ( error_prefix, module_path, e)) try: attr = getattr(module, class_name) except AttributeError: raise ImproperlyConfigured( '%sModule "%s" does not define a "%s" attribute/class' % ( error_prefix, module_path, class_name ) ) return attr
def function[import_by_path, parameter[dotted_path, error_prefix]]: constant[ Import a dotted module path and return the attribute/class designated by the last name in the path. Raise ImproperlyConfigured if something goes wrong. This has come straight from Django 1.6 ] <ast.Try object at 0x7da18f58f1c0> <ast.Try object at 0x7da18dc9a380> <ast.Try object at 0x7da18dc99e40> return[name[attr]]
keyword[def] identifier[import_by_path] ( identifier[dotted_path] , identifier[error_prefix] = literal[string] ): literal[string] keyword[try] : identifier[module_path] , identifier[class_name] = identifier[dotted_path] . identifier[rsplit] ( literal[string] , literal[int] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] %( identifier[error_prefix] , identifier[dotted_path] )) keyword[try] : identifier[module] = identifier[import_module] ( identifier[module_path] ) keyword[except] identifier[ImportError] keyword[as] identifier[e] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] %( identifier[error_prefix] , identifier[module_path] , identifier[e] )) keyword[try] : identifier[attr] = identifier[getattr] ( identifier[module] , identifier[class_name] ) keyword[except] identifier[AttributeError] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] %( identifier[error_prefix] , identifier[module_path] , identifier[class_name] ) ) keyword[return] identifier[attr]
def import_by_path(dotted_path, error_prefix=''): """ Import a dotted module path and return the attribute/class designated by the last name in the path. Raise ImproperlyConfigured if something goes wrong. This has come straight from Django 1.6 """ try: (module_path, class_name) = dotted_path.rsplit('.', 1) # depends on [control=['try'], data=[]] except ValueError: raise ImproperlyConfigured("%s%s doesn't look like a module path" % (error_prefix, dotted_path)) # depends on [control=['except'], data=[]] try: module = import_module(module_path) # depends on [control=['try'], data=[]] except ImportError as e: raise ImproperlyConfigured('%sError importing module %s: "%s"' % (error_prefix, module_path, e)) # depends on [control=['except'], data=['e']] try: attr = getattr(module, class_name) # depends on [control=['try'], data=[]] except AttributeError: raise ImproperlyConfigured('%sModule "%s" does not define a "%s" attribute/class' % (error_prefix, module_path, class_name)) # depends on [control=['except'], data=[]] return attr
def _safe_cast(out_type, val): """Try to covert val to out_type but never raise an exception. If the value can't be converted, then a sensible default value is returned. out_type should be bool, int, or unicode; otherwise, the value is just passed through. """ if val is None: return None if out_type == int: if isinstance(val, int) or isinstance(val, float): # Just a number. return int(val) else: # Process any other type as a string. if isinstance(val, bytes): val = val.decode('utf-8', 'ignore') elif not isinstance(val, six.string_types): val = six.text_type(val) # Get a number from the front of the string. match = re.match(r'[\+-]?[0-9]+', val.strip()) return int(match.group(0)) if match else 0 elif out_type == bool: try: # Should work for strings, bools, ints: return bool(int(val)) except ValueError: return False elif out_type == six.text_type: if isinstance(val, bytes): return val.decode('utf-8', 'ignore') elif isinstance(val, six.text_type): return val else: return six.text_type(val) elif out_type == float: if isinstance(val, int) or isinstance(val, float): return float(val) else: if isinstance(val, bytes): val = val.decode('utf-8', 'ignore') else: val = six.text_type(val) match = re.match(r'[\+-]?([0-9]+\.?[0-9]*|[0-9]*\.[0-9]+)', val.strip()) if match: val = match.group(0) if val: return float(val) return 0.0 else: return val
def function[_safe_cast, parameter[out_type, val]]: constant[Try to covert val to out_type but never raise an exception. If the value can't be converted, then a sensible default value is returned. out_type should be bool, int, or unicode; otherwise, the value is just passed through. ] if compare[name[val] is constant[None]] begin[:] return[constant[None]] if compare[name[out_type] equal[==] name[int]] begin[:] if <ast.BoolOp object at 0x7da1b10e70d0> begin[:] return[call[name[int], parameter[name[val]]]]
keyword[def] identifier[_safe_cast] ( identifier[out_type] , identifier[val] ): literal[string] keyword[if] identifier[val] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[out_type] == identifier[int] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[int] ) keyword[or] identifier[isinstance] ( identifier[val] , identifier[float] ): keyword[return] identifier[int] ( identifier[val] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[bytes] ): identifier[val] = identifier[val] . identifier[decode] ( literal[string] , literal[string] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[string_types] ): identifier[val] = identifier[six] . identifier[text_type] ( identifier[val] ) identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[val] . identifier[strip] ()) keyword[return] identifier[int] ( identifier[match] . identifier[group] ( literal[int] )) keyword[if] identifier[match] keyword[else] literal[int] keyword[elif] identifier[out_type] == identifier[bool] : keyword[try] : keyword[return] identifier[bool] ( identifier[int] ( identifier[val] )) keyword[except] identifier[ValueError] : keyword[return] keyword[False] keyword[elif] identifier[out_type] == identifier[six] . identifier[text_type] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[bytes] ): keyword[return] identifier[val] . identifier[decode] ( literal[string] , literal[string] ) keyword[elif] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[text_type] ): keyword[return] identifier[val] keyword[else] : keyword[return] identifier[six] . 
identifier[text_type] ( identifier[val] ) keyword[elif] identifier[out_type] == identifier[float] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[int] ) keyword[or] identifier[isinstance] ( identifier[val] , identifier[float] ): keyword[return] identifier[float] ( identifier[val] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[bytes] ): identifier[val] = identifier[val] . identifier[decode] ( literal[string] , literal[string] ) keyword[else] : identifier[val] = identifier[six] . identifier[text_type] ( identifier[val] ) identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[val] . identifier[strip] ()) keyword[if] identifier[match] : identifier[val] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] identifier[val] : keyword[return] identifier[float] ( identifier[val] ) keyword[return] literal[int] keyword[else] : keyword[return] identifier[val]
def _safe_cast(out_type, val): """Try to covert val to out_type but never raise an exception. If the value can't be converted, then a sensible default value is returned. out_type should be bool, int, or unicode; otherwise, the value is just passed through. """ if val is None: return None # depends on [control=['if'], data=[]] if out_type == int: if isinstance(val, int) or isinstance(val, float): # Just a number. return int(val) # depends on [control=['if'], data=[]] else: # Process any other type as a string. if isinstance(val, bytes): val = val.decode('utf-8', 'ignore') # depends on [control=['if'], data=[]] elif not isinstance(val, six.string_types): val = six.text_type(val) # depends on [control=['if'], data=[]] # Get a number from the front of the string. match = re.match('[\\+-]?[0-9]+', val.strip()) return int(match.group(0)) if match else 0 # depends on [control=['if'], data=['int']] elif out_type == bool: try: # Should work for strings, bools, ints: return bool(int(val)) # depends on [control=['try'], data=[]] except ValueError: return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['bool']] elif out_type == six.text_type: if isinstance(val, bytes): return val.decode('utf-8', 'ignore') # depends on [control=['if'], data=[]] elif isinstance(val, six.text_type): return val # depends on [control=['if'], data=[]] else: return six.text_type(val) # depends on [control=['if'], data=[]] elif out_type == float: if isinstance(val, int) or isinstance(val, float): return float(val) # depends on [control=['if'], data=[]] else: if isinstance(val, bytes): val = val.decode('utf-8', 'ignore') # depends on [control=['if'], data=[]] else: val = six.text_type(val) match = re.match('[\\+-]?([0-9]+\\.?[0-9]*|[0-9]*\\.[0-9]+)', val.strip()) if match: val = match.group(0) if val: return float(val) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return 0.0 # depends on [control=['if'], data=['float']] else: return 
val
def available_writers(as_dict=False): """Available writers based on current configuration. Args: as_dict (bool): Optionally return writer information as a dictionary. Default: False Returns: List of available writer names. If `as_dict` is `True` then a list of dictionaries including additionally writer information is returned. """ writers = [] for writer_configs in configs_for_writer(): try: writer_info = read_writer_config(writer_configs) except (KeyError, IOError, yaml.YAMLError): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue writers.append(writer_info if as_dict else writer_info['name']) return writers
def function[available_writers, parameter[as_dict]]: constant[Available writers based on current configuration. Args: as_dict (bool): Optionally return writer information as a dictionary. Default: False Returns: List of available writer names. If `as_dict` is `True` then a list of dictionaries including additionally writer information is returned. ] variable[writers] assign[=] list[[]] for taget[name[writer_configs]] in starred[call[name[configs_for_writer], parameter[]]] begin[:] <ast.Try object at 0x7da1b1d5d150> call[name[writers].append, parameter[<ast.IfExp object at 0x7da1b1d89090>]] return[name[writers]]
keyword[def] identifier[available_writers] ( identifier[as_dict] = keyword[False] ): literal[string] identifier[writers] =[] keyword[for] identifier[writer_configs] keyword[in] identifier[configs_for_writer] (): keyword[try] : identifier[writer_info] = identifier[read_writer_config] ( identifier[writer_configs] ) keyword[except] ( identifier[KeyError] , identifier[IOError] , identifier[yaml] . identifier[YAMLError] ): identifier[LOG] . identifier[warning] ( literal[string] , identifier[writer_configs] ) identifier[LOG] . identifier[debug] ( literal[string] , identifier[exc_info] = keyword[True] ) keyword[continue] identifier[writers] . identifier[append] ( identifier[writer_info] keyword[if] identifier[as_dict] keyword[else] identifier[writer_info] [ literal[string] ]) keyword[return] identifier[writers]
def available_writers(as_dict=False): """Available writers based on current configuration. Args: as_dict (bool): Optionally return writer information as a dictionary. Default: False Returns: List of available writer names. If `as_dict` is `True` then a list of dictionaries including additionally writer information is returned. """ writers = [] for writer_configs in configs_for_writer(): try: writer_info = read_writer_config(writer_configs) # depends on [control=['try'], data=[]] except (KeyError, IOError, yaml.YAMLError): LOG.warning('Could not import writer config from: %s', writer_configs) LOG.debug('Error loading YAML', exc_info=True) continue # depends on [control=['except'], data=[]] writers.append(writer_info if as_dict else writer_info['name']) # depends on [control=['for'], data=['writer_configs']] return writers
def range_type_to_dtype(range_type: str) -> Optional[tf.DType]: '''Maps RDDL range types to TensorFlow dtypes.''' range2dtype = { 'real': tf.float32, 'int': tf.int32, 'bool': tf.bool } return range2dtype[range_type]
def function[range_type_to_dtype, parameter[range_type]]: constant[Maps RDDL range types to TensorFlow dtypes.] variable[range2dtype] assign[=] dictionary[[<ast.Constant object at 0x7da18f09d240>, <ast.Constant object at 0x7da18f09f820>, <ast.Constant object at 0x7da18f09ed70>], [<ast.Attribute object at 0x7da18f09cb80>, <ast.Attribute object at 0x7da18f09f460>, <ast.Attribute object at 0x7da18f09e980>]] return[call[name[range2dtype]][name[range_type]]]
keyword[def] identifier[range_type_to_dtype] ( identifier[range_type] : identifier[str] )-> identifier[Optional] [ identifier[tf] . identifier[DType] ]: literal[string] identifier[range2dtype] ={ literal[string] : identifier[tf] . identifier[float32] , literal[string] : identifier[tf] . identifier[int32] , literal[string] : identifier[tf] . identifier[bool] } keyword[return] identifier[range2dtype] [ identifier[range_type] ]
def range_type_to_dtype(range_type: str) -> Optional[tf.DType]: """Maps RDDL range types to TensorFlow dtypes.""" range2dtype = {'real': tf.float32, 'int': tf.int32, 'bool': tf.bool} return range2dtype[range_type]
def inverse(self): """Inverse Fourier transform.""" sign = '-' if self.sign == '+' else '+' return DiscreteFourierTransform( domain=self.range, range=self.domain, axes=self.axes, halfcomplex=self.halfcomplex, sign=sign)
def function[inverse, parameter[self]]: constant[Inverse Fourier transform.] variable[sign] assign[=] <ast.IfExp object at 0x7da1b1e587c0> return[call[name[DiscreteFourierTransform], parameter[]]]
keyword[def] identifier[inverse] ( identifier[self] ): literal[string] identifier[sign] = literal[string] keyword[if] identifier[self] . identifier[sign] == literal[string] keyword[else] literal[string] keyword[return] identifier[DiscreteFourierTransform] ( identifier[domain] = identifier[self] . identifier[range] , identifier[range] = identifier[self] . identifier[domain] , identifier[axes] = identifier[self] . identifier[axes] , identifier[halfcomplex] = identifier[self] . identifier[halfcomplex] , identifier[sign] = identifier[sign] )
def inverse(self): """Inverse Fourier transform.""" sign = '-' if self.sign == '+' else '+' return DiscreteFourierTransform(domain=self.range, range=self.domain, axes=self.axes, halfcomplex=self.halfcomplex, sign=sign)
def pairedBEDIterator(inputStreams, mirror=False, mirrorScore=None, ignoreStrand=False, ignoreScore=True, ignoreName=True, sortedby=ITERATOR_SORTED_END, scoreType=float, verbose=False): """ Iterate over multiple BED format files simultaneously and yield lists of genomic intervals for each matching set of intervals found. By default, regions which are not found in all files will be skipped (mirror = false). Optionally (by setting mirror to true) if a file is missing an interval, it can be added on-the-fly, and will have the same chrom, start and end and name as in other files. The score will be taken from the first file in inputStreams if mirrorScore is not set, otherwise that value will be used. :param inputStreams: a list of input streams in BED format :param mirror: if true, add missing elements so all streams contain the same elements. Inserted elements will have the same :param ignoreStrand: ignore strand when comparing elements for equality? :param ignoreScore: ignore score when comparing elements for equality? :param ignoreScore: ignore name when comparing elements for equality? :param sortedby: must be set to one of the sorting orders for BED streams; we require the streams to be sorted in some fashion. :param scoreType: interpret scores as what type? Defaults to float, which is generally the most flexible. """ # let's build our sorting order... 
sortOrder = ["chrom"] if sortedby == ITERATOR_SORTED_START: sortOrder.append("start") sortOrder.append("end") elif sortedby == ITERATOR_SORTED_END: sortOrder.append("end") sortOrder.append("start") if not ignoreStrand: sortOrder.append("strand") if not ignoreName: sortOrder.append("name") if not ignoreScore: sortOrder.append("score") keyFunc = attrgetter(*sortOrder) def next_item(iterator): """ little internal function to return the next item, or None """ try: return iterator.next() except StopIteration: return None bIterators = [BEDIterator(bfh, verbose=verbose, sortedby=sortedby, scoreType=scoreType) for bfh in inputStreams] elements = [next_item(it) for it in bIterators] while True: assert(len(elements) >= 2) if None not in elements and len(set([keyFunc(x) for x in elements])) == 1: # All equal -- yield and move on for all streams yield [e for e in elements] elements = [next_item(it) for it in bIterators] else: # something wasn't equal.. find the smallest thing, it's about to drop # out of range and will never have the chance to match anything again minElement = min([x for x in elements if x is not None], key=keyFunc) minIndices = [i for i in range(0, len(elements)) if elements[i] is not None and keyFunc(elements[i]) == keyFunc(minElement)] if mirror: # mirror the min item for any streams in which it doesn't match score = minElement.score if mirrorScore is None else mirrorScore yield [elements[i] if i in minIndices else GenomicInterval(minElement.chrom, minElement.start, minElement.end, minElement.name, score, minElement.strand, scoreType=scoreType) for i in range(0, len(elements))] # move the smallest element onwards now, we're done with it for index in minIndices: elements[index] = next_item(bIterators[index]) # stop once all streams are exhausted if reduce(lambda x, y: x and y, [e is None for e in elements]): break
def function[pairedBEDIterator, parameter[inputStreams, mirror, mirrorScore, ignoreStrand, ignoreScore, ignoreName, sortedby, scoreType, verbose]]: constant[ Iterate over multiple BED format files simultaneously and yield lists of genomic intervals for each matching set of intervals found. By default, regions which are not found in all files will be skipped (mirror = false). Optionally (by setting mirror to true) if a file is missing an interval, it can be added on-the-fly, and will have the same chrom, start and end and name as in other files. The score will be taken from the first file in inputStreams if mirrorScore is not set, otherwise that value will be used. :param inputStreams: a list of input streams in BED format :param mirror: if true, add missing elements so all streams contain the same elements. Inserted elements will have the same :param ignoreStrand: ignore strand when comparing elements for equality? :param ignoreScore: ignore score when comparing elements for equality? :param ignoreScore: ignore name when comparing elements for equality? :param sortedby: must be set to one of the sorting orders for BED streams; we require the streams to be sorted in some fashion. :param scoreType: interpret scores as what type? Defaults to float, which is generally the most flexible. 
] variable[sortOrder] assign[=] list[[<ast.Constant object at 0x7da1b14e6ec0>]] if compare[name[sortedby] equal[==] name[ITERATOR_SORTED_START]] begin[:] call[name[sortOrder].append, parameter[constant[start]]] call[name[sortOrder].append, parameter[constant[end]]] if <ast.UnaryOp object at 0x7da1b14e5300> begin[:] call[name[sortOrder].append, parameter[constant[strand]]] if <ast.UnaryOp object at 0x7da1b14e5690> begin[:] call[name[sortOrder].append, parameter[constant[name]]] if <ast.UnaryOp object at 0x7da1b14e59f0> begin[:] call[name[sortOrder].append, parameter[constant[score]]] variable[keyFunc] assign[=] call[name[attrgetter], parameter[<ast.Starred object at 0x7da1b14e43a0>]] def function[next_item, parameter[iterator]]: constant[ little internal function to return the next item, or None ] <ast.Try object at 0x7da1b14e6740> variable[bIterators] assign[=] <ast.ListComp object at 0x7da1b14e4f70> variable[elements] assign[=] <ast.ListComp object at 0x7da1b14e58d0> while constant[True] begin[:] assert[compare[call[name[len], parameter[name[elements]]] greater_or_equal[>=] constant[2]]] if <ast.BoolOp object at 0x7da1b14e77f0> begin[:] <ast.Yield object at 0x7da1b14e6140> variable[elements] assign[=] <ast.ListComp object at 0x7da1b14e4880> if call[name[reduce], parameter[<ast.Lambda object at 0x7da1b1454790>, <ast.ListComp object at 0x7da1b1455120>]] begin[:] break
keyword[def] identifier[pairedBEDIterator] ( identifier[inputStreams] , identifier[mirror] = keyword[False] , identifier[mirrorScore] = keyword[None] , identifier[ignoreStrand] = keyword[False] , identifier[ignoreScore] = keyword[True] , identifier[ignoreName] = keyword[True] , identifier[sortedby] = identifier[ITERATOR_SORTED_END] , identifier[scoreType] = identifier[float] , identifier[verbose] = keyword[False] ): literal[string] identifier[sortOrder] =[ literal[string] ] keyword[if] identifier[sortedby] == identifier[ITERATOR_SORTED_START] : identifier[sortOrder] . identifier[append] ( literal[string] ) identifier[sortOrder] . identifier[append] ( literal[string] ) keyword[elif] identifier[sortedby] == identifier[ITERATOR_SORTED_END] : identifier[sortOrder] . identifier[append] ( literal[string] ) identifier[sortOrder] . identifier[append] ( literal[string] ) keyword[if] keyword[not] identifier[ignoreStrand] : identifier[sortOrder] . identifier[append] ( literal[string] ) keyword[if] keyword[not] identifier[ignoreName] : identifier[sortOrder] . identifier[append] ( literal[string] ) keyword[if] keyword[not] identifier[ignoreScore] : identifier[sortOrder] . identifier[append] ( literal[string] ) identifier[keyFunc] = identifier[attrgetter] (* identifier[sortOrder] ) keyword[def] identifier[next_item] ( identifier[iterator] ): literal[string] keyword[try] : keyword[return] identifier[iterator] . 
identifier[next] () keyword[except] identifier[StopIteration] : keyword[return] keyword[None] identifier[bIterators] =[ identifier[BEDIterator] ( identifier[bfh] , identifier[verbose] = identifier[verbose] , identifier[sortedby] = identifier[sortedby] , identifier[scoreType] = identifier[scoreType] ) keyword[for] identifier[bfh] keyword[in] identifier[inputStreams] ] identifier[elements] =[ identifier[next_item] ( identifier[it] ) keyword[for] identifier[it] keyword[in] identifier[bIterators] ] keyword[while] keyword[True] : keyword[assert] ( identifier[len] ( identifier[elements] )>= literal[int] ) keyword[if] keyword[None] keyword[not] keyword[in] identifier[elements] keyword[and] identifier[len] ( identifier[set] ([ identifier[keyFunc] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[elements] ]))== literal[int] : keyword[yield] [ identifier[e] keyword[for] identifier[e] keyword[in] identifier[elements] ] identifier[elements] =[ identifier[next_item] ( identifier[it] ) keyword[for] identifier[it] keyword[in] identifier[bIterators] ] keyword[else] : identifier[minElement] = identifier[min] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[elements] keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] ], identifier[key] = identifier[keyFunc] ) identifier[minIndices] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[elements] )) keyword[if] identifier[elements] [ identifier[i] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[keyFunc] ( identifier[elements] [ identifier[i] ])== identifier[keyFunc] ( identifier[minElement] )] keyword[if] identifier[mirror] : identifier[score] = identifier[minElement] . 
identifier[score] keyword[if] identifier[mirrorScore] keyword[is] keyword[None] keyword[else] identifier[mirrorScore] keyword[yield] [ identifier[elements] [ identifier[i] ] keyword[if] identifier[i] keyword[in] identifier[minIndices] keyword[else] identifier[GenomicInterval] ( identifier[minElement] . identifier[chrom] , identifier[minElement] . identifier[start] , identifier[minElement] . identifier[end] , identifier[minElement] . identifier[name] , identifier[score] , identifier[minElement] . identifier[strand] , identifier[scoreType] = identifier[scoreType] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[elements] ))] keyword[for] identifier[index] keyword[in] identifier[minIndices] : identifier[elements] [ identifier[index] ]= identifier[next_item] ( identifier[bIterators] [ identifier[index] ]) keyword[if] identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] keyword[and] identifier[y] ,[ identifier[e] keyword[is] keyword[None] keyword[for] identifier[e] keyword[in] identifier[elements] ]): keyword[break]
def pairedBEDIterator(inputStreams, mirror=False, mirrorScore=None, ignoreStrand=False, ignoreScore=True, ignoreName=True, sortedby=ITERATOR_SORTED_END, scoreType=float, verbose=False): """ Iterate over multiple BED format files simultaneously and yield lists of genomic intervals for each matching set of intervals found. By default, regions which are not found in all files will be skipped (mirror = false). Optionally (by setting mirror to true) if a file is missing an interval, it can be added on-the-fly, and will have the same chrom, start and end and name as in other files. The score will be taken from the first file in inputStreams if mirrorScore is not set, otherwise that value will be used. :param inputStreams: a list of input streams in BED format :param mirror: if true, add missing elements so all streams contain the same elements. Inserted elements will have the same :param ignoreStrand: ignore strand when comparing elements for equality? :param ignoreScore: ignore score when comparing elements for equality? :param ignoreScore: ignore name when comparing elements for equality? :param sortedby: must be set to one of the sorting orders for BED streams; we require the streams to be sorted in some fashion. :param scoreType: interpret scores as what type? Defaults to float, which is generally the most flexible. """ # let's build our sorting order... 
sortOrder = ['chrom'] if sortedby == ITERATOR_SORTED_START: sortOrder.append('start') sortOrder.append('end') # depends on [control=['if'], data=[]] elif sortedby == ITERATOR_SORTED_END: sortOrder.append('end') sortOrder.append('start') # depends on [control=['if'], data=[]] if not ignoreStrand: sortOrder.append('strand') # depends on [control=['if'], data=[]] if not ignoreName: sortOrder.append('name') # depends on [control=['if'], data=[]] if not ignoreScore: sortOrder.append('score') # depends on [control=['if'], data=[]] keyFunc = attrgetter(*sortOrder) def next_item(iterator): """ little internal function to return the next item, or None """ try: return iterator.next() # depends on [control=['try'], data=[]] except StopIteration: return None # depends on [control=['except'], data=[]] bIterators = [BEDIterator(bfh, verbose=verbose, sortedby=sortedby, scoreType=scoreType) for bfh in inputStreams] elements = [next_item(it) for it in bIterators] while True: assert len(elements) >= 2 if None not in elements and len(set([keyFunc(x) for x in elements])) == 1: # All equal -- yield and move on for all streams yield [e for e in elements] elements = [next_item(it) for it in bIterators] # depends on [control=['if'], data=[]] else: # something wasn't equal.. 
find the smallest thing, it's about to drop # out of range and will never have the chance to match anything again minElement = min([x for x in elements if x is not None], key=keyFunc) minIndices = [i for i in range(0, len(elements)) if elements[i] is not None and keyFunc(elements[i]) == keyFunc(minElement)] if mirror: # mirror the min item for any streams in which it doesn't match score = minElement.score if mirrorScore is None else mirrorScore yield [elements[i] if i in minIndices else GenomicInterval(minElement.chrom, minElement.start, minElement.end, minElement.name, score, minElement.strand, scoreType=scoreType) for i in range(0, len(elements))] # depends on [control=['if'], data=[]] # move the smallest element onwards now, we're done with it for index in minIndices: elements[index] = next_item(bIterators[index]) # depends on [control=['for'], data=['index']] # stop once all streams are exhausted if reduce(lambda x, y: x and y, [e is None for e in elements]): break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def __discover_jmeter_udp_port(self): """Searching for line in jmeter.log such as Waiting for possible shutdown message on port 4445 """ r = re.compile(self.DISCOVER_PORT_PATTERN) with open(self.process_stderr.name, 'r') as f: cnt = 0 while self.process.pid and cnt < 10: line = f.readline() m = r.match(line) if m is None: cnt += 1 time.sleep(1) else: port = int(m.group('port')) return port else: logger.warning('JMeter UDP port wasn\'t discovered') return None
def function[__discover_jmeter_udp_port, parameter[self]]: constant[Searching for line in jmeter.log such as Waiting for possible shutdown message on port 4445 ] variable[r] assign[=] call[name[re].compile, parameter[name[self].DISCOVER_PORT_PATTERN]] with call[name[open], parameter[name[self].process_stderr.name, constant[r]]] begin[:] variable[cnt] assign[=] constant[0] while <ast.BoolOp object at 0x7da1b0352f50> begin[:] variable[line] assign[=] call[name[f].readline, parameter[]] variable[m] assign[=] call[name[r].match, parameter[name[line]]] if compare[name[m] is constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0350910> call[name[time].sleep, parameter[constant[1]]]
keyword[def] identifier[__discover_jmeter_udp_port] ( identifier[self] ): literal[string] identifier[r] = identifier[re] . identifier[compile] ( identifier[self] . identifier[DISCOVER_PORT_PATTERN] ) keyword[with] identifier[open] ( identifier[self] . identifier[process_stderr] . identifier[name] , literal[string] ) keyword[as] identifier[f] : identifier[cnt] = literal[int] keyword[while] identifier[self] . identifier[process] . identifier[pid] keyword[and] identifier[cnt] < literal[int] : identifier[line] = identifier[f] . identifier[readline] () identifier[m] = identifier[r] . identifier[match] ( identifier[line] ) keyword[if] identifier[m] keyword[is] keyword[None] : identifier[cnt] += literal[int] identifier[time] . identifier[sleep] ( literal[int] ) keyword[else] : identifier[port] = identifier[int] ( identifier[m] . identifier[group] ( literal[string] )) keyword[return] identifier[port] keyword[else] : identifier[logger] . identifier[warning] ( literal[string] ) keyword[return] keyword[None]
def __discover_jmeter_udp_port(self): """Searching for line in jmeter.log such as Waiting for possible shutdown message on port 4445 """ r = re.compile(self.DISCOVER_PORT_PATTERN) with open(self.process_stderr.name, 'r') as f: cnt = 0 while self.process.pid and cnt < 10: line = f.readline() m = r.match(line) if m is None: cnt += 1 time.sleep(1) # depends on [control=['if'], data=[]] else: port = int(m.group('port')) return port # depends on [control=['while'], data=[]] else: logger.warning("JMeter UDP port wasn't discovered") return None # depends on [control=['with'], data=['f']]
def main(): '''main reoutine''' # validate command line arguments arg_parser = argparse.ArgumentParser() arg_parser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') arg_parser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') arg_parser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = arg_parser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as config_file: config_data = json.load(config_file) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = config_data['tenantId'] app_id = config_data['appId'] app_secret = config_data['appSecret'] subscription_id = config_data['subscriptionId'] # authenticate access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) # get public IPs public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) # print details if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) else: for pip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', pip['id']).group(1) ipaddr = pip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr)
def function[main, parameter[]]: constant[main reoutine] variable[arg_parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[arg_parser].add_argument, parameter[constant[--vmssname], constant[-n]]] call[name[arg_parser].add_argument, parameter[constant[--rgname], constant[-g]]] call[name[arg_parser].add_argument, parameter[constant[--details], constant[-a]]] variable[args] assign[=] call[name[arg_parser].parse_args, parameter[]] variable[name] assign[=] name[args].vmssname variable[rgname] assign[=] name[args].rgname variable[details] assign[=] name[args].details <ast.Try object at 0x7da1b054b670> variable[tenant_id] assign[=] call[name[config_data]][constant[tenantId]] variable[app_id] assign[=] call[name[config_data]][constant[appId]] variable[app_secret] assign[=] call[name[config_data]][constant[appSecret]] variable[subscription_id] assign[=] call[name[config_data]][constant[subscriptionId]] variable[access_token] assign[=] call[name[azurerm].get_access_token, parameter[name[tenant_id], name[app_id], name[app_secret]]] variable[public_ips] assign[=] call[name[azurerm].get_vmss_public_ips, parameter[name[access_token], name[subscription_id], name[rgname], name[name]]] if compare[name[details] is constant[True]] begin[:] call[name[print], parameter[call[name[json].dumps, parameter[name[public_ips]]]]]
keyword[def] identifier[main] (): literal[string] identifier[arg_parser] = identifier[argparse] . identifier[ArgumentParser] () identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[args] = identifier[arg_parser] . identifier[parse_args] () identifier[name] = identifier[args] . identifier[vmssname] identifier[rgname] = identifier[args] . identifier[rgname] identifier[details] = identifier[args] . identifier[details] keyword[try] : keyword[with] identifier[open] ( literal[string] ) keyword[as] identifier[config_file] : identifier[config_data] = identifier[json] . identifier[load] ( identifier[config_file] ) keyword[except] identifier[FileNotFoundError] : identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] () identifier[tenant_id] = identifier[config_data] [ literal[string] ] identifier[app_id] = identifier[config_data] [ literal[string] ] identifier[app_secret] = identifier[config_data] [ literal[string] ] identifier[subscription_id] = identifier[config_data] [ literal[string] ] identifier[access_token] = identifier[azurerm] . identifier[get_access_token] ( identifier[tenant_id] , identifier[app_id] , identifier[app_secret] ) identifier[public_ips] = identifier[azurerm] . 
identifier[get_vmss_public_ips] ( identifier[access_token] , identifier[subscription_id] , identifier[rgname] , identifier[name] ) keyword[if] identifier[details] keyword[is] keyword[True] : identifier[print] ( identifier[json] . identifier[dumps] ( identifier[public_ips] , identifier[sort_keys] = keyword[False] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] ))) keyword[else] : keyword[for] identifier[pip] keyword[in] identifier[public_ips] [ literal[string] ]: identifier[vm_id] = identifier[re] . identifier[search] ( literal[string] , identifier[pip] [ literal[string] ]). identifier[group] ( literal[int] ) identifier[ipaddr] = identifier[pip] [ literal[string] ][ literal[string] ] identifier[print] ( literal[string] + identifier[vm_id] + literal[string] + identifier[ipaddr] )
def main(): """main reoutine""" # validate command line arguments arg_parser = argparse.ArgumentParser() arg_parser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') arg_parser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') arg_parser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = arg_parser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as config_file: config_data = json.load(config_file) # depends on [control=['with'], data=['config_file']] # depends on [control=['try'], data=[]] except FileNotFoundError: print('Error: Expecting azurermconfig.json in current folder') sys.exit() # depends on [control=['except'], data=[]] tenant_id = config_data['tenantId'] app_id = config_data['appId'] app_secret = config_data['appSecret'] subscription_id = config_data['subscriptionId'] # authenticate access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) # get public IPs public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) # print details if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) # depends on [control=['if'], data=[]] else: for pip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', pip['id']).group(1) ipaddr = pip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr) # depends on [control=['for'], data=['pip']]
def category_changed_cb(self, selection, model): """ enables and disables action buttons depending on selected item """ (model, iter) = selection.get_selected() id = 0 if iter is None: self.activity_store.clear() else: self.prev_selected_activity = None id = model[iter][0] self.activity_store.load(model[iter][0]) #start with nothing self.get_widget('activity_edit').set_sensitive(False) self.get_widget('activity_remove').set_sensitive(False) return True
def function[category_changed_cb, parameter[self, selection, model]]: constant[ enables and disables action buttons depending on selected item ] <ast.Tuple object at 0x7da2049600d0> assign[=] call[name[selection].get_selected, parameter[]] variable[id] assign[=] constant[0] if compare[name[iter] is constant[None]] begin[:] call[name[self].activity_store.clear, parameter[]] call[call[name[self].get_widget, parameter[constant[activity_edit]]].set_sensitive, parameter[constant[False]]] call[call[name[self].get_widget, parameter[constant[activity_remove]]].set_sensitive, parameter[constant[False]]] return[constant[True]]
keyword[def] identifier[category_changed_cb] ( identifier[self] , identifier[selection] , identifier[model] ): literal[string] ( identifier[model] , identifier[iter] )= identifier[selection] . identifier[get_selected] () identifier[id] = literal[int] keyword[if] identifier[iter] keyword[is] keyword[None] : identifier[self] . identifier[activity_store] . identifier[clear] () keyword[else] : identifier[self] . identifier[prev_selected_activity] = keyword[None] identifier[id] = identifier[model] [ identifier[iter] ][ literal[int] ] identifier[self] . identifier[activity_store] . identifier[load] ( identifier[model] [ identifier[iter] ][ literal[int] ]) identifier[self] . identifier[get_widget] ( literal[string] ). identifier[set_sensitive] ( keyword[False] ) identifier[self] . identifier[get_widget] ( literal[string] ). identifier[set_sensitive] ( keyword[False] ) keyword[return] keyword[True]
def category_changed_cb(self, selection, model): """ enables and disables action buttons depending on selected item """ (model, iter) = selection.get_selected() id = 0 if iter is None: self.activity_store.clear() # depends on [control=['if'], data=[]] else: self.prev_selected_activity = None id = model[iter][0] self.activity_store.load(model[iter][0]) #start with nothing self.get_widget('activity_edit').set_sensitive(False) self.get_widget('activity_remove').set_sensitive(False) return True
def get_blob(self, index): """Return a blob with the event at the given index""" self.log.info("Retrieving blob #{}".format(index)) if index > len(self.event_offsets) - 1: self.log.info("Index not in cache, caching offsets") self._cache_offsets(index, verbose=False) self.blob_file.seek(self.event_offsets[index], 0) blob = self._create_blob() if blob is None: self.log.info("Empty blob created...") raise IndexError else: self.log.debug("Applying parsers...") for parser in self.parsers: parser(blob) self.log.debug("Returning the blob") return blob
def function[get_blob, parameter[self, index]]: constant[Return a blob with the event at the given index] call[name[self].log.info, parameter[call[constant[Retrieving blob #{}].format, parameter[name[index]]]]] if compare[name[index] greater[>] binary_operation[call[name[len], parameter[name[self].event_offsets]] - constant[1]]] begin[:] call[name[self].log.info, parameter[constant[Index not in cache, caching offsets]]] call[name[self]._cache_offsets, parameter[name[index]]] call[name[self].blob_file.seek, parameter[call[name[self].event_offsets][name[index]], constant[0]]] variable[blob] assign[=] call[name[self]._create_blob, parameter[]] if compare[name[blob] is constant[None]] begin[:] call[name[self].log.info, parameter[constant[Empty blob created...]]] <ast.Raise object at 0x7da207f01c30>
keyword[def] identifier[get_blob] ( identifier[self] , identifier[index] ): literal[string] identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[index] )) keyword[if] identifier[index] > identifier[len] ( identifier[self] . identifier[event_offsets] )- literal[int] : identifier[self] . identifier[log] . identifier[info] ( literal[string] ) identifier[self] . identifier[_cache_offsets] ( identifier[index] , identifier[verbose] = keyword[False] ) identifier[self] . identifier[blob_file] . identifier[seek] ( identifier[self] . identifier[event_offsets] [ identifier[index] ], literal[int] ) identifier[blob] = identifier[self] . identifier[_create_blob] () keyword[if] identifier[blob] keyword[is] keyword[None] : identifier[self] . identifier[log] . identifier[info] ( literal[string] ) keyword[raise] identifier[IndexError] keyword[else] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) keyword[for] identifier[parser] keyword[in] identifier[self] . identifier[parsers] : identifier[parser] ( identifier[blob] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) keyword[return] identifier[blob]
def get_blob(self, index): """Return a blob with the event at the given index""" self.log.info('Retrieving blob #{}'.format(index)) if index > len(self.event_offsets) - 1: self.log.info('Index not in cache, caching offsets') self._cache_offsets(index, verbose=False) # depends on [control=['if'], data=['index']] self.blob_file.seek(self.event_offsets[index], 0) blob = self._create_blob() if blob is None: self.log.info('Empty blob created...') raise IndexError # depends on [control=['if'], data=[]] else: self.log.debug('Applying parsers...') for parser in self.parsers: parser(blob) # depends on [control=['for'], data=['parser']] self.log.debug('Returning the blob') return blob
def detect_id_type(sid): """Method that tries to infer the type of abstract ID. Parameters ---------- sid : str The ID of an abstract on Scopus. Raises ------ ValueError If the ID type cannot be inferred. Notes ----- PII usually has 17 chars, but in Scopus there are valid cases with only 16 for old converted articles. Scopus ID contains only digits, but it can have leading zeros. If ID with leading zeros is treated as a number, SyntaxError can occur, or the ID will be rendered invalid and the type will be misinterpreted. """ sid = str(sid) if not sid.isnumeric(): if sid.startswith('2-s2.0-'): id_type = 'eid' elif '/' in sid: id_type = 'doi' elif 16 <= len(sid) <= 17: id_type = 'pii' elif sid.isnumeric(): if len(sid) < 10: id_type = 'pubmed_id' else: id_type = 'scopus_id' else: raise ValueError('ID type detection failed for \'{}\'.'.format(sid)) return id_type
def function[detect_id_type, parameter[sid]]: constant[Method that tries to infer the type of abstract ID. Parameters ---------- sid : str The ID of an abstract on Scopus. Raises ------ ValueError If the ID type cannot be inferred. Notes ----- PII usually has 17 chars, but in Scopus there are valid cases with only 16 for old converted articles. Scopus ID contains only digits, but it can have leading zeros. If ID with leading zeros is treated as a number, SyntaxError can occur, or the ID will be rendered invalid and the type will be misinterpreted. ] variable[sid] assign[=] call[name[str], parameter[name[sid]]] if <ast.UnaryOp object at 0x7da204345660> begin[:] if call[name[sid].startswith, parameter[constant[2-s2.0-]]] begin[:] variable[id_type] assign[=] constant[eid] return[name[id_type]]
keyword[def] identifier[detect_id_type] ( identifier[sid] ): literal[string] identifier[sid] = identifier[str] ( identifier[sid] ) keyword[if] keyword[not] identifier[sid] . identifier[isnumeric] (): keyword[if] identifier[sid] . identifier[startswith] ( literal[string] ): identifier[id_type] = literal[string] keyword[elif] literal[string] keyword[in] identifier[sid] : identifier[id_type] = literal[string] keyword[elif] literal[int] <= identifier[len] ( identifier[sid] )<= literal[int] : identifier[id_type] = literal[string] keyword[elif] identifier[sid] . identifier[isnumeric] (): keyword[if] identifier[len] ( identifier[sid] )< literal[int] : identifier[id_type] = literal[string] keyword[else] : identifier[id_type] = literal[string] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[sid] )) keyword[return] identifier[id_type]
def detect_id_type(sid): """Method that tries to infer the type of abstract ID. Parameters ---------- sid : str The ID of an abstract on Scopus. Raises ------ ValueError If the ID type cannot be inferred. Notes ----- PII usually has 17 chars, but in Scopus there are valid cases with only 16 for old converted articles. Scopus ID contains only digits, but it can have leading zeros. If ID with leading zeros is treated as a number, SyntaxError can occur, or the ID will be rendered invalid and the type will be misinterpreted. """ sid = str(sid) if not sid.isnumeric(): if sid.startswith('2-s2.0-'): id_type = 'eid' # depends on [control=['if'], data=[]] elif '/' in sid: id_type = 'doi' # depends on [control=['if'], data=[]] elif 16 <= len(sid) <= 17: id_type = 'pii' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif sid.isnumeric(): if len(sid) < 10: id_type = 'pubmed_id' # depends on [control=['if'], data=[]] else: id_type = 'scopus_id' # depends on [control=['if'], data=[]] else: raise ValueError("ID type detection failed for '{}'.".format(sid)) return id_type
def annotated(func, name=None): """Mark a function as callable from the command line. This function is meant to be called as decorator. This function also initializes metadata about the function's arguments that is built up by the param decorator. Args: func (callable): The function that we wish to mark as callable from the command line. name (str): Optional string that will override the function's built-in name. """ if hasattr(func, 'metadata'): if name is not None: func.metadata = AnnotatedMetadata(func, name) return func func.metadata = AnnotatedMetadata(func, name) func.finalizer = False func.takes_cmdline = False func.decorated = False func.context = False return func
def function[annotated, parameter[func, name]]: constant[Mark a function as callable from the command line. This function is meant to be called as decorator. This function also initializes metadata about the function's arguments that is built up by the param decorator. Args: func (callable): The function that we wish to mark as callable from the command line. name (str): Optional string that will override the function's built-in name. ] if call[name[hasattr], parameter[name[func], constant[metadata]]] begin[:] if compare[name[name] is_not constant[None]] begin[:] name[func].metadata assign[=] call[name[AnnotatedMetadata], parameter[name[func], name[name]]] return[name[func]] name[func].metadata assign[=] call[name[AnnotatedMetadata], parameter[name[func], name[name]]] name[func].finalizer assign[=] constant[False] name[func].takes_cmdline assign[=] constant[False] name[func].decorated assign[=] constant[False] name[func].context assign[=] constant[False] return[name[func]]
keyword[def] identifier[annotated] ( identifier[func] , identifier[name] = keyword[None] ): literal[string] keyword[if] identifier[hasattr] ( identifier[func] , literal[string] ): keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] : identifier[func] . identifier[metadata] = identifier[AnnotatedMetadata] ( identifier[func] , identifier[name] ) keyword[return] identifier[func] identifier[func] . identifier[metadata] = identifier[AnnotatedMetadata] ( identifier[func] , identifier[name] ) identifier[func] . identifier[finalizer] = keyword[False] identifier[func] . identifier[takes_cmdline] = keyword[False] identifier[func] . identifier[decorated] = keyword[False] identifier[func] . identifier[context] = keyword[False] keyword[return] identifier[func]
def annotated(func, name=None): """Mark a function as callable from the command line. This function is meant to be called as decorator. This function also initializes metadata about the function's arguments that is built up by the param decorator. Args: func (callable): The function that we wish to mark as callable from the command line. name (str): Optional string that will override the function's built-in name. """ if hasattr(func, 'metadata'): if name is not None: func.metadata = AnnotatedMetadata(func, name) # depends on [control=['if'], data=['name']] return func # depends on [control=['if'], data=[]] func.metadata = AnnotatedMetadata(func, name) func.finalizer = False func.takes_cmdline = False func.decorated = False func.context = False return func
def make_table_parser() -> cmd2.argparse_completer.ACArgumentParser: """Create a unique instance of an argparse Argument parser for processing table arguments. NOTE: The two cmd2 argparse decorators require that each parser be unique, even if they are essentially a deep copy of each other. For cases like that, you can create a function to return a unique instance of a parser, which is what is being done here. """ table_parser = cmd2.argparse_completer.ACArgumentParser() table_item_group = table_parser.add_mutually_exclusive_group() table_item_group.add_argument('-c', '--color', action='store_true', help='Enable color') table_item_group.add_argument('-f', '--fancy', action='store_true', help='Fancy Grid') table_item_group.add_argument('-s', '--sparse', action='store_true', help='Sparse Grid') return table_parser
def function[make_table_parser, parameter[]]: constant[Create a unique instance of an argparse Argument parser for processing table arguments. NOTE: The two cmd2 argparse decorators require that each parser be unique, even if they are essentially a deep copy of each other. For cases like that, you can create a function to return a unique instance of a parser, which is what is being done here. ] variable[table_parser] assign[=] call[name[cmd2].argparse_completer.ACArgumentParser, parameter[]] variable[table_item_group] assign[=] call[name[table_parser].add_mutually_exclusive_group, parameter[]] call[name[table_item_group].add_argument, parameter[constant[-c], constant[--color]]] call[name[table_item_group].add_argument, parameter[constant[-f], constant[--fancy]]] call[name[table_item_group].add_argument, parameter[constant[-s], constant[--sparse]]] return[name[table_parser]]
keyword[def] identifier[make_table_parser] ()-> identifier[cmd2] . identifier[argparse_completer] . identifier[ACArgumentParser] : literal[string] identifier[table_parser] = identifier[cmd2] . identifier[argparse_completer] . identifier[ACArgumentParser] () identifier[table_item_group] = identifier[table_parser] . identifier[add_mutually_exclusive_group] () identifier[table_item_group] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[table_item_group] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[table_item_group] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) keyword[return] identifier[table_parser]
def make_table_parser() -> cmd2.argparse_completer.ACArgumentParser: """Create a unique instance of an argparse Argument parser for processing table arguments. NOTE: The two cmd2 argparse decorators require that each parser be unique, even if they are essentially a deep copy of each other. For cases like that, you can create a function to return a unique instance of a parser, which is what is being done here. """ table_parser = cmd2.argparse_completer.ACArgumentParser() table_item_group = table_parser.add_mutually_exclusive_group() table_item_group.add_argument('-c', '--color', action='store_true', help='Enable color') table_item_group.add_argument('-f', '--fancy', action='store_true', help='Fancy Grid') table_item_group.add_argument('-s', '--sparse', action='store_true', help='Sparse Grid') return table_parser
def process_headers(data, filename): """Read headers from toc data.""" headers = [] if 'toc' in data: for element in PyQuery(data['toc'])('a'): headers.append(recurse_while_none(element)) if None in headers: log.info('Unable to index file headers for: %s', filename) return headers
def function[process_headers, parameter[data, filename]]: constant[Read headers from toc data.] variable[headers] assign[=] list[[]] if compare[constant[toc] in name[data]] begin[:] for taget[name[element]] in starred[call[call[name[PyQuery], parameter[call[name[data]][constant[toc]]]], parameter[constant[a]]]] begin[:] call[name[headers].append, parameter[call[name[recurse_while_none], parameter[name[element]]]]] if compare[constant[None] in name[headers]] begin[:] call[name[log].info, parameter[constant[Unable to index file headers for: %s], name[filename]]] return[name[headers]]
keyword[def] identifier[process_headers] ( identifier[data] , identifier[filename] ): literal[string] identifier[headers] =[] keyword[if] literal[string] keyword[in] identifier[data] : keyword[for] identifier[element] keyword[in] identifier[PyQuery] ( identifier[data] [ literal[string] ])( literal[string] ): identifier[headers] . identifier[append] ( identifier[recurse_while_none] ( identifier[element] )) keyword[if] keyword[None] keyword[in] identifier[headers] : identifier[log] . identifier[info] ( literal[string] , identifier[filename] ) keyword[return] identifier[headers]
def process_headers(data, filename): """Read headers from toc data.""" headers = [] if 'toc' in data: for element in PyQuery(data['toc'])('a'): headers.append(recurse_while_none(element)) # depends on [control=['for'], data=['element']] if None in headers: log.info('Unable to index file headers for: %s', filename) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['data']] return headers
def assert_allclose(actual, desired, rtol=1.e-5, atol=1.e-8, err_msg='', verbose=True): r"""wrapper for numpy.testing.allclose with default tolerances of numpy.allclose. Needed since testing method has different values.""" return assert_allclose_np(actual, desired, rtol=rtol, atol=atol, err_msg=err_msg, verbose=verbose)
def function[assert_allclose, parameter[actual, desired, rtol, atol, err_msg, verbose]]: constant[wrapper for numpy.testing.allclose with default tolerances of numpy.allclose. Needed since testing method has different values.] return[call[name[assert_allclose_np], parameter[name[actual], name[desired]]]]
keyword[def] identifier[assert_allclose] ( identifier[actual] , identifier[desired] , identifier[rtol] = literal[int] , identifier[atol] = literal[int] , identifier[err_msg] = literal[string] , identifier[verbose] = keyword[True] ): literal[string] keyword[return] identifier[assert_allclose_np] ( identifier[actual] , identifier[desired] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] , identifier[err_msg] = identifier[err_msg] , identifier[verbose] = identifier[verbose] )
def assert_allclose(actual, desired, rtol=1e-05, atol=1e-08, err_msg='', verbose=True): """wrapper for numpy.testing.allclose with default tolerances of numpy.allclose. Needed since testing method has different values.""" return assert_allclose_np(actual, desired, rtol=rtol, atol=atol, err_msg=err_msg, verbose=verbose)
def _fill_cache(self, namespace): """Load all modules found in a namespace""" modules = self._findPluginModules(namespace) self._cache = list(modules)
def function[_fill_cache, parameter[self, namespace]]: constant[Load all modules found in a namespace] variable[modules] assign[=] call[name[self]._findPluginModules, parameter[name[namespace]]] name[self]._cache assign[=] call[name[list], parameter[name[modules]]]
keyword[def] identifier[_fill_cache] ( identifier[self] , identifier[namespace] ): literal[string] identifier[modules] = identifier[self] . identifier[_findPluginModules] ( identifier[namespace] ) identifier[self] . identifier[_cache] = identifier[list] ( identifier[modules] )
def _fill_cache(self, namespace): """Load all modules found in a namespace""" modules = self._findPluginModules(namespace) self._cache = list(modules)
def rewrite_reserved_words(func): """ Given a function whos kwargs need to contain a reserved word such as `in`, allow calling that function with the keyword as `in_`, such that function kwargs are rewritten to use the reserved word. """ @partial_safe_wraps(func) def inner(*args, **kwargs): for word in RESERVED_WORDS: key = "{0}_".format(word) if key in kwargs: kwargs[word] = kwargs.pop(key) return func(*args, **kwargs) return inner
def function[rewrite_reserved_words, parameter[func]]: constant[ Given a function whos kwargs need to contain a reserved word such as `in`, allow calling that function with the keyword as `in_`, such that function kwargs are rewritten to use the reserved word. ] def function[inner, parameter[]]: for taget[name[word]] in starred[name[RESERVED_WORDS]] begin[:] variable[key] assign[=] call[constant[{0}_].format, parameter[name[word]]] if compare[name[key] in name[kwargs]] begin[:] call[name[kwargs]][name[word]] assign[=] call[name[kwargs].pop, parameter[name[key]]] return[call[name[func], parameter[<ast.Starred object at 0x7da18bc73d90>]]] return[name[inner]]
keyword[def] identifier[rewrite_reserved_words] ( identifier[func] ): literal[string] @ identifier[partial_safe_wraps] ( identifier[func] ) keyword[def] identifier[inner] (* identifier[args] ,** identifier[kwargs] ): keyword[for] identifier[word] keyword[in] identifier[RESERVED_WORDS] : identifier[key] = literal[string] . identifier[format] ( identifier[word] ) keyword[if] identifier[key] keyword[in] identifier[kwargs] : identifier[kwargs] [ identifier[word] ]= identifier[kwargs] . identifier[pop] ( identifier[key] ) keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[inner]
def rewrite_reserved_words(func): """ Given a function whos kwargs need to contain a reserved word such as `in`, allow calling that function with the keyword as `in_`, such that function kwargs are rewritten to use the reserved word. """ @partial_safe_wraps(func) def inner(*args, **kwargs): for word in RESERVED_WORDS: key = '{0}_'.format(word) if key in kwargs: kwargs[word] = kwargs.pop(key) # depends on [control=['if'], data=['key', 'kwargs']] # depends on [control=['for'], data=['word']] return func(*args, **kwargs) return inner
def get_public_cms_app_namespaces(): """ :return: a tuple() with all cms app namespaces """ qs = Page.objects.public() qs = qs.exclude(application_namespace=None) qs = qs.order_by('application_namespace') try: application_namespaces = list( qs.distinct('application_namespace').values_list( 'application_namespace', flat=True)) except NotImplementedError: # If SQLite used: # DISTINCT ON fields is not supported by this database backend application_namespaces = list( set(qs.values_list('application_namespace', flat=True))) application_namespaces.sort() return tuple(application_namespaces)
def function[get_public_cms_app_namespaces, parameter[]]: constant[ :return: a tuple() with all cms app namespaces ] variable[qs] assign[=] call[name[Page].objects.public, parameter[]] variable[qs] assign[=] call[name[qs].exclude, parameter[]] variable[qs] assign[=] call[name[qs].order_by, parameter[constant[application_namespace]]] <ast.Try object at 0x7da20c6ab820> call[name[application_namespaces].sort, parameter[]] return[call[name[tuple], parameter[name[application_namespaces]]]]
keyword[def] identifier[get_public_cms_app_namespaces] (): literal[string] identifier[qs] = identifier[Page] . identifier[objects] . identifier[public] () identifier[qs] = identifier[qs] . identifier[exclude] ( identifier[application_namespace] = keyword[None] ) identifier[qs] = identifier[qs] . identifier[order_by] ( literal[string] ) keyword[try] : identifier[application_namespaces] = identifier[list] ( identifier[qs] . identifier[distinct] ( literal[string] ). identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )) keyword[except] identifier[NotImplementedError] : identifier[application_namespaces] = identifier[list] ( identifier[set] ( identifier[qs] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] ))) identifier[application_namespaces] . identifier[sort] () keyword[return] identifier[tuple] ( identifier[application_namespaces] )
def get_public_cms_app_namespaces(): """ :return: a tuple() with all cms app namespaces """ qs = Page.objects.public() qs = qs.exclude(application_namespace=None) qs = qs.order_by('application_namespace') try: application_namespaces = list(qs.distinct('application_namespace').values_list('application_namespace', flat=True)) # depends on [control=['try'], data=[]] except NotImplementedError: # If SQLite used: # DISTINCT ON fields is not supported by this database backend application_namespaces = list(set(qs.values_list('application_namespace', flat=True))) # depends on [control=['except'], data=[]] application_namespaces.sort() return tuple(application_namespaces)
def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if six.PY2: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass
def function[python_2_unicode_compatible, parameter[klass]]: constant[ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. ] if name[six].PY2 begin[:] name[klass].__unicode__ assign[=] name[klass].__str__ name[klass].__str__ assign[=] <ast.Lambda object at 0x7da1b0fc5d80> return[name[klass]]
keyword[def] identifier[python_2_unicode_compatible] ( identifier[klass] ): literal[string] keyword[if] identifier[six] . identifier[PY2] : identifier[klass] . identifier[__unicode__] = identifier[klass] . identifier[__str__] identifier[klass] . identifier[__str__] = keyword[lambda] identifier[self] : identifier[self] . identifier[__unicode__] (). identifier[encode] ( literal[string] ) keyword[return] identifier[klass]
def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if six.PY2: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') # depends on [control=['if'], data=[]] return klass
def diffheaders(t1, t2): """ Return the difference between the headers of the two tables as a pair of sets. E.g.:: >>> import petl as etl >>> table1 = [['foo', 'bar', 'baz'], ... ['a', 1, .3]] >>> table2 = [['baz', 'bar', 'quux'], ... ['a', 1, .3]] >>> add, sub = etl.diffheaders(table1, table2) >>> add {'quux'} >>> sub {'foo'} """ t1h = set(header(t1)) t2h = set(header(t2)) return t2h - t1h, t1h - t2h
def function[diffheaders, parameter[t1, t2]]: constant[ Return the difference between the headers of the two tables as a pair of sets. E.g.:: >>> import petl as etl >>> table1 = [['foo', 'bar', 'baz'], ... ['a', 1, .3]] >>> table2 = [['baz', 'bar', 'quux'], ... ['a', 1, .3]] >>> add, sub = etl.diffheaders(table1, table2) >>> add {'quux'} >>> sub {'foo'} ] variable[t1h] assign[=] call[name[set], parameter[call[name[header], parameter[name[t1]]]]] variable[t2h] assign[=] call[name[set], parameter[call[name[header], parameter[name[t2]]]]] return[tuple[[<ast.BinOp object at 0x7da18f09e320>, <ast.BinOp object at 0x7da18f09f580>]]]
keyword[def] identifier[diffheaders] ( identifier[t1] , identifier[t2] ): literal[string] identifier[t1h] = identifier[set] ( identifier[header] ( identifier[t1] )) identifier[t2h] = identifier[set] ( identifier[header] ( identifier[t2] )) keyword[return] identifier[t2h] - identifier[t1h] , identifier[t1h] - identifier[t2h]
def diffheaders(t1, t2): """ Return the difference between the headers of the two tables as a pair of sets. E.g.:: >>> import petl as etl >>> table1 = [['foo', 'bar', 'baz'], ... ['a', 1, .3]] >>> table2 = [['baz', 'bar', 'quux'], ... ['a', 1, .3]] >>> add, sub = etl.diffheaders(table1, table2) >>> add {'quux'} >>> sub {'foo'} """ t1h = set(header(t1)) t2h = set(header(t2)) return (t2h - t1h, t1h - t2h)
def set_link_status(link_id, status, **kwargs): """ Set the status of a link """ user_id = kwargs.get('user_id') #check_perm(user_id, 'edit_topology') try: link_i = db.DBSession.query(Link).filter(Link.id == link_id).one() except NoResultFound: raise ResourceNotFoundError("Link %s not found"%(link_id)) link_i.network.check_write_permission(user_id) link_i.status = status db.DBSession.flush()
def function[set_link_status, parameter[link_id, status]]: constant[ Set the status of a link ] variable[user_id] assign[=] call[name[kwargs].get, parameter[constant[user_id]]] <ast.Try object at 0x7da2045660b0> call[name[link_i].network.check_write_permission, parameter[name[user_id]]] name[link_i].status assign[=] name[status] call[name[db].DBSession.flush, parameter[]]
keyword[def] identifier[set_link_status] ( identifier[link_id] , identifier[status] ,** identifier[kwargs] ): literal[string] identifier[user_id] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[try] : identifier[link_i] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Link] ). identifier[filter] ( identifier[Link] . identifier[id] == identifier[link_id] ). identifier[one] () keyword[except] identifier[NoResultFound] : keyword[raise] identifier[ResourceNotFoundError] ( literal[string] %( identifier[link_id] )) identifier[link_i] . identifier[network] . identifier[check_write_permission] ( identifier[user_id] ) identifier[link_i] . identifier[status] = identifier[status] identifier[db] . identifier[DBSession] . identifier[flush] ()
def set_link_status(link_id, status, **kwargs): """ Set the status of a link """ user_id = kwargs.get('user_id') #check_perm(user_id, 'edit_topology') try: link_i = db.DBSession.query(Link).filter(Link.id == link_id).one() # depends on [control=['try'], data=[]] except NoResultFound: raise ResourceNotFoundError('Link %s not found' % link_id) # depends on [control=['except'], data=[]] link_i.network.check_write_permission(user_id) link_i.status = status db.DBSession.flush()
def rpoplpush(self, src, dst): """ RPOP a value off of the ``src`` list and atomically LPUSH it on to the ``dst`` list. Returns the value. """ with self.pipe as pipe: f = Future() res = pipe.rpoplpush(self.redis_key(src), self.redis_key(dst)) def cb(): f.set(self.valueparse.decode(res.result)) pipe.on_execute(cb) return f
def function[rpoplpush, parameter[self, src, dst]]: constant[ RPOP a value off of the ``src`` list and atomically LPUSH it on to the ``dst`` list. Returns the value. ] with name[self].pipe begin[:] variable[f] assign[=] call[name[Future], parameter[]] variable[res] assign[=] call[name[pipe].rpoplpush, parameter[call[name[self].redis_key, parameter[name[src]]], call[name[self].redis_key, parameter[name[dst]]]]] def function[cb, parameter[]]: call[name[f].set, parameter[call[name[self].valueparse.decode, parameter[name[res].result]]]] call[name[pipe].on_execute, parameter[name[cb]]] return[name[f]]
keyword[def] identifier[rpoplpush] ( identifier[self] , identifier[src] , identifier[dst] ): literal[string] keyword[with] identifier[self] . identifier[pipe] keyword[as] identifier[pipe] : identifier[f] = identifier[Future] () identifier[res] = identifier[pipe] . identifier[rpoplpush] ( identifier[self] . identifier[redis_key] ( identifier[src] ), identifier[self] . identifier[redis_key] ( identifier[dst] )) keyword[def] identifier[cb] (): identifier[f] . identifier[set] ( identifier[self] . identifier[valueparse] . identifier[decode] ( identifier[res] . identifier[result] )) identifier[pipe] . identifier[on_execute] ( identifier[cb] ) keyword[return] identifier[f]
def rpoplpush(self, src, dst): """ RPOP a value off of the ``src`` list and atomically LPUSH it on to the ``dst`` list. Returns the value. """ with self.pipe as pipe: f = Future() res = pipe.rpoplpush(self.redis_key(src), self.redis_key(dst)) def cb(): f.set(self.valueparse.decode(res.result)) pipe.on_execute(cb) return f # depends on [control=['with'], data=['pipe']]
def inbounds(self, min, max): """ Check if a region falls entirely inside bounds. Parameters ---------- min : tuple Minimum bound to check for each axis. max : tuple Maximum bound to check for each axis. """ mincheck = sum(self.coordinates >= min, axis=1) == 0 maxcheck = sum(self.coordinates < max, axis=1) == 0 return True if (mincheck.sum() + maxcheck.sum()) == 0 else False
def function[inbounds, parameter[self, min, max]]: constant[ Check if a region falls entirely inside bounds. Parameters ---------- min : tuple Minimum bound to check for each axis. max : tuple Maximum bound to check for each axis. ] variable[mincheck] assign[=] compare[call[name[sum], parameter[compare[name[self].coordinates greater_or_equal[>=] name[min]]]] equal[==] constant[0]] variable[maxcheck] assign[=] compare[call[name[sum], parameter[compare[name[self].coordinates less[<] name[max]]]] equal[==] constant[0]] return[<ast.IfExp object at 0x7da1b095ece0>]
keyword[def] identifier[inbounds] ( identifier[self] , identifier[min] , identifier[max] ): literal[string] identifier[mincheck] = identifier[sum] ( identifier[self] . identifier[coordinates] >= identifier[min] , identifier[axis] = literal[int] )== literal[int] identifier[maxcheck] = identifier[sum] ( identifier[self] . identifier[coordinates] < identifier[max] , identifier[axis] = literal[int] )== literal[int] keyword[return] keyword[True] keyword[if] ( identifier[mincheck] . identifier[sum] ()+ identifier[maxcheck] . identifier[sum] ())== literal[int] keyword[else] keyword[False]
def inbounds(self, min, max): """ Check if a region falls entirely inside bounds. Parameters ---------- min : tuple Minimum bound to check for each axis. max : tuple Maximum bound to check for each axis. """ mincheck = sum(self.coordinates >= min, axis=1) == 0 maxcheck = sum(self.coordinates < max, axis=1) == 0 return True if mincheck.sum() + maxcheck.sum() == 0 else False
def validate(self, request, response): """ refreshes a resource when a validation response is received :param request: :param response: :return: """ element = self.search_response(request) if element is not None: element.cached_response.options = response.options element.freshness = True element.max_age = response.max_age element.creation_time = time.time() element.uri = request.proxy_uri
def function[validate, parameter[self, request, response]]: constant[ refreshes a resource when a validation response is received :param request: :param response: :return: ] variable[element] assign[=] call[name[self].search_response, parameter[name[request]]] if compare[name[element] is_not constant[None]] begin[:] name[element].cached_response.options assign[=] name[response].options name[element].freshness assign[=] constant[True] name[element].max_age assign[=] name[response].max_age name[element].creation_time assign[=] call[name[time].time, parameter[]] name[element].uri assign[=] name[request].proxy_uri
keyword[def] identifier[validate] ( identifier[self] , identifier[request] , identifier[response] ): literal[string] identifier[element] = identifier[self] . identifier[search_response] ( identifier[request] ) keyword[if] identifier[element] keyword[is] keyword[not] keyword[None] : identifier[element] . identifier[cached_response] . identifier[options] = identifier[response] . identifier[options] identifier[element] . identifier[freshness] = keyword[True] identifier[element] . identifier[max_age] = identifier[response] . identifier[max_age] identifier[element] . identifier[creation_time] = identifier[time] . identifier[time] () identifier[element] . identifier[uri] = identifier[request] . identifier[proxy_uri]
def validate(self, request, response): """ refreshes a resource when a validation response is received :param request: :param response: :return: """ element = self.search_response(request) if element is not None: element.cached_response.options = response.options element.freshness = True element.max_age = response.max_age element.creation_time = time.time() element.uri = request.proxy_uri # depends on [control=['if'], data=['element']]
def is_str(tg_type, inc_array=False): """Tells if the given tango type is string :param tg_type: tango type :type tg_type: :class:`tango.CmdArgType` :param inc_array: (optional, default is False) determines if include array in the list of checked types :type inc_array: :py:obj:`bool` :return: True if the given tango type is string or False otherwise :rtype: :py:obj:`bool` """ global _scalar_str_types, _array_str_types if tg_type in _scalar_str_types: return True if not inc_array: return False return tg_type in _array_str_types
def function[is_str, parameter[tg_type, inc_array]]: constant[Tells if the given tango type is string :param tg_type: tango type :type tg_type: :class:`tango.CmdArgType` :param inc_array: (optional, default is False) determines if include array in the list of checked types :type inc_array: :py:obj:`bool` :return: True if the given tango type is string or False otherwise :rtype: :py:obj:`bool` ] <ast.Global object at 0x7da20c991ed0> if compare[name[tg_type] in name[_scalar_str_types]] begin[:] return[constant[True]] if <ast.UnaryOp object at 0x7da20c991900> begin[:] return[constant[False]] return[compare[name[tg_type] in name[_array_str_types]]]
keyword[def] identifier[is_str] ( identifier[tg_type] , identifier[inc_array] = keyword[False] ): literal[string] keyword[global] identifier[_scalar_str_types] , identifier[_array_str_types] keyword[if] identifier[tg_type] keyword[in] identifier[_scalar_str_types] : keyword[return] keyword[True] keyword[if] keyword[not] identifier[inc_array] : keyword[return] keyword[False] keyword[return] identifier[tg_type] keyword[in] identifier[_array_str_types]
def is_str(tg_type, inc_array=False): """Tells if the given tango type is string :param tg_type: tango type :type tg_type: :class:`tango.CmdArgType` :param inc_array: (optional, default is False) determines if include array in the list of checked types :type inc_array: :py:obj:`bool` :return: True if the given tango type is string or False otherwise :rtype: :py:obj:`bool` """ global _scalar_str_types, _array_str_types if tg_type in _scalar_str_types: return True # depends on [control=['if'], data=[]] if not inc_array: return False # depends on [control=['if'], data=[]] return tg_type in _array_str_types
def renamecol(self, old, new): """ Rename column or color in-place. Method wraps:: tabular.spreadsheet.renamecol(self, old, new) """ spreadsheet.renamecol(self,old,new) for x in self.coloring.keys(): if old in self.coloring[x]: ind = self.coloring[x].index(old) self.coloring[x][ind] = new
def function[renamecol, parameter[self, old, new]]: constant[ Rename column or color in-place. Method wraps:: tabular.spreadsheet.renamecol(self, old, new) ] call[name[spreadsheet].renamecol, parameter[name[self], name[old], name[new]]] for taget[name[x]] in starred[call[name[self].coloring.keys, parameter[]]] begin[:] if compare[name[old] in call[name[self].coloring][name[x]]] begin[:] variable[ind] assign[=] call[call[name[self].coloring][name[x]].index, parameter[name[old]]] call[call[name[self].coloring][name[x]]][name[ind]] assign[=] name[new]
keyword[def] identifier[renamecol] ( identifier[self] , identifier[old] , identifier[new] ): literal[string] identifier[spreadsheet] . identifier[renamecol] ( identifier[self] , identifier[old] , identifier[new] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[coloring] . identifier[keys] (): keyword[if] identifier[old] keyword[in] identifier[self] . identifier[coloring] [ identifier[x] ]: identifier[ind] = identifier[self] . identifier[coloring] [ identifier[x] ]. identifier[index] ( identifier[old] ) identifier[self] . identifier[coloring] [ identifier[x] ][ identifier[ind] ]= identifier[new]
def renamecol(self, old, new): """ Rename column or color in-place. Method wraps:: tabular.spreadsheet.renamecol(self, old, new) """ spreadsheet.renamecol(self, old, new) for x in self.coloring.keys(): if old in self.coloring[x]: ind = self.coloring[x].index(old) self.coloring[x][ind] = new # depends on [control=['if'], data=['old']] # depends on [control=['for'], data=['x']]
def _remove_exts(self,string): """ Sets the string, to create the Robohash """ # If the user hasn't disabled it, we will detect image extensions, such as .png, .jpg, etc. # We'll remove them from the string before hashing. # This ensures that /Bear.png and /Bear.bmp will send back the same image, in different formats. if string.lower().endswith(('.png','.gif','.jpg','.bmp','.jpeg','.ppm','.datauri')): format = string[string.rfind('.') +1 :len(string)] if format.lower() == 'jpg': format = 'jpeg' self.format = format string = string[0:string.rfind('.')] return string
def function[_remove_exts, parameter[self, string]]: constant[ Sets the string, to create the Robohash ] if call[call[name[string].lower, parameter[]].endswith, parameter[tuple[[<ast.Constant object at 0x7da1b12c8f40>, <ast.Constant object at 0x7da1b12cbf40>, <ast.Constant object at 0x7da1b12ca2c0>, <ast.Constant object at 0x7da1b12c9360>, <ast.Constant object at 0x7da1b12ca830>, <ast.Constant object at 0x7da1b12c95d0>, <ast.Constant object at 0x7da1b12c9960>]]]] begin[:] variable[format] assign[=] call[name[string]][<ast.Slice object at 0x7da1b12cacb0>] if compare[call[name[format].lower, parameter[]] equal[==] constant[jpg]] begin[:] variable[format] assign[=] constant[jpeg] name[self].format assign[=] name[format] variable[string] assign[=] call[name[string]][<ast.Slice object at 0x7da2044c0280>] return[name[string]]
keyword[def] identifier[_remove_exts] ( identifier[self] , identifier[string] ): literal[string] keyword[if] identifier[string] . identifier[lower] (). identifier[endswith] (( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )): identifier[format] = identifier[string] [ identifier[string] . identifier[rfind] ( literal[string] )+ literal[int] : identifier[len] ( identifier[string] )] keyword[if] identifier[format] . identifier[lower] ()== literal[string] : identifier[format] = literal[string] identifier[self] . identifier[format] = identifier[format] identifier[string] = identifier[string] [ literal[int] : identifier[string] . identifier[rfind] ( literal[string] )] keyword[return] identifier[string]
def _remove_exts(self, string): """ Sets the string, to create the Robohash """ # If the user hasn't disabled it, we will detect image extensions, such as .png, .jpg, etc. # We'll remove them from the string before hashing. # This ensures that /Bear.png and /Bear.bmp will send back the same image, in different formats. if string.lower().endswith(('.png', '.gif', '.jpg', '.bmp', '.jpeg', '.ppm', '.datauri')): format = string[string.rfind('.') + 1:len(string)] if format.lower() == 'jpg': format = 'jpeg' # depends on [control=['if'], data=[]] self.format = format string = string[0:string.rfind('.')] # depends on [control=['if'], data=[]] return string
def get_fields(node, fields_tag="field_list"): """Get the field names and their values from a node. :sig: (Document, str) -> Dict[str, str] :param node: Node to get the fields from. :param fields_tag: Tag of child node that contains the fields. :return: Names and values of fields. """ fields_nodes = [c for c in node.children if c.tagname == fields_tag] if len(fields_nodes) == 0: return {} assert len(fields_nodes) == 1, "multiple nodes with tag " + fields_tag fields_node = fields_nodes[0] fields = [ {f.tagname: f.rawsource.strip() for f in n.children} for n in fields_node.children if n.tagname == "field" ] return {f["field_name"]: f["field_body"] for f in fields}
def function[get_fields, parameter[node, fields_tag]]: constant[Get the field names and their values from a node. :sig: (Document, str) -> Dict[str, str] :param node: Node to get the fields from. :param fields_tag: Tag of child node that contains the fields. :return: Names and values of fields. ] variable[fields_nodes] assign[=] <ast.ListComp object at 0x7da2054a4040> if compare[call[name[len], parameter[name[fields_nodes]]] equal[==] constant[0]] begin[:] return[dictionary[[], []]] assert[compare[call[name[len], parameter[name[fields_nodes]]] equal[==] constant[1]]] variable[fields_node] assign[=] call[name[fields_nodes]][constant[0]] variable[fields] assign[=] <ast.ListComp object at 0x7da2054a4be0> return[<ast.DictComp object at 0x7da2054a6d40>]
keyword[def] identifier[get_fields] ( identifier[node] , identifier[fields_tag] = literal[string] ): literal[string] identifier[fields_nodes] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[node] . identifier[children] keyword[if] identifier[c] . identifier[tagname] == identifier[fields_tag] ] keyword[if] identifier[len] ( identifier[fields_nodes] )== literal[int] : keyword[return] {} keyword[assert] identifier[len] ( identifier[fields_nodes] )== literal[int] , literal[string] + identifier[fields_tag] identifier[fields_node] = identifier[fields_nodes] [ literal[int] ] identifier[fields] =[ { identifier[f] . identifier[tagname] : identifier[f] . identifier[rawsource] . identifier[strip] () keyword[for] identifier[f] keyword[in] identifier[n] . identifier[children] } keyword[for] identifier[n] keyword[in] identifier[fields_node] . identifier[children] keyword[if] identifier[n] . identifier[tagname] == literal[string] ] keyword[return] { identifier[f] [ literal[string] ]: identifier[f] [ literal[string] ] keyword[for] identifier[f] keyword[in] identifier[fields] }
def get_fields(node, fields_tag='field_list'): """Get the field names and their values from a node. :sig: (Document, str) -> Dict[str, str] :param node: Node to get the fields from. :param fields_tag: Tag of child node that contains the fields. :return: Names and values of fields. """ fields_nodes = [c for c in node.children if c.tagname == fields_tag] if len(fields_nodes) == 0: return {} # depends on [control=['if'], data=[]] assert len(fields_nodes) == 1, 'multiple nodes with tag ' + fields_tag fields_node = fields_nodes[0] fields = [{f.tagname: f.rawsource.strip() for f in n.children} for n in fields_node.children if n.tagname == 'field'] return {f['field_name']: f['field_body'] for f in fields}
def to_csv(self, filename, delimiter=",", recommended_only=False, include_io=True): """ Return a CSV for each model and dataset. Parameters ---------- filename : str or file Either the file name (string) or an open file (file-like object) where the data will be saved. delimiter : str, optional Delimiter used in CSV file between fields. recommended_only : bool, optional If True, only recommended models for each session are included. If no model is recommended, then a row with it's ID will be included, but all fields will be null. include_io : bool, optional If True, then the input/output files from BMDS will also be included, specifically the (d) input file and the out file. Returns ------- None """ df = self.to_df(recommended_only, include_io) df.to_csv(filename, index=False, sep=delimiter)
def function[to_csv, parameter[self, filename, delimiter, recommended_only, include_io]]: constant[ Return a CSV for each model and dataset. Parameters ---------- filename : str or file Either the file name (string) or an open file (file-like object) where the data will be saved. delimiter : str, optional Delimiter used in CSV file between fields. recommended_only : bool, optional If True, only recommended models for each session are included. If no model is recommended, then a row with it's ID will be included, but all fields will be null. include_io : bool, optional If True, then the input/output files from BMDS will also be included, specifically the (d) input file and the out file. Returns ------- None ] variable[df] assign[=] call[name[self].to_df, parameter[name[recommended_only], name[include_io]]] call[name[df].to_csv, parameter[name[filename]]]
keyword[def] identifier[to_csv] ( identifier[self] , identifier[filename] , identifier[delimiter] = literal[string] , identifier[recommended_only] = keyword[False] , identifier[include_io] = keyword[True] ): literal[string] identifier[df] = identifier[self] . identifier[to_df] ( identifier[recommended_only] , identifier[include_io] ) identifier[df] . identifier[to_csv] ( identifier[filename] , identifier[index] = keyword[False] , identifier[sep] = identifier[delimiter] )
def to_csv(self, filename, delimiter=',', recommended_only=False, include_io=True): """ Return a CSV for each model and dataset. Parameters ---------- filename : str or file Either the file name (string) or an open file (file-like object) where the data will be saved. delimiter : str, optional Delimiter used in CSV file between fields. recommended_only : bool, optional If True, only recommended models for each session are included. If no model is recommended, then a row with it's ID will be included, but all fields will be null. include_io : bool, optional If True, then the input/output files from BMDS will also be included, specifically the (d) input file and the out file. Returns ------- None """ df = self.to_df(recommended_only, include_io) df.to_csv(filename, index=False, sep=delimiter)
async def unignore(self, ctx, *channels: discord.Channel): """Unignores channels from being processed. If no channels are specified, it unignores the current channel. To use this command you must have the Manage Channels permission or have the Bot Admin role. """ if len(channels) == 0: channels = (ctx.message.channel,) # a set is the proper data type for the ignore list # however, JSON only supports arrays and objects not sets. ignored = self.config.get('ignored', []) result = [] for channel in channels: try: ignored.remove(channel.id) except ValueError: pass else: result.append('<#{}>'.format(channel.id)) await self.config.put('ignored', ignored) await self.bot.responses.success(message='Channel(s) {} will no longer be ignored.'.format(', '.join(result)))
<ast.AsyncFunctionDef object at 0x7da1b282ab90>
keyword[async] keyword[def] identifier[unignore] ( identifier[self] , identifier[ctx] ,* identifier[channels] : identifier[discord] . identifier[Channel] ): literal[string] keyword[if] identifier[len] ( identifier[channels] )== literal[int] : identifier[channels] =( identifier[ctx] . identifier[message] . identifier[channel] ,) identifier[ignored] = identifier[self] . identifier[config] . identifier[get] ( literal[string] ,[]) identifier[result] =[] keyword[for] identifier[channel] keyword[in] identifier[channels] : keyword[try] : identifier[ignored] . identifier[remove] ( identifier[channel] . identifier[id] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[else] : identifier[result] . identifier[append] ( literal[string] . identifier[format] ( identifier[channel] . identifier[id] )) keyword[await] identifier[self] . identifier[config] . identifier[put] ( literal[string] , identifier[ignored] ) keyword[await] identifier[self] . identifier[bot] . identifier[responses] . identifier[success] ( identifier[message] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[result] )))
async def unignore(self, ctx, *channels: discord.Channel): """Unignores channels from being processed. If no channels are specified, it unignores the current channel. To use this command you must have the Manage Channels permission or have the Bot Admin role. """ if len(channels) == 0: channels = (ctx.message.channel,) # depends on [control=['if'], data=[]] # a set is the proper data type for the ignore list # however, JSON only supports arrays and objects not sets. ignored = self.config.get('ignored', []) result = [] for channel in channels: try: ignored.remove(channel.id) # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] else: result.append('<#{}>'.format(channel.id)) # depends on [control=['for'], data=['channel']] await self.config.put('ignored', ignored) await self.bot.responses.success(message='Channel(s) {} will no longer be ignored.'.format(', '.join(result)))
def model_results(self, **kwargs) -> str: """ Collates model results from all phases in the directory or some subset if filters are applied. Parameters ---------- kwargs Filters, e.g. pipeline=pipeline1 Returns ------- model_results A string joining headers and results for all included phases. """ return "\n\n".join("{}\n\n{}".format(phase.header, phase.model_results) for phase in self.phases_with(**kwargs))
def function[model_results, parameter[self]]: constant[ Collates model results from all phases in the directory or some subset if filters are applied. Parameters ---------- kwargs Filters, e.g. pipeline=pipeline1 Returns ------- model_results A string joining headers and results for all included phases. ] return[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b22971c0>]]]
keyword[def] identifier[model_results] ( identifier[self] ,** identifier[kwargs] )-> identifier[str] : literal[string] keyword[return] literal[string] . identifier[join] ( literal[string] . identifier[format] ( identifier[phase] . identifier[header] , identifier[phase] . identifier[model_results] ) keyword[for] identifier[phase] keyword[in] identifier[self] . identifier[phases_with] (** identifier[kwargs] ))
def model_results(self, **kwargs) -> str: """ Collates model results from all phases in the directory or some subset if filters are applied. Parameters ---------- kwargs Filters, e.g. pipeline=pipeline1 Returns ------- model_results A string joining headers and results for all included phases. """ return '\n\n'.join(('{}\n\n{}'.format(phase.header, phase.model_results) for phase in self.phases_with(**kwargs)))
def getEyeToHeadTransform(self, eEye): """ Returns the transform from eye space to the head space. Eye space is the per-eye flavor of head space that provides stereo disparity. Instead of Model * View * Projection the sequence is Model * View * Eye^-1 * Projection. Normally View and Eye^-1 will be multiplied together and treated as View in your application. """ fn = self.function_table.getEyeToHeadTransform result = fn(eEye) return result
def function[getEyeToHeadTransform, parameter[self, eEye]]: constant[ Returns the transform from eye space to the head space. Eye space is the per-eye flavor of head space that provides stereo disparity. Instead of Model * View * Projection the sequence is Model * View * Eye^-1 * Projection. Normally View and Eye^-1 will be multiplied together and treated as View in your application. ] variable[fn] assign[=] name[self].function_table.getEyeToHeadTransform variable[result] assign[=] call[name[fn], parameter[name[eEye]]] return[name[result]]
keyword[def] identifier[getEyeToHeadTransform] ( identifier[self] , identifier[eEye] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[getEyeToHeadTransform] identifier[result] = identifier[fn] ( identifier[eEye] ) keyword[return] identifier[result]
def getEyeToHeadTransform(self, eEye): """ Returns the transform from eye space to the head space. Eye space is the per-eye flavor of head space that provides stereo disparity. Instead of Model * View * Projection the sequence is Model * View * Eye^-1 * Projection. Normally View and Eye^-1 will be multiplied together and treated as View in your application. """ fn = self.function_table.getEyeToHeadTransform result = fn(eEye) return result
def _send_connect_request(self): "sends CONNECT request" # XXX needs to support v6 ... or something else does host = self._addr.host port = self._addr.port if isinstance(self._addr, (IPv4Address, IPv6Address)): is_v6 = isinstance(self._addr, IPv6Address) self._data_to_send( struct.pack( '!BBBB4sH', 5, # version 0x01, # command 0x00, # reserved 0x04 if is_v6 else 0x01, inet_pton(AF_INET6 if is_v6 else AF_INET, host), port, ) ) else: host = host.encode('ascii') self._data_to_send( struct.pack( '!BBBBB{}sH'.format(len(host)), 5, # version 0x01, # command 0x00, # reserved 0x03, len(host), host, port, ) )
def function[_send_connect_request, parameter[self]]: constant[sends CONNECT request] variable[host] assign[=] name[self]._addr.host variable[port] assign[=] name[self]._addr.port if call[name[isinstance], parameter[name[self]._addr, tuple[[<ast.Name object at 0x7da18dc04e50>, <ast.Name object at 0x7da18dc076d0>]]]] begin[:] variable[is_v6] assign[=] call[name[isinstance], parameter[name[self]._addr, name[IPv6Address]]] call[name[self]._data_to_send, parameter[call[name[struct].pack, parameter[constant[!BBBB4sH], constant[5], constant[1], constant[0], <ast.IfExp object at 0x7da18dc040a0>, call[name[inet_pton], parameter[<ast.IfExp object at 0x7da18dc05990>, name[host]]], name[port]]]]]
keyword[def] identifier[_send_connect_request] ( identifier[self] ): literal[string] identifier[host] = identifier[self] . identifier[_addr] . identifier[host] identifier[port] = identifier[self] . identifier[_addr] . identifier[port] keyword[if] identifier[isinstance] ( identifier[self] . identifier[_addr] ,( identifier[IPv4Address] , identifier[IPv6Address] )): identifier[is_v6] = identifier[isinstance] ( identifier[self] . identifier[_addr] , identifier[IPv6Address] ) identifier[self] . identifier[_data_to_send] ( identifier[struct] . identifier[pack] ( literal[string] , literal[int] , literal[int] , literal[int] , literal[int] keyword[if] identifier[is_v6] keyword[else] literal[int] , identifier[inet_pton] ( identifier[AF_INET6] keyword[if] identifier[is_v6] keyword[else] identifier[AF_INET] , identifier[host] ), identifier[port] , ) ) keyword[else] : identifier[host] = identifier[host] . identifier[encode] ( literal[string] ) identifier[self] . identifier[_data_to_send] ( identifier[struct] . identifier[pack] ( literal[string] . identifier[format] ( identifier[len] ( identifier[host] )), literal[int] , literal[int] , literal[int] , literal[int] , identifier[len] ( identifier[host] ), identifier[host] , identifier[port] , ) )
def _send_connect_request(self): """sends CONNECT request""" # XXX needs to support v6 ... or something else does host = self._addr.host port = self._addr.port if isinstance(self._addr, (IPv4Address, IPv6Address)): is_v6 = isinstance(self._addr, IPv6Address) # version # command # reserved self._data_to_send(struct.pack('!BBBB4sH', 5, 1, 0, 4 if is_v6 else 1, inet_pton(AF_INET6 if is_v6 else AF_INET, host), port)) # depends on [control=['if'], data=[]] else: host = host.encode('ascii') # version # command # reserved self._data_to_send(struct.pack('!BBBBB{}sH'.format(len(host)), 5, 1, 0, 3, len(host), host, port))
def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT): """Pop an event from its queue. Return and remove the oldest entry of an event. Block until an event of specified name is available or times out if timeout is set. Args: event_name: Name of the event to be popped. timeout: Number of seconds to wait when event is not present. Never times out if None. Returns: The oldest entry of the specified event. None if timed out. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling. """ if not self.started: raise IllegalStateError( "Dispatcher needs to be started before popping.") e_queue = self.get_event_q(event_name) if not e_queue: raise TypeError("Failed to get an event queue for {}".format( event_name)) try: # Block for timeout if timeout: return e_queue.get(True, timeout) # Non-blocking poll for event elif timeout == 0: return e_queue.get(False) else: # Block forever on event wait return e_queue.get(True) except queue.Empty: raise queue.Empty('Timeout after {}s waiting for event: {}'.format( timeout, event_name))
def function[pop_event, parameter[self, event_name, timeout]]: constant[Pop an event from its queue. Return and remove the oldest entry of an event. Block until an event of specified name is available or times out if timeout is set. Args: event_name: Name of the event to be popped. timeout: Number of seconds to wait when event is not present. Never times out if None. Returns: The oldest entry of the specified event. None if timed out. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling. ] if <ast.UnaryOp object at 0x7da1b08a55d0> begin[:] <ast.Raise object at 0x7da1b08a4070> variable[e_queue] assign[=] call[name[self].get_event_q, parameter[name[event_name]]] if <ast.UnaryOp object at 0x7da1b08a51e0> begin[:] <ast.Raise object at 0x7da1b08a5210> <ast.Try object at 0x7da1b08a5ab0>
keyword[def] identifier[pop_event] ( identifier[self] , identifier[event_name] , identifier[timeout] = identifier[DEFAULT_TIMEOUT] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[started] : keyword[raise] identifier[IllegalStateError] ( literal[string] ) identifier[e_queue] = identifier[self] . identifier[get_event_q] ( identifier[event_name] ) keyword[if] keyword[not] identifier[e_queue] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[event_name] )) keyword[try] : keyword[if] identifier[timeout] : keyword[return] identifier[e_queue] . identifier[get] ( keyword[True] , identifier[timeout] ) keyword[elif] identifier[timeout] == literal[int] : keyword[return] identifier[e_queue] . identifier[get] ( keyword[False] ) keyword[else] : keyword[return] identifier[e_queue] . identifier[get] ( keyword[True] ) keyword[except] identifier[queue] . identifier[Empty] : keyword[raise] identifier[queue] . identifier[Empty] ( literal[string] . identifier[format] ( identifier[timeout] , identifier[event_name] ))
def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT): """Pop an event from its queue. Return and remove the oldest entry of an event. Block until an event of specified name is available or times out if timeout is set. Args: event_name: Name of the event to be popped. timeout: Number of seconds to wait when event is not present. Never times out if None. Returns: The oldest entry of the specified event. None if timed out. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling. """ if not self.started: raise IllegalStateError('Dispatcher needs to be started before popping.') # depends on [control=['if'], data=[]] e_queue = self.get_event_q(event_name) if not e_queue: raise TypeError('Failed to get an event queue for {}'.format(event_name)) # depends on [control=['if'], data=[]] try: # Block for timeout if timeout: return e_queue.get(True, timeout) # depends on [control=['if'], data=[]] # Non-blocking poll for event elif timeout == 0: return e_queue.get(False) # depends on [control=['if'], data=[]] else: # Block forever on event wait return e_queue.get(True) # depends on [control=['try'], data=[]] except queue.Empty: raise queue.Empty('Timeout after {}s waiting for event: {}'.format(timeout, event_name)) # depends on [control=['except'], data=[]]
def keypress(self, size, key): """Handle keypresses for changing tabs.""" key = super().keypress(size, key) num_tabs = len(self._widgets) if key == self._keys['prev_tab']: self._tab_index = (self._tab_index - 1) % num_tabs self._update_tabs() elif key == self._keys['next_tab']: self._tab_index = (self._tab_index + 1) % num_tabs self._update_tabs() elif key == self._keys['close_tab']: # Don't allow closing the Conversations tab if self._tab_index > 0: curr_tab = self._widgets[self._tab_index] self._widgets.remove(curr_tab) del self._widget_title[curr_tab] self._tab_index -= 1 self._update_tabs() else: return key
def function[keypress, parameter[self, size, key]]: constant[Handle keypresses for changing tabs.] variable[key] assign[=] call[call[name[super], parameter[]].keypress, parameter[name[size], name[key]]] variable[num_tabs] assign[=] call[name[len], parameter[name[self]._widgets]] if compare[name[key] equal[==] call[name[self]._keys][constant[prev_tab]]] begin[:] name[self]._tab_index assign[=] binary_operation[binary_operation[name[self]._tab_index - constant[1]] <ast.Mod object at 0x7da2590d6920> name[num_tabs]] call[name[self]._update_tabs, parameter[]]
keyword[def] identifier[keypress] ( identifier[self] , identifier[size] , identifier[key] ): literal[string] identifier[key] = identifier[super] (). identifier[keypress] ( identifier[size] , identifier[key] ) identifier[num_tabs] = identifier[len] ( identifier[self] . identifier[_widgets] ) keyword[if] identifier[key] == identifier[self] . identifier[_keys] [ literal[string] ]: identifier[self] . identifier[_tab_index] =( identifier[self] . identifier[_tab_index] - literal[int] )% identifier[num_tabs] identifier[self] . identifier[_update_tabs] () keyword[elif] identifier[key] == identifier[self] . identifier[_keys] [ literal[string] ]: identifier[self] . identifier[_tab_index] =( identifier[self] . identifier[_tab_index] + literal[int] )% identifier[num_tabs] identifier[self] . identifier[_update_tabs] () keyword[elif] identifier[key] == identifier[self] . identifier[_keys] [ literal[string] ]: keyword[if] identifier[self] . identifier[_tab_index] > literal[int] : identifier[curr_tab] = identifier[self] . identifier[_widgets] [ identifier[self] . identifier[_tab_index] ] identifier[self] . identifier[_widgets] . identifier[remove] ( identifier[curr_tab] ) keyword[del] identifier[self] . identifier[_widget_title] [ identifier[curr_tab] ] identifier[self] . identifier[_tab_index] -= literal[int] identifier[self] . identifier[_update_tabs] () keyword[else] : keyword[return] identifier[key]
def keypress(self, size, key): """Handle keypresses for changing tabs.""" key = super().keypress(size, key) num_tabs = len(self._widgets) if key == self._keys['prev_tab']: self._tab_index = (self._tab_index - 1) % num_tabs self._update_tabs() # depends on [control=['if'], data=[]] elif key == self._keys['next_tab']: self._tab_index = (self._tab_index + 1) % num_tabs self._update_tabs() # depends on [control=['if'], data=[]] elif key == self._keys['close_tab']: # Don't allow closing the Conversations tab if self._tab_index > 0: curr_tab = self._widgets[self._tab_index] self._widgets.remove(curr_tab) del self._widget_title[curr_tab] self._tab_index -= 1 self._update_tabs() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: return key
def OnUndo(self, event): """Calls the grid undo method""" statustext = undo.stack().undotext() undo.stack().undo() # Update content changed state try: post_command_event(self.grid.main_window, self.grid.ContentChangedMsg) except TypeError: # The main window does not exist any more pass self.grid.code_array.result_cache.clear() post_command_event(self.grid.main_window, self.grid.TableChangedMsg, updated_cell=True) # Reset row heights and column widths by zooming self.grid.actions.zoom() # Change grid table dimensions self.grid.GetTable().ResetView() # Update TableChoiceIntCtrl shape = self.grid.code_array.shape post_command_event(self.main_window, self.grid.ResizeGridMsg, shape=shape) # Update toolbars self.grid.update_entry_line() self.grid.update_attribute_toolbar() post_command_event(self.grid.main_window, self.grid.StatusBarMsg, text=statustext)
def function[OnUndo, parameter[self, event]]: constant[Calls the grid undo method] variable[statustext] assign[=] call[call[name[undo].stack, parameter[]].undotext, parameter[]] call[call[name[undo].stack, parameter[]].undo, parameter[]] <ast.Try object at 0x7da1b1631060> call[name[self].grid.code_array.result_cache.clear, parameter[]] call[name[post_command_event], parameter[name[self].grid.main_window, name[self].grid.TableChangedMsg]] call[name[self].grid.actions.zoom, parameter[]] call[call[name[self].grid.GetTable, parameter[]].ResetView, parameter[]] variable[shape] assign[=] name[self].grid.code_array.shape call[name[post_command_event], parameter[name[self].main_window, name[self].grid.ResizeGridMsg]] call[name[self].grid.update_entry_line, parameter[]] call[name[self].grid.update_attribute_toolbar, parameter[]] call[name[post_command_event], parameter[name[self].grid.main_window, name[self].grid.StatusBarMsg]]
keyword[def] identifier[OnUndo] ( identifier[self] , identifier[event] ): literal[string] identifier[statustext] = identifier[undo] . identifier[stack] (). identifier[undotext] () identifier[undo] . identifier[stack] (). identifier[undo] () keyword[try] : identifier[post_command_event] ( identifier[self] . identifier[grid] . identifier[main_window] , identifier[self] . identifier[grid] . identifier[ContentChangedMsg] ) keyword[except] identifier[TypeError] : keyword[pass] identifier[self] . identifier[grid] . identifier[code_array] . identifier[result_cache] . identifier[clear] () identifier[post_command_event] ( identifier[self] . identifier[grid] . identifier[main_window] , identifier[self] . identifier[grid] . identifier[TableChangedMsg] , identifier[updated_cell] = keyword[True] ) identifier[self] . identifier[grid] . identifier[actions] . identifier[zoom] () identifier[self] . identifier[grid] . identifier[GetTable] (). identifier[ResetView] () identifier[shape] = identifier[self] . identifier[grid] . identifier[code_array] . identifier[shape] identifier[post_command_event] ( identifier[self] . identifier[main_window] , identifier[self] . identifier[grid] . identifier[ResizeGridMsg] , identifier[shape] = identifier[shape] ) identifier[self] . identifier[grid] . identifier[update_entry_line] () identifier[self] . identifier[grid] . identifier[update_attribute_toolbar] () identifier[post_command_event] ( identifier[self] . identifier[grid] . identifier[main_window] , identifier[self] . identifier[grid] . identifier[StatusBarMsg] , identifier[text] = identifier[statustext] )
def OnUndo(self, event): """Calls the grid undo method""" statustext = undo.stack().undotext() undo.stack().undo() # Update content changed state try: post_command_event(self.grid.main_window, self.grid.ContentChangedMsg) # depends on [control=['try'], data=[]] except TypeError: # The main window does not exist any more pass # depends on [control=['except'], data=[]] self.grid.code_array.result_cache.clear() post_command_event(self.grid.main_window, self.grid.TableChangedMsg, updated_cell=True) # Reset row heights and column widths by zooming self.grid.actions.zoom() # Change grid table dimensions self.grid.GetTable().ResetView() # Update TableChoiceIntCtrl shape = self.grid.code_array.shape post_command_event(self.main_window, self.grid.ResizeGridMsg, shape=shape) # Update toolbars self.grid.update_entry_line() self.grid.update_attribute_toolbar() post_command_event(self.grid.main_window, self.grid.StatusBarMsg, text=statustext)
def sample_outcomes(probs, n): """ For a discrete probability distribution ``probs`` with outcomes 0, 1, ..., k-1 draw ``n`` random samples. :param list probs: A list of probabilities. :param Number n: The number of random samples to draw. :return: An array of samples drawn from distribution probs over 0, ..., len(probs) - 1 :rtype: numpy.ndarray """ dist = np.cumsum(probs) rs = np.random.rand(n) return np.array([(np.where(r < dist)[0][0]) for r in rs])
def function[sample_outcomes, parameter[probs, n]]: constant[ For a discrete probability distribution ``probs`` with outcomes 0, 1, ..., k-1 draw ``n`` random samples. :param list probs: A list of probabilities. :param Number n: The number of random samples to draw. :return: An array of samples drawn from distribution probs over 0, ..., len(probs) - 1 :rtype: numpy.ndarray ] variable[dist] assign[=] call[name[np].cumsum, parameter[name[probs]]] variable[rs] assign[=] call[name[np].random.rand, parameter[name[n]]] return[call[name[np].array, parameter[<ast.ListComp object at 0x7da2054a73d0>]]]
keyword[def] identifier[sample_outcomes] ( identifier[probs] , identifier[n] ): literal[string] identifier[dist] = identifier[np] . identifier[cumsum] ( identifier[probs] ) identifier[rs] = identifier[np] . identifier[random] . identifier[rand] ( identifier[n] ) keyword[return] identifier[np] . identifier[array] ([( identifier[np] . identifier[where] ( identifier[r] < identifier[dist] )[ literal[int] ][ literal[int] ]) keyword[for] identifier[r] keyword[in] identifier[rs] ])
def sample_outcomes(probs, n): """ For a discrete probability distribution ``probs`` with outcomes 0, 1, ..., k-1 draw ``n`` random samples. :param list probs: A list of probabilities. :param Number n: The number of random samples to draw. :return: An array of samples drawn from distribution probs over 0, ..., len(probs) - 1 :rtype: numpy.ndarray """ dist = np.cumsum(probs) rs = np.random.rand(n) return np.array([np.where(r < dist)[0][0] for r in rs])
def _init_subj_data(self, subj_files): """ Parameters ---------- subj_files: list or dict of str file_path -> int/str """ try: if isinstance(subj_files, list): self.from_list(subj_files) elif isinstance(subj_files, dict): self.from_dict(subj_files) else: raise ValueError('Could not recognize subj_files argument variable type.') except Exception as exc: raise Exception('Cannot read subj_files input argument.') from exc
def function[_init_subj_data, parameter[self, subj_files]]: constant[ Parameters ---------- subj_files: list or dict of str file_path -> int/str ] <ast.Try object at 0x7da1afe043d0>
keyword[def] identifier[_init_subj_data] ( identifier[self] , identifier[subj_files] ): literal[string] keyword[try] : keyword[if] identifier[isinstance] ( identifier[subj_files] , identifier[list] ): identifier[self] . identifier[from_list] ( identifier[subj_files] ) keyword[elif] identifier[isinstance] ( identifier[subj_files] , identifier[dict] ): identifier[self] . identifier[from_dict] ( identifier[subj_files] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[exc] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[from] identifier[exc]
def _init_subj_data(self, subj_files): """ Parameters ---------- subj_files: list or dict of str file_path -> int/str """ try: if isinstance(subj_files, list): self.from_list(subj_files) # depends on [control=['if'], data=[]] elif isinstance(subj_files, dict): self.from_dict(subj_files) # depends on [control=['if'], data=[]] else: raise ValueError('Could not recognize subj_files argument variable type.') # depends on [control=['try'], data=[]] except Exception as exc: raise Exception('Cannot read subj_files input argument.') from exc # depends on [control=['except'], data=['exc']]
def agp(args): """ %prog agp tpffile certificatefile agpfile Build agpfile from overlap certificates. Tiling Path File (tpf) is a file that lists the component and the gaps. It is a three-column file similar to below, also see jcvi.formats.agp.tpf(): telomere chr1 na AC229737.8 chr1 + AC202463.29 chr1 + Note: the orientation of the component is only used as a guide. If the orientation is derivable from a terminal overlap, it will use it regardless of what the tpf says. See jcvi.assembly.goldenpath.certificate() which generates a list of certificates based on agpfile. At first, it seems counter-productive to convert first agp to certificates then certificates back to agp. The certificates provide a way to edit the overlap information, so that the agpfile can be corrected (without changing agpfile directly). """ from jcvi.formats.base import DictFile p = OptionParser(agp.__doc__) opts, args = p.parse_args(args) if len(args) != 3: sys.exit(not p.print_help()) tpffile, certificatefile, agpfile = args orientationguide = DictFile(tpffile, valuepos=2) cert = Certificate(certificatefile) cert.write_AGP(agpfile, orientationguide=orientationguide)
def function[agp, parameter[args]]: constant[ %prog agp tpffile certificatefile agpfile Build agpfile from overlap certificates. Tiling Path File (tpf) is a file that lists the component and the gaps. It is a three-column file similar to below, also see jcvi.formats.agp.tpf(): telomere chr1 na AC229737.8 chr1 + AC202463.29 chr1 + Note: the orientation of the component is only used as a guide. If the orientation is derivable from a terminal overlap, it will use it regardless of what the tpf says. See jcvi.assembly.goldenpath.certificate() which generates a list of certificates based on agpfile. At first, it seems counter-productive to convert first agp to certificates then certificates back to agp. The certificates provide a way to edit the overlap information, so that the agpfile can be corrected (without changing agpfile directly). ] from relative_module[jcvi.formats.base] import module[DictFile] variable[p] assign[=] call[name[OptionParser], parameter[name[agp].__doc__]] <ast.Tuple object at 0x7da20c76f640> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[3]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20c76ca00>]] <ast.Tuple object at 0x7da20c76e500> assign[=] name[args] variable[orientationguide] assign[=] call[name[DictFile], parameter[name[tpffile]]] variable[cert] assign[=] call[name[Certificate], parameter[name[certificatefile]]] call[name[cert].write_AGP, parameter[name[agpfile]]]
keyword[def] identifier[agp] ( identifier[args] ): literal[string] keyword[from] identifier[jcvi] . identifier[formats] . identifier[base] keyword[import] identifier[DictFile] identifier[p] = identifier[OptionParser] ( identifier[agp] . identifier[__doc__] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[tpffile] , identifier[certificatefile] , identifier[agpfile] = identifier[args] identifier[orientationguide] = identifier[DictFile] ( identifier[tpffile] , identifier[valuepos] = literal[int] ) identifier[cert] = identifier[Certificate] ( identifier[certificatefile] ) identifier[cert] . identifier[write_AGP] ( identifier[agpfile] , identifier[orientationguide] = identifier[orientationguide] )
def agp(args): """ %prog agp tpffile certificatefile agpfile Build agpfile from overlap certificates. Tiling Path File (tpf) is a file that lists the component and the gaps. It is a three-column file similar to below, also see jcvi.formats.agp.tpf(): telomere chr1 na AC229737.8 chr1 + AC202463.29 chr1 + Note: the orientation of the component is only used as a guide. If the orientation is derivable from a terminal overlap, it will use it regardless of what the tpf says. See jcvi.assembly.goldenpath.certificate() which generates a list of certificates based on agpfile. At first, it seems counter-productive to convert first agp to certificates then certificates back to agp. The certificates provide a way to edit the overlap information, so that the agpfile can be corrected (without changing agpfile directly). """ from jcvi.formats.base import DictFile p = OptionParser(agp.__doc__) (opts, args) = p.parse_args(args) if len(args) != 3: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (tpffile, certificatefile, agpfile) = args orientationguide = DictFile(tpffile, valuepos=2) cert = Certificate(certificatefile) cert.write_AGP(agpfile, orientationguide=orientationguide)
def run_cell(self, cell): """Run a notebook cell and update the output of that cell in-place.""" logging.info('Running cell:\n%s\n', cell.input) self.kc.execute(cell.input) reply = self.kc.get_shell_msg() status = reply['content']['status'] traceback_text = '' if status == 'error': traceback_text = 'Cell raised uncaught exception: \n' + \ '\n'.join(reply['content']['traceback']) logging.info(traceback_text) else: logging.info('Cell returned') outs = list() while True: try: msg = self.kc.get_iopub_msg(timeout=1) if msg['msg_type'] == 'status': if msg['content']['execution_state'] == 'idle': break except Empty: # execution state should return to idle # before the queue becomes empty, # if it doesn't, something bad has happened raise content = msg['content'] msg_type = msg['msg_type'] # IPython 3.0.0-dev writes pyerr/pyout in the notebook format # but uses error/execute_result in the message spec. This does the # translation needed for tests to pass with IPython 3.0.0-dev notebook3_format_conversions = { 'error': 'pyerr', 'execute_result': 'pyout' } msg_type = notebook3_format_conversions.get(msg_type, msg_type) out = NotebookNode(output_type=msg_type) if 'execution_count' in content: cell['prompt_number'] = content['execution_count'] out.prompt_number = content['execution_count'] if msg_type in ('status', 'pyin', 'execute_input'): continue elif msg_type == 'stream': out.stream = content['name'] # in msgspec 5, this is name, text # in msgspec 4, this is name, data if 'text' in content: out.text = content['text'] else: out.text = content['data'] elif msg_type in ('display_data', 'pyout'): for mime, data in content['data'].items(): try: attr = self.MIME_MAP[mime] except KeyError: raise NotImplementedError( 'unhandled mime type: %s' % mime ) # In notebook version <= 3 JSON data is stored as a string # Evaluation of IPython2's JSON gives strings directly # Therefore do not encode for IPython versions prior to 3 json_encode = ( IPython.version_info[0] >= 3 and mime == 
"application/json") data_out = data if not json_encode else json.dumps(data) setattr(out, attr, data_out) elif msg_type == 'pyerr': out.ename = content['ename'] out.evalue = content['evalue'] out.traceback = content['traceback'] elif msg_type == 'clear_output': outs = list() continue else: raise NotImplementedError( 'unhandled iopub message: %s' % msg_type ) outs.append(out) cell['outputs'] = outs if status == 'error': raise NotebookError(traceback_text)
def function[run_cell, parameter[self, cell]]: constant[Run a notebook cell and update the output of that cell in-place.] call[name[logging].info, parameter[constant[Running cell: %s ], name[cell].input]] call[name[self].kc.execute, parameter[name[cell].input]] variable[reply] assign[=] call[name[self].kc.get_shell_msg, parameter[]] variable[status] assign[=] call[call[name[reply]][constant[content]]][constant[status]] variable[traceback_text] assign[=] constant[] if compare[name[status] equal[==] constant[error]] begin[:] variable[traceback_text] assign[=] binary_operation[constant[Cell raised uncaught exception: ] + call[constant[ ].join, parameter[call[call[name[reply]][constant[content]]][constant[traceback]]]]] call[name[logging].info, parameter[name[traceback_text]]] variable[outs] assign[=] call[name[list], parameter[]] while constant[True] begin[:] <ast.Try object at 0x7da1b12f37c0> variable[content] assign[=] call[name[msg]][constant[content]] variable[msg_type] assign[=] call[name[msg]][constant[msg_type]] variable[notebook3_format_conversions] assign[=] dictionary[[<ast.Constant object at 0x7da18bc72ef0>, <ast.Constant object at 0x7da18bc72410>], [<ast.Constant object at 0x7da18bc71600>, <ast.Constant object at 0x7da18bc73e50>]] variable[msg_type] assign[=] call[name[notebook3_format_conversions].get, parameter[name[msg_type], name[msg_type]]] variable[out] assign[=] call[name[NotebookNode], parameter[]] if compare[constant[execution_count] in name[content]] begin[:] call[name[cell]][constant[prompt_number]] assign[=] call[name[content]][constant[execution_count]] name[out].prompt_number assign[=] call[name[content]][constant[execution_count]] if compare[name[msg_type] in tuple[[<ast.Constant object at 0x7da18bc73340>, <ast.Constant object at 0x7da18bc734f0>, <ast.Constant object at 0x7da18bc739a0>]]] begin[:] continue call[name[outs].append, parameter[name[out]]] call[name[cell]][constant[outputs]] assign[=] name[outs] if compare[name[status] equal[==] 
constant[error]] begin[:] <ast.Raise object at 0x7da2044c3d90>
keyword[def] identifier[run_cell] ( identifier[self] , identifier[cell] ): literal[string] identifier[logging] . identifier[info] ( literal[string] , identifier[cell] . identifier[input] ) identifier[self] . identifier[kc] . identifier[execute] ( identifier[cell] . identifier[input] ) identifier[reply] = identifier[self] . identifier[kc] . identifier[get_shell_msg] () identifier[status] = identifier[reply] [ literal[string] ][ literal[string] ] identifier[traceback_text] = literal[string] keyword[if] identifier[status] == literal[string] : identifier[traceback_text] = literal[string] + literal[string] . identifier[join] ( identifier[reply] [ literal[string] ][ literal[string] ]) identifier[logging] . identifier[info] ( identifier[traceback_text] ) keyword[else] : identifier[logging] . identifier[info] ( literal[string] ) identifier[outs] = identifier[list] () keyword[while] keyword[True] : keyword[try] : identifier[msg] = identifier[self] . identifier[kc] . identifier[get_iopub_msg] ( identifier[timeout] = literal[int] ) keyword[if] identifier[msg] [ literal[string] ]== literal[string] : keyword[if] identifier[msg] [ literal[string] ][ literal[string] ]== literal[string] : keyword[break] keyword[except] identifier[Empty] : keyword[raise] identifier[content] = identifier[msg] [ literal[string] ] identifier[msg_type] = identifier[msg] [ literal[string] ] identifier[notebook3_format_conversions] ={ literal[string] : literal[string] , literal[string] : literal[string] } identifier[msg_type] = identifier[notebook3_format_conversions] . identifier[get] ( identifier[msg_type] , identifier[msg_type] ) identifier[out] = identifier[NotebookNode] ( identifier[output_type] = identifier[msg_type] ) keyword[if] literal[string] keyword[in] identifier[content] : identifier[cell] [ literal[string] ]= identifier[content] [ literal[string] ] identifier[out] . 
identifier[prompt_number] = identifier[content] [ literal[string] ] keyword[if] identifier[msg_type] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[continue] keyword[elif] identifier[msg_type] == literal[string] : identifier[out] . identifier[stream] = identifier[content] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[content] : identifier[out] . identifier[text] = identifier[content] [ literal[string] ] keyword[else] : identifier[out] . identifier[text] = identifier[content] [ literal[string] ] keyword[elif] identifier[msg_type] keyword[in] ( literal[string] , literal[string] ): keyword[for] identifier[mime] , identifier[data] keyword[in] identifier[content] [ literal[string] ]. identifier[items] (): keyword[try] : identifier[attr] = identifier[self] . identifier[MIME_MAP] [ identifier[mime] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[mime] ) identifier[json_encode] =( identifier[IPython] . identifier[version_info] [ literal[int] ]>= literal[int] keyword[and] identifier[mime] == literal[string] ) identifier[data_out] = identifier[data] keyword[if] keyword[not] identifier[json_encode] keyword[else] identifier[json] . identifier[dumps] ( identifier[data] ) identifier[setattr] ( identifier[out] , identifier[attr] , identifier[data_out] ) keyword[elif] identifier[msg_type] == literal[string] : identifier[out] . identifier[ename] = identifier[content] [ literal[string] ] identifier[out] . identifier[evalue] = identifier[content] [ literal[string] ] identifier[out] . identifier[traceback] = identifier[content] [ literal[string] ] keyword[elif] identifier[msg_type] == literal[string] : identifier[outs] = identifier[list] () keyword[continue] keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[msg_type] ) identifier[outs] . 
identifier[append] ( identifier[out] ) identifier[cell] [ literal[string] ]= identifier[outs] keyword[if] identifier[status] == literal[string] : keyword[raise] identifier[NotebookError] ( identifier[traceback_text] )
def run_cell(self, cell): """Run a notebook cell and update the output of that cell in-place.""" logging.info('Running cell:\n%s\n', cell.input) self.kc.execute(cell.input) reply = self.kc.get_shell_msg() status = reply['content']['status'] traceback_text = '' if status == 'error': traceback_text = 'Cell raised uncaught exception: \n' + '\n'.join(reply['content']['traceback']) logging.info(traceback_text) # depends on [control=['if'], data=[]] else: logging.info('Cell returned') outs = list() while True: try: msg = self.kc.get_iopub_msg(timeout=1) if msg['msg_type'] == 'status': if msg['content']['execution_state'] == 'idle': break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Empty: # execution state should return to idle # before the queue becomes empty, # if it doesn't, something bad has happened raise # depends on [control=['except'], data=[]] content = msg['content'] msg_type = msg['msg_type'] # IPython 3.0.0-dev writes pyerr/pyout in the notebook format # but uses error/execute_result in the message spec. 
This does the # translation needed for tests to pass with IPython 3.0.0-dev notebook3_format_conversions = {'error': 'pyerr', 'execute_result': 'pyout'} msg_type = notebook3_format_conversions.get(msg_type, msg_type) out = NotebookNode(output_type=msg_type) if 'execution_count' in content: cell['prompt_number'] = content['execution_count'] out.prompt_number = content['execution_count'] # depends on [control=['if'], data=['content']] if msg_type in ('status', 'pyin', 'execute_input'): continue # depends on [control=['if'], data=[]] elif msg_type == 'stream': out.stream = content['name'] # in msgspec 5, this is name, text # in msgspec 4, this is name, data if 'text' in content: out.text = content['text'] # depends on [control=['if'], data=['content']] else: out.text = content['data'] # depends on [control=['if'], data=[]] elif msg_type in ('display_data', 'pyout'): for (mime, data) in content['data'].items(): try: attr = self.MIME_MAP[mime] # depends on [control=['try'], data=[]] except KeyError: raise NotImplementedError('unhandled mime type: %s' % mime) # depends on [control=['except'], data=[]] # In notebook version <= 3 JSON data is stored as a string # Evaluation of IPython2's JSON gives strings directly # Therefore do not encode for IPython versions prior to 3 json_encode = IPython.version_info[0] >= 3 and mime == 'application/json' data_out = data if not json_encode else json.dumps(data) setattr(out, attr, data_out) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif msg_type == 'pyerr': out.ename = content['ename'] out.evalue = content['evalue'] out.traceback = content['traceback'] # depends on [control=['if'], data=[]] elif msg_type == 'clear_output': outs = list() continue # depends on [control=['if'], data=[]] else: raise NotImplementedError('unhandled iopub message: %s' % msg_type) outs.append(out) # depends on [control=['while'], data=[]] cell['outputs'] = outs if status == 'error': raise NotebookError(traceback_text) # 
depends on [control=['if'], data=[]]
def set_group_member_orphan(self, member_id): """ Make a non-orphan member trigger into an orphan. :param member_id: Member Trigger id to be made an orphan. """ self._put(self._service_url(['triggers', 'groups', 'members', member_id, 'orphan']), data=None, parse_json=False)
def function[set_group_member_orphan, parameter[self, member_id]]: constant[ Make a non-orphan member trigger into an orphan. :param member_id: Member Trigger id to be made an orphan. ] call[name[self]._put, parameter[call[name[self]._service_url, parameter[list[[<ast.Constant object at 0x7da20e957b20>, <ast.Constant object at 0x7da20e956470>, <ast.Constant object at 0x7da20e956b90>, <ast.Name object at 0x7da20e955f30>, <ast.Constant object at 0x7da20e957550>]]]]]]
keyword[def] identifier[set_group_member_orphan] ( identifier[self] , identifier[member_id] ): literal[string] identifier[self] . identifier[_put] ( identifier[self] . identifier[_service_url] ([ literal[string] , literal[string] , literal[string] , identifier[member_id] , literal[string] ]), identifier[data] = keyword[None] , identifier[parse_json] = keyword[False] )
def set_group_member_orphan(self, member_id): """ Make a non-orphan member trigger into an orphan. :param member_id: Member Trigger id to be made an orphan. """ self._put(self._service_url(['triggers', 'groups', 'members', member_id, 'orphan']), data=None, parse_json=False)
def get_person_from_legacy_format(profile_record): """ Given a whole profile, convert it into zone-file format. In the full profile JSON, this method operates on the 'data_record' object. @profile is a dict that contains the legacy profile data Return a dict with the zone-file formatting. """ if not is_profile_in_legacy_format(profile_record): raise ValueError("Not a legacy profile") profile = profile_record try: profile = json.loads(json.dumps(profile)) except ValueError: pass images = [] accounts = [] profile_data = { "@type": "Person" } if profile.has_key("name") and type(profile["name"]) == dict \ and profile["name"].has_key("formatted"): profile_data["name"] = profile["name"]["formatted"] if profile.has_key("bio"): profile_data["description"] = profile["bio"] if profile.has_key("location") and type(profile["location"]) == dict \ and profile["location"].has_key("formatted"): profile_data["address"] = { "@type": "PostalAddress", "addressLocality": profile["location"]["formatted"] } if profile.has_key("avatar") and type(profile["avatar"]) == dict and \ profile["avatar"].has_key("url"): images.append({ "@type": "ImageObject", "name": "avatar", "contentUrl": profile["avatar"]["url"] }) if profile.has_key("cover") and type(profile["cover"]) == dict and \ profile["cover"].has_key("url"): images.append({ "@type": "ImageObject", "name": "cover", "contentUrl": profile["cover"]["url"] }) if len(images) > 0: profile_data["image"] = images if profile.has_key("website") and type(profile["website"]) in [str, unicode]: profile_data["website"] = [{ "@type": "WebSite", "url": profile["website"] }] for service_name in ["twitter", "facebook", "github"]: if profile.has_key(service_name): accounts.append( format_account(service_name, profile[service_name]) ) if profile.has_key("bitcoin") and type(profile["bitcoin"]) == dict and \ profile["bitcoin"].has_key("address"): accounts.append({ "@type": "Account", "role": "payment", "service": "bitcoin", "identifier": 
profile["bitcoin"]["address"] }) if profile.has_key("auth"): if len(profile["auth"]) > 0 and type(profile["auth"]) == dict: if profile["auth"][0].has_key("publicKeychain"): accounts.append({ "@type": "Account", "role": "key", "service": "bip32", "identifier": profile["auth"][0]["publicKeychain"] }) if profile.has_key("pgp") and type(profile["pgp"]) == dict \ and profile["pgp"].has_key("url") \ and profile["pgp"].has_key("fingerprint"): accounts.append({ "@type": "Account", "role": "key", "service": "pgp", "identifier": profile["pgp"]["fingerprint"], "contentUrl": profile["pgp"]["url"] }) profile_data["account"] = accounts return profile_data
def function[get_person_from_legacy_format, parameter[profile_record]]: constant[ Given a whole profile, convert it into zone-file format. In the full profile JSON, this method operates on the 'data_record' object. @profile is a dict that contains the legacy profile data Return a dict with the zone-file formatting. ] if <ast.UnaryOp object at 0x7da204963520> begin[:] <ast.Raise object at 0x7da204960430> variable[profile] assign[=] name[profile_record] <ast.Try object at 0x7da2049615a0> variable[images] assign[=] list[[]] variable[accounts] assign[=] list[[]] variable[profile_data] assign[=] dictionary[[<ast.Constant object at 0x7da204963be0>], [<ast.Constant object at 0x7da204961c60>]] if <ast.BoolOp object at 0x7da204960460> begin[:] call[name[profile_data]][constant[name]] assign[=] call[call[name[profile]][constant[name]]][constant[formatted]] if call[name[profile].has_key, parameter[constant[bio]]] begin[:] call[name[profile_data]][constant[description]] assign[=] call[name[profile]][constant[bio]] if <ast.BoolOp object at 0x7da204963850> begin[:] call[name[profile_data]][constant[address]] assign[=] dictionary[[<ast.Constant object at 0x7da2049623b0>, <ast.Constant object at 0x7da204962650>], [<ast.Constant object at 0x7da204960d30>, <ast.Subscript object at 0x7da2049624d0>]] if <ast.BoolOp object at 0x7da204962710> begin[:] call[name[images].append, parameter[dictionary[[<ast.Constant object at 0x7da204961b70>, <ast.Constant object at 0x7da2049605e0>, <ast.Constant object at 0x7da204960610>], [<ast.Constant object at 0x7da204960580>, <ast.Constant object at 0x7da204962110>, <ast.Subscript object at 0x7da2049623e0>]]]] if <ast.BoolOp object at 0x7da204961db0> begin[:] call[name[images].append, parameter[dictionary[[<ast.Constant object at 0x7da2054a7e20>, <ast.Constant object at 0x7da2054a4040>, <ast.Constant object at 0x7da2054a7460>], [<ast.Constant object at 0x7da2054a6440>, <ast.Constant object at 0x7da2054a65f0>, <ast.Subscript object at 
0x7da2054a5d80>]]]] if compare[call[name[len], parameter[name[images]]] greater[>] constant[0]] begin[:] call[name[profile_data]][constant[image]] assign[=] name[images] if <ast.BoolOp object at 0x7da2054a6b00> begin[:] call[name[profile_data]][constant[website]] assign[=] list[[<ast.Dict object at 0x7da2054a7880>]] for taget[name[service_name]] in starred[list[[<ast.Constant object at 0x7da2054a66e0>, <ast.Constant object at 0x7da2054a4130>, <ast.Constant object at 0x7da2054a5f00>]]] begin[:] if call[name[profile].has_key, parameter[name[service_name]]] begin[:] call[name[accounts].append, parameter[call[name[format_account], parameter[name[service_name], call[name[profile]][name[service_name]]]]]] if <ast.BoolOp object at 0x7da2054a5600> begin[:] call[name[accounts].append, parameter[dictionary[[<ast.Constant object at 0x7da2054a5900>, <ast.Constant object at 0x7da2054a5f60>, <ast.Constant object at 0x7da2054a5e70>, <ast.Constant object at 0x7da2054a7c10>], [<ast.Constant object at 0x7da2054a5240>, <ast.Constant object at 0x7da2054a5ff0>, <ast.Constant object at 0x7da2054a6920>, <ast.Subscript object at 0x7da2054a6ce0>]]]] if call[name[profile].has_key, parameter[constant[auth]]] begin[:] if <ast.BoolOp object at 0x7da2054a7130> begin[:] if call[call[call[name[profile]][constant[auth]]][constant[0]].has_key, parameter[constant[publicKeychain]]] begin[:] call[name[accounts].append, parameter[dictionary[[<ast.Constant object at 0x7da2054a54e0>, <ast.Constant object at 0x7da2054a5180>, <ast.Constant object at 0x7da2054a6b60>, <ast.Constant object at 0x7da2054a7b20>], [<ast.Constant object at 0x7da2054a7160>, <ast.Constant object at 0x7da2054a5270>, <ast.Constant object at 0x7da2054a5e10>, <ast.Subscript object at 0x7da2054a6260>]]]] if <ast.BoolOp object at 0x7da2054a5a50> begin[:] call[name[accounts].append, parameter[dictionary[[<ast.Constant object at 0x7da2054a4400>, <ast.Constant object at 0x7da2054a5330>, <ast.Constant object at 0x7da2054a4460>, <ast.Constant 
object at 0x7da2054a5ea0>, <ast.Constant object at 0x7da2054a5b10>], [<ast.Constant object at 0x7da2054a6cb0>, <ast.Constant object at 0x7da2054a7490>, <ast.Constant object at 0x7da2054a4a60>, <ast.Subscript object at 0x7da2054a7f10>, <ast.Subscript object at 0x7da20c6a9b40>]]]] call[name[profile_data]][constant[account]] assign[=] name[accounts] return[name[profile_data]]
keyword[def] identifier[get_person_from_legacy_format] ( identifier[profile_record] ): literal[string] keyword[if] keyword[not] identifier[is_profile_in_legacy_format] ( identifier[profile_record] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[profile] = identifier[profile_record] keyword[try] : identifier[profile] = identifier[json] . identifier[loads] ( identifier[json] . identifier[dumps] ( identifier[profile] )) keyword[except] identifier[ValueError] : keyword[pass] identifier[images] =[] identifier[accounts] =[] identifier[profile_data] ={ literal[string] : literal[string] } keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ): identifier[profile_data] [ literal[string] ]= identifier[profile] [ literal[string] ][ literal[string] ] keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ): identifier[profile_data] [ literal[string] ]= identifier[profile] [ literal[string] ] keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ): identifier[profile_data] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ][ literal[string] ] } keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ): identifier[images] . 
identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ][ literal[string] ] }) keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ): identifier[images] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ][ literal[string] ] }) keyword[if] identifier[len] ( identifier[images] )> literal[int] : identifier[profile_data] [ literal[string] ]= identifier[images] keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ]) keyword[in] [ identifier[str] , identifier[unicode] ]: identifier[profile_data] [ literal[string] ]=[{ literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ] }] keyword[for] identifier[service_name] keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[if] identifier[profile] . identifier[has_key] ( identifier[service_name] ): identifier[accounts] . identifier[append] ( identifier[format_account] ( identifier[service_name] , identifier[profile] [ identifier[service_name] ]) ) keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ): identifier[accounts] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ][ literal[string] ] }) keyword[if] identifier[profile] . 
identifier[has_key] ( literal[string] ): keyword[if] identifier[len] ( identifier[profile] [ literal[string] ])> literal[int] keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] : keyword[if] identifier[profile] [ literal[string] ][ literal[int] ]. identifier[has_key] ( literal[string] ): identifier[accounts] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ][ literal[int] ][ literal[string] ] }) keyword[if] identifier[profile] . identifier[has_key] ( literal[string] ) keyword[and] identifier[type] ( identifier[profile] [ literal[string] ])== identifier[dict] keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ) keyword[and] identifier[profile] [ literal[string] ]. identifier[has_key] ( literal[string] ): identifier[accounts] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[profile] [ literal[string] ][ literal[string] ], literal[string] : identifier[profile] [ literal[string] ][ literal[string] ] }) identifier[profile_data] [ literal[string] ]= identifier[accounts] keyword[return] identifier[profile_data]
def get_person_from_legacy_format(profile_record): """ Given a whole profile, convert it into zone-file format. In the full profile JSON, this method operates on the 'data_record' object. @profile is a dict that contains the legacy profile data Return a dict with the zone-file formatting. """ if not is_profile_in_legacy_format(profile_record): raise ValueError('Not a legacy profile') # depends on [control=['if'], data=[]] profile = profile_record try: profile = json.loads(json.dumps(profile)) # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] images = [] accounts = [] profile_data = {'@type': 'Person'} if profile.has_key('name') and type(profile['name']) == dict and profile['name'].has_key('formatted'): profile_data['name'] = profile['name']['formatted'] # depends on [control=['if'], data=[]] if profile.has_key('bio'): profile_data['description'] = profile['bio'] # depends on [control=['if'], data=[]] if profile.has_key('location') and type(profile['location']) == dict and profile['location'].has_key('formatted'): profile_data['address'] = {'@type': 'PostalAddress', 'addressLocality': profile['location']['formatted']} # depends on [control=['if'], data=[]] if profile.has_key('avatar') and type(profile['avatar']) == dict and profile['avatar'].has_key('url'): images.append({'@type': 'ImageObject', 'name': 'avatar', 'contentUrl': profile['avatar']['url']}) # depends on [control=['if'], data=[]] if profile.has_key('cover') and type(profile['cover']) == dict and profile['cover'].has_key('url'): images.append({'@type': 'ImageObject', 'name': 'cover', 'contentUrl': profile['cover']['url']}) # depends on [control=['if'], data=[]] if len(images) > 0: profile_data['image'] = images # depends on [control=['if'], data=[]] if profile.has_key('website') and type(profile['website']) in [str, unicode]: profile_data['website'] = [{'@type': 'WebSite', 'url': profile['website']}] # depends on [control=['if'], data=[]] for 
service_name in ['twitter', 'facebook', 'github']: if profile.has_key(service_name): accounts.append(format_account(service_name, profile[service_name])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service_name']] if profile.has_key('bitcoin') and type(profile['bitcoin']) == dict and profile['bitcoin'].has_key('address'): accounts.append({'@type': 'Account', 'role': 'payment', 'service': 'bitcoin', 'identifier': profile['bitcoin']['address']}) # depends on [control=['if'], data=[]] if profile.has_key('auth'): if len(profile['auth']) > 0 and type(profile['auth']) == dict: if profile['auth'][0].has_key('publicKeychain'): accounts.append({'@type': 'Account', 'role': 'key', 'service': 'bip32', 'identifier': profile['auth'][0]['publicKeychain']}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if profile.has_key('pgp') and type(profile['pgp']) == dict and profile['pgp'].has_key('url') and profile['pgp'].has_key('fingerprint'): accounts.append({'@type': 'Account', 'role': 'key', 'service': 'pgp', 'identifier': profile['pgp']['fingerprint'], 'contentUrl': profile['pgp']['url']}) # depends on [control=['if'], data=[]] profile_data['account'] = accounts return profile_data
def select_visible_page_image(infiles, output_file, log, context): """Selects a whole page image that we can show the user (if necessary)""" options = context.get_options() if options.clean_final: image_suffix = '.pp-clean.png' elif options.deskew: image_suffix = '.pp-deskew.png' elif options.remove_background: image_suffix = '.pp-background.png' else: image_suffix = '.page.png' image = next(ii for ii in infiles if ii.endswith(image_suffix)) pageinfo = get_pageinfo(image, context) if pageinfo.images and all(im.enc == 'jpeg' for im in pageinfo.images): log.debug(f'{page_number(image):4d}: JPEG input -> JPEG output') # If all images were JPEGs originally, produce a JPEG as output with Image.open(image) as im: # At this point the image should be a .png, but deskew, unpaper # might have removed the DPI information. In this case, fall back to # square DPI used to rasterize. When the preview image was # rasterized, it was also converted to square resolution, which is # what we want to give tesseract, so keep it square. fallback_dpi = get_page_square_dpi(pageinfo, options) dpi = im.info.get('dpi', (fallback_dpi, fallback_dpi)) # Pillow requires integer DPI dpi = round(dpi[0]), round(dpi[1]) im.save(output_file, format='JPEG', dpi=dpi) else: re_symlink(image, output_file, log)
def function[select_visible_page_image, parameter[infiles, output_file, log, context]]: constant[Selects a whole page image that we can show the user (if necessary)] variable[options] assign[=] call[name[context].get_options, parameter[]] if name[options].clean_final begin[:] variable[image_suffix] assign[=] constant[.pp-clean.png] variable[image] assign[=] call[name[next], parameter[<ast.GeneratorExp object at 0x7da1b1bbb340>]] variable[pageinfo] assign[=] call[name[get_pageinfo], parameter[name[image], name[context]]] if <ast.BoolOp object at 0x7da1b1bb9420> begin[:] call[name[log].debug, parameter[<ast.JoinedStr object at 0x7da1b1bbb040>]] with call[name[Image].open, parameter[name[image]]] begin[:] variable[fallback_dpi] assign[=] call[name[get_page_square_dpi], parameter[name[pageinfo], name[options]]] variable[dpi] assign[=] call[name[im].info.get, parameter[constant[dpi], tuple[[<ast.Name object at 0x7da1b1bd2a70>, <ast.Name object at 0x7da1b1bd3520>]]]] variable[dpi] assign[=] tuple[[<ast.Call object at 0x7da1b1bd3d60>, <ast.Call object at 0x7da1b1bd3f40>]] call[name[im].save, parameter[name[output_file]]]
keyword[def] identifier[select_visible_page_image] ( identifier[infiles] , identifier[output_file] , identifier[log] , identifier[context] ): literal[string] identifier[options] = identifier[context] . identifier[get_options] () keyword[if] identifier[options] . identifier[clean_final] : identifier[image_suffix] = literal[string] keyword[elif] identifier[options] . identifier[deskew] : identifier[image_suffix] = literal[string] keyword[elif] identifier[options] . identifier[remove_background] : identifier[image_suffix] = literal[string] keyword[else] : identifier[image_suffix] = literal[string] identifier[image] = identifier[next] ( identifier[ii] keyword[for] identifier[ii] keyword[in] identifier[infiles] keyword[if] identifier[ii] . identifier[endswith] ( identifier[image_suffix] )) identifier[pageinfo] = identifier[get_pageinfo] ( identifier[image] , identifier[context] ) keyword[if] identifier[pageinfo] . identifier[images] keyword[and] identifier[all] ( identifier[im] . identifier[enc] == literal[string] keyword[for] identifier[im] keyword[in] identifier[pageinfo] . identifier[images] ): identifier[log] . identifier[debug] ( literal[string] ) keyword[with] identifier[Image] . identifier[open] ( identifier[image] ) keyword[as] identifier[im] : identifier[fallback_dpi] = identifier[get_page_square_dpi] ( identifier[pageinfo] , identifier[options] ) identifier[dpi] = identifier[im] . identifier[info] . identifier[get] ( literal[string] ,( identifier[fallback_dpi] , identifier[fallback_dpi] )) identifier[dpi] = identifier[round] ( identifier[dpi] [ literal[int] ]), identifier[round] ( identifier[dpi] [ literal[int] ]) identifier[im] . identifier[save] ( identifier[output_file] , identifier[format] = literal[string] , identifier[dpi] = identifier[dpi] ) keyword[else] : identifier[re_symlink] ( identifier[image] , identifier[output_file] , identifier[log] )
def select_visible_page_image(infiles, output_file, log, context): """Selects a whole page image that we can show the user (if necessary)""" options = context.get_options() if options.clean_final: image_suffix = '.pp-clean.png' # depends on [control=['if'], data=[]] elif options.deskew: image_suffix = '.pp-deskew.png' # depends on [control=['if'], data=[]] elif options.remove_background: image_suffix = '.pp-background.png' # depends on [control=['if'], data=[]] else: image_suffix = '.page.png' image = next((ii for ii in infiles if ii.endswith(image_suffix))) pageinfo = get_pageinfo(image, context) if pageinfo.images and all((im.enc == 'jpeg' for im in pageinfo.images)): log.debug(f'{page_number(image):4d}: JPEG input -> JPEG output') # If all images were JPEGs originally, produce a JPEG as output with Image.open(image) as im: # At this point the image should be a .png, but deskew, unpaper # might have removed the DPI information. In this case, fall back to # square DPI used to rasterize. When the preview image was # rasterized, it was also converted to square resolution, which is # what we want to give tesseract, so keep it square. fallback_dpi = get_page_square_dpi(pageinfo, options) dpi = im.info.get('dpi', (fallback_dpi, fallback_dpi)) # Pillow requires integer DPI dpi = (round(dpi[0]), round(dpi[1])) im.save(output_file, format='JPEG', dpi=dpi) # depends on [control=['with'], data=['im']] # depends on [control=['if'], data=[]] else: re_symlink(image, output_file, log)
def copy_to(name, source, dest, overwrite=False, makedirs=False, path=None): ''' .. versionchanged:: 2015.8.0 Function renamed from ``lxc.cp`` to ``lxc.copy_to`` for consistency with other container types. ``lxc.cp`` will continue to work, however. For versions 2015.2.x and earlier, use ``lxc.cp``. Copy a file or directory from the host into a container name Container name source File to be copied to the container path path to the container parent default: /var/lib/lxc (system default) .. versionadded:: 2015.8.0 dest Destination on the container. Must be an absolute path. .. versionchanged:: 2015.5.0 If the destination is a directory, the file will be copied into that directory. overwrite : False Unless this option is set to ``True``, then if a file exists at the location specified by the ``dest`` argument, an error will be raised. .. versionadded:: 2015.8.0 makedirs : False Create the parent directory on the container if it does not already exist. .. versionadded:: 2015.5.0 CLI Example: .. code-block:: bash salt 'minion' lxc.copy_to /tmp/foo /root/foo salt 'minion' lxc.cp /tmp/foo /root/foo ''' _ensure_running(name, no_start=True, path=path) return __salt__['container_resource.copy_to']( name, source, dest, container_type=__virtualname__, path=path, exec_driver=EXEC_DRIVER, overwrite=overwrite, makedirs=makedirs)
def function[copy_to, parameter[name, source, dest, overwrite, makedirs, path]]: constant[ .. versionchanged:: 2015.8.0 Function renamed from ``lxc.cp`` to ``lxc.copy_to`` for consistency with other container types. ``lxc.cp`` will continue to work, however. For versions 2015.2.x and earlier, use ``lxc.cp``. Copy a file or directory from the host into a container name Container name source File to be copied to the container path path to the container parent default: /var/lib/lxc (system default) .. versionadded:: 2015.8.0 dest Destination on the container. Must be an absolute path. .. versionchanged:: 2015.5.0 If the destination is a directory, the file will be copied into that directory. overwrite : False Unless this option is set to ``True``, then if a file exists at the location specified by the ``dest`` argument, an error will be raised. .. versionadded:: 2015.8.0 makedirs : False Create the parent directory on the container if it does not already exist. .. versionadded:: 2015.5.0 CLI Example: .. code-block:: bash salt 'minion' lxc.copy_to /tmp/foo /root/foo salt 'minion' lxc.cp /tmp/foo /root/foo ] call[name[_ensure_running], parameter[name[name]]] return[call[call[name[__salt__]][constant[container_resource.copy_to]], parameter[name[name], name[source], name[dest]]]]
keyword[def] identifier[copy_to] ( identifier[name] , identifier[source] , identifier[dest] , identifier[overwrite] = keyword[False] , identifier[makedirs] = keyword[False] , identifier[path] = keyword[None] ): literal[string] identifier[_ensure_running] ( identifier[name] , identifier[no_start] = keyword[True] , identifier[path] = identifier[path] ) keyword[return] identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[source] , identifier[dest] , identifier[container_type] = identifier[__virtualname__] , identifier[path] = identifier[path] , identifier[exec_driver] = identifier[EXEC_DRIVER] , identifier[overwrite] = identifier[overwrite] , identifier[makedirs] = identifier[makedirs] )
def copy_to(name, source, dest, overwrite=False, makedirs=False, path=None): """ .. versionchanged:: 2015.8.0 Function renamed from ``lxc.cp`` to ``lxc.copy_to`` for consistency with other container types. ``lxc.cp`` will continue to work, however. For versions 2015.2.x and earlier, use ``lxc.cp``. Copy a file or directory from the host into a container name Container name source File to be copied to the container path path to the container parent default: /var/lib/lxc (system default) .. versionadded:: 2015.8.0 dest Destination on the container. Must be an absolute path. .. versionchanged:: 2015.5.0 If the destination is a directory, the file will be copied into that directory. overwrite : False Unless this option is set to ``True``, then if a file exists at the location specified by the ``dest`` argument, an error will be raised. .. versionadded:: 2015.8.0 makedirs : False Create the parent directory on the container if it does not already exist. .. versionadded:: 2015.5.0 CLI Example: .. code-block:: bash salt 'minion' lxc.copy_to /tmp/foo /root/foo salt 'minion' lxc.cp /tmp/foo /root/foo """ _ensure_running(name, no_start=True, path=path) return __salt__['container_resource.copy_to'](name, source, dest, container_type=__virtualname__, path=path, exec_driver=EXEC_DRIVER, overwrite=overwrite, makedirs=makedirs)
def add_template_global(self, f, name=None): """Register a custom template global function. Works exactly like the :meth:`template_global` decorator. .. versionadded:: 0.10 :param name: the optional name of the global function, otherwise the function name will be used. """ self.jinja_env.globals[name or f.__name__] = f
def function[add_template_global, parameter[self, f, name]]: constant[Register a custom template global function. Works exactly like the :meth:`template_global` decorator. .. versionadded:: 0.10 :param name: the optional name of the global function, otherwise the function name will be used. ] call[name[self].jinja_env.globals][<ast.BoolOp object at 0x7da20c6a9c60>] assign[=] name[f]
keyword[def] identifier[add_template_global] ( identifier[self] , identifier[f] , identifier[name] = keyword[None] ): literal[string] identifier[self] . identifier[jinja_env] . identifier[globals] [ identifier[name] keyword[or] identifier[f] . identifier[__name__] ]= identifier[f]
def add_template_global(self, f, name=None): """Register a custom template global function. Works exactly like the :meth:`template_global` decorator. .. versionadded:: 0.10 :param name: the optional name of the global function, otherwise the function name will be used. """ self.jinja_env.globals[name or f.__name__] = f
def get_icloud_folder_location(): """ Try to locate the iCloud Drive folder. Returns: (str) Full path to the iCloud Drive folder. """ yosemite_icloud_path = '~/Library/Mobile Documents/com~apple~CloudDocs/' icloud_home = os.path.expanduser(yosemite_icloud_path) if not os.path.isdir(icloud_home): error('Unable to find your iCloud Drive =(') return str(icloud_home)
def function[get_icloud_folder_location, parameter[]]: constant[ Try to locate the iCloud Drive folder. Returns: (str) Full path to the iCloud Drive folder. ] variable[yosemite_icloud_path] assign[=] constant[~/Library/Mobile Documents/com~apple~CloudDocs/] variable[icloud_home] assign[=] call[name[os].path.expanduser, parameter[name[yosemite_icloud_path]]] if <ast.UnaryOp object at 0x7da18bc71810> begin[:] call[name[error], parameter[constant[Unable to find your iCloud Drive =(]]] return[call[name[str], parameter[name[icloud_home]]]]
keyword[def] identifier[get_icloud_folder_location] (): literal[string] identifier[yosemite_icloud_path] = literal[string] identifier[icloud_home] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[yosemite_icloud_path] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[icloud_home] ): identifier[error] ( literal[string] ) keyword[return] identifier[str] ( identifier[icloud_home] )
def get_icloud_folder_location(): """ Try to locate the iCloud Drive folder. Returns: (str) Full path to the iCloud Drive folder. """ yosemite_icloud_path = '~/Library/Mobile Documents/com~apple~CloudDocs/' icloud_home = os.path.expanduser(yosemite_icloud_path) if not os.path.isdir(icloud_home): error('Unable to find your iCloud Drive =(') # depends on [control=['if'], data=[]] return str(icloud_home)
def utils(opts, whitelist=None, context=None, proxy=proxy): ''' Returns the utility modules ''' return LazyLoader( _module_dirs(opts, 'utils', ext_type_dirs='utils_dirs'), opts, tag='utils', whitelist=whitelist, pack={'__context__': context, '__proxy__': proxy or {}}, )
def function[utils, parameter[opts, whitelist, context, proxy]]: constant[ Returns the utility modules ] return[call[name[LazyLoader], parameter[call[name[_module_dirs], parameter[name[opts], constant[utils]]], name[opts]]]]
keyword[def] identifier[utils] ( identifier[opts] , identifier[whitelist] = keyword[None] , identifier[context] = keyword[None] , identifier[proxy] = identifier[proxy] ): literal[string] keyword[return] identifier[LazyLoader] ( identifier[_module_dirs] ( identifier[opts] , literal[string] , identifier[ext_type_dirs] = literal[string] ), identifier[opts] , identifier[tag] = literal[string] , identifier[whitelist] = identifier[whitelist] , identifier[pack] ={ literal[string] : identifier[context] , literal[string] : identifier[proxy] keyword[or] {}}, )
def utils(opts, whitelist=None, context=None, proxy=proxy): """ Returns the utility modules """ return LazyLoader(_module_dirs(opts, 'utils', ext_type_dirs='utils_dirs'), opts, tag='utils', whitelist=whitelist, pack={'__context__': context, '__proxy__': proxy or {}})
def create_parser_options(lazy_mfcollection_parsing: bool = False) -> Dict[str, Dict[str, Any]]: """ Utility method to create a default options structure with the lazy parsing inside :param lazy_mfcollection_parsing: :return: the options structure filled with lazyparsing option (for the MultifileCollectionParser) """ return {MultifileCollectionParser.__name__: {'lazy_parsing': lazy_mfcollection_parsing}}
def function[create_parser_options, parameter[lazy_mfcollection_parsing]]: constant[ Utility method to create a default options structure with the lazy parsing inside :param lazy_mfcollection_parsing: :return: the options structure filled with lazyparsing option (for the MultifileCollectionParser) ] return[dictionary[[<ast.Attribute object at 0x7da207f01f00>], [<ast.Dict object at 0x7da207f02bf0>]]]
keyword[def] identifier[create_parser_options] ( identifier[lazy_mfcollection_parsing] : identifier[bool] = keyword[False] )-> identifier[Dict] [ identifier[str] , identifier[Dict] [ identifier[str] , identifier[Any] ]]: literal[string] keyword[return] { identifier[MultifileCollectionParser] . identifier[__name__] :{ literal[string] : identifier[lazy_mfcollection_parsing] }}
def create_parser_options(lazy_mfcollection_parsing: bool=False) -> Dict[str, Dict[str, Any]]: """ Utility method to create a default options structure with the lazy parsing inside :param lazy_mfcollection_parsing: :return: the options structure filled with lazyparsing option (for the MultifileCollectionParser) """ return {MultifileCollectionParser.__name__: {'lazy_parsing': lazy_mfcollection_parsing}}
def trailing_stop(api_svr_ip='127.0.0.1', api_svr_port=11111, unlock_password="", code='HK.00700', trade_env=ft.TrdEnv.SIMULATE, method=TrailingMethod.DROP_ABS, drop=1.0, volume=100, how_to_sell=SellMethod.SMART_SELL, diff=0, rest_time=2, enable_email_notification=False, receiver=''): """ 止损策略函数 :param api_svr_ip: (string)ip :param api_svr_port: (int)port :param unlock_password: (string)交易解锁密码, 必需修改! 模拟交易设为一个非空字符串即可 :param code: (string)股票 :param trade_env: ft.TrdEnv.REAL: 真实交易 ft.TrdEnv.SIMULATE: 模拟交易 :param method: method == TrailingMethod.DROP_ABS: 股票下跌drop价格就会止损 railingMethod.DROP_PER: 股票下跌drop的百分比就会止损 :param drop: method == TrailingMethod.DROP_ABS, 股票下跌的价格 method == TrailingMethod.DROP_PER,股票下跌的百分比,0.01表示下跌1%则止损 :param volume: 需要卖掉的股票数量 :param how_to_sell: 以何种方式卖出股票, SellMethod 类型 :param diff: 默认为0,当how_to_sell为SellMethod.DROP_ABS时,以(市价-diff)的价格卖出 :param rest_time: 每隔REST_TIME秒,会检查订单状态, 需要>=2 :param enable_email_notification: 激活email功能 :param receiver: 邮件接收者 """ EmailNotification.set_enable(enable_email_notification) if how_to_sell not in [SellMethod.SIMPLE_SELL, SellMethod.SMART_SELL]: raise Exception('how_to_sell value error') if method not in [TrailingMethod.DROP_ABS, TrailingMethod.DROP_PER]: raise Exception('method value error') quote_ctx = ft.OpenQuoteContext(host=api_svr_ip, port=api_svr_port) is_hk_trade = 'HK.' 
in code if is_hk_trade: trade_ctx = ft.OpenHKTradeContext(host=api_svr_ip, port=api_svr_port) else: trade_ctx = ft.OpenUSTradeContext(host=api_svr_ip, port=api_svr_port) if unlock_password == "": raise Exception('请先配置交易密码') ret, data = trade_ctx.unlock_trade(unlock_password) if ret != ft.RET_OK: raise Exception('解锁交易失败') ret, data = trade_ctx.position_list_query(trd_env=trd_env) if ret != ft.RET_OK: raise Exception("无法获取持仓列表") try: qty = data[data['code'] == code].iloc[0]['qty'] except: raise Exception('你没有持仓!无法买卖') qty = int(qty) if volume == 0: volume = qty if volume < 0: raise Exception('volume lower than 0') elif qty < volume: raise Exception('持仓不足') ret, data = quote_ctx.get_market_snapshot(code) if ret != ft.RET_OK: raise Exception('获取lot size失败') lot_size = data.iloc[0]['lot_size'] if volume % lot_size != 0: raise Exception('volume 必须是{}的整数倍'.format(lot_size)) ret, data = quote_ctx.subscribe(code, ft.SubType.QUOTE) if ret != ft.RET_OK: raise Exception('订阅QUOTE错误: error {}:{}'.format(ret, data)) ret, data = quote_ctx.subscribe(code, ft.SubType.ORDER_BOOK) if ret != ft.RET_OK: print('error {}:{}'.format(ret, data)) raise Exception('订阅order book失败: error {}:{}'.format(ret, data)) if diff: if is_hk_trade: ret, data = quote_ctx.get_order_book(code) if ret != ft.RET_OK: raise Exception('获取order book失败: cannot get order book'.format(data)) min_diff = round(abs(data['Bid'][0][0] - data['Bid'][1][0]), 3) if floor(diff / min_diff) * min_diff != diff: raise Exception('diff 应是{}的整数倍'.format(min_diff)) else: if round(diff, 2) != diff: raise Exception('美股价差保留2位小数{}->{}'.format(diff, round(diff, 2))) if method == TrailingMethod.DROP_ABS: if is_hk_trade: if floor(drop / min_diff) * min_diff != drop: raise Exception('drop必须是{}的整数倍'.format(min_diff)) else: if round(drop, 2) != drop: raise Exception('drop必须保留2位小数{}->{}'.format(drop, round(drop, 2))) elif method == TrailingMethod.DROP_PER: if drop < 0 or drop > 1: raise Exception('drop must in [0, 1] if method is DROP_PER') 
trailing_stop_handler = TrailingStopHandler(quote_ctx, is_hk_trade, method, drop) quote_ctx.set_handler(trailing_stop_handler) quote_ctx.start() while True: if trailing_stop_handler.finished: # sell the stock qty = volume sell_price = trailing_stop_handler.stop while qty > 0: if how_to_sell == SellMethod.SIMPLE_SELL: data = simple_sell(quote_ctx, trade_ctx, code, sell_price - diff, qty, trade_env, ft.OrderType.SPECIAL_LIMIT) else: data = smart_sell(quote_ctx, trade_ctx, code, qty, trade_env, ft.OrderType.SPECIAL_LIMIT) if data is None: print('下单失败') EmailNotification.send_email(receiver, '下单失败', '股票代码{},数量{}'.format(code, volume)) sleep(rest_time) continue order_id = data.iloc[0]['order_id'] sleep(rest_time) while True: ret, data = trade_ctx.order_list_query(order_id=order_id, trd_env=trade_env) if ret != ft.RET_OK: sleep(rest_time) continue status = data.iloc[0]['order_status'] dealt_qty = int(data.iloc[0]['dealt_qty']) order_price = data.iloc[0]['price'] qty -= dealt_qty if status == ft.OrderStatus.FILLED_ALL: print('全部成交:股票代码{}, 成交总数{},价格{}'.format(code, dealt_qty, order_price)) EmailNotification.send_email(receiver, '全部成交', '股票代码{},成交总数{},价格{}' .format(code, dealt_qty, order_price)) break elif status == ft.OrderStatus.FILLED_PART: print('部分成交:股票代码{},成交总数{},价格{}'.format(code, dealt_qty, order_price)) EmailNotification.send_email(receiver, '部分成交', '股票代码{},成交总数{},价格{}' .format(code, dealt_qty, order_price)) break elif status == ft.OrderStatus.FAILED or status == ft.OrderStatus.SUBMIT_FAILED or \ status == ft.OrderStatus.CANCELLED_ALL or status == ft.OrderStatus.DELETED: break else: trade_ctx.modify_order(ft.ModifyOrderOp.CANCEL, order_id, 0, 0) sleep(rest_time) continue if how_to_sell == SellMethod.SIMPLE_SELL: ret, data = quote_ctx.get_order_book(code) if ret != ft.RET_OK: raise Exception('获取order_book失败') sell_price = data['Bid'][0][0] # draw price and stop price_lst = trailing_stop_handler.price_lst plt.plot(np.arange(len(price_lst)), price_lst) stop_list = 
trailing_stop_handler.stop_lst plt.plot(np.arange(len(stop_list)), stop_list) break quote_ctx.close() trade_ctx.close()
def function[trailing_stop, parameter[api_svr_ip, api_svr_port, unlock_password, code, trade_env, method, drop, volume, how_to_sell, diff, rest_time, enable_email_notification, receiver]]: constant[ 止损策略函数 :param api_svr_ip: (string)ip :param api_svr_port: (int)port :param unlock_password: (string)交易解锁密码, 必需修改! 模拟交易设为一个非空字符串即可 :param code: (string)股票 :param trade_env: ft.TrdEnv.REAL: 真实交易 ft.TrdEnv.SIMULATE: 模拟交易 :param method: method == TrailingMethod.DROP_ABS: 股票下跌drop价格就会止损 railingMethod.DROP_PER: 股票下跌drop的百分比就会止损 :param drop: method == TrailingMethod.DROP_ABS, 股票下跌的价格 method == TrailingMethod.DROP_PER,股票下跌的百分比,0.01表示下跌1%则止损 :param volume: 需要卖掉的股票数量 :param how_to_sell: 以何种方式卖出股票, SellMethod 类型 :param diff: 默认为0,当how_to_sell为SellMethod.DROP_ABS时,以(市价-diff)的价格卖出 :param rest_time: 每隔REST_TIME秒,会检查订单状态, 需要>=2 :param enable_email_notification: 激活email功能 :param receiver: 邮件接收者 ] call[name[EmailNotification].set_enable, parameter[name[enable_email_notification]]] if compare[name[how_to_sell] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b0783730>, <ast.Attribute object at 0x7da1b07836d0>]]] begin[:] <ast.Raise object at 0x7da1b0783670> if compare[name[method] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b07834f0>, <ast.Attribute object at 0x7da1b0783490>]]] begin[:] <ast.Raise object at 0x7da1b0783430> variable[quote_ctx] assign[=] call[name[ft].OpenQuoteContext, parameter[]] variable[is_hk_trade] assign[=] compare[constant[HK.] 
in name[code]] if name[is_hk_trade] begin[:] variable[trade_ctx] assign[=] call[name[ft].OpenHKTradeContext, parameter[]] if compare[name[unlock_password] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da1b0782c50> <ast.Tuple object at 0x7da1b0782b60> assign[=] call[name[trade_ctx].unlock_trade, parameter[name[unlock_password]]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] <ast.Raise object at 0x7da1b0782920> <ast.Tuple object at 0x7da1b0782830> assign[=] call[name[trade_ctx].position_list_query, parameter[]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] <ast.Raise object at 0x7da1b07825c0> <ast.Try object at 0x7da1b07bc040> variable[qty] assign[=] call[name[int], parameter[name[qty]]] if compare[name[volume] equal[==] constant[0]] begin[:] variable[volume] assign[=] name[qty] if compare[name[volume] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b07bca90> <ast.Tuple object at 0x7da1b07bdba0> assign[=] call[name[quote_ctx].get_market_snapshot, parameter[name[code]]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] <ast.Raise object at 0x7da1b07bcb80> variable[lot_size] assign[=] call[call[name[data].iloc][constant[0]]][constant[lot_size]] if compare[binary_operation[name[volume] <ast.Mod object at 0x7da2590d6920> name[lot_size]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b07bd990> <ast.Tuple object at 0x7da1b07bf9d0> assign[=] call[name[quote_ctx].subscribe, parameter[name[code], name[ft].SubType.QUOTE]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] <ast.Raise object at 0x7da1b07bd2d0> <ast.Tuple object at 0x7da1b07bdc30> assign[=] call[name[quote_ctx].subscribe, parameter[name[code], name[ft].SubType.ORDER_BOOK]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] call[name[print], parameter[call[constant[error {}:{}].format, parameter[name[ret], name[data]]]]] <ast.Raise object at 0x7da1b07bd330> if name[diff] begin[:] if name[is_hk_trade] begin[:] <ast.Tuple 
object at 0x7da1b07bc1c0> assign[=] call[name[quote_ctx].get_order_book, parameter[name[code]]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] <ast.Raise object at 0x7da1b07bd270> variable[min_diff] assign[=] call[name[round], parameter[call[name[abs], parameter[binary_operation[call[call[call[name[data]][constant[Bid]]][constant[0]]][constant[0]] - call[call[call[name[data]][constant[Bid]]][constant[1]]][constant[0]]]]], constant[3]]] if compare[binary_operation[call[name[floor], parameter[binary_operation[name[diff] / name[min_diff]]]] * name[min_diff]] not_equal[!=] name[diff]] begin[:] <ast.Raise object at 0x7da1b07bda20> if compare[name[method] equal[==] name[TrailingMethod].DROP_ABS] begin[:] if name[is_hk_trade] begin[:] if compare[binary_operation[call[name[floor], parameter[binary_operation[name[drop] / name[min_diff]]]] * name[min_diff]] not_equal[!=] name[drop]] begin[:] <ast.Raise object at 0x7da18f810ee0> variable[trailing_stop_handler] assign[=] call[name[TrailingStopHandler], parameter[name[quote_ctx], name[is_hk_trade], name[method], name[drop]]] call[name[quote_ctx].set_handler, parameter[name[trailing_stop_handler]]] call[name[quote_ctx].start, parameter[]] while constant[True] begin[:] if name[trailing_stop_handler].finished begin[:] variable[qty] assign[=] name[volume] variable[sell_price] assign[=] name[trailing_stop_handler].stop while compare[name[qty] greater[>] constant[0]] begin[:] if compare[name[how_to_sell] equal[==] name[SellMethod].SIMPLE_SELL] begin[:] variable[data] assign[=] call[name[simple_sell], parameter[name[quote_ctx], name[trade_ctx], name[code], binary_operation[name[sell_price] - name[diff]], name[qty], name[trade_env], name[ft].OrderType.SPECIAL_LIMIT]] if compare[name[data] is constant[None]] begin[:] call[name[print], parameter[constant[下单失败]]] call[name[EmailNotification].send_email, parameter[name[receiver], constant[下单失败], call[constant[股票代码{},数量{}].format, parameter[name[code], name[volume]]]]] 
call[name[sleep], parameter[name[rest_time]]] continue variable[order_id] assign[=] call[call[name[data].iloc][constant[0]]][constant[order_id]] call[name[sleep], parameter[name[rest_time]]] while constant[True] begin[:] <ast.Tuple object at 0x7da18f58ec50> assign[=] call[name[trade_ctx].order_list_query, parameter[]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] call[name[sleep], parameter[name[rest_time]]] continue variable[status] assign[=] call[call[name[data].iloc][constant[0]]][constant[order_status]] variable[dealt_qty] assign[=] call[name[int], parameter[call[call[name[data].iloc][constant[0]]][constant[dealt_qty]]]] variable[order_price] assign[=] call[call[name[data].iloc][constant[0]]][constant[price]] <ast.AugAssign object at 0x7da18f58d840> if compare[name[status] equal[==] name[ft].OrderStatus.FILLED_ALL] begin[:] call[name[print], parameter[call[constant[全部成交:股票代码{}, 成交总数{},价格{}].format, parameter[name[code], name[dealt_qty], name[order_price]]]]] call[name[EmailNotification].send_email, parameter[name[receiver], constant[全部成交], call[constant[股票代码{},成交总数{},价格{}].format, parameter[name[code], name[dealt_qty], name[order_price]]]]] break if compare[name[how_to_sell] equal[==] name[SellMethod].SIMPLE_SELL] begin[:] <ast.Tuple object at 0x7da1b07f9480> assign[=] call[name[quote_ctx].get_order_book, parameter[name[code]]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] <ast.Raise object at 0x7da1b07fab00> variable[sell_price] assign[=] call[call[call[name[data]][constant[Bid]]][constant[0]]][constant[0]] variable[price_lst] assign[=] name[trailing_stop_handler].price_lst call[name[plt].plot, parameter[call[name[np].arange, parameter[call[name[len], parameter[name[price_lst]]]]], name[price_lst]]] variable[stop_list] assign[=] name[trailing_stop_handler].stop_lst call[name[plt].plot, parameter[call[name[np].arange, parameter[call[name[len], parameter[name[stop_list]]]]], name[stop_list]]] break call[name[quote_ctx].close, 
parameter[]] call[name[trade_ctx].close, parameter[]]
keyword[def] identifier[trailing_stop] ( identifier[api_svr_ip] = literal[string] , identifier[api_svr_port] = literal[int] , identifier[unlock_password] = literal[string] , identifier[code] = literal[string] , identifier[trade_env] = identifier[ft] . identifier[TrdEnv] . identifier[SIMULATE] , identifier[method] = identifier[TrailingMethod] . identifier[DROP_ABS] , identifier[drop] = literal[int] , identifier[volume] = literal[int] , identifier[how_to_sell] = identifier[SellMethod] . identifier[SMART_SELL] , identifier[diff] = literal[int] , identifier[rest_time] = literal[int] , identifier[enable_email_notification] = keyword[False] , identifier[receiver] = literal[string] ): literal[string] identifier[EmailNotification] . identifier[set_enable] ( identifier[enable_email_notification] ) keyword[if] identifier[how_to_sell] keyword[not] keyword[in] [ identifier[SellMethod] . identifier[SIMPLE_SELL] , identifier[SellMethod] . identifier[SMART_SELL] ]: keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[method] keyword[not] keyword[in] [ identifier[TrailingMethod] . identifier[DROP_ABS] , identifier[TrailingMethod] . identifier[DROP_PER] ]: keyword[raise] identifier[Exception] ( literal[string] ) identifier[quote_ctx] = identifier[ft] . identifier[OpenQuoteContext] ( identifier[host] = identifier[api_svr_ip] , identifier[port] = identifier[api_svr_port] ) identifier[is_hk_trade] = literal[string] keyword[in] identifier[code] keyword[if] identifier[is_hk_trade] : identifier[trade_ctx] = identifier[ft] . identifier[OpenHKTradeContext] ( identifier[host] = identifier[api_svr_ip] , identifier[port] = identifier[api_svr_port] ) keyword[else] : identifier[trade_ctx] = identifier[ft] . 
identifier[OpenUSTradeContext] ( identifier[host] = identifier[api_svr_ip] , identifier[port] = identifier[api_svr_port] ) keyword[if] identifier[unlock_password] == literal[string] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[ret] , identifier[data] = identifier[trade_ctx] . identifier[unlock_trade] ( identifier[unlock_password] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[ret] , identifier[data] = identifier[trade_ctx] . identifier[position_list_query] ( identifier[trd_env] = identifier[trd_env] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[try] : identifier[qty] = identifier[data] [ identifier[data] [ literal[string] ]== identifier[code] ]. identifier[iloc] [ literal[int] ][ literal[string] ] keyword[except] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[qty] = identifier[int] ( identifier[qty] ) keyword[if] identifier[volume] == literal[int] : identifier[volume] = identifier[qty] keyword[if] identifier[volume] < literal[int] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[elif] identifier[qty] < identifier[volume] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[get_market_snapshot] ( identifier[code] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[lot_size] = identifier[data] . identifier[iloc] [ literal[int] ][ literal[string] ] keyword[if] identifier[volume] % identifier[lot_size] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[lot_size] )) identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[subscribe] ( identifier[code] , identifier[ft] . identifier[SubType] . 
identifier[QUOTE] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[ret] , identifier[data] )) identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[subscribe] ( identifier[code] , identifier[ft] . identifier[SubType] . identifier[ORDER_BOOK] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : identifier[print] ( literal[string] . identifier[format] ( identifier[ret] , identifier[data] )) keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[ret] , identifier[data] )) keyword[if] identifier[diff] : keyword[if] identifier[is_hk_trade] : identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[get_order_book] ( identifier[code] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[data] )) identifier[min_diff] = identifier[round] ( identifier[abs] ( identifier[data] [ literal[string] ][ literal[int] ][ literal[int] ]- identifier[data] [ literal[string] ][ literal[int] ][ literal[int] ]), literal[int] ) keyword[if] identifier[floor] ( identifier[diff] / identifier[min_diff] )* identifier[min_diff] != identifier[diff] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[min_diff] )) keyword[else] : keyword[if] identifier[round] ( identifier[diff] , literal[int] )!= identifier[diff] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[diff] , identifier[round] ( identifier[diff] , literal[int] ))) keyword[if] identifier[method] == identifier[TrailingMethod] . identifier[DROP_ABS] : keyword[if] identifier[is_hk_trade] : keyword[if] identifier[floor] ( identifier[drop] / identifier[min_diff] )* identifier[min_diff] != identifier[drop] : keyword[raise] identifier[Exception] ( literal[string] . 
identifier[format] ( identifier[min_diff] )) keyword[else] : keyword[if] identifier[round] ( identifier[drop] , literal[int] )!= identifier[drop] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[drop] , identifier[round] ( identifier[drop] , literal[int] ))) keyword[elif] identifier[method] == identifier[TrailingMethod] . identifier[DROP_PER] : keyword[if] identifier[drop] < literal[int] keyword[or] identifier[drop] > literal[int] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[trailing_stop_handler] = identifier[TrailingStopHandler] ( identifier[quote_ctx] , identifier[is_hk_trade] , identifier[method] , identifier[drop] ) identifier[quote_ctx] . identifier[set_handler] ( identifier[trailing_stop_handler] ) identifier[quote_ctx] . identifier[start] () keyword[while] keyword[True] : keyword[if] identifier[trailing_stop_handler] . identifier[finished] : identifier[qty] = identifier[volume] identifier[sell_price] = identifier[trailing_stop_handler] . identifier[stop] keyword[while] identifier[qty] > literal[int] : keyword[if] identifier[how_to_sell] == identifier[SellMethod] . identifier[SIMPLE_SELL] : identifier[data] = identifier[simple_sell] ( identifier[quote_ctx] , identifier[trade_ctx] , identifier[code] , identifier[sell_price] - identifier[diff] , identifier[qty] , identifier[trade_env] , identifier[ft] . identifier[OrderType] . identifier[SPECIAL_LIMIT] ) keyword[else] : identifier[data] = identifier[smart_sell] ( identifier[quote_ctx] , identifier[trade_ctx] , identifier[code] , identifier[qty] , identifier[trade_env] , identifier[ft] . identifier[OrderType] . identifier[SPECIAL_LIMIT] ) keyword[if] identifier[data] keyword[is] keyword[None] : identifier[print] ( literal[string] ) identifier[EmailNotification] . identifier[send_email] ( identifier[receiver] , literal[string] , literal[string] . 
identifier[format] ( identifier[code] , identifier[volume] )) identifier[sleep] ( identifier[rest_time] ) keyword[continue] identifier[order_id] = identifier[data] . identifier[iloc] [ literal[int] ][ literal[string] ] identifier[sleep] ( identifier[rest_time] ) keyword[while] keyword[True] : identifier[ret] , identifier[data] = identifier[trade_ctx] . identifier[order_list_query] ( identifier[order_id] = identifier[order_id] , identifier[trd_env] = identifier[trade_env] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : identifier[sleep] ( identifier[rest_time] ) keyword[continue] identifier[status] = identifier[data] . identifier[iloc] [ literal[int] ][ literal[string] ] identifier[dealt_qty] = identifier[int] ( identifier[data] . identifier[iloc] [ literal[int] ][ literal[string] ]) identifier[order_price] = identifier[data] . identifier[iloc] [ literal[int] ][ literal[string] ] identifier[qty] -= identifier[dealt_qty] keyword[if] identifier[status] == identifier[ft] . identifier[OrderStatus] . identifier[FILLED_ALL] : identifier[print] ( literal[string] . identifier[format] ( identifier[code] , identifier[dealt_qty] , identifier[order_price] )) identifier[EmailNotification] . identifier[send_email] ( identifier[receiver] , literal[string] , literal[string] . identifier[format] ( identifier[code] , identifier[dealt_qty] , identifier[order_price] )) keyword[break] keyword[elif] identifier[status] == identifier[ft] . identifier[OrderStatus] . identifier[FILLED_PART] : identifier[print] ( literal[string] . identifier[format] ( identifier[code] , identifier[dealt_qty] , identifier[order_price] )) identifier[EmailNotification] . identifier[send_email] ( identifier[receiver] , literal[string] , literal[string] . identifier[format] ( identifier[code] , identifier[dealt_qty] , identifier[order_price] )) keyword[break] keyword[elif] identifier[status] == identifier[ft] . identifier[OrderStatus] . 
identifier[FAILED] keyword[or] identifier[status] == identifier[ft] . identifier[OrderStatus] . identifier[SUBMIT_FAILED] keyword[or] identifier[status] == identifier[ft] . identifier[OrderStatus] . identifier[CANCELLED_ALL] keyword[or] identifier[status] == identifier[ft] . identifier[OrderStatus] . identifier[DELETED] : keyword[break] keyword[else] : identifier[trade_ctx] . identifier[modify_order] ( identifier[ft] . identifier[ModifyOrderOp] . identifier[CANCEL] , identifier[order_id] , literal[int] , literal[int] ) identifier[sleep] ( identifier[rest_time] ) keyword[continue] keyword[if] identifier[how_to_sell] == identifier[SellMethod] . identifier[SIMPLE_SELL] : identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[get_order_book] ( identifier[code] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[sell_price] = identifier[data] [ literal[string] ][ literal[int] ][ literal[int] ] identifier[price_lst] = identifier[trailing_stop_handler] . identifier[price_lst] identifier[plt] . identifier[plot] ( identifier[np] . identifier[arange] ( identifier[len] ( identifier[price_lst] )), identifier[price_lst] ) identifier[stop_list] = identifier[trailing_stop_handler] . identifier[stop_lst] identifier[plt] . identifier[plot] ( identifier[np] . identifier[arange] ( identifier[len] ( identifier[stop_list] )), identifier[stop_list] ) keyword[break] identifier[quote_ctx] . identifier[close] () identifier[trade_ctx] . identifier[close] ()
def trailing_stop(api_svr_ip='127.0.0.1', api_svr_port=11111, unlock_password='', code='HK.00700', trade_env=ft.TrdEnv.SIMULATE, method=TrailingMethod.DROP_ABS, drop=1.0, volume=100, how_to_sell=SellMethod.SMART_SELL, diff=0, rest_time=2, enable_email_notification=False, receiver=''): """ 止损策略函数 :param api_svr_ip: (string)ip :param api_svr_port: (int)port :param unlock_password: (string)交易解锁密码, 必需修改! 模拟交易设为一个非空字符串即可 :param code: (string)股票 :param trade_env: ft.TrdEnv.REAL: 真实交易 ft.TrdEnv.SIMULATE: 模拟交易 :param method: method == TrailingMethod.DROP_ABS: 股票下跌drop价格就会止损 railingMethod.DROP_PER: 股票下跌drop的百分比就会止损 :param drop: method == TrailingMethod.DROP_ABS, 股票下跌的价格 method == TrailingMethod.DROP_PER,股票下跌的百分比,0.01表示下跌1%则止损 :param volume: 需要卖掉的股票数量 :param how_to_sell: 以何种方式卖出股票, SellMethod 类型 :param diff: 默认为0,当how_to_sell为SellMethod.DROP_ABS时,以(市价-diff)的价格卖出 :param rest_time: 每隔REST_TIME秒,会检查订单状态, 需要>=2 :param enable_email_notification: 激活email功能 :param receiver: 邮件接收者 """ EmailNotification.set_enable(enable_email_notification) if how_to_sell not in [SellMethod.SIMPLE_SELL, SellMethod.SMART_SELL]: raise Exception('how_to_sell value error') # depends on [control=['if'], data=[]] if method not in [TrailingMethod.DROP_ABS, TrailingMethod.DROP_PER]: raise Exception('method value error') # depends on [control=['if'], data=[]] quote_ctx = ft.OpenQuoteContext(host=api_svr_ip, port=api_svr_port) is_hk_trade = 'HK.' 
in code if is_hk_trade: trade_ctx = ft.OpenHKTradeContext(host=api_svr_ip, port=api_svr_port) # depends on [control=['if'], data=[]] else: trade_ctx = ft.OpenUSTradeContext(host=api_svr_ip, port=api_svr_port) if unlock_password == '': raise Exception('请先配置交易密码') # depends on [control=['if'], data=[]] (ret, data) = trade_ctx.unlock_trade(unlock_password) if ret != ft.RET_OK: raise Exception('解锁交易失败') # depends on [control=['if'], data=[]] (ret, data) = trade_ctx.position_list_query(trd_env=trd_env) if ret != ft.RET_OK: raise Exception('无法获取持仓列表') # depends on [control=['if'], data=[]] try: qty = data[data['code'] == code].iloc[0]['qty'] # depends on [control=['try'], data=[]] except: raise Exception('你没有持仓!无法买卖') # depends on [control=['except'], data=[]] qty = int(qty) if volume == 0: volume = qty # depends on [control=['if'], data=['volume']] if volume < 0: raise Exception('volume lower than 0') # depends on [control=['if'], data=[]] elif qty < volume: raise Exception('持仓不足') # depends on [control=['if'], data=[]] (ret, data) = quote_ctx.get_market_snapshot(code) if ret != ft.RET_OK: raise Exception('获取lot size失败') # depends on [control=['if'], data=[]] lot_size = data.iloc[0]['lot_size'] if volume % lot_size != 0: raise Exception('volume 必须是{}的整数倍'.format(lot_size)) # depends on [control=['if'], data=[]] (ret, data) = quote_ctx.subscribe(code, ft.SubType.QUOTE) if ret != ft.RET_OK: raise Exception('订阅QUOTE错误: error {}:{}'.format(ret, data)) # depends on [control=['if'], data=['ret']] (ret, data) = quote_ctx.subscribe(code, ft.SubType.ORDER_BOOK) if ret != ft.RET_OK: print('error {}:{}'.format(ret, data)) raise Exception('订阅order book失败: error {}:{}'.format(ret, data)) # depends on [control=['if'], data=['ret']] if diff: if is_hk_trade: (ret, data) = quote_ctx.get_order_book(code) if ret != ft.RET_OK: raise Exception('获取order book失败: cannot get order book'.format(data)) # depends on [control=['if'], data=[]] min_diff = round(abs(data['Bid'][0][0] - 
data['Bid'][1][0]), 3) if floor(diff / min_diff) * min_diff != diff: raise Exception('diff 应是{}的整数倍'.format(min_diff)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif round(diff, 2) != diff: raise Exception('美股价差保留2位小数{}->{}'.format(diff, round(diff, 2))) # depends on [control=['if'], data=['diff']] # depends on [control=['if'], data=[]] if method == TrailingMethod.DROP_ABS: if is_hk_trade: if floor(drop / min_diff) * min_diff != drop: raise Exception('drop必须是{}的整数倍'.format(min_diff)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif round(drop, 2) != drop: raise Exception('drop必须保留2位小数{}->{}'.format(drop, round(drop, 2))) # depends on [control=['if'], data=['drop']] # depends on [control=['if'], data=[]] elif method == TrailingMethod.DROP_PER: if drop < 0 or drop > 1: raise Exception('drop must in [0, 1] if method is DROP_PER') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] trailing_stop_handler = TrailingStopHandler(quote_ctx, is_hk_trade, method, drop) quote_ctx.set_handler(trailing_stop_handler) quote_ctx.start() while True: if trailing_stop_handler.finished: # sell the stock qty = volume sell_price = trailing_stop_handler.stop while qty > 0: if how_to_sell == SellMethod.SIMPLE_SELL: data = simple_sell(quote_ctx, trade_ctx, code, sell_price - diff, qty, trade_env, ft.OrderType.SPECIAL_LIMIT) # depends on [control=['if'], data=[]] else: data = smart_sell(quote_ctx, trade_ctx, code, qty, trade_env, ft.OrderType.SPECIAL_LIMIT) if data is None: print('下单失败') EmailNotification.send_email(receiver, '下单失败', '股票代码{},数量{}'.format(code, volume)) sleep(rest_time) continue # depends on [control=['if'], data=[]] order_id = data.iloc[0]['order_id'] sleep(rest_time) while True: (ret, data) = trade_ctx.order_list_query(order_id=order_id, trd_env=trade_env) if ret != ft.RET_OK: sleep(rest_time) continue # depends on [control=['if'], data=[]] status = data.iloc[0]['order_status'] 
dealt_qty = int(data.iloc[0]['dealt_qty']) order_price = data.iloc[0]['price'] qty -= dealt_qty if status == ft.OrderStatus.FILLED_ALL: print('全部成交:股票代码{}, 成交总数{},价格{}'.format(code, dealt_qty, order_price)) EmailNotification.send_email(receiver, '全部成交', '股票代码{},成交总数{},价格{}'.format(code, dealt_qty, order_price)) break # depends on [control=['if'], data=[]] elif status == ft.OrderStatus.FILLED_PART: print('部分成交:股票代码{},成交总数{},价格{}'.format(code, dealt_qty, order_price)) EmailNotification.send_email(receiver, '部分成交', '股票代码{},成交总数{},价格{}'.format(code, dealt_qty, order_price)) break # depends on [control=['if'], data=[]] elif status == ft.OrderStatus.FAILED or status == ft.OrderStatus.SUBMIT_FAILED or status == ft.OrderStatus.CANCELLED_ALL or (status == ft.OrderStatus.DELETED): break # depends on [control=['if'], data=[]] else: trade_ctx.modify_order(ft.ModifyOrderOp.CANCEL, order_id, 0, 0) sleep(rest_time) continue # depends on [control=['while'], data=[]] if how_to_sell == SellMethod.SIMPLE_SELL: (ret, data) = quote_ctx.get_order_book(code) if ret != ft.RET_OK: raise Exception('获取order_book失败') # depends on [control=['if'], data=[]] sell_price = data['Bid'][0][0] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['qty']] # draw price and stop price_lst = trailing_stop_handler.price_lst plt.plot(np.arange(len(price_lst)), price_lst) stop_list = trailing_stop_handler.stop_lst plt.plot(np.arange(len(stop_list)), stop_list) break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] quote_ctx.close() trade_ctx.close()
def ave_qual(quals, qround=False, tab=errs_tab(128)): """Calculate average basecall quality of a read. Receive the integer quality scores of a read and return the average quality for that read First convert Phred scores to probabilities, calculate average error probability convert average back to Phred scale """ if quals: mq = -10 * log(sum([tab[q] for q in quals]) / len(quals), 10) if qround: return round(mq) else: return mq else: return None
def function[ave_qual, parameter[quals, qround, tab]]: constant[Calculate average basecall quality of a read. Receive the integer quality scores of a read and return the average quality for that read First convert Phred scores to probabilities, calculate average error probability convert average back to Phred scale ] if name[quals] begin[:] variable[mq] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b264a230> * call[name[log], parameter[binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da1b264bd30>]] / call[name[len], parameter[name[quals]]]], constant[10]]]] if name[qround] begin[:] return[call[name[round], parameter[name[mq]]]]
keyword[def] identifier[ave_qual] ( identifier[quals] , identifier[qround] = keyword[False] , identifier[tab] = identifier[errs_tab] ( literal[int] )): literal[string] keyword[if] identifier[quals] : identifier[mq] =- literal[int] * identifier[log] ( identifier[sum] ([ identifier[tab] [ identifier[q] ] keyword[for] identifier[q] keyword[in] identifier[quals] ])/ identifier[len] ( identifier[quals] ), literal[int] ) keyword[if] identifier[qround] : keyword[return] identifier[round] ( identifier[mq] ) keyword[else] : keyword[return] identifier[mq] keyword[else] : keyword[return] keyword[None]
def ave_qual(quals, qround=False, tab=errs_tab(128)): """Calculate average basecall quality of a read. Receive the integer quality scores of a read and return the average quality for that read First convert Phred scores to probabilities, calculate average error probability convert average back to Phred scale """ if quals: mq = -10 * log(sum([tab[q] for q in quals]) / len(quals), 10) if qround: return round(mq) # depends on [control=['if'], data=[]] else: return mq # depends on [control=['if'], data=[]] else: return None
def set( self, **kw): ''' Store keyword args to be written to output file. ''' self.args = kw log.debug( self.args )
def function[set, parameter[self]]: constant[ Store keyword args to be written to output file. ] name[self].args assign[=] name[kw] call[name[log].debug, parameter[name[self].args]]
keyword[def] identifier[set] ( identifier[self] ,** identifier[kw] ): literal[string] identifier[self] . identifier[args] = identifier[kw] identifier[log] . identifier[debug] ( identifier[self] . identifier[args] )
def set(self, **kw): """ Store keyword args to be written to output file. """ self.args = kw log.debug(self.args)
def _native_to_unicode(s): """Convert string to unicode (required in Python 2).""" if six.PY2: return s if isinstance(s, unicode) else s.decode("utf-8") else: return s
def function[_native_to_unicode, parameter[s]]: constant[Convert string to unicode (required in Python 2).] if name[six].PY2 begin[:] return[<ast.IfExp object at 0x7da2054a4f40>]
keyword[def] identifier[_native_to_unicode] ( identifier[s] ): literal[string] keyword[if] identifier[six] . identifier[PY2] : keyword[return] identifier[s] keyword[if] identifier[isinstance] ( identifier[s] , identifier[unicode] ) keyword[else] identifier[s] . identifier[decode] ( literal[string] ) keyword[else] : keyword[return] identifier[s]
def _native_to_unicode(s): """Convert string to unicode (required in Python 2).""" if six.PY2: return s if isinstance(s, unicode) else s.decode('utf-8') # depends on [control=['if'], data=[]] else: return s
def as_dict(self, ordered=False): """Returns the row as a dictionary, as ordered.""" items = zip(self.keys(), self.values()) return OrderedDict(items) if ordered else dict(items)
def function[as_dict, parameter[self, ordered]]: constant[Returns the row as a dictionary, as ordered.] variable[items] assign[=] call[name[zip], parameter[call[name[self].keys, parameter[]], call[name[self].values, parameter[]]]] return[<ast.IfExp object at 0x7da1b1c122c0>]
keyword[def] identifier[as_dict] ( identifier[self] , identifier[ordered] = keyword[False] ): literal[string] identifier[items] = identifier[zip] ( identifier[self] . identifier[keys] (), identifier[self] . identifier[values] ()) keyword[return] identifier[OrderedDict] ( identifier[items] ) keyword[if] identifier[ordered] keyword[else] identifier[dict] ( identifier[items] )
def as_dict(self, ordered=False): """Returns the row as a dictionary, as ordered.""" items = zip(self.keys(), self.values()) return OrderedDict(items) if ordered else dict(items)
def hcode(*content, sep=' '): """ Make mono-width text (HTML) :param content: :param sep: :return: """ return _md(quote_html(_join(*content, sep=sep)), symbols=MD_SYMBOLS[6])
def function[hcode, parameter[]]: constant[ Make mono-width text (HTML) :param content: :param sep: :return: ] return[call[name[_md], parameter[call[name[quote_html], parameter[call[name[_join], parameter[<ast.Starred object at 0x7da1b18fda20>]]]]]]]
keyword[def] identifier[hcode] (* identifier[content] , identifier[sep] = literal[string] ): literal[string] keyword[return] identifier[_md] ( identifier[quote_html] ( identifier[_join] (* identifier[content] , identifier[sep] = identifier[sep] )), identifier[symbols] = identifier[MD_SYMBOLS] [ literal[int] ])
def hcode(*content, sep=' '): """ Make mono-width text (HTML) :param content: :param sep: :return: """ return _md(quote_html(_join(*content, sep=sep)), symbols=MD_SYMBOLS[6])
def save_figure(self,event=None,panel=None): """ save figure image to file""" if panel is None: panel = self.current_panel self.panels[panel].save_figure(event=event)
def function[save_figure, parameter[self, event, panel]]: constant[ save figure image to file] if compare[name[panel] is constant[None]] begin[:] variable[panel] assign[=] name[self].current_panel call[call[name[self].panels][name[panel]].save_figure, parameter[]]
keyword[def] identifier[save_figure] ( identifier[self] , identifier[event] = keyword[None] , identifier[panel] = keyword[None] ): literal[string] keyword[if] identifier[panel] keyword[is] keyword[None] : identifier[panel] = identifier[self] . identifier[current_panel] identifier[self] . identifier[panels] [ identifier[panel] ]. identifier[save_figure] ( identifier[event] = identifier[event] )
def save_figure(self, event=None, panel=None): """ save figure image to file""" if panel is None: panel = self.current_panel # depends on [control=['if'], data=['panel']] self.panels[panel].save_figure(event=event)