code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def load_dframe(self, dframe):
    """
    Load the file contents into the supplied dataframe using the
    specified key and filetype.

    Each filename in the ``self.key`` column is passed to
    ``self.filetype.data``, which returns a dict of data values.  Every
    key found across the loaded dicts becomes a new dataframe column;
    keys that collide with an existing column are stored under a
    ``'_data'`` suffix instead (with a warning).
    """
    filename_series = dframe[self.key]
    loaded_data = filename_series.map(self.filetype.data)
    # Union of all data keys across every loaded record.
    keys = [list(el.keys()) for el in loaded_data.values]
    for key in set().union(*keys):
        key_exists = key in dframe.columns
        if key_exists:
            # BUG FIX: the implicitly-concatenated literals were missing a
            # separating space, producing "...avoidoverwriting...".
            self.warning("Appending '_data' suffix to data key %r to avoid "
                         "overwriting existing metadata with the same name." % key)
        suffix = '_data' if key_exists else ''
        # Records that lack this key are filled with NaN.
        dframe[key+suffix] = loaded_data.map(lambda x: x.get(key, np.nan))
    return dframe
def function[load_dframe, parameter[self, dframe]]: constant[ Load the file contents into the supplied dataframe using the specified key and filetype. ] variable[filename_series] assign[=] call[name[dframe]][name[self].key] variable[loaded_data] assign[=] call[name[filename_series].map, parameter[name[self].filetype.data]] variable[keys] assign[=] <ast.ListComp object at 0x7da1afe78520> for taget[name[key]] in starred[call[call[name[set], parameter[]].union, parameter[<ast.Starred object at 0x7da1afe79d80>]]] begin[:] variable[key_exists] assign[=] compare[name[key] in name[dframe].columns] if name[key_exists] begin[:] call[name[self].warning, parameter[binary_operation[constant[Appending '_data' suffix to data key %r to avoidoverwriting existing metadata with the same name.] <ast.Mod object at 0x7da2590d6920> name[key]]]] variable[suffix] assign[=] <ast.IfExp object at 0x7da1afe786a0> call[name[dframe]][binary_operation[name[key] + name[suffix]]] assign[=] call[name[loaded_data].map, parameter[<ast.Lambda object at 0x7da1afe7a0e0>]] return[name[dframe]]
keyword[def] identifier[load_dframe] ( identifier[self] , identifier[dframe] ): literal[string] identifier[filename_series] = identifier[dframe] [ identifier[self] . identifier[key] ] identifier[loaded_data] = identifier[filename_series] . identifier[map] ( identifier[self] . identifier[filetype] . identifier[data] ) identifier[keys] =[ identifier[list] ( identifier[el] . identifier[keys] ()) keyword[for] identifier[el] keyword[in] identifier[loaded_data] . identifier[values] ] keyword[for] identifier[key] keyword[in] identifier[set] (). identifier[union] (* identifier[keys] ): identifier[key_exists] = identifier[key] keyword[in] identifier[dframe] . identifier[columns] keyword[if] identifier[key_exists] : identifier[self] . identifier[warning] ( literal[string] literal[string] % identifier[key] ) identifier[suffix] = literal[string] keyword[if] identifier[key_exists] keyword[else] literal[string] identifier[dframe] [ identifier[key] + identifier[suffix] ]= identifier[loaded_data] . identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[get] ( identifier[key] , identifier[np] . identifier[nan] )) keyword[return] identifier[dframe]
def load_dframe(self, dframe): """ Load the file contents into the supplied dataframe using the specified key and filetype. """ filename_series = dframe[self.key] loaded_data = filename_series.map(self.filetype.data) keys = [list(el.keys()) for el in loaded_data.values] for key in set().union(*keys): key_exists = key in dframe.columns if key_exists: self.warning("Appending '_data' suffix to data key %r to avoidoverwriting existing metadata with the same name." % key) # depends on [control=['if'], data=[]] suffix = '_data' if key_exists else '' dframe[key + suffix] = loaded_data.map(lambda x: x.get(key, np.nan)) # depends on [control=['for'], data=['key']] return dframe
def align_pipe(fastq_file, pair_file, ref_file, names, align_dir, data):
    """Perform piped alignment of fastq input files, generating sorted output BAM.

    Runs novoalign, streaming its SAM output directly into a BAM
    conversion/sort command, and records the result in
    ``data["work_bam"]``.  Returns the updated ``data`` dict.
    """
    # Normalize a falsy pair file (single-end run) to an empty string so it
    # can be interpolated into the command line safely.
    pair_file = pair_file if pair_file else ""
    # back compatible -- older files were named with lane information, use sample name now
    out_file = os.path.join(align_dir, "{0}-sort.bam".format(names["lane"]))
    if not utils.file_exists(out_file):
        out_file = os.path.join(align_dir, "{0}-sort.bam".format(dd.get_sample_name(data)))
    # Split/streamed inputs (align_split configured, or SDF archives) go
    # through named pipes; alignprep may also redirect out_file for later
    # combination into final_file.
    if data.get("align_split") or fastq_file.endswith(".sdf"):
        final_file = out_file
        out_file, data = alignprep.setup_combine(final_file, data)
        fastq_file, pair_file = alignprep.split_namedpipe_cls(fastq_file, pair_file, data)
    else:
        final_file = None
    samtools = config_utils.get_program("samtools", data["config"])
    novoalign = config_utils.get_program("novoalign", data["config"])
    resources = config_utils.get_resources("novoalign", data["config"])
    num_cores = data["config"]["algorithm"].get("num_cores", 1)
    max_mem = resources.get("memory", "1G")
    extra_novo_args = " ".join(_novoalign_args_from_config(data["config"]))
    rg_info = get_rg_info(names)
    # Skip alignment when either the direct output or the eventual combined
    # output already exists.
    if not utils.file_exists(out_file) and (final_file is None or not utils.file_exists(final_file)):
        with tx_tmpdir(data) as work_dir:
            with postalign.tobam_cl(data, out_file, pair_file != "") as (tobam_cl, tx_out_file):
                tx_out_prefix = os.path.splitext(tx_out_file)[0]
                cmd = ("unset JAVA_HOME && "
                       "{novoalign} -o SAM '{rg_info}' -d {ref_file} -f {fastq_file} {pair_file} "
                       " -c {num_cores} {extra_novo_args} | ")
                # NOTE(review): samtools and max_mem are not referenced in
                # cmd itself; presumably tobam_cl contains placeholders that
                # format(**locals()) fills in -- confirm before removing them.
                cmd = (cmd + tobam_cl).format(**locals())
                do.run(cmd, "Novoalign: %s" % names["sample"], None,
                       [do.file_nonempty(tx_out_file),
                        do.file_reasonable_size(tx_out_file, fastq_file)])
    data["work_bam"] = out_file
    return data
def function[align_pipe, parameter[fastq_file, pair_file, ref_file, names, align_dir, data]]: constant[Perform piped alignment of fastq input files, generating sorted output BAM. ] variable[pair_file] assign[=] <ast.IfExp object at 0x7da20c6a9cf0> variable[out_file] assign[=] call[name[os].path.join, parameter[name[align_dir], call[constant[{0}-sort.bam].format, parameter[call[name[names]][constant[lane]]]]]] if <ast.UnaryOp object at 0x7da20c6abac0> begin[:] variable[out_file] assign[=] call[name[os].path.join, parameter[name[align_dir], call[constant[{0}-sort.bam].format, parameter[call[name[dd].get_sample_name, parameter[name[data]]]]]]] if <ast.BoolOp object at 0x7da20c6ab940> begin[:] variable[final_file] assign[=] name[out_file] <ast.Tuple object at 0x7da20c6a9c60> assign[=] call[name[alignprep].setup_combine, parameter[name[final_file], name[data]]] <ast.Tuple object at 0x7da20c6abbe0> assign[=] call[name[alignprep].split_namedpipe_cls, parameter[name[fastq_file], name[pair_file], name[data]]] variable[samtools] assign[=] call[name[config_utils].get_program, parameter[constant[samtools], call[name[data]][constant[config]]]] variable[novoalign] assign[=] call[name[config_utils].get_program, parameter[constant[novoalign], call[name[data]][constant[config]]]] variable[resources] assign[=] call[name[config_utils].get_resources, parameter[constant[novoalign], call[name[data]][constant[config]]]] variable[num_cores] assign[=] call[call[call[name[data]][constant[config]]][constant[algorithm]].get, parameter[constant[num_cores], constant[1]]] variable[max_mem] assign[=] call[name[resources].get, parameter[constant[memory], constant[1G]]] variable[extra_novo_args] assign[=] call[constant[ ].join, parameter[call[name[_novoalign_args_from_config], parameter[call[name[data]][constant[config]]]]]] variable[rg_info] assign[=] call[name[get_rg_info], parameter[name[names]]] if <ast.BoolOp object at 0x7da20c6aad70> begin[:] with call[name[tx_tmpdir], parameter[name[data]]] 
begin[:] with call[name[postalign].tobam_cl, parameter[name[data], name[out_file], compare[name[pair_file] not_equal[!=] constant[]]]] begin[:] variable[tx_out_prefix] assign[=] call[call[name[os].path.splitext, parameter[name[tx_out_file]]]][constant[0]] variable[cmd] assign[=] constant[unset JAVA_HOME && {novoalign} -o SAM '{rg_info}' -d {ref_file} -f {fastq_file} {pair_file} -c {num_cores} {extra_novo_args} | ] variable[cmd] assign[=] call[binary_operation[name[cmd] + name[tobam_cl]].format, parameter[]] call[name[do].run, parameter[name[cmd], binary_operation[constant[Novoalign: %s] <ast.Mod object at 0x7da2590d6920> call[name[names]][constant[sample]]], constant[None], list[[<ast.Call object at 0x7da20c6e7550>, <ast.Call object at 0x7da1b2347670>]]]] call[name[data]][constant[work_bam]] assign[=] name[out_file] return[name[data]]
keyword[def] identifier[align_pipe] ( identifier[fastq_file] , identifier[pair_file] , identifier[ref_file] , identifier[names] , identifier[align_dir] , identifier[data] ): literal[string] identifier[pair_file] = identifier[pair_file] keyword[if] identifier[pair_file] keyword[else] literal[string] identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[align_dir] , literal[string] . identifier[format] ( identifier[names] [ literal[string] ])) keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] ): identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[align_dir] , literal[string] . identifier[format] ( identifier[dd] . identifier[get_sample_name] ( identifier[data] ))) keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[or] identifier[fastq_file] . identifier[endswith] ( literal[string] ): identifier[final_file] = identifier[out_file] identifier[out_file] , identifier[data] = identifier[alignprep] . identifier[setup_combine] ( identifier[final_file] , identifier[data] ) identifier[fastq_file] , identifier[pair_file] = identifier[alignprep] . identifier[split_namedpipe_cls] ( identifier[fastq_file] , identifier[pair_file] , identifier[data] ) keyword[else] : identifier[final_file] = keyword[None] identifier[samtools] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ]) identifier[novoalign] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ]) identifier[resources] = identifier[config_utils] . identifier[get_resources] ( literal[string] , identifier[data] [ literal[string] ]) identifier[num_cores] = identifier[data] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] , literal[int] ) identifier[max_mem] = identifier[resources] . 
identifier[get] ( literal[string] , literal[string] ) identifier[extra_novo_args] = literal[string] . identifier[join] ( identifier[_novoalign_args_from_config] ( identifier[data] [ literal[string] ])) identifier[rg_info] = identifier[get_rg_info] ( identifier[names] ) keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[out_file] ) keyword[and] ( identifier[final_file] keyword[is] keyword[None] keyword[or] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[final_file] )): keyword[with] identifier[tx_tmpdir] ( identifier[data] ) keyword[as] identifier[work_dir] : keyword[with] identifier[postalign] . identifier[tobam_cl] ( identifier[data] , identifier[out_file] , identifier[pair_file] != literal[string] ) keyword[as] ( identifier[tobam_cl] , identifier[tx_out_file] ): identifier[tx_out_prefix] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[tx_out_file] )[ literal[int] ] identifier[cmd] =( literal[string] literal[string] literal[string] ) identifier[cmd] =( identifier[cmd] + identifier[tobam_cl] ). identifier[format] (** identifier[locals] ()) identifier[do] . identifier[run] ( identifier[cmd] , literal[string] % identifier[names] [ literal[string] ], keyword[None] , [ identifier[do] . identifier[file_nonempty] ( identifier[tx_out_file] ), identifier[do] . identifier[file_reasonable_size] ( identifier[tx_out_file] , identifier[fastq_file] )]) identifier[data] [ literal[string] ]= identifier[out_file] keyword[return] identifier[data]
def align_pipe(fastq_file, pair_file, ref_file, names, align_dir, data): """Perform piped alignment of fastq input files, generating sorted output BAM. """ pair_file = pair_file if pair_file else '' # back compatible -- older files were named with lane information, use sample name now out_file = os.path.join(align_dir, '{0}-sort.bam'.format(names['lane'])) if not utils.file_exists(out_file): out_file = os.path.join(align_dir, '{0}-sort.bam'.format(dd.get_sample_name(data))) # depends on [control=['if'], data=[]] if data.get('align_split') or fastq_file.endswith('.sdf'): final_file = out_file (out_file, data) = alignprep.setup_combine(final_file, data) (fastq_file, pair_file) = alignprep.split_namedpipe_cls(fastq_file, pair_file, data) # depends on [control=['if'], data=[]] else: final_file = None samtools = config_utils.get_program('samtools', data['config']) novoalign = config_utils.get_program('novoalign', data['config']) resources = config_utils.get_resources('novoalign', data['config']) num_cores = data['config']['algorithm'].get('num_cores', 1) max_mem = resources.get('memory', '1G') extra_novo_args = ' '.join(_novoalign_args_from_config(data['config'])) rg_info = get_rg_info(names) if not utils.file_exists(out_file) and (final_file is None or not utils.file_exists(final_file)): with tx_tmpdir(data) as work_dir: with postalign.tobam_cl(data, out_file, pair_file != '') as (tobam_cl, tx_out_file): tx_out_prefix = os.path.splitext(tx_out_file)[0] cmd = "unset JAVA_HOME && {novoalign} -o SAM '{rg_info}' -d {ref_file} -f {fastq_file} {pair_file} -c {num_cores} {extra_novo_args} | " cmd = (cmd + tobam_cl).format(**locals()) do.run(cmd, 'Novoalign: %s' % names['sample'], None, [do.file_nonempty(tx_out_file), do.file_reasonable_size(tx_out_file, fastq_file)]) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] data['work_bam'] = out_file return data
def load(cls, file_path):
    """
    Load the object from a JSON file (saved with :py:func:`Concise.save`).

    Arguments:
        file_path: Path to the JSON file to read.

    Returns:
        Concise: Loaded Concise object.
    """
    # convert back to numpy
    data = helper.read_json(file_path)
    # Use cls rather than hard-coding Concise so subclasses load as
    # instances of themselves; identical behavior for Concise itself.
    return cls.from_dict(data)
def function[load, parameter[cls, file_path]]: constant[ Load the object from a JSON file (saved with :py:func:`Concise.save`). Returns: Concise: Loaded Concise object. ] variable[data] assign[=] call[name[helper].read_json, parameter[name[file_path]]] return[call[name[Concise].from_dict, parameter[name[data]]]]
keyword[def] identifier[load] ( identifier[cls] , identifier[file_path] ): literal[string] identifier[data] = identifier[helper] . identifier[read_json] ( identifier[file_path] ) keyword[return] identifier[Concise] . identifier[from_dict] ( identifier[data] )
def load(cls, file_path): """ Load the object from a JSON file (saved with :py:func:`Concise.save`). Returns: Concise: Loaded Concise object. """ # convert back to numpy data = helper.read_json(file_path) return Concise.from_dict(data)
def storage(self):
    """
    Instantiates and returns a storage instance.

    The concrete backend is selected by ``self.backend``: 'redis' and
    'dynamodb' map to their dedicated backends; anything else falls back
    to the in-memory dict backend.
    """
    backend_name = self.backend
    if backend_name == 'redis':
        return RedisBackend(self.prefix, self.secondary_indexes)
    elif backend_name == 'dynamodb':
        return DynamoDBBackend(
            self.prefix, self.key, self.sort_key, self.secondary_indexes)
    else:
        return DictBackend(self.prefix, self.secondary_indexes)
def function[storage, parameter[self]]: constant[ Instantiates and returns a storage instance ] if compare[name[self].backend equal[==] constant[redis]] begin[:] return[call[name[RedisBackend], parameter[name[self].prefix, name[self].secondary_indexes]]] if compare[name[self].backend equal[==] constant[dynamodb]] begin[:] return[call[name[DynamoDBBackend], parameter[name[self].prefix, name[self].key, name[self].sort_key, name[self].secondary_indexes]]] return[call[name[DictBackend], parameter[name[self].prefix, name[self].secondary_indexes]]]
keyword[def] identifier[storage] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[backend] == literal[string] : keyword[return] identifier[RedisBackend] ( identifier[self] . identifier[prefix] , identifier[self] . identifier[secondary_indexes] ) keyword[if] identifier[self] . identifier[backend] == literal[string] : keyword[return] identifier[DynamoDBBackend] ( identifier[self] . identifier[prefix] , identifier[self] . identifier[key] , identifier[self] . identifier[sort_key] , identifier[self] . identifier[secondary_indexes] ) keyword[return] identifier[DictBackend] ( identifier[self] . identifier[prefix] , identifier[self] . identifier[secondary_indexes] )
def storage(self): """ Instantiates and returns a storage instance """ if self.backend == 'redis': return RedisBackend(self.prefix, self.secondary_indexes) # depends on [control=['if'], data=[]] if self.backend == 'dynamodb': return DynamoDBBackend(self.prefix, self.key, self.sort_key, self.secondary_indexes) # depends on [control=['if'], data=[]] return DictBackend(self.prefix, self.secondary_indexes)
def error_page(
    participant=None,
    error_text=None,
    compensate=True,
    error_type="default",
    request_data="",
):
    """Render HTML for error page.

    Identifying fields (hit/assignment/worker/participant ids) come from
    the ``participant`` object when given, otherwise from the posted form
    data.  Responds with HTTP status 500.
    """
    config = _config()
    if error_text is None:
        error_text = """There has been an error and so you are unable to
            continue, sorry!"""
    if participant is not None:
        # BUG FIX: trailing commas previously made hit_id and assignment_id
        # accidental 1-tuples (inconsistent with worker_id), so the template
        # rendered them as "('...',)".
        hit_id = participant.hit_id
        assignment_id = participant.assignment_id
        worker_id = participant.worker_id
        participant_id = participant.id
    else:
        hit_id = request.form.get("hit_id", "")
        assignment_id = request.form.get("assignment_id", "")
        worker_id = request.form.get("worker_id", "")
        participant_id = request.form.get("participant_id", None)
        if participant_id:
            # Form values arrive as strings; coerce to int, discarding junk.
            try:
                participant_id = int(participant_id)
            except (ValueError, TypeError):
                participant_id = None
    return make_response(
        render_template(
            "error.html",
            error_text=error_text,
            compensate=compensate,
            contact_address=config.get("contact_email_on_error"),
            error_type=error_type,
            hit_id=hit_id,
            assignment_id=assignment_id,
            worker_id=worker_id,
            request_data=request_data,
            participant_id=participant_id,
        ),
        500,
    )
def function[error_page, parameter[participant, error_text, compensate, error_type, request_data]]: constant[Render HTML for error page.] variable[config] assign[=] call[name[_config], parameter[]] if compare[name[error_text] is constant[None]] begin[:] variable[error_text] assign[=] constant[There has been an error and so you are unable to continue, sorry!] if compare[name[participant] is_not constant[None]] begin[:] variable[hit_id] assign[=] tuple[[<ast.Attribute object at 0x7da1b0309360>]] variable[assignment_id] assign[=] tuple[[<ast.Attribute object at 0x7da1b0309c30>]] variable[worker_id] assign[=] name[participant].worker_id variable[participant_id] assign[=] name[participant].id if name[participant_id] begin[:] <ast.Try object at 0x7da1b03098d0> return[call[name[make_response], parameter[call[name[render_template], parameter[constant[error.html]]], constant[500]]]]
keyword[def] identifier[error_page] ( identifier[participant] = keyword[None] , identifier[error_text] = keyword[None] , identifier[compensate] = keyword[True] , identifier[error_type] = literal[string] , identifier[request_data] = literal[string] , ): literal[string] identifier[config] = identifier[_config] () keyword[if] identifier[error_text] keyword[is] keyword[None] : identifier[error_text] = literal[string] keyword[if] identifier[participant] keyword[is] keyword[not] keyword[None] : identifier[hit_id] =( identifier[participant] . identifier[hit_id] ,) identifier[assignment_id] =( identifier[participant] . identifier[assignment_id] ,) identifier[worker_id] = identifier[participant] . identifier[worker_id] identifier[participant_id] = identifier[participant] . identifier[id] keyword[else] : identifier[hit_id] = identifier[request] . identifier[form] . identifier[get] ( literal[string] , literal[string] ) identifier[assignment_id] = identifier[request] . identifier[form] . identifier[get] ( literal[string] , literal[string] ) identifier[worker_id] = identifier[request] . identifier[form] . identifier[get] ( literal[string] , literal[string] ) identifier[participant_id] = identifier[request] . identifier[form] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[participant_id] : keyword[try] : identifier[participant_id] = identifier[int] ( identifier[participant_id] ) keyword[except] ( identifier[ValueError] , identifier[TypeError] ): identifier[participant_id] = keyword[None] keyword[return] identifier[make_response] ( identifier[render_template] ( literal[string] , identifier[error_text] = identifier[error_text] , identifier[compensate] = identifier[compensate] , identifier[contact_address] = identifier[config] . 
identifier[get] ( literal[string] ), identifier[error_type] = identifier[error_type] , identifier[hit_id] = identifier[hit_id] , identifier[assignment_id] = identifier[assignment_id] , identifier[worker_id] = identifier[worker_id] , identifier[request_data] = identifier[request_data] , identifier[participant_id] = identifier[participant_id] , ), literal[int] , )
def error_page(participant=None, error_text=None, compensate=True, error_type='default', request_data=''): """Render HTML for error page.""" config = _config() if error_text is None: error_text = 'There has been an error and so you are unable to\n continue, sorry!' # depends on [control=['if'], data=['error_text']] if participant is not None: hit_id = (participant.hit_id,) assignment_id = (participant.assignment_id,) worker_id = participant.worker_id participant_id = participant.id # depends on [control=['if'], data=['participant']] else: hit_id = request.form.get('hit_id', '') assignment_id = request.form.get('assignment_id', '') worker_id = request.form.get('worker_id', '') participant_id = request.form.get('participant_id', None) if participant_id: try: participant_id = int(participant_id) # depends on [control=['try'], data=[]] except (ValueError, TypeError): participant_id = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return make_response(render_template('error.html', error_text=error_text, compensate=compensate, contact_address=config.get('contact_email_on_error'), error_type=error_type, hit_id=hit_id, assignment_id=assignment_id, worker_id=worker_id, request_data=request_data, participant_id=participant_id), 500)
def get(self, varname, idx=0, units=None):
    '''Return the value of variable *varname*, optionally at array index
    *idx* and converted to *units*.

    Raises fgFDMError if the variable is unknown or idx is past the end
    of the variable's array.
    '''
    # Idiomatic membership test ("varname not in" over "not varname in").
    if varname not in self.mapping.vars:
        raise fgFDMError('Unknown variable %s' % varname)
    # Hoist the repeated mapping lookup for clarity.
    var = self.mapping.vars[varname]
    if idx >= var.arraylength:
        raise fgFDMError('index of %s beyond end of array idx=%u arraylength=%u' % (
            varname, idx, var.arraylength))
    value = self.values[var.index + idx]
    if units:
        value = self.convert(value, var.units, units)
    return value
def function[get, parameter[self, varname, idx, units]]: constant[get a variable value] if <ast.UnaryOp object at 0x7da18ede7850> begin[:] <ast.Raise object at 0x7da18ede76a0> if compare[name[idx] greater_or_equal[>=] call[name[self].mapping.vars][name[varname]].arraylength] begin[:] <ast.Raise object at 0x7da18ede55d0> variable[value] assign[=] call[name[self].values][binary_operation[call[name[self].mapping.vars][name[varname]].index + name[idx]]] if name[units] begin[:] variable[value] assign[=] call[name[self].convert, parameter[name[value], call[name[self].mapping.vars][name[varname]].units, name[units]]] return[name[value]]
keyword[def] identifier[get] ( identifier[self] , identifier[varname] , identifier[idx] = literal[int] , identifier[units] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[varname] keyword[in] identifier[self] . identifier[mapping] . identifier[vars] : keyword[raise] identifier[fgFDMError] ( literal[string] % identifier[varname] ) keyword[if] identifier[idx] >= identifier[self] . identifier[mapping] . identifier[vars] [ identifier[varname] ]. identifier[arraylength] : keyword[raise] identifier[fgFDMError] ( literal[string] %( identifier[varname] , identifier[idx] , identifier[self] . identifier[mapping] . identifier[vars] [ identifier[varname] ]. identifier[arraylength] )) identifier[value] = identifier[self] . identifier[values] [ identifier[self] . identifier[mapping] . identifier[vars] [ identifier[varname] ]. identifier[index] + identifier[idx] ] keyword[if] identifier[units] : identifier[value] = identifier[self] . identifier[convert] ( identifier[value] , identifier[self] . identifier[mapping] . identifier[vars] [ identifier[varname] ]. identifier[units] , identifier[units] ) keyword[return] identifier[value]
def get(self, varname, idx=0, units=None): """get a variable value""" if not varname in self.mapping.vars: raise fgFDMError('Unknown variable %s' % varname) # depends on [control=['if'], data=[]] if idx >= self.mapping.vars[varname].arraylength: raise fgFDMError('index of %s beyond end of array idx=%u arraylength=%u' % (varname, idx, self.mapping.vars[varname].arraylength)) # depends on [control=['if'], data=['idx']] value = self.values[self.mapping.vars[varname].index + idx] if units: value = self.convert(value, self.mapping.vars[varname].units, units) # depends on [control=['if'], data=[]] return value
def delete_prefix(self, key_prefix):
    """Delete a range of keys with a prefix in etcd."""
    # The exclusive upper bound of the range is the prefix with its last
    # byte incremented, i.e. the first key that no longer shares the prefix.
    range_end = _encode(_increment_last_byte(key_prefix))
    return self.delete(key_prefix, range_end=range_end)
def function[delete_prefix, parameter[self, key_prefix]]: constant[Delete a range of keys with a prefix in etcd.] return[call[name[self].delete, parameter[name[key_prefix]]]]
keyword[def] identifier[delete_prefix] ( identifier[self] , identifier[key_prefix] ): literal[string] keyword[return] identifier[self] . identifier[delete] ( identifier[key_prefix] , identifier[range_end] = identifier[_encode] ( identifier[_increment_last_byte] ( identifier[key_prefix] )))
def delete_prefix(self, key_prefix): """Delete a range of keys with a prefix in etcd.""" return self.delete(key_prefix, range_end=_encode(_increment_last_byte(key_prefix)))
def _any_match(matchers, record): """return the bool of whether `record` starts with any item in `matchers`""" def record_matches_key(key): return record == key or record.startswith(key + '.') return anyp(bool, map(record_matches_key, matchers))
def function[_any_match, parameter[matchers, record]]: constant[return the bool of whether `record` starts with any item in `matchers`] def function[record_matches_key, parameter[key]]: return[<ast.BoolOp object at 0x7da2054a5db0>] return[call[name[anyp], parameter[name[bool], call[name[map], parameter[name[record_matches_key], name[matchers]]]]]]
keyword[def] identifier[_any_match] ( identifier[matchers] , identifier[record] ): literal[string] keyword[def] identifier[record_matches_key] ( identifier[key] ): keyword[return] identifier[record] == identifier[key] keyword[or] identifier[record] . identifier[startswith] ( identifier[key] + literal[string] ) keyword[return] identifier[anyp] ( identifier[bool] , identifier[map] ( identifier[record_matches_key] , identifier[matchers] ))
def _any_match(matchers, record): """return the bool of whether `record` starts with any item in `matchers`""" def record_matches_key(key): return record == key or record.startswith(key + '.') return anyp(bool, map(record_matches_key, matchers))
def __view_remove_actions(self):
    """
    Removes the View actions.

    Each action is detached from the view widget and then unregistered
    from the engine's actions manager.
    """
    add_project_action = "Actions|Umbra|Components|addons.projects_explorer|Add Project ..."
    remove_project_action = "Actions|Umbra|Components|addons.projects_explorer|Remove Project"
    add_new_file_action = "Actions|Umbra|Components|addons.projects_explorer|Add New File ..."
    add_new_directory_action = "Actions|Umbra|Components|addons.projects_explorer|Add New Directory ..."
    rename_action = "Actions|Umbra|Components|addons.projects_explorer|Rename ..."
    # copy_action = "Actions|Umbra|Components|addons.projects_explorer|Copy ..."
    # move_action = "Actions|Umbra|Components|addons.projects_explorer|Move ..."
    delete_action = "Actions|Umbra|Components|addons.projects_explorer|Delete ..."
    # NOTE(review): find_in_files_action is defined but absent from the
    # removal tuple below, so that action is never removed/unregistered --
    # confirm whether this is intentional or an oversight.
    find_in_files_action = "Actions|Umbra|Components|addons.projects_explorer|Find In Files ..."
    output_selected_path_action = "Actions|Umbra|Components|addons.projects_explorer|Output Selected Path"
    for action in (add_project_action,
                   remove_project_action,
                   add_new_file_action,
                   add_new_directory_action,
                   rename_action,
                   # copy_action,
                   # move_action,
                   delete_action,
                   output_selected_path_action):
        self.__view.removeAction(self.__engine.actions_manager.get_action(action))
        self.__engine.actions_manager.unregister_action(action)
def function[__view_remove_actions, parameter[self]]: constant[ Removes the View actions. ] variable[add_project_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Add Project ...] variable[remove_project_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Remove Project] variable[add_new_file_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Add New File ...] variable[add_new_directory_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Add New Directory ...] variable[rename_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Rename ...] variable[delete_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Delete ...] variable[find_in_files_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Find In Files ...] variable[output_selected_path_action] assign[=] constant[Actions|Umbra|Components|addons.projects_explorer|Output Selected Path] for taget[name[action]] in starred[tuple[[<ast.Name object at 0x7da1b0aba800>, <ast.Name object at 0x7da1b0ab95a0>, <ast.Name object at 0x7da1b0ab92a0>, <ast.Name object at 0x7da1b0abb8b0>, <ast.Name object at 0x7da1b0ab8df0>, <ast.Name object at 0x7da1b0ab9990>, <ast.Name object at 0x7da1b0ab9a80>]]] begin[:] call[name[self].__view.removeAction, parameter[call[name[self].__engine.actions_manager.get_action, parameter[name[action]]]]] call[name[self].__engine.actions_manager.unregister_action, parameter[name[action]]]
keyword[def] identifier[__view_remove_actions] ( identifier[self] ): literal[string] identifier[add_project_action] = literal[string] identifier[remove_project_action] = literal[string] identifier[add_new_file_action] = literal[string] identifier[add_new_directory_action] = literal[string] identifier[rename_action] = literal[string] identifier[delete_action] = literal[string] identifier[find_in_files_action] = literal[string] identifier[output_selected_path_action] = literal[string] keyword[for] identifier[action] keyword[in] ( identifier[add_project_action] , identifier[remove_project_action] , identifier[add_new_file_action] , identifier[add_new_directory_action] , identifier[rename_action] , identifier[delete_action] , identifier[output_selected_path_action] ): identifier[self] . identifier[__view] . identifier[removeAction] ( identifier[self] . identifier[__engine] . identifier[actions_manager] . identifier[get_action] ( identifier[action] )) identifier[self] . identifier[__engine] . identifier[actions_manager] . identifier[unregister_action] ( identifier[action] )
def __view_remove_actions(self): """ Removes the View actions. """ add_project_action = 'Actions|Umbra|Components|addons.projects_explorer|Add Project ...' remove_project_action = 'Actions|Umbra|Components|addons.projects_explorer|Remove Project' add_new_file_action = 'Actions|Umbra|Components|addons.projects_explorer|Add New File ...' add_new_directory_action = 'Actions|Umbra|Components|addons.projects_explorer|Add New Directory ...' rename_action = 'Actions|Umbra|Components|addons.projects_explorer|Rename ...' # copy_action = "Actions|Umbra|Components|addons.projects_explorer|Copy ..." # move_action = "Actions|Umbra|Components|addons.projects_explorer|Move ..." delete_action = 'Actions|Umbra|Components|addons.projects_explorer|Delete ...' find_in_files_action = 'Actions|Umbra|Components|addons.projects_explorer|Find In Files ...' output_selected_path_action = 'Actions|Umbra|Components|addons.projects_explorer|Output Selected Path' for action in (add_project_action, remove_project_action, add_new_file_action, add_new_directory_action, rename_action, delete_action, output_selected_path_action): # copy_action, # move_action, self.__view.removeAction(self.__engine.actions_manager.get_action(action)) self.__engine.actions_manager.unregister_action(action) # depends on [control=['for'], data=['action']]
def simulated_fit_iter( self, n=None, pexact=None, add_priornoise=False, bootstrap=None, **kargs ): """ Iterator that returns simulation copies of a fit. Fit reliability is tested using simulated data which replaces the mean values in ``self.y`` with random numbers drawn from a distribution whose mean equals ``self.fcn(pexact)`` and whose covariance matrix is the same as ``self.y``'s. Simulated data is very similar to the original fit data, ``self.y``, but corresponds to a world where the correct values for the parameters (*i.e.*, averaged over many simulated data sets) are given by ``pexact``. ``pexact`` is usually taken equal to ``fit.pmean``. Each iteration of the iterator creates new simulated data, with different random numbers, and fits it, returning the the :class:`lsqfit.nonlinear_fit` that results. The simulated data has the same covariance matrix as ``fit.y``. Typical usage is:: ... fit = nonlinear_fit(...) ... for sfit in fit.simulated_fit_iter(n=3): ... verify that sfit has a good chi**2 ... ... verify that sfit.p agrees with pexact=fit.pmean within errors ... Only a few iterations are needed to get a sense of the fit's reliability since we know the correct answer in each case. The simulated fit's output results should agree with ``pexact`` (``=fit.pmean`` here) within the simulated fit's errors. Setting parameter ``add_priornoise=True`` varies the means of the priors as well as the means of the data. This option is useful for testing goodness of fit because with it ``chi**2/N`` should be ``1 ± sqrt(2/N)``, where ``N`` is the number of degrees of freedom. (``chi**2/N`` can be significantly smaller than one without added noise in prior means.) Simulated fits can also be used to estimate biases in the fit's output parameters or functions of them, should non-Gaussian behavior arise. This is possible, again, because we know the correct value for every parameter before we do the fit. Again only a few iterations may be needed for reliable estimates. 
Args: n (int or ``None``): Maximum number of iterations (equals infinity if ``None``). pexact (``None`` or array/dict of numbers): Fit-parameter values for the underlying distribution used to generate simulated data; replaced by ``self.pmean`` if is ``None`` (default). add_priornoise (bool): Vary prior means if ``True``; otherwise vary only the means in ``self.y`` (default). kargs: Dictionary containing override values for fit parameters. Returns: An iterator that returns :class:`lsqfit.nonlinear_fit`\s for different simulated data. """ pexact = self.pmean if pexact is None else pexact # bootstrap is old name for add_priornoise; keep for legacy code if bootstrap is not None: add_priornoise = bootstrap # Note: don't need svdcut since these are built into the data_iter fargs = dict( fcn=self.fcn, svdcut=None, p0=pexact, fitter=self.fitter, ) fargs.update(self.fitterargs) fargs.update(kargs) for ysim, priorsim in self.simulated_data_iter( n, pexact=pexact, add_priornoise=add_priornoise ): fit = nonlinear_fit( data=(self.x, ysim), prior=priorsim, _fdata=self.fdata, **fargs ) fit.pexact = pexact yield fit
def function[simulated_fit_iter, parameter[self, n, pexact, add_priornoise, bootstrap]]: constant[ Iterator that returns simulation copies of a fit. Fit reliability is tested using simulated data which replaces the mean values in ``self.y`` with random numbers drawn from a distribution whose mean equals ``self.fcn(pexact)`` and whose covariance matrix is the same as ``self.y``'s. Simulated data is very similar to the original fit data, ``self.y``, but corresponds to a world where the correct values for the parameters (*i.e.*, averaged over many simulated data sets) are given by ``pexact``. ``pexact`` is usually taken equal to ``fit.pmean``. Each iteration of the iterator creates new simulated data, with different random numbers, and fits it, returning the the :class:`lsqfit.nonlinear_fit` that results. The simulated data has the same covariance matrix as ``fit.y``. Typical usage is:: ... fit = nonlinear_fit(...) ... for sfit in fit.simulated_fit_iter(n=3): ... verify that sfit has a good chi**2 ... ... verify that sfit.p agrees with pexact=fit.pmean within errors ... Only a few iterations are needed to get a sense of the fit's reliability since we know the correct answer in each case. The simulated fit's output results should agree with ``pexact`` (``=fit.pmean`` here) within the simulated fit's errors. Setting parameter ``add_priornoise=True`` varies the means of the priors as well as the means of the data. This option is useful for testing goodness of fit because with it ``chi**2/N`` should be ``1 ± sqrt(2/N)``, where ``N`` is the number of degrees of freedom. (``chi**2/N`` can be significantly smaller than one without added noise in prior means.) Simulated fits can also be used to estimate biases in the fit's output parameters or functions of them, should non-Gaussian behavior arise. This is possible, again, because we know the correct value for every parameter before we do the fit. Again only a few iterations may be needed for reliable estimates. 
Args: n (int or ``None``): Maximum number of iterations (equals infinity if ``None``). pexact (``None`` or array/dict of numbers): Fit-parameter values for the underlying distribution used to generate simulated data; replaced by ``self.pmean`` if is ``None`` (default). add_priornoise (bool): Vary prior means if ``True``; otherwise vary only the means in ``self.y`` (default). kargs: Dictionary containing override values for fit parameters. Returns: An iterator that returns :class:`lsqfit.nonlinear_fit`\s for different simulated data. ] variable[pexact] assign[=] <ast.IfExp object at 0x7da2041d82e0> if compare[name[bootstrap] is_not constant[None]] begin[:] variable[add_priornoise] assign[=] name[bootstrap] variable[fargs] assign[=] call[name[dict], parameter[]] call[name[fargs].update, parameter[name[self].fitterargs]] call[name[fargs].update, parameter[name[kargs]]] for taget[tuple[[<ast.Name object at 0x7da18fe92980>, <ast.Name object at 0x7da18fe93ca0>]]] in starred[call[name[self].simulated_data_iter, parameter[name[n]]]] begin[:] variable[fit] assign[=] call[name[nonlinear_fit], parameter[]] name[fit].pexact assign[=] name[pexact] <ast.Yield object at 0x7da18fe90910>
keyword[def] identifier[simulated_fit_iter] ( identifier[self] , identifier[n] = keyword[None] , identifier[pexact] = keyword[None] , identifier[add_priornoise] = keyword[False] , identifier[bootstrap] = keyword[None] ,** identifier[kargs] ): literal[string] identifier[pexact] = identifier[self] . identifier[pmean] keyword[if] identifier[pexact] keyword[is] keyword[None] keyword[else] identifier[pexact] keyword[if] identifier[bootstrap] keyword[is] keyword[not] keyword[None] : identifier[add_priornoise] = identifier[bootstrap] identifier[fargs] = identifier[dict] ( identifier[fcn] = identifier[self] . identifier[fcn] , identifier[svdcut] = keyword[None] , identifier[p0] = identifier[pexact] , identifier[fitter] = identifier[self] . identifier[fitter] , ) identifier[fargs] . identifier[update] ( identifier[self] . identifier[fitterargs] ) identifier[fargs] . identifier[update] ( identifier[kargs] ) keyword[for] identifier[ysim] , identifier[priorsim] keyword[in] identifier[self] . identifier[simulated_data_iter] ( identifier[n] , identifier[pexact] = identifier[pexact] , identifier[add_priornoise] = identifier[add_priornoise] ): identifier[fit] = identifier[nonlinear_fit] ( identifier[data] =( identifier[self] . identifier[x] , identifier[ysim] ), identifier[prior] = identifier[priorsim] , identifier[_fdata] = identifier[self] . identifier[fdata] , ** identifier[fargs] ) identifier[fit] . identifier[pexact] = identifier[pexact] keyword[yield] identifier[fit]
def simulated_fit_iter(self, n=None, pexact=None, add_priornoise=False, bootstrap=None, **kargs): """ Iterator that returns simulation copies of a fit. Fit reliability is tested using simulated data which replaces the mean values in ``self.y`` with random numbers drawn from a distribution whose mean equals ``self.fcn(pexact)`` and whose covariance matrix is the same as ``self.y``'s. Simulated data is very similar to the original fit data, ``self.y``, but corresponds to a world where the correct values for the parameters (*i.e.*, averaged over many simulated data sets) are given by ``pexact``. ``pexact`` is usually taken equal to ``fit.pmean``. Each iteration of the iterator creates new simulated data, with different random numbers, and fits it, returning the the :class:`lsqfit.nonlinear_fit` that results. The simulated data has the same covariance matrix as ``fit.y``. Typical usage is:: ... fit = nonlinear_fit(...) ... for sfit in fit.simulated_fit_iter(n=3): ... verify that sfit has a good chi**2 ... ... verify that sfit.p agrees with pexact=fit.pmean within errors ... Only a few iterations are needed to get a sense of the fit's reliability since we know the correct answer in each case. The simulated fit's output results should agree with ``pexact`` (``=fit.pmean`` here) within the simulated fit's errors. Setting parameter ``add_priornoise=True`` varies the means of the priors as well as the means of the data. This option is useful for testing goodness of fit because with it ``chi**2/N`` should be ``1 ± sqrt(2/N)``, where ``N`` is the number of degrees of freedom. (``chi**2/N`` can be significantly smaller than one without added noise in prior means.) Simulated fits can also be used to estimate biases in the fit's output parameters or functions of them, should non-Gaussian behavior arise. This is possible, again, because we know the correct value for every parameter before we do the fit. Again only a few iterations may be needed for reliable estimates. 
Args: n (int or ``None``): Maximum number of iterations (equals infinity if ``None``). pexact (``None`` or array/dict of numbers): Fit-parameter values for the underlying distribution used to generate simulated data; replaced by ``self.pmean`` if is ``None`` (default). add_priornoise (bool): Vary prior means if ``True``; otherwise vary only the means in ``self.y`` (default). kargs: Dictionary containing override values for fit parameters. Returns: An iterator that returns :class:`lsqfit.nonlinear_fit`\\s for different simulated data. """ pexact = self.pmean if pexact is None else pexact # bootstrap is old name for add_priornoise; keep for legacy code if bootstrap is not None: add_priornoise = bootstrap # depends on [control=['if'], data=['bootstrap']] # Note: don't need svdcut since these are built into the data_iter fargs = dict(fcn=self.fcn, svdcut=None, p0=pexact, fitter=self.fitter) fargs.update(self.fitterargs) fargs.update(kargs) for (ysim, priorsim) in self.simulated_data_iter(n, pexact=pexact, add_priornoise=add_priornoise): fit = nonlinear_fit(data=(self.x, ysim), prior=priorsim, _fdata=self.fdata, **fargs) fit.pexact = pexact yield fit # depends on [control=['for'], data=[]]
def insertBefore(self, node, refNode): """Insert node as a child of the current node, before refNode in the list of child nodes. Raises ValueError if refNode is not a child of the current node""" offset = self.xml_children.index(refNode) self.xml_insert(node, offset)
def function[insertBefore, parameter[self, node, refNode]]: constant[Insert node as a child of the current node, before refNode in the list of child nodes. Raises ValueError if refNode is not a child of the current node] variable[offset] assign[=] call[name[self].xml_children.index, parameter[name[refNode]]] call[name[self].xml_insert, parameter[name[node], name[offset]]]
keyword[def] identifier[insertBefore] ( identifier[self] , identifier[node] , identifier[refNode] ): literal[string] identifier[offset] = identifier[self] . identifier[xml_children] . identifier[index] ( identifier[refNode] ) identifier[self] . identifier[xml_insert] ( identifier[node] , identifier[offset] )
def insertBefore(self, node, refNode): """Insert node as a child of the current node, before refNode in the list of child nodes. Raises ValueError if refNode is not a child of the current node""" offset = self.xml_children.index(refNode) self.xml_insert(node, offset)
def set_log_level(debug, verbose): """ Function for setting the logging level. :param debug: This boolean field is the logging level. :param verbose: This boolean field is the logging level. """ if debug: logging.basicConfig(level=logging.DEBUG) elif verbose: logging.basicConfig(level=logging.INFO)
def function[set_log_level, parameter[debug, verbose]]: constant[ Function for setting the logging level. :param debug: This boolean field is the logging level. :param verbose: This boolean field is the logging level. ] if name[debug] begin[:] call[name[logging].basicConfig, parameter[]]
keyword[def] identifier[set_log_level] ( identifier[debug] , identifier[verbose] ): literal[string] keyword[if] identifier[debug] : identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[DEBUG] ) keyword[elif] identifier[verbose] : identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[INFO] )
def set_log_level(debug, verbose): """ Function for setting the logging level. :param debug: This boolean field is the logging level. :param verbose: This boolean field is the logging level. """ if debug: logging.basicConfig(level=logging.DEBUG) # depends on [control=['if'], data=[]] elif verbose: logging.basicConfig(level=logging.INFO) # depends on [control=['if'], data=[]]
def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state, nesting_state, error): """Checks rules from the 'C++ language rules' section of cppguide.html. Some of these rules are hard to test (function overloading, using uint32 inappropriately), but we do the best we can. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. file_extension: The extension (without the dot) of the filename. include_state: An _IncludeState instance in which the headers are inserted. nesting_state: A NestingState instance which maintains information about the current stack of nested blocks being parsed. error: The function to call with any errors found. """ # If the line is empty or consists of entirely a comment, no need to # check it. line = clean_lines.elided[linenum] if not line: return match = _RE_PATTERN_INCLUDE.search(line) if match: CheckIncludeLine(filename, clean_lines, linenum, include_state, error) return # Reset include state across preprocessor directives. This is meant # to silence warnings for conditional includes. match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line) if match: include_state.ResetSection(match.group(1)) # Perform other checks now that we are sure that this is not an include line CheckCasts(filename, clean_lines, linenum, error) CheckGlobalStatic(filename, clean_lines, linenum, error) CheckPrintf(filename, clean_lines, linenum, error) if file_extension in GetHeaderExtensions(): # TODO(unknown): check that 1-arg constructors are explicit. # How to tell it's a constructor? # (handled in CheckForNonStandardConstructs for now) # TODO(unknown): check that classes declare or disable copy/assign # (level 1 error) pass # Check if people are using the verboten C basic types. The only exception # we regularly allow is "unsigned short port" for port. 
if Search(r'\bshort port\b', line): if not Search(r'\bunsigned short port\b', line): error(filename, linenum, 'runtime/int', 4, 'Use "unsigned short" for ports, not "short"') else: match = Search(r'\b(short|long(?! +double)|long long)\b', line) if match: error(filename, linenum, 'runtime/int', 4, 'Use int16/int64/etc, rather than the C type %s' % match.group(1)) # Check if some verboten operator overloading is going on # TODO(unknown): catch out-of-line unary operator&: # class X {}; # int operator&(const X& x) { return 42; } // unary operator& # The trick is it's hard to tell apart from binary operator&: # class Y { int operator&(const Y& x) { return 23; } }; // binary operator& if Search(r'\boperator\s*&\s*\(\s*\)', line): error(filename, linenum, 'runtime/operator', 4, 'Unary operator& is dangerous. Do not use it.') # Check for suspicious usage of "if" like # } if (a == b) { if Search(r'\}\s*if\s*\(', line): error(filename, linenum, 'readability/braces', 4, 'Did you mean "else if"? If not, start a new line for "if".') # Check for potential format string bugs like printf(foo). # We constrain the pattern not to pick things like DocidForPrintf(foo). # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str()) # TODO(unknown): Catch the following case. Need to change the calling # convention of the whole function to process multiple line to handle it. # printf( # boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line); printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(') if printf_args: match = Match(r'([\w.\->()]+)$', printf_args) if match and match.group(1) != '__VA_ARGS__': function_name = re.search(r'\b((?:string)?printf)\s*\(', line, re.I).group(1) error(filename, linenum, 'runtime/printf', 4, 'Potential format string bug. Do %s("%%s", %s) instead.' % (function_name, match.group(1))) # Check for potential memset bugs like memset(buf, sizeof(buf), 0). 
match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line) if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)): error(filename, linenum, 'runtime/memset', 4, 'Did you mean "memset(%s, 0, %s)"?' % (match.group(1), match.group(2))) if Search(r'\busing namespace\b', line): if Search(r'\bliterals\b', line): error(filename, linenum, 'build/namespaces_literals', 5, 'Do not use namespace using-directives. ' 'Use using-declarations instead.') else: error(filename, linenum, 'build/namespaces', 5, 'Do not use namespace using-directives. ' 'Use using-declarations instead.') # Detect variable-length arrays. match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line) if (match and match.group(2) != 'return' and match.group(2) != 'delete' and match.group(3).find(']') == -1): # Split the size using space and arithmetic operators as delimiters. # If any of the resulting tokens are not compile time constants then # report the error. tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3)) is_const = True skip_next = False for tok in tokens: if skip_next: skip_next = False continue if Search(r'sizeof\(.+\)', tok): continue if Search(r'arraysize\(\w+\)', tok): continue tok = tok.lstrip('(') tok = tok.rstrip(')') if not tok: continue if Match(r'\d+', tok): continue if Match(r'0[xX][0-9a-fA-F]+', tok): continue if Match(r'k[A-Z0-9]\w*', tok): continue if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue # A catch all for tricky sizeof cases, including 'sizeof expression', # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)' # requires skipping the next token because we split on ' ' and '*'. if tok.startswith('sizeof'): skip_next = True continue is_const = False break if not is_const: error(filename, linenum, 'runtime/arrays', 1, 'Do not use variable-length arrays. 
Use an appropriately named ' "('k' followed by CamelCase) compile-time constant for the size.") # Check for use of unnamed namespaces in header files. Registration # macros are typically OK, so we allow use of "namespace {" on lines # that end with backslashes. if (file_extension in GetHeaderExtensions() and Search(r'\bnamespace\s*{', line) and line[-1] != '\\'): error(filename, linenum, 'build/namespaces', 4, 'Do not use unnamed namespaces in header files. See ' 'https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces' ' for more information.')
def function[CheckLanguage, parameter[filename, clean_lines, linenum, file_extension, include_state, nesting_state, error]]: constant[Checks rules from the 'C++ language rules' section of cppguide.html. Some of these rules are hard to test (function overloading, using uint32 inappropriately), but we do the best we can. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. file_extension: The extension (without the dot) of the filename. include_state: An _IncludeState instance in which the headers are inserted. nesting_state: A NestingState instance which maintains information about the current stack of nested blocks being parsed. error: The function to call with any errors found. ] variable[line] assign[=] call[name[clean_lines].elided][name[linenum]] if <ast.UnaryOp object at 0x7da204620130> begin[:] return[None] variable[match] assign[=] call[name[_RE_PATTERN_INCLUDE].search, parameter[name[line]]] if name[match] begin[:] call[name[CheckIncludeLine], parameter[name[filename], name[clean_lines], name[linenum], name[include_state], name[error]]] return[None] variable[match] assign[=] call[name[Match], parameter[constant[^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b], name[line]]] if name[match] begin[:] call[name[include_state].ResetSection, parameter[call[name[match].group, parameter[constant[1]]]]] call[name[CheckCasts], parameter[name[filename], name[clean_lines], name[linenum], name[error]]] call[name[CheckGlobalStatic], parameter[name[filename], name[clean_lines], name[linenum], name[error]]] call[name[CheckPrintf], parameter[name[filename], name[clean_lines], name[linenum], name[error]]] if compare[name[file_extension] in call[name[GetHeaderExtensions], parameter[]]] begin[:] pass if call[name[Search], parameter[constant[\bshort port\b], name[line]]] begin[:] if <ast.UnaryOp object at 0x7da2046220e0> begin[:] call[name[error], parameter[name[filename], name[linenum], 
constant[runtime/int], constant[4], constant[Use "unsigned short" for ports, not "short"]]] if call[name[Search], parameter[constant[\boperator\s*&\s*\(\s*\)], name[line]]] begin[:] call[name[error], parameter[name[filename], name[linenum], constant[runtime/operator], constant[4], constant[Unary operator& is dangerous. Do not use it.]]] if call[name[Search], parameter[constant[\}\s*if\s*\(], name[line]]] begin[:] call[name[error], parameter[name[filename], name[linenum], constant[readability/braces], constant[4], constant[Did you mean "else if"? If not, start a new line for "if".]]] variable[printf_args] assign[=] call[name[_GetTextInside], parameter[name[line], constant[(?i)\b(string)?printf\s*\(]]] if name[printf_args] begin[:] variable[match] assign[=] call[name[Match], parameter[constant[([\w.\->()]+)$], name[printf_args]]] if <ast.BoolOp object at 0x7da204961600> begin[:] variable[function_name] assign[=] call[call[name[re].search, parameter[constant[\b((?:string)?printf)\s*\(], name[line], name[re].I]].group, parameter[constant[1]]] call[name[error], parameter[name[filename], name[linenum], constant[runtime/printf], constant[4], binary_operation[constant[Potential format string bug. Do %s("%%s", %s) instead.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204623df0>, <ast.Call object at 0x7da204623d30>]]]]] variable[match] assign[=] call[name[Search], parameter[constant[memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)], name[line]]] if <ast.BoolOp object at 0x7da204621690> begin[:] call[name[error], parameter[name[filename], name[linenum], constant[runtime/memset], constant[4], binary_operation[constant[Did you mean "memset(%s, 0, %s)"?] 
<ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204623c40>, <ast.Call object at 0x7da204623f40>]]]]] if call[name[Search], parameter[constant[\busing namespace\b], name[line]]] begin[:] if call[name[Search], parameter[constant[\bliterals\b], name[line]]] begin[:] call[name[error], parameter[name[filename], name[linenum], constant[build/namespaces_literals], constant[5], constant[Do not use namespace using-directives. Use using-declarations instead.]]] variable[match] assign[=] call[name[Match], parameter[constant[\s*(.+::)?(\w+) [a-z]\w*\[(.+)];], name[line]]] if <ast.BoolOp object at 0x7da2046202b0> begin[:] variable[tokens] assign[=] call[name[re].split, parameter[constant[\s|\+|\-|\*|\/|<<|>>]], call[name[match].group, parameter[constant[3]]]]] variable[is_const] assign[=] constant[True] variable[skip_next] assign[=] constant[False] for taget[name[tok]] in starred[name[tokens]] begin[:] if name[skip_next] begin[:] variable[skip_next] assign[=] constant[False] continue if call[name[Search], parameter[constant[sizeof\(.+\)], name[tok]]] begin[:] continue if call[name[Search], parameter[constant[arraysize\(\w+\)], name[tok]]] begin[:] continue variable[tok] assign[=] call[name[tok].lstrip, parameter[constant[(]]] variable[tok] assign[=] call[name[tok].rstrip, parameter[constant[)]]] if <ast.UnaryOp object at 0x7da204622fe0> begin[:] continue if call[name[Match], parameter[constant[\d+], name[tok]]] begin[:] continue if call[name[Match], parameter[constant[0[xX][0-9a-fA-F]+], name[tok]]] begin[:] continue if call[name[Match], parameter[constant[k[A-Z0-9]\w*], name[tok]]] begin[:] continue if call[name[Match], parameter[constant[(.+::)?k[A-Z0-9]\w*], name[tok]]] begin[:] continue if call[name[Match], parameter[constant[(.+::)?[A-Z][A-Z0-9_]*], name[tok]]] begin[:] continue if call[name[tok].startswith, parameter[constant[sizeof]]] begin[:] variable[skip_next] assign[=] constant[True] continue variable[is_const] assign[=] constant[False] break if 
<ast.UnaryOp object at 0x7da1b26ac1c0> begin[:] call[name[error], parameter[name[filename], name[linenum], constant[runtime/arrays], constant[1], constant[Do not use variable-length arrays. Use an appropriately named ('k' followed by CamelCase) compile-time constant for the size.]]] if <ast.BoolOp object at 0x7da1b26af400> begin[:] call[name[error], parameter[name[filename], name[linenum], constant[build/namespaces], constant[4], constant[Do not use unnamed namespaces in header files. See https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces for more information.]]]
keyword[def] identifier[CheckLanguage] ( identifier[filename] , identifier[clean_lines] , identifier[linenum] , identifier[file_extension] , identifier[include_state] , identifier[nesting_state] , identifier[error] ): literal[string] identifier[line] = identifier[clean_lines] . identifier[elided] [ identifier[linenum] ] keyword[if] keyword[not] identifier[line] : keyword[return] identifier[match] = identifier[_RE_PATTERN_INCLUDE] . identifier[search] ( identifier[line] ) keyword[if] identifier[match] : identifier[CheckIncludeLine] ( identifier[filename] , identifier[clean_lines] , identifier[linenum] , identifier[include_state] , identifier[error] ) keyword[return] identifier[match] = identifier[Match] ( literal[string] , identifier[line] ) keyword[if] identifier[match] : identifier[include_state] . identifier[ResetSection] ( identifier[match] . identifier[group] ( literal[int] )) identifier[CheckCasts] ( identifier[filename] , identifier[clean_lines] , identifier[linenum] , identifier[error] ) identifier[CheckGlobalStatic] ( identifier[filename] , identifier[clean_lines] , identifier[linenum] , identifier[error] ) identifier[CheckPrintf] ( identifier[filename] , identifier[clean_lines] , identifier[linenum] , identifier[error] ) keyword[if] identifier[file_extension] keyword[in] identifier[GetHeaderExtensions] (): keyword[pass] keyword[if] identifier[Search] ( literal[string] , identifier[line] ): keyword[if] keyword[not] identifier[Search] ( literal[string] , identifier[line] ): identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] ) keyword[else] : identifier[match] = identifier[Search] ( literal[string] , identifier[line] ) keyword[if] identifier[match] : identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] % identifier[match] . 
identifier[group] ( literal[int] )) keyword[if] identifier[Search] ( literal[string] , identifier[line] ): identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] ) keyword[if] identifier[Search] ( literal[string] , identifier[line] ): identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] ) identifier[printf_args] = identifier[_GetTextInside] ( identifier[line] , literal[string] ) keyword[if] identifier[printf_args] : identifier[match] = identifier[Match] ( literal[string] , identifier[printf_args] ) keyword[if] identifier[match] keyword[and] identifier[match] . identifier[group] ( literal[int] )!= literal[string] : identifier[function_name] = identifier[re] . identifier[search] ( literal[string] , identifier[line] , identifier[re] . identifier[I] ). identifier[group] ( literal[int] ) identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] %( identifier[function_name] , identifier[match] . identifier[group] ( literal[int] ))) identifier[match] = identifier[Search] ( literal[string] , identifier[line] ) keyword[if] identifier[match] keyword[and] keyword[not] identifier[Match] ( literal[string] , identifier[match] . identifier[group] ( literal[int] )): identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] %( identifier[match] . identifier[group] ( literal[int] ), identifier[match] . 
identifier[group] ( literal[int] ))) keyword[if] identifier[Search] ( literal[string] , identifier[line] ): keyword[if] identifier[Search] ( literal[string] , identifier[line] ): identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] literal[string] ) keyword[else] : identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] literal[string] ) identifier[match] = identifier[Match] ( literal[string] , identifier[line] ) keyword[if] ( identifier[match] keyword[and] identifier[match] . identifier[group] ( literal[int] )!= literal[string] keyword[and] identifier[match] . identifier[group] ( literal[int] )!= literal[string] keyword[and] identifier[match] . identifier[group] ( literal[int] ). identifier[find] ( literal[string] )==- literal[int] ): identifier[tokens] = identifier[re] . identifier[split] ( literal[string] , identifier[match] . identifier[group] ( literal[int] )) identifier[is_const] = keyword[True] identifier[skip_next] = keyword[False] keyword[for] identifier[tok] keyword[in] identifier[tokens] : keyword[if] identifier[skip_next] : identifier[skip_next] = keyword[False] keyword[continue] keyword[if] identifier[Search] ( literal[string] , identifier[tok] ): keyword[continue] keyword[if] identifier[Search] ( literal[string] , identifier[tok] ): keyword[continue] identifier[tok] = identifier[tok] . identifier[lstrip] ( literal[string] ) identifier[tok] = identifier[tok] . 
identifier[rstrip] ( literal[string] ) keyword[if] keyword[not] identifier[tok] : keyword[continue] keyword[if] identifier[Match] ( literal[string] , identifier[tok] ): keyword[continue] keyword[if] identifier[Match] ( literal[string] , identifier[tok] ): keyword[continue] keyword[if] identifier[Match] ( literal[string] , identifier[tok] ): keyword[continue] keyword[if] identifier[Match] ( literal[string] , identifier[tok] ): keyword[continue] keyword[if] identifier[Match] ( literal[string] , identifier[tok] ): keyword[continue] keyword[if] identifier[tok] . identifier[startswith] ( literal[string] ): identifier[skip_next] = keyword[True] keyword[continue] identifier[is_const] = keyword[False] keyword[break] keyword[if] keyword[not] identifier[is_const] : identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] literal[string] ) keyword[if] ( identifier[file_extension] keyword[in] identifier[GetHeaderExtensions] () keyword[and] identifier[Search] ( literal[string] , identifier[line] ) keyword[and] identifier[line] [- literal[int] ]!= literal[string] ): identifier[error] ( identifier[filename] , identifier[linenum] , literal[string] , literal[int] , literal[string] literal[string] literal[string] )
def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state, nesting_state, error): """Checks rules from the 'C++ language rules' section of cppguide.html. Some of these rules are hard to test (function overloading, using uint32 inappropriately), but we do the best we can. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. file_extension: The extension (without the dot) of the filename. include_state: An _IncludeState instance in which the headers are inserted. nesting_state: A NestingState instance which maintains information about the current stack of nested blocks being parsed. error: The function to call with any errors found. """ # If the line is empty or consists of entirely a comment, no need to # check it. line = clean_lines.elided[linenum] if not line: return # depends on [control=['if'], data=[]] match = _RE_PATTERN_INCLUDE.search(line) if match: CheckIncludeLine(filename, clean_lines, linenum, include_state, error) return # depends on [control=['if'], data=[]] # Reset include state across preprocessor directives. This is meant # to silence warnings for conditional includes. match = Match('^\\s*#\\s*(if|ifdef|ifndef|elif|else|endif)\\b', line) if match: include_state.ResetSection(match.group(1)) # depends on [control=['if'], data=[]] # Perform other checks now that we are sure that this is not an include line CheckCasts(filename, clean_lines, linenum, error) CheckGlobalStatic(filename, clean_lines, linenum, error) CheckPrintf(filename, clean_lines, linenum, error) if file_extension in GetHeaderExtensions(): # TODO(unknown): check that 1-arg constructors are explicit. # How to tell it's a constructor? # (handled in CheckForNonStandardConstructs for now) # TODO(unknown): check that classes declare or disable copy/assign # (level 1 error) pass # depends on [control=['if'], data=[]] # Check if people are using the verboten C basic types. 
The only exception # we regularly allow is "unsigned short port" for port. if Search('\\bshort port\\b', line): if not Search('\\bunsigned short port\\b', line): error(filename, linenum, 'runtime/int', 4, 'Use "unsigned short" for ports, not "short"') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: match = Search('\\b(short|long(?! +double)|long long)\\b', line) if match: error(filename, linenum, 'runtime/int', 4, 'Use int16/int64/etc, rather than the C type %s' % match.group(1)) # depends on [control=['if'], data=[]] # Check if some verboten operator overloading is going on # TODO(unknown): catch out-of-line unary operator&: # class X {}; # int operator&(const X& x) { return 42; } // unary operator& # The trick is it's hard to tell apart from binary operator&: # class Y { int operator&(const Y& x) { return 23; } }; // binary operator& if Search('\\boperator\\s*&\\s*\\(\\s*\\)', line): error(filename, linenum, 'runtime/operator', 4, 'Unary operator& is dangerous. Do not use it.') # depends on [control=['if'], data=[]] # Check for suspicious usage of "if" like # } if (a == b) { if Search('\\}\\s*if\\s*\\(', line): error(filename, linenum, 'readability/braces', 4, 'Did you mean "else if"? If not, start a new line for "if".') # depends on [control=['if'], data=[]] # Check for potential format string bugs like printf(foo). # We constrain the pattern not to pick things like DocidForPrintf(foo). # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str()) # TODO(unknown): Catch the following case. Need to change the calling # convention of the whole function to process multiple line to handle it. 
# printf( # boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line); printf_args = _GetTextInside(line, '(?i)\\b(string)?printf\\s*\\(') if printf_args: match = Match('([\\w.\\->()]+)$', printf_args) if match and match.group(1) != '__VA_ARGS__': function_name = re.search('\\b((?:string)?printf)\\s*\\(', line, re.I).group(1) error(filename, linenum, 'runtime/printf', 4, 'Potential format string bug. Do %s("%%s", %s) instead.' % (function_name, match.group(1))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Check for potential memset bugs like memset(buf, sizeof(buf), 0). match = Search('memset\\s*\\(([^,]*),\\s*([^,]*),\\s*0\\s*\\)', line) if match and (not Match("^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2))): error(filename, linenum, 'runtime/memset', 4, 'Did you mean "memset(%s, 0, %s)"?' % (match.group(1), match.group(2))) # depends on [control=['if'], data=[]] if Search('\\busing namespace\\b', line): if Search('\\bliterals\\b', line): error(filename, linenum, 'build/namespaces_literals', 5, 'Do not use namespace using-directives. Use using-declarations instead.') # depends on [control=['if'], data=[]] else: error(filename, linenum, 'build/namespaces', 5, 'Do not use namespace using-directives. Use using-declarations instead.') # depends on [control=['if'], data=[]] # Detect variable-length arrays. match = Match('\\s*(.+::)?(\\w+) [a-z]\\w*\\[(.+)];', line) if match and match.group(2) != 'return' and (match.group(2) != 'delete') and (match.group(3).find(']') == -1): # Split the size using space and arithmetic operators as delimiters. # If any of the resulting tokens are not compile time constants then # report the error. 
tokens = re.split('\\s|\\+|\\-|\\*|\\/|<<|>>]', match.group(3)) is_const = True skip_next = False for tok in tokens: if skip_next: skip_next = False continue # depends on [control=['if'], data=[]] if Search('sizeof\\(.+\\)', tok): continue # depends on [control=['if'], data=[]] if Search('arraysize\\(\\w+\\)', tok): continue # depends on [control=['if'], data=[]] tok = tok.lstrip('(') tok = tok.rstrip(')') if not tok: continue # depends on [control=['if'], data=[]] if Match('\\d+', tok): continue # depends on [control=['if'], data=[]] if Match('0[xX][0-9a-fA-F]+', tok): continue # depends on [control=['if'], data=[]] if Match('k[A-Z0-9]\\w*', tok): continue # depends on [control=['if'], data=[]] if Match('(.+::)?k[A-Z0-9]\\w*', tok): continue # depends on [control=['if'], data=[]] if Match('(.+::)?[A-Z][A-Z0-9_]*', tok): continue # depends on [control=['if'], data=[]] # A catch all for tricky sizeof cases, including 'sizeof expression', # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)' # requires skipping the next token because we split on ' ' and '*'. if tok.startswith('sizeof'): skip_next = True continue # depends on [control=['if'], data=[]] is_const = False break # depends on [control=['for'], data=['tok']] if not is_const: error(filename, linenum, 'runtime/arrays', 1, "Do not use variable-length arrays. Use an appropriately named ('k' followed by CamelCase) compile-time constant for the size.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Check for use of unnamed namespaces in header files. Registration # macros are typically OK, so we allow use of "namespace {" on lines # that end with backslashes. if file_extension in GetHeaderExtensions() and Search('\\bnamespace\\s*{', line) and (line[-1] != '\\'): error(filename, linenum, 'build/namespaces', 4, 'Do not use unnamed namespaces in header files. 
See https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces for more information.') # depends on [control=['if'], data=[]]
def _all_store_param_or_result_table_entry(self, instance, table, flags, additional_info=None):
    """Writes a single overview-table row for a parameter or result.

    :param instance: Parameter or result instance providing location/name data
    :param table: Table the row is written to
    :param flags: Tuple of row-operation flags; potential flags are
        `ADD_ROW`, `REMOVE_ROW`, `MODIFY_ROW`
    :param additional_info: Dictionary with extra column values that cannot be
        extracted from `instance` but need to be inserted as well
    """
    location = instance.v_location
    name = instance.v_name
    fullname = instance.v_full_name

    add_only = flags == (HDF5StorageService.ADD_ROW,)

    # The very first row of a table cannot be deleted, so in the rare case
    # that it might already exist we also allow modification. Checking for a
    # 'location' column avoids confusing this table with the smaller
    # explored-parameter overviews.
    if add_only and table.nrows < 2 and 'location' in table.colnames:
        flags = (HDF5StorageService.ADD_ROW, HDF5StorageService.MODIFY_ROW)
        add_only = False

    if add_only:
        # Pure insertion needs no search for an existing entry
        condition = None
        condvars = None
    else:
        # Search condition locating the row for this instance
        condition = """(namecol == name) & (locationcol == location)"""
        condvars = {'namecol': table.cols.name,
                    'locationcol': table.cols.location,
                    'name': name,
                    'location': location}

    if HDF5StorageService.REMOVE_ROW in flags:
        # Row removal needs no column data at all
        insert_dict = {}
    else:
        # Collect the column values from the instance and the extra info
        insert_dict = self._all_extract_insert_dict(instance,
                                                    set(table.colnames),
                                                    additional_info)

    # Perform the actual add/modify/remove operation
    self._all_add_or_modify_row(fullname, insert_dict, table,
                                condition=condition, condvars=condvars,
                                flags=flags)
def function[_all_store_param_or_result_table_entry, parameter[self, instance, table, flags, additional_info]]: constant[Stores a single row into an overview table :param instance: A parameter or result instance :param table: Table where row will be inserted :param flags: Flags how to insert into the table. Potential Flags are `ADD_ROW`, `REMOVE_ROW`, `MODIFY_ROW` :param additional_info: Dictionary containing information that cannot be extracted from `instance`, but needs to be inserted, too. ] variable[location] assign[=] name[instance].v_location variable[name] assign[=] name[instance].v_name variable[fullname] assign[=] name[instance].v_full_name if <ast.BoolOp object at 0x7da1b034a1d0> begin[:] variable[flags] assign[=] tuple[[<ast.Attribute object at 0x7da1b034ac20>, <ast.Attribute object at 0x7da1b034ac50>]] if compare[name[flags] equal[==] tuple[[<ast.Attribute object at 0x7da1b034ace0>]]] begin[:] variable[condvars] assign[=] constant[None] variable[condition] assign[=] constant[None] if compare[name[HDF5StorageService].REMOVE_ROW in name[flags]] begin[:] variable[insert_dict] assign[=] dictionary[[], []] call[name[self]._all_add_or_modify_row, parameter[name[fullname], name[insert_dict], name[table]]]
keyword[def] identifier[_all_store_param_or_result_table_entry] ( identifier[self] , identifier[instance] , identifier[table] , identifier[flags] , identifier[additional_info] = keyword[None] ): literal[string] identifier[location] = identifier[instance] . identifier[v_location] identifier[name] = identifier[instance] . identifier[v_name] identifier[fullname] = identifier[instance] . identifier[v_full_name] keyword[if] ( identifier[flags] ==( identifier[HDF5StorageService] . identifier[ADD_ROW] ,) keyword[and] identifier[table] . identifier[nrows] < literal[int] keyword[and] literal[string] keyword[in] identifier[table] . identifier[colnames] ): identifier[flags] =( identifier[HDF5StorageService] . identifier[ADD_ROW] , identifier[HDF5StorageService] . identifier[MODIFY_ROW] ) keyword[if] identifier[flags] ==( identifier[HDF5StorageService] . identifier[ADD_ROW] ,): identifier[condvars] = keyword[None] identifier[condition] = keyword[None] keyword[else] : identifier[condvars] ={ literal[string] : identifier[table] . identifier[cols] . identifier[name] , literal[string] : identifier[table] . identifier[cols] . identifier[location] , literal[string] : identifier[name] , literal[string] : identifier[location] } identifier[condition] = literal[string] keyword[if] identifier[HDF5StorageService] . identifier[REMOVE_ROW] keyword[in] identifier[flags] : identifier[insert_dict] ={} keyword[else] : identifier[colnames] = identifier[set] ( identifier[table] . identifier[colnames] ) identifier[insert_dict] = identifier[self] . identifier[_all_extract_insert_dict] ( identifier[instance] , identifier[colnames] , identifier[additional_info] ) identifier[self] . identifier[_all_add_or_modify_row] ( identifier[fullname] , identifier[insert_dict] , identifier[table] , identifier[condition] = identifier[condition] , identifier[condvars] = identifier[condvars] , identifier[flags] = identifier[flags] )
def _all_store_param_or_result_table_entry(self, instance, table, flags, additional_info=None): """Stores a single row into an overview table :param instance: A parameter or result instance :param table: Table where row will be inserted :param flags: Flags how to insert into the table. Potential Flags are `ADD_ROW`, `REMOVE_ROW`, `MODIFY_ROW` :param additional_info: Dictionary containing information that cannot be extracted from `instance`, but needs to be inserted, too. """ # assert isinstance(table, pt.Table) location = instance.v_location name = instance.v_name fullname = instance.v_full_name if flags == (HDF5StorageService.ADD_ROW,) and table.nrows < 2 and ('location' in table.colnames): # We add the modify row option here because you cannot delete the very first # row of the table, so there is the rare condition, that the row might already # exist. # We also need to check if 'location' is in the columns in order to avoid # confusion with the smaller explored parameter overviews flags = (HDF5StorageService.ADD_ROW, HDF5StorageService.MODIFY_ROW) # depends on [control=['if'], data=[]] if flags == (HDF5StorageService.ADD_ROW,): # If we are sure we only want to add a row we do not need to search! condvars = None condition = None # depends on [control=['if'], data=[]] else: # Condition to search for an entry condvars = {'namecol': table.cols.name, 'locationcol': table.cols.location, 'name': name, 'location': location} condition = '(namecol == name) & (locationcol == location)' if HDF5StorageService.REMOVE_ROW in flags: # If we want to remove a row, we don't need to extract information insert_dict = {} # depends on [control=['if'], data=[]] else: # Extract information to insert from the instance and the additional info dict colnames = set(table.colnames) insert_dict = self._all_extract_insert_dict(instance, colnames, additional_info) # Write the table entry self._all_add_or_modify_row(fullname, insert_dict, table, condition=condition, condvars=condvars, flags=flags)
def _parse_plot_quantity(self,quant,**kwargs): """Internal function to parse a quantity to be plotted based on input data""" # Cannot be using Quantity output kwargs['quantity']= False if callable(quant): return quant(self.t) def _eval(q): # Check those that don't have the exact name of the function if q == 't': return self.time(self.t,**kwargs) elif q == 'Enorm': return self.E(self.t,**kwargs)/self.E(0.,**kwargs) elif q == 'Eznorm': return self.Ez(self.t,**kwargs)/self.Ez(0.,**kwargs) elif q == 'ERnorm': return self.ER(self.t,**kwargs)/self.ER(0.,**kwargs) elif q == 'Jacobinorm': return self.Jacobi(self.t,**kwargs)/self.Jacobi(0.,**kwargs) else: # these are exact, e.g., 'x' for self.x return self.__getattribute__(q)(self.t,**kwargs) try: return _eval(quant) except AttributeError: pass try: import numexpr except ImportError: #pragma: no cover raise ImportError('Parsing the quantity to be plotted failed; if you are trying to plot an expression, please make sure to install numexpr first') # Figure out the variables in the expression to be computed to plot try: vars= numexpr.NumExpr(quant).input_names except TypeError as err: raise TypeError('Parsing the expression {} failed, with error message:\n"{}"'.format(quant,err)) # Construct dictionary of necessary parameters vars_dict= {} for var in vars: vars_dict[var]= _eval(var) return numexpr.evaluate(quant,local_dict=vars_dict)
def function[_parse_plot_quantity, parameter[self, quant]]: constant[Internal function to parse a quantity to be plotted based on input data] call[name[kwargs]][constant[quantity]] assign[=] constant[False] if call[name[callable], parameter[name[quant]]] begin[:] return[call[name[quant], parameter[name[self].t]]] def function[_eval, parameter[q]]: if compare[name[q] equal[==] constant[t]] begin[:] return[call[name[self].time, parameter[name[self].t]]] <ast.Try object at 0x7da1b0c67ca0> <ast.Try object at 0x7da1b0c645b0> <ast.Try object at 0x7da1b0c67a90> variable[vars_dict] assign[=] dictionary[[], []] for taget[name[var]] in starred[name[vars]] begin[:] call[name[vars_dict]][name[var]] assign[=] call[name[_eval], parameter[name[var]]] return[call[name[numexpr].evaluate, parameter[name[quant]]]]
keyword[def] identifier[_parse_plot_quantity] ( identifier[self] , identifier[quant] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[False] keyword[if] identifier[callable] ( identifier[quant] ): keyword[return] identifier[quant] ( identifier[self] . identifier[t] ) keyword[def] identifier[_eval] ( identifier[q] ): keyword[if] identifier[q] == literal[string] : keyword[return] identifier[self] . identifier[time] ( identifier[self] . identifier[t] ,** identifier[kwargs] ) keyword[elif] identifier[q] == literal[string] : keyword[return] identifier[self] . identifier[E] ( identifier[self] . identifier[t] ,** identifier[kwargs] )/ identifier[self] . identifier[E] ( literal[int] ,** identifier[kwargs] ) keyword[elif] identifier[q] == literal[string] : keyword[return] identifier[self] . identifier[Ez] ( identifier[self] . identifier[t] ,** identifier[kwargs] )/ identifier[self] . identifier[Ez] ( literal[int] ,** identifier[kwargs] ) keyword[elif] identifier[q] == literal[string] : keyword[return] identifier[self] . identifier[ER] ( identifier[self] . identifier[t] ,** identifier[kwargs] )/ identifier[self] . identifier[ER] ( literal[int] ,** identifier[kwargs] ) keyword[elif] identifier[q] == literal[string] : keyword[return] identifier[self] . identifier[Jacobi] ( identifier[self] . identifier[t] ,** identifier[kwargs] )/ identifier[self] . identifier[Jacobi] ( literal[int] ,** identifier[kwargs] ) keyword[else] : keyword[return] identifier[self] . identifier[__getattribute__] ( identifier[q] )( identifier[self] . identifier[t] ,** identifier[kwargs] ) keyword[try] : keyword[return] identifier[_eval] ( identifier[quant] ) keyword[except] identifier[AttributeError] : keyword[pass] keyword[try] : keyword[import] identifier[numexpr] keyword[except] identifier[ImportError] : keyword[raise] identifier[ImportError] ( literal[string] ) keyword[try] : identifier[vars] = identifier[numexpr] . identifier[NumExpr] ( identifier[quant] ). 
identifier[input_names] keyword[except] identifier[TypeError] keyword[as] identifier[err] : keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[quant] , identifier[err] )) identifier[vars_dict] ={} keyword[for] identifier[var] keyword[in] identifier[vars] : identifier[vars_dict] [ identifier[var] ]= identifier[_eval] ( identifier[var] ) keyword[return] identifier[numexpr] . identifier[evaluate] ( identifier[quant] , identifier[local_dict] = identifier[vars_dict] )
def _parse_plot_quantity(self, quant, **kwargs): """Internal function to parse a quantity to be plotted based on input data""" # Cannot be using Quantity output kwargs['quantity'] = False if callable(quant): return quant(self.t) # depends on [control=['if'], data=[]] def _eval(q): # Check those that don't have the exact name of the function if q == 't': return self.time(self.t, **kwargs) # depends on [control=['if'], data=[]] elif q == 'Enorm': return self.E(self.t, **kwargs) / self.E(0.0, **kwargs) # depends on [control=['if'], data=[]] elif q == 'Eznorm': return self.Ez(self.t, **kwargs) / self.Ez(0.0, **kwargs) # depends on [control=['if'], data=[]] elif q == 'ERnorm': return self.ER(self.t, **kwargs) / self.ER(0.0, **kwargs) # depends on [control=['if'], data=[]] elif q == 'Jacobinorm': return self.Jacobi(self.t, **kwargs) / self.Jacobi(0.0, **kwargs) # depends on [control=['if'], data=[]] else: # these are exact, e.g., 'x' for self.x return self.__getattribute__(q)(self.t, **kwargs) try: return _eval(quant) # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] try: import numexpr # depends on [control=['try'], data=[]] except ImportError: #pragma: no cover raise ImportError('Parsing the quantity to be plotted failed; if you are trying to plot an expression, please make sure to install numexpr first') # depends on [control=['except'], data=[]] # Figure out the variables in the expression to be computed to plot try: vars = numexpr.NumExpr(quant).input_names # depends on [control=['try'], data=[]] except TypeError as err: raise TypeError('Parsing the expression {} failed, with error message:\n"{}"'.format(quant, err)) # depends on [control=['except'], data=['err']] # Construct dictionary of necessary parameters vars_dict = {} for var in vars: vars_dict[var] = _eval(var) # depends on [control=['for'], data=['var']] return numexpr.evaluate(quant, local_dict=vars_dict)
def reward_scope(self, state: Sequence[tf.Tensor], action: Sequence[tf.Tensor], next_state: Sequence[tf.Tensor]) -> Dict[str, TensorFluent]:
    '''Builds the full fluent scope needed to evaluate the reward function.

    Merges the non-fluents with the current state, action, and next-state
    fluent scopes into a single mapping.

    Args:
        state (Sequence[tf.Tensor]): The current state fluents.
        action (Sequence[tf.Tensor]): The action fluents.
        next_state (Sequence[tf.Tensor]): The next state fluents.

    Returns:
        A mapping from fluent names to :obj:`rddl2tf.fluent.TensorFluent`.
    '''
    partial_scopes = (self.non_fluents_scope(),
                      self.state_scope(state),
                      self.action_scope(action),
                      self.next_state_scope(next_state))
    scope = {}
    for partial in partial_scopes:
        scope.update(partial)
    return scope
def function[reward_scope, parameter[self, state, action, next_state]]: constant[Returns the complete reward fluent scope for the current `state`, `action` fluents, and `next_state` fluents. Args: state (Sequence[tf.Tensor]): The current state fluents. action (Sequence[tf.Tensor]): The action fluents. next_state (Sequence[tf.Tensor]): The next state fluents. Returns: A mapping from fluent names to :obj:`rddl2tf.fluent.TensorFluent`. ] variable[scope] assign[=] dictionary[[], []] call[name[scope].update, parameter[call[name[self].non_fluents_scope, parameter[]]]] call[name[scope].update, parameter[call[name[self].state_scope, parameter[name[state]]]]] call[name[scope].update, parameter[call[name[self].action_scope, parameter[name[action]]]]] call[name[scope].update, parameter[call[name[self].next_state_scope, parameter[name[next_state]]]]] return[name[scope]]
keyword[def] identifier[reward_scope] ( identifier[self] , identifier[state] : identifier[Sequence] [ identifier[tf] . identifier[Tensor] ], identifier[action] : identifier[Sequence] [ identifier[tf] . identifier[Tensor] ], identifier[next_state] : identifier[Sequence] [ identifier[tf] . identifier[Tensor] ])-> identifier[Dict] [ identifier[str] , identifier[TensorFluent] ]: literal[string] identifier[scope] ={} identifier[scope] . identifier[update] ( identifier[self] . identifier[non_fluents_scope] ()) identifier[scope] . identifier[update] ( identifier[self] . identifier[state_scope] ( identifier[state] )) identifier[scope] . identifier[update] ( identifier[self] . identifier[action_scope] ( identifier[action] )) identifier[scope] . identifier[update] ( identifier[self] . identifier[next_state_scope] ( identifier[next_state] )) keyword[return] identifier[scope]
def reward_scope(self, state: Sequence[tf.Tensor], action: Sequence[tf.Tensor], next_state: Sequence[tf.Tensor]) -> Dict[str, TensorFluent]: """Returns the complete reward fluent scope for the current `state`, `action` fluents, and `next_state` fluents. Args: state (Sequence[tf.Tensor]): The current state fluents. action (Sequence[tf.Tensor]): The action fluents. next_state (Sequence[tf.Tensor]): The next state fluents. Returns: A mapping from fluent names to :obj:`rddl2tf.fluent.TensorFluent`. """ scope = {} scope.update(self.non_fluents_scope()) scope.update(self.state_scope(state)) scope.update(self.action_scope(action)) scope.update(self.next_state_scope(next_state)) return scope
def dequeue(self, destination):
    """
    Pops one frame off the named queue and returns it.

    @param destination: The queue name (destinationination).
    @type destination: C{str}

    @return: The first frame in the specified queue, or C{None} if there are none.
    @rtype: C{stompclient.frame.Frame}
    """
    if not self.has_frames(destination):
        return None
    meta = self.queue_metadata[destination]
    # Take the next message id and record the dequeue in the stats
    message_id = meta['frames'].pop()
    meta['dequeued'] += 1
    # Remove the stored frame so it cannot be delivered twice
    frame = self.frame_store.pop(message_id)
    self._opcount += 1
    # Persist queue state after the mutation
    self._sync()
    return frame
def function[dequeue, parameter[self, destination]]: constant[ Removes and returns an item from the queue (or C{None} if no items in queue). @param destination: The queue name (destinationination). @type destination: C{str} @return: The first frame in the specified queue, or C{None} if there are none. @rtype: C{stompclient.frame.Frame} ] if <ast.UnaryOp object at 0x7da1b19b6800> begin[:] return[constant[None]] variable[message_id] assign[=] call[call[call[name[self].queue_metadata][name[destination]]][constant[frames]].pop, parameter[]] <ast.AugAssign object at 0x7da1b19a2dd0> variable[frame] assign[=] call[name[self].frame_store][name[message_id]] <ast.Delete object at 0x7da1b1982740> <ast.AugAssign object at 0x7da1b19823e0> call[name[self]._sync, parameter[]] return[name[frame]]
keyword[def] identifier[dequeue] ( identifier[self] , identifier[destination] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[has_frames] ( identifier[destination] ): keyword[return] keyword[None] identifier[message_id] = identifier[self] . identifier[queue_metadata] [ identifier[destination] ][ literal[string] ]. identifier[pop] () identifier[self] . identifier[queue_metadata] [ identifier[destination] ][ literal[string] ]+= literal[int] identifier[frame] = identifier[self] . identifier[frame_store] [ identifier[message_id] ] keyword[del] identifier[self] . identifier[frame_store] [ identifier[message_id] ] identifier[self] . identifier[_opcount] += literal[int] identifier[self] . identifier[_sync] () keyword[return] identifier[frame]
def dequeue(self, destination): """ Removes and returns an item from the queue (or C{None} if no items in queue). @param destination: The queue name (destinationination). @type destination: C{str} @return: The first frame in the specified queue, or C{None} if there are none. @rtype: C{stompclient.frame.Frame} """ if not self.has_frames(destination): return None # depends on [control=['if'], data=[]] message_id = self.queue_metadata[destination]['frames'].pop() self.queue_metadata[destination]['dequeued'] += 1 frame = self.frame_store[message_id] del self.frame_store[message_id] self._opcount += 1 self._sync() return frame
def read_cstring(self) -> bool:
    """ read a double quoted string

    Read following BNF rule else return False::

        '"' -> ['\\' #char | ~'\\'] '"'
    """
    self._stream.save_context()
    # Opening quote, then content up to the unescaped closing quote
    # ('\' is the escape character).
    # NOTE: the previous version also sliced the matched text into a local
    # ``txt`` that was never used; that dead copy has been removed.
    if self.read_char("\"") and self.read_until("\"", "\\"):
        return self._stream.validate_context()
    return self._stream.restore_context()
def function[read_cstring, parameter[self]]: constant[ read a double quoted string Read following BNF rule else return False:: '"' -> ['\' #char | ~'\'] '"' ] call[name[self]._stream.save_context, parameter[]] variable[idx] assign[=] name[self]._stream.index if <ast.BoolOp object at 0x7da1b0136770> begin[:] variable[txt] assign[=] call[name[self]._stream][<ast.Slice object at 0x7da1b0135c60>] return[call[name[self]._stream.validate_context, parameter[]]] return[call[name[self]._stream.restore_context, parameter[]]]
keyword[def] identifier[read_cstring] ( identifier[self] )-> identifier[bool] : literal[string] identifier[self] . identifier[_stream] . identifier[save_context] () identifier[idx] = identifier[self] . identifier[_stream] . identifier[index] keyword[if] identifier[self] . identifier[read_char] ( literal[string] ) keyword[and] identifier[self] . identifier[read_until] ( literal[string] , literal[string] ): identifier[txt] = identifier[self] . identifier[_stream] [ identifier[idx] : identifier[self] . identifier[_stream] . identifier[index] ] keyword[return] identifier[self] . identifier[_stream] . identifier[validate_context] () keyword[return] identifier[self] . identifier[_stream] . identifier[restore_context] ()
def read_cstring(self) -> bool: """ read a double quoted string Read following BNF rule else return False:: '"' -> ['\\' #char | ~'\\'] '"' """ self._stream.save_context() idx = self._stream.index if self.read_char('"') and self.read_until('"', '\\'): txt = self._stream[idx:self._stream.index] return self._stream.validate_context() # depends on [control=['if'], data=[]] return self._stream.restore_context()
def log2ceil(x):
    """Number of bits needed to address ``x`` items (store values 0..x-1).

    For example ``x=8`` returns 3. The result is wrapped in ``hInt``.
    """
    if not isinstance(x, (int, float)):
        # Coerce non-numeric inputs (e.g. HDL value objects) to int first
        x = int(x)
    # 0 and 1 still require a single bit
    res = 1 if x in (0, 1) else math.ceil(math.log2(x))
    return hInt(res)
def function[log2ceil, parameter[x]]: constant[ Returns no of bits required to store x-1 for example x=8 returns 3 ] if <ast.UnaryOp object at 0x7da1b0383250> begin[:] variable[x] assign[=] call[name[int], parameter[name[x]]] if <ast.BoolOp object at 0x7da1b0352c80> begin[:] variable[res] assign[=] constant[1] return[call[name[hInt], parameter[name[res]]]]
keyword[def] identifier[log2ceil] ( identifier[x] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[x] ,( identifier[int] , identifier[float] )): identifier[x] = identifier[int] ( identifier[x] ) keyword[if] identifier[x] == literal[int] keyword[or] identifier[x] == literal[int] : identifier[res] = literal[int] keyword[else] : identifier[res] = identifier[math] . identifier[ceil] ( identifier[math] . identifier[log2] ( identifier[x] )) keyword[return] identifier[hInt] ( identifier[res] )
def log2ceil(x): """ Returns no of bits required to store x-1 for example x=8 returns 3 """ if not isinstance(x, (int, float)): x = int(x) # depends on [control=['if'], data=[]] if x == 0 or x == 1: res = 1 # depends on [control=['if'], data=[]] else: res = math.ceil(math.log2(x)) return hInt(res)
def process_custom_field(connection, project_id, cf_type, cf_name, value_names=None):
    """Ensures a custom field exists and is attached to the given project.

    Creates the custom field prototype if necessary and attaches it to the
    project. If the project field already exists, its bundle is reused;
    otherwise a project field is created together with a bundle named
    ``<cf_name>_bundle_<project_id>``. Any of ``value_names`` missing from
    the bundle are added.

    Args:
        connection: An opened Connection instance.
        project_id: Id of the project to attach CF to.
        cf_type: Type of cf to be created.
        cf_name: Name of cf that should be created (if not exists) and
            attached to the project (if not yet attached).
        value_names: Values that the cf must have. If None, no bundle values
            are created for the field. If an empty list, a bundle is created
            but no values are added to it. Values already present in the
            bundle are left untouched.

    Raises:
        LogicException: If the custom field already exists but has the
            wrong type.
        YouTrackException: If something is wrong with the queries.
    """
    _create_custom_field_prototype(connection, cf_type, cf_name)

    # Only bundle-backed field types carry values; cf_type[0:-3] strips the
    # trailing cardinality suffix before the lookup.
    if cf_type[0:-3] not in connection.bundle_types:
        value_names = None
    elif value_names is None:
        value_names = []

    attached_fields = [field for field in connection.getProjectCustomFields(project_id)
                       if utf8encode(field.name) == cf_name]

    if attached_fields:
        if value_names is None:
            # Field is already attached and needs no bundle handling
            return
        # Reuse the bundle of the existing project field
        bundle = connection.getBundle(cf_type, attached_fields[0].bundle)
        values_to_add = calculate_missing_value_names(bundle, value_names)
    else:
        if value_names is None:
            # Attach the field without any bundle
            connection.createProjectCustomFieldDetailed(project_id, cf_name, "No " + cf_name)
            return
        # Create a dedicated bundle and attach the field with it
        bundle = create_bundle_safe(connection, cf_name + "_bundle_" + project_id, cf_type)
        values_to_add = calculate_missing_value_names(bundle, value_names)
        connection.createProjectCustomFieldDetailed(project_id, cf_name, "No " + cf_name,
                                                    params={"bundle": bundle.name})

    for value_name in values_to_add:
        connection.addValueToBundle(bundle, bundle.createElement(value_name))
def function[process_custom_field, parameter[connection, project_id, cf_type, cf_name, value_names]]: constant[ Creates custom field and attaches it to the project. If custom field already exists and has type cf_type it is attached to the project. If it has another type, LogicException is raised. If project field already exists, uses it and bundle from it. If not, creates project field and bundle with name <cf_name>_bundle_<project_id> for it. Adds value_names to bundle. Args: connection: An opened Connection instance. project_id: Id of the project to attach CF to. cf_type: Type of cf to be created. cf_name: Name of cf that should be created (if not exists) and attached to the project (if not yet attached) value_names: Values, that cf must have. If None, does not create any bundle for the field. If empty list, creates bundle, but does not create any value_names in it. If bundle already contains some value_names, only value_names that do not already exist are added. Raises: LogicException: If custom field already exists, but has wrong type. YouTrackException: If something is wrong with queries. 
] call[name[_create_custom_field_prototype], parameter[name[connection], name[cf_type], name[cf_name]]] if compare[call[name[cf_type]][<ast.Slice object at 0x7da18fe92a10>] <ast.NotIn object at 0x7da2590d7190> name[connection].bundle_types] begin[:] variable[value_names] assign[=] constant[None] variable[existing_project_fields] assign[=] <ast.ListComp object at 0x7da18fe917b0> if call[name[len], parameter[name[existing_project_fields]]] begin[:] if compare[name[value_names] is constant[None]] begin[:] return[None] variable[bundle] assign[=] call[name[connection].getBundle, parameter[name[cf_type], call[name[existing_project_fields]][constant[0]].bundle]] variable[values_to_add] assign[=] call[name[calculate_missing_value_names], parameter[name[bundle], name[value_names]]] for taget[name[name]] in starred[name[values_to_add]] begin[:] call[name[connection].addValueToBundle, parameter[name[bundle], call[name[bundle].createElement, parameter[name[name]]]]]
keyword[def] identifier[process_custom_field] ( identifier[connection] , identifier[project_id] , identifier[cf_type] , identifier[cf_name] , identifier[value_names] = keyword[None] ): literal[string] identifier[_create_custom_field_prototype] ( identifier[connection] , identifier[cf_type] , identifier[cf_name] ) keyword[if] identifier[cf_type] [ literal[int] :- literal[int] ] keyword[not] keyword[in] identifier[connection] . identifier[bundle_types] : identifier[value_names] = keyword[None] keyword[elif] identifier[value_names] keyword[is] keyword[None] : identifier[value_names] =[] identifier[existing_project_fields] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[connection] . identifier[getProjectCustomFields] ( identifier[project_id] ) keyword[if] identifier[utf8encode] ( identifier[item] . identifier[name] )== identifier[cf_name] ] keyword[if] identifier[len] ( identifier[existing_project_fields] ): keyword[if] identifier[value_names] keyword[is] keyword[None] : keyword[return] identifier[bundle] = identifier[connection] . identifier[getBundle] ( identifier[cf_type] , identifier[existing_project_fields] [ literal[int] ]. identifier[bundle] ) identifier[values_to_add] = identifier[calculate_missing_value_names] ( identifier[bundle] , identifier[value_names] ) keyword[else] : keyword[if] identifier[value_names] keyword[is] keyword[None] : identifier[connection] . identifier[createProjectCustomFieldDetailed] ( identifier[project_id] , identifier[cf_name] , literal[string] + identifier[cf_name] ) keyword[return] identifier[bundle] = identifier[create_bundle_safe] ( identifier[connection] , identifier[cf_name] + literal[string] + identifier[project_id] , identifier[cf_type] ) identifier[values_to_add] = identifier[calculate_missing_value_names] ( identifier[bundle] , identifier[value_names] ) identifier[connection] . 
identifier[createProjectCustomFieldDetailed] ( identifier[project_id] , identifier[cf_name] , literal[string] + identifier[cf_name] , identifier[params] ={ literal[string] : identifier[bundle] . identifier[name] }) keyword[for] identifier[name] keyword[in] identifier[values_to_add] : identifier[connection] . identifier[addValueToBundle] ( identifier[bundle] , identifier[bundle] . identifier[createElement] ( identifier[name] ))
def process_custom_field(connection, project_id, cf_type, cf_name, value_names=None): """ Creates custom field and attaches it to the project. If custom field already exists and has type cf_type it is attached to the project. If it has another type, LogicException is raised. If project field already exists, uses it and bundle from it. If not, creates project field and bundle with name <cf_name>_bundle_<project_id> for it. Adds value_names to bundle. Args: connection: An opened Connection instance. project_id: Id of the project to attach CF to. cf_type: Type of cf to be created. cf_name: Name of cf that should be created (if not exists) and attached to the project (if not yet attached) value_names: Values, that cf must have. If None, does not create any bundle for the field. If empty list, creates bundle, but does not create any value_names in it. If bundle already contains some value_names, only value_names that do not already exist are added. Raises: LogicException: If custom field already exists, but has wrong type. YouTrackException: If something is wrong with queries. 
""" _create_custom_field_prototype(connection, cf_type, cf_name) if cf_type[0:-3] not in connection.bundle_types: value_names = None # depends on [control=['if'], data=[]] elif value_names is None: value_names = [] # depends on [control=['if'], data=['value_names']] existing_project_fields = [item for item in connection.getProjectCustomFields(project_id) if utf8encode(item.name) == cf_name] if len(existing_project_fields): if value_names is None: return # depends on [control=['if'], data=[]] bundle = connection.getBundle(cf_type, existing_project_fields[0].bundle) values_to_add = calculate_missing_value_names(bundle, value_names) # depends on [control=['if'], data=[]] else: if value_names is None: connection.createProjectCustomFieldDetailed(project_id, cf_name, 'No ' + cf_name) return # depends on [control=['if'], data=[]] bundle = create_bundle_safe(connection, cf_name + '_bundle_' + project_id, cf_type) values_to_add = calculate_missing_value_names(bundle, value_names) connection.createProjectCustomFieldDetailed(project_id, cf_name, 'No ' + cf_name, params={'bundle': bundle.name}) for name in values_to_add: connection.addValueToBundle(bundle, bundle.createElement(name)) # depends on [control=['for'], data=['name']]
def connect_command(self): ''' Generates a JSON string with the params to be used when sending CONNECT to the server. ->> CONNECT {"verbose": false, "pedantic": false, "lang": "python2" } ''' options = { "verbose": self.options["verbose"], "pedantic": self.options["pedantic"], "lang": __lang__, "version": __version__, "protocol": PROTOCOL } if "auth_required" in self._server_info: if self._server_info["auth_required"] == True: # In case there is no password, then consider handle # sending a token instead. if self.options["user"] is not None and self.options["password"] is not None: options["user"] = self.options["user"] options["pass"] = self.options["password"] elif self.options["token"] is not None: options["auth_token"] = self.options["token"] elif self._current_server.uri.password is None: options["auth_token"] = self._current_server.uri.username else: options["user"] = self._current_server.uri.username options["pass"] = self._current_server.uri.password if self.options["name"] is not None: options["name"] = self.options["name"] if self.options["no_echo"] is not None: options["echo"] = not self.options["no_echo"] args = json.dumps(options, sort_keys=True) return CONNECT_PROTO.format(CONNECT_OP, args, _CRLF_)
def function[connect_command, parameter[self]]: constant[ Generates a JSON string with the params to be used when sending CONNECT to the server. ->> CONNECT {"verbose": false, "pedantic": false, "lang": "python2" } ] variable[options] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c6470>, <ast.Constant object at 0x7da20c6c43d0>, <ast.Constant object at 0x7da20c6c41f0>, <ast.Constant object at 0x7da20c6c7430>, <ast.Constant object at 0x7da20c6c5bd0>], [<ast.Subscript object at 0x7da20c6c70a0>, <ast.Subscript object at 0x7da20c6c66b0>, <ast.Name object at 0x7da20c6c7a60>, <ast.Name object at 0x7da20c6c6a70>, <ast.Name object at 0x7da20c6c6c80>]] if compare[constant[auth_required] in name[self]._server_info] begin[:] if compare[call[name[self]._server_info][constant[auth_required]] equal[==] constant[True]] begin[:] if <ast.BoolOp object at 0x7da20c6c7310> begin[:] call[name[options]][constant[user]] assign[=] call[name[self].options][constant[user]] call[name[options]][constant[pass]] assign[=] call[name[self].options][constant[password]] if compare[call[name[self].options][constant[name]] is_not constant[None]] begin[:] call[name[options]][constant[name]] assign[=] call[name[self].options][constant[name]] if compare[call[name[self].options][constant[no_echo]] is_not constant[None]] begin[:] call[name[options]][constant[echo]] assign[=] <ast.UnaryOp object at 0x7da1b040f700> variable[args] assign[=] call[name[json].dumps, parameter[name[options]]] return[call[name[CONNECT_PROTO].format, parameter[name[CONNECT_OP], name[args], name[_CRLF_]]]]
keyword[def] identifier[connect_command] ( identifier[self] ): literal[string] identifier[options] ={ literal[string] : identifier[self] . identifier[options] [ literal[string] ], literal[string] : identifier[self] . identifier[options] [ literal[string] ], literal[string] : identifier[__lang__] , literal[string] : identifier[__version__] , literal[string] : identifier[PROTOCOL] } keyword[if] literal[string] keyword[in] identifier[self] . identifier[_server_info] : keyword[if] identifier[self] . identifier[_server_info] [ literal[string] ]== keyword[True] : keyword[if] identifier[self] . identifier[options] [ literal[string] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[options] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[options] [ literal[string] ]= identifier[self] . identifier[options] [ literal[string] ] identifier[options] [ literal[string] ]= identifier[self] . identifier[options] [ literal[string] ] keyword[elif] identifier[self] . identifier[options] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[options] [ literal[string] ]= identifier[self] . identifier[options] [ literal[string] ] keyword[elif] identifier[self] . identifier[_current_server] . identifier[uri] . identifier[password] keyword[is] keyword[None] : identifier[options] [ literal[string] ]= identifier[self] . identifier[_current_server] . identifier[uri] . identifier[username] keyword[else] : identifier[options] [ literal[string] ]= identifier[self] . identifier[_current_server] . identifier[uri] . identifier[username] identifier[options] [ literal[string] ]= identifier[self] . identifier[_current_server] . identifier[uri] . identifier[password] keyword[if] identifier[self] . identifier[options] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[options] [ literal[string] ]= identifier[self] . identifier[options] [ literal[string] ] keyword[if] identifier[self] . 
identifier[options] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[options] [ literal[string] ]= keyword[not] identifier[self] . identifier[options] [ literal[string] ] identifier[args] = identifier[json] . identifier[dumps] ( identifier[options] , identifier[sort_keys] = keyword[True] ) keyword[return] identifier[CONNECT_PROTO] . identifier[format] ( identifier[CONNECT_OP] , identifier[args] , identifier[_CRLF_] )
def connect_command(self): """ Generates a JSON string with the params to be used when sending CONNECT to the server. ->> CONNECT {"verbose": false, "pedantic": false, "lang": "python2" } """ options = {'verbose': self.options['verbose'], 'pedantic': self.options['pedantic'], 'lang': __lang__, 'version': __version__, 'protocol': PROTOCOL} if 'auth_required' in self._server_info: if self._server_info['auth_required'] == True: # In case there is no password, then consider handle # sending a token instead. if self.options['user'] is not None and self.options['password'] is not None: options['user'] = self.options['user'] options['pass'] = self.options['password'] # depends on [control=['if'], data=[]] elif self.options['token'] is not None: options['auth_token'] = self.options['token'] # depends on [control=['if'], data=[]] elif self._current_server.uri.password is None: options['auth_token'] = self._current_server.uri.username # depends on [control=['if'], data=[]] else: options['user'] = self._current_server.uri.username options['pass'] = self._current_server.uri.password # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.options['name'] is not None: options['name'] = self.options['name'] # depends on [control=['if'], data=[]] if self.options['no_echo'] is not None: options['echo'] = not self.options['no_echo'] # depends on [control=['if'], data=[]] args = json.dumps(options, sort_keys=True) return CONNECT_PROTO.format(CONNECT_OP, args, _CRLF_)
def format_fore( number: FormatArg, light: Optional[bool] = False, extended: Optional[bool] = False) -> str: """ Return an escape code for a fore color, by number. This is a convenience method for handling the different code types all in one shot. It also handles some validation. """ return _format_code( number, backcolor=False, light=light, extended=extended )
def function[format_fore, parameter[number, light, extended]]: constant[ Return an escape code for a fore color, by number. This is a convenience method for handling the different code types all in one shot. It also handles some validation. ] return[call[name[_format_code], parameter[name[number]]]]
keyword[def] identifier[format_fore] ( identifier[number] : identifier[FormatArg] , identifier[light] : identifier[Optional] [ identifier[bool] ]= keyword[False] , identifier[extended] : identifier[Optional] [ identifier[bool] ]= keyword[False] )-> identifier[str] : literal[string] keyword[return] identifier[_format_code] ( identifier[number] , identifier[backcolor] = keyword[False] , identifier[light] = identifier[light] , identifier[extended] = identifier[extended] )
def format_fore(number: FormatArg, light: Optional[bool]=False, extended: Optional[bool]=False) -> str: """ Return an escape code for a fore color, by number. This is a convenience method for handling the different code types all in one shot. It also handles some validation. """ return _format_code(number, backcolor=False, light=light, extended=extended)
def get_lambdas(awsclient, config, add_arn=False): """Get the list of lambda functions. :param config: :param add_arn: :return: list containing lambda entries """ if 'lambda' in config: client_lambda = awsclient.get_client('lambda') lambda_entries = config['lambda'].get('entries', []) lmbdas = [] for lambda_entry in lambda_entries: lmbda = { 'name': lambda_entry.get('name', None), 'alias': lambda_entry.get('alias', None), 'swagger_ref': lambda_entry.get('swaggerRef', None) } if add_arn: _sleep() response_lambda = client_lambda.get_function( FunctionName=lmbda['name']) lmbda['arn'] = response_lambda['Configuration']['FunctionArn'] lmbdas.append(lmbda) return lmbdas else: return []
def function[get_lambdas, parameter[awsclient, config, add_arn]]: constant[Get the list of lambda functions. :param config: :param add_arn: :return: list containing lambda entries ] if compare[constant[lambda] in name[config]] begin[:] variable[client_lambda] assign[=] call[name[awsclient].get_client, parameter[constant[lambda]]] variable[lambda_entries] assign[=] call[call[name[config]][constant[lambda]].get, parameter[constant[entries], list[[]]]] variable[lmbdas] assign[=] list[[]] for taget[name[lambda_entry]] in starred[name[lambda_entries]] begin[:] variable[lmbda] assign[=] dictionary[[<ast.Constant object at 0x7da18fe90400>, <ast.Constant object at 0x7da18fe92680>, <ast.Constant object at 0x7da18fe903a0>], [<ast.Call object at 0x7da18fe905b0>, <ast.Call object at 0x7da18fe91db0>, <ast.Call object at 0x7da18fe90e50>]] if name[add_arn] begin[:] call[name[_sleep], parameter[]] variable[response_lambda] assign[=] call[name[client_lambda].get_function, parameter[]] call[name[lmbda]][constant[arn]] assign[=] call[call[name[response_lambda]][constant[Configuration]]][constant[FunctionArn]] call[name[lmbdas].append, parameter[name[lmbda]]] return[name[lmbdas]]
keyword[def] identifier[get_lambdas] ( identifier[awsclient] , identifier[config] , identifier[add_arn] = keyword[False] ): literal[string] keyword[if] literal[string] keyword[in] identifier[config] : identifier[client_lambda] = identifier[awsclient] . identifier[get_client] ( literal[string] ) identifier[lambda_entries] = identifier[config] [ literal[string] ]. identifier[get] ( literal[string] ,[]) identifier[lmbdas] =[] keyword[for] identifier[lambda_entry] keyword[in] identifier[lambda_entries] : identifier[lmbda] ={ literal[string] : identifier[lambda_entry] . identifier[get] ( literal[string] , keyword[None] ), literal[string] : identifier[lambda_entry] . identifier[get] ( literal[string] , keyword[None] ), literal[string] : identifier[lambda_entry] . identifier[get] ( literal[string] , keyword[None] ) } keyword[if] identifier[add_arn] : identifier[_sleep] () identifier[response_lambda] = identifier[client_lambda] . identifier[get_function] ( identifier[FunctionName] = identifier[lmbda] [ literal[string] ]) identifier[lmbda] [ literal[string] ]= identifier[response_lambda] [ literal[string] ][ literal[string] ] identifier[lmbdas] . identifier[append] ( identifier[lmbda] ) keyword[return] identifier[lmbdas] keyword[else] : keyword[return] []
def get_lambdas(awsclient, config, add_arn=False): """Get the list of lambda functions. :param config: :param add_arn: :return: list containing lambda entries """ if 'lambda' in config: client_lambda = awsclient.get_client('lambda') lambda_entries = config['lambda'].get('entries', []) lmbdas = [] for lambda_entry in lambda_entries: lmbda = {'name': lambda_entry.get('name', None), 'alias': lambda_entry.get('alias', None), 'swagger_ref': lambda_entry.get('swaggerRef', None)} if add_arn: _sleep() response_lambda = client_lambda.get_function(FunctionName=lmbda['name']) lmbda['arn'] = response_lambda['Configuration']['FunctionArn'] # depends on [control=['if'], data=[]] lmbdas.append(lmbda) # depends on [control=['for'], data=['lambda_entry']] return lmbdas # depends on [control=['if'], data=['config']] else: return []
def variable_declaration(self): """ variable_declaration: 'let' assignment ';' """ self._process(Nature.LET) node = VariableDeclaration(assignment=self.assignment()) self._process(Nature.SEMI) return node
def function[variable_declaration, parameter[self]]: constant[ variable_declaration: 'let' assignment ';' ] call[name[self]._process, parameter[name[Nature].LET]] variable[node] assign[=] call[name[VariableDeclaration], parameter[]] call[name[self]._process, parameter[name[Nature].SEMI]] return[name[node]]
keyword[def] identifier[variable_declaration] ( identifier[self] ): literal[string] identifier[self] . identifier[_process] ( identifier[Nature] . identifier[LET] ) identifier[node] = identifier[VariableDeclaration] ( identifier[assignment] = identifier[self] . identifier[assignment] ()) identifier[self] . identifier[_process] ( identifier[Nature] . identifier[SEMI] ) keyword[return] identifier[node]
def variable_declaration(self): """ variable_declaration: 'let' assignment ';' """ self._process(Nature.LET) node = VariableDeclaration(assignment=self.assignment()) self._process(Nature.SEMI) return node
def order_percent(id_or_ins, percent, price=None, style=None): """ 发送一个花费价值等于目前投资组合(市场价值和目前现金的总和)一定百分比现金的买/卖单,正数代表买,负数代表卖。股票的股数总是会被调整成对应的一手的股票数的倍数(1手是100股)。百分比是一个小数,并且小于或等于1(<=100%),0.5表示的是50%.需要注意,如果资金不足,该API将不会创建发送订单。 需要注意: 发送买单时,percent 代表的是期望买入股票消耗的金额(包含税费)占投资组合总权益的比例。 发送卖单时,percent 代表的是期望卖出的股票总价值占投资组合总权益的比例。 :param id_or_ins: 下单标的物 :type id_or_ins: :class:`~Instrument` object | `str` :param float percent: 占有现有的投资组合价值的百分比。正数表示买入,负数表示卖出。 :param float price: 下单价格,默认为None,表示 :class:`~MarketOrder`, 此参数主要用于简化 `style` 参数。 :param style: 下单类型, 默认是市价单。目前支持的订单类型有 :class:`~LimitOrder` 和 :class:`~MarketOrder` :type style: `OrderStyle` object :return: :class:`~Order` object | None :example: .. code-block:: python #花费等于现有投资组合50%价值的现金买入平安银行股票: order_percent('000001.XSHG', 0.5) """ if percent < -1 or percent > 1: raise RQInvalidArgument(_(u"percent should between -1 and 1")) style = cal_style(price, style) account = Environment.get_instance().portfolio.accounts[DEFAULT_ACCOUNT_TYPE.STOCK.name] return order_value(id_or_ins, account.total_value * percent, style=style)
def function[order_percent, parameter[id_or_ins, percent, price, style]]: constant[ 发送一个花费价值等于目前投资组合(市场价值和目前现金的总和)一定百分比现金的买/卖单,正数代表买,负数代表卖。股票的股数总是会被调整成对应的一手的股票数的倍数(1手是100股)。百分比是一个小数,并且小于或等于1(<=100%),0.5表示的是50%.需要注意,如果资金不足,该API将不会创建发送订单。 需要注意: 发送买单时,percent 代表的是期望买入股票消耗的金额(包含税费)占投资组合总权益的比例。 发送卖单时,percent 代表的是期望卖出的股票总价值占投资组合总权益的比例。 :param id_or_ins: 下单标的物 :type id_or_ins: :class:`~Instrument` object | `str` :param float percent: 占有现有的投资组合价值的百分比。正数表示买入,负数表示卖出。 :param float price: 下单价格,默认为None,表示 :class:`~MarketOrder`, 此参数主要用于简化 `style` 参数。 :param style: 下单类型, 默认是市价单。目前支持的订单类型有 :class:`~LimitOrder` 和 :class:`~MarketOrder` :type style: `OrderStyle` object :return: :class:`~Order` object | None :example: .. code-block:: python #花费等于现有投资组合50%价值的现金买入平安银行股票: order_percent('000001.XSHG', 0.5) ] if <ast.BoolOp object at 0x7da1b211d930> begin[:] <ast.Raise object at 0x7da1b211d360> variable[style] assign[=] call[name[cal_style], parameter[name[price], name[style]]] variable[account] assign[=] call[call[name[Environment].get_instance, parameter[]].portfolio.accounts][name[DEFAULT_ACCOUNT_TYPE].STOCK.name] return[call[name[order_value], parameter[name[id_or_ins], binary_operation[name[account].total_value * name[percent]]]]]
keyword[def] identifier[order_percent] ( identifier[id_or_ins] , identifier[percent] , identifier[price] = keyword[None] , identifier[style] = keyword[None] ): literal[string] keyword[if] identifier[percent] <- literal[int] keyword[or] identifier[percent] > literal[int] : keyword[raise] identifier[RQInvalidArgument] ( identifier[_] ( literal[string] )) identifier[style] = identifier[cal_style] ( identifier[price] , identifier[style] ) identifier[account] = identifier[Environment] . identifier[get_instance] (). identifier[portfolio] . identifier[accounts] [ identifier[DEFAULT_ACCOUNT_TYPE] . identifier[STOCK] . identifier[name] ] keyword[return] identifier[order_value] ( identifier[id_or_ins] , identifier[account] . identifier[total_value] * identifier[percent] , identifier[style] = identifier[style] )
def order_percent(id_or_ins, percent, price=None, style=None): """ 发送一个花费价值等于目前投资组合(市场价值和目前现金的总和)一定百分比现金的买/卖单,正数代表买,负数代表卖。股票的股数总是会被调整成对应的一手的股票数的倍数(1手是100股)。百分比是一个小数,并且小于或等于1(<=100%),0.5表示的是50%.需要注意,如果资金不足,该API将不会创建发送订单。 需要注意: 发送买单时,percent 代表的是期望买入股票消耗的金额(包含税费)占投资组合总权益的比例。 发送卖单时,percent 代表的是期望卖出的股票总价值占投资组合总权益的比例。 :param id_or_ins: 下单标的物 :type id_or_ins: :class:`~Instrument` object | `str` :param float percent: 占有现有的投资组合价值的百分比。正数表示买入,负数表示卖出。 :param float price: 下单价格,默认为None,表示 :class:`~MarketOrder`, 此参数主要用于简化 `style` 参数。 :param style: 下单类型, 默认是市价单。目前支持的订单类型有 :class:`~LimitOrder` 和 :class:`~MarketOrder` :type style: `OrderStyle` object :return: :class:`~Order` object | None :example: .. code-block:: python #花费等于现有投资组合50%价值的现金买入平安银行股票: order_percent('000001.XSHG', 0.5) """ if percent < -1 or percent > 1: raise RQInvalidArgument(_(u'percent should between -1 and 1')) # depends on [control=['if'], data=[]] style = cal_style(price, style) account = Environment.get_instance().portfolio.accounts[DEFAULT_ACCOUNT_TYPE.STOCK.name] return order_value(id_or_ins, account.total_value * percent, style=style)
def _count_counters(self, counter): """Return all elements count from Counter """ if getattr(self, 'as_set', False): return len(set(counter)) else: return sum(counter.values())
def function[_count_counters, parameter[self, counter]]: constant[Return all elements count from Counter ] if call[name[getattr], parameter[name[self], constant[as_set], constant[False]]] begin[:] return[call[name[len], parameter[call[name[set], parameter[name[counter]]]]]]
keyword[def] identifier[_count_counters] ( identifier[self] , identifier[counter] ): literal[string] keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[False] ): keyword[return] identifier[len] ( identifier[set] ( identifier[counter] )) keyword[else] : keyword[return] identifier[sum] ( identifier[counter] . identifier[values] ())
def _count_counters(self, counter): """Return all elements count from Counter """ if getattr(self, 'as_set', False): return len(set(counter)) # depends on [control=['if'], data=[]] else: return sum(counter.values())
def _upload_assets_to_OSF(dlgr_id, osf_id, provider="osfstorage"): """Upload experimental assets to the OSF.""" root = "https://files.osf.io/v1" snapshot_filename = "{}-code.zip".format(dlgr_id) snapshot_path = os.path.join("snapshots", snapshot_filename) r = requests.put( "{}/resources/{}/providers/{}/".format(root, osf_id, provider), params={"kind": "file", "name": snapshot_filename}, headers={ "Authorization": "Bearer {}".format(config.get("osf_access_token")), "Content-Type": "text/plain", }, data=open(snapshot_path, "rb"), ) r.raise_for_status()
def function[_upload_assets_to_OSF, parameter[dlgr_id, osf_id, provider]]: constant[Upload experimental assets to the OSF.] variable[root] assign[=] constant[https://files.osf.io/v1] variable[snapshot_filename] assign[=] call[constant[{}-code.zip].format, parameter[name[dlgr_id]]] variable[snapshot_path] assign[=] call[name[os].path.join, parameter[constant[snapshots], name[snapshot_filename]]] variable[r] assign[=] call[name[requests].put, parameter[call[constant[{}/resources/{}/providers/{}/].format, parameter[name[root], name[osf_id], name[provider]]]]] call[name[r].raise_for_status, parameter[]]
keyword[def] identifier[_upload_assets_to_OSF] ( identifier[dlgr_id] , identifier[osf_id] , identifier[provider] = literal[string] ): literal[string] identifier[root] = literal[string] identifier[snapshot_filename] = literal[string] . identifier[format] ( identifier[dlgr_id] ) identifier[snapshot_path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[snapshot_filename] ) identifier[r] = identifier[requests] . identifier[put] ( literal[string] . identifier[format] ( identifier[root] , identifier[osf_id] , identifier[provider] ), identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[snapshot_filename] }, identifier[headers] ={ literal[string] : literal[string] . identifier[format] ( identifier[config] . identifier[get] ( literal[string] )), literal[string] : literal[string] , }, identifier[data] = identifier[open] ( identifier[snapshot_path] , literal[string] ), ) identifier[r] . identifier[raise_for_status] ()
def _upload_assets_to_OSF(dlgr_id, osf_id, provider='osfstorage'): """Upload experimental assets to the OSF.""" root = 'https://files.osf.io/v1' snapshot_filename = '{}-code.zip'.format(dlgr_id) snapshot_path = os.path.join('snapshots', snapshot_filename) r = requests.put('{}/resources/{}/providers/{}/'.format(root, osf_id, provider), params={'kind': 'file', 'name': snapshot_filename}, headers={'Authorization': 'Bearer {}'.format(config.get('osf_access_token')), 'Content-Type': 'text/plain'}, data=open(snapshot_path, 'rb')) r.raise_for_status()
def get_collection(self, path, query, **options): """Get a collection from a collection endpoint. Parses GET request options for a collection endpoint and dispatches a request. """ options = self._merge_options(options) if options['iterator_type'] == 'items': return CollectionPageIterator(self, path, query, options).items() if options['iterator_type'] is None: return self.get(path, query, **options) raise Exception('Unknown value for "iterator_type" option: {}'.format( str(options['iterator_type'])))
def function[get_collection, parameter[self, path, query]]: constant[Get a collection from a collection endpoint. Parses GET request options for a collection endpoint and dispatches a request. ] variable[options] assign[=] call[name[self]._merge_options, parameter[name[options]]] if compare[call[name[options]][constant[iterator_type]] equal[==] constant[items]] begin[:] return[call[call[name[CollectionPageIterator], parameter[name[self], name[path], name[query], name[options]]].items, parameter[]]] if compare[call[name[options]][constant[iterator_type]] is constant[None]] begin[:] return[call[name[self].get, parameter[name[path], name[query]]]] <ast.Raise object at 0x7da20c76c190>
keyword[def] identifier[get_collection] ( identifier[self] , identifier[path] , identifier[query] ,** identifier[options] ): literal[string] identifier[options] = identifier[self] . identifier[_merge_options] ( identifier[options] ) keyword[if] identifier[options] [ literal[string] ]== literal[string] : keyword[return] identifier[CollectionPageIterator] ( identifier[self] , identifier[path] , identifier[query] , identifier[options] ). identifier[items] () keyword[if] identifier[options] [ literal[string] ] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[get] ( identifier[path] , identifier[query] ,** identifier[options] ) keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[str] ( identifier[options] [ literal[string] ])))
def get_collection(self, path, query, **options): """Get a collection from a collection endpoint. Parses GET request options for a collection endpoint and dispatches a request. """ options = self._merge_options(options) if options['iterator_type'] == 'items': return CollectionPageIterator(self, path, query, options).items() # depends on [control=['if'], data=[]] if options['iterator_type'] is None: return self.get(path, query, **options) # depends on [control=['if'], data=[]] raise Exception('Unknown value for "iterator_type" option: {}'.format(str(options['iterator_type'])))
def replace_table_rate_rule_by_id(cls, table_rate_rule_id, table_rate_rule, **kwargs): """Replace TableRateRule Replace all attributes of TableRateRule This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_table_rate_rule_by_id(table_rate_rule_id, table_rate_rule, async=True) >>> result = thread.get() :param async bool :param str table_rate_rule_id: ID of tableRateRule to replace (required) :param TableRateRule table_rate_rule: Attributes of tableRateRule to replace (required) :return: TableRateRule If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._replace_table_rate_rule_by_id_with_http_info(table_rate_rule_id, table_rate_rule, **kwargs) else: (data) = cls._replace_table_rate_rule_by_id_with_http_info(table_rate_rule_id, table_rate_rule, **kwargs) return data
def function[replace_table_rate_rule_by_id, parameter[cls, table_rate_rule_id, table_rate_rule]]: constant[Replace TableRateRule Replace all attributes of TableRateRule This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_table_rate_rule_by_id(table_rate_rule_id, table_rate_rule, async=True) >>> result = thread.get() :param async bool :param str table_rate_rule_id: ID of tableRateRule to replace (required) :param TableRateRule table_rate_rule: Attributes of tableRateRule to replace (required) :return: TableRateRule If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async]]] begin[:] return[call[name[cls]._replace_table_rate_rule_by_id_with_http_info, parameter[name[table_rate_rule_id], name[table_rate_rule]]]]
keyword[def] identifier[replace_table_rate_rule_by_id] ( identifier[cls] , identifier[table_rate_rule_id] , identifier[table_rate_rule] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[cls] . identifier[_replace_table_rate_rule_by_id_with_http_info] ( identifier[table_rate_rule_id] , identifier[table_rate_rule] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[cls] . identifier[_replace_table_rate_rule_by_id_with_http_info] ( identifier[table_rate_rule_id] , identifier[table_rate_rule] ,** identifier[kwargs] ) keyword[return] identifier[data]
def replace_table_rate_rule_by_id(cls, table_rate_rule_id, table_rate_rule, **kwargs): """Replace TableRateRule Replace all attributes of TableRateRule This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_table_rate_rule_by_id(table_rate_rule_id, table_rate_rule, async=True) >>> result = thread.get() :param async bool :param str table_rate_rule_id: ID of tableRateRule to replace (required) :param TableRateRule table_rate_rule: Attributes of tableRateRule to replace (required) :return: TableRateRule If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._replace_table_rate_rule_by_id_with_http_info(table_rate_rule_id, table_rate_rule, **kwargs) # depends on [control=['if'], data=[]] else: data = cls._replace_table_rate_rule_by_id_with_http_info(table_rate_rule_id, table_rate_rule, **kwargs) return data
def set_sources(self, sources): """ Creates GeocodeServiceConfigs from each str source """ if len(sources) == 0: raise Exception('Must declare at least one source for a geocoder') self._sources = [] for source in sources: # iterate through a list of sources self.add_source(source)
def function[set_sources, parameter[self, sources]]: constant[ Creates GeocodeServiceConfigs from each str source ] if compare[call[name[len], parameter[name[sources]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18dc07bb0> name[self]._sources assign[=] list[[]] for taget[name[source]] in starred[name[sources]] begin[:] call[name[self].add_source, parameter[name[source]]]
keyword[def] identifier[set_sources] ( identifier[self] , identifier[sources] ): literal[string] keyword[if] identifier[len] ( identifier[sources] )== literal[int] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[self] . identifier[_sources] =[] keyword[for] identifier[source] keyword[in] identifier[sources] : identifier[self] . identifier[add_source] ( identifier[source] )
def set_sources(self, sources): """ Creates GeocodeServiceConfigs from each str source """ if len(sources) == 0: raise Exception('Must declare at least one source for a geocoder') # depends on [control=['if'], data=[]] self._sources = [] for source in sources: # iterate through a list of sources self.add_source(source) # depends on [control=['for'], data=['source']]
def file_search(self, query, offset=None, timeout=None): """ Search for samples. In addition to retrieving all information on a particular file, VirusTotal allows you to perform what we call "advanced reverse searches". Reverse searches take you from a file property to a list of files that match that property. For example, this functionality enables you to retrieve all those files marked by at least one antivirus vendor as Zbot, or all those files that have a size under 90KB and are detected by at least 10 antivirus solutions, or all those PDF files that have an invalid XREF section, etc. This API is equivalent to VirusTotal Intelligence advanced searches. A very wide variety of search modifiers are available, including: file size, file type, first submission date to VirusTotal, last submission date to VirusTotal, number of positives, dynamic behavioural properties, binary content, submission file name, and a very long etcetera. The full list of search modifiers allowed for file search queries is documented at: https://www.virustotal.com/intelligence/help/file-search/#search-modifiers NOTE: Daily limited! No matter what API step you have licensed, this API call is limited to 50K requests per day. If you need any more, chances are you are approaching your engineering problem erroneously and you can probably solve it using the file distribution call. Do not hesitate to contact us with your particular use case. EXAMPLE: search_options = 'type:peexe size:90kb+ positives:5+ behaviour:"taskkill"' :param query: A search modifier compliant file search query. :param offset: (optional) The offset value returned by a previously issued identical query, allows you to paginate over the results. If not specified the first 300 matching files sorted according to last submission date to VirusTotal in a descending fashion will be returned. :param timeout: The amount of time in seconds the request should wait before timing out. 
:return: JSON response - By default the list returned contains at most 300 hashes, ordered according to last submission date to VirusTotal in a descending fashion. """ params = dict(apikey=self.api_key, query=query, offset=offset) try: response = requests.get(self.base + 'file/search', params=params, proxies=self.proxies, timeout=timeout) except requests.RequestException as e: return dict(error=str(e)) return _return_response_and_status_code(response)
def function[file_search, parameter[self, query, offset, timeout]]: constant[ Search for samples. In addition to retrieving all information on a particular file, VirusTotal allows you to perform what we call "advanced reverse searches". Reverse searches take you from a file property to a list of files that match that property. For example, this functionality enables you to retrieve all those files marked by at least one antivirus vendor as Zbot, or all those files that have a size under 90KB and are detected by at least 10 antivirus solutions, or all those PDF files that have an invalid XREF section, etc. This API is equivalent to VirusTotal Intelligence advanced searches. A very wide variety of search modifiers are available, including: file size, file type, first submission date to VirusTotal, last submission date to VirusTotal, number of positives, dynamic behavioural properties, binary content, submission file name, and a very long etcetera. The full list of search modifiers allowed for file search queries is documented at: https://www.virustotal.com/intelligence/help/file-search/#search-modifiers NOTE: Daily limited! No matter what API step you have licensed, this API call is limited to 50K requests per day. If you need any more, chances are you are approaching your engineering problem erroneously and you can probably solve it using the file distribution call. Do not hesitate to contact us with your particular use case. EXAMPLE: search_options = 'type:peexe size:90kb+ positives:5+ behaviour:"taskkill"' :param query: A search modifier compliant file search query. :param offset: (optional) The offset value returned by a previously issued identical query, allows you to paginate over the results. If not specified the first 300 matching files sorted according to last submission date to VirusTotal in a descending fashion will be returned. :param timeout: The amount of time in seconds the request should wait before timing out. 
:return: JSON response - By default the list returned contains at most 300 hashes, ordered according to last submission date to VirusTotal in a descending fashion. ] variable[params] assign[=] call[name[dict], parameter[]] <ast.Try object at 0x7da1b0ffbac0> return[call[name[_return_response_and_status_code], parameter[name[response]]]]
keyword[def] identifier[file_search] ( identifier[self] , identifier[query] , identifier[offset] = keyword[None] , identifier[timeout] = keyword[None] ): literal[string] identifier[params] = identifier[dict] ( identifier[apikey] = identifier[self] . identifier[api_key] , identifier[query] = identifier[query] , identifier[offset] = identifier[offset] ) keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[self] . identifier[base] + literal[string] , identifier[params] = identifier[params] , identifier[proxies] = identifier[self] . identifier[proxies] , identifier[timeout] = identifier[timeout] ) keyword[except] identifier[requests] . identifier[RequestException] keyword[as] identifier[e] : keyword[return] identifier[dict] ( identifier[error] = identifier[str] ( identifier[e] )) keyword[return] identifier[_return_response_and_status_code] ( identifier[response] )
def file_search(self, query, offset=None, timeout=None): """ Search for samples. In addition to retrieving all information on a particular file, VirusTotal allows you to perform what we call "advanced reverse searches". Reverse searches take you from a file property to a list of files that match that property. For example, this functionality enables you to retrieve all those files marked by at least one antivirus vendor as Zbot, or all those files that have a size under 90KB and are detected by at least 10 antivirus solutions, or all those PDF files that have an invalid XREF section, etc. This API is equivalent to VirusTotal Intelligence advanced searches. A very wide variety of search modifiers are available, including: file size, file type, first submission date to VirusTotal, last submission date to VirusTotal, number of positives, dynamic behavioural properties, binary content, submission file name, and a very long etcetera. The full list of search modifiers allowed for file search queries is documented at: https://www.virustotal.com/intelligence/help/file-search/#search-modifiers NOTE: Daily limited! No matter what API step you have licensed, this API call is limited to 50K requests per day. If you need any more, chances are you are approaching your engineering problem erroneously and you can probably solve it using the file distribution call. Do not hesitate to contact us with your particular use case. EXAMPLE: search_options = 'type:peexe size:90kb+ positives:5+ behaviour:"taskkill"' :param query: A search modifier compliant file search query. :param offset: (optional) The offset value returned by a previously issued identical query, allows you to paginate over the results. If not specified the first 300 matching files sorted according to last submission date to VirusTotal in a descending fashion will be returned. :param timeout: The amount of time in seconds the request should wait before timing out. 
:return: JSON response - By default the list returned contains at most 300 hashes, ordered according to last submission date to VirusTotal in a descending fashion. """ params = dict(apikey=self.api_key, query=query, offset=offset) try: response = requests.get(self.base + 'file/search', params=params, proxies=self.proxies, timeout=timeout) # depends on [control=['try'], data=[]] except requests.RequestException as e: return dict(error=str(e)) # depends on [control=['except'], data=['e']] return _return_response_and_status_code(response)
def execute(self, request, *args, **path_args): # type: (BaseHttpRequest, tuple, Dict[Any]) -> Any """ Execute the callback (binding callback if required) """ binding = self.binding if binding: # Provide binding as decorators are executed prior to binding return self.callback(binding, request, *args, **path_args) else: return self.callback(request, *args, **path_args)
def function[execute, parameter[self, request]]: constant[ Execute the callback (binding callback if required) ] variable[binding] assign[=] name[self].binding if name[binding] begin[:] return[call[name[self].callback, parameter[name[binding], name[request], <ast.Starred object at 0x7da2044c1720>]]]
keyword[def] identifier[execute] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[path_args] ): literal[string] identifier[binding] = identifier[self] . identifier[binding] keyword[if] identifier[binding] : keyword[return] identifier[self] . identifier[callback] ( identifier[binding] , identifier[request] ,* identifier[args] ,** identifier[path_args] ) keyword[else] : keyword[return] identifier[self] . identifier[callback] ( identifier[request] ,* identifier[args] ,** identifier[path_args] )
def execute(self, request, *args, **path_args): # type: (BaseHttpRequest, tuple, Dict[Any]) -> Any '\n Execute the callback (binding callback if required)\n ' binding = self.binding if binding: # Provide binding as decorators are executed prior to binding return self.callback(binding, request, *args, **path_args) # depends on [control=['if'], data=[]] else: return self.callback(request, *args, **path_args)
def reverse_path(dict_, root, child_to_parents): """ CommandLine: python -m utool.util_graph --exec-reverse_path --show Example: >>> # ENABLE_DOCTEST >>> from utool.util_graph import * # NOQA >>> import utool as ut >>> child_to_parents = { >>> 'chip': ['dummy_annot'], >>> 'chipmask': ['dummy_annot'], >>> 'descriptor': ['keypoint'], >>> 'fgweight': ['keypoint', 'probchip'], >>> 'keypoint': ['chip'], >>> 'notch': ['dummy_annot'], >>> 'probchip': ['dummy_annot'], >>> 'spam': ['fgweight', 'chip', 'keypoint'] >>> } >>> to_root = { >>> 'fgweight': { >>> 'keypoint': { >>> 'chip': { >>> 'dummy_annot': None, >>> }, >>> }, >>> 'probchip': { >>> 'dummy_annot': None, >>> }, >>> }, >>> } >>> reversed_ = reverse_path(to_root, 'dummy_annot', child_to_parents) >>> result = ut.repr3(reversed_) >>> print(result) { 'dummy_annot': { 'chip': { 'keypoint': { 'fgweight': None, }, }, 'probchip': { 'fgweight': None, }, }, } """ # Hacky but illustrative # TODO; implement non-hacky version allkeys = get_allkeys(dict_) mat = np.zeros((len(allkeys), len(allkeys))) for key in allkeys: if key != root: for parent in child_to_parents[key]: rx = allkeys.index(parent) cx = allkeys.index(key) mat[rx][cx] = 1 end = None seen_ = set([]) reversed_ = {root: traverse_path(root, end, seen_, allkeys, mat)} return reversed_
def function[reverse_path, parameter[dict_, root, child_to_parents]]: constant[ CommandLine: python -m utool.util_graph --exec-reverse_path --show Example: >>> # ENABLE_DOCTEST >>> from utool.util_graph import * # NOQA >>> import utool as ut >>> child_to_parents = { >>> 'chip': ['dummy_annot'], >>> 'chipmask': ['dummy_annot'], >>> 'descriptor': ['keypoint'], >>> 'fgweight': ['keypoint', 'probchip'], >>> 'keypoint': ['chip'], >>> 'notch': ['dummy_annot'], >>> 'probchip': ['dummy_annot'], >>> 'spam': ['fgweight', 'chip', 'keypoint'] >>> } >>> to_root = { >>> 'fgweight': { >>> 'keypoint': { >>> 'chip': { >>> 'dummy_annot': None, >>> }, >>> }, >>> 'probchip': { >>> 'dummy_annot': None, >>> }, >>> }, >>> } >>> reversed_ = reverse_path(to_root, 'dummy_annot', child_to_parents) >>> result = ut.repr3(reversed_) >>> print(result) { 'dummy_annot': { 'chip': { 'keypoint': { 'fgweight': None, }, }, 'probchip': { 'fgweight': None, }, }, } ] variable[allkeys] assign[=] call[name[get_allkeys], parameter[name[dict_]]] variable[mat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da1b23693f0>, <ast.Call object at 0x7da1b2368e20>]]]] for taget[name[key]] in starred[name[allkeys]] begin[:] if compare[name[key] not_equal[!=] name[root]] begin[:] for taget[name[parent]] in starred[call[name[child_to_parents]][name[key]]] begin[:] variable[rx] assign[=] call[name[allkeys].index, parameter[name[parent]]] variable[cx] assign[=] call[name[allkeys].index, parameter[name[key]]] call[call[name[mat]][name[rx]]][name[cx]] assign[=] constant[1] variable[end] assign[=] constant[None] variable[seen_] assign[=] call[name[set], parameter[list[[]]]] variable[reversed_] assign[=] dictionary[[<ast.Name object at 0x7da1b236af80>], [<ast.Call object at 0x7da1b236ab90>]] return[name[reversed_]]
keyword[def] identifier[reverse_path] ( identifier[dict_] , identifier[root] , identifier[child_to_parents] ): literal[string] identifier[allkeys] = identifier[get_allkeys] ( identifier[dict_] ) identifier[mat] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[allkeys] ), identifier[len] ( identifier[allkeys] ))) keyword[for] identifier[key] keyword[in] identifier[allkeys] : keyword[if] identifier[key] != identifier[root] : keyword[for] identifier[parent] keyword[in] identifier[child_to_parents] [ identifier[key] ]: identifier[rx] = identifier[allkeys] . identifier[index] ( identifier[parent] ) identifier[cx] = identifier[allkeys] . identifier[index] ( identifier[key] ) identifier[mat] [ identifier[rx] ][ identifier[cx] ]= literal[int] identifier[end] = keyword[None] identifier[seen_] = identifier[set] ([]) identifier[reversed_] ={ identifier[root] : identifier[traverse_path] ( identifier[root] , identifier[end] , identifier[seen_] , identifier[allkeys] , identifier[mat] )} keyword[return] identifier[reversed_]
def reverse_path(dict_, root, child_to_parents): """ CommandLine: python -m utool.util_graph --exec-reverse_path --show Example: >>> # ENABLE_DOCTEST >>> from utool.util_graph import * # NOQA >>> import utool as ut >>> child_to_parents = { >>> 'chip': ['dummy_annot'], >>> 'chipmask': ['dummy_annot'], >>> 'descriptor': ['keypoint'], >>> 'fgweight': ['keypoint', 'probchip'], >>> 'keypoint': ['chip'], >>> 'notch': ['dummy_annot'], >>> 'probchip': ['dummy_annot'], >>> 'spam': ['fgweight', 'chip', 'keypoint'] >>> } >>> to_root = { >>> 'fgweight': { >>> 'keypoint': { >>> 'chip': { >>> 'dummy_annot': None, >>> }, >>> }, >>> 'probchip': { >>> 'dummy_annot': None, >>> }, >>> }, >>> } >>> reversed_ = reverse_path(to_root, 'dummy_annot', child_to_parents) >>> result = ut.repr3(reversed_) >>> print(result) { 'dummy_annot': { 'chip': { 'keypoint': { 'fgweight': None, }, }, 'probchip': { 'fgweight': None, }, }, } """ # Hacky but illustrative # TODO; implement non-hacky version allkeys = get_allkeys(dict_) mat = np.zeros((len(allkeys), len(allkeys))) for key in allkeys: if key != root: for parent in child_to_parents[key]: rx = allkeys.index(parent) cx = allkeys.index(key) mat[rx][cx] = 1 # depends on [control=['for'], data=['parent']] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] end = None seen_ = set([]) reversed_ = {root: traverse_path(root, end, seen_, allkeys, mat)} return reversed_
def get_instance(self, payload): """ Build an instance of AssistantFallbackActionsInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.understand.assistant.assistant_fallback_actions.AssistantFallbackActionsInstance :rtype: twilio.rest.preview.understand.assistant.assistant_fallback_actions.AssistantFallbackActionsInstance """ return AssistantFallbackActionsInstance( self._version, payload, assistant_sid=self._solution['assistant_sid'], )
def function[get_instance, parameter[self, payload]]: constant[ Build an instance of AssistantFallbackActionsInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.understand.assistant.assistant_fallback_actions.AssistantFallbackActionsInstance :rtype: twilio.rest.preview.understand.assistant.assistant_fallback_actions.AssistantFallbackActionsInstance ] return[call[name[AssistantFallbackActionsInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ): literal[string] keyword[return] identifier[AssistantFallbackActionsInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[assistant_sid] = identifier[self] . identifier[_solution] [ literal[string] ], )
def get_instance(self, payload): """ Build an instance of AssistantFallbackActionsInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.understand.assistant.assistant_fallback_actions.AssistantFallbackActionsInstance :rtype: twilio.rest.preview.understand.assistant.assistant_fallback_actions.AssistantFallbackActionsInstance """ return AssistantFallbackActionsInstance(self._version, payload, assistant_sid=self._solution['assistant_sid'])
def interruptWrite(self, endpoint, data, timeout=0): """ Synchronous interrupt write. endpoint: endpoint to send data to. data: data to send. timeout: in milliseconds, how long to wait for device acknowledgement. Set to 0 to disable. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns the number of bytes actually sent. May raise an exception from the USBError family. USBErrorTimeout exception has a "transferred" property giving the number of bytes sent up to the timeout. """ # pylint: disable=undefined-variable endpoint = (endpoint & ~ENDPOINT_DIR_MASK) | ENDPOINT_OUT # pylint: enable=undefined-variable data, _ = create_initialised_buffer(data) return self._interruptTransfer(endpoint, data, sizeof(data), timeout)
def function[interruptWrite, parameter[self, endpoint, data, timeout]]: constant[ Synchronous interrupt write. endpoint: endpoint to send data to. data: data to send. timeout: in milliseconds, how long to wait for device acknowledgement. Set to 0 to disable. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns the number of bytes actually sent. May raise an exception from the USBError family. USBErrorTimeout exception has a "transferred" property giving the number of bytes sent up to the timeout. ] variable[endpoint] assign[=] binary_operation[binary_operation[name[endpoint] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b0880850>] <ast.BitOr object at 0x7da2590d6aa0> name[ENDPOINT_OUT]] <ast.Tuple object at 0x7da1b0883280> assign[=] call[name[create_initialised_buffer], parameter[name[data]]] return[call[name[self]._interruptTransfer, parameter[name[endpoint], name[data], call[name[sizeof], parameter[name[data]]], name[timeout]]]]
keyword[def] identifier[interruptWrite] ( identifier[self] , identifier[endpoint] , identifier[data] , identifier[timeout] = literal[int] ): literal[string] identifier[endpoint] =( identifier[endpoint] &~ identifier[ENDPOINT_DIR_MASK] )| identifier[ENDPOINT_OUT] identifier[data] , identifier[_] = identifier[create_initialised_buffer] ( identifier[data] ) keyword[return] identifier[self] . identifier[_interruptTransfer] ( identifier[endpoint] , identifier[data] , identifier[sizeof] ( identifier[data] ), identifier[timeout] )
def interruptWrite(self, endpoint, data, timeout=0): """ Synchronous interrupt write. endpoint: endpoint to send data to. data: data to send. timeout: in milliseconds, how long to wait for device acknowledgement. Set to 0 to disable. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns the number of bytes actually sent. May raise an exception from the USBError family. USBErrorTimeout exception has a "transferred" property giving the number of bytes sent up to the timeout. """ # pylint: disable=undefined-variable endpoint = endpoint & ~ENDPOINT_DIR_MASK | ENDPOINT_OUT # pylint: enable=undefined-variable (data, _) = create_initialised_buffer(data) return self._interruptTransfer(endpoint, data, sizeof(data), timeout)
def tangent(x, null=(-np.inf, np.inf), rtol=default_rtol, atol=default_atol): ''' tangent(x) is equivalent to tan(x) except that it also works on sparse arrays. The optional argument null (default, (-numpy.inf, numpy.inf)) may be specified to indicate what value(s) should be assigned when x == -pi/2 or -pi/2. If only one number is given, then it is used for both values; otherwise the first value corresponds to -pi/2 and the second to pi/2. A value of x is considered to be equal to one of these valids based on numpy.isclose. The optional arguments rtol and atol are passed along to isclose. If null is None, then no replacement is performed. ''' if sps.issparse(x): x = x.copy() x.data = tangent(x.data, null=null, rtol=rtol, atol=atol) return x else: x = np.asarray(x) if rtol is None: rtol = default_rtol if atol is None: atol = default_atol try: (nln,nlp) = null except Exception: (nln,nlp) = (null,null) x = np.mod(x + pi, tau) - pi ii = None if nln is None else np.where(np.isclose(x, neghpi, rtol=rtol, atol=atol)) jj = None if nlp is None else np.where(np.isclose(x, hpi, rtol=rtol, atol=atol)) x = np.tan(x) if ii: x[ii] = nln if jj: x[jj] = nlp return x
def function[tangent, parameter[x, null, rtol, atol]]: constant[ tangent(x) is equivalent to tan(x) except that it also works on sparse arrays. The optional argument null (default, (-numpy.inf, numpy.inf)) may be specified to indicate what value(s) should be assigned when x == -pi/2 or -pi/2. If only one number is given, then it is used for both values; otherwise the first value corresponds to -pi/2 and the second to pi/2. A value of x is considered to be equal to one of these valids based on numpy.isclose. The optional arguments rtol and atol are passed along to isclose. If null is None, then no replacement is performed. ] if call[name[sps].issparse, parameter[name[x]]] begin[:] variable[x] assign[=] call[name[x].copy, parameter[]] name[x].data assign[=] call[name[tangent], parameter[name[x].data]] return[name[x]] if compare[name[rtol] is constant[None]] begin[:] variable[rtol] assign[=] name[default_rtol] if compare[name[atol] is constant[None]] begin[:] variable[atol] assign[=] name[default_atol] <ast.Try object at 0x7da18f09d6c0> variable[x] assign[=] binary_operation[call[name[np].mod, parameter[binary_operation[name[x] + name[pi]], name[tau]]] - name[pi]] variable[ii] assign[=] <ast.IfExp object at 0x7da1b0b478b0> variable[jj] assign[=] <ast.IfExp object at 0x7da1b0b45d20> variable[x] assign[=] call[name[np].tan, parameter[name[x]]] if name[ii] begin[:] call[name[x]][name[ii]] assign[=] name[nln] if name[jj] begin[:] call[name[x]][name[jj]] assign[=] name[nlp] return[name[x]]
keyword[def] identifier[tangent] ( identifier[x] , identifier[null] =(- identifier[np] . identifier[inf] , identifier[np] . identifier[inf] ), identifier[rtol] = identifier[default_rtol] , identifier[atol] = identifier[default_atol] ): literal[string] keyword[if] identifier[sps] . identifier[issparse] ( identifier[x] ): identifier[x] = identifier[x] . identifier[copy] () identifier[x] . identifier[data] = identifier[tangent] ( identifier[x] . identifier[data] , identifier[null] = identifier[null] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] ) keyword[return] identifier[x] keyword[else] : identifier[x] = identifier[np] . identifier[asarray] ( identifier[x] ) keyword[if] identifier[rtol] keyword[is] keyword[None] : identifier[rtol] = identifier[default_rtol] keyword[if] identifier[atol] keyword[is] keyword[None] : identifier[atol] = identifier[default_atol] keyword[try] :( identifier[nln] , identifier[nlp] )= identifier[null] keyword[except] identifier[Exception] :( identifier[nln] , identifier[nlp] )=( identifier[null] , identifier[null] ) identifier[x] = identifier[np] . identifier[mod] ( identifier[x] + identifier[pi] , identifier[tau] )- identifier[pi] identifier[ii] = keyword[None] keyword[if] identifier[nln] keyword[is] keyword[None] keyword[else] identifier[np] . identifier[where] ( identifier[np] . identifier[isclose] ( identifier[x] , identifier[neghpi] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] )) identifier[jj] = keyword[None] keyword[if] identifier[nlp] keyword[is] keyword[None] keyword[else] identifier[np] . identifier[where] ( identifier[np] . identifier[isclose] ( identifier[x] , identifier[hpi] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] )) identifier[x] = identifier[np] . 
identifier[tan] ( identifier[x] ) keyword[if] identifier[ii] : identifier[x] [ identifier[ii] ]= identifier[nln] keyword[if] identifier[jj] : identifier[x] [ identifier[jj] ]= identifier[nlp] keyword[return] identifier[x]
def tangent(x, null=(-np.inf, np.inf), rtol=default_rtol, atol=default_atol): """ tangent(x) is equivalent to tan(x) except that it also works on sparse arrays. The optional argument null (default, (-numpy.inf, numpy.inf)) may be specified to indicate what value(s) should be assigned when x == -pi/2 or -pi/2. If only one number is given, then it is used for both values; otherwise the first value corresponds to -pi/2 and the second to pi/2. A value of x is considered to be equal to one of these valids based on numpy.isclose. The optional arguments rtol and atol are passed along to isclose. If null is None, then no replacement is performed. """ if sps.issparse(x): x = x.copy() x.data = tangent(x.data, null=null, rtol=rtol, atol=atol) return x # depends on [control=['if'], data=[]] else: x = np.asarray(x) if rtol is None: rtol = default_rtol # depends on [control=['if'], data=['rtol']] if atol is None: atol = default_atol # depends on [control=['if'], data=['atol']] try: (nln, nlp) = null # depends on [control=['try'], data=[]] except Exception: (nln, nlp) = (null, null) # depends on [control=['except'], data=[]] x = np.mod(x + pi, tau) - pi ii = None if nln is None else np.where(np.isclose(x, neghpi, rtol=rtol, atol=atol)) jj = None if nlp is None else np.where(np.isclose(x, hpi, rtol=rtol, atol=atol)) x = np.tan(x) if ii: x[ii] = nln # depends on [control=['if'], data=[]] if jj: x[jj] = nlp # depends on [control=['if'], data=[]] return x
def clear_file(self): """stub""" if (self.get_file_metadata().is_read_only() or self.get_file_metadata().is_required()): raise NoAccess() if 'assetId' in self.my_osid_object_form._my_map['fileId']: rm = self.my_osid_object_form._get_provider_manager('REPOSITORY') catalog_id_str = '' if 'assignedBankIds' in self.my_osid_object_form._my_map: catalog_id_str = self.my_osid_object_form._my_map['assignedBankIds'][0] elif 'assignedRepositoryIds' in self.my_osid_object_form._my_map: catalog_id_str = self.my_osid_object_form._my_map['assignedRepositoryIds'][0] try: try: aas = rm.get_asset_admin_session_for_repository( Id(catalog_id_str), self.my_osid_object_form._proxy) except NullArgument: aas = rm.get_asset_admin_session_for_repository( Id(catalog_id_str)) except AttributeError: # for update forms try: aas = rm.get_asset_admin_session_for_repository( Id(catalog_id_str), self.my_osid_object_form._proxy) except NullArgument: aas = rm.get_asset_admin_session_for_repository( Id(catalog_id_str)) aas.delete_asset(Id(self.my_osid_object_form._my_map['fileId']['assetId'])) self.my_osid_object_form._my_map['fileId'] = \ dict(self.get_file_metadata().get_default_object_values()[0])
def function[clear_file, parameter[self]]: constant[stub] if <ast.BoolOp object at 0x7da20c6e77f0> begin[:] <ast.Raise object at 0x7da20c6e4670> if compare[constant[assetId] in call[name[self].my_osid_object_form._my_map][constant[fileId]]] begin[:] variable[rm] assign[=] call[name[self].my_osid_object_form._get_provider_manager, parameter[constant[REPOSITORY]]] variable[catalog_id_str] assign[=] constant[] if compare[constant[assignedBankIds] in name[self].my_osid_object_form._my_map] begin[:] variable[catalog_id_str] assign[=] call[call[name[self].my_osid_object_form._my_map][constant[assignedBankIds]]][constant[0]] <ast.Try object at 0x7da20c6e4b50> call[name[aas].delete_asset, parameter[call[name[Id], parameter[call[call[name[self].my_osid_object_form._my_map][constant[fileId]]][constant[assetId]]]]]] call[name[self].my_osid_object_form._my_map][constant[fileId]] assign[=] call[name[dict], parameter[call[call[call[name[self].get_file_metadata, parameter[]].get_default_object_values, parameter[]]][constant[0]]]]
keyword[def] identifier[clear_file] ( identifier[self] ): literal[string] keyword[if] ( identifier[self] . identifier[get_file_metadata] (). identifier[is_read_only] () keyword[or] identifier[self] . identifier[get_file_metadata] (). identifier[is_required] ()): keyword[raise] identifier[NoAccess] () keyword[if] literal[string] keyword[in] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]: identifier[rm] = identifier[self] . identifier[my_osid_object_form] . identifier[_get_provider_manager] ( literal[string] ) identifier[catalog_id_str] = literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] : identifier[catalog_id_str] = identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ][ literal[int] ] keyword[elif] literal[string] keyword[in] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] : identifier[catalog_id_str] = identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ][ literal[int] ] keyword[try] : keyword[try] : identifier[aas] = identifier[rm] . identifier[get_asset_admin_session_for_repository] ( identifier[Id] ( identifier[catalog_id_str] ), identifier[self] . identifier[my_osid_object_form] . identifier[_proxy] ) keyword[except] identifier[NullArgument] : identifier[aas] = identifier[rm] . identifier[get_asset_admin_session_for_repository] ( identifier[Id] ( identifier[catalog_id_str] )) keyword[except] identifier[AttributeError] : keyword[try] : identifier[aas] = identifier[rm] . identifier[get_asset_admin_session_for_repository] ( identifier[Id] ( identifier[catalog_id_str] ), identifier[self] . identifier[my_osid_object_form] . identifier[_proxy] ) keyword[except] identifier[NullArgument] : identifier[aas] = identifier[rm] . identifier[get_asset_admin_session_for_repository] ( identifier[Id] ( identifier[catalog_id_str] )) identifier[aas] . 
identifier[delete_asset] ( identifier[Id] ( identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ][ literal[string] ])) identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[dict] ( identifier[self] . identifier[get_file_metadata] (). identifier[get_default_object_values] ()[ literal[int] ])
def clear_file(self): """stub""" if self.get_file_metadata().is_read_only() or self.get_file_metadata().is_required(): raise NoAccess() # depends on [control=['if'], data=[]] if 'assetId' in self.my_osid_object_form._my_map['fileId']: rm = self.my_osid_object_form._get_provider_manager('REPOSITORY') catalog_id_str = '' if 'assignedBankIds' in self.my_osid_object_form._my_map: catalog_id_str = self.my_osid_object_form._my_map['assignedBankIds'][0] # depends on [control=['if'], data=[]] elif 'assignedRepositoryIds' in self.my_osid_object_form._my_map: catalog_id_str = self.my_osid_object_form._my_map['assignedRepositoryIds'][0] # depends on [control=['if'], data=[]] try: try: aas = rm.get_asset_admin_session_for_repository(Id(catalog_id_str), self.my_osid_object_form._proxy) # depends on [control=['try'], data=[]] except NullArgument: aas = rm.get_asset_admin_session_for_repository(Id(catalog_id_str)) # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]] except AttributeError: # for update forms try: aas = rm.get_asset_admin_session_for_repository(Id(catalog_id_str), self.my_osid_object_form._proxy) # depends on [control=['try'], data=[]] except NullArgument: aas = rm.get_asset_admin_session_for_repository(Id(catalog_id_str)) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] aas.delete_asset(Id(self.my_osid_object_form._my_map['fileId']['assetId'])) # depends on [control=['if'], data=[]] self.my_osid_object_form._my_map['fileId'] = dict(self.get_file_metadata().get_default_object_values()[0])
def _fill_hidden_singles(self): """Look for hidden singles, i.e. cells with only one unique possible value in row, column or box. :return: If any Hidden Single has been found. :rtype: bool """ for i in utils.range_(self.side): box_i = (i // self.order) * self.order for j in utils.range_(self.side): box_j = (j // self.order) * self.order # Skip if this cell is determined already. if self[i][j] > 0: continue # Look for hidden single in rows. p = self._possibles[i][j] for k in utils.range_(self.side): if k == j: continue p = p.difference(self._possibles[i][k]) if len(p) == 1: # Found a hidden single in a row! self.set_cell(i, j, p.pop()) self.solution_steps.append(self._format_step("HIDDEN-ROW", (i, j), self[i][j])) return True # Look for hidden single in columns p = self._possibles[i][j] for k in utils.range_(self.side): if k == i: continue p = p.difference(self._possibles[k][j]) if len(p) == 1: # Found a hidden single in a column! self.set_cell(i, j, p.pop()) self.solution_steps.append(self._format_step("HIDDEN-COL", (i, j), self[i][j])) return True # Look for hidden single in box p = self._possibles[i][j] for k in utils.range_(box_i, box_i + self.order): for kk in utils.range_(box_j, box_j + self.order): if k == i and kk == j: continue p = p.difference(self._possibles[k][kk]) if len(p) == 1: # Found a hidden single in a box! self.set_cell(i, j, p.pop()) self.solution_steps.append(self._format_step("HIDDEN-BOX", (i, j), self[i][j])) return True return False
def function[_fill_hidden_singles, parameter[self]]: constant[Look for hidden singles, i.e. cells with only one unique possible value in row, column or box. :return: If any Hidden Single has been found. :rtype: bool ] for taget[name[i]] in starred[call[name[utils].range_, parameter[name[self].side]]] begin[:] variable[box_i] assign[=] binary_operation[binary_operation[name[i] <ast.FloorDiv object at 0x7da2590d6bc0> name[self].order] * name[self].order] for taget[name[j]] in starred[call[name[utils].range_, parameter[name[self].side]]] begin[:] variable[box_j] assign[=] binary_operation[binary_operation[name[j] <ast.FloorDiv object at 0x7da2590d6bc0> name[self].order] * name[self].order] if compare[call[call[name[self]][name[i]]][name[j]] greater[>] constant[0]] begin[:] continue variable[p] assign[=] call[call[name[self]._possibles][name[i]]][name[j]] for taget[name[k]] in starred[call[name[utils].range_, parameter[name[self].side]]] begin[:] if compare[name[k] equal[==] name[j]] begin[:] continue variable[p] assign[=] call[name[p].difference, parameter[call[call[name[self]._possibles][name[i]]][name[k]]]] if compare[call[name[len], parameter[name[p]]] equal[==] constant[1]] begin[:] call[name[self].set_cell, parameter[name[i], name[j], call[name[p].pop, parameter[]]]] call[name[self].solution_steps.append, parameter[call[name[self]._format_step, parameter[constant[HIDDEN-ROW], tuple[[<ast.Name object at 0x7da2043465f0>, <ast.Name object at 0x7da204345600>]], call[call[name[self]][name[i]]][name[j]]]]]] return[constant[True]] variable[p] assign[=] call[call[name[self]._possibles][name[i]]][name[j]] for taget[name[k]] in starred[call[name[utils].range_, parameter[name[self].side]]] begin[:] if compare[name[k] equal[==] name[i]] begin[:] continue variable[p] assign[=] call[name[p].difference, parameter[call[call[name[self]._possibles][name[k]]][name[j]]]] if compare[call[name[len], parameter[name[p]]] equal[==] constant[1]] begin[:] call[name[self].set_cell, 
parameter[name[i], name[j], call[name[p].pop, parameter[]]]] call[name[self].solution_steps.append, parameter[call[name[self]._format_step, parameter[constant[HIDDEN-COL], tuple[[<ast.Name object at 0x7da204344e20>, <ast.Name object at 0x7da204345840>]], call[call[name[self]][name[i]]][name[j]]]]]] return[constant[True]] variable[p] assign[=] call[call[name[self]._possibles][name[i]]][name[j]] for taget[name[k]] in starred[call[name[utils].range_, parameter[name[box_i], binary_operation[name[box_i] + name[self].order]]]] begin[:] for taget[name[kk]] in starred[call[name[utils].range_, parameter[name[box_j], binary_operation[name[box_j] + name[self].order]]]] begin[:] if <ast.BoolOp object at 0x7da204347790> begin[:] continue variable[p] assign[=] call[name[p].difference, parameter[call[call[name[self]._possibles][name[k]]][name[kk]]]] if compare[call[name[len], parameter[name[p]]] equal[==] constant[1]] begin[:] call[name[self].set_cell, parameter[name[i], name[j], call[name[p].pop, parameter[]]]] call[name[self].solution_steps.append, parameter[call[name[self]._format_step, parameter[constant[HIDDEN-BOX], tuple[[<ast.Name object at 0x7da204346d70>, <ast.Name object at 0x7da204344f40>]], call[call[name[self]][name[i]]][name[j]]]]]] return[constant[True]] return[constant[False]]
keyword[def] identifier[_fill_hidden_singles] ( identifier[self] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[utils] . identifier[range_] ( identifier[self] . identifier[side] ): identifier[box_i] =( identifier[i] // identifier[self] . identifier[order] )* identifier[self] . identifier[order] keyword[for] identifier[j] keyword[in] identifier[utils] . identifier[range_] ( identifier[self] . identifier[side] ): identifier[box_j] =( identifier[j] // identifier[self] . identifier[order] )* identifier[self] . identifier[order] keyword[if] identifier[self] [ identifier[i] ][ identifier[j] ]> literal[int] : keyword[continue] identifier[p] = identifier[self] . identifier[_possibles] [ identifier[i] ][ identifier[j] ] keyword[for] identifier[k] keyword[in] identifier[utils] . identifier[range_] ( identifier[self] . identifier[side] ): keyword[if] identifier[k] == identifier[j] : keyword[continue] identifier[p] = identifier[p] . identifier[difference] ( identifier[self] . identifier[_possibles] [ identifier[i] ][ identifier[k] ]) keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[self] . identifier[set_cell] ( identifier[i] , identifier[j] , identifier[p] . identifier[pop] ()) identifier[self] . identifier[solution_steps] . identifier[append] ( identifier[self] . identifier[_format_step] ( literal[string] ,( identifier[i] , identifier[j] ), identifier[self] [ identifier[i] ][ identifier[j] ])) keyword[return] keyword[True] identifier[p] = identifier[self] . identifier[_possibles] [ identifier[i] ][ identifier[j] ] keyword[for] identifier[k] keyword[in] identifier[utils] . identifier[range_] ( identifier[self] . identifier[side] ): keyword[if] identifier[k] == identifier[i] : keyword[continue] identifier[p] = identifier[p] . identifier[difference] ( identifier[self] . identifier[_possibles] [ identifier[k] ][ identifier[j] ]) keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[self] . 
identifier[set_cell] ( identifier[i] , identifier[j] , identifier[p] . identifier[pop] ()) identifier[self] . identifier[solution_steps] . identifier[append] ( identifier[self] . identifier[_format_step] ( literal[string] ,( identifier[i] , identifier[j] ), identifier[self] [ identifier[i] ][ identifier[j] ])) keyword[return] keyword[True] identifier[p] = identifier[self] . identifier[_possibles] [ identifier[i] ][ identifier[j] ] keyword[for] identifier[k] keyword[in] identifier[utils] . identifier[range_] ( identifier[box_i] , identifier[box_i] + identifier[self] . identifier[order] ): keyword[for] identifier[kk] keyword[in] identifier[utils] . identifier[range_] ( identifier[box_j] , identifier[box_j] + identifier[self] . identifier[order] ): keyword[if] identifier[k] == identifier[i] keyword[and] identifier[kk] == identifier[j] : keyword[continue] identifier[p] = identifier[p] . identifier[difference] ( identifier[self] . identifier[_possibles] [ identifier[k] ][ identifier[kk] ]) keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[self] . identifier[set_cell] ( identifier[i] , identifier[j] , identifier[p] . identifier[pop] ()) identifier[self] . identifier[solution_steps] . identifier[append] ( identifier[self] . identifier[_format_step] ( literal[string] ,( identifier[i] , identifier[j] ), identifier[self] [ identifier[i] ][ identifier[j] ])) keyword[return] keyword[True] keyword[return] keyword[False]
def _fill_hidden_singles(self): """Look for hidden singles, i.e. cells with only one unique possible value in row, column or box. :return: If any Hidden Single has been found. :rtype: bool """ for i in utils.range_(self.side): box_i = i // self.order * self.order for j in utils.range_(self.side): box_j = j // self.order * self.order # Skip if this cell is determined already. if self[i][j] > 0: continue # depends on [control=['if'], data=[]] # Look for hidden single in rows. p = self._possibles[i][j] for k in utils.range_(self.side): if k == j: continue # depends on [control=['if'], data=[]] p = p.difference(self._possibles[i][k]) # depends on [control=['for'], data=['k']] if len(p) == 1: # Found a hidden single in a row! self.set_cell(i, j, p.pop()) self.solution_steps.append(self._format_step('HIDDEN-ROW', (i, j), self[i][j])) return True # depends on [control=['if'], data=[]] # Look for hidden single in columns p = self._possibles[i][j] for k in utils.range_(self.side): if k == i: continue # depends on [control=['if'], data=[]] p = p.difference(self._possibles[k][j]) # depends on [control=['for'], data=['k']] if len(p) == 1: # Found a hidden single in a column! self.set_cell(i, j, p.pop()) self.solution_steps.append(self._format_step('HIDDEN-COL', (i, j), self[i][j])) return True # depends on [control=['if'], data=[]] # Look for hidden single in box p = self._possibles[i][j] for k in utils.range_(box_i, box_i + self.order): for kk in utils.range_(box_j, box_j + self.order): if k == i and kk == j: continue # depends on [control=['if'], data=[]] p = p.difference(self._possibles[k][kk]) # depends on [control=['for'], data=['kk']] # depends on [control=['for'], data=['k']] if len(p) == 1: # Found a hidden single in a box! 
self.set_cell(i, j, p.pop()) self.solution_steps.append(self._format_step('HIDDEN-BOX', (i, j), self[i][j])) return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] return False
def get_task_id(self): """Method to get all department members.""" task_id = self.json_response.get("task_id", None) self.logger.info("%s\t%s" % (self.request_method, self.request_url)) return task_id
def function[get_task_id, parameter[self]]: constant[Method to get all department members.] variable[task_id] assign[=] call[name[self].json_response.get, parameter[constant[task_id], constant[None]]] call[name[self].logger.info, parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b23ecc10>, <ast.Attribute object at 0x7da1b23edea0>]]]]] return[name[task_id]]
keyword[def] identifier[get_task_id] ( identifier[self] ): literal[string] identifier[task_id] = identifier[self] . identifier[json_response] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[self] . identifier[request_method] , identifier[self] . identifier[request_url] )) keyword[return] identifier[task_id]
def get_task_id(self): """Method to get all department members.""" task_id = self.json_response.get('task_id', None) self.logger.info('%s\t%s' % (self.request_method, self.request_url)) return task_id
def mod_hostname(hostname): ''' Modify hostname .. versionchanged:: 2015.8.0 Added support for SunOS (Solaris 10, Illumos, SmartOS) CLI Example: .. code-block:: bash salt '*' network.mod_hostname master.saltstack.com ''' # # SunOS tested on SmartOS and OmniOS (Solaris 10 compatible) # Oracle Solaris 11 uses smf, currently not supported # # /etc/nodename is the hostname only, not fqdn # /etc/defaultdomain is the domain # /etc/hosts should have both fqdn and hostname entries # if hostname is None: return False hostname_cmd = salt.utils.path.which('hostnamectl') or salt.utils.path.which('hostname') if salt.utils.platform.is_sunos(): uname_cmd = '/usr/bin/uname' if salt.utils.platform.is_smartos() else salt.utils.path.which('uname') check_hostname_cmd = salt.utils.path.which('check-hostname') # Grab the old hostname so we know which hostname to change and then # change the hostname using the hostname command if hostname_cmd.endswith('hostnamectl'): result = __salt__['cmd.run_all']('{0} status'.format(hostname_cmd)) if 0 == result['retcode']: out = result['stdout'] for line in out.splitlines(): line = line.split(':') if 'Static hostname' in line[0]: o_hostname = line[1].strip() else: log.debug('%s was unable to get hostname', hostname_cmd) o_hostname = __salt__['network.get_hostname']() elif not salt.utils.platform.is_sunos(): # don't run hostname -f because -f is not supported on all platforms o_hostname = socket.getfqdn() else: # output: Hostname core OK: fully qualified as core.acheron.be o_hostname = __salt__['cmd.run'](check_hostname_cmd).split(' ')[-1] if hostname_cmd.endswith('hostnamectl'): result = __salt__['cmd.run_all']('{0} set-hostname {1}'.format( hostname_cmd, hostname, )) if result['retcode'] != 0: log.debug('%s was unable to set hostname. 
Error: %s', hostname_cmd, result['stderr']) return False elif not salt.utils.platform.is_sunos(): __salt__['cmd.run']('{0} {1}'.format(hostname_cmd, hostname)) else: __salt__['cmd.run']('{0} -S {1}'.format(uname_cmd, hostname.split('.')[0])) # Modify the /etc/hosts file to replace the old hostname with the # new hostname with salt.utils.files.fopen('/etc/hosts', 'r') as fp_: host_c = [salt.utils.stringutils.to_unicode(_l) for _l in fp_.readlines()] with salt.utils.files.fopen('/etc/hosts', 'w') as fh_: for host in host_c: host = host.split() try: host[host.index(o_hostname)] = hostname if salt.utils.platform.is_sunos(): # also set a copy of the hostname host[host.index(o_hostname.split('.')[0])] = hostname.split('.')[0] except ValueError: pass fh_.write(salt.utils.stringutils.to_str('\t'.join(host) + '\n')) # Modify the /etc/sysconfig/network configuration file to set the # new hostname if __grains__['os_family'] == 'RedHat': with salt.utils.files.fopen('/etc/sysconfig/network', 'r') as fp_: network_c = [salt.utils.stringutils.to_unicode(_l) for _l in fp_.readlines()] with salt.utils.files.fopen('/etc/sysconfig/network', 'w') as fh_: for net in network_c: if net.startswith('HOSTNAME'): old_hostname = net.split('=', 1)[1].rstrip() quote_type = salt.utils.stringutils.is_quoted(old_hostname) fh_.write(salt.utils.stringutils.to_str( 'HOSTNAME={1}{0}{1}\n'.format( salt.utils.stringutils.dequote(hostname), quote_type))) else: fh_.write(salt.utils.stringutils.to_str(net)) elif __grains__['os_family'] in ('Debian', 'NILinuxRT'): with salt.utils.files.fopen('/etc/hostname', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str(hostname + '\n')) if __grains__['lsb_distrib_id'] == 'nilrt': str_hostname = salt.utils.stringutils.to_str(hostname) nirtcfg_cmd = '/usr/local/natinst/bin/nirtcfg' nirtcfg_cmd += ' --set section=SystemSettings,token=\'Host_Name\',value=\'{0}\''.format(str_hostname) if __salt__['cmd.run_all'](nirtcfg_cmd)['retcode'] != 0: raise 
CommandExecutionError('Couldn\'t set hostname to: {0}\n'.format(str_hostname)) elif __grains__['os_family'] == 'OpenBSD': with salt.utils.files.fopen('/etc/myname', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str(hostname + '\n')) # Update /etc/nodename and /etc/defaultdomain on SunOS if salt.utils.platform.is_sunos(): with salt.utils.files.fopen('/etc/nodename', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str( hostname.split('.')[0] + '\n') ) with salt.utils.files.fopen('/etc/defaultdomain', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str( ".".join(hostname.split('.')[1:]) + '\n') ) return True
def function[mod_hostname, parameter[hostname]]: constant[ Modify hostname .. versionchanged:: 2015.8.0 Added support for SunOS (Solaris 10, Illumos, SmartOS) CLI Example: .. code-block:: bash salt '*' network.mod_hostname master.saltstack.com ] if compare[name[hostname] is constant[None]] begin[:] return[constant[False]] variable[hostname_cmd] assign[=] <ast.BoolOp object at 0x7da1b21e3b20> if call[name[salt].utils.platform.is_sunos, parameter[]] begin[:] variable[uname_cmd] assign[=] <ast.IfExp object at 0x7da1b21e1b10> variable[check_hostname_cmd] assign[=] call[name[salt].utils.path.which, parameter[constant[check-hostname]]] if call[name[hostname_cmd].endswith, parameter[constant[hostnamectl]]] begin[:] variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[constant[{0} status].format, parameter[name[hostname_cmd]]]]] if compare[constant[0] equal[==] call[name[result]][constant[retcode]]] begin[:] variable[out] assign[=] call[name[result]][constant[stdout]] for taget[name[line]] in starred[call[name[out].splitlines, parameter[]]] begin[:] variable[line] assign[=] call[name[line].split, parameter[constant[:]]] if compare[constant[Static hostname] in call[name[line]][constant[0]]] begin[:] variable[o_hostname] assign[=] call[call[name[line]][constant[1]].strip, parameter[]] if call[name[hostname_cmd].endswith, parameter[constant[hostnamectl]]] begin[:] variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[constant[{0} set-hostname {1}].format, parameter[name[hostname_cmd], name[hostname]]]]] if compare[call[name[result]][constant[retcode]] not_equal[!=] constant[0]] begin[:] call[name[log].debug, parameter[constant[%s was unable to set hostname. 
Error: %s], name[hostname_cmd], call[name[result]][constant[stderr]]]] return[constant[False]] with call[name[salt].utils.files.fopen, parameter[constant[/etc/hosts], constant[r]]] begin[:] variable[host_c] assign[=] <ast.ListComp object at 0x7da1b21e3490> with call[name[salt].utils.files.fopen, parameter[constant[/etc/hosts], constant[w]]] begin[:] for taget[name[host]] in starred[name[host_c]] begin[:] variable[host] assign[=] call[name[host].split, parameter[]] <ast.Try object at 0x7da1b1f27fd0> call[name[fh_].write, parameter[call[name[salt].utils.stringutils.to_str, parameter[binary_operation[call[constant[ ].join, parameter[name[host]]] + constant[ ]]]]]] if compare[call[name[__grains__]][constant[os_family]] equal[==] constant[RedHat]] begin[:] with call[name[salt].utils.files.fopen, parameter[constant[/etc/sysconfig/network], constant[r]]] begin[:] variable[network_c] assign[=] <ast.ListComp object at 0x7da1b1f27100> with call[name[salt].utils.files.fopen, parameter[constant[/etc/sysconfig/network], constant[w]]] begin[:] for taget[name[net]] in starred[name[network_c]] begin[:] if call[name[net].startswith, parameter[constant[HOSTNAME]]] begin[:] variable[old_hostname] assign[=] call[call[call[name[net].split, parameter[constant[=], constant[1]]]][constant[1]].rstrip, parameter[]] variable[quote_type] assign[=] call[name[salt].utils.stringutils.is_quoted, parameter[name[old_hostname]]] call[name[fh_].write, parameter[call[name[salt].utils.stringutils.to_str, parameter[call[constant[HOSTNAME={1}{0}{1} ].format, parameter[call[name[salt].utils.stringutils.dequote, parameter[name[hostname]]], name[quote_type]]]]]]] if call[name[salt].utils.platform.is_sunos, parameter[]] begin[:] with call[name[salt].utils.files.fopen, parameter[constant[/etc/nodename], constant[w]]] begin[:] call[name[fh_].write, parameter[call[name[salt].utils.stringutils.to_str, parameter[binary_operation[call[call[name[hostname].split, parameter[constant[.]]]][constant[0]] + constant[ 
]]]]]] with call[name[salt].utils.files.fopen, parameter[constant[/etc/defaultdomain], constant[w]]] begin[:] call[name[fh_].write, parameter[call[name[salt].utils.stringutils.to_str, parameter[binary_operation[call[constant[.].join, parameter[call[call[name[hostname].split, parameter[constant[.]]]][<ast.Slice object at 0x7da18f58f0d0>]]] + constant[ ]]]]]] return[constant[True]]
keyword[def] identifier[mod_hostname] ( identifier[hostname] ): literal[string] keyword[if] identifier[hostname] keyword[is] keyword[None] : keyword[return] keyword[False] identifier[hostname_cmd] = identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ) keyword[or] identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ) keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_sunos] (): identifier[uname_cmd] = literal[string] keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_smartos] () keyword[else] identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ) identifier[check_hostname_cmd] = identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ) keyword[if] identifier[hostname_cmd] . identifier[endswith] ( literal[string] ): identifier[result] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[hostname_cmd] )) keyword[if] literal[int] == identifier[result] [ literal[string] ]: identifier[out] = identifier[result] [ literal[string] ] keyword[for] identifier[line] keyword[in] identifier[out] . identifier[splitlines] (): identifier[line] = identifier[line] . identifier[split] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[line] [ literal[int] ]: identifier[o_hostname] = identifier[line] [ literal[int] ]. identifier[strip] () keyword[else] : identifier[log] . identifier[debug] ( literal[string] , identifier[hostname_cmd] ) identifier[o_hostname] = identifier[__salt__] [ literal[string] ]() keyword[elif] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_sunos] (): identifier[o_hostname] = identifier[socket] . 
identifier[getfqdn] () keyword[else] : identifier[o_hostname] = identifier[__salt__] [ literal[string] ]( identifier[check_hostname_cmd] ). identifier[split] ( literal[string] )[- literal[int] ] keyword[if] identifier[hostname_cmd] . identifier[endswith] ( literal[string] ): identifier[result] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[hostname_cmd] , identifier[hostname] , )) keyword[if] identifier[result] [ literal[string] ]!= literal[int] : identifier[log] . identifier[debug] ( literal[string] , identifier[hostname_cmd] , identifier[result] [ literal[string] ]) keyword[return] keyword[False] keyword[elif] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_sunos] (): identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[hostname_cmd] , identifier[hostname] )) keyword[else] : identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[uname_cmd] , identifier[hostname] . identifier[split] ( literal[string] )[ literal[int] ])) keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fp_] : identifier[host_c] =[ identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[_l] ) keyword[for] identifier[_l] keyword[in] identifier[fp_] . identifier[readlines] ()] keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fh_] : keyword[for] identifier[host] keyword[in] identifier[host_c] : identifier[host] = identifier[host] . identifier[split] () keyword[try] : identifier[host] [ identifier[host] . identifier[index] ( identifier[o_hostname] )]= identifier[hostname] keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . 
identifier[is_sunos] (): identifier[host] [ identifier[host] . identifier[index] ( identifier[o_hostname] . identifier[split] ( literal[string] )[ literal[int] ])]= identifier[hostname] . identifier[split] ( literal[string] )[ literal[int] ] keyword[except] identifier[ValueError] : keyword[pass] identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( literal[string] . identifier[join] ( identifier[host] )+ literal[string] )) keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fp_] : identifier[network_c] =[ identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[_l] ) keyword[for] identifier[_l] keyword[in] identifier[fp_] . identifier[readlines] ()] keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fh_] : keyword[for] identifier[net] keyword[in] identifier[network_c] : keyword[if] identifier[net] . identifier[startswith] ( literal[string] ): identifier[old_hostname] = identifier[net] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[rstrip] () identifier[quote_type] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[is_quoted] ( identifier[old_hostname] ) identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( literal[string] . identifier[format] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[dequote] ( identifier[hostname] ), identifier[quote_type] ))) keyword[else] : identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . 
identifier[to_str] ( identifier[net] )) keyword[elif] identifier[__grains__] [ literal[string] ] keyword[in] ( literal[string] , literal[string] ): keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fh_] : identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[hostname] + literal[string] )) keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] : identifier[str_hostname] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[hostname] ) identifier[nirtcfg_cmd] = literal[string] identifier[nirtcfg_cmd] += literal[string] . identifier[format] ( identifier[str_hostname] ) keyword[if] identifier[__salt__] [ literal[string] ]( identifier[nirtcfg_cmd] )[ literal[string] ]!= literal[int] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[str_hostname] )) keyword[elif] identifier[__grains__] [ literal[string] ]== literal[string] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fh_] : identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[hostname] + literal[string] )) keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_sunos] (): keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fh_] : identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[hostname] . identifier[split] ( literal[string] )[ literal[int] ]+ literal[string] ) ) keyword[with] identifier[salt] . identifier[utils] . 
identifier[files] . identifier[fopen] ( literal[string] , literal[string] ) keyword[as] identifier[fh_] : identifier[fh_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( literal[string] . identifier[join] ( identifier[hostname] . identifier[split] ( literal[string] )[ literal[int] :])+ literal[string] ) ) keyword[return] keyword[True]
def mod_hostname(hostname): """ Modify hostname .. versionchanged:: 2015.8.0 Added support for SunOS (Solaris 10, Illumos, SmartOS) CLI Example: .. code-block:: bash salt '*' network.mod_hostname master.saltstack.com """ # # SunOS tested on SmartOS and OmniOS (Solaris 10 compatible) # Oracle Solaris 11 uses smf, currently not supported # # /etc/nodename is the hostname only, not fqdn # /etc/defaultdomain is the domain # /etc/hosts should have both fqdn and hostname entries # if hostname is None: return False # depends on [control=['if'], data=[]] hostname_cmd = salt.utils.path.which('hostnamectl') or salt.utils.path.which('hostname') if salt.utils.platform.is_sunos(): uname_cmd = '/usr/bin/uname' if salt.utils.platform.is_smartos() else salt.utils.path.which('uname') check_hostname_cmd = salt.utils.path.which('check-hostname') # depends on [control=['if'], data=[]] # Grab the old hostname so we know which hostname to change and then # change the hostname using the hostname command if hostname_cmd.endswith('hostnamectl'): result = __salt__['cmd.run_all']('{0} status'.format(hostname_cmd)) if 0 == result['retcode']: out = result['stdout'] for line in out.splitlines(): line = line.split(':') if 'Static hostname' in line[0]: o_hostname = line[1].strip() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]] else: log.debug('%s was unable to get hostname', hostname_cmd) o_hostname = __salt__['network.get_hostname']() # depends on [control=['if'], data=[]] elif not salt.utils.platform.is_sunos(): # don't run hostname -f because -f is not supported on all platforms o_hostname = socket.getfqdn() # depends on [control=['if'], data=[]] else: # output: Hostname core OK: fully qualified as core.acheron.be o_hostname = __salt__['cmd.run'](check_hostname_cmd).split(' ')[-1] if hostname_cmd.endswith('hostnamectl'): result = __salt__['cmd.run_all']('{0} set-hostname {1}'.format(hostname_cmd, hostname)) if 
result['retcode'] != 0: log.debug('%s was unable to set hostname. Error: %s', hostname_cmd, result['stderr']) return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif not salt.utils.platform.is_sunos(): __salt__['cmd.run']('{0} {1}'.format(hostname_cmd, hostname)) # depends on [control=['if'], data=[]] else: __salt__['cmd.run']('{0} -S {1}'.format(uname_cmd, hostname.split('.')[0])) # Modify the /etc/hosts file to replace the old hostname with the # new hostname with salt.utils.files.fopen('/etc/hosts', 'r') as fp_: host_c = [salt.utils.stringutils.to_unicode(_l) for _l in fp_.readlines()] # depends on [control=['with'], data=['fp_']] with salt.utils.files.fopen('/etc/hosts', 'w') as fh_: for host in host_c: host = host.split() try: host[host.index(o_hostname)] = hostname if salt.utils.platform.is_sunos(): # also set a copy of the hostname host[host.index(o_hostname.split('.')[0])] = hostname.split('.')[0] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] fh_.write(salt.utils.stringutils.to_str('\t'.join(host) + '\n')) # depends on [control=['for'], data=['host']] # depends on [control=['with'], data=['fh_']] # Modify the /etc/sysconfig/network configuration file to set the # new hostname if __grains__['os_family'] == 'RedHat': with salt.utils.files.fopen('/etc/sysconfig/network', 'r') as fp_: network_c = [salt.utils.stringutils.to_unicode(_l) for _l in fp_.readlines()] # depends on [control=['with'], data=['fp_']] with salt.utils.files.fopen('/etc/sysconfig/network', 'w') as fh_: for net in network_c: if net.startswith('HOSTNAME'): old_hostname = net.split('=', 1)[1].rstrip() quote_type = salt.utils.stringutils.is_quoted(old_hostname) fh_.write(salt.utils.stringutils.to_str('HOSTNAME={1}{0}{1}\n'.format(salt.utils.stringutils.dequote(hostname), quote_type))) # depends on [control=['if'], data=[]] else: 
fh_.write(salt.utils.stringutils.to_str(net)) # depends on [control=['for'], data=['net']] # depends on [control=['with'], data=['fh_']] # depends on [control=['if'], data=[]] elif __grains__['os_family'] in ('Debian', 'NILinuxRT'): with salt.utils.files.fopen('/etc/hostname', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str(hostname + '\n')) # depends on [control=['with'], data=['fh_']] if __grains__['lsb_distrib_id'] == 'nilrt': str_hostname = salt.utils.stringutils.to_str(hostname) nirtcfg_cmd = '/usr/local/natinst/bin/nirtcfg' nirtcfg_cmd += " --set section=SystemSettings,token='Host_Name',value='{0}'".format(str_hostname) if __salt__['cmd.run_all'](nirtcfg_cmd)['retcode'] != 0: raise CommandExecutionError("Couldn't set hostname to: {0}\n".format(str_hostname)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif __grains__['os_family'] == 'OpenBSD': with salt.utils.files.fopen('/etc/myname', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str(hostname + '\n')) # depends on [control=['with'], data=['fh_']] # depends on [control=['if'], data=[]] # Update /etc/nodename and /etc/defaultdomain on SunOS if salt.utils.platform.is_sunos(): with salt.utils.files.fopen('/etc/nodename', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str(hostname.split('.')[0] + '\n')) # depends on [control=['with'], data=['fh_']] with salt.utils.files.fopen('/etc/defaultdomain', 'w') as fh_: fh_.write(salt.utils.stringutils.to_str('.'.join(hostname.split('.')[1:]) + '\n')) # depends on [control=['with'], data=['fh_']] # depends on [control=['if'], data=[]] return True
def get_array_ndim(self, key): """Return array's ndim""" data = self.model.get_data() return data[key].ndim
def function[get_array_ndim, parameter[self, key]]: constant[Return array's ndim] variable[data] assign[=] call[name[self].model.get_data, parameter[]] return[call[name[data]][name[key]].ndim]
keyword[def] identifier[get_array_ndim] ( identifier[self] , identifier[key] ): literal[string] identifier[data] = identifier[self] . identifier[model] . identifier[get_data] () keyword[return] identifier[data] [ identifier[key] ]. identifier[ndim]
def get_array_ndim(self, key): """Return array's ndim""" data = self.model.get_data() return data[key].ndim
def lattice_2_lmpbox(lattice, origin=(0, 0, 0)): """ Converts a lattice object to LammpsBox, and calculates the symmetry operation used. Args: lattice (Lattice): Input lattice. origin: A (3,) array/list of floats setting lower bounds of simulation box. Default to (0, 0, 0). Returns: LammpsBox, SymmOp """ a, b, c = lattice.abc xlo, ylo, zlo = origin xhi = a + xlo m = lattice.matrix xy = np.dot(m[1], m[0] / a) yhi = np.sqrt(b ** 2 - xy ** 2) + ylo xz = np.dot(m[2], m[0] / a) yz = (np.dot(m[1], m[2]) - xy * xz) / (yhi - ylo) zhi = np.sqrt(c ** 2 - xz ** 2 - yz ** 2) + zlo tilt = None if lattice.is_orthogonal else [xy, xz, yz] rot_matrix = np.linalg.solve([[xhi - xlo, 0, 0], [xy, yhi - ylo, 0], [xz, yz, zhi - zlo]], m) bounds = [[xlo, xhi], [ylo, yhi], [zlo, zhi]] symmop = SymmOp.from_rotation_and_translation(rot_matrix, origin) return LammpsBox(bounds, tilt), symmop
def function[lattice_2_lmpbox, parameter[lattice, origin]]: constant[ Converts a lattice object to LammpsBox, and calculates the symmetry operation used. Args: lattice (Lattice): Input lattice. origin: A (3,) array/list of floats setting lower bounds of simulation box. Default to (0, 0, 0). Returns: LammpsBox, SymmOp ] <ast.Tuple object at 0x7da207f003a0> assign[=] name[lattice].abc <ast.Tuple object at 0x7da207f02470> assign[=] name[origin] variable[xhi] assign[=] binary_operation[name[a] + name[xlo]] variable[m] assign[=] name[lattice].matrix variable[xy] assign[=] call[name[np].dot, parameter[call[name[m]][constant[1]], binary_operation[call[name[m]][constant[0]] / name[a]]]] variable[yhi] assign[=] binary_operation[call[name[np].sqrt, parameter[binary_operation[binary_operation[name[b] ** constant[2]] - binary_operation[name[xy] ** constant[2]]]]] + name[ylo]] variable[xz] assign[=] call[name[np].dot, parameter[call[name[m]][constant[2]], binary_operation[call[name[m]][constant[0]] / name[a]]]] variable[yz] assign[=] binary_operation[binary_operation[call[name[np].dot, parameter[call[name[m]][constant[1]], call[name[m]][constant[2]]]] - binary_operation[name[xy] * name[xz]]] / binary_operation[name[yhi] - name[ylo]]] variable[zhi] assign[=] binary_operation[call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[c] ** constant[2]] - binary_operation[name[xz] ** constant[2]]] - binary_operation[name[yz] ** constant[2]]]]] + name[zlo]] variable[tilt] assign[=] <ast.IfExp object at 0x7da2043456c0> variable[rot_matrix] assign[=] call[name[np].linalg.solve, parameter[list[[<ast.List object at 0x7da204345f30>, <ast.List object at 0x7da204346b60>, <ast.List object at 0x7da204344ca0>]], name[m]]] variable[bounds] assign[=] list[[<ast.List object at 0x7da204345930>, <ast.List object at 0x7da204345e10>, <ast.List object at 0x7da2043454e0>]] variable[symmop] assign[=] call[name[SymmOp].from_rotation_and_translation, 
parameter[name[rot_matrix], name[origin]]] return[tuple[[<ast.Call object at 0x7da204344b80>, <ast.Name object at 0x7da204346bf0>]]]
keyword[def] identifier[lattice_2_lmpbox] ( identifier[lattice] , identifier[origin] =( literal[int] , literal[int] , literal[int] )): literal[string] identifier[a] , identifier[b] , identifier[c] = identifier[lattice] . identifier[abc] identifier[xlo] , identifier[ylo] , identifier[zlo] = identifier[origin] identifier[xhi] = identifier[a] + identifier[xlo] identifier[m] = identifier[lattice] . identifier[matrix] identifier[xy] = identifier[np] . identifier[dot] ( identifier[m] [ literal[int] ], identifier[m] [ literal[int] ]/ identifier[a] ) identifier[yhi] = identifier[np] . identifier[sqrt] ( identifier[b] ** literal[int] - identifier[xy] ** literal[int] )+ identifier[ylo] identifier[xz] = identifier[np] . identifier[dot] ( identifier[m] [ literal[int] ], identifier[m] [ literal[int] ]/ identifier[a] ) identifier[yz] =( identifier[np] . identifier[dot] ( identifier[m] [ literal[int] ], identifier[m] [ literal[int] ])- identifier[xy] * identifier[xz] )/( identifier[yhi] - identifier[ylo] ) identifier[zhi] = identifier[np] . identifier[sqrt] ( identifier[c] ** literal[int] - identifier[xz] ** literal[int] - identifier[yz] ** literal[int] )+ identifier[zlo] identifier[tilt] = keyword[None] keyword[if] identifier[lattice] . identifier[is_orthogonal] keyword[else] [ identifier[xy] , identifier[xz] , identifier[yz] ] identifier[rot_matrix] = identifier[np] . identifier[linalg] . identifier[solve] ([[ identifier[xhi] - identifier[xlo] , literal[int] , literal[int] ], [ identifier[xy] , identifier[yhi] - identifier[ylo] , literal[int] ], [ identifier[xz] , identifier[yz] , identifier[zhi] - identifier[zlo] ]], identifier[m] ) identifier[bounds] =[[ identifier[xlo] , identifier[xhi] ],[ identifier[ylo] , identifier[yhi] ],[ identifier[zlo] , identifier[zhi] ]] identifier[symmop] = identifier[SymmOp] . 
identifier[from_rotation_and_translation] ( identifier[rot_matrix] , identifier[origin] ) keyword[return] identifier[LammpsBox] ( identifier[bounds] , identifier[tilt] ), identifier[symmop]
def lattice_2_lmpbox(lattice, origin=(0, 0, 0)): """ Converts a lattice object to LammpsBox, and calculates the symmetry operation used. Args: lattice (Lattice): Input lattice. origin: A (3,) array/list of floats setting lower bounds of simulation box. Default to (0, 0, 0). Returns: LammpsBox, SymmOp """ (a, b, c) = lattice.abc (xlo, ylo, zlo) = origin xhi = a + xlo m = lattice.matrix xy = np.dot(m[1], m[0] / a) yhi = np.sqrt(b ** 2 - xy ** 2) + ylo xz = np.dot(m[2], m[0] / a) yz = (np.dot(m[1], m[2]) - xy * xz) / (yhi - ylo) zhi = np.sqrt(c ** 2 - xz ** 2 - yz ** 2) + zlo tilt = None if lattice.is_orthogonal else [xy, xz, yz] rot_matrix = np.linalg.solve([[xhi - xlo, 0, 0], [xy, yhi - ylo, 0], [xz, yz, zhi - zlo]], m) bounds = [[xlo, xhi], [ylo, yhi], [zlo, zhi]] symmop = SymmOp.from_rotation_and_translation(rot_matrix, origin) return (LammpsBox(bounds, tilt), symmop)
def store_episode(self, episode_batch): """episode_batch: array(batch_size x (T or T+1) x dim_key) """ batch_sizes = [len(episode_batch[key]) for key in episode_batch.keys()] assert np.all(np.array(batch_sizes) == batch_sizes[0]) batch_size = batch_sizes[0] with self.lock: idxs = self._get_storage_idx(batch_size) # load inputs into buffers for key in self.buffers.keys(): self.buffers[key][idxs] = episode_batch[key] self.n_transitions_stored += batch_size * self.T
def function[store_episode, parameter[self, episode_batch]]: constant[episode_batch: array(batch_size x (T or T+1) x dim_key) ] variable[batch_sizes] assign[=] <ast.ListComp object at 0x7da1b0235cf0> assert[call[name[np].all, parameter[compare[call[name[np].array, parameter[name[batch_sizes]]] equal[==] call[name[batch_sizes]][constant[0]]]]]] variable[batch_size] assign[=] call[name[batch_sizes]][constant[0]] with name[self].lock begin[:] variable[idxs] assign[=] call[name[self]._get_storage_idx, parameter[name[batch_size]]] for taget[name[key]] in starred[call[name[self].buffers.keys, parameter[]]] begin[:] call[call[name[self].buffers][name[key]]][name[idxs]] assign[=] call[name[episode_batch]][name[key]] <ast.AugAssign object at 0x7da1b0211240>
keyword[def] identifier[store_episode] ( identifier[self] , identifier[episode_batch] ): literal[string] identifier[batch_sizes] =[ identifier[len] ( identifier[episode_batch] [ identifier[key] ]) keyword[for] identifier[key] keyword[in] identifier[episode_batch] . identifier[keys] ()] keyword[assert] identifier[np] . identifier[all] ( identifier[np] . identifier[array] ( identifier[batch_sizes] )== identifier[batch_sizes] [ literal[int] ]) identifier[batch_size] = identifier[batch_sizes] [ literal[int] ] keyword[with] identifier[self] . identifier[lock] : identifier[idxs] = identifier[self] . identifier[_get_storage_idx] ( identifier[batch_size] ) keyword[for] identifier[key] keyword[in] identifier[self] . identifier[buffers] . identifier[keys] (): identifier[self] . identifier[buffers] [ identifier[key] ][ identifier[idxs] ]= identifier[episode_batch] [ identifier[key] ] identifier[self] . identifier[n_transitions_stored] += identifier[batch_size] * identifier[self] . identifier[T]
def store_episode(self, episode_batch): """episode_batch: array(batch_size x (T or T+1) x dim_key) """ batch_sizes = [len(episode_batch[key]) for key in episode_batch.keys()] assert np.all(np.array(batch_sizes) == batch_sizes[0]) batch_size = batch_sizes[0] with self.lock: idxs = self._get_storage_idx(batch_size) # load inputs into buffers for key in self.buffers.keys(): self.buffers[key][idxs] = episode_batch[key] # depends on [control=['for'], data=['key']] self.n_transitions_stored += batch_size * self.T # depends on [control=['with'], data=[]]
def on_client_connect(self, client_conn): """Inform client of build state and version on connect. Parameters ---------- client_conn : ClientConnection object The client connection that has been successfully established. Returns ------- Future that resolves when the device is ready to accept messages. """ assert get_thread_ident() == self._server.ioloop_thread_id self._client_conns.add(client_conn) self._strategies[client_conn] = {} # map sensors -> sampling strategies katcp_version = self.PROTOCOL_INFO.major if katcp_version >= VERSION_CONNECT_KATCP_MAJOR: client_conn.inform(Message.inform( "version-connect", "katcp-protocol", self.PROTOCOL_INFO)) client_conn.inform(Message.inform( "version-connect", "katcp-library", "katcp-python-%s" % katcp.__version__)) client_conn.inform(Message.inform( "version-connect", "katcp-device", self.version(), self.build_state())) else: client_conn.inform(Message.inform("version", self.version())) client_conn.inform(Message.inform("build-state", self.build_state()))
def function[on_client_connect, parameter[self, client_conn]]: constant[Inform client of build state and version on connect. Parameters ---------- client_conn : ClientConnection object The client connection that has been successfully established. Returns ------- Future that resolves when the device is ready to accept messages. ] assert[compare[call[name[get_thread_ident], parameter[]] equal[==] name[self]._server.ioloop_thread_id]] call[name[self]._client_conns.add, parameter[name[client_conn]]] call[name[self]._strategies][name[client_conn]] assign[=] dictionary[[], []] variable[katcp_version] assign[=] name[self].PROTOCOL_INFO.major if compare[name[katcp_version] greater_or_equal[>=] name[VERSION_CONNECT_KATCP_MAJOR]] begin[:] call[name[client_conn].inform, parameter[call[name[Message].inform, parameter[constant[version-connect], constant[katcp-protocol], name[self].PROTOCOL_INFO]]]] call[name[client_conn].inform, parameter[call[name[Message].inform, parameter[constant[version-connect], constant[katcp-library], binary_operation[constant[katcp-python-%s] <ast.Mod object at 0x7da2590d6920> name[katcp].__version__]]]]] call[name[client_conn].inform, parameter[call[name[Message].inform, parameter[constant[version-connect], constant[katcp-device], call[name[self].version, parameter[]], call[name[self].build_state, parameter[]]]]]]
keyword[def] identifier[on_client_connect] ( identifier[self] , identifier[client_conn] ): literal[string] keyword[assert] identifier[get_thread_ident] ()== identifier[self] . identifier[_server] . identifier[ioloop_thread_id] identifier[self] . identifier[_client_conns] . identifier[add] ( identifier[client_conn] ) identifier[self] . identifier[_strategies] [ identifier[client_conn] ]={} identifier[katcp_version] = identifier[self] . identifier[PROTOCOL_INFO] . identifier[major] keyword[if] identifier[katcp_version] >= identifier[VERSION_CONNECT_KATCP_MAJOR] : identifier[client_conn] . identifier[inform] ( identifier[Message] . identifier[inform] ( literal[string] , literal[string] , identifier[self] . identifier[PROTOCOL_INFO] )) identifier[client_conn] . identifier[inform] ( identifier[Message] . identifier[inform] ( literal[string] , literal[string] , literal[string] % identifier[katcp] . identifier[__version__] )) identifier[client_conn] . identifier[inform] ( identifier[Message] . identifier[inform] ( literal[string] , literal[string] , identifier[self] . identifier[version] (), identifier[self] . identifier[build_state] ())) keyword[else] : identifier[client_conn] . identifier[inform] ( identifier[Message] . identifier[inform] ( literal[string] , identifier[self] . identifier[version] ())) identifier[client_conn] . identifier[inform] ( identifier[Message] . identifier[inform] ( literal[string] , identifier[self] . identifier[build_state] ()))
def on_client_connect(self, client_conn): """Inform client of build state and version on connect. Parameters ---------- client_conn : ClientConnection object The client connection that has been successfully established. Returns ------- Future that resolves when the device is ready to accept messages. """ assert get_thread_ident() == self._server.ioloop_thread_id self._client_conns.add(client_conn) self._strategies[client_conn] = {} # map sensors -> sampling strategies katcp_version = self.PROTOCOL_INFO.major if katcp_version >= VERSION_CONNECT_KATCP_MAJOR: client_conn.inform(Message.inform('version-connect', 'katcp-protocol', self.PROTOCOL_INFO)) client_conn.inform(Message.inform('version-connect', 'katcp-library', 'katcp-python-%s' % katcp.__version__)) client_conn.inform(Message.inform('version-connect', 'katcp-device', self.version(), self.build_state())) # depends on [control=['if'], data=[]] else: client_conn.inform(Message.inform('version', self.version())) client_conn.inform(Message.inform('build-state', self.build_state()))
def master_callback(self, m, master): '''process mavlink message m on master, sending any messages to recipients''' # see if it is handled by a specialised sysid connection sysid = m.get_srcSystem() mtype = m.get_type() if sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) if mtype == "GLOBAL_POSITION_INT": for modname in 'map', 'asterix', 'NMEA', 'NMEA2': mod = self.module(modname) if mod is not None: mod.set_secondary_vehicle_position(m) return if getattr(m, '_timestamp', None) is None: master.post_message(m) self.status.counters['MasterIn'][master.linknum] += 1 if mtype == 'GLOBAL_POSITION_INT': # send GLOBAL_POSITION_INT to 2nd GCS for 2nd vehicle display for sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) if self.mpstate.settings.fwdpos: for link in self.mpstate.mav_master: if link != master: link.write(m.get_msgbuf()) # and log them if mtype not in dataPackets and self.mpstate.logqueue: # put link number in bottom 2 bits, so we can analyse packet # delay in saved logs usec = self.get_usec() usec = (usec & ~3) | master.linknum self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf())) # keep the last message of each type around self.status.msgs[mtype] = m if mtype not in self.status.msg_count: self.status.msg_count[mtype] = 0 self.status.msg_count[mtype] += 1 if m.get_srcComponent() == mavutil.mavlink.MAV_COMP_ID_GIMBAL and mtype == 'HEARTBEAT': # silence gimbal heartbeat packets for now return if getattr(m, 'time_boot_ms', None) is not None and self.settings.target_system == m.get_srcSystem(): # update link_delayed attribute self.handle_msec_timestamp(m, master) if mtype in activityPackets: if master.linkerror: master.linkerror = False self.say("link %s OK" % (self.link_label(master))) self.status.last_message = time.time() master.last_message = self.status.last_message if master.link_delayed and self.mpstate.settings.checkdelay: # don't process delayed 
packets that cause double reporting if mtype in delayedPackets: return self.master_msg_handling(m, master) # don't pass along bad data if mtype != 'BAD_DATA': # pass messages along to listeners, except for REQUEST_DATA_STREAM, which # would lead a conflict in stream rate setting between mavproxy and the other # GCS if self.mpstate.settings.mavfwd_rate or mtype != 'REQUEST_DATA_STREAM': if mtype not in self.no_fwd_types: for r in self.mpstate.mav_outputs: r.write(m.get_msgbuf()) sysid = m.get_srcSystem() target_sysid = self.target_system # pass to modules for (mod,pm) in self.mpstate.modules: if not hasattr(mod, 'mavlink_packet'): continue if not mod.multi_vehicle and sysid != target_sysid: # only pass packets not from our target to modules that # have marked themselves as being multi-vehicle capable continue try: mod.mavlink_packet(m) except Exception as msg: if self.mpstate.settings.moddebug == 1: print(msg) elif self.mpstate.settings.moddebug > 1: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)
def function[master_callback, parameter[self, m, master]]: constant[process mavlink message m on master, sending any messages to recipients] variable[sysid] assign[=] call[name[m].get_srcSystem, parameter[]] variable[mtype] assign[=] call[name[m].get_type, parameter[]] if compare[name[sysid] in name[self].mpstate.sysid_outputs] begin[:] call[call[name[self].mpstate.sysid_outputs][name[sysid]].write, parameter[call[name[m].get_msgbuf, parameter[]]]] if compare[name[mtype] equal[==] constant[GLOBAL_POSITION_INT]] begin[:] for taget[name[modname]] in starred[tuple[[<ast.Constant object at 0x7da1b17de4d0>, <ast.Constant object at 0x7da1b17dd960>, <ast.Constant object at 0x7da1b17df970>, <ast.Constant object at 0x7da1b17dd990>]]] begin[:] variable[mod] assign[=] call[name[self].module, parameter[name[modname]]] if compare[name[mod] is_not constant[None]] begin[:] call[name[mod].set_secondary_vehicle_position, parameter[name[m]]] return[None] if compare[call[name[getattr], parameter[name[m], constant[_timestamp], constant[None]]] is constant[None]] begin[:] call[name[master].post_message, parameter[name[m]]] <ast.AugAssign object at 0x7da1b17dd4b0> if compare[name[mtype] equal[==] constant[GLOBAL_POSITION_INT]] begin[:] for taget[name[sysid]] in starred[name[self].mpstate.sysid_outputs] begin[:] call[call[name[self].mpstate.sysid_outputs][name[sysid]].write, parameter[call[name[m].get_msgbuf, parameter[]]]] if name[self].mpstate.settings.fwdpos begin[:] for taget[name[link]] in starred[name[self].mpstate.mav_master] begin[:] if compare[name[link] not_equal[!=] name[master]] begin[:] call[name[link].write, parameter[call[name[m].get_msgbuf, parameter[]]]] if <ast.BoolOp object at 0x7da1b17dc9d0> begin[:] variable[usec] assign[=] call[name[self].get_usec, parameter[]] variable[usec] assign[=] binary_operation[binary_operation[name[usec] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b17dd6c0>] <ast.BitOr object at 0x7da2590d6aa0> name[master].linknum] 
call[name[self].mpstate.logqueue.put, parameter[call[name[bytearray], parameter[binary_operation[call[name[struct].pack, parameter[constant[>Q], name[usec]]] + call[name[m].get_msgbuf, parameter[]]]]]]] call[name[self].status.msgs][name[mtype]] assign[=] name[m] if compare[name[mtype] <ast.NotIn object at 0x7da2590d7190> name[self].status.msg_count] begin[:] call[name[self].status.msg_count][name[mtype]] assign[=] constant[0] <ast.AugAssign object at 0x7da1b17dcb80> if <ast.BoolOp object at 0x7da1b17dd450> begin[:] return[None] if <ast.BoolOp object at 0x7da1b17dc520> begin[:] call[name[self].handle_msec_timestamp, parameter[name[m], name[master]]] if compare[name[mtype] in name[activityPackets]] begin[:] if name[master].linkerror begin[:] name[master].linkerror assign[=] constant[False] call[name[self].say, parameter[binary_operation[constant[link %s OK] <ast.Mod object at 0x7da2590d6920> call[name[self].link_label, parameter[name[master]]]]]] name[self].status.last_message assign[=] call[name[time].time, parameter[]] name[master].last_message assign[=] name[self].status.last_message if <ast.BoolOp object at 0x7da20c76f3a0> begin[:] if compare[name[mtype] in name[delayedPackets]] begin[:] return[None] call[name[self].master_msg_handling, parameter[name[m], name[master]]] if compare[name[mtype] not_equal[!=] constant[BAD_DATA]] begin[:] if <ast.BoolOp object at 0x7da20c76f370> begin[:] if compare[name[mtype] <ast.NotIn object at 0x7da2590d7190> name[self].no_fwd_types] begin[:] for taget[name[r]] in starred[name[self].mpstate.mav_outputs] begin[:] call[name[r].write, parameter[call[name[m].get_msgbuf, parameter[]]]] variable[sysid] assign[=] call[name[m].get_srcSystem, parameter[]] variable[target_sysid] assign[=] name[self].target_system for taget[tuple[[<ast.Name object at 0x7da20c76cd60>, <ast.Name object at 0x7da20c76c430>]]] in starred[name[self].mpstate.modules] begin[:] if <ast.UnaryOp object at 0x7da20c76c100> begin[:] continue if <ast.BoolOp object at 
0x7da20c76fa00> begin[:] continue <ast.Try object at 0x7da20c76efe0>
keyword[def] identifier[master_callback] ( identifier[self] , identifier[m] , identifier[master] ): literal[string] identifier[sysid] = identifier[m] . identifier[get_srcSystem] () identifier[mtype] = identifier[m] . identifier[get_type] () keyword[if] identifier[sysid] keyword[in] identifier[self] . identifier[mpstate] . identifier[sysid_outputs] : identifier[self] . identifier[mpstate] . identifier[sysid_outputs] [ identifier[sysid] ]. identifier[write] ( identifier[m] . identifier[get_msgbuf] ()) keyword[if] identifier[mtype] == literal[string] : keyword[for] identifier[modname] keyword[in] literal[string] , literal[string] , literal[string] , literal[string] : identifier[mod] = identifier[self] . identifier[module] ( identifier[modname] ) keyword[if] identifier[mod] keyword[is] keyword[not] keyword[None] : identifier[mod] . identifier[set_secondary_vehicle_position] ( identifier[m] ) keyword[return] keyword[if] identifier[getattr] ( identifier[m] , literal[string] , keyword[None] ) keyword[is] keyword[None] : identifier[master] . identifier[post_message] ( identifier[m] ) identifier[self] . identifier[status] . identifier[counters] [ literal[string] ][ identifier[master] . identifier[linknum] ]+= literal[int] keyword[if] identifier[mtype] == literal[string] : keyword[for] identifier[sysid] keyword[in] identifier[self] . identifier[mpstate] . identifier[sysid_outputs] : identifier[self] . identifier[mpstate] . identifier[sysid_outputs] [ identifier[sysid] ]. identifier[write] ( identifier[m] . identifier[get_msgbuf] ()) keyword[if] identifier[self] . identifier[mpstate] . identifier[settings] . identifier[fwdpos] : keyword[for] identifier[link] keyword[in] identifier[self] . identifier[mpstate] . identifier[mav_master] : keyword[if] identifier[link] != identifier[master] : identifier[link] . identifier[write] ( identifier[m] . identifier[get_msgbuf] ()) keyword[if] identifier[mtype] keyword[not] keyword[in] identifier[dataPackets] keyword[and] identifier[self] . 
identifier[mpstate] . identifier[logqueue] : identifier[usec] = identifier[self] . identifier[get_usec] () identifier[usec] =( identifier[usec] &~ literal[int] )| identifier[master] . identifier[linknum] identifier[self] . identifier[mpstate] . identifier[logqueue] . identifier[put] ( identifier[bytearray] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[usec] )+ identifier[m] . identifier[get_msgbuf] ())) identifier[self] . identifier[status] . identifier[msgs] [ identifier[mtype] ]= identifier[m] keyword[if] identifier[mtype] keyword[not] keyword[in] identifier[self] . identifier[status] . identifier[msg_count] : identifier[self] . identifier[status] . identifier[msg_count] [ identifier[mtype] ]= literal[int] identifier[self] . identifier[status] . identifier[msg_count] [ identifier[mtype] ]+= literal[int] keyword[if] identifier[m] . identifier[get_srcComponent] ()== identifier[mavutil] . identifier[mavlink] . identifier[MAV_COMP_ID_GIMBAL] keyword[and] identifier[mtype] == literal[string] : keyword[return] keyword[if] identifier[getattr] ( identifier[m] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[settings] . identifier[target_system] == identifier[m] . identifier[get_srcSystem] (): identifier[self] . identifier[handle_msec_timestamp] ( identifier[m] , identifier[master] ) keyword[if] identifier[mtype] keyword[in] identifier[activityPackets] : keyword[if] identifier[master] . identifier[linkerror] : identifier[master] . identifier[linkerror] = keyword[False] identifier[self] . identifier[say] ( literal[string] %( identifier[self] . identifier[link_label] ( identifier[master] ))) identifier[self] . identifier[status] . identifier[last_message] = identifier[time] . identifier[time] () identifier[master] . identifier[last_message] = identifier[self] . identifier[status] . identifier[last_message] keyword[if] identifier[master] . 
identifier[link_delayed] keyword[and] identifier[self] . identifier[mpstate] . identifier[settings] . identifier[checkdelay] : keyword[if] identifier[mtype] keyword[in] identifier[delayedPackets] : keyword[return] identifier[self] . identifier[master_msg_handling] ( identifier[m] , identifier[master] ) keyword[if] identifier[mtype] != literal[string] : keyword[if] identifier[self] . identifier[mpstate] . identifier[settings] . identifier[mavfwd_rate] keyword[or] identifier[mtype] != literal[string] : keyword[if] identifier[mtype] keyword[not] keyword[in] identifier[self] . identifier[no_fwd_types] : keyword[for] identifier[r] keyword[in] identifier[self] . identifier[mpstate] . identifier[mav_outputs] : identifier[r] . identifier[write] ( identifier[m] . identifier[get_msgbuf] ()) identifier[sysid] = identifier[m] . identifier[get_srcSystem] () identifier[target_sysid] = identifier[self] . identifier[target_system] keyword[for] ( identifier[mod] , identifier[pm] ) keyword[in] identifier[self] . identifier[mpstate] . identifier[modules] : keyword[if] keyword[not] identifier[hasattr] ( identifier[mod] , literal[string] ): keyword[continue] keyword[if] keyword[not] identifier[mod] . identifier[multi_vehicle] keyword[and] identifier[sysid] != identifier[target_sysid] : keyword[continue] keyword[try] : identifier[mod] . identifier[mavlink_packet] ( identifier[m] ) keyword[except] identifier[Exception] keyword[as] identifier[msg] : keyword[if] identifier[self] . identifier[mpstate] . identifier[settings] . identifier[moddebug] == literal[int] : identifier[print] ( identifier[msg] ) keyword[elif] identifier[self] . identifier[mpstate] . identifier[settings] . identifier[moddebug] > literal[int] : identifier[exc_type] , identifier[exc_value] , identifier[exc_traceback] = identifier[sys] . identifier[exc_info] () identifier[traceback] . 
identifier[print_exception] ( identifier[exc_type] , identifier[exc_value] , identifier[exc_traceback] , identifier[limit] = literal[int] , identifier[file] = identifier[sys] . identifier[stdout] )
def master_callback(self, m, master): """process mavlink message m on master, sending any messages to recipients""" # see if it is handled by a specialised sysid connection sysid = m.get_srcSystem() mtype = m.get_type() if sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) if mtype == 'GLOBAL_POSITION_INT': for modname in ('map', 'asterix', 'NMEA', 'NMEA2'): mod = self.module(modname) if mod is not None: mod.set_secondary_vehicle_position(m) # depends on [control=['if'], data=['mod']] # depends on [control=['for'], data=['modname']] # depends on [control=['if'], data=[]] return # depends on [control=['if'], data=['sysid']] if getattr(m, '_timestamp', None) is None: master.post_message(m) # depends on [control=['if'], data=[]] self.status.counters['MasterIn'][master.linknum] += 1 if mtype == 'GLOBAL_POSITION_INT': # send GLOBAL_POSITION_INT to 2nd GCS for 2nd vehicle display for sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) # depends on [control=['for'], data=['sysid']] if self.mpstate.settings.fwdpos: for link in self.mpstate.mav_master: if link != master: link.write(m.get_msgbuf()) # depends on [control=['if'], data=['link']] # depends on [control=['for'], data=['link']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # and log them if mtype not in dataPackets and self.mpstate.logqueue: # put link number in bottom 2 bits, so we can analyse packet # delay in saved logs usec = self.get_usec() usec = usec & ~3 | master.linknum self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf())) # depends on [control=['if'], data=[]] # keep the last message of each type around self.status.msgs[mtype] = m if mtype not in self.status.msg_count: self.status.msg_count[mtype] = 0 # depends on [control=['if'], data=['mtype']] self.status.msg_count[mtype] += 1 if m.get_srcComponent() == mavutil.mavlink.MAV_COMP_ID_GIMBAL and mtype == 'HEARTBEAT': # 
silence gimbal heartbeat packets for now return # depends on [control=['if'], data=[]] if getattr(m, 'time_boot_ms', None) is not None and self.settings.target_system == m.get_srcSystem(): # update link_delayed attribute self.handle_msec_timestamp(m, master) # depends on [control=['if'], data=[]] if mtype in activityPackets: if master.linkerror: master.linkerror = False self.say('link %s OK' % self.link_label(master)) # depends on [control=['if'], data=[]] self.status.last_message = time.time() master.last_message = self.status.last_message # depends on [control=['if'], data=[]] if master.link_delayed and self.mpstate.settings.checkdelay: # don't process delayed packets that cause double reporting if mtype in delayedPackets: return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.master_msg_handling(m, master) # don't pass along bad data if mtype != 'BAD_DATA': # pass messages along to listeners, except for REQUEST_DATA_STREAM, which # would lead a conflict in stream rate setting between mavproxy and the other # GCS if self.mpstate.settings.mavfwd_rate or mtype != 'REQUEST_DATA_STREAM': if mtype not in self.no_fwd_types: for r in self.mpstate.mav_outputs: r.write(m.get_msgbuf()) # depends on [control=['for'], data=['r']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] sysid = m.get_srcSystem() target_sysid = self.target_system # pass to modules for (mod, pm) in self.mpstate.modules: if not hasattr(mod, 'mavlink_packet'): continue # depends on [control=['if'], data=[]] if not mod.multi_vehicle and sysid != target_sysid: # only pass packets not from our target to modules that # have marked themselves as being multi-vehicle capable continue # depends on [control=['if'], data=[]] try: mod.mavlink_packet(m) # depends on [control=['try'], data=[]] except Exception as msg: if self.mpstate.settings.moddebug == 1: print(msg) # depends on [control=['if'], data=[]] elif self.mpstate.settings.moddebug > 1: 
(exc_type, exc_value, exc_traceback) = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['msg']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['mtype']]
def queue_bind(self, queue, exchange, routing_key='', nowait=False, arguments=None, ticket=None): """ bind queue to an exchange This method binds a queue to an exchange. Until a queue is bound it will not receive any messages. In a classic messaging model, store-and-forward queues are bound to a dest exchange and subscription queues are bound to a dest_wild exchange. RULE: A server MUST allow ignore duplicate bindings - that is, two or more bind methods for a specific queue, with identical arguments - without treating these as an error. RULE: If a bind fails, the server MUST raise a connection exception. RULE: The server MUST NOT allow a durable queue to bind to a transient exchange. If the client attempts this the server MUST raise a channel exception. RULE: Bindings for durable queues are automatically durable and the server SHOULD restore such bindings after a server restart. RULE: If the client attempts to an exchange that was declared as internal, the server MUST raise a connection exception with reply code 530 (not allowed). RULE: The server SHOULD support at least 4 bindings per queue, and ideally, impose no limit except as defined by available resources. PARAMETERS: queue: shortstr Specifies the name of the queue to bind. If the queue name is empty, refers to the current queue for the channel, which is the last declared queue. RULE: If the client did not previously declare a queue, and the queue name in this method is empty, the server MUST raise a connection exception with reply code 530 (not allowed). RULE: If the queue does not exist the server MUST raise a channel exception with reply code 404 (not found). exchange: shortstr The name of the exchange to bind to. RULE: If the exchange does not exist the server MUST raise a channel exception with reply code 404 (not found). routing_key: shortstr message routing key Specifies the routing key for the binding. The routing key is used for routing messages depending on the exchange configuration. 
Not all exchanges use a routing key - refer to the specific exchange documentation. If the routing key is empty and the queue name is empty, the routing key will be the current queue for the channel, which is the last declared queue. nowait: boolean do not send a reply method If set, the server will not respond to the method. The client should not wait for a reply method. If the server could not complete the method it will raise a channel or connection exception. arguments: table arguments for binding A set of arguments for the binding. The syntax and semantics of these arguments depends on the exchange class. ticket: short The client provides a valid access ticket giving "active" access rights to the queue's access realm. """ if arguments is None: arguments = {} args = AMQPWriter() if ticket is not None: args.write_short(ticket) else: args.write_short(self.default_ticket) args.write_shortstr(queue) args.write_shortstr(exchange) args.write_shortstr(routing_key) args.write_bit(nowait) args.write_table(arguments) self._send_method((50, 20), args) if not nowait: return self.wait(allowed_methods=[ (50, 21), # Channel.queue_bind_ok ])
def function[queue_bind, parameter[self, queue, exchange, routing_key, nowait, arguments, ticket]]: constant[ bind queue to an exchange This method binds a queue to an exchange. Until a queue is bound it will not receive any messages. In a classic messaging model, store-and-forward queues are bound to a dest exchange and subscription queues are bound to a dest_wild exchange. RULE: A server MUST allow ignore duplicate bindings - that is, two or more bind methods for a specific queue, with identical arguments - without treating these as an error. RULE: If a bind fails, the server MUST raise a connection exception. RULE: The server MUST NOT allow a durable queue to bind to a transient exchange. If the client attempts this the server MUST raise a channel exception. RULE: Bindings for durable queues are automatically durable and the server SHOULD restore such bindings after a server restart. RULE: If the client attempts to an exchange that was declared as internal, the server MUST raise a connection exception with reply code 530 (not allowed). RULE: The server SHOULD support at least 4 bindings per queue, and ideally, impose no limit except as defined by available resources. PARAMETERS: queue: shortstr Specifies the name of the queue to bind. If the queue name is empty, refers to the current queue for the channel, which is the last declared queue. RULE: If the client did not previously declare a queue, and the queue name in this method is empty, the server MUST raise a connection exception with reply code 530 (not allowed). RULE: If the queue does not exist the server MUST raise a channel exception with reply code 404 (not found). exchange: shortstr The name of the exchange to bind to. RULE: If the exchange does not exist the server MUST raise a channel exception with reply code 404 (not found). routing_key: shortstr message routing key Specifies the routing key for the binding. The routing key is used for routing messages depending on the exchange configuration. 
Not all exchanges use a routing key - refer to the specific exchange documentation. If the routing key is empty and the queue name is empty, the routing key will be the current queue for the channel, which is the last declared queue. nowait: boolean do not send a reply method If set, the server will not respond to the method. The client should not wait for a reply method. If the server could not complete the method it will raise a channel or connection exception. arguments: table arguments for binding A set of arguments for the binding. The syntax and semantics of these arguments depends on the exchange class. ticket: short The client provides a valid access ticket giving "active" access rights to the queue's access realm. ] if compare[name[arguments] is constant[None]] begin[:] variable[arguments] assign[=] dictionary[[], []] variable[args] assign[=] call[name[AMQPWriter], parameter[]] if compare[name[ticket] is_not constant[None]] begin[:] call[name[args].write_short, parameter[name[ticket]]] call[name[args].write_shortstr, parameter[name[queue]]] call[name[args].write_shortstr, parameter[name[exchange]]] call[name[args].write_shortstr, parameter[name[routing_key]]] call[name[args].write_bit, parameter[name[nowait]]] call[name[args].write_table, parameter[name[arguments]]] call[name[self]._send_method, parameter[tuple[[<ast.Constant object at 0x7da20c992950>, <ast.Constant object at 0x7da20c991450>]], name[args]]] if <ast.UnaryOp object at 0x7da20c990130> begin[:] return[call[name[self].wait, parameter[]]]
keyword[def] identifier[queue_bind] ( identifier[self] , identifier[queue] , identifier[exchange] , identifier[routing_key] = literal[string] , identifier[nowait] = keyword[False] , identifier[arguments] = keyword[None] , identifier[ticket] = keyword[None] ): literal[string] keyword[if] identifier[arguments] keyword[is] keyword[None] : identifier[arguments] ={} identifier[args] = identifier[AMQPWriter] () keyword[if] identifier[ticket] keyword[is] keyword[not] keyword[None] : identifier[args] . identifier[write_short] ( identifier[ticket] ) keyword[else] : identifier[args] . identifier[write_short] ( identifier[self] . identifier[default_ticket] ) identifier[args] . identifier[write_shortstr] ( identifier[queue] ) identifier[args] . identifier[write_shortstr] ( identifier[exchange] ) identifier[args] . identifier[write_shortstr] ( identifier[routing_key] ) identifier[args] . identifier[write_bit] ( identifier[nowait] ) identifier[args] . identifier[write_table] ( identifier[arguments] ) identifier[self] . identifier[_send_method] (( literal[int] , literal[int] ), identifier[args] ) keyword[if] keyword[not] identifier[nowait] : keyword[return] identifier[self] . identifier[wait] ( identifier[allowed_methods] =[ ( literal[int] , literal[int] ), ])
def queue_bind(self, queue, exchange, routing_key='', nowait=False, arguments=None, ticket=None): """ bind queue to an exchange This method binds a queue to an exchange. Until a queue is bound it will not receive any messages. In a classic messaging model, store-and-forward queues are bound to a dest exchange and subscription queues are bound to a dest_wild exchange. RULE: A server MUST allow ignore duplicate bindings - that is, two or more bind methods for a specific queue, with identical arguments - without treating these as an error. RULE: If a bind fails, the server MUST raise a connection exception. RULE: The server MUST NOT allow a durable queue to bind to a transient exchange. If the client attempts this the server MUST raise a channel exception. RULE: Bindings for durable queues are automatically durable and the server SHOULD restore such bindings after a server restart. RULE: If the client attempts to an exchange that was declared as internal, the server MUST raise a connection exception with reply code 530 (not allowed). RULE: The server SHOULD support at least 4 bindings per queue, and ideally, impose no limit except as defined by available resources. PARAMETERS: queue: shortstr Specifies the name of the queue to bind. If the queue name is empty, refers to the current queue for the channel, which is the last declared queue. RULE: If the client did not previously declare a queue, and the queue name in this method is empty, the server MUST raise a connection exception with reply code 530 (not allowed). RULE: If the queue does not exist the server MUST raise a channel exception with reply code 404 (not found). exchange: shortstr The name of the exchange to bind to. RULE: If the exchange does not exist the server MUST raise a channel exception with reply code 404 (not found). routing_key: shortstr message routing key Specifies the routing key for the binding. The routing key is used for routing messages depending on the exchange configuration. 
Not all exchanges use a routing key - refer to the specific exchange documentation. If the routing key is empty and the queue name is empty, the routing key will be the current queue for the channel, which is the last declared queue. nowait: boolean do not send a reply method If set, the server will not respond to the method. The client should not wait for a reply method. If the server could not complete the method it will raise a channel or connection exception. arguments: table arguments for binding A set of arguments for the binding. The syntax and semantics of these arguments depends on the exchange class. ticket: short The client provides a valid access ticket giving "active" access rights to the queue's access realm. """ if arguments is None: arguments = {} # depends on [control=['if'], data=['arguments']] args = AMQPWriter() if ticket is not None: args.write_short(ticket) # depends on [control=['if'], data=['ticket']] else: args.write_short(self.default_ticket) args.write_shortstr(queue) args.write_shortstr(exchange) args.write_shortstr(routing_key) args.write_bit(nowait) args.write_table(arguments) self._send_method((50, 20), args) if not nowait: # Channel.queue_bind_ok return self.wait(allowed_methods=[(50, 21)]) # depends on [control=['if'], data=[]]
def available(self): """ Check whether we have a PEP identity associated with our account. """ disco_info = yield from self._disco_client.query_info( self.client.local_jid.bare() ) for item in disco_info.identities.filter(attrs={"category": "pubsub"}): if item.type_ == "pep": return True return False
def function[available, parameter[self]]: constant[ Check whether we have a PEP identity associated with our account. ] variable[disco_info] assign[=] <ast.YieldFrom object at 0x7da20c796890> for taget[name[item]] in starred[call[name[disco_info].identities.filter, parameter[]]] begin[:] if compare[name[item].type_ equal[==] constant[pep]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[available] ( identifier[self] ): literal[string] identifier[disco_info] = keyword[yield] keyword[from] identifier[self] . identifier[_disco_client] . identifier[query_info] ( identifier[self] . identifier[client] . identifier[local_jid] . identifier[bare] () ) keyword[for] identifier[item] keyword[in] identifier[disco_info] . identifier[identities] . identifier[filter] ( identifier[attrs] ={ literal[string] : literal[string] }): keyword[if] identifier[item] . identifier[type_] == literal[string] : keyword[return] keyword[True] keyword[return] keyword[False]
def available(self): """ Check whether we have a PEP identity associated with our account. """ disco_info = (yield from self._disco_client.query_info(self.client.local_jid.bare())) for item in disco_info.identities.filter(attrs={'category': 'pubsub'}): if item.type_ == 'pep': return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] return False
def _try_get_solutions(self, address, size, access, max_solutions=0x1000, force=False): """ Try to solve for a symbolic address, checking permissions when reading/writing size bytes. :param Expression address: The address to solve for :param int size: How many bytes to check permissions for :param str access: 'r' or 'w' :param int max_solutions: Will raise if more solutions are found :param force: Whether to ignore permission failure :rtype: list """ assert issymbolic(address) solutions = solver.get_all_values(self.constraints, address, maxcnt=max_solutions) crashing_condition = False for base in solutions: if not self.access_ok(slice(base, base + size), access, force): crashing_condition = Operators.OR(address == base, crashing_condition) if solver.can_be_true(self.constraints, crashing_condition): raise InvalidSymbolicMemoryAccess(address, access, size, crashing_condition) return solutions
def function[_try_get_solutions, parameter[self, address, size, access, max_solutions, force]]: constant[ Try to solve for a symbolic address, checking permissions when reading/writing size bytes. :param Expression address: The address to solve for :param int size: How many bytes to check permissions for :param str access: 'r' or 'w' :param int max_solutions: Will raise if more solutions are found :param force: Whether to ignore permission failure :rtype: list ] assert[call[name[issymbolic], parameter[name[address]]]] variable[solutions] assign[=] call[name[solver].get_all_values, parameter[name[self].constraints, name[address]]] variable[crashing_condition] assign[=] constant[False] for taget[name[base]] in starred[name[solutions]] begin[:] if <ast.UnaryOp object at 0x7da18dc05870> begin[:] variable[crashing_condition] assign[=] call[name[Operators].OR, parameter[compare[name[address] equal[==] name[base]], name[crashing_condition]]] if call[name[solver].can_be_true, parameter[name[self].constraints, name[crashing_condition]]] begin[:] <ast.Raise object at 0x7da1b000f7c0> return[name[solutions]]
keyword[def] identifier[_try_get_solutions] ( identifier[self] , identifier[address] , identifier[size] , identifier[access] , identifier[max_solutions] = literal[int] , identifier[force] = keyword[False] ): literal[string] keyword[assert] identifier[issymbolic] ( identifier[address] ) identifier[solutions] = identifier[solver] . identifier[get_all_values] ( identifier[self] . identifier[constraints] , identifier[address] , identifier[maxcnt] = identifier[max_solutions] ) identifier[crashing_condition] = keyword[False] keyword[for] identifier[base] keyword[in] identifier[solutions] : keyword[if] keyword[not] identifier[self] . identifier[access_ok] ( identifier[slice] ( identifier[base] , identifier[base] + identifier[size] ), identifier[access] , identifier[force] ): identifier[crashing_condition] = identifier[Operators] . identifier[OR] ( identifier[address] == identifier[base] , identifier[crashing_condition] ) keyword[if] identifier[solver] . identifier[can_be_true] ( identifier[self] . identifier[constraints] , identifier[crashing_condition] ): keyword[raise] identifier[InvalidSymbolicMemoryAccess] ( identifier[address] , identifier[access] , identifier[size] , identifier[crashing_condition] ) keyword[return] identifier[solutions]
def _try_get_solutions(self, address, size, access, max_solutions=4096, force=False): """ Try to solve for a symbolic address, checking permissions when reading/writing size bytes. :param Expression address: The address to solve for :param int size: How many bytes to check permissions for :param str access: 'r' or 'w' :param int max_solutions: Will raise if more solutions are found :param force: Whether to ignore permission failure :rtype: list """ assert issymbolic(address) solutions = solver.get_all_values(self.constraints, address, maxcnt=max_solutions) crashing_condition = False for base in solutions: if not self.access_ok(slice(base, base + size), access, force): crashing_condition = Operators.OR(address == base, crashing_condition) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['base']] if solver.can_be_true(self.constraints, crashing_condition): raise InvalidSymbolicMemoryAccess(address, access, size, crashing_condition) # depends on [control=['if'], data=[]] return solutions
def add_argument(parser, dest, info): """ Add an argument to an `argparse.ArgumentParser` object Parameters ---------- parser : `argparse.ArgumentParser` The parser in question dest : str The destination for the argument info : `tuple` The information associated with the argument in question. """ default, helpstr, typeinfo = info if dest == 'args': parser.add_argument('args', nargs='+', default=None, help=helpstr) elif typeinfo == list: parser.add_argument('--%s' % dest, action='append', help=helpstr) elif typeinfo == bool: parser.add_argument('--%s' % dest, action='store_true', help=helpstr) else: parser.add_argument('--%s' % dest, action='store', type=typeinfo, default=default, help=helpstr)
def function[add_argument, parameter[parser, dest, info]]: constant[ Add an argument to an `argparse.ArgumentParser` object Parameters ---------- parser : `argparse.ArgumentParser` The parser in question dest : str The destination for the argument info : `tuple` The information associated with the argument in question. ] <ast.Tuple object at 0x7da18dc07280> assign[=] name[info] if compare[name[dest] equal[==] constant[args]] begin[:] call[name[parser].add_argument, parameter[constant[args]]]
keyword[def] identifier[add_argument] ( identifier[parser] , identifier[dest] , identifier[info] ): literal[string] identifier[default] , identifier[helpstr] , identifier[typeinfo] = identifier[info] keyword[if] identifier[dest] == literal[string] : identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[default] = keyword[None] , identifier[help] = identifier[helpstr] ) keyword[elif] identifier[typeinfo] == identifier[list] : identifier[parser] . identifier[add_argument] ( literal[string] % identifier[dest] , identifier[action] = literal[string] , identifier[help] = identifier[helpstr] ) keyword[elif] identifier[typeinfo] == identifier[bool] : identifier[parser] . identifier[add_argument] ( literal[string] % identifier[dest] , identifier[action] = literal[string] , identifier[help] = identifier[helpstr] ) keyword[else] : identifier[parser] . identifier[add_argument] ( literal[string] % identifier[dest] , identifier[action] = literal[string] , identifier[type] = identifier[typeinfo] , identifier[default] = identifier[default] , identifier[help] = identifier[helpstr] )
def add_argument(parser, dest, info): """ Add an argument to an `argparse.ArgumentParser` object Parameters ---------- parser : `argparse.ArgumentParser` The parser in question dest : str The destination for the argument info : `tuple` The information associated with the argument in question. """ (default, helpstr, typeinfo) = info if dest == 'args': parser.add_argument('args', nargs='+', default=None, help=helpstr) # depends on [control=['if'], data=[]] elif typeinfo == list: parser.add_argument('--%s' % dest, action='append', help=helpstr) # depends on [control=['if'], data=[]] elif typeinfo == bool: parser.add_argument('--%s' % dest, action='store_true', help=helpstr) # depends on [control=['if'], data=[]] else: parser.add_argument('--%s' % dest, action='store', type=typeinfo, default=default, help=helpstr)
def model_ext_functions(vk, model): """Fill the model with extensions functions""" model['ext_functions'] = {'instance': {}, 'device': {}} # invert the alias to better lookup alias = {v: k for k, v in model['alias'].items()} for extension in get_extensions_filtered(vk): for req in extension['require']: if not req.get('command'): continue ext_type = extension['@type'] for x in req['command']: name = x['@name'] if name in alias.keys(): model['ext_functions'][ext_type][name] = alias[name] else: model['ext_functions'][ext_type][name] = name
def function[model_ext_functions, parameter[vk, model]]: constant[Fill the model with extensions functions] call[name[model]][constant[ext_functions]] assign[=] dictionary[[<ast.Constant object at 0x7da1b077ae00>, <ast.Constant object at 0x7da1b07793c0>], [<ast.Dict object at 0x7da1b0778550>, <ast.Dict object at 0x7da1b077b9a0>]] variable[alias] assign[=] <ast.DictComp object at 0x7da1b0779390> for taget[name[extension]] in starred[call[name[get_extensions_filtered], parameter[name[vk]]]] begin[:] for taget[name[req]] in starred[call[name[extension]][constant[require]]] begin[:] if <ast.UnaryOp object at 0x7da1b077bb20> begin[:] continue variable[ext_type] assign[=] call[name[extension]][constant[@type]] for taget[name[x]] in starred[call[name[req]][constant[command]]] begin[:] variable[name] assign[=] call[name[x]][constant[@name]] if compare[name[name] in call[name[alias].keys, parameter[]]] begin[:] call[call[call[name[model]][constant[ext_functions]]][name[ext_type]]][name[name]] assign[=] call[name[alias]][name[name]]
keyword[def] identifier[model_ext_functions] ( identifier[vk] , identifier[model] ): literal[string] identifier[model] [ literal[string] ]={ literal[string] :{}, literal[string] :{}} identifier[alias] ={ identifier[v] : identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[model] [ literal[string] ]. identifier[items] ()} keyword[for] identifier[extension] keyword[in] identifier[get_extensions_filtered] ( identifier[vk] ): keyword[for] identifier[req] keyword[in] identifier[extension] [ literal[string] ]: keyword[if] keyword[not] identifier[req] . identifier[get] ( literal[string] ): keyword[continue] identifier[ext_type] = identifier[extension] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[req] [ literal[string] ]: identifier[name] = identifier[x] [ literal[string] ] keyword[if] identifier[name] keyword[in] identifier[alias] . identifier[keys] (): identifier[model] [ literal[string] ][ identifier[ext_type] ][ identifier[name] ]= identifier[alias] [ identifier[name] ] keyword[else] : identifier[model] [ literal[string] ][ identifier[ext_type] ][ identifier[name] ]= identifier[name]
def model_ext_functions(vk, model): """Fill the model with extensions functions""" model['ext_functions'] = {'instance': {}, 'device': {}} # invert the alias to better lookup alias = {v: k for (k, v) in model['alias'].items()} for extension in get_extensions_filtered(vk): for req in extension['require']: if not req.get('command'): continue # depends on [control=['if'], data=[]] ext_type = extension['@type'] for x in req['command']: name = x['@name'] if name in alias.keys(): model['ext_functions'][ext_type][name] = alias[name] # depends on [control=['if'], data=['name']] else: model['ext_functions'][ext_type][name] = name # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['req']] # depends on [control=['for'], data=['extension']]
def jsonify(*args, **kwargs): """ jsonify with support for MongoDB ObjectId """ return Response( json.dumps( dict( *args, **kwargs), cls=MongoJSONEncoder), mimetype='application/json')
def function[jsonify, parameter[]]: constant[ jsonify with support for MongoDB ObjectId ] return[call[name[Response], parameter[call[name[json].dumps, parameter[call[name[dict], parameter[<ast.Starred object at 0x7da1b0859a80>]]]]]]]
keyword[def] identifier[jsonify] (* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[Response] ( identifier[json] . identifier[dumps] ( identifier[dict] ( * identifier[args] , ** identifier[kwargs] ), identifier[cls] = identifier[MongoJSONEncoder] ), identifier[mimetype] = literal[string] )
def jsonify(*args, **kwargs): """ jsonify with support for MongoDB ObjectId """ return Response(json.dumps(dict(*args, **kwargs), cls=MongoJSONEncoder), mimetype='application/json')
def cast(self, value, **opts): """Convert the given value to the target type. Return ``None`` if the value is empty. If an error occurs, raise a ``ConverterError``. """ if isinstance(value, self.result_type): return value if self._is_null(value): return None try: opts_ = self.opts.copy() opts_.update(opts) return self._cast(value, **opts_) except Exception as e: if not isinstance(e, ConverterError): e = ConverterError(None, exc=e) e.converter = self.__class__ raise e
def function[cast, parameter[self, value]]: constant[Convert the given value to the target type. Return ``None`` if the value is empty. If an error occurs, raise a ``ConverterError``. ] if call[name[isinstance], parameter[name[value], name[self].result_type]] begin[:] return[name[value]] if call[name[self]._is_null, parameter[name[value]]] begin[:] return[constant[None]] <ast.Try object at 0x7da1b0a21150>
keyword[def] identifier[cast] ( identifier[self] , identifier[value] ,** identifier[opts] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[self] . identifier[result_type] ): keyword[return] identifier[value] keyword[if] identifier[self] . identifier[_is_null] ( identifier[value] ): keyword[return] keyword[None] keyword[try] : identifier[opts_] = identifier[self] . identifier[opts] . identifier[copy] () identifier[opts_] . identifier[update] ( identifier[opts] ) keyword[return] identifier[self] . identifier[_cast] ( identifier[value] ,** identifier[opts_] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] keyword[not] identifier[isinstance] ( identifier[e] , identifier[ConverterError] ): identifier[e] = identifier[ConverterError] ( keyword[None] , identifier[exc] = identifier[e] ) identifier[e] . identifier[converter] = identifier[self] . identifier[__class__] keyword[raise] identifier[e]
def cast(self, value, **opts): """Convert the given value to the target type. Return ``None`` if the value is empty. If an error occurs, raise a ``ConverterError``. """ if isinstance(value, self.result_type): return value # depends on [control=['if'], data=[]] if self._is_null(value): return None # depends on [control=['if'], data=[]] try: opts_ = self.opts.copy() opts_.update(opts) return self._cast(value, **opts_) # depends on [control=['try'], data=[]] except Exception as e: if not isinstance(e, ConverterError): e = ConverterError(None, exc=e) # depends on [control=['if'], data=[]] e.converter = self.__class__ raise e # depends on [control=['except'], data=['e']]
def unhide_alert(self, id, **kwargs): # noqa: E501 """Unhide a specific integration alert # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.unhide_alert(id, async_req=True) >>> result = thread.get() :param async_req bool :param int id: (required) :return: ResponseContainerAlert If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.unhide_alert_with_http_info(id, **kwargs) # noqa: E501 else: (data) = self.unhide_alert_with_http_info(id, **kwargs) # noqa: E501 return data
def function[unhide_alert, parameter[self, id]]: constant[Unhide a specific integration alert # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.unhide_alert(id, async_req=True) >>> result = thread.get() :param async_req bool :param int id: (required) :return: ResponseContainerAlert If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].unhide_alert_with_http_info, parameter[name[id]]]]
keyword[def] identifier[unhide_alert] ( identifier[self] , identifier[id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[unhide_alert_with_http_info] ( identifier[id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[unhide_alert_with_http_info] ( identifier[id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def unhide_alert(self, id, **kwargs): # noqa: E501 'Unhide a specific integration alert # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.unhide_alert(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int id: (required)\n :return: ResponseContainerAlert\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.unhide_alert_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.unhide_alert_with_http_info(id, **kwargs) # noqa: E501 return data
def order_book(self, group=True, base="btc", quote="usd"): """ Returns dictionary with "bids" and "asks". Each is a list of open orders and each order is represented as a list of price and amount. """ params = {'group': group} url = self._construct_url("order_book/", base, quote) return self._get(url, params=params, return_json=True, version=2)
def function[order_book, parameter[self, group, base, quote]]: constant[ Returns dictionary with "bids" and "asks". Each is a list of open orders and each order is represented as a list of price and amount. ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c990910>], [<ast.Name object at 0x7da20c992ef0>]] variable[url] assign[=] call[name[self]._construct_url, parameter[constant[order_book/], name[base], name[quote]]] return[call[name[self]._get, parameter[name[url]]]]
keyword[def] identifier[order_book] ( identifier[self] , identifier[group] = keyword[True] , identifier[base] = literal[string] , identifier[quote] = literal[string] ): literal[string] identifier[params] ={ literal[string] : identifier[group] } identifier[url] = identifier[self] . identifier[_construct_url] ( literal[string] , identifier[base] , identifier[quote] ) keyword[return] identifier[self] . identifier[_get] ( identifier[url] , identifier[params] = identifier[params] , identifier[return_json] = keyword[True] , identifier[version] = literal[int] )
def order_book(self, group=True, base='btc', quote='usd'): """ Returns dictionary with "bids" and "asks". Each is a list of open orders and each order is represented as a list of price and amount. """ params = {'group': group} url = self._construct_url('order_book/', base, quote) return self._get(url, params=params, return_json=True, version=2)
def write_zip_fp(fp, data, properties, dir_data_list=None): """ Write custom zip file of data and properties to fp :param fp: the file point to which to write the header :param data: the data to write to the file; may be None :param properties: the properties to write to the file; may be None :param dir_data_list: optional list of directory header information structures If dir_data_list is specified, data should be None and properties should be specified. Then the existing data structure will be left alone and only the directory headers and end of directory header will be written. Otherwise, if both data and properties are specified, both are written out in full. The properties param must not change during this method. Callers should take care to ensure this does not happen. """ assert data is not None or properties is not None # dir_data_list has the format: local file record offset, name, data length, crc32 dir_data_list = list() if dir_data_list is None else dir_data_list dt = datetime.datetime.now() if data is not None: offset_data = fp.tell() def write_data(fp): numpy_start_pos = fp.tell() numpy.save(fp, data) numpy_end_pos = fp.tell() fp.seek(numpy_start_pos) data_c = numpy.require(data, dtype=data.dtype, requirements=["C_CONTIGUOUS"]) header_data = fp.read((numpy_end_pos - numpy_start_pos) - data_c.nbytes) # read the header data_crc32 = binascii.crc32(data_c.data, binascii.crc32(header_data)) & 0xFFFFFFFF fp.seek(numpy_end_pos) return data_crc32 data_len, crc32 = write_local_file(fp, b"data.npy", write_data, dt) dir_data_list.append((offset_data, b"data.npy", data_len, crc32)) if properties is not None: json_str = str() try: class JSONEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Geometry.IntPoint) or isinstance(obj, Geometry.IntSize) or isinstance(obj, Geometry.IntRect) or isinstance(obj, Geometry.FloatPoint) or isinstance(obj, Geometry.FloatSize) or isinstance(obj, Geometry.FloatRect): return tuple(obj) else: return 
json.JSONEncoder.default(self, obj) json_io = io.StringIO() json.dump(properties, json_io, cls=JSONEncoder) json_str = json_io.getvalue() except Exception as e: # catch exceptions to avoid corrupt zip files import traceback logging.error("Exception writing zip file %s" + str(e)) traceback.print_exc() traceback.print_stack() def write_json(fp): json_bytes = bytes(json_str, 'ISO-8859-1') fp.write(json_bytes) return binascii.crc32(json_bytes) & 0xFFFFFFFF offset_json = fp.tell() json_len, json_crc32 = write_local_file(fp, b"metadata.json", write_json, dt) dir_data_list.append((offset_json, b"metadata.json", json_len, json_crc32)) dir_offset = fp.tell() for offset, name_bytes, data_len, crc32 in dir_data_list: write_directory_data(fp, offset, name_bytes, data_len, crc32, dt) dir_size = fp.tell() - dir_offset write_end_of_directory(fp, dir_size, dir_offset, len(dir_data_list)) fp.truncate()
def function[write_zip_fp, parameter[fp, data, properties, dir_data_list]]: constant[ Write custom zip file of data and properties to fp :param fp: the file point to which to write the header :param data: the data to write to the file; may be None :param properties: the properties to write to the file; may be None :param dir_data_list: optional list of directory header information structures If dir_data_list is specified, data should be None and properties should be specified. Then the existing data structure will be left alone and only the directory headers and end of directory header will be written. Otherwise, if both data and properties are specified, both are written out in full. The properties param must not change during this method. Callers should take care to ensure this does not happen. ] assert[<ast.BoolOp object at 0x7da18f812f80>] variable[dir_data_list] assign[=] <ast.IfExp object at 0x7da18f811a50> variable[dt] assign[=] call[name[datetime].datetime.now, parameter[]] if compare[name[data] is_not constant[None]] begin[:] variable[offset_data] assign[=] call[name[fp].tell, parameter[]] def function[write_data, parameter[fp]]: variable[numpy_start_pos] assign[=] call[name[fp].tell, parameter[]] call[name[numpy].save, parameter[name[fp], name[data]]] variable[numpy_end_pos] assign[=] call[name[fp].tell, parameter[]] call[name[fp].seek, parameter[name[numpy_start_pos]]] variable[data_c] assign[=] call[name[numpy].require, parameter[name[data]]] variable[header_data] assign[=] call[name[fp].read, parameter[binary_operation[binary_operation[name[numpy_end_pos] - name[numpy_start_pos]] - name[data_c].nbytes]]] variable[data_crc32] assign[=] binary_operation[call[name[binascii].crc32, parameter[name[data_c].data, call[name[binascii].crc32, parameter[name[header_data]]]]] <ast.BitAnd object at 0x7da2590d6b60> constant[4294967295]] call[name[fp].seek, parameter[name[numpy_end_pos]]] return[name[data_crc32]] <ast.Tuple object at 0x7da1b0e3c2b0> assign[=] 
call[name[write_local_file], parameter[name[fp], constant[b'data.npy'], name[write_data], name[dt]]] call[name[dir_data_list].append, parameter[tuple[[<ast.Name object at 0x7da18f810b50>, <ast.Constant object at 0x7da18f8114b0>, <ast.Name object at 0x7da18f811840>, <ast.Name object at 0x7da18f811c60>]]]] if compare[name[properties] is_not constant[None]] begin[:] variable[json_str] assign[=] call[name[str], parameter[]] <ast.Try object at 0x7da18f811600> def function[write_json, parameter[fp]]: variable[json_bytes] assign[=] call[name[bytes], parameter[name[json_str], constant[ISO-8859-1]]] call[name[fp].write, parameter[name[json_bytes]]] return[binary_operation[call[name[binascii].crc32, parameter[name[json_bytes]]] <ast.BitAnd object at 0x7da2590d6b60> constant[4294967295]]] variable[offset_json] assign[=] call[name[fp].tell, parameter[]] <ast.Tuple object at 0x7da1b0e9e860> assign[=] call[name[write_local_file], parameter[name[fp], constant[b'metadata.json'], name[write_json], name[dt]]] call[name[dir_data_list].append, parameter[tuple[[<ast.Name object at 0x7da1b0e9c760>, <ast.Constant object at 0x7da1b0e9da20>, <ast.Name object at 0x7da1b0e9ceb0>, <ast.Name object at 0x7da1b0e9ca90>]]]] variable[dir_offset] assign[=] call[name[fp].tell, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0e9e650>, <ast.Name object at 0x7da1b0e9c610>, <ast.Name object at 0x7da1b0e9e980>, <ast.Name object at 0x7da1b0e9e9b0>]]] in starred[name[dir_data_list]] begin[:] call[name[write_directory_data], parameter[name[fp], name[offset], name[name_bytes], name[data_len], name[crc32], name[dt]]] variable[dir_size] assign[=] binary_operation[call[name[fp].tell, parameter[]] - name[dir_offset]] call[name[write_end_of_directory], parameter[name[fp], name[dir_size], name[dir_offset], call[name[len], parameter[name[dir_data_list]]]]] call[name[fp].truncate, parameter[]]
keyword[def] identifier[write_zip_fp] ( identifier[fp] , identifier[data] , identifier[properties] , identifier[dir_data_list] = keyword[None] ): literal[string] keyword[assert] identifier[data] keyword[is] keyword[not] keyword[None] keyword[or] identifier[properties] keyword[is] keyword[not] keyword[None] identifier[dir_data_list] = identifier[list] () keyword[if] identifier[dir_data_list] keyword[is] keyword[None] keyword[else] identifier[dir_data_list] identifier[dt] = identifier[datetime] . identifier[datetime] . identifier[now] () keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] : identifier[offset_data] = identifier[fp] . identifier[tell] () keyword[def] identifier[write_data] ( identifier[fp] ): identifier[numpy_start_pos] = identifier[fp] . identifier[tell] () identifier[numpy] . identifier[save] ( identifier[fp] , identifier[data] ) identifier[numpy_end_pos] = identifier[fp] . identifier[tell] () identifier[fp] . identifier[seek] ( identifier[numpy_start_pos] ) identifier[data_c] = identifier[numpy] . identifier[require] ( identifier[data] , identifier[dtype] = identifier[data] . identifier[dtype] , identifier[requirements] =[ literal[string] ]) identifier[header_data] = identifier[fp] . identifier[read] (( identifier[numpy_end_pos] - identifier[numpy_start_pos] )- identifier[data_c] . identifier[nbytes] ) identifier[data_crc32] = identifier[binascii] . identifier[crc32] ( identifier[data_c] . identifier[data] , identifier[binascii] . identifier[crc32] ( identifier[header_data] ))& literal[int] identifier[fp] . identifier[seek] ( identifier[numpy_end_pos] ) keyword[return] identifier[data_crc32] identifier[data_len] , identifier[crc32] = identifier[write_local_file] ( identifier[fp] , literal[string] , identifier[write_data] , identifier[dt] ) identifier[dir_data_list] . 
identifier[append] (( identifier[offset_data] , literal[string] , identifier[data_len] , identifier[crc32] )) keyword[if] identifier[properties] keyword[is] keyword[not] keyword[None] : identifier[json_str] = identifier[str] () keyword[try] : keyword[class] identifier[JSONEncoder] ( identifier[json] . identifier[JSONEncoder] ): keyword[def] identifier[default] ( identifier[self] , identifier[obj] ): keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Geometry] . identifier[IntPoint] ) keyword[or] identifier[isinstance] ( identifier[obj] , identifier[Geometry] . identifier[IntSize] ) keyword[or] identifier[isinstance] ( identifier[obj] , identifier[Geometry] . identifier[IntRect] ) keyword[or] identifier[isinstance] ( identifier[obj] , identifier[Geometry] . identifier[FloatPoint] ) keyword[or] identifier[isinstance] ( identifier[obj] , identifier[Geometry] . identifier[FloatSize] ) keyword[or] identifier[isinstance] ( identifier[obj] , identifier[Geometry] . identifier[FloatRect] ): keyword[return] identifier[tuple] ( identifier[obj] ) keyword[else] : keyword[return] identifier[json] . identifier[JSONEncoder] . identifier[default] ( identifier[self] , identifier[obj] ) identifier[json_io] = identifier[io] . identifier[StringIO] () identifier[json] . identifier[dump] ( identifier[properties] , identifier[json_io] , identifier[cls] = identifier[JSONEncoder] ) identifier[json_str] = identifier[json_io] . identifier[getvalue] () keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[import] identifier[traceback] identifier[logging] . identifier[error] ( literal[string] + identifier[str] ( identifier[e] )) identifier[traceback] . identifier[print_exc] () identifier[traceback] . identifier[print_stack] () keyword[def] identifier[write_json] ( identifier[fp] ): identifier[json_bytes] = identifier[bytes] ( identifier[json_str] , literal[string] ) identifier[fp] . 
identifier[write] ( identifier[json_bytes] ) keyword[return] identifier[binascii] . identifier[crc32] ( identifier[json_bytes] )& literal[int] identifier[offset_json] = identifier[fp] . identifier[tell] () identifier[json_len] , identifier[json_crc32] = identifier[write_local_file] ( identifier[fp] , literal[string] , identifier[write_json] , identifier[dt] ) identifier[dir_data_list] . identifier[append] (( identifier[offset_json] , literal[string] , identifier[json_len] , identifier[json_crc32] )) identifier[dir_offset] = identifier[fp] . identifier[tell] () keyword[for] identifier[offset] , identifier[name_bytes] , identifier[data_len] , identifier[crc32] keyword[in] identifier[dir_data_list] : identifier[write_directory_data] ( identifier[fp] , identifier[offset] , identifier[name_bytes] , identifier[data_len] , identifier[crc32] , identifier[dt] ) identifier[dir_size] = identifier[fp] . identifier[tell] ()- identifier[dir_offset] identifier[write_end_of_directory] ( identifier[fp] , identifier[dir_size] , identifier[dir_offset] , identifier[len] ( identifier[dir_data_list] )) identifier[fp] . identifier[truncate] ()
def write_zip_fp(fp, data, properties, dir_data_list=None): """ Write custom zip file of data and properties to fp :param fp: the file point to which to write the header :param data: the data to write to the file; may be None :param properties: the properties to write to the file; may be None :param dir_data_list: optional list of directory header information structures If dir_data_list is specified, data should be None and properties should be specified. Then the existing data structure will be left alone and only the directory headers and end of directory header will be written. Otherwise, if both data and properties are specified, both are written out in full. The properties param must not change during this method. Callers should take care to ensure this does not happen. """ assert data is not None or properties is not None # dir_data_list has the format: local file record offset, name, data length, crc32 dir_data_list = list() if dir_data_list is None else dir_data_list dt = datetime.datetime.now() if data is not None: offset_data = fp.tell() def write_data(fp): numpy_start_pos = fp.tell() numpy.save(fp, data) numpy_end_pos = fp.tell() fp.seek(numpy_start_pos) data_c = numpy.require(data, dtype=data.dtype, requirements=['C_CONTIGUOUS']) header_data = fp.read(numpy_end_pos - numpy_start_pos - data_c.nbytes) # read the header data_crc32 = binascii.crc32(data_c.data, binascii.crc32(header_data)) & 4294967295 fp.seek(numpy_end_pos) return data_crc32 (data_len, crc32) = write_local_file(fp, b'data.npy', write_data, dt) dir_data_list.append((offset_data, b'data.npy', data_len, crc32)) # depends on [control=['if'], data=['data']] if properties is not None: json_str = str() try: class JSONEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Geometry.IntPoint) or isinstance(obj, Geometry.IntSize) or isinstance(obj, Geometry.IntRect) or isinstance(obj, Geometry.FloatPoint) or isinstance(obj, Geometry.FloatSize) or isinstance(obj, Geometry.FloatRect): 
return tuple(obj) # depends on [control=['if'], data=[]] else: return json.JSONEncoder.default(self, obj) json_io = io.StringIO() json.dump(properties, json_io, cls=JSONEncoder) json_str = json_io.getvalue() # depends on [control=['try'], data=[]] except Exception as e: # catch exceptions to avoid corrupt zip files import traceback logging.error('Exception writing zip file %s' + str(e)) traceback.print_exc() traceback.print_stack() # depends on [control=['except'], data=['e']] def write_json(fp): json_bytes = bytes(json_str, 'ISO-8859-1') fp.write(json_bytes) return binascii.crc32(json_bytes) & 4294967295 offset_json = fp.tell() (json_len, json_crc32) = write_local_file(fp, b'metadata.json', write_json, dt) dir_data_list.append((offset_json, b'metadata.json', json_len, json_crc32)) # depends on [control=['if'], data=['properties']] dir_offset = fp.tell() for (offset, name_bytes, data_len, crc32) in dir_data_list: write_directory_data(fp, offset, name_bytes, data_len, crc32, dt) # depends on [control=['for'], data=[]] dir_size = fp.tell() - dir_offset write_end_of_directory(fp, dir_size, dir_offset, len(dir_data_list)) fp.truncate()
def _reverse_call(self, related_method, *values): """ Convert each value to a related field, then call the method on each field, passing self.instance as argument. If related_method is a string, it will be the method of the related field. If it's a callable, it's a function which accept the related field and self.instance. """ related_fields = self._to_fields(*values) for related_field in related_fields: if callable(related_method): related_method(related_field, self.instance._pk) else: getattr(related_field, related_method)(self.instance._pk)
def function[_reverse_call, parameter[self, related_method]]: constant[ Convert each value to a related field, then call the method on each field, passing self.instance as argument. If related_method is a string, it will be the method of the related field. If it's a callable, it's a function which accept the related field and self.instance. ] variable[related_fields] assign[=] call[name[self]._to_fields, parameter[<ast.Starred object at 0x7da1b15b5c90>]] for taget[name[related_field]] in starred[name[related_fields]] begin[:] if call[name[callable], parameter[name[related_method]]] begin[:] call[name[related_method], parameter[name[related_field], name[self].instance._pk]]
keyword[def] identifier[_reverse_call] ( identifier[self] , identifier[related_method] ,* identifier[values] ): literal[string] identifier[related_fields] = identifier[self] . identifier[_to_fields] (* identifier[values] ) keyword[for] identifier[related_field] keyword[in] identifier[related_fields] : keyword[if] identifier[callable] ( identifier[related_method] ): identifier[related_method] ( identifier[related_field] , identifier[self] . identifier[instance] . identifier[_pk] ) keyword[else] : identifier[getattr] ( identifier[related_field] , identifier[related_method] )( identifier[self] . identifier[instance] . identifier[_pk] )
def _reverse_call(self, related_method, *values): """ Convert each value to a related field, then call the method on each field, passing self.instance as argument. If related_method is a string, it will be the method of the related field. If it's a callable, it's a function which accept the related field and self.instance. """ related_fields = self._to_fields(*values) for related_field in related_fields: if callable(related_method): related_method(related_field, self.instance._pk) # depends on [control=['if'], data=[]] else: getattr(related_field, related_method)(self.instance._pk) # depends on [control=['for'], data=['related_field']]
def load_file(filename): "Runs the given scent.py file." mod_name = '.'.join(os.path.basename(filename).split('.')[:-1]) mod_path = os.path.dirname(filename) if mod_name in sys.modules: del sys.modules[mod_name] if mod_path not in set(sys.modules.keys()): sys.path.insert(0, mod_path) return ScentModule(__import__(mod_name, g, g), filename)
def function[load_file, parameter[filename]]: constant[Runs the given scent.py file.] variable[mod_name] assign[=] call[constant[.].join, parameter[call[call[call[name[os].path.basename, parameter[name[filename]]].split, parameter[constant[.]]]][<ast.Slice object at 0x7da1b2648790>]]] variable[mod_path] assign[=] call[name[os].path.dirname, parameter[name[filename]]] if compare[name[mod_name] in name[sys].modules] begin[:] <ast.Delete object at 0x7da1b26486d0> if compare[name[mod_path] <ast.NotIn object at 0x7da2590d7190> call[name[set], parameter[call[name[sys].modules.keys, parameter[]]]]] begin[:] call[name[sys].path.insert, parameter[constant[0], name[mod_path]]] return[call[name[ScentModule], parameter[call[name[__import__], parameter[name[mod_name], name[g], name[g]]], name[filename]]]]
keyword[def] identifier[load_file] ( identifier[filename] ): literal[string] identifier[mod_name] = literal[string] . identifier[join] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ). identifier[split] ( literal[string] )[:- literal[int] ]) identifier[mod_path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] ) keyword[if] identifier[mod_name] keyword[in] identifier[sys] . identifier[modules] : keyword[del] identifier[sys] . identifier[modules] [ identifier[mod_name] ] keyword[if] identifier[mod_path] keyword[not] keyword[in] identifier[set] ( identifier[sys] . identifier[modules] . identifier[keys] ()): identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , identifier[mod_path] ) keyword[return] identifier[ScentModule] ( identifier[__import__] ( identifier[mod_name] , identifier[g] , identifier[g] ), identifier[filename] )
def load_file(filename): """Runs the given scent.py file.""" mod_name = '.'.join(os.path.basename(filename).split('.')[:-1]) mod_path = os.path.dirname(filename) if mod_name in sys.modules: del sys.modules[mod_name] # depends on [control=['if'], data=['mod_name']] if mod_path not in set(sys.modules.keys()): sys.path.insert(0, mod_path) # depends on [control=['if'], data=['mod_path']] return ScentModule(__import__(mod_name, g, g), filename)
def bitcoin_address(self) -> str: """Generate a random bitcoin address. :return: Bitcoin address. :Example: 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX """ type_ = self.random.choice(['1', '3']) letters = string.ascii_letters + string.digits return type_ + ''.join( self.random.choice(letters) for _ in range(33))
def function[bitcoin_address, parameter[self]]: constant[Generate a random bitcoin address. :return: Bitcoin address. :Example: 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX ] variable[type_] assign[=] call[name[self].random.choice, parameter[list[[<ast.Constant object at 0x7da2044c1960>, <ast.Constant object at 0x7da18f58f460>]]]] variable[letters] assign[=] binary_operation[name[string].ascii_letters + name[string].digits] return[binary_operation[name[type_] + call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20e9b05b0>]]]]
keyword[def] identifier[bitcoin_address] ( identifier[self] )-> identifier[str] : literal[string] identifier[type_] = identifier[self] . identifier[random] . identifier[choice] ([ literal[string] , literal[string] ]) identifier[letters] = identifier[string] . identifier[ascii_letters] + identifier[string] . identifier[digits] keyword[return] identifier[type_] + literal[string] . identifier[join] ( identifier[self] . identifier[random] . identifier[choice] ( identifier[letters] ) keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] ))
def bitcoin_address(self) -> str: """Generate a random bitcoin address. :return: Bitcoin address. :Example: 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX """ type_ = self.random.choice(['1', '3']) letters = string.ascii_letters + string.digits return type_ + ''.join((self.random.choice(letters) for _ in range(33)))
def find_all_matches(text_log_error, matchers): """ Find matches for the given error using the given matcher classes Returns *unsaved* TextLogErrorMatch instances. """ for matcher_func in matchers: matches = matcher_func(text_log_error) # matches: iterator of (score, ClassifiedFailure.id) if not matches: continue for score, classified_failure_id in matches: yield TextLogErrorMatch( score=score, matcher_name=matcher_func.__name__, classified_failure_id=classified_failure_id, text_log_error=text_log_error, )
def function[find_all_matches, parameter[text_log_error, matchers]]: constant[ Find matches for the given error using the given matcher classes Returns *unsaved* TextLogErrorMatch instances. ] for taget[name[matcher_func]] in starred[name[matchers]] begin[:] variable[matches] assign[=] call[name[matcher_func], parameter[name[text_log_error]]] if <ast.UnaryOp object at 0x7da1b08a7eb0> begin[:] continue for taget[tuple[[<ast.Name object at 0x7da1b08a7640>, <ast.Name object at 0x7da1b08a7010>]]] in starred[name[matches]] begin[:] <ast.Yield object at 0x7da1b08a58a0>
keyword[def] identifier[find_all_matches] ( identifier[text_log_error] , identifier[matchers] ): literal[string] keyword[for] identifier[matcher_func] keyword[in] identifier[matchers] : identifier[matches] = identifier[matcher_func] ( identifier[text_log_error] ) keyword[if] keyword[not] identifier[matches] : keyword[continue] keyword[for] identifier[score] , identifier[classified_failure_id] keyword[in] identifier[matches] : keyword[yield] identifier[TextLogErrorMatch] ( identifier[score] = identifier[score] , identifier[matcher_name] = identifier[matcher_func] . identifier[__name__] , identifier[classified_failure_id] = identifier[classified_failure_id] , identifier[text_log_error] = identifier[text_log_error] , )
def find_all_matches(text_log_error, matchers): """ Find matches for the given error using the given matcher classes Returns *unsaved* TextLogErrorMatch instances. """ for matcher_func in matchers: matches = matcher_func(text_log_error) # matches: iterator of (score, ClassifiedFailure.id) if not matches: continue # depends on [control=['if'], data=[]] for (score, classified_failure_id) in matches: yield TextLogErrorMatch(score=score, matcher_name=matcher_func.__name__, classified_failure_id=classified_failure_id, text_log_error=text_log_error) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['matcher_func']]
def get_cluster_graph(self, engine="fdp", graph_attr=None, node_attr=None, edge_attr=None): """ Generate directory graph in the DOT language. Directories are shown as clusters .. warning:: This function scans the entire directory tree starting from top so the resulting graph can be really big. Args: engine: Layout command used. ['dot', 'neato', 'twopi', 'circo', 'fdp', 'sfdp', 'patchwork', 'osage'] graph_attr: Mapping of (attribute, value) pairs for the graph. node_attr: Mapping of (attribute, value) pairs set for all nodes. edge_attr: Mapping of (attribute, value) pairs set for all edges. Returns: graphviz.Digraph <https://graphviz.readthedocs.io/en/stable/api.html#digraph> """ # https://www.graphviz.org/doc/info/ from graphviz import Digraph g = Digraph("directory", #filename="flow_%s.gv" % os.path.basename(self.relworkdir), engine=engine) # if engine == "automatic" else engine) # Set graph attributes. #g.attr(label="%s@%s" % (self.__class__.__name__, self.relworkdir)) g.attr(label=self.top) #g.attr(fontcolor="white", bgcolor='purple:pink') #g.attr(rankdir="LR", pagedir="BL") #g.attr(constraint="false", pack="true", packMode="clust") g.node_attr.update(color='lightblue2', style='filled') #g.node_attr.update(ranksep='equally') # Add input attributes. if graph_attr is not None: fg.graph_attr.update(**graph_attr) if node_attr is not None: fg.node_attr.update(**node_attr) if edge_attr is not None: fg.edge_attr.update(**edge_attr) def node_kwargs(path): return dict( #shape="circle", #shape="none", #shape="plaintext", #shape="point", shape="record", #color=node.color_hex, fontsize="8.0", label=os.path.basename(path), ) edge_kwargs = dict(arrowType="vee", style="solid", minlen="1") cluster_kwargs = dict(rankdir="LR", pagedir="BL", style="rounded", bgcolor="azure2") # TODO: Write other method without clusters if not walk. 
exclude_top_node = False for root, dirs, files in os.walk(self.top): if exclude_top_node and root == self.top: continue cluster_name = "cluster_%s" % root #print("root", root, cluster_name, "dirs", dirs, "files", files, sep="\n") with g.subgraph(name=cluster_name) as d: d.attr(**cluster_kwargs) d.attr(rank="source" if (files or dirs) else "sink") d.attr(label=os.path.basename(root)) for f in files: filepath = os.path.join(root, f) d.node(filepath, **node_kwargs(filepath)) if os.path.islink(filepath): # Follow the link and use the relpath wrt link as label. realp = os.path.realpath(filepath) realp = os.path.relpath(realp, filepath) #realp = os.path.relpath(realp, self.top) #print(filepath, realp) #g.node(realp, **node_kwargs(realp)) g.edge(filepath, realp, **edge_kwargs) for dirname in dirs: dirpath = os.path.join(root, dirname) #head, basename = os.path.split(dirpath) new_cluster_name = "cluster_%s" % dirpath #rank = "source" if os.listdir(dirpath) else "sink" #g.node(dirpath, rank=rank, **node_kwargs(dirpath)) #g.edge(dirpath, new_cluster_name, **edge_kwargs) #d.edge(cluster_name, new_cluster_name, minlen="2", **edge_kwargs) d.edge(cluster_name, new_cluster_name, **edge_kwargs) return g
def function[get_cluster_graph, parameter[self, engine, graph_attr, node_attr, edge_attr]]: constant[ Generate directory graph in the DOT language. Directories are shown as clusters .. warning:: This function scans the entire directory tree starting from top so the resulting graph can be really big. Args: engine: Layout command used. ['dot', 'neato', 'twopi', 'circo', 'fdp', 'sfdp', 'patchwork', 'osage'] graph_attr: Mapping of (attribute, value) pairs for the graph. node_attr: Mapping of (attribute, value) pairs set for all nodes. edge_attr: Mapping of (attribute, value) pairs set for all edges. Returns: graphviz.Digraph <https://graphviz.readthedocs.io/en/stable/api.html#digraph> ] from relative_module[graphviz] import module[Digraph] variable[g] assign[=] call[name[Digraph], parameter[constant[directory]]] call[name[g].attr, parameter[]] call[name[g].node_attr.update, parameter[]] if compare[name[graph_attr] is_not constant[None]] begin[:] call[name[fg].graph_attr.update, parameter[]] if compare[name[node_attr] is_not constant[None]] begin[:] call[name[fg].node_attr.update, parameter[]] if compare[name[edge_attr] is_not constant[None]] begin[:] call[name[fg].edge_attr.update, parameter[]] def function[node_kwargs, parameter[path]]: return[call[name[dict], parameter[]]] variable[edge_kwargs] assign[=] call[name[dict], parameter[]] variable[cluster_kwargs] assign[=] call[name[dict], parameter[]] variable[exclude_top_node] assign[=] constant[False] for taget[tuple[[<ast.Name object at 0x7da20c76e7a0>, <ast.Name object at 0x7da20c76f160>, <ast.Name object at 0x7da20c76fdc0>]]] in starred[call[name[os].walk, parameter[name[self].top]]] begin[:] if <ast.BoolOp object at 0x7da20c76f850> begin[:] continue variable[cluster_name] assign[=] binary_operation[constant[cluster_%s] <ast.Mod object at 0x7da2590d6920> name[root]] with call[name[g].subgraph, parameter[]] begin[:] call[name[d].attr, parameter[]] call[name[d].attr, parameter[]] call[name[d].attr, parameter[]] for 
taget[name[f]] in starred[name[files]] begin[:] variable[filepath] assign[=] call[name[os].path.join, parameter[name[root], name[f]]] call[name[d].node, parameter[name[filepath]]] if call[name[os].path.islink, parameter[name[filepath]]] begin[:] variable[realp] assign[=] call[name[os].path.realpath, parameter[name[filepath]]] variable[realp] assign[=] call[name[os].path.relpath, parameter[name[realp], name[filepath]]] call[name[g].edge, parameter[name[filepath], name[realp]]] for taget[name[dirname]] in starred[name[dirs]] begin[:] variable[dirpath] assign[=] call[name[os].path.join, parameter[name[root], name[dirname]]] variable[new_cluster_name] assign[=] binary_operation[constant[cluster_%s] <ast.Mod object at 0x7da2590d6920> name[dirpath]] call[name[d].edge, parameter[name[cluster_name], name[new_cluster_name]]] return[name[g]]
keyword[def] identifier[get_cluster_graph] ( identifier[self] , identifier[engine] = literal[string] , identifier[graph_attr] = keyword[None] , identifier[node_attr] = keyword[None] , identifier[edge_attr] = keyword[None] ): literal[string] keyword[from] identifier[graphviz] keyword[import] identifier[Digraph] identifier[g] = identifier[Digraph] ( literal[string] , identifier[engine] = identifier[engine] ) identifier[g] . identifier[attr] ( identifier[label] = identifier[self] . identifier[top] ) identifier[g] . identifier[node_attr] . identifier[update] ( identifier[color] = literal[string] , identifier[style] = literal[string] ) keyword[if] identifier[graph_attr] keyword[is] keyword[not] keyword[None] : identifier[fg] . identifier[graph_attr] . identifier[update] (** identifier[graph_attr] ) keyword[if] identifier[node_attr] keyword[is] keyword[not] keyword[None] : identifier[fg] . identifier[node_attr] . identifier[update] (** identifier[node_attr] ) keyword[if] identifier[edge_attr] keyword[is] keyword[not] keyword[None] : identifier[fg] . identifier[edge_attr] . identifier[update] (** identifier[edge_attr] ) keyword[def] identifier[node_kwargs] ( identifier[path] ): keyword[return] identifier[dict] ( identifier[shape] = literal[string] , identifier[fontsize] = literal[string] , identifier[label] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ), ) identifier[edge_kwargs] = identifier[dict] ( identifier[arrowType] = literal[string] , identifier[style] = literal[string] , identifier[minlen] = literal[string] ) identifier[cluster_kwargs] = identifier[dict] ( identifier[rankdir] = literal[string] , identifier[pagedir] = literal[string] , identifier[style] = literal[string] , identifier[bgcolor] = literal[string] ) identifier[exclude_top_node] = keyword[False] keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[self] . 
identifier[top] ): keyword[if] identifier[exclude_top_node] keyword[and] identifier[root] == identifier[self] . identifier[top] : keyword[continue] identifier[cluster_name] = literal[string] % identifier[root] keyword[with] identifier[g] . identifier[subgraph] ( identifier[name] = identifier[cluster_name] ) keyword[as] identifier[d] : identifier[d] . identifier[attr] (** identifier[cluster_kwargs] ) identifier[d] . identifier[attr] ( identifier[rank] = literal[string] keyword[if] ( identifier[files] keyword[or] identifier[dirs] ) keyword[else] literal[string] ) identifier[d] . identifier[attr] ( identifier[label] = identifier[os] . identifier[path] . identifier[basename] ( identifier[root] )) keyword[for] identifier[f] keyword[in] identifier[files] : identifier[filepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[f] ) identifier[d] . identifier[node] ( identifier[filepath] ,** identifier[node_kwargs] ( identifier[filepath] )) keyword[if] identifier[os] . identifier[path] . identifier[islink] ( identifier[filepath] ): identifier[realp] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[filepath] ) identifier[realp] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[realp] , identifier[filepath] ) identifier[g] . identifier[edge] ( identifier[filepath] , identifier[realp] ,** identifier[edge_kwargs] ) keyword[for] identifier[dirname] keyword[in] identifier[dirs] : identifier[dirpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[dirname] ) identifier[new_cluster_name] = literal[string] % identifier[dirpath] identifier[d] . identifier[edge] ( identifier[cluster_name] , identifier[new_cluster_name] ,** identifier[edge_kwargs] ) keyword[return] identifier[g]
def get_cluster_graph(self, engine='fdp', graph_attr=None, node_attr=None, edge_attr=None): """ Generate directory graph in the DOT language. Directories are shown as clusters .. warning:: This function scans the entire directory tree starting from top so the resulting graph can be really big. Args: engine: Layout command used. ['dot', 'neato', 'twopi', 'circo', 'fdp', 'sfdp', 'patchwork', 'osage'] graph_attr: Mapping of (attribute, value) pairs for the graph. node_attr: Mapping of (attribute, value) pairs set for all nodes. edge_attr: Mapping of (attribute, value) pairs set for all edges. Returns: graphviz.Digraph <https://graphviz.readthedocs.io/en/stable/api.html#digraph> """ # https://www.graphviz.org/doc/info/ from graphviz import Digraph #filename="flow_%s.gv" % os.path.basename(self.relworkdir), g = Digraph('directory', engine=engine) # if engine == "automatic" else engine) # Set graph attributes. #g.attr(label="%s@%s" % (self.__class__.__name__, self.relworkdir)) g.attr(label=self.top) #g.attr(fontcolor="white", bgcolor='purple:pink') #g.attr(rankdir="LR", pagedir="BL") #g.attr(constraint="false", pack="true", packMode="clust") g.node_attr.update(color='lightblue2', style='filled') #g.node_attr.update(ranksep='equally') # Add input attributes. if graph_attr is not None: fg.graph_attr.update(**graph_attr) # depends on [control=['if'], data=['graph_attr']] if node_attr is not None: fg.node_attr.update(**node_attr) # depends on [control=['if'], data=['node_attr']] if edge_attr is not None: fg.edge_attr.update(**edge_attr) # depends on [control=['if'], data=['edge_attr']] def node_kwargs(path): #shape="circle", #shape="none", #shape="plaintext", #shape="point", #color=node.color_hex, return dict(shape='record', fontsize='8.0', label=os.path.basename(path)) edge_kwargs = dict(arrowType='vee', style='solid', minlen='1') cluster_kwargs = dict(rankdir='LR', pagedir='BL', style='rounded', bgcolor='azure2') # TODO: Write other method without clusters if not walk. 
exclude_top_node = False for (root, dirs, files) in os.walk(self.top): if exclude_top_node and root == self.top: continue # depends on [control=['if'], data=[]] cluster_name = 'cluster_%s' % root #print("root", root, cluster_name, "dirs", dirs, "files", files, sep="\n") with g.subgraph(name=cluster_name) as d: d.attr(**cluster_kwargs) d.attr(rank='source' if files or dirs else 'sink') d.attr(label=os.path.basename(root)) for f in files: filepath = os.path.join(root, f) d.node(filepath, **node_kwargs(filepath)) if os.path.islink(filepath): # Follow the link and use the relpath wrt link as label. realp = os.path.realpath(filepath) realp = os.path.relpath(realp, filepath) #realp = os.path.relpath(realp, self.top) #print(filepath, realp) #g.node(realp, **node_kwargs(realp)) g.edge(filepath, realp, **edge_kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] for dirname in dirs: dirpath = os.path.join(root, dirname) #head, basename = os.path.split(dirpath) new_cluster_name = 'cluster_%s' % dirpath #rank = "source" if os.listdir(dirpath) else "sink" #g.node(dirpath, rank=rank, **node_kwargs(dirpath)) #g.edge(dirpath, new_cluster_name, **edge_kwargs) #d.edge(cluster_name, new_cluster_name, minlen="2", **edge_kwargs) d.edge(cluster_name, new_cluster_name, **edge_kwargs) # depends on [control=['for'], data=['dirname']] # depends on [control=['with'], data=['d']] # depends on [control=['for'], data=[]] return g
def extractSchedule(self, schedule, period):
    """ Read a single schedule tariff from meter object buffer.

    Args:
        schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).
        period (int): A period value in range(Extents.Periods).

    Returns:
        namedtuple: Class named ``ret`` with string attributes ``Hour``,
        ``Min``, ``Tariff``, ``Period`` and ``Schedule`` set.  On an
        out-of-bounds argument or a missing buffer index, ``Hour``,
        ``Min`` and ``Tariff`` are all set to ``"0"``.
    """
    # NOTE(review): attributes are assigned on the namedtuple *class*
    # itself rather than on an instance, so every call mutates the same
    # class object -- presumably deliberate in ekmmeters; confirm before
    # changing.
    ret = namedtuple("ret", ["Hour", "Min", "Tariff", "Period", "Schedule"])
    # Schedules 1-4 and 5-6 live in separate buffer tables.
    work_table = self.m_schd_1_to_4
    if Schedules.Schedule_5 <= schedule <= Schedules.Schedule_6:
        work_table = self.m_schd_5_to_6
    # Convert zero-based arguments to the one-based names used in the
    # buffer keys (e.g. "Schedule_1_Period_1_Hour").
    period += 1
    schedule += 1
    ret.Period = str(period)
    ret.Schedule = str(schedule)
    if (schedule < 1) or (schedule > Extents.Schedules) or (period < 0) or (period > Extents.Periods):
        ekm_log("Out of bounds: tariff " + str(period) + " for schedule " + str(schedule))
        ret.Hour = ret.Min = ret.Tariff = str(0)
        return ret
    # Buffer keys for the hour, minute and tariff fields of this period.
    idxhr = "Schedule_" + str(schedule) + "_Period_" + str(period) + "_Hour"
    idxmin = "Schedule_" + str(schedule) + "_Period_" + str(period) + "_Min"
    idxrate = "Schedule_" + str(schedule) + "_Period_" + str(period) + "_Tariff"
    if idxhr not in work_table:
        ekm_log("Incorrect index: " + idxhr)
        ret.Hour = ret.Min = ret.Tariff = str(0)
        return ret
    if idxmin not in work_table:
        ekm_log("Incorrect index: " + idxmin)
        ret.Hour = ret.Min = ret.Tariff = str(0)
        return ret
    if idxrate not in work_table:
        ekm_log("Incorrect index: " + idxrate)
        ret.Hour = ret.Min = ret.Tariff = str(0)
        return ret
    ret.Hour = work_table[idxhr][MeterData.StringValue]
    # Minutes are zero-padded to two digits for display (e.g. "05").
    ret.Min = work_table[idxmin][MeterData.StringValue].zfill(2)
    ret.Tariff = work_table[idxrate][MeterData.StringValue]
    return ret
def function[extractSchedule, parameter[self, schedule, period]]: constant[ Read a single schedule tariff from meter object buffer. Args: schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extent.Schedules). tariff (int): A :class:`~ekmmeters.Tariffs` value or in range(Extent.Tariffs). Returns: bool: True on completion. ] variable[ret] assign[=] call[name[namedtuple], parameter[constant[ret], list[[<ast.Constant object at 0x7da18f8134c0>, <ast.Constant object at 0x7da18f813b20>, <ast.Constant object at 0x7da18f810f10>, <ast.Constant object at 0x7da18f8126e0>, <ast.Constant object at 0x7da18f811450>]]]] variable[work_table] assign[=] name[self].m_schd_1_to_4 if compare[name[Schedules].Schedule_5 less_or_equal[<=] name[schedule]] begin[:] variable[work_table] assign[=] name[self].m_schd_5_to_6 <ast.AugAssign object at 0x7da18f8123e0> <ast.AugAssign object at 0x7da18f812a10> name[ret].Period assign[=] call[name[str], parameter[name[period]]] name[ret].Schedule assign[=] call[name[str], parameter[name[schedule]]] if <ast.BoolOp object at 0x7da18f810880> begin[:] call[name[ekm_log], parameter[binary_operation[binary_operation[binary_operation[constant[Out of bounds: tariff ] + call[name[str], parameter[name[period]]]] + constant[ for schedule ]] + call[name[str], parameter[name[schedule]]]]]] name[ret].Hour assign[=] call[name[str], parameter[constant[0]]] return[name[ret]] variable[idxhr] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[Schedule_] + call[name[str], parameter[name[schedule]]]] + constant[_Period_]] + call[name[str], parameter[name[period]]]] + constant[_Hour]] variable[idxmin] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[Schedule_] + call[name[str], parameter[name[schedule]]]] + constant[_Period_]] + call[name[str], parameter[name[period]]]] + constant[_Min]] variable[idxrate] assign[=] 
binary_operation[binary_operation[binary_operation[binary_operation[constant[Schedule_] + call[name[str], parameter[name[schedule]]]] + constant[_Period_]] + call[name[str], parameter[name[period]]]] + constant[_Tariff]] if compare[name[idxhr] <ast.NotIn object at 0x7da2590d7190> name[work_table]] begin[:] call[name[ekm_log], parameter[binary_operation[constant[Incorrect index: ] + name[idxhr]]]] name[ret].Hour assign[=] call[name[str], parameter[constant[0]]] return[name[ret]] if compare[name[idxmin] <ast.NotIn object at 0x7da2590d7190> name[work_table]] begin[:] call[name[ekm_log], parameter[binary_operation[constant[Incorrect index: ] + name[idxmin]]]] name[ret].Hour assign[=] call[name[str], parameter[constant[0]]] return[name[ret]] if compare[name[idxrate] <ast.NotIn object at 0x7da2590d7190> name[work_table]] begin[:] call[name[ekm_log], parameter[binary_operation[constant[Incorrect index: ] + name[idxrate]]]] name[ret].Hour assign[=] call[name[str], parameter[constant[0]]] return[name[ret]] name[ret].Hour assign[=] call[call[name[work_table]][name[idxhr]]][name[MeterData].StringValue] name[ret].Min assign[=] call[call[call[name[work_table]][name[idxmin]]][name[MeterData].StringValue].zfill, parameter[constant[2]]] name[ret].Tariff assign[=] call[call[name[work_table]][name[idxrate]]][name[MeterData].StringValue] return[name[ret]]
keyword[def] identifier[extractSchedule] ( identifier[self] , identifier[schedule] , identifier[period] ): literal[string] identifier[ret] = identifier[namedtuple] ( literal[string] ,[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[work_table] = identifier[self] . identifier[m_schd_1_to_4] keyword[if] identifier[Schedules] . identifier[Schedule_5] <= identifier[schedule] <= identifier[Schedules] . identifier[Schedule_6] : identifier[work_table] = identifier[self] . identifier[m_schd_5_to_6] identifier[period] += literal[int] identifier[schedule] += literal[int] identifier[ret] . identifier[Period] = identifier[str] ( identifier[period] ) identifier[ret] . identifier[Schedule] = identifier[str] ( identifier[schedule] ) keyword[if] ( identifier[schedule] < literal[int] ) keyword[or] ( identifier[schedule] > identifier[Extents] . identifier[Schedules] ) keyword[or] ( identifier[period] < literal[int] ) keyword[or] ( identifier[period] > identifier[Extents] . identifier[Periods] ): identifier[ekm_log] ( literal[string] + identifier[str] ( identifier[period] )+ literal[string] + identifier[str] ( identifier[schedule] )) identifier[ret] . identifier[Hour] = identifier[ret] . identifier[Min] = identifier[ret] . identifier[Tariff] = identifier[str] ( literal[int] ) keyword[return] identifier[ret] identifier[idxhr] = literal[string] + identifier[str] ( identifier[schedule] )+ literal[string] + identifier[str] ( identifier[period] )+ literal[string] identifier[idxmin] = literal[string] + identifier[str] ( identifier[schedule] )+ literal[string] + identifier[str] ( identifier[period] )+ literal[string] identifier[idxrate] = literal[string] + identifier[str] ( identifier[schedule] )+ literal[string] + identifier[str] ( identifier[period] )+ literal[string] keyword[if] identifier[idxhr] keyword[not] keyword[in] identifier[work_table] : identifier[ekm_log] ( literal[string] + identifier[idxhr] ) identifier[ret] . 
identifier[Hour] = identifier[ret] . identifier[Min] = identifier[ret] . identifier[Tariff] = identifier[str] ( literal[int] ) keyword[return] identifier[ret] keyword[if] identifier[idxmin] keyword[not] keyword[in] identifier[work_table] : identifier[ekm_log] ( literal[string] + identifier[idxmin] ) identifier[ret] . identifier[Hour] = identifier[ret] . identifier[Min] = identifier[ret] . identifier[Tariff] = identifier[str] ( literal[int] ) keyword[return] identifier[ret] keyword[if] identifier[idxrate] keyword[not] keyword[in] identifier[work_table] : identifier[ekm_log] ( literal[string] + identifier[idxrate] ) identifier[ret] . identifier[Hour] = identifier[ret] . identifier[Min] = identifier[ret] . identifier[Tariff] = identifier[str] ( literal[int] ) keyword[return] identifier[ret] identifier[ret] . identifier[Hour] = identifier[work_table] [ identifier[idxhr] ][ identifier[MeterData] . identifier[StringValue] ] identifier[ret] . identifier[Min] = identifier[work_table] [ identifier[idxmin] ][ identifier[MeterData] . identifier[StringValue] ]. identifier[zfill] ( literal[int] ) identifier[ret] . identifier[Tariff] = identifier[work_table] [ identifier[idxrate] ][ identifier[MeterData] . identifier[StringValue] ] keyword[return] identifier[ret]
def extractSchedule(self, schedule, period): """ Read a single schedule tariff from meter object buffer. Args: schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extent.Schedules). tariff (int): A :class:`~ekmmeters.Tariffs` value or in range(Extent.Tariffs). Returns: bool: True on completion. """ ret = namedtuple('ret', ['Hour', 'Min', 'Tariff', 'Period', 'Schedule']) work_table = self.m_schd_1_to_4 if Schedules.Schedule_5 <= schedule <= Schedules.Schedule_6: work_table = self.m_schd_5_to_6 # depends on [control=['if'], data=[]] period += 1 schedule += 1 ret.Period = str(period) ret.Schedule = str(schedule) if schedule < 1 or schedule > Extents.Schedules or period < 0 or (period > Extents.Periods): ekm_log('Out of bounds: tariff ' + str(period) + ' for schedule ' + str(schedule)) ret.Hour = ret.Min = ret.Tariff = str(0) return ret # depends on [control=['if'], data=[]] idxhr = 'Schedule_' + str(schedule) + '_Period_' + str(period) + '_Hour' idxmin = 'Schedule_' + str(schedule) + '_Period_' + str(period) + '_Min' idxrate = 'Schedule_' + str(schedule) + '_Period_' + str(period) + '_Tariff' if idxhr not in work_table: ekm_log('Incorrect index: ' + idxhr) ret.Hour = ret.Min = ret.Tariff = str(0) return ret # depends on [control=['if'], data=['idxhr']] if idxmin not in work_table: ekm_log('Incorrect index: ' + idxmin) ret.Hour = ret.Min = ret.Tariff = str(0) return ret # depends on [control=['if'], data=['idxmin']] if idxrate not in work_table: ekm_log('Incorrect index: ' + idxrate) ret.Hour = ret.Min = ret.Tariff = str(0) return ret # depends on [control=['if'], data=['idxrate']] ret.Hour = work_table[idxhr][MeterData.StringValue] ret.Min = work_table[idxmin][MeterData.StringValue].zfill(2) ret.Tariff = work_table[idxrate][MeterData.StringValue] return ret
def _check_panel(self, length):
    """
    Check that given fixed panel length evenly divides index.

    Parameters
    ----------
    length : int
        Fixed length with which to subdivide index

    Raises
    ------
    ValueError
        If ``length`` does not evenly divide the index length, or if it
        equals the index length (which would produce a single panel).
    """
    n = len(self.index)
    # Idiomatic modulo test instead of divmod(n, length)[1].
    if n % length != 0:
        raise ValueError("Panel length '%g' must evenly divide length of series '%g'" % (length, n))
    if n == length:
        raise ValueError("Panel length '%g' cannot be length of series '%g'" % (length, n))
def function[_check_panel, parameter[self, length]]: constant[ Check that given fixed panel length evenly divides index. Parameters ---------- length : int Fixed length with which to subdivide index ] variable[n] assign[=] call[name[len], parameter[name[self].index]] if compare[call[call[name[divmod], parameter[name[n], name[length]]]][constant[1]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da18f09ebf0> if compare[name[n] equal[==] name[length]] begin[:] <ast.Raise object at 0x7da18f09faf0>
keyword[def] identifier[_check_panel] ( identifier[self] , identifier[length] ): literal[string] identifier[n] = identifier[len] ( identifier[self] . identifier[index] ) keyword[if] identifier[divmod] ( identifier[n] , identifier[length] )[ literal[int] ]!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[length] , identifier[n] )) keyword[if] identifier[n] == identifier[length] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[length] , identifier[n] ))
def _check_panel(self, length): """ Check that given fixed panel length evenly divides index. Parameters ---------- length : int Fixed length with which to subdivide index """ n = len(self.index) if divmod(n, length)[1] != 0: raise ValueError("Panel length '%g' must evenly divide length of series '%g'" % (length, n)) # depends on [control=['if'], data=[]] if n == length: raise ValueError("Panel length '%g' cannot be length of series '%g'" % (length, n)) # depends on [control=['if'], data=['n', 'length']]
def as_version(self, version=Version.latest):
    """Returns a dict that has been modified based on versioning in order to be represented in JSON properly

    A class should overload as_version(self, version) implementation in order to tailor a more
    specific representation

    :param version: the relevant version. This allows for variance between versions
    :type version: str | unicode
    :return: a JSON-serializable dict (or list, when called on a list)
    """
    # NOTE(review): uses Python 2 APIs (``iteritems``, ``unicode``) --
    # this module presumably targets Python 2 only.
    if not isinstance(self, list):
        result = {}
        # Iterate dict entries directly, or the instance __dict__ for
        # ordinary objects.
        for k, v in self.iteritems() if isinstance(self, dict) else vars(self).iteritems():
            # Remap internal attribute names to their public JSON keys.
            k = self._props_corrected.get(k, k)
            if isinstance(v, SerializableBase):
                # Recurse so nested serializables honor the same version.
                result[k] = v.as_version(version)
            elif isinstance(v, list):
                result[k] = []
                for val in v:
                    if isinstance(val, SerializableBase):
                        result[k].append(val.as_version(version))
                    else:
                        result[k].append(val)
            elif isinstance(v, uuid.UUID):
                # UUIDs are not JSON-serializable; emit their string form.
                result[k] = unicode(v)
            elif isinstance(v, datetime.timedelta):
                result[k] = jsonify_timedelta(v)
            elif isinstance(v, datetime.datetime):
                result[k] = jsonify_datetime(v)
            else:
                result[k] = v
        # Drop None-valued entries from the serialized output.
        result = self._filter_none(result)
    else:
        # A list serializes element-wise; non-serializable items pass through.
        result = []
        for v in self:
            if isinstance(v, SerializableBase):
                result.append(v.as_version(version))
            else:
                result.append(v)
    return result
def function[as_version, parameter[self, version]]: constant[Returns a dict that has been modified based on versioning in order to be represented in JSON properly A class should overload as_version(self, version) implementation in order to tailor a more specific representation :param version: the relevant version. This allows for variance between versions :type version: str | unicode ] if <ast.UnaryOp object at 0x7da1b0c65810> begin[:] variable[result] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b0c66590>, <ast.Name object at 0x7da1b0c65b70>]]] in starred[<ast.IfExp object at 0x7da1b0c653f0>] begin[:] variable[k] assign[=] call[name[self]._props_corrected.get, parameter[name[k], name[k]]] if call[name[isinstance], parameter[name[v], name[SerializableBase]]] begin[:] call[name[result]][name[k]] assign[=] call[name[v].as_version, parameter[name[version]]] variable[result] assign[=] call[name[self]._filter_none, parameter[name[result]]] return[name[result]]
keyword[def] identifier[as_version] ( identifier[self] , identifier[version] = identifier[Version] . identifier[latest] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[self] , identifier[list] ): identifier[result] ={} keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[iteritems] () keyword[if] identifier[isinstance] ( identifier[self] , identifier[dict] ) keyword[else] identifier[vars] ( identifier[self] ). identifier[iteritems] (): identifier[k] = identifier[self] . identifier[_props_corrected] . identifier[get] ( identifier[k] , identifier[k] ) keyword[if] identifier[isinstance] ( identifier[v] , identifier[SerializableBase] ): identifier[result] [ identifier[k] ]= identifier[v] . identifier[as_version] ( identifier[version] ) keyword[elif] identifier[isinstance] ( identifier[v] , identifier[list] ): identifier[result] [ identifier[k] ]=[] keyword[for] identifier[val] keyword[in] identifier[v] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[SerializableBase] ): identifier[result] [ identifier[k] ]. identifier[append] ( identifier[val] . identifier[as_version] ( identifier[version] )) keyword[else] : identifier[result] [ identifier[k] ]. identifier[append] ( identifier[val] ) keyword[elif] identifier[isinstance] ( identifier[v] , identifier[uuid] . identifier[UUID] ): identifier[result] [ identifier[k] ]= identifier[unicode] ( identifier[v] ) keyword[elif] identifier[isinstance] ( identifier[v] , identifier[datetime] . identifier[timedelta] ): identifier[result] [ identifier[k] ]= identifier[jsonify_timedelta] ( identifier[v] ) keyword[elif] identifier[isinstance] ( identifier[v] , identifier[datetime] . identifier[datetime] ): identifier[result] [ identifier[k] ]= identifier[jsonify_datetime] ( identifier[v] ) keyword[else] : identifier[result] [ identifier[k] ]= identifier[v] identifier[result] = identifier[self] . 
identifier[_filter_none] ( identifier[result] ) keyword[else] : identifier[result] =[] keyword[for] identifier[v] keyword[in] identifier[self] : keyword[if] identifier[isinstance] ( identifier[v] , identifier[SerializableBase] ): identifier[result] . identifier[append] ( identifier[v] . identifier[as_version] ( identifier[version] )) keyword[else] : identifier[result] . identifier[append] ( identifier[v] ) keyword[return] identifier[result]
def as_version(self, version=Version.latest): """Returns a dict that has been modified based on versioning in order to be represented in JSON properly A class should overload as_version(self, version) implementation in order to tailor a more specific representation :param version: the relevant version. This allows for variance between versions :type version: str | unicode """ if not isinstance(self, list): result = {} for (k, v) in self.iteritems() if isinstance(self, dict) else vars(self).iteritems(): k = self._props_corrected.get(k, k) if isinstance(v, SerializableBase): result[k] = v.as_version(version) # depends on [control=['if'], data=[]] elif isinstance(v, list): result[k] = [] for val in v: if isinstance(val, SerializableBase): result[k].append(val.as_version(version)) # depends on [control=['if'], data=[]] else: result[k].append(val) # depends on [control=['for'], data=['val']] # depends on [control=['if'], data=[]] elif isinstance(v, uuid.UUID): result[k] = unicode(v) # depends on [control=['if'], data=[]] elif isinstance(v, datetime.timedelta): result[k] = jsonify_timedelta(v) # depends on [control=['if'], data=[]] elif isinstance(v, datetime.datetime): result[k] = jsonify_datetime(v) # depends on [control=['if'], data=[]] else: result[k] = v # depends on [control=['for'], data=[]] result = self._filter_none(result) # depends on [control=['if'], data=[]] else: result = [] for v in self: if isinstance(v, SerializableBase): result.append(v.as_version(version)) # depends on [control=['if'], data=[]] else: result.append(v) # depends on [control=['for'], data=['v']] return result
def imagetransformer_b12l_4h_b128_h512_uncond_dr01_im():
    """TPU related imagenet model."""
    hparams = imagetransformer_b12l_4h_b256_uncond_dr03_tpu()
    update_hparams_for_tpu(hparams)
    # Variant-specific overrides on top of the TPU defaults.
    overrides = (
        ("batch_size", 4),
        ("optimizer", "Adafactor"),
        ("learning_rate_schedule", "rsqrt_decay"),
        ("learning_rate_warmup_steps", 6000),
        ("layer_prepostprocess_dropout", 0.1),
    )
    for attr, value in overrides:
        setattr(hparams, attr, value)
    return hparams
def function[imagetransformer_b12l_4h_b128_h512_uncond_dr01_im, parameter[]]: constant[TPU related imagenet model.] variable[hparams] assign[=] call[name[imagetransformer_b12l_4h_b256_uncond_dr03_tpu], parameter[]] call[name[update_hparams_for_tpu], parameter[name[hparams]]] name[hparams].batch_size assign[=] constant[4] name[hparams].optimizer assign[=] constant[Adafactor] name[hparams].learning_rate_schedule assign[=] constant[rsqrt_decay] name[hparams].learning_rate_warmup_steps assign[=] constant[6000] name[hparams].layer_prepostprocess_dropout assign[=] constant[0.1] return[name[hparams]]
keyword[def] identifier[imagetransformer_b12l_4h_b128_h512_uncond_dr01_im] (): literal[string] identifier[hparams] = identifier[imagetransformer_b12l_4h_b256_uncond_dr03_tpu] () identifier[update_hparams_for_tpu] ( identifier[hparams] ) identifier[hparams] . identifier[batch_size] = literal[int] identifier[hparams] . identifier[optimizer] = literal[string] identifier[hparams] . identifier[learning_rate_schedule] = literal[string] identifier[hparams] . identifier[learning_rate_warmup_steps] = literal[int] identifier[hparams] . identifier[layer_prepostprocess_dropout] = literal[int] keyword[return] identifier[hparams]
def imagetransformer_b12l_4h_b128_h512_uncond_dr01_im(): """TPU related imagenet model.""" hparams = imagetransformer_b12l_4h_b256_uncond_dr03_tpu() update_hparams_for_tpu(hparams) hparams.batch_size = 4 hparams.optimizer = 'Adafactor' hparams.learning_rate_schedule = 'rsqrt_decay' hparams.learning_rate_warmup_steps = 6000 hparams.layer_prepostprocess_dropout = 0.1 return hparams
def find_usage(self):
    """
    Determine the current usage for each limit of this service,
    and update corresponding Limit via
    :py:meth:`~.AwsLimit._add_current_usage`.
    """
    logger.debug("Checking usage for service %s", self.service_name)
    # Clear any usage recorded by a previous run.
    for limit in self.limits.values():
        limit._reset_usage()
    try:
        self.connect()
        resp = self.conn.get_send_quota()
    except EndpointConnectionError as ex:
        # SES is unavailable in this region; skip it quietly.
        logger.warning('Skipping SES: %s', str(ex))
        return
    except ClientError as ex:
        # Unexpected API errors propagate; access/availability errors
        # just mean SES usage cannot be collected here.
        if ex.response['Error']['Code'] not in ('AccessDenied', '503'):
            raise
        logger.warning('Skipping SES: %s', ex)
        return
    self.limits['Daily sending quota']._add_current_usage(
        resp['SentLast24Hours']
    )
    self._have_usage = True
    logger.debug("Done checking usage.")
def function[find_usage, parameter[self]]: constant[ Determine the current usage for each limit of this service, and update corresponding Limit via :py:meth:`~.AwsLimit._add_current_usage`. ] call[name[logger].debug, parameter[constant[Checking usage for service %s], name[self].service_name]] for taget[name[lim]] in starred[call[name[self].limits.values, parameter[]]] begin[:] call[name[lim]._reset_usage, parameter[]] <ast.Try object at 0x7da20c76c9d0> call[call[name[self].limits][constant[Daily sending quota]]._add_current_usage, parameter[call[name[resp]][constant[SentLast24Hours]]]] name[self]._have_usage assign[=] constant[True] call[name[logger].debug, parameter[constant[Done checking usage.]]]
keyword[def] identifier[find_usage] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[service_name] ) keyword[for] identifier[lim] keyword[in] identifier[self] . identifier[limits] . identifier[values] (): identifier[lim] . identifier[_reset_usage] () keyword[try] : identifier[self] . identifier[connect] () identifier[resp] = identifier[self] . identifier[conn] . identifier[get_send_quota] () keyword[except] identifier[EndpointConnectionError] keyword[as] identifier[ex] : identifier[logger] . identifier[warning] ( literal[string] , identifier[str] ( identifier[ex] )) keyword[return] keyword[except] identifier[ClientError] keyword[as] identifier[ex] : keyword[if] identifier[ex] . identifier[response] [ literal[string] ][ literal[string] ] keyword[in] [ literal[string] , literal[string] ]: identifier[logger] . identifier[warning] ( literal[string] , identifier[ex] ) keyword[return] keyword[raise] identifier[self] . identifier[limits] [ literal[string] ]. identifier[_add_current_usage] ( identifier[resp] [ literal[string] ] ) identifier[self] . identifier[_have_usage] = keyword[True] identifier[logger] . identifier[debug] ( literal[string] )
def find_usage(self): """ Determine the current usage for each limit of this service, and update corresponding Limit via :py:meth:`~.AwsLimit._add_current_usage`. """ logger.debug('Checking usage for service %s', self.service_name) for lim in self.limits.values(): lim._reset_usage() # depends on [control=['for'], data=['lim']] try: self.connect() resp = self.conn.get_send_quota() # depends on [control=['try'], data=[]] except EndpointConnectionError as ex: logger.warning('Skipping SES: %s', str(ex)) return # depends on [control=['except'], data=['ex']] except ClientError as ex: if ex.response['Error']['Code'] in ['AccessDenied', '503']: logger.warning('Skipping SES: %s', ex) return # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['ex']] self.limits['Daily sending quota']._add_current_usage(resp['SentLast24Hours']) self._have_usage = True logger.debug('Done checking usage.')
def parse_endnotes(document, xmlcontent):
    """Parse endnotes document.

    Endnotes are defined in file 'endnotes.xml'
    """
    root = etree.fromstring(xmlcontent)
    document.endnotes = {}
    for endnote in root.xpath('.//w:endnote', namespaces=NAMESPACES):
        # Each endnote body is a sequence of w:p paragraph elements.
        note_id = endnote.attrib[_name('{{{w}}}id')]
        document.endnotes[note_id] = [
            parse_paragraph(document, para)
            for para in endnote.xpath('.//w:p', namespaces=NAMESPACES)
        ]
def function[parse_endnotes, parameter[document, xmlcontent]]: constant[Parse endnotes document. Endnotes are defined in file 'endnotes.xml' ] variable[endnotes] assign[=] call[name[etree].fromstring, parameter[name[xmlcontent]]] name[document].endnotes assign[=] dictionary[[], []] for taget[name[note]] in starred[call[name[endnotes].xpath, parameter[constant[.//w:endnote]]]] begin[:] variable[paragraphs] assign[=] <ast.ListComp object at 0x7da20e9b3ca0> call[name[document].endnotes][call[name[note].attrib][call[name[_name], parameter[constant[{{{w}}}id]]]]] assign[=] name[paragraphs]
keyword[def] identifier[parse_endnotes] ( identifier[document] , identifier[xmlcontent] ): literal[string] identifier[endnotes] = identifier[etree] . identifier[fromstring] ( identifier[xmlcontent] ) identifier[document] . identifier[endnotes] ={} keyword[for] identifier[note] keyword[in] identifier[endnotes] . identifier[xpath] ( literal[string] , identifier[namespaces] = identifier[NAMESPACES] ): identifier[paragraphs] =[ identifier[parse_paragraph] ( identifier[document] , identifier[para] ) keyword[for] identifier[para] keyword[in] identifier[note] . identifier[xpath] ( literal[string] , identifier[namespaces] = identifier[NAMESPACES] )] identifier[document] . identifier[endnotes] [ identifier[note] . identifier[attrib] [ identifier[_name] ( literal[string] )]]= identifier[paragraphs]
def parse_endnotes(document, xmlcontent): """Parse endnotes document. Endnotes are defined in file 'endnotes.xml' """ endnotes = etree.fromstring(xmlcontent) document.endnotes = {} for note in endnotes.xpath('.//w:endnote', namespaces=NAMESPACES): paragraphs = [parse_paragraph(document, para) for para in note.xpath('.//w:p', namespaces=NAMESPACES)] document.endnotes[note.attrib[_name('{{{w}}}id')]] = paragraphs # depends on [control=['for'], data=['note']]
def execute(self, query, *args, **kwargs):
    """Asynchronously execute the specified CQL query.

    The execute command also takes optional parameters and trace
    keyword arguments. See cassandra-python documentation for
    definition of those parameters.
    """
    # Bridge the cassandra-driver future onto the tornado IOLoop so the
    # caller can ``yield``/``await`` a tornado Future.
    result_future = Future()
    driver_future = self._session.execute_async(query, *args, **kwargs)
    self._ioloop.add_callback(self._callback, driver_future, result_future)
    return result_future
def function[execute, parameter[self, query]]: constant[Asynchronously execute the specified CQL query. The execute command also takes optional parameters and trace keyword arguments. See cassandra-python documentation for definition of those parameters. ] variable[tornado_future] assign[=] call[name[Future], parameter[]] variable[cassandra_future] assign[=] call[name[self]._session.execute_async, parameter[name[query], <ast.Starred object at 0x7da1b162a6e0>]] call[name[self]._ioloop.add_callback, parameter[name[self]._callback, name[cassandra_future], name[tornado_future]]] return[name[tornado_future]]
keyword[def] identifier[execute] ( identifier[self] , identifier[query] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[tornado_future] = identifier[Future] () identifier[cassandra_future] = identifier[self] . identifier[_session] . identifier[execute_async] ( identifier[query] ,* identifier[args] ,** identifier[kwargs] ) identifier[self] . identifier[_ioloop] . identifier[add_callback] ( identifier[self] . identifier[_callback] , identifier[cassandra_future] , identifier[tornado_future] ) keyword[return] identifier[tornado_future]
def execute(self, query, *args, **kwargs): """Asynchronously execute the specified CQL query. The execute command also takes optional parameters and trace keyword arguments. See cassandra-python documentation for definition of those parameters. """ tornado_future = Future() cassandra_future = self._session.execute_async(query, *args, **kwargs) self._ioloop.add_callback(self._callback, cassandra_future, tornado_future) return tornado_future
def distros_for_location(location, basename, metadata=None):
    """Yield egg or source distribution objects based on basename"""
    if basename.endswith('.egg.zip'):
        # strip the trailing ".zip" so the ".egg" branch below matches
        basename = basename[:-len('.zip')]
    if basename.endswith('.egg') and '-' in basename:
        # only one, unambiguous interpretation
        return [Distribution.from_location(location, basename, metadata)]
    if basename.endswith('.exe'):
        wininst_base, py_ver, platform = parse_bdist_wininst(basename)
        if wininst_base is not None:
            return interpret_distro_name(
                location, wininst_base, metadata, py_ver, BINARY_DIST,
                platform
            )
    # Try source distro extensions (.zip, .tgz, etc.)
    for ext in EXTENSIONS:
        if not basename.endswith(ext):
            continue
        stem = basename[:-len(ext)]
        return interpret_distro_name(location, stem, metadata)
    return []  # no extension matched
def function[distros_for_location, parameter[location, basename, metadata]]: constant[Yield egg or source distribution objects based on basename] if call[name[basename].endswith, parameter[constant[.egg.zip]]] begin[:] variable[basename] assign[=] call[name[basename]][<ast.Slice object at 0x7da20c6abeb0>] if <ast.BoolOp object at 0x7da20c6a8730> begin[:] return[list[[<ast.Call object at 0x7da20c6a8d60>]]] if call[name[basename].endswith, parameter[constant[.exe]]] begin[:] <ast.Tuple object at 0x7da20c6a9a50> assign[=] call[name[parse_bdist_wininst], parameter[name[basename]]] if compare[name[win_base] is_not constant[None]] begin[:] return[call[name[interpret_distro_name], parameter[name[location], name[win_base], name[metadata], name[py_ver], name[BINARY_DIST], name[platform]]]] for taget[name[ext]] in starred[name[EXTENSIONS]] begin[:] if call[name[basename].endswith, parameter[name[ext]]] begin[:] variable[basename] assign[=] call[name[basename]][<ast.Slice object at 0x7da20c6aa530>] return[call[name[interpret_distro_name], parameter[name[location], name[basename], name[metadata]]]] return[list[[]]]
keyword[def] identifier[distros_for_location] ( identifier[location] , identifier[basename] , identifier[metadata] = keyword[None] ): literal[string] keyword[if] identifier[basename] . identifier[endswith] ( literal[string] ): identifier[basename] = identifier[basename] [:- literal[int] ] keyword[if] identifier[basename] . identifier[endswith] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[basename] : keyword[return] [ identifier[Distribution] . identifier[from_location] ( identifier[location] , identifier[basename] , identifier[metadata] )] keyword[if] identifier[basename] . identifier[endswith] ( literal[string] ): identifier[win_base] , identifier[py_ver] , identifier[platform] = identifier[parse_bdist_wininst] ( identifier[basename] ) keyword[if] identifier[win_base] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[interpret_distro_name] ( identifier[location] , identifier[win_base] , identifier[metadata] , identifier[py_ver] , identifier[BINARY_DIST] , identifier[platform] ) keyword[for] identifier[ext] keyword[in] identifier[EXTENSIONS] : keyword[if] identifier[basename] . identifier[endswith] ( identifier[ext] ): identifier[basename] = identifier[basename] [:- identifier[len] ( identifier[ext] )] keyword[return] identifier[interpret_distro_name] ( identifier[location] , identifier[basename] , identifier[metadata] ) keyword[return] []
def distros_for_location(location, basename, metadata=None): """Yield egg or source distribution objects based on basename""" if basename.endswith('.egg.zip'): basename = basename[:-4] # strip the .zip # depends on [control=['if'], data=[]] if basename.endswith('.egg') and '-' in basename: # only one, unambiguous interpretation return [Distribution.from_location(location, basename, metadata)] # depends on [control=['if'], data=[]] if basename.endswith('.exe'): (win_base, py_ver, platform) = parse_bdist_wininst(basename) if win_base is not None: return interpret_distro_name(location, win_base, metadata, py_ver, BINARY_DIST, platform) # depends on [control=['if'], data=['win_base']] # depends on [control=['if'], data=[]] # Try source distro extensions (.zip, .tgz, etc.) # for ext in EXTENSIONS: if basename.endswith(ext): basename = basename[:-len(ext)] return interpret_distro_name(location, basename, metadata) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ext']] return []
def is_agent_ready(self):
    """
    Used after making a successful announce to test when the
    agent is ready to accept data.

    :returns: ``True`` if the host agent responds with HTTP 200,
        ``False`` on any other status or on a connection error.
    """
    try:
        response = self.client.head(self.__data_url(), timeout=0.8)
        # Compare with ``==``: ``is 200`` identity-compares an int
        # literal and only works by accident of CPython's small-int
        # caching.
        return response.status_code == 200
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("is_agent_ready: host agent connection error")
        # Explicit False instead of an implicit None fall-through;
        # both are falsy, so callers' truthiness checks are unaffected.
        return False
def function[is_agent_ready, parameter[self]]: constant[ Used after making a successful announce to test when the agent is ready to accept data. ] <ast.Try object at 0x7da20c991870>
keyword[def] identifier[is_agent_ready] ( identifier[self] ): literal[string] keyword[try] : identifier[response] = identifier[self] . identifier[client] . identifier[head] ( identifier[self] . identifier[__data_url] (), identifier[timeout] = literal[int] ) keyword[if] identifier[response] . identifier[status_code] keyword[is] literal[int] : keyword[return] keyword[True] keyword[return] keyword[False] keyword[except] ( identifier[requests] . identifier[ConnectTimeout] , identifier[requests] . identifier[ConnectionError] ): identifier[logger] . identifier[debug] ( literal[string] )
def is_agent_ready(self): """ Used after making a successful announce to test when the agent is ready to accept data. """ try: response = self.client.head(self.__data_url(), timeout=0.8) if response.status_code is 200: return True # depends on [control=['if'], data=[]] return False # depends on [control=['try'], data=[]] except (requests.ConnectTimeout, requests.ConnectionError): logger.debug('is_agent_ready: host agent connection error') # depends on [control=['except'], data=[]]
def splitall(path):
    """
    Splits path in its components:
    foo/bar, /foo/bar and /foo/bar/ will all return ['foo', 'bar']
    """
    # Peel components off the right end, then reverse once at the end
    # (the leading root "/" produces an empty tail and terminates the loop).
    remaining, part = os.path.split(os.path.normpath(path))
    parts = []
    while part:
        parts.append(part)
        remaining, part = os.path.split(remaining)
    parts.reverse()
    return parts
def function[splitall, parameter[path]]: constant[ Splits path in its components: foo/bar, /foo/bar and /foo/bar/ will all return ['foo', 'bar'] ] <ast.Tuple object at 0x7da20c76c430> assign[=] call[name[os].path.split, parameter[call[name[os].path.normpath, parameter[name[path]]]]] variable[components] assign[=] list[[]] while name[tail] begin[:] call[name[components].insert, parameter[constant[0], name[tail]]] <ast.Tuple object at 0x7da20c76d240> assign[=] call[name[os].path.split, parameter[name[head]]] return[name[components]]
keyword[def] identifier[splitall] ( identifier[path] ): literal[string] identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[os] . identifier[path] . identifier[normpath] ( identifier[path] )) identifier[components] =[] keyword[while] identifier[tail] : identifier[components] . identifier[insert] ( literal[int] , identifier[tail] ) identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[head] ) keyword[return] identifier[components]
def splitall(path): """ Splits path in its components: foo/bar, /foo/bar and /foo/bar/ will all return ['foo', 'bar'] """ (head, tail) = os.path.split(os.path.normpath(path)) components = [] while tail: components.insert(0, tail) (head, tail) = os.path.split(head) # depends on [control=['while'], data=[]] return components
def current_window_handle(self):
    """
    Returns the handle of the current window.

    :Usage:
        ::

            driver.current_window_handle
    """
    # W3C-conformant remotes use a different wire command than the
    # legacy JSON Wire Protocol; pick the right one, same response shape.
    command = (Command.W3C_GET_CURRENT_WINDOW_HANDLE
               if self.w3c
               else Command.GET_CURRENT_WINDOW_HANDLE)
    return self.execute(command)['value']
def function[current_window_handle, parameter[self]]: constant[ Returns the handle of the current window. :Usage: :: driver.current_window_handle ] if name[self].w3c begin[:] return[call[call[name[self].execute, parameter[name[Command].W3C_GET_CURRENT_WINDOW_HANDLE]]][constant[value]]]
keyword[def] identifier[current_window_handle] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[w3c] : keyword[return] identifier[self] . identifier[execute] ( identifier[Command] . identifier[W3C_GET_CURRENT_WINDOW_HANDLE] )[ literal[string] ] keyword[else] : keyword[return] identifier[self] . identifier[execute] ( identifier[Command] . identifier[GET_CURRENT_WINDOW_HANDLE] )[ literal[string] ]
def current_window_handle(self): """ Returns the handle of the current window. :Usage: :: driver.current_window_handle """ if self.w3c: return self.execute(Command.W3C_GET_CURRENT_WINDOW_HANDLE)['value'] # depends on [control=['if'], data=[]] else: return self.execute(Command.GET_CURRENT_WINDOW_HANDLE)['value']
def _columns_for_table(table_name):
    """
    Return all of the columns registered for a given table.

    Parameters
    ----------
    table_name : str

    Returns
    -------
    columns : dict of column wrappers
        Keys will be column names.

    """
    # _COLUMNS is keyed by (table_name, column_name) tuples; filter to
    # this table and re-key by column name alone.
    result = {}
    for (tname, cname), col in _COLUMNS.items():
        if tname == table_name:
            result[cname] = col
    return result
def function[_columns_for_table, parameter[table_name]]: constant[ Return all of the columns registered for a given table. Parameters ---------- table_name : str Returns ------- columns : dict of column wrappers Keys will be column names. ] return[<ast.DictComp object at 0x7da2054a4a60>]
keyword[def] identifier[_columns_for_table] ( identifier[table_name] ): literal[string] keyword[return] { identifier[cname] : identifier[col] keyword[for] ( identifier[tname] , identifier[cname] ), identifier[col] keyword[in] identifier[_COLUMNS] . identifier[items] () keyword[if] identifier[tname] == identifier[table_name] }
def _columns_for_table(table_name): """ Return all of the columns registered for a given table. Parameters ---------- table_name : str Returns ------- columns : dict of column wrappers Keys will be column names. """ return {cname: col for ((tname, cname), col) in _COLUMNS.items() if tname == table_name}
def validate_attr_dict(self, attr_dict: Dict[str, str]) -> None:
    """
    Validates that ContractType keys in attr_dict reference existing
    manifest ContractTypes.

    :param attr_dict: mapping of link-reference name -> canonical address.
    :raises BytecodeLinkingError: if this contract has no link references,
        if the supplied names do not exactly match the contract's link
        references, or if any supplied address is not canonical.
    """
    attr_dict_names = list(attr_dict.keys())

    if not self.unlinked_references and not self.linked_references:
        raise BytecodeLinkingError(
            "Unable to validate attr dict, this contract has no linked/unlinked references."
        )

    # NOTE(review): the ``({},)`` fallback yields a ref with no "name"
    # key, which would raise KeyError below — presumably both reference
    # attributes are always populated together; confirm upstream.
    unlinked_refs = self.unlinked_references or ({},)
    linked_refs = self.linked_references or ({},)
    # Normalize both sides to tuples: if one attribute is a list while
    # the other falls back to the tuple default, ``+`` would raise
    # TypeError on the mixed-type concatenation.
    all_link_refs = tuple(unlinked_refs) + tuple(linked_refs)
    all_link_names = [ref["name"] for ref in all_link_refs]
    if set(attr_dict_names) != set(all_link_names):
        raise BytecodeLinkingError(
            "All link references must be defined when calling "
            "`link_bytecode` on a contract factory."
        )
    for address in attr_dict.values():
        if not is_canonical_address(address):
            # Error-message typo fixed: "canoncial" -> "canonical".
            raise BytecodeLinkingError(
                f"Address: {address} as specified in the attr_dict is not "
                "a valid canonical address."
            )
def function[validate_attr_dict, parameter[self, attr_dict]]: constant[ Validates that ContractType keys in attr_dict reference existing manifest ContractTypes. ] variable[attr_dict_names] assign[=] call[name[list], parameter[call[name[attr_dict].keys, parameter[]]]] if <ast.BoolOp object at 0x7da20e961420> begin[:] <ast.Raise object at 0x7da18dc050f0> variable[unlinked_refs] assign[=] <ast.BoolOp object at 0x7da18dc066e0> variable[linked_refs] assign[=] <ast.BoolOp object at 0x7da18dc05d80> variable[all_link_refs] assign[=] binary_operation[name[unlinked_refs] + name[linked_refs]] variable[all_link_names] assign[=] <ast.ListComp object at 0x7da18dc052d0> if compare[call[name[set], parameter[name[attr_dict_names]]] not_equal[!=] call[name[set], parameter[name[all_link_names]]]] begin[:] <ast.Raise object at 0x7da18dc06800> for taget[name[address]] in starred[call[name[attr_dict].values, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da18dc066b0> begin[:] <ast.Raise object at 0x7da18dc07820>
keyword[def] identifier[validate_attr_dict] ( identifier[self] , identifier[attr_dict] : identifier[Dict] [ identifier[str] , identifier[str] ])-> keyword[None] : literal[string] identifier[attr_dict_names] = identifier[list] ( identifier[attr_dict] . identifier[keys] ()) keyword[if] keyword[not] identifier[self] . identifier[unlinked_references] keyword[and] keyword[not] identifier[self] . identifier[linked_references] : keyword[raise] identifier[BytecodeLinkingError] ( literal[string] ) identifier[unlinked_refs] = identifier[self] . identifier[unlinked_references] keyword[or] ({},) identifier[linked_refs] = identifier[self] . identifier[linked_references] keyword[or] ({},) identifier[all_link_refs] = identifier[unlinked_refs] + identifier[linked_refs] identifier[all_link_names] =[ identifier[ref] [ literal[string] ] keyword[for] identifier[ref] keyword[in] identifier[all_link_refs] ] keyword[if] identifier[set] ( identifier[attr_dict_names] )!= identifier[set] ( identifier[all_link_names] ): keyword[raise] identifier[BytecodeLinkingError] ( literal[string] literal[string] ) keyword[for] identifier[address] keyword[in] identifier[attr_dict] . identifier[values] (): keyword[if] keyword[not] identifier[is_canonical_address] ( identifier[address] ): keyword[raise] identifier[BytecodeLinkingError] ( literal[string] literal[string] )
def validate_attr_dict(self, attr_dict: Dict[str, str]) -> None: """ Validates that ContractType keys in attr_dict reference existing manifest ContractTypes. """ attr_dict_names = list(attr_dict.keys()) if not self.unlinked_references and (not self.linked_references): raise BytecodeLinkingError('Unable to validate attr dict, this contract has no linked/unlinked references.') # depends on [control=['if'], data=[]] unlinked_refs = self.unlinked_references or ({},) linked_refs = self.linked_references or ({},) all_link_refs = unlinked_refs + linked_refs all_link_names = [ref['name'] for ref in all_link_refs] if set(attr_dict_names) != set(all_link_names): raise BytecodeLinkingError('All link references must be defined when calling `link_bytecode` on a contract factory.') # depends on [control=['if'], data=[]] for address in attr_dict.values(): if not is_canonical_address(address): raise BytecodeLinkingError(f'Address: {address} as specified in the attr_dict is not a valid canoncial address.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['address']]
def load_json_dict(filename, *args):
    """Load a JSON object from *filename*, returning ``{}`` on failure.

    :param filename: path of the JSON file to read.
    :param args: optional key names; when given, only those keys that are
        present in the loaded dict are returned.
    :returns: the parsed dict (possibly filtered to *args*), or ``{}`` if
        the file does not exist, cannot be read/parsed, or does not
        contain a JSON object.
    """
    data = {}
    if os.path.exists(filename):
        with open(filename, "r") as f:
            try:
                data = json.load(f)
            # Only swallow parse/read errors (json raises a ValueError
            # subclass); a bare ``except`` would also hide
            # KeyboardInterrupt, SystemExit and genuine bugs.
            except (ValueError, OSError):
                data = {}
        if not isinstance(data, dict):
            data = {}
    if args:
        return {key: data[key] for key in args if key in data}
    return data
def function[load_json_dict, parameter[filename]]: constant[Checks if file exists. Returns {} if something fails.] variable[data] assign[=] dictionary[[], []] if call[name[os].path.exists, parameter[name[filename]]] begin[:] with call[name[open], parameter[name[filename], constant[r]]] begin[:] <ast.Try object at 0x7da1b1b477f0> if name[args] begin[:] return[<ast.DictComp object at 0x7da1b1b45f90>] return[name[data]]
keyword[def] identifier[load_json_dict] ( identifier[filename] ,* identifier[args] ): literal[string] identifier[data] ={} keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ): keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : keyword[try] : identifier[data] = identifier[json] . identifier[load] ( identifier[f] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[dict] ): identifier[data] ={} keyword[except] : keyword[pass] keyword[if] identifier[args] : keyword[return] { identifier[key] : identifier[data] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[args] keyword[if] identifier[key] keyword[in] identifier[data] } keyword[return] identifier[data]
def load_json_dict(filename, *args): """Checks if file exists. Returns {} if something fails.""" data = {} if os.path.exists(filename): with open(filename, 'r') as f: try: data = json.load(f) if not isinstance(data, dict): data = {} # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['with'], data=['f']] if args: return {key: data[key] for key in args if key in data} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return data
def parse(self):
    """Extracts component keys from filename.

    :raises tvrenamer.exceptions.InvalidFilename:
        when filename was not parseable
    :raises tvrenamer.exceptions.ConfigValueError:
        when regex used for parsing was incorrectly configured
    """
    self.clean_name = formatter.apply_replacements(
        self.name, cfg.CONF.input_filename_replacements)

    parsed = parser.parse_filename(self.clean_name)
    if parsed is None:
        message = 'Invalid filename: unable to parse {0}'.format(
            self.clean_name)
        self.messages.append(message)
        LOG.info(message)
        raise exc.InvalidFilename(message)

    self.episode_numbers = parsed.get('episode_numbers')
    if self.episode_numbers is None:
        # Configured regex must capture the episode number somehow.
        message = ('Regex does not contain episode number group, '
                   'should contain episodenumber, episodenumber1-9, '
                   'or episodenumberstart and episodenumberend\n\n'
                   'Pattern was:\n' + parsed.get('pattern'))
        self.messages.append(message)
        LOG.info(message)
        raise exc.ConfigValueError(message)

    self.series_name = parsed.get('series_name')
    if self.series_name is None:
        message = ('Regex must contain seriesname. Pattern was:\n' +
                   parsed.get('pattern'))
        self.messages.append(message)
        LOG.info(message)
        raise exc.ConfigValueError(message)

    self.series_name = formatter.clean_series_name(self.series_name)
    self.season_number = parsed.get('season_number')
def function[parse, parameter[self]]: constant[Extracts component keys from filename. :raises tvrenamer.exceptions.InvalidFilename: when filename was not parseable :raises tvrenamer.exceptions.ConfigValueError: when regex used for parsing was incorrectly configured ] name[self].clean_name assign[=] call[name[formatter].apply_replacements, parameter[name[self].name, name[cfg].CONF.input_filename_replacements]] variable[output] assign[=] call[name[parser].parse_filename, parameter[name[self].clean_name]] if compare[name[output] is constant[None]] begin[:] call[name[self].messages.append, parameter[call[constant[Invalid filename: unable to parse {0}].format, parameter[name[self].clean_name]]]] call[name[LOG].info, parameter[call[name[self].messages][<ast.UnaryOp object at 0x7da1b0813100>]]] <ast.Raise object at 0x7da1b08113c0> name[self].episode_numbers assign[=] call[name[output].get, parameter[constant[episode_numbers]]] if compare[name[self].episode_numbers is constant[None]] begin[:] call[name[self].messages.append, parameter[binary_operation[constant[Regex does not contain episode number group, should contain episodenumber, episodenumber1-9, or episodenumberstart and episodenumberend Pattern was: ] + call[name[output].get, parameter[constant[pattern]]]]]] call[name[LOG].info, parameter[call[name[self].messages][<ast.UnaryOp object at 0x7da1b08131c0>]]] <ast.Raise object at 0x7da1b0813220> name[self].series_name assign[=] call[name[output].get, parameter[constant[series_name]]] if compare[name[self].series_name is constant[None]] begin[:] call[name[self].messages.append, parameter[binary_operation[constant[Regex must contain seriesname. 
Pattern was: ] + call[name[output].get, parameter[constant[pattern]]]]]] call[name[LOG].info, parameter[call[name[self].messages][<ast.UnaryOp object at 0x7da1b08103d0>]]] <ast.Raise object at 0x7da1b0812ec0> name[self].series_name assign[=] call[name[formatter].clean_series_name, parameter[name[self].series_name]] name[self].season_number assign[=] call[name[output].get, parameter[constant[season_number]]]
keyword[def] identifier[parse] ( identifier[self] ): literal[string] identifier[self] . identifier[clean_name] = identifier[formatter] . identifier[apply_replacements] ( identifier[self] . identifier[name] , identifier[cfg] . identifier[CONF] . identifier[input_filename_replacements] ) identifier[output] = identifier[parser] . identifier[parse_filename] ( identifier[self] . identifier[clean_name] ) keyword[if] identifier[output] keyword[is] keyword[None] : identifier[self] . identifier[messages] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[clean_name] )) identifier[LOG] . identifier[info] ( identifier[self] . identifier[messages] [- literal[int] ]) keyword[raise] identifier[exc] . identifier[InvalidFilename] ( identifier[self] . identifier[messages] [- literal[int] ]) identifier[self] . identifier[episode_numbers] = identifier[output] . identifier[get] ( literal[string] ) keyword[if] identifier[self] . identifier[episode_numbers] keyword[is] keyword[None] : identifier[self] . identifier[messages] . identifier[append] ( literal[string] literal[string] literal[string] literal[string] + identifier[output] . identifier[get] ( literal[string] )) identifier[LOG] . identifier[info] ( identifier[self] . identifier[messages] [- literal[int] ]) keyword[raise] identifier[exc] . identifier[ConfigValueError] ( identifier[self] . identifier[messages] [- literal[int] ]) identifier[self] . identifier[series_name] = identifier[output] . identifier[get] ( literal[string] ) keyword[if] identifier[self] . identifier[series_name] keyword[is] keyword[None] : identifier[self] . identifier[messages] . identifier[append] ( literal[string] + identifier[output] . identifier[get] ( literal[string] )) identifier[LOG] . identifier[info] ( identifier[self] . identifier[messages] [- literal[int] ]) keyword[raise] identifier[exc] . identifier[ConfigValueError] ( identifier[self] . identifier[messages] [- literal[int] ]) identifier[self] . 
identifier[series_name] = identifier[formatter] . identifier[clean_series_name] ( identifier[self] . identifier[series_name] ) identifier[self] . identifier[season_number] = identifier[output] . identifier[get] ( literal[string] )
def parse(self): """Extracts component keys from filename. :raises tvrenamer.exceptions.InvalidFilename: when filename was not parseable :raises tvrenamer.exceptions.ConfigValueError: when regex used for parsing was incorrectly configured """ self.clean_name = formatter.apply_replacements(self.name, cfg.CONF.input_filename_replacements) output = parser.parse_filename(self.clean_name) if output is None: self.messages.append('Invalid filename: unable to parse {0}'.format(self.clean_name)) LOG.info(self.messages[-1]) raise exc.InvalidFilename(self.messages[-1]) # depends on [control=['if'], data=[]] self.episode_numbers = output.get('episode_numbers') if self.episode_numbers is None: self.messages.append('Regex does not contain episode number group, should contain episodenumber, episodenumber1-9, or episodenumberstart and episodenumberend\n\nPattern was:\n' + output.get('pattern')) LOG.info(self.messages[-1]) raise exc.ConfigValueError(self.messages[-1]) # depends on [control=['if'], data=[]] self.series_name = output.get('series_name') if self.series_name is None: self.messages.append('Regex must contain seriesname. Pattern was:\n' + output.get('pattern')) LOG.info(self.messages[-1]) raise exc.ConfigValueError(self.messages[-1]) # depends on [control=['if'], data=[]] self.series_name = formatter.clean_series_name(self.series_name) self.season_number = output.get('season_number')
def find_related_modules(package, related_name_re='.+', ignore_exceptions=False):
    """Find matching modules using a package and a module name pattern.

    :param package: dotted package name to search in.
    :param related_name_re: regex matched against the last component of
        each candidate module name.
    :param ignore_exceptions: if ``True``, modules that fail to import
        are silently skipped instead of raising.
    :returns: list of imported module objects; ``[]`` if *package* is not
        actually a package.
    """
    # stacklevel=2 points the DeprecationWarning at the caller, not here.
    warnings.warn('find_related_modules has been deprecated.',
                  DeprecationWarning, stacklevel=2)
    package_elements = package.rsplit(".", 1)

    try:
        if len(package_elements) == 2:
            pkg = __import__(package_elements[0], globals(), locals(), [
                package_elements[1]])
            pkg = getattr(pkg, package_elements[1])
        else:
            pkg = __import__(package_elements[0], globals(), locals(), [])
        # Probe for __path__: only real packages have it, so a plain
        # module raises AttributeError and yields the empty result.
        pkg.__path__
    except AttributeError:
        return []

    # Find all modules named according to related_name
    p = re.compile(related_name_re)
    modules = []

    for name in find_modules(package, include_packages=True):
        if p.match(name.split('.')[-1]):
            try:
                modules.append(import_string(name, silent=ignore_exceptions))
            except Exception:
                if not ignore_exceptions:
                    # Bare re-raise keeps the original traceback intact
                    # (``raise e`` would restart it from this frame).
                    raise
    return modules
def function[find_related_modules, parameter[package, related_name_re, ignore_exceptions]]: constant[Find matching modules using a package and a module name pattern.] call[name[warnings].warn, parameter[constant[find_related_modules has been deprecated.], name[DeprecationWarning]]] variable[package_elements] assign[=] call[name[package].rsplit, parameter[constant[.], constant[1]]] <ast.Try object at 0x7da1b28fcfa0> variable[p] assign[=] call[name[re].compile, parameter[name[related_name_re]]] variable[modules] assign[=] list[[]] for taget[name[name]] in starred[call[name[find_modules], parameter[name[package]]]] begin[:] if call[name[p].match, parameter[call[call[name[name].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b28fff10>]]] begin[:] <ast.Try object at 0x7da1b28febc0> return[name[modules]]
keyword[def] identifier[find_related_modules] ( identifier[package] , identifier[related_name_re] = literal[string] , identifier[ignore_exceptions] = keyword[False] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] ) identifier[package_elements] = identifier[package] . identifier[rsplit] ( literal[string] , literal[int] ) keyword[try] : keyword[if] identifier[len] ( identifier[package_elements] )== literal[int] : identifier[pkg] = identifier[__import__] ( identifier[package_elements] [ literal[int] ], identifier[globals] (), identifier[locals] (),[ identifier[package_elements] [ literal[int] ]]) identifier[pkg] = identifier[getattr] ( identifier[pkg] , identifier[package_elements] [ literal[int] ]) keyword[else] : identifier[pkg] = identifier[__import__] ( identifier[package_elements] [ literal[int] ], identifier[globals] (), identifier[locals] (),[]) identifier[pkg_path] = identifier[pkg] . identifier[__path__] keyword[except] identifier[AttributeError] : keyword[return] [] identifier[p] = identifier[re] . identifier[compile] ( identifier[related_name_re] ) identifier[modules] =[] keyword[for] identifier[name] keyword[in] identifier[find_modules] ( identifier[package] , identifier[include_packages] = keyword[True] ): keyword[if] identifier[p] . identifier[match] ( identifier[name] . identifier[split] ( literal[string] )[- literal[int] ]): keyword[try] : identifier[modules] . identifier[append] ( identifier[import_string] ( identifier[name] , identifier[silent] = identifier[ignore_exceptions] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] keyword[not] identifier[ignore_exceptions] : keyword[raise] identifier[e] keyword[return] identifier[modules]
def find_related_modules(package, related_name_re='.+', ignore_exceptions=False): """Find matching modules using a package and a module name pattern.""" warnings.warn('find_related_modules has been deprecated.', DeprecationWarning) package_elements = package.rsplit('.', 1) try: if len(package_elements) == 2: pkg = __import__(package_elements[0], globals(), locals(), [package_elements[1]]) pkg = getattr(pkg, package_elements[1]) # depends on [control=['if'], data=[]] else: pkg = __import__(package_elements[0], globals(), locals(), []) pkg_path = pkg.__path__ # depends on [control=['try'], data=[]] except AttributeError: return [] # depends on [control=['except'], data=[]] # Find all modules named according to related_name p = re.compile(related_name_re) modules = [] for name in find_modules(package, include_packages=True): if p.match(name.split('.')[-1]): try: modules.append(import_string(name, silent=ignore_exceptions)) # depends on [control=['try'], data=[]] except Exception as e: if not ignore_exceptions: raise e # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] return modules
def remove(self): """ Remove all cgroups this instance represents from the system. This instance is afterwards not usable anymore! """ for cgroup in self.paths: remove_cgroup(cgroup) del self.paths del self.per_subsystem
def function[remove, parameter[self]]: constant[ Remove all cgroups this instance represents from the system. This instance is afterwards not usable anymore! ] for taget[name[cgroup]] in starred[name[self].paths] begin[:] call[name[remove_cgroup], parameter[name[cgroup]]] <ast.Delete object at 0x7da20c6c6fe0> <ast.Delete object at 0x7da20c6c53c0>
keyword[def] identifier[remove] ( identifier[self] ): literal[string] keyword[for] identifier[cgroup] keyword[in] identifier[self] . identifier[paths] : identifier[remove_cgroup] ( identifier[cgroup] ) keyword[del] identifier[self] . identifier[paths] keyword[del] identifier[self] . identifier[per_subsystem]
def remove(self): """ Remove all cgroups this instance represents from the system. This instance is afterwards not usable anymore! """ for cgroup in self.paths: remove_cgroup(cgroup) # depends on [control=['for'], data=['cgroup']] del self.paths del self.per_subsystem
def doc(inherit=None, **kwargs): """Annotate the decorated view function or class with the specified Swagger attributes. Usage: .. code-block:: python @doc(tags=['pet'], description='a pet store') def get_pet(pet_id): return Pet.query.filter(Pet.id == pet_id).one() :param inherit: Inherit Swagger documentation from parent classes """ def wrapper(func): annotate(func, 'docs', [kwargs], inherit=inherit) return activate(func) return wrapper
def function[doc, parameter[inherit]]: constant[Annotate the decorated view function or class with the specified Swagger attributes. Usage: .. code-block:: python @doc(tags=['pet'], description='a pet store') def get_pet(pet_id): return Pet.query.filter(Pet.id == pet_id).one() :param inherit: Inherit Swagger documentation from parent classes ] def function[wrapper, parameter[func]]: call[name[annotate], parameter[name[func], constant[docs], list[[<ast.Name object at 0x7da1b1ddae60>]]]] return[call[name[activate], parameter[name[func]]]] return[name[wrapper]]
keyword[def] identifier[doc] ( identifier[inherit] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[wrapper] ( identifier[func] ): identifier[annotate] ( identifier[func] , literal[string] ,[ identifier[kwargs] ], identifier[inherit] = identifier[inherit] ) keyword[return] identifier[activate] ( identifier[func] ) keyword[return] identifier[wrapper]
def doc(inherit=None, **kwargs): """Annotate the decorated view function or class with the specified Swagger attributes. Usage: .. code-block:: python @doc(tags=['pet'], description='a pet store') def get_pet(pet_id): return Pet.query.filter(Pet.id == pet_id).one() :param inherit: Inherit Swagger documentation from parent classes """ def wrapper(func): annotate(func, 'docs', [kwargs], inherit=inherit) return activate(func) return wrapper
def rolling_window_sequences(X, window_size, target_size, value_column, time_column): """ Function that takes in a pandas.DataFrame and a window_size then creates output arrays that correspond to a timeseries sequence with window_size overlap. The output arrays can be fed into a timeseries forecasting model. Assumes the input is timeseries sorted. Args: X (pandas.DataFrame): a pandas dataframe which has 'timestamp' and 'value' columns, and is sorted based on timestamp. The timestamp column is in UNIX format (in seconds). window_size (int): number of values that overlap to create the sequence. value_column (string): name of column that has the value field. time_column (string): name of column that has the time field. Returns: (numpy.ndarray): contains the time series sequenced data with each entry having window_size rows. (numpy.ndarray): acts as the label for the forecasting problem with each entry having window_size rows. (numpy.ndarray): the corresponding timestamps series. """ output_X = [] y = [] time = [] for start in range(len(X) - window_size - target_size): end = start + window_size output_X.append(X.iloc[start:end][value_column].values.reshape([-1, 1])) y.append(X.iloc[end:end + target_size][value_column].values) time.append(X.iloc[end + 1][time_column]) return np.asarray(output_X), np.asarray(y), np.asarray(time)
def function[rolling_window_sequences, parameter[X, window_size, target_size, value_column, time_column]]: constant[ Function that takes in a pandas.DataFrame and a window_size then creates output arrays that correspond to a timeseries sequence with window_size overlap. The output arrays can be fed into a timeseries forecasting model. Assumes the input is timeseries sorted. Args: X (pandas.DataFrame): a pandas dataframe which has 'timestamp' and 'value' columns, and is sorted based on timestamp. The timestamp column is in UNIX format (in seconds). window_size (int): number of values that overlap to create the sequence. value_column (string): name of column that has the value field. time_column (string): name of column that has the time field. Returns: (numpy.ndarray): contains the time series sequenced data with each entry having window_size rows. (numpy.ndarray): acts as the label for the forecasting problem with each entry having window_size rows. (numpy.ndarray): the corresponding timestamps series. ] variable[output_X] assign[=] list[[]] variable[y] assign[=] list[[]] variable[time] assign[=] list[[]] for taget[name[start]] in starred[call[name[range], parameter[binary_operation[binary_operation[call[name[len], parameter[name[X]]] - name[window_size]] - name[target_size]]]]] begin[:] variable[end] assign[=] binary_operation[name[start] + name[window_size]] call[name[output_X].append, parameter[call[call[call[name[X].iloc][<ast.Slice object at 0x7da18dc06770>]][name[value_column]].values.reshape, parameter[list[[<ast.UnaryOp object at 0x7da18dc057e0>, <ast.Constant object at 0x7da18dc07bb0>]]]]]] call[name[y].append, parameter[call[call[name[X].iloc][<ast.Slice object at 0x7da18dc05630>]][name[value_column]].values]] call[name[time].append, parameter[call[call[name[X].iloc][binary_operation[name[end] + constant[1]]]][name[time_column]]]] return[tuple[[<ast.Call object at 0x7da18dc05ab0>, <ast.Call object at 0x7da18dc06470>, <ast.Call object at 0x7da18dc07940>]]]
keyword[def] identifier[rolling_window_sequences] ( identifier[X] , identifier[window_size] , identifier[target_size] , identifier[value_column] , identifier[time_column] ): literal[string] identifier[output_X] =[] identifier[y] =[] identifier[time] =[] keyword[for] identifier[start] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )- identifier[window_size] - identifier[target_size] ): identifier[end] = identifier[start] + identifier[window_size] identifier[output_X] . identifier[append] ( identifier[X] . identifier[iloc] [ identifier[start] : identifier[end] ][ identifier[value_column] ]. identifier[values] . identifier[reshape] ([- literal[int] , literal[int] ])) identifier[y] . identifier[append] ( identifier[X] . identifier[iloc] [ identifier[end] : identifier[end] + identifier[target_size] ][ identifier[value_column] ]. identifier[values] ) identifier[time] . identifier[append] ( identifier[X] . identifier[iloc] [ identifier[end] + literal[int] ][ identifier[time_column] ]) keyword[return] identifier[np] . identifier[asarray] ( identifier[output_X] ), identifier[np] . identifier[asarray] ( identifier[y] ), identifier[np] . identifier[asarray] ( identifier[time] )
def rolling_window_sequences(X, window_size, target_size, value_column, time_column): """ Function that takes in a pandas.DataFrame and a window_size then creates output arrays that correspond to a timeseries sequence with window_size overlap. The output arrays can be fed into a timeseries forecasting model. Assumes the input is timeseries sorted. Args: X (pandas.DataFrame): a pandas dataframe which has 'timestamp' and 'value' columns, and is sorted based on timestamp. The timestamp column is in UNIX format (in seconds). window_size (int): number of values that overlap to create the sequence. value_column (string): name of column that has the value field. time_column (string): name of column that has the time field. Returns: (numpy.ndarray): contains the time series sequenced data with each entry having window_size rows. (numpy.ndarray): acts as the label for the forecasting problem with each entry having window_size rows. (numpy.ndarray): the corresponding timestamps series. """ output_X = [] y = [] time = [] for start in range(len(X) - window_size - target_size): end = start + window_size output_X.append(X.iloc[start:end][value_column].values.reshape([-1, 1])) y.append(X.iloc[end:end + target_size][value_column].values) time.append(X.iloc[end + 1][time_column]) # depends on [control=['for'], data=['start']] return (np.asarray(output_X), np.asarray(y), np.asarray(time))
def setup(self): """ Initialize the crochet library. This starts the reactor in a thread, and connect's Twisted's logs to Python's standard library logging module. This must be called at least once before the library can be used, and can be called multiple times. """ if self._started: return self._common_setup() if platform.type == "posix": self._reactor.callFromThread(self._startReapingProcesses) if self._startLoggingWithObserver: observer = ThreadLogObserver(PythonLoggingObserver().emit) def start(): # Twisted is going to override warnings.showwarning; let's # make sure that has no effect: from twisted.python import log original = log.showwarning log.showwarning = warnings.showwarning self._startLoggingWithObserver(observer, False) log.showwarning = original self._reactor.callFromThread(start) # We only want to stop the logging thread once the reactor has # shut down: self._reactor.addSystemEventTrigger( "after", "shutdown", observer.stop) t = threading.Thread( target=lambda: self._reactor.run(installSignalHandlers=False), name="CrochetReactor") t.start() self._atexit_register(self._reactor.callFromThread, self._reactor.stop) self._atexit_register(_store.log_errors) if self._watchdog_thread is not None: self._watchdog_thread.start()
def function[setup, parameter[self]]: constant[ Initialize the crochet library. This starts the reactor in a thread, and connect's Twisted's logs to Python's standard library logging module. This must be called at least once before the library can be used, and can be called multiple times. ] if name[self]._started begin[:] return[None] call[name[self]._common_setup, parameter[]] if compare[name[platform].type equal[==] constant[posix]] begin[:] call[name[self]._reactor.callFromThread, parameter[name[self]._startReapingProcesses]] if name[self]._startLoggingWithObserver begin[:] variable[observer] assign[=] call[name[ThreadLogObserver], parameter[call[name[PythonLoggingObserver], parameter[]].emit]] def function[start, parameter[]]: from relative_module[twisted.python] import module[log] variable[original] assign[=] name[log].showwarning name[log].showwarning assign[=] name[warnings].showwarning call[name[self]._startLoggingWithObserver, parameter[name[observer], constant[False]]] name[log].showwarning assign[=] name[original] call[name[self]._reactor.callFromThread, parameter[name[start]]] call[name[self]._reactor.addSystemEventTrigger, parameter[constant[after], constant[shutdown], name[observer].stop]] variable[t] assign[=] call[name[threading].Thread, parameter[]] call[name[t].start, parameter[]] call[name[self]._atexit_register, parameter[name[self]._reactor.callFromThread, name[self]._reactor.stop]] call[name[self]._atexit_register, parameter[name[_store].log_errors]] if compare[name[self]._watchdog_thread is_not constant[None]] begin[:] call[name[self]._watchdog_thread.start, parameter[]]
keyword[def] identifier[setup] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_started] : keyword[return] identifier[self] . identifier[_common_setup] () keyword[if] identifier[platform] . identifier[type] == literal[string] : identifier[self] . identifier[_reactor] . identifier[callFromThread] ( identifier[self] . identifier[_startReapingProcesses] ) keyword[if] identifier[self] . identifier[_startLoggingWithObserver] : identifier[observer] = identifier[ThreadLogObserver] ( identifier[PythonLoggingObserver] (). identifier[emit] ) keyword[def] identifier[start] (): keyword[from] identifier[twisted] . identifier[python] keyword[import] identifier[log] identifier[original] = identifier[log] . identifier[showwarning] identifier[log] . identifier[showwarning] = identifier[warnings] . identifier[showwarning] identifier[self] . identifier[_startLoggingWithObserver] ( identifier[observer] , keyword[False] ) identifier[log] . identifier[showwarning] = identifier[original] identifier[self] . identifier[_reactor] . identifier[callFromThread] ( identifier[start] ) identifier[self] . identifier[_reactor] . identifier[addSystemEventTrigger] ( literal[string] , literal[string] , identifier[observer] . identifier[stop] ) identifier[t] = identifier[threading] . identifier[Thread] ( identifier[target] = keyword[lambda] : identifier[self] . identifier[_reactor] . identifier[run] ( identifier[installSignalHandlers] = keyword[False] ), identifier[name] = literal[string] ) identifier[t] . identifier[start] () identifier[self] . identifier[_atexit_register] ( identifier[self] . identifier[_reactor] . identifier[callFromThread] , identifier[self] . identifier[_reactor] . identifier[stop] ) identifier[self] . identifier[_atexit_register] ( identifier[_store] . identifier[log_errors] ) keyword[if] identifier[self] . identifier[_watchdog_thread] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_watchdog_thread] . identifier[start] ()
def setup(self): """ Initialize the crochet library. This starts the reactor in a thread, and connect's Twisted's logs to Python's standard library logging module. This must be called at least once before the library can be used, and can be called multiple times. """ if self._started: return # depends on [control=['if'], data=[]] self._common_setup() if platform.type == 'posix': self._reactor.callFromThread(self._startReapingProcesses) # depends on [control=['if'], data=[]] if self._startLoggingWithObserver: observer = ThreadLogObserver(PythonLoggingObserver().emit) def start(): # Twisted is going to override warnings.showwarning; let's # make sure that has no effect: from twisted.python import log original = log.showwarning log.showwarning = warnings.showwarning self._startLoggingWithObserver(observer, False) log.showwarning = original self._reactor.callFromThread(start) # We only want to stop the logging thread once the reactor has # shut down: self._reactor.addSystemEventTrigger('after', 'shutdown', observer.stop) # depends on [control=['if'], data=[]] t = threading.Thread(target=lambda : self._reactor.run(installSignalHandlers=False), name='CrochetReactor') t.start() self._atexit_register(self._reactor.callFromThread, self._reactor.stop) self._atexit_register(_store.log_errors) if self._watchdog_thread is not None: self._watchdog_thread.start() # depends on [control=['if'], data=[]]
def hosting_devices_unassigned_from_cfg_agent(self, context, ids, host): """Notify cfg agent to no longer handle some hosting devices. This notification relieves the cfg agent in <host> of responsibility to monitor and configure hosting devices with id specified in <ids>. """ self._host_notification(context, 'hosting_devices_unassigned_from_cfg_agent', {'hosting_device_ids': ids}, host)
def function[hosting_devices_unassigned_from_cfg_agent, parameter[self, context, ids, host]]: constant[Notify cfg agent to no longer handle some hosting devices. This notification relieves the cfg agent in <host> of responsibility to monitor and configure hosting devices with id specified in <ids>. ] call[name[self]._host_notification, parameter[name[context], constant[hosting_devices_unassigned_from_cfg_agent], dictionary[[<ast.Constant object at 0x7da1b1be4eb0>], [<ast.Name object at 0x7da1b1be5a20>]], name[host]]]
keyword[def] identifier[hosting_devices_unassigned_from_cfg_agent] ( identifier[self] , identifier[context] , identifier[ids] , identifier[host] ): literal[string] identifier[self] . identifier[_host_notification] ( identifier[context] , literal[string] , { literal[string] : identifier[ids] }, identifier[host] )
def hosting_devices_unassigned_from_cfg_agent(self, context, ids, host): """Notify cfg agent to no longer handle some hosting devices. This notification relieves the cfg agent in <host> of responsibility to monitor and configure hosting devices with id specified in <ids>. """ self._host_notification(context, 'hosting_devices_unassigned_from_cfg_agent', {'hosting_device_ids': ids}, host)
def fixpath(path): """Normalize path fixing case, making absolute and removing symlinks""" norm = osp.normcase if os.name == 'nt' else osp.normpath return norm(osp.abspath(osp.realpath(path)))
def function[fixpath, parameter[path]]: constant[Normalize path fixing case, making absolute and removing symlinks] variable[norm] assign[=] <ast.IfExp object at 0x7da2043459c0> return[call[name[norm], parameter[call[name[osp].abspath, parameter[call[name[osp].realpath, parameter[name[path]]]]]]]]
keyword[def] identifier[fixpath] ( identifier[path] ): literal[string] identifier[norm] = identifier[osp] . identifier[normcase] keyword[if] identifier[os] . identifier[name] == literal[string] keyword[else] identifier[osp] . identifier[normpath] keyword[return] identifier[norm] ( identifier[osp] . identifier[abspath] ( identifier[osp] . identifier[realpath] ( identifier[path] )))
def fixpath(path): """Normalize path fixing case, making absolute and removing symlinks""" norm = osp.normcase if os.name == 'nt' else osp.normpath return norm(osp.abspath(osp.realpath(path)))
def dbType(self, typ): """ Returns the database object type based on the given connection type. :param typ: <str> :return: <str> """ return self.TypeMap.get(typ, self.TypeMap.get('Default'))
def function[dbType, parameter[self, typ]]: constant[ Returns the database object type based on the given connection type. :param typ: <str> :return: <str> ] return[call[name[self].TypeMap.get, parameter[name[typ], call[name[self].TypeMap.get, parameter[constant[Default]]]]]]
keyword[def] identifier[dbType] ( identifier[self] , identifier[typ] ): literal[string] keyword[return] identifier[self] . identifier[TypeMap] . identifier[get] ( identifier[typ] , identifier[self] . identifier[TypeMap] . identifier[get] ( literal[string] ))
def dbType(self, typ): """ Returns the database object type based on the given connection type. :param typ: <str> :return: <str> """ return self.TypeMap.get(typ, self.TypeMap.get('Default'))
def get_items(self, gos): """Given GO terms, return genes or gene products for the GOs.""" items = [] for go_id in gos: items.extend(self.go2items.get(go_id, [])) return set(items)
def function[get_items, parameter[self, gos]]: constant[Given GO terms, return genes or gene products for the GOs.] variable[items] assign[=] list[[]] for taget[name[go_id]] in starred[name[gos]] begin[:] call[name[items].extend, parameter[call[name[self].go2items.get, parameter[name[go_id], list[[]]]]]] return[call[name[set], parameter[name[items]]]]
keyword[def] identifier[get_items] ( identifier[self] , identifier[gos] ): literal[string] identifier[items] =[] keyword[for] identifier[go_id] keyword[in] identifier[gos] : identifier[items] . identifier[extend] ( identifier[self] . identifier[go2items] . identifier[get] ( identifier[go_id] ,[])) keyword[return] identifier[set] ( identifier[items] )
def get_items(self, gos): """Given GO terms, return genes or gene products for the GOs.""" items = [] for go_id in gos: items.extend(self.go2items.get(go_id, [])) # depends on [control=['for'], data=['go_id']] return set(items)
def set_widgets(self): """Set widgets on the Hazard Category tab.""" self.clear_further_steps() # Set widgets self.lstHazardCategories.clear() self.lblDescribeHazardCategory.setText('') self.lblSelectHazardCategory.setText( hazard_category_question) hazard_categories = self.hazard_categories_for_layer() for hazard_category in hazard_categories: if not isinstance(hazard_category, dict): # noinspection PyTypeChecker hazard_category = definition(hazard_category) # noinspection PyTypeChecker item = QListWidgetItem( hazard_category['name'], self.lstHazardCategories) # noinspection PyTypeChecker item.setData(QtCore.Qt.UserRole, hazard_category['key']) self.lstHazardCategories.addItem(item) # Set values based on existing keywords (if already assigned) category_keyword = self.parent.get_existing_keyword('hazard_category') if category_keyword: categories = [] for index in range(self.lstHazardCategories.count()): item = self.lstHazardCategories.item(index) categories.append(item.data(QtCore.Qt.UserRole)) if category_keyword in categories: self.lstHazardCategories.setCurrentRow( categories.index(category_keyword)) self.auto_select_one_item(self.lstHazardCategories)
def function[set_widgets, parameter[self]]: constant[Set widgets on the Hazard Category tab.] call[name[self].clear_further_steps, parameter[]] call[name[self].lstHazardCategories.clear, parameter[]] call[name[self].lblDescribeHazardCategory.setText, parameter[constant[]]] call[name[self].lblSelectHazardCategory.setText, parameter[name[hazard_category_question]]] variable[hazard_categories] assign[=] call[name[self].hazard_categories_for_layer, parameter[]] for taget[name[hazard_category]] in starred[name[hazard_categories]] begin[:] if <ast.UnaryOp object at 0x7da18dc04550> begin[:] variable[hazard_category] assign[=] call[name[definition], parameter[name[hazard_category]]] variable[item] assign[=] call[name[QListWidgetItem], parameter[call[name[hazard_category]][constant[name]], name[self].lstHazardCategories]] call[name[item].setData, parameter[name[QtCore].Qt.UserRole, call[name[hazard_category]][constant[key]]]] call[name[self].lstHazardCategories.addItem, parameter[name[item]]] variable[category_keyword] assign[=] call[name[self].parent.get_existing_keyword, parameter[constant[hazard_category]]] if name[category_keyword] begin[:] variable[categories] assign[=] list[[]] for taget[name[index]] in starred[call[name[range], parameter[call[name[self].lstHazardCategories.count, parameter[]]]]] begin[:] variable[item] assign[=] call[name[self].lstHazardCategories.item, parameter[name[index]]] call[name[categories].append, parameter[call[name[item].data, parameter[name[QtCore].Qt.UserRole]]]] if compare[name[category_keyword] in name[categories]] begin[:] call[name[self].lstHazardCategories.setCurrentRow, parameter[call[name[categories].index, parameter[name[category_keyword]]]]] call[name[self].auto_select_one_item, parameter[name[self].lstHazardCategories]]
keyword[def] identifier[set_widgets] ( identifier[self] ): literal[string] identifier[self] . identifier[clear_further_steps] () identifier[self] . identifier[lstHazardCategories] . identifier[clear] () identifier[self] . identifier[lblDescribeHazardCategory] . identifier[setText] ( literal[string] ) identifier[self] . identifier[lblSelectHazardCategory] . identifier[setText] ( identifier[hazard_category_question] ) identifier[hazard_categories] = identifier[self] . identifier[hazard_categories_for_layer] () keyword[for] identifier[hazard_category] keyword[in] identifier[hazard_categories] : keyword[if] keyword[not] identifier[isinstance] ( identifier[hazard_category] , identifier[dict] ): identifier[hazard_category] = identifier[definition] ( identifier[hazard_category] ) identifier[item] = identifier[QListWidgetItem] ( identifier[hazard_category] [ literal[string] ], identifier[self] . identifier[lstHazardCategories] ) identifier[item] . identifier[setData] ( identifier[QtCore] . identifier[Qt] . identifier[UserRole] , identifier[hazard_category] [ literal[string] ]) identifier[self] . identifier[lstHazardCategories] . identifier[addItem] ( identifier[item] ) identifier[category_keyword] = identifier[self] . identifier[parent] . identifier[get_existing_keyword] ( literal[string] ) keyword[if] identifier[category_keyword] : identifier[categories] =[] keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[self] . identifier[lstHazardCategories] . identifier[count] ()): identifier[item] = identifier[self] . identifier[lstHazardCategories] . identifier[item] ( identifier[index] ) identifier[categories] . identifier[append] ( identifier[item] . identifier[data] ( identifier[QtCore] . identifier[Qt] . identifier[UserRole] )) keyword[if] identifier[category_keyword] keyword[in] identifier[categories] : identifier[self] . identifier[lstHazardCategories] . identifier[setCurrentRow] ( identifier[categories] . 
identifier[index] ( identifier[category_keyword] )) identifier[self] . identifier[auto_select_one_item] ( identifier[self] . identifier[lstHazardCategories] )
def set_widgets(self): """Set widgets on the Hazard Category tab.""" self.clear_further_steps() # Set widgets self.lstHazardCategories.clear() self.lblDescribeHazardCategory.setText('') self.lblSelectHazardCategory.setText(hazard_category_question) hazard_categories = self.hazard_categories_for_layer() for hazard_category in hazard_categories: if not isinstance(hazard_category, dict): # noinspection PyTypeChecker hazard_category = definition(hazard_category) # depends on [control=['if'], data=[]] # noinspection PyTypeChecker item = QListWidgetItem(hazard_category['name'], self.lstHazardCategories) # noinspection PyTypeChecker item.setData(QtCore.Qt.UserRole, hazard_category['key']) self.lstHazardCategories.addItem(item) # depends on [control=['for'], data=['hazard_category']] # Set values based on existing keywords (if already assigned) category_keyword = self.parent.get_existing_keyword('hazard_category') if category_keyword: categories = [] for index in range(self.lstHazardCategories.count()): item = self.lstHazardCategories.item(index) categories.append(item.data(QtCore.Qt.UserRole)) # depends on [control=['for'], data=['index']] if category_keyword in categories: self.lstHazardCategories.setCurrentRow(categories.index(category_keyword)) # depends on [control=['if'], data=['category_keyword', 'categories']] # depends on [control=['if'], data=[]] self.auto_select_one_item(self.lstHazardCategories)
def indent(el, level=0): """Function to pretty print the xml, meaning adding tabs and newlines. :param ElementTree.Element el: Current element. :param int level: Current level. """ i = '\n' + level * '\t' if len(el): if not el.text or not el.text.strip(): el.text = i+'\t' if not el.tail or not el.tail.strip(): el.tail = i for elem in el: indent(elem, level+1) if not el.tail or not el.tail.strip(): el.tail = i else: if level and (not el.tail or not el.tail.strip()): el.tail = i
def function[indent, parameter[el, level]]: constant[Function to pretty print the xml, meaning adding tabs and newlines. :param ElementTree.Element el: Current element. :param int level: Current level. ] variable[i] assign[=] binary_operation[constant[ ] + binary_operation[name[level] * constant[ ]]] if call[name[len], parameter[name[el]]] begin[:] if <ast.BoolOp object at 0x7da1b02d0b80> begin[:] name[el].text assign[=] binary_operation[name[i] + constant[ ]] if <ast.BoolOp object at 0x7da1b03ba500> begin[:] name[el].tail assign[=] name[i] for taget[name[elem]] in starred[name[el]] begin[:] call[name[indent], parameter[name[elem], binary_operation[name[level] + constant[1]]]] if <ast.BoolOp object at 0x7da1b03bb430> begin[:] name[el].tail assign[=] name[i]
keyword[def] identifier[indent] ( identifier[el] , identifier[level] = literal[int] ): literal[string] identifier[i] = literal[string] + identifier[level] * literal[string] keyword[if] identifier[len] ( identifier[el] ): keyword[if] keyword[not] identifier[el] . identifier[text] keyword[or] keyword[not] identifier[el] . identifier[text] . identifier[strip] (): identifier[el] . identifier[text] = identifier[i] + literal[string] keyword[if] keyword[not] identifier[el] . identifier[tail] keyword[or] keyword[not] identifier[el] . identifier[tail] . identifier[strip] (): identifier[el] . identifier[tail] = identifier[i] keyword[for] identifier[elem] keyword[in] identifier[el] : identifier[indent] ( identifier[elem] , identifier[level] + literal[int] ) keyword[if] keyword[not] identifier[el] . identifier[tail] keyword[or] keyword[not] identifier[el] . identifier[tail] . identifier[strip] (): identifier[el] . identifier[tail] = identifier[i] keyword[else] : keyword[if] identifier[level] keyword[and] ( keyword[not] identifier[el] . identifier[tail] keyword[or] keyword[not] identifier[el] . identifier[tail] . identifier[strip] ()): identifier[el] . identifier[tail] = identifier[i]
def indent(el, level=0): """Function to pretty print the xml, meaning adding tabs and newlines. :param ElementTree.Element el: Current element. :param int level: Current level. """ i = '\n' + level * '\t' if len(el): if not el.text or not el.text.strip(): el.text = i + '\t' # depends on [control=['if'], data=[]] if not el.tail or not el.tail.strip(): el.tail = i # depends on [control=['if'], data=[]] for elem in el: indent(elem, level + 1) # depends on [control=['for'], data=['elem']] if not el.tail or not el.tail.strip(): el.tail = i # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif level and (not el.tail or not el.tail.strip()): el.tail = i # depends on [control=['if'], data=[]]
def d2logpdf_dlink2_dvar(self, link_f, y, Y_metadata=None): """ Gradient of the hessian (d2logpdf_dlink2) w.r.t variance parameter (noise_variance) .. math:: \\frac{d}{d\\sigma^{2}}(\\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}\\lambda(f)}) = \\frac{1}{\\sigma^{4}} :param link_f: latent variables link(f) :type link_f: Nx1 array :param y: data :type y: Nx1 array :param Y_metadata: Y_metadata not used in gaussian :returns: derivative of log hessian evaluated at points link(f_i) and link(f_j) w.r.t variance parameter :rtype: Nx1 array """ s_4 = 1.0/(self.variance**2) N = y.shape[0] D = link_f.shape[1] d2logpdf_dlink2_dvar = np.ones((N, D))*s_4 return d2logpdf_dlink2_dvar
def function[d2logpdf_dlink2_dvar, parameter[self, link_f, y, Y_metadata]]: constant[ Gradient of the hessian (d2logpdf_dlink2) w.r.t variance parameter (noise_variance) .. math:: \frac{d}{d\sigma^{2}}(\frac{d^{2} \ln p(y_{i}|\lambda(f_{i}))}{d^{2}\lambda(f)}) = \frac{1}{\sigma^{4}} :param link_f: latent variables link(f) :type link_f: Nx1 array :param y: data :type y: Nx1 array :param Y_metadata: Y_metadata not used in gaussian :returns: derivative of log hessian evaluated at points link(f_i) and link(f_j) w.r.t variance parameter :rtype: Nx1 array ] variable[s_4] assign[=] binary_operation[constant[1.0] / binary_operation[name[self].variance ** constant[2]]] variable[N] assign[=] call[name[y].shape][constant[0]] variable[D] assign[=] call[name[link_f].shape][constant[1]] variable[d2logpdf_dlink2_dvar] assign[=] binary_operation[call[name[np].ones, parameter[tuple[[<ast.Name object at 0x7da1b1ba8dc0>, <ast.Name object at 0x7da1b1baaef0>]]]] * name[s_4]] return[name[d2logpdf_dlink2_dvar]]
keyword[def] identifier[d2logpdf_dlink2_dvar] ( identifier[self] , identifier[link_f] , identifier[y] , identifier[Y_metadata] = keyword[None] ): literal[string] identifier[s_4] = literal[int] /( identifier[self] . identifier[variance] ** literal[int] ) identifier[N] = identifier[y] . identifier[shape] [ literal[int] ] identifier[D] = identifier[link_f] . identifier[shape] [ literal[int] ] identifier[d2logpdf_dlink2_dvar] = identifier[np] . identifier[ones] (( identifier[N] , identifier[D] ))* identifier[s_4] keyword[return] identifier[d2logpdf_dlink2_dvar]
def d2logpdf_dlink2_dvar(self, link_f, y, Y_metadata=None): """ Gradient of the hessian (d2logpdf_dlink2) w.r.t variance parameter (noise_variance) .. math:: \\frac{d}{d\\sigma^{2}}(\\frac{d^{2} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{2}\\lambda(f)}) = \\frac{1}{\\sigma^{4}} :param link_f: latent variables link(f) :type link_f: Nx1 array :param y: data :type y: Nx1 array :param Y_metadata: Y_metadata not used in gaussian :returns: derivative of log hessian evaluated at points link(f_i) and link(f_j) w.r.t variance parameter :rtype: Nx1 array """ s_4 = 1.0 / self.variance ** 2 N = y.shape[0] D = link_f.shape[1] d2logpdf_dlink2_dvar = np.ones((N, D)) * s_4 return d2logpdf_dlink2_dvar
def output_compressed_dinf(dinfflowang, compdinffile, weightfile): """Output compressed Dinf flow direction and weight to raster file Args: dinfflowang: Dinf flow direction raster file compdinffile: Compressed D8 flow code weightfile: The correspond weight """ dinf_r = RasterUtilClass.read_raster(dinfflowang) data = dinf_r.data xsize = dinf_r.nCols ysize = dinf_r.nRows nodata_value = dinf_r.noDataValue cal_dir_code = frompyfunc(DinfUtil.compress_dinf, 2, 3) updated_angle, dir_code, weight = cal_dir_code(data, nodata_value) RasterUtilClass.write_gtiff_file(dinfflowang, ysize, xsize, updated_angle, dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Float32) RasterUtilClass.write_gtiff_file(compdinffile, ysize, xsize, dir_code, dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Int16) RasterUtilClass.write_gtiff_file(weightfile, ysize, xsize, weight, dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Float32)
def function[output_compressed_dinf, parameter[dinfflowang, compdinffile, weightfile]]: constant[Output compressed Dinf flow direction and weight to raster file Args: dinfflowang: Dinf flow direction raster file compdinffile: Compressed D8 flow code weightfile: The correspond weight ] variable[dinf_r] assign[=] call[name[RasterUtilClass].read_raster, parameter[name[dinfflowang]]] variable[data] assign[=] name[dinf_r].data variable[xsize] assign[=] name[dinf_r].nCols variable[ysize] assign[=] name[dinf_r].nRows variable[nodata_value] assign[=] name[dinf_r].noDataValue variable[cal_dir_code] assign[=] call[name[frompyfunc], parameter[name[DinfUtil].compress_dinf, constant[2], constant[3]]] <ast.Tuple object at 0x7da1b23592d0> assign[=] call[name[cal_dir_code], parameter[name[data], name[nodata_value]]] call[name[RasterUtilClass].write_gtiff_file, parameter[name[dinfflowang], name[ysize], name[xsize], name[updated_angle], name[dinf_r].geotrans, name[dinf_r].srs, name[DEFAULT_NODATA], name[GDT_Float32]]] call[name[RasterUtilClass].write_gtiff_file, parameter[name[compdinffile], name[ysize], name[xsize], name[dir_code], name[dinf_r].geotrans, name[dinf_r].srs, name[DEFAULT_NODATA], name[GDT_Int16]]] call[name[RasterUtilClass].write_gtiff_file, parameter[name[weightfile], name[ysize], name[xsize], name[weight], name[dinf_r].geotrans, name[dinf_r].srs, name[DEFAULT_NODATA], name[GDT_Float32]]]
keyword[def] identifier[output_compressed_dinf] ( identifier[dinfflowang] , identifier[compdinffile] , identifier[weightfile] ): literal[string] identifier[dinf_r] = identifier[RasterUtilClass] . identifier[read_raster] ( identifier[dinfflowang] ) identifier[data] = identifier[dinf_r] . identifier[data] identifier[xsize] = identifier[dinf_r] . identifier[nCols] identifier[ysize] = identifier[dinf_r] . identifier[nRows] identifier[nodata_value] = identifier[dinf_r] . identifier[noDataValue] identifier[cal_dir_code] = identifier[frompyfunc] ( identifier[DinfUtil] . identifier[compress_dinf] , literal[int] , literal[int] ) identifier[updated_angle] , identifier[dir_code] , identifier[weight] = identifier[cal_dir_code] ( identifier[data] , identifier[nodata_value] ) identifier[RasterUtilClass] . identifier[write_gtiff_file] ( identifier[dinfflowang] , identifier[ysize] , identifier[xsize] , identifier[updated_angle] , identifier[dinf_r] . identifier[geotrans] , identifier[dinf_r] . identifier[srs] , identifier[DEFAULT_NODATA] , identifier[GDT_Float32] ) identifier[RasterUtilClass] . identifier[write_gtiff_file] ( identifier[compdinffile] , identifier[ysize] , identifier[xsize] , identifier[dir_code] , identifier[dinf_r] . identifier[geotrans] , identifier[dinf_r] . identifier[srs] , identifier[DEFAULT_NODATA] , identifier[GDT_Int16] ) identifier[RasterUtilClass] . identifier[write_gtiff_file] ( identifier[weightfile] , identifier[ysize] , identifier[xsize] , identifier[weight] , identifier[dinf_r] . identifier[geotrans] , identifier[dinf_r] . identifier[srs] , identifier[DEFAULT_NODATA] , identifier[GDT_Float32] )
def output_compressed_dinf(dinfflowang, compdinffile, weightfile): """Output compressed Dinf flow direction and weight to raster file Args: dinfflowang: Dinf flow direction raster file compdinffile: Compressed D8 flow code weightfile: The correspond weight """ dinf_r = RasterUtilClass.read_raster(dinfflowang) data = dinf_r.data xsize = dinf_r.nCols ysize = dinf_r.nRows nodata_value = dinf_r.noDataValue cal_dir_code = frompyfunc(DinfUtil.compress_dinf, 2, 3) (updated_angle, dir_code, weight) = cal_dir_code(data, nodata_value) RasterUtilClass.write_gtiff_file(dinfflowang, ysize, xsize, updated_angle, dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Float32) RasterUtilClass.write_gtiff_file(compdinffile, ysize, xsize, dir_code, dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Int16) RasterUtilClass.write_gtiff_file(weightfile, ysize, xsize, weight, dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Float32)
def json(self, **kwargs): """Decodes response as JSON.""" encoding = detect_encoding(self.content[:4]) value = self.content.decode(encoding) return simplejson.loads(value, **kwargs)
def function[json, parameter[self]]: constant[Decodes response as JSON.] variable[encoding] assign[=] call[name[detect_encoding], parameter[call[name[self].content][<ast.Slice object at 0x7da2045646a0>]]] variable[value] assign[=] call[name[self].content.decode, parameter[name[encoding]]] return[call[name[simplejson].loads, parameter[name[value]]]]
keyword[def] identifier[json] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[encoding] = identifier[detect_encoding] ( identifier[self] . identifier[content] [: literal[int] ]) identifier[value] = identifier[self] . identifier[content] . identifier[decode] ( identifier[encoding] ) keyword[return] identifier[simplejson] . identifier[loads] ( identifier[value] ,** identifier[kwargs] )
def json(self, **kwargs): """Decodes response as JSON.""" encoding = detect_encoding(self.content[:4]) value = self.content.decode(encoding) return simplejson.loads(value, **kwargs)
def cumargmax(a): """Cumulative argmax. Parameters ---------- a : np.ndarray Returns ------- np.ndarray """ # Thank you @Alex Riley # https://stackoverflow.com/a/40675969/7954504 m = np.asarray(np.maximum.accumulate(a)) if a.ndim == 1: x = np.arange(a.shape[0]) else: x = np.repeat(np.arange(a.shape[0])[:, None], a.shape[1], axis=1) x[1:] *= m[:-1] < m[1:] np.maximum.accumulate(x, axis=0, out=x) return x
def function[cumargmax, parameter[a]]: constant[Cumulative argmax. Parameters ---------- a : np.ndarray Returns ------- np.ndarray ] variable[m] assign[=] call[name[np].asarray, parameter[call[name[np].maximum.accumulate, parameter[name[a]]]]] if compare[name[a].ndim equal[==] constant[1]] begin[:] variable[x] assign[=] call[name[np].arange, parameter[call[name[a].shape][constant[0]]]] <ast.AugAssign object at 0x7da1b07e83a0> call[name[np].maximum.accumulate, parameter[name[x]]] return[name[x]]
keyword[def] identifier[cumargmax] ( identifier[a] ): literal[string] identifier[m] = identifier[np] . identifier[asarray] ( identifier[np] . identifier[maximum] . identifier[accumulate] ( identifier[a] )) keyword[if] identifier[a] . identifier[ndim] == literal[int] : identifier[x] = identifier[np] . identifier[arange] ( identifier[a] . identifier[shape] [ literal[int] ]) keyword[else] : identifier[x] = identifier[np] . identifier[repeat] ( identifier[np] . identifier[arange] ( identifier[a] . identifier[shape] [ literal[int] ])[:, keyword[None] ], identifier[a] . identifier[shape] [ literal[int] ], identifier[axis] = literal[int] ) identifier[x] [ literal[int] :]*= identifier[m] [:- literal[int] ]< identifier[m] [ literal[int] :] identifier[np] . identifier[maximum] . identifier[accumulate] ( identifier[x] , identifier[axis] = literal[int] , identifier[out] = identifier[x] ) keyword[return] identifier[x]
def cumargmax(a): """Cumulative argmax. Parameters ---------- a : np.ndarray Returns ------- np.ndarray """ # Thank you @Alex Riley # https://stackoverflow.com/a/40675969/7954504 m = np.asarray(np.maximum.accumulate(a)) if a.ndim == 1: x = np.arange(a.shape[0]) # depends on [control=['if'], data=[]] else: x = np.repeat(np.arange(a.shape[0])[:, None], a.shape[1], axis=1) x[1:] *= m[:-1] < m[1:] np.maximum.accumulate(x, axis=0, out=x) return x
def is_resource_modified( environ, etag=None, data=None, last_modified=None, ignore_if_range=True ): """Convenience method for conditional requests. :param environ: the WSGI environment of the request to be checked. :param etag: the etag for the response for comparison. :param data: or alternatively the data of the response to automatically generate an etag using :func:`generate_etag`. :param last_modified: an optional date of the last modification. :param ignore_if_range: If `False`, `If-Range` header will be taken into account. :return: `True` if the resource was modified, otherwise `False`. """ if etag is None and data is not None: etag = generate_etag(data) elif data is not None: raise TypeError("both data and etag given") if environ["REQUEST_METHOD"] not in ("GET", "HEAD"): return False unmodified = False if isinstance(last_modified, string_types): last_modified = parse_date(last_modified) # ensure that microsecond is zero because the HTTP spec does not transmit # that either and we might have some false positives. See issue #39 if last_modified is not None: last_modified = last_modified.replace(microsecond=0) if_range = None if not ignore_if_range and "HTTP_RANGE" in environ: # https://tools.ietf.org/html/rfc7233#section-3.2 # A server MUST ignore an If-Range header field received in a request # that does not contain a Range header field. 
if_range = parse_if_range_header(environ.get("HTTP_IF_RANGE")) if if_range is not None and if_range.date is not None: modified_since = if_range.date else: modified_since = parse_date(environ.get("HTTP_IF_MODIFIED_SINCE")) if modified_since and last_modified and last_modified <= modified_since: unmodified = True if etag: etag, _ = unquote_etag(etag) if if_range is not None and if_range.etag is not None: unmodified = parse_etags(if_range.etag).contains(etag) else: if_none_match = parse_etags(environ.get("HTTP_IF_NONE_MATCH")) if if_none_match: # https://tools.ietf.org/html/rfc7232#section-3.2 # "A recipient MUST use the weak comparison function when comparing # entity-tags for If-None-Match" unmodified = if_none_match.contains_weak(etag) # https://tools.ietf.org/html/rfc7232#section-3.1 # "Origin server MUST use the strong comparison function when # comparing entity-tags for If-Match" if_match = parse_etags(environ.get("HTTP_IF_MATCH")) if if_match: unmodified = not if_match.is_strong(etag) return not unmodified
def function[is_resource_modified, parameter[environ, etag, data, last_modified, ignore_if_range]]: constant[Convenience method for conditional requests. :param environ: the WSGI environment of the request to be checked. :param etag: the etag for the response for comparison. :param data: or alternatively the data of the response to automatically generate an etag using :func:`generate_etag`. :param last_modified: an optional date of the last modification. :param ignore_if_range: If `False`, `If-Range` header will be taken into account. :return: `True` if the resource was modified, otherwise `False`. ] if <ast.BoolOp object at 0x7da18f58e260> begin[:] variable[etag] assign[=] call[name[generate_etag], parameter[name[data]]] if compare[call[name[environ]][constant[REQUEST_METHOD]] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18f58c580>, <ast.Constant object at 0x7da18f58e590>]]] begin[:] return[constant[False]] variable[unmodified] assign[=] constant[False] if call[name[isinstance], parameter[name[last_modified], name[string_types]]] begin[:] variable[last_modified] assign[=] call[name[parse_date], parameter[name[last_modified]]] if compare[name[last_modified] is_not constant[None]] begin[:] variable[last_modified] assign[=] call[name[last_modified].replace, parameter[]] variable[if_range] assign[=] constant[None] if <ast.BoolOp object at 0x7da18f58c4c0> begin[:] variable[if_range] assign[=] call[name[parse_if_range_header], parameter[call[name[environ].get, parameter[constant[HTTP_IF_RANGE]]]]] if <ast.BoolOp object at 0x7da18f58c9a0> begin[:] variable[modified_since] assign[=] name[if_range].date if <ast.BoolOp object at 0x7da18f58d510> begin[:] variable[unmodified] assign[=] constant[True] if name[etag] begin[:] <ast.Tuple object at 0x7da18f58d720> assign[=] call[name[unquote_etag], parameter[name[etag]]] if <ast.BoolOp object at 0x7da18f58dcc0> begin[:] variable[unmodified] assign[=] call[call[name[parse_etags], 
parameter[name[if_range].etag]].contains, parameter[name[etag]]] return[<ast.UnaryOp object at 0x7da18f58efb0>]
keyword[def] identifier[is_resource_modified] ( identifier[environ] , identifier[etag] = keyword[None] , identifier[data] = keyword[None] , identifier[last_modified] = keyword[None] , identifier[ignore_if_range] = keyword[True] ): literal[string] keyword[if] identifier[etag] keyword[is] keyword[None] keyword[and] identifier[data] keyword[is] keyword[not] keyword[None] : identifier[etag] = identifier[generate_etag] ( identifier[data] ) keyword[elif] identifier[data] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[environ] [ literal[string] ] keyword[not] keyword[in] ( literal[string] , literal[string] ): keyword[return] keyword[False] identifier[unmodified] = keyword[False] keyword[if] identifier[isinstance] ( identifier[last_modified] , identifier[string_types] ): identifier[last_modified] = identifier[parse_date] ( identifier[last_modified] ) keyword[if] identifier[last_modified] keyword[is] keyword[not] keyword[None] : identifier[last_modified] = identifier[last_modified] . identifier[replace] ( identifier[microsecond] = literal[int] ) identifier[if_range] = keyword[None] keyword[if] keyword[not] identifier[ignore_if_range] keyword[and] literal[string] keyword[in] identifier[environ] : identifier[if_range] = identifier[parse_if_range_header] ( identifier[environ] . identifier[get] ( literal[string] )) keyword[if] identifier[if_range] keyword[is] keyword[not] keyword[None] keyword[and] identifier[if_range] . identifier[date] keyword[is] keyword[not] keyword[None] : identifier[modified_since] = identifier[if_range] . identifier[date] keyword[else] : identifier[modified_since] = identifier[parse_date] ( identifier[environ] . 
identifier[get] ( literal[string] )) keyword[if] identifier[modified_since] keyword[and] identifier[last_modified] keyword[and] identifier[last_modified] <= identifier[modified_since] : identifier[unmodified] = keyword[True] keyword[if] identifier[etag] : identifier[etag] , identifier[_] = identifier[unquote_etag] ( identifier[etag] ) keyword[if] identifier[if_range] keyword[is] keyword[not] keyword[None] keyword[and] identifier[if_range] . identifier[etag] keyword[is] keyword[not] keyword[None] : identifier[unmodified] = identifier[parse_etags] ( identifier[if_range] . identifier[etag] ). identifier[contains] ( identifier[etag] ) keyword[else] : identifier[if_none_match] = identifier[parse_etags] ( identifier[environ] . identifier[get] ( literal[string] )) keyword[if] identifier[if_none_match] : identifier[unmodified] = identifier[if_none_match] . identifier[contains_weak] ( identifier[etag] ) identifier[if_match] = identifier[parse_etags] ( identifier[environ] . identifier[get] ( literal[string] )) keyword[if] identifier[if_match] : identifier[unmodified] = keyword[not] identifier[if_match] . identifier[is_strong] ( identifier[etag] ) keyword[return] keyword[not] identifier[unmodified]
def is_resource_modified(environ, etag=None, data=None, last_modified=None, ignore_if_range=True): """Convenience method for conditional requests. :param environ: the WSGI environment of the request to be checked. :param etag: the etag for the response for comparison. :param data: or alternatively the data of the response to automatically generate an etag using :func:`generate_etag`. :param last_modified: an optional date of the last modification. :param ignore_if_range: If `False`, `If-Range` header will be taken into account. :return: `True` if the resource was modified, otherwise `False`. """ if etag is None and data is not None: etag = generate_etag(data) # depends on [control=['if'], data=[]] elif data is not None: raise TypeError('both data and etag given') # depends on [control=['if'], data=[]] if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'): return False # depends on [control=['if'], data=[]] unmodified = False if isinstance(last_modified, string_types): last_modified = parse_date(last_modified) # depends on [control=['if'], data=[]] # ensure that microsecond is zero because the HTTP spec does not transmit # that either and we might have some false positives. See issue #39 if last_modified is not None: last_modified = last_modified.replace(microsecond=0) # depends on [control=['if'], data=['last_modified']] if_range = None if not ignore_if_range and 'HTTP_RANGE' in environ: # https://tools.ietf.org/html/rfc7233#section-3.2 # A server MUST ignore an If-Range header field received in a request # that does not contain a Range header field. 
if_range = parse_if_range_header(environ.get('HTTP_IF_RANGE')) # depends on [control=['if'], data=[]] if if_range is not None and if_range.date is not None: modified_since = if_range.date # depends on [control=['if'], data=[]] else: modified_since = parse_date(environ.get('HTTP_IF_MODIFIED_SINCE')) if modified_since and last_modified and (last_modified <= modified_since): unmodified = True # depends on [control=['if'], data=[]] if etag: (etag, _) = unquote_etag(etag) if if_range is not None and if_range.etag is not None: unmodified = parse_etags(if_range.etag).contains(etag) # depends on [control=['if'], data=[]] else: if_none_match = parse_etags(environ.get('HTTP_IF_NONE_MATCH')) if if_none_match: # https://tools.ietf.org/html/rfc7232#section-3.2 # "A recipient MUST use the weak comparison function when comparing # entity-tags for If-None-Match" unmodified = if_none_match.contains_weak(etag) # depends on [control=['if'], data=[]] # https://tools.ietf.org/html/rfc7232#section-3.1 # "Origin server MUST use the strong comparison function when # comparing entity-tags for If-Match" if_match = parse_etags(environ.get('HTTP_IF_MATCH')) if if_match: unmodified = not if_match.is_strong(etag) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return not unmodified
def create(cls, community, record, user=None, expires_at=None, notify=True): """Create a record inclusion request to a community. :param community: Community object. :param record: Record API object. :param expires_at: Time after which the request expires and shouldn't be resolved anymore. """ if expires_at and expires_at < datetime.utcnow(): raise InclusionRequestExpiryTimeError( community=community, record=record) if community.has_record(record): raise InclusionRequestObsoleteError( community=community, record=record) try: # Create inclusion request with db.session.begin_nested(): obj = cls( id_community=community.id, id_record=record.id, user=user, expires_at=expires_at ) db.session.add(obj) except (IntegrityError, FlushError): raise InclusionRequestExistsError( community=community, record=record) # Send signal inclusion_request_created.send( current_app._get_current_object(), request=obj, notify=notify ) return obj
def function[create, parameter[cls, community, record, user, expires_at, notify]]: constant[Create a record inclusion request to a community. :param community: Community object. :param record: Record API object. :param expires_at: Time after which the request expires and shouldn't be resolved anymore. ] if <ast.BoolOp object at 0x7da2041dab30> begin[:] <ast.Raise object at 0x7da2041d99c0> if call[name[community].has_record, parameter[name[record]]] begin[:] <ast.Raise object at 0x7da2041d9990> <ast.Try object at 0x7da2041daf80> call[name[inclusion_request_created].send, parameter[call[name[current_app]._get_current_object, parameter[]]]] return[name[obj]]
keyword[def] identifier[create] ( identifier[cls] , identifier[community] , identifier[record] , identifier[user] = keyword[None] , identifier[expires_at] = keyword[None] , identifier[notify] = keyword[True] ): literal[string] keyword[if] identifier[expires_at] keyword[and] identifier[expires_at] < identifier[datetime] . identifier[utcnow] (): keyword[raise] identifier[InclusionRequestExpiryTimeError] ( identifier[community] = identifier[community] , identifier[record] = identifier[record] ) keyword[if] identifier[community] . identifier[has_record] ( identifier[record] ): keyword[raise] identifier[InclusionRequestObsoleteError] ( identifier[community] = identifier[community] , identifier[record] = identifier[record] ) keyword[try] : keyword[with] identifier[db] . identifier[session] . identifier[begin_nested] (): identifier[obj] = identifier[cls] ( identifier[id_community] = identifier[community] . identifier[id] , identifier[id_record] = identifier[record] . identifier[id] , identifier[user] = identifier[user] , identifier[expires_at] = identifier[expires_at] ) identifier[db] . identifier[session] . identifier[add] ( identifier[obj] ) keyword[except] ( identifier[IntegrityError] , identifier[FlushError] ): keyword[raise] identifier[InclusionRequestExistsError] ( identifier[community] = identifier[community] , identifier[record] = identifier[record] ) identifier[inclusion_request_created] . identifier[send] ( identifier[current_app] . identifier[_get_current_object] (), identifier[request] = identifier[obj] , identifier[notify] = identifier[notify] ) keyword[return] identifier[obj]
def create(cls, community, record, user=None, expires_at=None, notify=True): """Create a record inclusion request to a community. :param community: Community object. :param record: Record API object. :param expires_at: Time after which the request expires and shouldn't be resolved anymore. """ if expires_at and expires_at < datetime.utcnow(): raise InclusionRequestExpiryTimeError(community=community, record=record) # depends on [control=['if'], data=[]] if community.has_record(record): raise InclusionRequestObsoleteError(community=community, record=record) # depends on [control=['if'], data=[]] try: # Create inclusion request with db.session.begin_nested(): obj = cls(id_community=community.id, id_record=record.id, user=user, expires_at=expires_at) db.session.add(obj) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except (IntegrityError, FlushError): raise InclusionRequestExistsError(community=community, record=record) # depends on [control=['except'], data=[]] # Send signal inclusion_request_created.send(current_app._get_current_object(), request=obj, notify=notify) return obj
def _construct_operation_id(self, service_name, protorpc_method_name): """Return an operation id for a service method. Args: service_name: The name of the service. protorpc_method_name: The ProtoRPC method name. Returns: A string representing the operation id. """ # camelCase the ProtoRPC method name method_name_camel = util.snake_case_to_headless_camel_case( protorpc_method_name) return '{0}_{1}'.format(service_name, method_name_camel)
def function[_construct_operation_id, parameter[self, service_name, protorpc_method_name]]: constant[Return an operation id for a service method. Args: service_name: The name of the service. protorpc_method_name: The ProtoRPC method name. Returns: A string representing the operation id. ] variable[method_name_camel] assign[=] call[name[util].snake_case_to_headless_camel_case, parameter[name[protorpc_method_name]]] return[call[constant[{0}_{1}].format, parameter[name[service_name], name[method_name_camel]]]]
keyword[def] identifier[_construct_operation_id] ( identifier[self] , identifier[service_name] , identifier[protorpc_method_name] ): literal[string] identifier[method_name_camel] = identifier[util] . identifier[snake_case_to_headless_camel_case] ( identifier[protorpc_method_name] ) keyword[return] literal[string] . identifier[format] ( identifier[service_name] , identifier[method_name_camel] )
def _construct_operation_id(self, service_name, protorpc_method_name): """Return an operation id for a service method. Args: service_name: The name of the service. protorpc_method_name: The ProtoRPC method name. Returns: A string representing the operation id. """ # camelCase the ProtoRPC method name method_name_camel = util.snake_case_to_headless_camel_case(protorpc_method_name) return '{0}_{1}'.format(service_name, method_name_camel)
def emit(self, tup, **kwargs): """Modified emit that will not return task IDs after emitting. See :class:`pystorm.component.Bolt` for more information. :returns: ``None``. """ kwargs["need_task_ids"] = False return super(BatchingBolt, self).emit(tup, **kwargs)
def function[emit, parameter[self, tup]]: constant[Modified emit that will not return task IDs after emitting. See :class:`pystorm.component.Bolt` for more information. :returns: ``None``. ] call[name[kwargs]][constant[need_task_ids]] assign[=] constant[False] return[call[call[name[super], parameter[name[BatchingBolt], name[self]]].emit, parameter[name[tup]]]]
keyword[def] identifier[emit] ( identifier[self] , identifier[tup] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[False] keyword[return] identifier[super] ( identifier[BatchingBolt] , identifier[self] ). identifier[emit] ( identifier[tup] ,** identifier[kwargs] )
def emit(self, tup, **kwargs): """Modified emit that will not return task IDs after emitting. See :class:`pystorm.component.Bolt` for more information. :returns: ``None``. """ kwargs['need_task_ids'] = False return super(BatchingBolt, self).emit(tup, **kwargs)
def image_targets_bottom(x, model_hparams, vocab_size): """Bottom transformation for target images.""" pixel_embedding_size = 64 inputs = x with tf.variable_scope("image_modality"): if not tf.executing_eagerly(): tf.summary.image( "targets_bottom", common_layers.tpu_safe_image_summary(inputs), max_outputs=1) inputs_shape = common_layers.shape_list(inputs) if len(inputs_shape) != 4: raise ValueError("Assuming images given as int tensors in the format " "[batch, height, width, channels] (256 values).") # We embed each of 256=vocab_size possible pixel values. embedding_var = tf.get_variable( "pixel_embedding", [vocab_size, pixel_embedding_size]) hot_inputs = tf.one_hot(tf.to_int32(inputs), vocab_size) hot_inputs = tf.reshape(hot_inputs, [-1, vocab_size]) embedded = tf.matmul(hot_inputs, embedding_var) # Let's now merge all channels that were embedded into a single vector. merged_size = pixel_embedding_size * inputs_shape[3] embedded = tf.reshape(embedded, inputs_shape[:3] + [merged_size]) merged = tf.layers.dense( embedded, model_hparams.hidden_size, name="merge_pixel_embedded_channels") return merged
def function[image_targets_bottom, parameter[x, model_hparams, vocab_size]]: constant[Bottom transformation for target images.] variable[pixel_embedding_size] assign[=] constant[64] variable[inputs] assign[=] name[x] with call[name[tf].variable_scope, parameter[constant[image_modality]]] begin[:] if <ast.UnaryOp object at 0x7da18bccbe80> begin[:] call[name[tf].summary.image, parameter[constant[targets_bottom], call[name[common_layers].tpu_safe_image_summary, parameter[name[inputs]]]]] variable[inputs_shape] assign[=] call[name[common_layers].shape_list, parameter[name[inputs]]] if compare[call[name[len], parameter[name[inputs_shape]]] not_equal[!=] constant[4]] begin[:] <ast.Raise object at 0x7da18bccaad0> variable[embedding_var] assign[=] call[name[tf].get_variable, parameter[constant[pixel_embedding], list[[<ast.Name object at 0x7da18bcc9c60>, <ast.Name object at 0x7da18bcc83d0>]]]] variable[hot_inputs] assign[=] call[name[tf].one_hot, parameter[call[name[tf].to_int32, parameter[name[inputs]]], name[vocab_size]]] variable[hot_inputs] assign[=] call[name[tf].reshape, parameter[name[hot_inputs], list[[<ast.UnaryOp object at 0x7da18bccb850>, <ast.Name object at 0x7da18bcca380>]]]] variable[embedded] assign[=] call[name[tf].matmul, parameter[name[hot_inputs], name[embedding_var]]] variable[merged_size] assign[=] binary_operation[name[pixel_embedding_size] * call[name[inputs_shape]][constant[3]]] variable[embedded] assign[=] call[name[tf].reshape, parameter[name[embedded], binary_operation[call[name[inputs_shape]][<ast.Slice object at 0x7da1b1ff0310>] + list[[<ast.Name object at 0x7da1b1ff0eb0>]]]]] variable[merged] assign[=] call[name[tf].layers.dense, parameter[name[embedded], name[model_hparams].hidden_size]] return[name[merged]]
keyword[def] identifier[image_targets_bottom] ( identifier[x] , identifier[model_hparams] , identifier[vocab_size] ): literal[string] identifier[pixel_embedding_size] = literal[int] identifier[inputs] = identifier[x] keyword[with] identifier[tf] . identifier[variable_scope] ( literal[string] ): keyword[if] keyword[not] identifier[tf] . identifier[executing_eagerly] (): identifier[tf] . identifier[summary] . identifier[image] ( literal[string] , identifier[common_layers] . identifier[tpu_safe_image_summary] ( identifier[inputs] ), identifier[max_outputs] = literal[int] ) identifier[inputs_shape] = identifier[common_layers] . identifier[shape_list] ( identifier[inputs] ) keyword[if] identifier[len] ( identifier[inputs_shape] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[embedding_var] = identifier[tf] . identifier[get_variable] ( literal[string] , [ identifier[vocab_size] , identifier[pixel_embedding_size] ]) identifier[hot_inputs] = identifier[tf] . identifier[one_hot] ( identifier[tf] . identifier[to_int32] ( identifier[inputs] ), identifier[vocab_size] ) identifier[hot_inputs] = identifier[tf] . identifier[reshape] ( identifier[hot_inputs] ,[- literal[int] , identifier[vocab_size] ]) identifier[embedded] = identifier[tf] . identifier[matmul] ( identifier[hot_inputs] , identifier[embedding_var] ) identifier[merged_size] = identifier[pixel_embedding_size] * identifier[inputs_shape] [ literal[int] ] identifier[embedded] = identifier[tf] . identifier[reshape] ( identifier[embedded] , identifier[inputs_shape] [: literal[int] ]+[ identifier[merged_size] ]) identifier[merged] = identifier[tf] . identifier[layers] . identifier[dense] ( identifier[embedded] , identifier[model_hparams] . identifier[hidden_size] , identifier[name] = literal[string] ) keyword[return] identifier[merged]
def image_targets_bottom(x, model_hparams, vocab_size): """Bottom transformation for target images.""" pixel_embedding_size = 64 inputs = x with tf.variable_scope('image_modality'): if not tf.executing_eagerly(): tf.summary.image('targets_bottom', common_layers.tpu_safe_image_summary(inputs), max_outputs=1) # depends on [control=['if'], data=[]] inputs_shape = common_layers.shape_list(inputs) if len(inputs_shape) != 4: raise ValueError('Assuming images given as int tensors in the format [batch, height, width, channels] (256 values).') # depends on [control=['if'], data=[]] # We embed each of 256=vocab_size possible pixel values. embedding_var = tf.get_variable('pixel_embedding', [vocab_size, pixel_embedding_size]) hot_inputs = tf.one_hot(tf.to_int32(inputs), vocab_size) hot_inputs = tf.reshape(hot_inputs, [-1, vocab_size]) embedded = tf.matmul(hot_inputs, embedding_var) # Let's now merge all channels that were embedded into a single vector. merged_size = pixel_embedding_size * inputs_shape[3] embedded = tf.reshape(embedded, inputs_shape[:3] + [merged_size]) merged = tf.layers.dense(embedded, model_hparams.hidden_size, name='merge_pixel_embedded_channels') return merged # depends on [control=['with'], data=[]]
def random_weights(nobj: int, nweight: int) -> List[List[float]]:
    """ Generate random weight vectors for ``nobj`` objectives as per the
    weighted Tchebycheff method of Steuer & Choo [SteCho83]_.

    .. [SteCho83] Steuer, R. E. & Choo, E.-U. An interactive weighted
                  Tchebycheff procedure for multiple objective programming,
                  Mathematical programming, Springer, 1983, 26, 326-344

    Parameters
    ----------
    nobj:
        Number of objective functions

    nweight:
        Number of weight vectors to be generated

    Returns
    -------
    List[List[float]
        nobj x nweight matrix of weight vectors

    """
    # Start from the candidate-pool size recommended in
    # Miettinen, K. Nonlinear Multiobjective Optimization,
    # Kluwer Academic Publishers, 1999.
    pool_size = 50 * nobj

    # Grow the pool geometrically until it can supply nweight vectors.
    while pool_size < nweight:
        pool_size *= 2

    candidates = np.random.rand(pool_size, nobj)

    return _centroids(nobj, candidates)
def function[random_weights, parameter[nobj, nweight]]: constant[ Generatate nw random weight vectors for nof objectives as per Tchebycheff method [SteCho83]_ .. [SteCho83] Steuer, R. E. & Choo, E.-U. An interactive weighted Tchebycheff procedure for multiple objective programming, Mathematical programming, Springer, 1983, 26, 326-344 Parameters ---------- nobj: Number of objective functions nweight: Number of weights vectors to be generated Returns ------- List[List[float] nobj x nweight matrix of weight vectors ] variable[wspace] assign[=] binary_operation[constant[50] * name[nobj]] while compare[name[wspace] less[<] name[nweight]] begin[:] <ast.AugAssign object at 0x7da18ede7e80> variable[weights] assign[=] call[name[np].random.rand, parameter[name[wspace], name[nobj]]] return[call[name[_centroids], parameter[name[nobj], name[weights]]]]
keyword[def] identifier[random_weights] ( identifier[nobj] : identifier[int] , identifier[nweight] : identifier[int] )-> identifier[List] [ identifier[List] [ identifier[float] ]]: literal[string] identifier[wspace] = literal[int] * identifier[nobj] keyword[while] identifier[wspace] < identifier[nweight] : identifier[wspace] *= literal[int] identifier[weights] = identifier[np] . identifier[random] . identifier[rand] ( identifier[wspace] , identifier[nobj] ) keyword[return] identifier[_centroids] ( identifier[nobj] , identifier[weights] )
def random_weights(nobj: int, nweight: int) -> List[List[float]]: """ Generatate nw random weight vectors for nof objectives as per Tchebycheff method [SteCho83]_ .. [SteCho83] Steuer, R. E. & Choo, E.-U. An interactive weighted Tchebycheff procedure for multiple objective programming, Mathematical programming, Springer, 1983, 26, 326-344 Parameters ---------- nobj: Number of objective functions nweight: Number of weights vectors to be generated Returns ------- List[List[float] nobj x nweight matrix of weight vectors """ # Initial wector space as per # Miettinen, K. Nonlinear Multiobjective Optimization # Kluwer Academic Publishers, 1999 wspace = 50 * nobj while wspace < nweight: wspace *= 2 # depends on [control=['while'], data=['wspace']] weights = np.random.rand(wspace, nobj) return _centroids(nobj, weights)
def bidiagonalize_real_matrix_pair_with_symmetric_products(
        mat1: np.ndarray,
        mat2: np.ndarray,
        *,
        rtol: float = 1e-5,
        atol: float = 1e-8,
        check_preconditions: bool = True) -> Tuple[np.ndarray, np.ndarray]:
    """Finds orthogonal matrices that diagonalize both mat1 and mat2.

    Requires mat1 and mat2 to be real.
    Requires mat1.T @ mat2 to be symmetric.
    Requires mat1 @ mat2.T to be symmetric.

    Args:
        mat1: One of the real matrices.
        mat2: The other real matrix.
        rtol: Relative numeric error threshold.
        atol: Absolute numeric error threshold.
        check_preconditions: If set, verifies that the inputs are real, and that
            mat1.T @ mat2 and mat1 @ mat2.T are both symmetric. Defaults to set.

    Returns:
        A tuple (L, R) of two orthogonal matrices, such that both L @ mat1 @ R
        and L @ mat2 @ R are diagonal matrices.

    Raises:
        ValueError: Matrices don't meet preconditions (e.g. not real).
    """
    if check_preconditions:
        if np.any(np.imag(mat1) != 0):
            raise ValueError('mat1 must be real.')
        if np.any(np.imag(mat2) != 0):
            raise ValueError('mat2 must be real.')
        if not predicates.is_hermitian(mat1.dot(mat2.T), rtol=rtol, atol=atol):
            raise ValueError('mat1 @ mat2.T must be symmetric.')
        if not predicates.is_hermitian(mat1.T.dot(mat2), rtol=rtol, atol=atol):
            raise ValueError('mat1.T @ mat2 must be symmetric.')

    # Use SVD to bi-diagonalize the first matrix.
    base_left, base_diag, base_right = _svd_handling_empty(np.real(mat1))
    # Promote the singular-value vector returned by the SVD to a square
    # diagonal matrix so it can be sliced/compared entrywise below.
    base_diag = np.diag(base_diag)

    # Determine where we switch between diagonalization-fixup strategies.
    # rank counts the leading non-negligible singular values; trailing
    # near-zero entries (within atol) are treated as exact zeros.
    dim = base_diag.shape[0]
    rank = dim
    while rank > 0 and tolerance.all_near_zero(
            base_diag[rank - 1, rank - 1], atol=atol):
        rank -= 1
    base_diag = base_diag[:rank, :rank]

    # Try diagonalizing the second matrix with the same factors as the first.
    semi_corrected = base_left.T.dot(np.real(mat2)).dot(base_right.T)

    # Fix up the part of the second matrix's diagonalization that's matched
    # against non-zero diagonal entries in the first matrix's diagonalization
    # by performing simultaneous diagonalization.
    overlap = semi_corrected[:rank, :rank]
    overlap_adjust = diagonalize_real_symmetric_and_sorted_diagonal_matrices(
        overlap, base_diag, rtol=rtol, atol=atol,
        check_preconditions=check_preconditions)

    # Fix up the part of the second matrix's diagonalization that's matched
    # against zeros in the first matrix's diagonalization by performing an SVD.
    extra = semi_corrected[rank:, rank:]
    extra_left_adjust, _, extra_right_adjust = _svd_handling_empty(extra)

    # Merge the fixup factors into the initial diagonalization.
    # NOTE(review): block_diag composes the rank-block fix with the
    # zero-block fix; the transposes invert the orthogonal factors.
    left_adjust = combinators.block_diag(overlap_adjust, extra_left_adjust)
    right_adjust = combinators.block_diag(overlap_adjust.T, extra_right_adjust)
    left = left_adjust.T.dot(base_left.T)
    right = base_right.T.dot(right_adjust.T)

    return left, right
def function[bidiagonalize_real_matrix_pair_with_symmetric_products, parameter[mat1, mat2]]: constant[Finds orthogonal matrices that diagonalize both mat1 and mat2. Requires mat1 and mat2 to be real. Requires mat1.T @ mat2 to be symmetric. Requires mat1 @ mat2.T to be symmetric. Args: mat1: One of the real matrices. mat2: The other real matrix. rtol: Relative numeric error threshold. atol: Absolute numeric error threshold. check_preconditions: If set, verifies that the inputs are real, and that mat1.T @ mat2 and mat1 @ mat2.T are both symmetric. Defaults to set. Returns: A tuple (L, R) of two orthogonal matrices, such that both L @ mat1 @ R and L @ mat2 @ R are diagonal matrices. Raises: ValueError: Matrices don't meet preconditions (e.g. not real). ] if name[check_preconditions] begin[:] if call[name[np].any, parameter[compare[call[name[np].imag, parameter[name[mat1]]] not_equal[!=] constant[0]]]] begin[:] <ast.Raise object at 0x7da1b1f48610> if call[name[np].any, parameter[compare[call[name[np].imag, parameter[name[mat2]]] not_equal[!=] constant[0]]]] begin[:] <ast.Raise object at 0x7da1b1f4a4a0> if <ast.UnaryOp object at 0x7da1b1f49d20> begin[:] <ast.Raise object at 0x7da1b1f49600> if <ast.UnaryOp object at 0x7da1b1f48f10> begin[:] <ast.Raise object at 0x7da1b1f4bd30> <ast.Tuple object at 0x7da1b1f49030> assign[=] call[name[_svd_handling_empty], parameter[call[name[np].real, parameter[name[mat1]]]]] variable[base_diag] assign[=] call[name[np].diag, parameter[name[base_diag]]] variable[dim] assign[=] call[name[base_diag].shape][constant[0]] variable[rank] assign[=] name[dim] while <ast.BoolOp object at 0x7da1b1f49630> begin[:] <ast.AugAssign object at 0x7da1b1c99c60> variable[base_diag] assign[=] call[name[base_diag]][tuple[[<ast.Slice object at 0x7da1b1c9a320>, <ast.Slice object at 0x7da1b1c9a560>]]] variable[semi_corrected] assign[=] call[call[name[base_left].T.dot, parameter[call[name[np].real, parameter[name[mat2]]]]].dot, parameter[name[base_right].T]] 
variable[overlap] assign[=] call[name[semi_corrected]][tuple[[<ast.Slice object at 0x7da1b1c3e470>, <ast.Slice object at 0x7da1b1c3e920>]]] variable[overlap_adjust] assign[=] call[name[diagonalize_real_symmetric_and_sorted_diagonal_matrices], parameter[name[overlap], name[base_diag]]] variable[extra] assign[=] call[name[semi_corrected]][tuple[[<ast.Slice object at 0x7da1b1ce7a00>, <ast.Slice object at 0x7da1b1ce7a90>]]] <ast.Tuple object at 0x7da1b1ce4ca0> assign[=] call[name[_svd_handling_empty], parameter[name[extra]]] variable[left_adjust] assign[=] call[name[combinators].block_diag, parameter[name[overlap_adjust], name[extra_left_adjust]]] variable[right_adjust] assign[=] call[name[combinators].block_diag, parameter[name[overlap_adjust].T, name[extra_right_adjust]]] variable[left] assign[=] call[name[left_adjust].T.dot, parameter[name[base_left].T]] variable[right] assign[=] call[name[base_right].T.dot, parameter[name[right_adjust].T]] return[tuple[[<ast.Name object at 0x7da1b1ce6950>, <ast.Name object at 0x7da1b1ce6740>]]]
keyword[def] identifier[bidiagonalize_real_matrix_pair_with_symmetric_products] ( identifier[mat1] : identifier[np] . identifier[ndarray] , identifier[mat2] : identifier[np] . identifier[ndarray] , *, identifier[rtol] : identifier[float] = literal[int] , identifier[atol] : identifier[float] = literal[int] , identifier[check_preconditions] : identifier[bool] = keyword[True] )-> identifier[Tuple] [ identifier[np] . identifier[ndarray] , identifier[np] . identifier[ndarray] ]: literal[string] keyword[if] identifier[check_preconditions] : keyword[if] identifier[np] . identifier[any] ( identifier[np] . identifier[imag] ( identifier[mat1] )!= literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[np] . identifier[any] ( identifier[np] . identifier[imag] ( identifier[mat2] )!= literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[predicates] . identifier[is_hermitian] ( identifier[mat1] . identifier[dot] ( identifier[mat2] . identifier[T] ), identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[predicates] . identifier[is_hermitian] ( identifier[mat1] . identifier[T] . identifier[dot] ( identifier[mat2] ), identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[base_left] , identifier[base_diag] , identifier[base_right] = identifier[_svd_handling_empty] ( identifier[np] . identifier[real] ( identifier[mat1] )) identifier[base_diag] = identifier[np] . identifier[diag] ( identifier[base_diag] ) identifier[dim] = identifier[base_diag] . identifier[shape] [ literal[int] ] identifier[rank] = identifier[dim] keyword[while] identifier[rank] > literal[int] keyword[and] identifier[tolerance] . 
identifier[all_near_zero] ( identifier[base_diag] [ identifier[rank] - literal[int] , identifier[rank] - literal[int] ], identifier[atol] = identifier[atol] ): identifier[rank] -= literal[int] identifier[base_diag] = identifier[base_diag] [: identifier[rank] ,: identifier[rank] ] identifier[semi_corrected] = identifier[base_left] . identifier[T] . identifier[dot] ( identifier[np] . identifier[real] ( identifier[mat2] )). identifier[dot] ( identifier[base_right] . identifier[T] ) identifier[overlap] = identifier[semi_corrected] [: identifier[rank] ,: identifier[rank] ] identifier[overlap_adjust] = identifier[diagonalize_real_symmetric_and_sorted_diagonal_matrices] ( identifier[overlap] , identifier[base_diag] , identifier[rtol] = identifier[rtol] , identifier[atol] = identifier[atol] , identifier[check_preconditions] = identifier[check_preconditions] ) identifier[extra] = identifier[semi_corrected] [ identifier[rank] :, identifier[rank] :] identifier[extra_left_adjust] , identifier[_] , identifier[extra_right_adjust] = identifier[_svd_handling_empty] ( identifier[extra] ) identifier[left_adjust] = identifier[combinators] . identifier[block_diag] ( identifier[overlap_adjust] , identifier[extra_left_adjust] ) identifier[right_adjust] = identifier[combinators] . identifier[block_diag] ( identifier[overlap_adjust] . identifier[T] , identifier[extra_right_adjust] ) identifier[left] = identifier[left_adjust] . identifier[T] . identifier[dot] ( identifier[base_left] . identifier[T] ) identifier[right] = identifier[base_right] . identifier[T] . identifier[dot] ( identifier[right_adjust] . identifier[T] ) keyword[return] identifier[left] , identifier[right]
def bidiagonalize_real_matrix_pair_with_symmetric_products(mat1: np.ndarray, mat2: np.ndarray, *, rtol: float=1e-05, atol: float=1e-08, check_preconditions: bool=True) -> Tuple[np.ndarray, np.ndarray]: """Finds orthogonal matrices that diagonalize both mat1 and mat2. Requires mat1 and mat2 to be real. Requires mat1.T @ mat2 to be symmetric. Requires mat1 @ mat2.T to be symmetric. Args: mat1: One of the real matrices. mat2: The other real matrix. rtol: Relative numeric error threshold. atol: Absolute numeric error threshold. check_preconditions: If set, verifies that the inputs are real, and that mat1.T @ mat2 and mat1 @ mat2.T are both symmetric. Defaults to set. Returns: A tuple (L, R) of two orthogonal matrices, such that both L @ mat1 @ R and L @ mat2 @ R are diagonal matrices. Raises: ValueError: Matrices don't meet preconditions (e.g. not real). """ if check_preconditions: if np.any(np.imag(mat1) != 0): raise ValueError('mat1 must be real.') # depends on [control=['if'], data=[]] if np.any(np.imag(mat2) != 0): raise ValueError('mat2 must be real.') # depends on [control=['if'], data=[]] if not predicates.is_hermitian(mat1.dot(mat2.T), rtol=rtol, atol=atol): raise ValueError('mat1 @ mat2.T must be symmetric.') # depends on [control=['if'], data=[]] if not predicates.is_hermitian(mat1.T.dot(mat2), rtol=rtol, atol=atol): raise ValueError('mat1.T @ mat2 must be symmetric.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Use SVD to bi-diagonalize the first matrix. (base_left, base_diag, base_right) = _svd_handling_empty(np.real(mat1)) base_diag = np.diag(base_diag) # Determine where we switch between diagonalization-fixup strategies. dim = base_diag.shape[0] rank = dim while rank > 0 and tolerance.all_near_zero(base_diag[rank - 1, rank - 1], atol=atol): rank -= 1 # depends on [control=['while'], data=[]] base_diag = base_diag[:rank, :rank] # Try diagonalizing the second matrix with the same factors as the first. 
semi_corrected = base_left.T.dot(np.real(mat2)).dot(base_right.T) # Fix up the part of the second matrix's diagonalization that's matched # against non-zero diagonal entries in the first matrix's diagonalization # by performing simultaneous diagonalization. overlap = semi_corrected[:rank, :rank] overlap_adjust = diagonalize_real_symmetric_and_sorted_diagonal_matrices(overlap, base_diag, rtol=rtol, atol=atol, check_preconditions=check_preconditions) # Fix up the part of the second matrix's diagonalization that's matched # against zeros in the first matrix's diagonalization by performing an SVD. extra = semi_corrected[rank:, rank:] (extra_left_adjust, _, extra_right_adjust) = _svd_handling_empty(extra) # Merge the fixup factors into the initial diagonalization. left_adjust = combinators.block_diag(overlap_adjust, extra_left_adjust) right_adjust = combinators.block_diag(overlap_adjust.T, extra_right_adjust) left = left_adjust.T.dot(base_left.T) right = base_right.T.dot(right_adjust.T) return (left, right)
def mmGetPlotStability(self, title="Stability", showReset=False, resetShading=0.25):
    """
    Returns plot of the overlap metric between union SDRs within a sequence.

    @param title an optional title for the figure

    @return (Plot) plot
    """
    # NOTE(review): showReset / resetShading are accepted for API parity
    # with sibling plot helpers but are not used by this method.
    figure = Plot(self, title)

    # Ensure the per-timestep overlap data has been computed.
    self._mmComputeSequenceRepresentationData()
    overlaps = self._mmData["stabilityConfusion"]

    # Top panel: overlap values sorted in descending order over time steps.
    figure.addGraph(sorted(overlaps, reverse=True),
                    position=211,
                    xlabel="Time steps", ylabel="Overlap")

    # Bottom panel: histogram of the same overlap values.
    figure.addHistogram(overlaps,
                        position=212,
                        bins=100,
                        xlabel="Overlap", ylabel="# time steps")

    return figure
def function[mmGetPlotStability, parameter[self, title, showReset, resetShading]]: constant[ Returns plot of the overlap metric between union SDRs within a sequence. @param title an optional title for the figure @return (Plot) plot ] variable[plot] assign[=] call[name[Plot], parameter[name[self], name[title]]] call[name[self]._mmComputeSequenceRepresentationData, parameter[]] variable[data] assign[=] call[name[self]._mmData][constant[stabilityConfusion]] call[name[plot].addGraph, parameter[call[name[sorted], parameter[name[data]]]]] call[name[plot].addHistogram, parameter[name[data]]] return[name[plot]]
keyword[def] identifier[mmGetPlotStability] ( identifier[self] , identifier[title] = literal[string] , identifier[showReset] = keyword[False] , identifier[resetShading] = literal[int] ): literal[string] identifier[plot] = identifier[Plot] ( identifier[self] , identifier[title] ) identifier[self] . identifier[_mmComputeSequenceRepresentationData] () identifier[data] = identifier[self] . identifier[_mmData] [ literal[string] ] identifier[plot] . identifier[addGraph] ( identifier[sorted] ( identifier[data] , identifier[reverse] = keyword[True] ), identifier[position] = literal[int] , identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] ) identifier[plot] . identifier[addHistogram] ( identifier[data] , identifier[position] = literal[int] , identifier[bins] = literal[int] , identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] ) keyword[return] identifier[plot]
def mmGetPlotStability(self, title='Stability', showReset=False, resetShading=0.25): """ Returns plot of the overlap metric between union SDRs within a sequence. @param title an optional title for the figure @return (Plot) plot """ plot = Plot(self, title) self._mmComputeSequenceRepresentationData() data = self._mmData['stabilityConfusion'] plot.addGraph(sorted(data, reverse=True), position=211, xlabel='Time steps', ylabel='Overlap') plot.addHistogram(data, position=212, bins=100, xlabel='Overlap', ylabel='# time steps') return plot
def r_value(means, variances, n, approx=False):
    '''Calculate the Gelman-Rubin R value (Chapter 2.2 in [GR92]_).

    The R value can be used to quantify mixing of "multiple iterative
    simulations" (e.g. Markov Chains) in parameter space. An R value
    "close to one" indicates that all chains explored the same region of
    the parameter.

    .. note::

        The R value is defined only in *one* dimension.

    :param means:

        Vector-like array; the sample mean of each chain.

    :param variances:

        Vector-like array; the sample variance of each chain.

    :param n:

        Integer; the number of samples per chain.

    :param approx:

        Bool; if True, return the cheaper estimate ``var_estimate / W``
        without the degrees-of-freedom correction.

    '''
    # Notation follows [GR92]: means is \bar{x}_i, variances is s_i^2.
    chain_means = _np.asarray(means)
    chain_variances = _np.asarray(variances)

    assert chain_means.ndim == 1, '``means`` must be vector-like'
    assert chain_variances.ndim == 1, '``variances`` must be vector-like'
    assert len(chain_means) == len(chain_variances), \
        'Number of ``means`` (%i) does not match number of ``variances`` (%i)' %( len(chain_means), len(chain_variances) )

    num_chains = len(chain_means)

    grand_mean = _np.average(chain_means)
    # Between-chain variance divided by n (B/n in [GR92]).
    between_over_n = ((chain_means - grand_mean)**2).sum() / (num_chains - 1)
    # Mean within-chain variance (W in [GR92]).
    within = _np.average(chain_variances)

    # sigma_hat_sq is \hat{\sigma}^2
    sigma_hat_sq = (n - 1) / n * within + between_over_n

    if approx:
        return sigma_hat_sq / within

    v_hat = sigma_hat_sq + between_over_n / num_chains

    # Compute the three terms of \hat{var}(\hat{V}) (equation (4) in [GR92]).
    cov_matrix = _np.cov(chain_variances, chain_means)
    # third term
    var_v_hat = _np.cov(chain_variances, chain_means**2)[1,0] - 2. * grand_mean * cov_matrix[1,0]
    var_v_hat *= 2. * (num_chains + 1) * (n - 1) / (num_chains * num_chains * n)
    # second term (merged n in denominator into ``between_over_n``)
    var_v_hat += ((num_chains + 1) / num_chains)**2 * 2. / (num_chains - 1) * between_over_n * between_over_n
    # first term
    var_v_hat += ((n - 1) / n)**2 / num_chains * cov_matrix[0,0]

    dof = 2. * v_hat**2 / var_v_hat

    # The df/(df-2) correction diverges for dof <= 2.
    if dof <= 2.:
        return _np.inf

    return v_hat / within * dof / (dof - 2)
def function[r_value, parameter[means, variances, n, approx]]: constant[Calculate the Gelman-Rubin R value (Chapter 2.2 in [GR92]_). The R value can be used to quantify mixing of "multiple iterative simulations" (e.g. Markov Chains) in parameter space. An R value "close to one" indicates that all chains explored the same region of the parameter. .. note:: The R value is defined only in *one* dimension. :param means: Vector-like array; the sample mean of each chain. :param variances: Vector-like array; the sample variance of each chain. ] variable[means] assign[=] call[name[_np].asarray, parameter[name[means]]] variable[variances] assign[=] call[name[_np].asarray, parameter[name[variances]]] assert[compare[name[means].ndim equal[==] constant[1]]] assert[compare[name[variances].ndim equal[==] constant[1]]] assert[compare[call[name[len], parameter[name[means]]] equal[==] call[name[len], parameter[name[variances]]]]] variable[m] assign[=] call[name[len], parameter[name[means]]] variable[x_bar] assign[=] call[name[_np].average, parameter[name[means]]] variable[B_over_n] assign[=] binary_operation[call[binary_operation[binary_operation[name[means] - name[x_bar]] ** constant[2]].sum, parameter[]] / binary_operation[name[m] - constant[1]]] variable[W] assign[=] call[name[_np].average, parameter[name[variances]]] variable[var_estimate] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[n] - constant[1]] / name[n]] * name[W]] + name[B_over_n]] if name[approx] begin[:] return[binary_operation[name[var_estimate] / name[W]]] variable[V] assign[=] binary_operation[name[var_estimate] + binary_operation[name[B_over_n] / name[m]]] variable[tmp_cov_matrix] assign[=] call[name[_np].cov, parameter[name[variances], name[means]]] variable[var_V] assign[=] binary_operation[call[call[name[_np].cov, parameter[name[variances], binary_operation[name[means] ** constant[2]]]]][tuple[[<ast.Constant object at 0x7da20c7c9570>, <ast.Constant object at 
0x7da20c7c9de0>]]] - binary_operation[binary_operation[constant[2.0] * name[x_bar]] * call[name[tmp_cov_matrix]][tuple[[<ast.Constant object at 0x7da20c7cac50>, <ast.Constant object at 0x7da20c7cb610>]]]]] <ast.AugAssign object at 0x7da18c4cc1f0> <ast.AugAssign object at 0x7da18c4cfe50> <ast.AugAssign object at 0x7da18c4cf730> variable[df] assign[=] binary_operation[binary_operation[constant[2.0] * binary_operation[name[V] ** constant[2]]] / name[var_V]] if compare[name[df] less_or_equal[<=] constant[2.0]] begin[:] return[name[_np].inf] return[binary_operation[binary_operation[binary_operation[name[V] / name[W]] * name[df]] / binary_operation[name[df] - constant[2]]]]
keyword[def] identifier[r_value] ( identifier[means] , identifier[variances] , identifier[n] , identifier[approx] = keyword[False] ): literal[string] identifier[means] = identifier[_np] . identifier[asarray] ( identifier[means] ) identifier[variances] = identifier[_np] . identifier[asarray] ( identifier[variances] ) keyword[assert] identifier[means] . identifier[ndim] == literal[int] , literal[string] keyword[assert] identifier[variances] . identifier[ndim] == literal[int] , literal[string] keyword[assert] identifier[len] ( identifier[means] )== identifier[len] ( identifier[variances] ), literal[string] %( identifier[len] ( identifier[means] ), identifier[len] ( identifier[variances] )) identifier[m] = identifier[len] ( identifier[means] ) identifier[x_bar] = identifier[_np] . identifier[average] ( identifier[means] ) identifier[B_over_n] =(( identifier[means] - identifier[x_bar] )** literal[int] ). identifier[sum] ()/( identifier[m] - literal[int] ) identifier[W] = identifier[_np] . identifier[average] ( identifier[variances] ) identifier[var_estimate] =( identifier[n] - literal[int] )/ identifier[n] * identifier[W] + identifier[B_over_n] keyword[if] identifier[approx] : keyword[return] identifier[var_estimate] / identifier[W] identifier[V] = identifier[var_estimate] + identifier[B_over_n] / identifier[m] identifier[tmp_cov_matrix] = identifier[_np] . identifier[cov] ( identifier[variances] , identifier[means] ) identifier[var_V] = identifier[_np] . 
identifier[cov] ( identifier[variances] , identifier[means] ** literal[int] )[ literal[int] , literal[int] ]- literal[int] * identifier[x_bar] * identifier[tmp_cov_matrix] [ literal[int] , literal[int] ] identifier[var_V] *= literal[int] *( identifier[m] + literal[int] )*( identifier[n] - literal[int] )/( identifier[m] * identifier[m] * identifier[n] ) identifier[var_V] +=(( identifier[m] + literal[int] )/ identifier[m] )** literal[int] * literal[int] /( identifier[m] - literal[int] )* identifier[B_over_n] * identifier[B_over_n] identifier[var_V] +=(( identifier[n] - literal[int] )/ identifier[n] )** literal[int] / identifier[m] * identifier[tmp_cov_matrix] [ literal[int] , literal[int] ] identifier[df] = literal[int] * identifier[V] ** literal[int] / identifier[var_V] keyword[if] identifier[df] <= literal[int] : keyword[return] identifier[_np] . identifier[inf] keyword[return] identifier[V] / identifier[W] * identifier[df] /( identifier[df] - literal[int] )
def r_value(means, variances, n, approx=False): """Calculate the Gelman-Rubin R value (Chapter 2.2 in [GR92]_). The R value can be used to quantify mixing of "multiple iterative simulations" (e.g. Markov Chains) in parameter space. An R value "close to one" indicates that all chains explored the same region of the parameter. .. note:: The R value is defined only in *one* dimension. :param means: Vector-like array; the sample mean of each chain. :param variances: Vector-like array; the sample variance of each chain. """ # use same variable names as in [GR92] # means is \bar{x}_i # variances is s_i^2 means = _np.asarray(means) variances = _np.asarray(variances) assert means.ndim == 1, '``means`` must be vector-like' assert variances.ndim == 1, '``variances`` must be vector-like' assert len(means) == len(variances), 'Number of ``means`` (%i) does not match number of ``variances`` (%i)' % (len(means), len(variances)) m = len(means) x_bar = _np.average(means) B_over_n = ((means - x_bar) ** 2).sum() / (m - 1) W = _np.average(variances) # var_estimate is \hat{\sigma}^2 var_estimate = (n - 1) / n * W + B_over_n if approx: return var_estimate / W # depends on [control=['if'], data=[]] V = var_estimate + B_over_n / m # calculate the three terms of \hat{var}(\hat{V}) (equation (4) in [GR92] # var_V is \hat{var}(\hat{V}) tmp_cov_matrix = _np.cov(variances, means) # third term var_V = _np.cov(variances, means ** 2)[1, 0] - 2.0 * x_bar * tmp_cov_matrix[1, 0] var_V *= 2.0 * (m + 1) * (n - 1) / (m * m * n) # second term (merged n in denominator into ``B_over_n``) var_V += ((m + 1) / m) ** 2 * 2.0 / (m - 1) * B_over_n * B_over_n # first term var_V += ((n - 1) / n) ** 2 / m * tmp_cov_matrix[0, 0] df = 2.0 * V ** 2 / var_V if df <= 2.0: return _np.inf # depends on [control=['if'], data=[]] return V / W * df / (df - 2)
def sfilter(self, source):
    """Execute filter.

    Wrap the raw source in a single plain-text ``SourceText`` entry.
    """
    wrapped = SourceText(source.text, source.context, source.encoding, 'text')
    return [wrapped]
def function[sfilter, parameter[self, source]]: constant[Execute filter.] return[list[[<ast.Call object at 0x7da18eb54e80>]]]
keyword[def] identifier[sfilter] ( identifier[self] , identifier[source] ): literal[string] keyword[return] [ identifier[SourceText] ( identifier[source] . identifier[text] , identifier[source] . identifier[context] , identifier[source] . identifier[encoding] , literal[string] )]
def sfilter(self, source): """Execute filter.""" return [SourceText(source.text, source.context, source.encoding, 'text')]