code (string, lengths 51–2.34k) · docstring (string, lengths 11–171)
def consume_normals(self): while True: yield ( float(self.values[1]), float(self.values[2]), float(self.values[3]), ) try: self.next_line() except StopIteration: break if n...
Consumes all consecutive normal lines
def read_reg(self, addr):
    val, data = self.command(self.ESP_READ_REG, struct.pack('<I', addr))
    if byte(data, 0) != 0:
        raise FatalError.WithResult("Failed to read register address %08x" % addr, data)
    return val
Read memory address in target
def refresh_interval(self, refresh_interval):
    if isinstance(refresh_interval, int) and refresh_interval > 0:
        self._refresh_interval = refresh_interval
    else:
        self._refresh_interval = None
Set the new cache refresh interval
def safe_int(value): try: result = int(value) if result < 0: raise NegativeDurationError( 'Negative values in duration strings are not allowed!' ) except NegativeDurationError as exc: raise exc except (TypeError, ValueError): result = 0...
Tries to convert a value to int; returns 0 if conversion failed
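The snippet above is cut off after the fallback assignment; a minimal runnable sketch of the same pattern, assuming NegativeDurationError is a plain Exception subclass and that the function ends by returning result (both are assumptions, since that part is truncated):

class NegativeDurationError(Exception):
    pass

def safe_int(value):
    # Convert to int; unparsable input becomes 0, negative values raise.
    try:
        result = int(value)
        if result < 0:
            raise NegativeDurationError(
                'Negative values in duration strings are not allowed!')
    except NegativeDurationError:
        raise
    except (TypeError, ValueError):
        result = 0
    return result

assert safe_int('42') == 42
assert safe_int('abc') == 0  # conversion failed, falls back to 0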
def reset(project, user): d = Project.path(project, user) + "output" if os.path.isdir(d): shutil.rmtree(d) os.makedirs(d) else: raise flask.abort(404) if os.path.exists(Project.path(project, user) + ".done"): os.unlink(Project.path(project,...
Reset system, delete all output files and prepare for a new run
def getBoundsColor(self, nNumOutputColors, flCollisionBoundsFadeDistance): fn = self.function_table.getBoundsColor pOutputColorArray = HmdColor_t() pOutputCameraColor = HmdColor_t() fn(byref(pOutputColorArray), nNumOutputColors, flCollisionBoundsFadeDistance, byref(pOutputCameraColor)) ...
Get the current chaperone bounds draw color and brightness
def stem_leaf_plot(data, vmin, vmax, bins, digit=1, title=None): assert bins > 0 range = vmax - vmin step = range * 1. / bins if isinstance(range, int): step = int(ceil(step)) step = step or 1 bins = np.arange(vmin, vmax + step, step) hist, bin_edges = np.histogram(data, bins=bins) ...
Generate stem and leaf plot given a collection of numbers
def from_bucket(cls, connection, bucket):
    if bucket is None:
        raise errors.NoContainerException
    return cls(connection, bucket.name)
Create from bucket object.
def token_permission_view(token):
    scopes = [current_oauth2server.scopes[x] for x in token.scopes]
    return render_template(
        "invenio_oauth2server/settings/token_permission_view.html",
        token=token,
        scopes=scopes,
    )
Show permission granted to authorized application token.
def unpack(cls, msg):
    flags, cursor_id, _, number_returned = cls.UNPACK_FROM(msg)
    documents = bytes(msg[20:])
    return cls(flags, cursor_id, number_returned, documents)
Construct an _OpReply from raw bytes.
def parse_headers(self, req, name, field):
    return req.get_header(name, required=False) or core.missing
Pull a header value from the request.
def find_spectrum_match(spec, spec_lib, method='euclidian'): spec = spec / np.max(spec) if method == 'dot': d1 = (spec_lib * lil_matrix(spec).T).sum(axis=1).A ** 2 d2 = np.sum(spec ** 2) * spec_lib.multiply(spec_lib).sum(axis=1).A dist = d1 / d2 elif method == 'euclidian': st...
Find spectrum in spec_lib most similar to spec.
def execute(self): cluster_name = self.params.cluster creator = make_creator(self.params.config, storage_path=self.params.storage) try: cluster = creator.load_cluster(cluster_name) except (ClusterNotFound, ConfigurationError) as e: l...
Pause the cluster if it is running.
def njsd(network, ref_gene_expression_dict, query_gene_expression_dict, gene_set): gene_jsd_dict = dict() reference_genes = ref_gene_expression_dict.keys() assert len(reference_genes) != 0, 'Reference gene expression profile should have > 0 genes.' for gene in gene_set: if gene not in network....
Calculate Jensen-Shannon divergence between query and reference gene expression profile.
def reverse_reference(self):
    self.ref_start = self.ref_length - self.ref_start - 1
    self.ref_end = self.ref_length - self.ref_end - 1
Changes the coordinates as if the reference sequence has been reverse complemented
def aot_blpop(self): if self.tcex.default_args.tc_playbook_db_type == 'Redis': res = None try: self.tcex.log.info('Blocking for AOT message.') msg_data = self.db.blpop( self.tcex.default_args.tc_action_channel, timeo...
Subscribe to AOT action channel.
def clear_measurements(self):
    mid_list = self.assignments.get('measurements', None)
    if mid_list is not None:
        for mid in mid_list:
            self.configs.delete_measurements(mid=mid)
        self.assignments['measurements'] = None
Forget any previous measurements
def call(self, method, *args, **params): transaction_id = params.get("transaction_id") if not transaction_id: self.transaction_id += 1 transaction_id = self.transaction_id obj = params.get("obj") args = [method, transaction_id, obj] + list(args) args_encod...
Calls a method on the server.
def _add_numeric_methods_binary(cls): cls.__add__ = _make_arithmetic_op(operator.add, cls) cls.__radd__ = _make_arithmetic_op(ops.radd, cls) cls.__sub__ = _make_arithmetic_op(operator.sub, cls) cls.__rsub__ = _make_arithmetic_op(ops.rsub, cls) cls.__rpow__ = _make_arithmetic_op(o...
Add in numeric methods.
def _unpack(self, record, key, expected): attrs = record.get(key) if attrs is None: return obj = unpack_from_dynamodb( attrs=attrs, expected=expected, model=self.model, engine=self.engine ) object_loaded.send(self.engine...
Replaces the attr dict at the given key with an instance of a Model
def copy(self): other = ContextModel(self._context, self.parent()) other._stale = self._stale other._modified = self._modified other.request = self.request[:] other.packages_path = self.packages_path other.implicit_packages = self.implicit_packages other.package_f...
Returns a copy of the context.
def _write_to_file(self, filename, bytesvalue):
    fh, tmp = tempfile.mkstemp()
    with os.fdopen(fh, self._flag) as f:
        f.write(self._dumps(bytesvalue))
    rename(tmp, filename)
    os.chmod(filename, self._mode)
Write bytesvalue to filename.
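This is the write-to-temp-then-rename idiom: data lands in a temporary file that is renamed over the target, so readers never see a half-written file. A standalone sketch of the idiom (self._flag, self._dumps, and self._mode are instance details in the original; plain byte writes and a fixed mode are assumed here):

import os
import tempfile

def atomic_write(filename, data):
    # Create the temp file in the target directory so the rename stays on one filesystem.
    fh, tmp = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(filename)))
    with os.fdopen(fh, 'wb') as f:
        f.write(data)
    os.replace(tmp, filename)  # atomic on POSIX
    os.chmod(filename, 0o600)

Creating the temp file in the same directory matters: a cross-filesystem rename is not atomic.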
def owned_ecs(self):
    with self._mutex:
        if not self._owned_ecs:
            self._owned_ecs = [ExecutionContext(ec, self._obj.get_context_handle(ec))
                               for ec in self._obj.get_owned_contexts()]
        return self._owned_ecs
A list of the execution contexts owned by this component.
def list_zip(archive, compression, cmd, verbosity, interactive): try: with zipfile.ZipFile(archive, "r") as zfile: for name in zfile.namelist(): if verbosity >= 0: print(name) except Exception as err: msg = "error listing %s: %s" % (archive, err) ...
List members of a ZIP archive with the zipfile Python module.
def _path_to_be_kept(self, path): if self.excludes and (path in self.excludes or helpers.is_inside_any(self.excludes, path)): return False if self.includes: return (path in self.includes or helpers.is_inside_any(self.includes, path)) return...
Does the given path pass the filtering criteria?
def error_handler(_, event):
    evt = event.contents
    ERROR.details = {
        "type": evt.type,
        "serial": evt.serial,
        "error_code": evt.error_code,
        "request_code": evt.request_code,
        "minor_code": evt.minor_code,
    }
    return 0
Specifies the program's supplied error handler.
def upload_artifacts(self): deploy_strategy = self.properties["deploy_strategy"] mirror = False if deploy_strategy == "mirror": mirror = True self._upload_artifacts_to_path(mirror=mirror) if deploy_strategy == "highlander": self._sync_to_uri(self.s3_latest...
Upload artifacts to S3 and copy to correct path depending on strategy.
def all():
    dir()
    cmd3()
    banner("CLEAN PREVIOUS CLOUDMESH INSTALLS")
    r = int(local("pip freeze |fgrep cloudmesh | wc -l", capture=True))
    while r > 0:
        local('echo "y\n" | pip uninstall cloudmesh')
        r = int(local("pip freeze |fgrep cloudmesh | wc -l", capture=True))
clean the dist and uninstall cloudmesh
def _from_dict(cls, _dict): args = {} if 'generic' in _dict: args['generic'] = [ DialogRuntimeResponseGeneric._from_dict(x) for x in (_dict.get('generic')) ] if 'intents' in _dict: args['intents'] = [ RuntimeInte...
Initialize a MessageOutput object from a json dictionary.
def execute(self, conn, site_name= "", transaction = False): sql = self.sql if site_name == "": result = self.dbi.processData(sql, conn=conn, transaction=transaction) else: sql += "WHERE S.SITE_NAME = :site_name" binds = { "site_name" : site_name } ...
Lists all site types if site_name is not provided.
def take_function_register(self, rtype = SharedData.TYPES.NO_TYPE): reg = SharedData.FUNCTION_REGISTER if reg not in self.free_registers: self.error("function register already taken") self.free_registers.remove(reg) self.used_registers.append(reg) self.symtab.se...
Reserves register for function return value and sets its type
def word_to_vector(word):
    vector = []
    for char in list(word):
        vector.append(char2int(char))
    return vector
Convert a word to a vector of integers, one per character.
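char2int is not shown; a common choice is an alphabet index, which is assumed in this usage sketch (the mapping is illustrative, not the source's):

def char2int(char):
    # Hypothetical stand-in: map 'a'..'z' to 0..25.
    return ord(char) - ord('a')

def word_to_vector(word):
    return [char2int(char) for char in word]

print(word_to_vector('cab'))  # [2, 0, 1]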
def _detect(self): results = [] for c in self.contracts: unindexed_params = self.detect_erc20_unindexed_event_params(c) if unindexed_params: info = "{} ({}) does not mark important ERC20 parameters as 'indexed':\n" info = info.format(c.name, c.sour...
Detect un-indexed ERC20 event parameters in all contracts.
def substitution_set(string, indexes):
    strlen = len(string)
    return {mutate_string(string, x) for x in indexes if valid_substitution(strlen, x)}
for a string, return a set of all possible substitutions
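mutate_string and valid_substitution are defined elsewhere; one plausible reading, used purely for illustration, is that each index marks a position replaced by a wildcard and valid_substitution bounds-checks the index:

def valid_substitution(strlen, index):
    # Hypothetical helper: accept only in-range positions.
    return 0 <= index < strlen

def mutate_string(string, index):
    # Hypothetical helper: replace one position with a wildcard.
    return string[:index] + '_' + string[index + 1:]

def substitution_set(string, indexes):
    strlen = len(string)
    return {mutate_string(string, x) for x in indexes if valid_substitution(strlen, x)}

print(substitution_set('cat', [0, 2, 9]))  # {'_at', 'ca_'}; index 9 is filtered out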
def emit_node(self, node):
    emit = getattr(self, "%s_emit" % node.kind, self.default_emit)
    return emit(node)
Emit a single node.
def _get_cibfile_tmp(cibname):
    cibfile_tmp = '{0}.tmp'.format(_get_cibfile(cibname))
    log.trace('cibfile_tmp: %s', cibfile_tmp)
    return cibfile_tmp
Get the full path of a temporary CIB-file with the name of the CIB
def removeDuplicates(inFileName, outFileName):
    f = open(inFileName)
    legend = f.readline()
    data = ''
    h = {}
    h[legend] = 0
    lines = f.readlines()
    for l in lines:
        if l not in h:  # was h.has_key(l), which was removed in Python 3
            h[l] = 0
            data += l
    f.close()
    f = open(outFileName, 'w')
    f.write(legend + data)
    f.flush()
    f.close()
removes duplicated lines from the 'inFileName' CSV file; the results are written to 'outFileName'
def _get_struct_matrix(self): obj = _make_object("Matrix") bc = BitConsumer(self._src) obj.HasScale = bc.u_get(1) if obj.HasScale: obj.NScaleBits = n_scale_bits = bc.u_get(5) obj.ScaleX = bc.fb_get(n_scale_bits) obj.ScaleY = bc.fb_get(n_scale_bits) ...
Get the values for the MATRIX record.
def _transform_legacy_stats(self, stats): if stats and 'pools' not in stats: pool = stats.copy() pool['pool_name'] = self.id for key in ('driver_version', 'shared_targets', 'sparse_copy_volume', 'storage_protocol', 'vendor_name'...
Convert legacy stats to new stats with pools key.
def generic_find_fk_constraint_names(table, columns, referenced, insp):
    names = set()
    for fk in insp.get_foreign_keys(table):
        if fk['referred_table'] == referenced and set(fk['referred_columns']) == columns:
            names.add(fk['name'])
    return names
Utility to find foreign-key constraint names in alembic migrations
def process_unset(line, annotations): matches = re.match('UNSET\s+"?(.*?)"?\s*$', line) if matches: val = matches.group(1) if val == "ALL" or val == "STATEMENT_GROUP": annotations = {} elif re.match("{", val): vals = convert_csv_str_to_list(val) for va...
Process UNSET lines in BEL Script
def __configure_client(self, config): self.logger.info("Configuring p4 client...") client_dict = config.to_dict() client_dict['root_path'] = os.path.expanduser(config.get('root_path')) os.chdir(client_dict['root_path']) client_dict['hostname'] = system.NODE client_dict['p...
Write the Perforce client configuration.
def copy_heroku_to_local(id):
    heroku_app = HerokuApp(dallinger_uid=id)
    try:
        subprocess.call(["dropdb", heroku_app.name])
    except Exception:
        pass
    heroku_app.pg_pull()
Copy a Heroku database locally.
def show_version(self):
    version_info = self.get_cli_version()
    version_info += self.get_runtime_version()
    print(version_info, file=self.out_file)
Print version information to the out file.
def send_hid_event(use_page, usage, down): message = create(protobuf.SEND_HID_EVENT_MESSAGE) event = message.inner() abstime = binascii.unhexlify(b'438922cf08020000') data = use_page.to_bytes(2, byteorder='big') data += usage.to_bytes(2, byteorder='big') data += (1 if down else 0).to_bytes(2, by...
Create a new SEND_HID_EVENT_MESSAGE.
def ripple_carry_add(A, B, cin=0): if len(A) != len(B): raise ValueError("expected A and B to be equal length") ss, cs = list(), list() for i, a in enumerate(A): c = (cin if i == 0 else cs[i-1]) ss.append(a ^ B[i] ^ c) cs.append(a & B[i] | a & c | B[i] & c) return farray(...
Return symbolic logic for an N-bit ripple carry adder.
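Each stage is a standard full adder: sum_i = a_i XOR b_i XOR c_i and carry_i = majority(a_i, b_i, c_i), with each stage's carry feeding the next. The original builds symbolic expressions and is cut off at the final farray(...) call; the same logic over plain 0/1 integers, as an illustrative sketch:

def ripple_carry_add(A, B, cin=0):
    # A and B are bit lists, least significant bit first.
    if len(A) != len(B):
        raise ValueError('expected A and B to be equal length')
    ss, cs = [], []
    for i, a in enumerate(A):
        c = cin if i == 0 else cs[i - 1]
        ss.append(a ^ B[i] ^ c)                 # sum bit
        cs.append(a & B[i] | a & c | B[i] & c)  # carry bit (majority)
    return ss, cs

ss, cs = ripple_carry_add([1, 1, 0], [1, 0, 0])  # 3 + 1
print(ss, cs[-1])  # [0, 0, 1] 0  ->  binary 100 = 4, no overflow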
def handle_ajax_request(self): func_arg = self.traverse_subpath[0] func_name = "ajax_{}".format(func_arg) func = getattr(self, func_name, None) if func is None: return self.fail("Invalid function", status=400) args = self.traverse_subpath[1:] func_sig = inspec...
Handle requests to ajax routes.
def _is_compress_filetype(self, inpath): if self._is_common_binary(inpath): return False elif self._is_common_text(inpath): return True else: the_file_size = file_size(inpath) if the_file_size > 10240: if the_file_size > 512000: ...
private method that performs magic-number and size checks on a file to determine whether to compress it
def buildfeed(request, feedclass, **criterias): 'View that handles the feeds.' view_data = initview(request) wrap = lambda func: ft.partial(func, _view_data=view_data, **criterias) return condition( etag_func=wrap(cache_etag), last_modified_func=wrap(cache_last_modified) )\ (_buildfeed)(request, feedclass, ...
View that handles the feeds.
def addBiosample(self): self._openRepo() dataset = self._repo.getDatasetByName(self._args.datasetName) biosample = bio_metadata.Biosample( dataset, self._args.biosampleName) biosample.populateFromJson(self._args.biosample) self._updateRepo(self._repo.insertBiosample, ...
Adds a new biosample into this repo
def cache_set(cache_dir, cache_key, content):
    filename = os.path.join(cache_dir, cache_key)
    with open(filename, 'w') as f:
        f.write(content)
Creates a new cache file in the cache directory
def _head_temp_file(self, temp_file, num_lines): if not isinstance(num_lines, int): raise DagobahError('num_lines must be an integer') temp_file.seek(0) result, curr_line = [], 0 for line in temp_file: curr_line += 1 result.append(line.strip()) ...
Returns a list of the first num_lines lines from a temp file.
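The snippet is truncated before its loop exit; a standalone sketch of the same head-of-file logic (the break condition and the plain ValueError are assumptions, since the original raises DagobahError):

def head_file(fileobj, num_lines):
    # Return the first num_lines stripped lines of an open file object.
    if not isinstance(num_lines, int):
        raise ValueError('num_lines must be an integer')
    fileobj.seek(0)
    result = []
    for curr_line, line in enumerate(fileobj, start=1):
        result.append(line.strip())
        if curr_line >= num_lines:
            break
    return result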
def getparent(self, profile):
    assert self.parent
    for inputtemplate in profile.input:
        if inputtemplate == self.parent:
            return inputtemplate
    raise Exception("Parent InputTemplate '" + self.parent + "' not found!")
Resolve a parent ID
def normalize(self):
    self.__v = self.__v - np.amin(self.__v)
    self.__v = self.__v / np.amax(self.__v)
Scales the potential to the range 0 to 1.
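This is min-max normalization: shift by the minimum, then divide by the new maximum so values land in [0, 1] (a constant array would divide by zero). A standalone sketch:

import numpy as np

def normalize(v):
    v = v - np.amin(v)
    return v / np.amax(v)

print(normalize(np.array([2.0, 4.0, 6.0])))  # [0.  0.5 1. ]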
def flush(self):
    self.log.info('Flushing tables and arrays to disk...')
    for tab in self._tables.values():
        tab.flush()
    self._write_ndarrays_cache_to_disk()
Flush tables and arrays to disk
def _stdlib_paths(): attr_candidates = [ 'prefix', 'real_prefix', 'base_prefix', ] prefixes = (getattr(sys, a) for a in attr_candidates if hasattr(sys, a)) version = 'python%s.%s' % sys.version_info[0:2] return set(os.path.abspath(os.path.join(p, 'lib', version)) ...
Return a set of paths from which Python imports the standard library.
def _get_raw(source, bitarray):
    offset = int(source['offset'])
    size = int(source['size'])
    return int(''.join(['1' if digit else '0' for digit in bitarray[offset:offset + size]]), 2)
Get raw data as integer, based on offset and size
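The bits are concatenated most-significant-first and parsed base 2. A self-contained sketch of the same decoding, with offset and size passed directly instead of via the source dict:

def get_raw(bitarray, offset, size):
    bits = bitarray[offset:offset + size]
    return int(''.join('1' if digit else '0' for digit in bits), 2)

print(get_raw([0, 1, 0, 1, 1, 0], 1, 4))  # bits 1011 -> 11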
def active_brokers(self):
    return {
        broker
        for broker in six.itervalues(self.brokers)
        if not broker.inactive and not broker.decommissioned
    }
Set of brokers that are not inactive or decommissioned.
def build_upstream_edge_predicate(nodes: Iterable[BaseEntity]) -> EdgePredicate:
    nodes = set(nodes)

    def upstream_filter(graph: BELGraph, u: BaseEntity, v: BaseEntity, k: str) -> bool:
        return v in nodes and graph[u][v][k][RELATION] in CAUSAL_RELATIONS

    return upstream_filter
Build an edge predicate that passes for relations for which one of the given nodes is the object.
def _start_dev_proc(self, device_os, device_config): log.info('Starting the child process for %s', device_os) dos = NapalmLogsDeviceProc(device_os, self.opts, device_config) os_p...
Start the device worker process.
def evaluate(self, model, threshold=0.1): with model: if self.medium is not None: self.medium.apply(model) if self.objective is not None: model.objective = self.objective model.add_cons_vars(self.constraints) threshold *= model.slim...
Evaluate in silico growth rates.
def _log_error_and_abort(ret, obj):
    ret['result'] = False
    ret['abort'] = True
    if 'error' in obj:
        ret['comment'] = '{0}'.format(obj.get('error'))
    return ret
helper function to update errors in the return structure
async def send(self, data):
    self.writer.write(data)
    await self.writer.drain()
Add data to send queue.
def load_file_to_list(self):
    lst = []
    try:
        with open(self.fullname, 'r') as f:
            for line in f:
                lst.append(line)
        return lst
    except IOError:
        return lst
load a file to a list
def run(ident):
    source = get_source(ident)
    cls = backends.get(current_app, source.backend)
    backend = cls(source)
    backend.harvest()
Launch or resume a harvest for a given source if none is running
def swap_across(idx, idy, mat_a, mat_r, perm): size = mat_a.shape[0] perm_new = numpy.eye(size, dtype=int) perm_row = 1.0*perm[:, idx] perm[:, idx] = perm[:, idy] perm[:, idy] = perm_row row_p = 1.0 * perm_new[idx] perm_new[idx] = perm_new[idy] perm_new[idy] = row_p mat_a = numpy.dot...
Interchange rows and columns idx and idy.
def logs(self): if self._resources is None: self.__init() if "logs" in self._resources: url = self._url + "/logs" return _logs.Log(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, ...
returns an object to work with the site logs
def make_links(self, project): self.doc = ford.utils.sub_links(self.doc,project) if 'summary' in self.meta: self.meta['summary'] = ford.utils.sub_links(self.meta['summary'],project) for item in self.iterator('variables', 'types', 'enums', 'modules', ...
Process intra-site links to documentation of other parts of the program.
def _create_date_slug(self):
    if not self.pk:
        d = utc_now()
    elif self.published and self.published_on:
        d = self.published_on
    elif self.updated_on:
        d = self.updated_on
    self.date_slug = u"{0}/{1}".format(d.strftime("%Y/%m/%d"), self.slug)
Prefixes the slug with the ``published_on`` date.
def _get_db_names(self): query = conn = self._connect(self.config['dbname']) cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cursor.execute(query) datnames = [d['datname'] for d in cursor.fetchall()] conn.close() if not datnames: datnam...
Try to get a list of db names
def compare_password(expected, actual): if expected == actual: return True, "OK" msg = [] ver_exp = expected[-8:].rstrip() ver_act = actual[-8:].rstrip() if expected[:-8] != actual[:-8]: msg.append("Password mismatch") if ver_exp != ver_act: msg.append("asterisk_mbox vers...
Compare two 64-byte encoded passwords.
def parse_declarations(self, declarations):
    declarations = self.declaration_re.findall(declarations)
    return dict(declarations)
parse a css declaration list
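declaration_re is defined elsewhere in the class; a pattern along these lines (an assumption, not the library's actual regex) yields the (property, value) pairs that dict() consumes:

import re

# Assumed pattern: property name, colon, value up to the next semicolon.
declaration_re = re.compile(r'\s*([a-zA-Z-]+)\s*:\s*([^;]+?)\s*(?:;|$)')

def parse_declarations(declarations):
    return dict(declaration_re.findall(declarations))

print(parse_declarations('color: red; font-size: 12px'))
# {'color': 'red', 'font-size': '12px'}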
def capture(command, input=None, cwd=None, shell=False, raiseOnError=False): proc = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, shell=shell, universal_newlines=True) (stdout, stderr) = proc.communicate(input) if raiseOnError == True and proc.returncode...
Executes a child process and captures its output
def insert_draft_child(self, child_pid): if child_pid.status != PIDStatus.RESERVED: raise PIDRelationConsistencyError( "Draft child should have status 'RESERVED'") if not self.draft_child: with db.session.begin_nested(): super(PIDNodeVersioning, se...
Insert a draft child to versioning.
def prune(manager: Manager):
    nodes_to_delete = [
        node
        for node in tqdm(manager.session.query(Node), total=manager.count_nodes())
        if not node.networks
    ]
    for node in nodes_to_delete:  # session.delete() takes a single instance, not a list
        manager.session.delete(node)
    manager.session.commit()
Prune nodes not belonging to any edges.
def def_linear(fun):
    defjvp_argnum(fun, lambda argnum, g, ans, args, kwargs:
                  fun(*subval(args, argnum, g), **kwargs))
Flags that a function is linear wrt all args
def check_url_does_not_exists(form, field):
    if field.data != field.object_data and Reuse.url_exists(field.data):
        raise validators.ValidationError(_('This URL is already registered'))
Ensure a reuse URL is not yet registered
def model_reaction_limits(model): for reaction in sorted(model.reactions, key=lambda r: r.id): equation = reaction.properties.get('equation') if equation is None: continue lower_default, upper_default = None, None if model.default_flux_limit is not None: if eq...
Yield model reaction limits as YAML dicts.
def open_links(self):
    if self._is_open:
        raise Exception('Already opened')
    try:
        self.parallel_safe(lambda scf: scf.open_link())
        self._is_open = True
    except Exception as e:
        self.close_links()
        raise e
Open links to all individuals in the swarm
def _dyn_loader(self, module: str, kwargs: str): package_directory: str = os.path.dirname(os.path.abspath(__file__)) modules: str = package_directory + "/modules" module = module + ".py" if module not in os.listdir(modules): raise Exception("Module %s is not valid" % module) ...
Dynamically load a specific module instance.
def handle_json_GET_routes(self, params):
    schedule = self.server.schedule
    result = []
    for r in schedule.GetRouteList():
        result.append((r.route_id, r.route_short_name, r.route_long_name))
    result.sort(key=lambda x: x[1:3])
    return result
Return a list of all routes.
def instantiate_database(sqlite_file='ftwj.sqlite'): table_name = 'ftw' col1 = 'rule_id' col1_t = 'INTEGER' col2 = 'test_id' col2_t = 'STRING' col3 = 'time_start' col3_t = 'TEXT' col4 = 'time_end' col4_t = 'TEXT' col5 = 'response_blob' col5_t = 'TEXT' col6 = 'status_code'...
Create journal database for FTW runs
def optimize_spot_bid(ctx, instance_type, spot_bid): spot_history = _get_spot_history(ctx, instance_type) if spot_history: _check_spot_bid(spot_bid, spot_history) zones = ctx.ec2.get_all_zones() most_stable_zone = choose_spot_zone(zones, spot_bid, spot_history) logger.debug("Placing spot ins...
Check whether the bid is sane and make an effort to place the instance in a sensible zone.
def floor_point(self):
    floor_point = self.centroid
    floor_point[1] = self.v[:, 1].min()
    return floor_point
Return the point on the floor that lies below the centroid.
def _add_unqualified_edge(self, source: Node, target: Node, key: str, bel: str, data: EdgeData) -> Edge:
    return self.get_or_create_edge(
        source=source,
        target=target,
        relation=data[RELATION],
        bel=bel,
        sha512=key,
        data=data,
    )
Add an unqualified edge to the network.
def build_package_data(self): for package, src_dir, build_dir, filenames in self.data_files: for filename in filenames: target = os.path.join(build_dir, filename) self.mkpath(os.path.dirname(target)) srcfile = os.path.join(src_dir, filename) ...
Copy data files into build directory
def _get_cookie_referrer_host(self):
    referer = self._original_request.fields.get('Referer')
    if referer:
        return URLInfo.parse(referer).hostname
    else:
        return None
Return the referrer hostname.
def pclink(self, parent, child):
    if parent._children is None:
        parent._children = set()
    if child._parents is None:
        child._parents = set()
    parent._children.add(child)
    child._parents.add(parent)
Create a parent-child relationship.
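pclink creates the link sets lazily, so nodes may start with _children and _parents as None. A self-contained sketch (the Node class here is illustrative):

class Node:
    def __init__(self, name):
        self.name = name
        self._children = None
        self._parents = None

def pclink(parent, child):
    if parent._children is None:
        parent._children = set()
    if child._parents is None:
        child._parents = set()
    parent._children.add(child)
    child._parents.add(parent)

root, leaf = Node('root'), Node('leaf')
pclink(root, leaf)
assert leaf in root._children and root in leaf._parents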
def distancePointToPolygon(point, polygon, perpendicular=False): p = point s = polygon minDist = None for i in range(0, len(s) - 1): dist = distancePointToLine(p, s[i], s[i + 1], perpendicular) if dist == INVALID_DISTANCE and perpendicular and i != 0: dist = distance(point, s...
Return the minimum distance between point and polygon
def register_event(self, event_name, event_level, message):
    self.events[event_name] = (event_level, message)
Registers an event so that it can be logged later.
def upload(resume, message): data_config = DataConfigManager.get_config() if not upload_is_resumable(data_config) or not opt_to_resume(resume): abort_previous_upload(data_config) access_token = AuthConfigManager.get_access_token() initialize_new_upload(data_config, access_token, message)...
Upload files in the current dir to FloydHub.
def fetch_and_parse(url, bodyLines):
    pageHtml = fetch_page(url)
    return parse(url, pageHtml, bodyLines)
Takes a URL and returns a dictionary of data with 'bodyLines' lines
def RetryQuestion(question_text, output_re="", default_val=None): while True: if default_val is not None: new_text = "%s [%s]: " % (question_text, default_val) else: new_text = "%s: " % question_text output = builtins.input(new_text) or str(default_val) output = output.strip() if not o...
Continually ask a question until the output_re is matched.
def to_json(payload, mode="history"):
    for key, val in six.iteritems(payload):
        if isinstance(val, dict):
            payload[key] = to_json(val, mode)
        else:
            payload[key] = val_to_json(
                key, val, mode, step=payload.get("_step"))
    return payload
Converts all values in a potentially nested dict into their JSON representation
def publish(self, name, value, raw_value=None, precision=0, metric_type='GAUGE', instance=None): if self.config['metrics_whitelist']: if not self.config['metrics_whitelist'].match(name): return elif self.config['metrics_blacklist']: if self.config[...
Publish a metric with the given name
def _extract(archive, compression, cmd, format, verbosity, outdir): targetname = util.get_single_outfile(outdir, archive) try: with lzma.LZMAFile(archive, **_get_lzma_options(format)) as lzmafile: with open(targetname, 'wb') as targetfile: data = lzmafile.read(READ_SIZE_BYTES...
Extract an LZMA or XZ archive with the lzma Python module.
def _poll_queue(self):
    while not self._stop_event.is_set():
        reply = self.run_job()
        self.send(reply)
        if self.queue:
            continue
        time.sleep(0.02)
Poll the queue for work.
def probe(gandi, resource, enable, disable, test, host, interval, http_method, http_response, threshold, timeout, url, window): result = gandi.webacc.probe(resource, enable, disable, test, host, interval, http_method, http_response, threshold...
Manage a probe for a webaccelerator
def increment(self, batch_size): self.example_count += batch_size self.example_total += batch_size if self.log_unit == "seconds": self.unit_count = int(self.timer.elapsed()) self.unit_total = int(self.timer.total_elapsed()) elif self.log_unit == "examples": ...
Update the total and relative unit counts
def _create_tmpfile(cls, status):
    tmpl = string.Template(cls._TMPFILE_PATTERN)
    filename = tmpl.substitute(
        id=status.mapreduce_id,
        shard=status.shard,
        random=random.getrandbits(cls._RAND_BITS))
    return cls._open_file(status.writer_spec, filename, use_tmp_bucket=True)
Creates a new random-named tmpfile.