code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def Filter(self, filename_spec): if "@" in filename_spec: file_path, package_name = filename_spec.split("@") else: file_path, package_name = filename_spec, Resource.default_package resource_path = package.ResourcePath(package_name, file_path) if resource_path is not None: return resour...
Use pkg_resources to find the path to the required resource.
def getfiles(qfiles, dirname, names):
    """Queue rule files (``.cf`` / ``.post``) found among ``names`` in ``dirname``.

    :param qfiles: a queue-like object with a ``put`` method
    :param dirname: directory containing the entries
    :param names: iterable of entry names within ``dirname``
    """
    for name in names:
        fullname = os.path.join(dirname, name)
        # Fix: the original `isfile(x) and x.endswith('.cf') or x.endswith('.post')`
        # parsed as `(isfile and .cf) or .post`, so a *directory* named `*.post`
        # was queued. Require the entry to be a regular file in all cases.
        if os.path.isfile(fullname) and fullname.endswith(('.cf', '.post')):
            qfiles.put(fullname)
Get rule files in a directory
def search_genius_web(self, search_term, per_page=5): endpoint = "search/multi?" params = {'per_page': per_page, 'q': search_term} url = "https://genius.com/api/" + endpoint + urlencode(params) response = requests.get(url, timeout=self.timeout) time.sleep(max(self._SLEEP_MIN, sel...
Use the web-version of Genius search
def _handle_iorder(self, state): if self.opts['state_auto_order']: for name in state: for s_dec in state[name]: if not isinstance(s_dec, six.string_types): continue if not isinstance(state[name], dict): ...
Take a state and apply the iorder system
def main(): parser = argparse.ArgumentParser(description='Run SSOIS and return the available images in a particular filter.') parser.add_argument("--filter", action="store", default='r', dest="filter", choices=['r', ...
Input asteroid family, filter type, and image type to query SSOIS
def v_unique_name_leaf_list(ctx, stmt):
    """Report a DUPLICATE_DEFAULT error for each repeated default on a
    config-true leaf-list; config-false statements are ignored."""
    if not stmt.i_config:
        return
    already_seen = []
    for value in stmt.i_default:
        if value not in already_seen:
            already_seen.append(value)
        else:
            err_add(ctx.errors, stmt.pos, 'DUPLICATE_DEFAULT', (value))
Make sure config true leaf-lists do not have duplicate defaults
def gen_cmd_and_param_completions(self): if self.complete_command: for param in self.command_param_info.get(self.current_command, []): if self.validate_param_completion(param, self.leftover_args): yield self.yield_param_completion(param, self.unfinished_word) ...
generates command and parameter completions
def _len_table_cache(self): length = 0 for table in self._table_cache: length += len(self._table_cache[table]) return length
Returns the length of the table cache
def eidos_process_jsonld():
    """Process an EIDOS JSON-LD request body into INDRA Statements.

    CORS preflight (OPTIONS) requests get an empty response.
    """
    if request.method == 'OPTIONS':
        return {}
    raw_body = request.body.read().decode('utf-8')
    payload = json.loads(raw_body)
    processor = eidos.process_json_str(payload.get('jsonld'))
    return _stmts_from_proc(processor)
Process an EIDOS JSON-LD and return INDRA Statements.
def _normalize_path(self, path): norm_path = os.path.normpath(path) return os.path.relpath(norm_path, start=self._get_working_dir())
Normalizes a file path so that it returns a path relative to the root repo directory.
def html_attributes(self): extra_attributes = '' if self.element_id is not None: extra_attributes = ' id="%s"' % self.element_id if self.style_class is not None: extra_attributes = '%s class="%s"' % ( extra_attributes, self.style_class) if self.att...
Get extra html attributes such as id and class.
def creator(entry, config): template_file = os.path.join(os.path.dirname(__file__), 'templates/docker-container.sh.j2') with open(template_file) as handle: template = handle.read() wrapped_script = render(template, container={ 'image': 'centos:7' if 'image' not in...
Creator function for creating an instance of a Bash.
def getV0(self, v_mag_guess, buses, generators, type=CASE_GUESS): if type == CASE_GUESS: Va = array([b.v_angle * (pi / 180.0) for b in buses]) Vm = array([b.v_magnitude for b in buses]) V0 = Vm * exp(1j * Va) elif type == FLAT_START: V0 = ones(len(buses)) ...
Returns the initial voltage profile.
def add(name, connection_uri, id_file="", o=[], config=None): storm_ = get_storm_instance(config) try: if '@' in name: raise ValueError('invalid value: "@" cannot be used in name.') user, host, port = parse( connection_uri, user=get_default("user", storm_.defa...
Adds a new entry to sshconfig.
def iterargs(self):
    """Build an OrderedDict keyed by each singular iterarg name, mapping to
    the tuple of values stored under the profile's plural name."""
    return OrderedDict(
        (singular, tuple(self._values[self._profile.iterargs[singular]]))
        for singular in self._iterargs
    )
uses the singular name as key
def sin(x, context=None):
    """Return the sine of ``x``, evaluated in ``context`` (or the current one)."""
    operand = BigFloat._implicit_convert(x)
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_sin,
        (operand,),
        context,
    )
Return the sine of ``x``.
def encode_query_kwargs(dynamizer, kwargs): ret = {} for k, v in six.iteritems(kwargs): if '__' not in k: raise TypeError("Invalid query argument '%s'" % k) name, condition_key = k.split('__') if condition_key == 'eq' and is_null(v): condition_key = 'null' ...
Encode query constraints in Dynamo format
def levenshteinDistance(self, tree):
    """Return the Levenshtein distance between this view tree and ``tree``,
    comparing their space-joined micro-string representations."""
    this_repr = ' '.join(map(View.__microStr__, self.views))
    other_repr = ' '.join(map(View.__microStr__, tree))
    return ViewClient.__levenshteinDistance(this_repr, other_repr)
Finds the Levenshtein distance between this tree and the one passed as argument.
def require_user(wa, email):
    """Require that ``email`` matches a known user account; the user list is
    fetched once and memoized in the module-level ``userCache``."""
    try:
        users = userCache['user-list']
    except KeyError:
        users = wa.users.loadUsers()
        userCache['user-list'] = users
    return AssertUser([user for user in users if user.username == email])
Require that the user has an account
def _group_directional(self, clusters, adj_list, counts): observed = set() groups = [] for cluster in clusters: if len(cluster) == 1: groups.append(list(cluster)) observed.update(cluster) else: cluster = sorted(cluster, key=...
return groups for directional method
def parse_tfjob_config():
    """Parse the TF_CONFIG environment variable as JSON.

    Returns the parsed dict, or False when the variable is unset, empty,
    or not valid JSON.
    """
    raw = os.getenv("TF_CONFIG")
    if not raw:
        return False
    try:
        return json.loads(raw)
    except ValueError:
        return False
Attempts to parse TFJob config, returning False if it can't find it
def _str(obj): values = [] for name in obj._attribs: val = getattr(obj, name) if isinstance(val, str): val = repr(val) val = str(val) if len(str(val)) < 10 else "(...)" values.append((name, val)) values = ", ".join("{}={}".format(k, v) for k, v in values) retu...
Show nicely the generic object received.
def go_to_parent_directory(self):
    """Change directory to the parent of the current working (or home) dir."""
    parent = osp.join(getcwd_or_home(), os.pardir)
    self.chdir(osp.abspath(parent))
Go to parent directory
def deploy(project_name): request_log = requestlog.RequestLog header_addon = HeaderControl fault_wrapper = FaultWrapper application = handler.SdkHandler() for middleware in (header_addon, fault_wrapper, request_log, ): if m...
Assemble the middleware pipeline
async def receive(self, pkt): self.server.logger.info('%s: Received packet %s data %s', self.sid, packet.packet_names[pkt.packet_type], pkt.data if not isinstance(pkt.data, bytes) else '<binary>') if pkt.pack...
Receive packet from the client.
def _init():
    """Perform one-time static initialization of ApplicationLayer state
    (idempotent: subsequent calls are no-ops)."""
    if not ApplicationLayer._isinit:
        ApplicationLayer._isinit = True
        ApplicationLayer._to_app = {}
        ApplicationLayer._from_app = Queue()
Internal switchyard static initialization method.
def error(self, text):
    """Emit ``text`` at ERROR level, prepending this instance's message prefix."""
    self.logger.error("%s%s" % (self.message_prefix, text))
Append a log message of type ERROR
def mean(attrs, inputs, proto_obj):
    """Compute the element-wise mean of all input tensors by stacking them
    along a new leading axis and averaging over it."""
    expanded = [symbol.expand_dims(tensor, axis=0) for tensor in inputs]
    stacked = symbol.concat(*expanded, dim=0)
    averaged = symbol.mean(stacked, axis=0)
    return averaged, attrs, inputs
Mean of all the input tensors.
def ok(self):
    """Record the selected color (with alpha when enabled) and close the dialog."""
    rgb, hsv, hexa = self.square.get()
    if self.alpha_channel:
        # Alpha-capable dialog: take hex from the alpha-aware field and
        # append the alpha component to the RGB tuple.
        hexa = self.hexa.get()
        rgb = rgb + (self.alpha.get(),)
    self.color = (rgb, hsv, hexa)
    self.destroy()
Validate color selection and destroy dialog.
def _print_divide(self): for space in self.AttributesLength: self.StrTable += "+ " + "- " * space self.StrTable += "+" + "\n"
Prints all those table line dividers.
def configure_stdout_logger(log_level=logging.DEBUG):
    """Attach a console StreamHandler using LOG_FORMAT_ESCAPED to the root
    logger, setting both logger and handler to ``log_level``."""
    handler = logging.StreamHandler()
    handler.setLevel(log_level)
    handler.setFormatter(logging.Formatter(LOG_FORMAT_ESCAPED))
    root = logging.getLogger()
    root.setLevel(log_level)
    root.addHandler(handler)
Configures logging to use STDOUT
def parse_address(self, address_line):
    """Parse ``address_line`` into its individual address fields via the
    remote service; returns None when the request yields nothing."""
    payload = self._make_request('/address/getParsedAddress',
                                 {"term": address_line})
    if payload is None:
        return None
    return Address.from_json(payload)
Parses the given address into its individual address fields.
def deinit(self):
    """Turn all pixels off, push the cleared buffer out, and release the pin."""
    buf = self.buf
    for idx in range(len(buf)):
        buf[idx] = 0
    neopixel_write(self.pin, buf)
    self.pin.deinit()
Blank out the NeoPixels and release the pin.
def tweets_files(string, path): for filename in os.listdir(path): if re.match(string, filename) and ".jsonl" in filename: f = gzip.open if ".gz" in filename else open yield path + filename, f Ellipsis
Iterates over json files in path.
def _nelec(self):
    """Number of particles per unit Lorentz factor, evaluated from the
    particle distribution at energies ``_gam * mec2``."""
    distribution = self.particle_distribution(self._gam * mec2)
    return distribution.to(1 / mec2_unit).value
Particles per unit lorentz factor
def FromStream(cls, stream, auto_transfer=True, total_size=None, **kwds):
    """Create a new Download object wrapping ``stream``; any extra keyword
    arguments are forwarded to the constructor."""
    ctor_kwargs = dict(kwds, auto_transfer=auto_transfer, total_size=total_size)
    return cls(stream, **ctor_kwargs)
Create a new Download object from a stream.
def order_error(subtag, got, expected): options = SUBTAG_TYPES[expected:] if len(options) == 1: expect_str = options[0] elif len(options) == 2: expect_str = '%s or %s' % (options[0], options[1]) else: expect_str = '%s, or %s' % (', '.join(options[:-1]), options[-1]) got_str =...
Output an error indicating that tags were out of order.
def _ReadAndLockNextRequestsToProcess(self, flow_keys, cursor): query = condition_template = "(client_id = %s AND flow_id = %s)" conditions = [condition_template] * len(flow_keys) query = query.format(conditions=" OR ".join(conditions)) args = [] for client_id, flow_id in flow_keys: args.a...
Reads and locks the next_request_to_process for a number of flows.
def _set_annotation_to_str(annotation_data: Mapping[str, Mapping[str, bool]], key: str) -> str: value = annotation_data[key] if len(value) == 1: return 'SET {} = "{}"'.format(key, list(value)[0]) x = ('"{}"'.format(v) for v in sorted(value)) return 'SET {} = {{{}}}'.format(key, ', '.join(x))
Return a set annotation string.
def weighted_mean(data): W, N, mean, d = 0, len(data), 0, 0 if N < 1: return "", "" if N == 1: return data[0][0], 0 for x in data: W += x[1] for x in data: mean += old_div((float(x[1]) * float(x[0])), float(W)) for x in data: d += (old_div(float(x[1]), flo...
calculates weighted mean of data
def apply_statusbar_settings(self): show_status_bar = CONF.get('main', 'show_status_bar') self.statusBar().setVisible(show_status_bar) if show_status_bar: for widget, name in ((self.mem_status, 'memory_usage'), (self.cpu_status, 'cpu_usage')): ...
Update status bar widgets settings
def undecorated(o): if type(o) is type: return o try: closure = o.func_closure except AttributeError: pass try: closure = o.__closure__ except AttributeError: return if closure: for cell in closure: if cell.cell_contents is o: ...
Remove all decorators from a function, method or class
def _run_event_methods(self, tag, stage=None): import inspect from ambry.bundle.events import _runable_for_event funcs = [] for func_name, f in inspect.getmembers(self, predicate=inspect.ismethod): if _runable_for_event(f, tag, stage): funcs.append(f) ...
Run code in the bundle that is marked with events.
def listPrimaryDSTypes(self, primary_ds_type="", dataset=""):
    """Return primary dataset types matching the given filters.

    Returns all primary dataset types when neither ``primary_ds_type`` nor
    ``dataset`` is supplied.
    """
    conn = self.dbi.connection()
    try:
        # Fix: the original closed the connection in the try body AND in the
        # finally clause, double-closing it on the success path. Close it
        # exactly once, in finally.
        return self.primdstypeList.execute(conn, primary_ds_type, dataset)
    finally:
        if conn:
            conn.close()
Returns all primary dataset types if dataset or primary_ds_type are not passed.
def _writen(fd, data): while data: n = os.write(fd, data) data = data[n:]
Write all the data to a descriptor.
def getServiceLevel(self):
    """Return the device's service level (1-based), decoded from the low bit
    of the hex flags field in the $GE command response."""
    settings = self.sendCommand('$GE')
    flags = int(settings[2], 16)
    return (flags & 0x0001) + 1
Returns the service level
def fan_triangulate(indices):
    """Convert a sequence of quad faces into a numpy array of triangles.

    Each quad ``[a, b, c, d]`` yields the two triangles ``[a, b, c]`` and
    ``[b, c, d]``. Raises ValueError when the first face is not a quad.
    """
    if len(indices[0]) != 4:
        raise ValueError("Assumes working with a sequence of quad indices")
    triangles = []
    for quad in indices:
        triangles.append(quad[:-1])
        triangles.append(quad[1:])
    return np.array(triangles)
Return an array of vertices in triangular order using a fan triangulation algorithm.
def deactivate(profile='default'): with jconfig(profile) as config: deact = True; if not getattr(config.NotebookApp.contents_manager_class, 'startswith',lambda x:False)('jupyterdrive'): deact=False if 'gdrive' not in getattr(config.NotebookApp.tornado_settings,'get', lambda _,__:...
Deactivate jupyterdrive by unsetting the relevant configuration keys.
def processEnded(self, reason): log_debug("{name} process exited", name=self.name) if self.deferred: if reason.type == ProcessDone: self.deferred.callback(reason.value.exitCode) elif reason.type == ProcessTerminated: self.deferred.errback(reason) ...
Connected process shut down
def remove_resource_file(issue, filepath, ignore_layouts):
    """Delete the resource file at ``filepath``, unless layouts are being
    ignored and the issue's first element is a layout."""
    if not os.path.exists(filepath):
        return
    if ignore_layouts is False or issue.elements[0][0] != 'layout':
        print('removing resource: {0}'.format(filepath))
        os.remove(os.path.abspath(filepath))
Delete a file from the filesystem
def usages(self): row, col = self.editor.cursor() self.log.debug('usages: in') self.call_options[self.call_id] = { "word_under_cursor": self.editor.current_word(), "false_resp_msg": "Not a valid symbol under the cursor"} self.send_at_point("UsesOfSymbol", ...
Request usages of whatever at cursor.
def open(self, fn):
    "Open the image at `fn`; subclasses may override for custom loading."
    load_kwargs = {'convert_mode': self.convert_mode,
                   'after_open': self.after_open}
    return open_image(fn, **load_kwargs)
Open image in `fn`, subclass and overwrite for custom behavior.
def put(self, event): self.log("Configuration put request ", event.user) try: component = model_factory(Schema).find_one({ 'uuid': event.data['uuid'] }) component.update(event.data) component.save() response = {...
Store a given configuration
async def set_as_default_gateway(self):
    """Make this link the default gateway for its interface's node."""
    iface = self._data['interface']
    await iface._handler.set_default_gateway(
        system_id=iface.node.system_id,
        id=iface.id,
        link_id=self.id)
Set this link as the default gateway for the node.
def _formatOntologyTerm(self, element, element_type): elementClause = None if isinstance(element, dict) and element.get('terms'): elements = [] for _term in element['terms']: if _term.get('id'): elements.append('?{} = <{}> '.format( ...
Formats the ontology terms for query
def add_callback(self, callback, *callback_args, **callback_kwargs):
    """Register ``callback`` with no associated errback; any extra positional
    and keyword arguments are forwarded to it when invoked."""
    return self.add_callbacks(
        callback,
        callback_args=callback_args,
        callback_kwargs=callback_kwargs,
    )
Add a callback without an associated errback.
def _one(self, query): try: result = query.one() if result.has_expired(self._expirations): raise NotFoundError return result except (NoResultFound, MultipleResultsFound): raise NotFoundError
Gets one row from the query. Raises NotFoundError if there isn't a row or if there are multiple rows
def find_console_handler(logger):
    """Return the first stderr StreamHandler attached to ``logger``, or None."""
    matches = (
        h for h in logger.handlers
        if isinstance(h, logging.StreamHandler) and h.stream == sys.stderr
    )
    return next(matches, None)
Return a stream handler, if it exists.
def format_from_extension(self, extension): formats = [name for name, format in self._formats.items() if format.get('file_extension', None) == extension] if len(formats) == 0: return None elif len(formats) == 2: raise RuntimeError("Se...
Find a format from its extension.
def to_bytes(s):
    """Coerce ``s`` to UTF-8 bytes; bytes pass through unchanged."""
    if isinstance(s, bytes):
        return s
    if isinstance(s, str) or is_unicode(s):
        return s.encode("utf-8")
    # Fall back to the text constructor: `unicode` on py2, `str` on py3
    # (where the `unicode` name raises NameError).
    try:
        text = unicode(s)
    except NameError:
        text = str(s)
    return text.encode("utf-8")
Convert an item into bytes.
def _enable_cleanup(func): @functools.wraps(func) def wrapper(*args, **kwargs): self = args[0] result = func(*args, **kwargs) self.cleanup(self) return result return wrapper
Execute cleanup operation when the decorated function completed.
def parse(cls, parser, text, pos): match = cls.regex.match(text) if match: if match.group(0).lower() not in cls.grammar: result = text, SyntaxError(repr(match.group(0)) + " is not a member of " + repr(cls.grammar)) else: result = text[len(match.gro...
Checks if terminal token is a keyword after lower-casing it.
def rate(self, currency):
    """Return the quotation between the base currency and ``currency``.

    Raises ExchangeBackendNotInstalled when no backend is configured.
    """
    backend = self._backend
    if not backend:
        raise ExchangeBackendNotInstalled()
    return backend.rate(currency)
Return quotation between the base and another currency
def py_bisect(f, a, b, args=(), xtol=_xtol, rtol=_rtol, maxiter=_iter, ytol=None, full_output=False, disp=True): fa = f(a, *args) fb = f(b, *args) if fa*fb > 0.0: raise ValueError("f(a) and f(b) must have different signs") elif fa == 0.0: return a elif fb == 0.0: ...
Port of SciPy's C bisect routine.
def delete(self, params, args, data): ctx = self._create_context(params, args, data) row_id = ctx.get_row_id() if row_id: deleted = self._delete_one(row_id, ctx) if deleted: return ResourceResult(body={}) else: return NOT_FOUND ...
Supports only singular delete and adds proper http status.
def analysis_info(self, webid):
    """Fetch the status and key attributes of the analysis ``webid``."""
    payload = {'apikey': self.apikey, 'webid': webid}
    response = self._post(self.apiurl + "/v2/analysis/info", data=payload)
    return self._raise_or_extract(response)
Show the status and most important attributes of an analysis.
def license2marc(self, key, value):
    """Populate the ``540`` MARC field from a license record."""
    get = value.get
    return {
        'a': get('license'),
        'b': get('imposing'),
        'u': get('url'),
        '3': get('material'),
    }
Populate the ``540`` MARC field.
def GetClientStates(self, client_list, client_chunk=50): for client_group in collection.Batch(client_list, client_chunk): for fd in aff4.FACTORY.MultiOpen( client_group, mode="r", aff4_type=aff4_grr.VFSGRRClient, token=self.token): result = {} result["ag...
Take in a client list and return dicts with their age and hostname.
def targets_for_class(self, target, classname):
    """Return the set of targets in ``target``'s transitive closure whose
    classes contain ``classname``."""
    matching = set()
    for candidate in target.closure():
        if any(classname in cls for cls in self._target_classes(candidate)):
            matching.add(candidate)
    return matching
Search which targets from `target`'s transitive dependencies contain `classname`.
def createCatalog(config,roi=None,lon=None,lat=None): import ugali.observation.catalog if roi is None: roi = createROI(config,lon,lat) catalog = ugali.observation.catalog.Catalog(config,roi=roi) return catalog
Create a catalog object
def write_question(self, question):
    """Serialize ``question`` (name, then type and class shorts) into the packet."""
    self.write_name(question.name)
    for short_field in (question.type, question.clazz):
        self.write_short(short_field)
Writes a question to the packet
def _run_formula(self, variable, population, period): formula = variable.get_formula(period) if formula is None: return None if self.trace: parameters_at = self.trace_parameters_at_instant else: parameters_at = self.tax_benefit_system.get_parameters_at...
Find the ``variable`` formula for the given ``period`` if it exists, and apply it to ``population``.
def send(self, event):
    """Serialize ``event`` to JSON and log it, unless it exceeds the
    configured max_event_size (None means no limit)."""
    serialized = json.dumps(event, cls=DateTimeJSONEncoder)
    fits = self.max_event_size is None or len(serialized) <= self.max_event_size
    if fits:
        self.log(serialized)
Send the event to the standard python logger
def build_json(self, guid): upserts = [] for value in self.upserts: upserts.append({"value": value, "criteria": self.upserts[value]}) return json.dumps({'replace_all': self.replace_all, 'guid': guid, 'complete': self.complete, 'upserts': upserts, 'deletes':...
Build JSON with the input guid
def rename(self, from_, to):
    """Rename table ``from_`` to ``to`` on the schema."""
    plan = self._create_blueprint(from_)
    plan.rename(to)
    self._build(plan)
Rename a table on the schema.
def to_torrent(magnet_link): infoHash = parse_magnet(magnet_link)['infoHash'] torcache = 'http://torcache.net/torrent/' + infoHash + '.torrent' torrage = 'https://torrage.com/torrent/' + infoHash + '.torrent' reflektor = 'http://reflektor.karmorra.info/torrent/' + \ infoHash + '.torrent' the...
turn a magnet link to a link to a torrent file
def visit_GeneratorExp(self, node: ast.GeneratorExp) -> Any:
    """Execute the generator expression, then visit each generator's iterable."""
    value = self._execute_comprehension(node=node)
    for gen in node.generators:
        self.visit(gen.iter)
    return value
Compile the generator expression as a function and call it.
def _get_id_from_name(self, name): _filter = { 'placementGroups': { 'name': {'operation': name} } } mask = "mask[id, name]" results = self.client.call('Account', 'getPlacementGroups', filter=_filter, mask=mask) return [result['id'] for resu...
List placement group ids which match the given name.
def find_rak(): if hasattr(current_app, 'rak'): return getattr(current_app, 'rak') else: if hasattr(current_app, 'blueprints'): blueprints = getattr(current_app, 'blueprints') for blueprint_name in blueprints: if hasattr(blueprints[blueprint_name], 'rak'):...
Find our instance of Rak, navigating Local's and possible blueprints.
def append(self, content, encoding='utf8'): if not self.parent.exists: self.parent.create() with open(self._filename, "ab") as output_file: if not is_text(content): Log.error(u"expecting to write unicode only") output_file.write(content.encode(encoding...
add a line to file
def untokenize_without_newlines(tokens): text = '' last_row = 0 last_column = -1 for t in tokens: token_string = t[1] (start_row, start_column) = t[2] (end_row, end_column) = t[3] if start_row > last_row: last_column = 0 if ( (start_column ...
Return source code based on tokens.
def setCursorSize(self, p):
    """Resize the cursor box so ``p`` becomes its diagonal corner, clamped to
    at least one canvas character cell in each dimension."""
    box = self.cursorBox
    self.cursorBox = BoundingBox(box.xmin, box.ymin, p.x, p.y)
    self.cursorBox.w = max(self.cursorBox.w, self.canvasCharWidth)
    self.cursorBox.h = max(self.cursorBox.h, self.canvasCharHeight)
sets width based on diagonal corner p
def _listdir(self, path): if self._user is None: return os.listdir(path) else: args = self._build_cmdline(['/bin/ls', '-1A', path]) return subprocess.check_output(args, stderr=DEVNULL).decode('utf-8', errors='ignore').split('\n')
Return the list of files in a directory, assuming that our user can read it.
def _AssertDataIsList(key, lst): if not isinstance(lst, list) and not isinstance(lst, tuple): raise NotAListError('%s must be a list' % key) for element in lst: if not isinstance(element, str): raise ElementNotAStringError('Unsupported list element %s found in %s', (...
Assert that lst contains list data and is not structured.
def send(self, endpoint, json_message):
    """Queue ``json_message`` for asynchronous delivery to ``endpoint``."""
    _send_event_task.spool(json_message=json_message, endpoint=endpoint)
Queues the message to be sent.
def clean(self, elements):
    """Remove empty or incomplete answers.

    Any empty element invalidates the whole answer set (returns []).
    Nested lists/tuples are cleaned recursively and dropped when the
    cleaned result is empty; surviving elements are kept in their
    original (uncleaned) form.
    """
    # Fix: the original shadowed the builtin `next` with a local variable
    # and used a C-style index loop over xrange(len(...)).
    cleaned = []
    for element in elements:
        if isempty(element):
            return []
        candidate = element
        if isinstance(element, (list, tuple)):
            candidate = self.clean(element)
        if candidate:
            cleaned.append(element)
    return cleaned
Removes empty or incomplete answers.
def help_text(self):
    """Return a formatted listing of all available command names."""
    names = sorted(get_commands().keys())
    return "\nCommands:\n" + '\n'.join(names)
Formats and prints the help text from the command list
def config(self, steps_per_epoch, starting_epoch, max_epoch):
    """Configure the loop counters; steps and max epoch must be non-negative."""
    self.steps_per_epoch = int(steps_per_epoch)
    self.starting_epoch = int(starting_epoch)
    self.max_epoch = int(max_epoch)
    assert self.steps_per_epoch >= 0 and self.max_epoch >= 0
    # The epoch counter sits one before the first epoch that will run.
    self._epoch_num = starting_epoch - 1
Configure the loop given the settings.
def compare_balance(self, operator, or_equals, amount): amount = int(amount) if operator == 'less': if or_equals: self.assertLessEqual(self.balance, amount) else: self.assertLess(self.balance, amount) elif or_equals: self.assert...
Additional step using regex matcher to compare the current balance with some number
def encode(self):
    """Encode the delay as 3 bytes: big-endian whole seconds (u16) followed
    by the fractional part scaled to 0-255 (u8)."""
    whole_seconds = int(math.floor(self.delay))
    fraction = int((self.delay - whole_seconds) * 255.0)
    return struct.pack('>HB', whole_seconds, fraction)
Encodes this SeqDelay to a binary bytearray.
def unique_lorem(anon, obj, field, val):
    """Generate a unique lorem-ipsum paragraph for ``field`` via the
    anonymizer's faker; ``obj`` and ``val`` are unused."""
    faker = anon.faker
    return faker.unique_lorem(field=field)
Generates a unique paragraph of lorem ipsum text
def handle_raw_output(ctx, data):
    """When a raw output format (json/yaml) is configured, dump ``data`` to
    stdout and exit the process; otherwise fall through."""
    fmt = ctx.obj['format']
    if fmt == 'json':
        print(json_dump(data))
        exit(0)
    if fmt == 'yaml':
        print(yaml_dump(data), end='')
        exit(0)
If a raw output format is set, dump data and exit
def strip_byte_order_mark(cls, data): encoding = None if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \ and (data[2:4] != '\x00\x00'): encoding = 'utf-16be' data = data[2:] elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \ and (data[2...
If a byte-order mark is present, strip it and return the encoding it implies.
def get(self): config = self.get_block('^router bgp .*') if not config: return None response = dict() response.update(self._parse_bgp_as(config)) response.update(self._parse_router_id(config)) response.update(self._parse_max_paths(config)) response.upd...
Returns the bgp routing configuration as a dict object
def filter_zone(self, data):
    """Return True when the zone record ``data`` matches our configured
    domain and (if set) our private-zone setting."""
    if self.private_zone is not None:
        wanted_private = self.str2bool(self.private_zone)
        if data['Config']['PrivateZone'] != wanted_private:
            return False
    return data['Name'] == '{0}.'.format(self.domain)
Check if a zone is private
def process_response(self, request, response):
    """When the response is cacheable, patch its caching headers and store it
    in the cache; always return the (possibly patched) response."""
    if self.should_cache(request, response):
        response = self.patch_headers(response)
        self.set_cache(request, response)
    return response
Sets the cache and deals with caching headers if needed
def visit(self, node: AST, dfltChaining: bool = True) -> str: if node is None: return '' if isinstance(node, ast.Expression): return self.visit(node.body) method = 'visit_' + node.__class__.__name__ visitor = getattr(self, method, self.generic_visit) retur...
Process `node` by dispatching to a handler.
def column_types(self):
    """Return a dict mapping each SQLAlchemy column name to its type."""
    return {column.name: column.type for column in self.sqla_columns}
Return a dict mapping column name to type for all columns in table
def setupCentral(self):
    """Create the tabbed central widget and install it as the central widget."""
    central = TabsWindow(self.bin_windows, self)
    self.central = central
    self.setCentralWidget(central)
Setup empty window supporting tabs at startup.
def parse(binary, **params): binary = io.BytesIO(binary) collection = list() with tarfile.TarFile(fileobj=binary, mode='r') as tar: for tar_info in tar.getmembers(): content_type, encoding = mimetypes.guess_type(tar_info.name) content = tar.extractfile(tar_info) c...
Turns a TAR file into a frozen sample.