| code (string, lengths 51–2.34k) | docstring (string, lengths 11–171) |
|---|---|
def _save_installed(self):
import json
from utility import json_serial, get_json
fulldata = get_json(self.instpath, {})
fulldata["installed"] = self.installed
with open(self.instpath, 'w') as f:
json.dump(fulldata, f, default=json_serial) | Saves the list of installed repo XML settings files. |
def add_metadata(track_file, track_data):
mp3 = mutagen.mp3.MP3(track_file)
mp3['TPE1'] = mutagen.id3.TPE1(encoding=3, text=track_data.user['username'])
mp3['TIT2'] = mutagen.id3.TIT2(encoding=3, text=track_data.title)
cover_bytes = requests.get(track_data.artwork_url, stream=True).raw.read()
mp3.ta... | Adds artist and title from the track data, and downloads the cover and embeds it in the MP3 tags. |
def ess(self):
_, y_vals, values, colors = self.labels_ticks_and_vals()
for y, value, color in zip(y_vals, values, colors):
if value.ndim != 2 or value.shape[0] < 2:
yield y, None, color
else:
            yield y, _get_ess(value), color | Get effective sample size (ESS) data for the variable. |
def _handle_dot(self):
self.printer = DotPrinter(self.todolist)
try:
        arg = self.argument(1)
        todo = self.todolist.todo(arg)
        todos = set([todo])
todos |= set(self.todolist.children(todo))
todo... | Handles the dot subsubcommand. |
def _previous(self):
self.summaries.rotate()
current_summary = self.summaries[0]
self._update_summary(current_summary) | Get the previous summary and present it. |
def write_text(filename: str, text: str) -> None:
with open(filename, 'w') as f:
print(text, file=f) | Writes text to a file. |
def remove_pane(self, pane):
assert isinstance(pane, Pane)
if pane in self.panes:
if pane == self.active_pane:
if self.previous_active_pane:
self.active_pane = self.previous_active_pane
else:
self.focus_next()
... | Remove pane from this Window. |
def attrlist(self):
    'Transform the KEY_MAP parameter into an attrlist for LDAP filters'
keymap = self.config.get('KEY_MAP')
if keymap:
return [s.encode('utf-8') for s in keymap.values()]
else:
        return None | Transform the KEY_MAP parameter into an attrlist for LDAP filters |
def do_start_alerts(self, _):
if self._alerter_thread.is_alive():
print("The alert thread is already started")
else:
self._stop_thread = False
self._alerter_thread = threading.Thread(name='alerter', target=self._alerter_thread_func)
self._alerter_thread.st... | Starts the alerter thread |
def join_right_in(self, *objs):
if not objs:
        # No objects given: return a condition that is always false.
        return self.table.c[self.fielda] != self.table.c[self.fielda]
else:
keys = get_objs_columns(objs, self.reference_fieldname)
return (self.table.c[self.fieldb] == self.reference_class.c[self.reference_fieldname]) & (self.table.... | Create a join condition, connect B and C |
def _get_span(s, pattern):
i, j = -1, -1
match = pattern.match(s)
if not match:
return i, j
for group_name in pattern.groupindex:
i, j = match.span(group_name)
if (i, j) != (-1, -1):
return i, j
return i, j | Return the span of the first group that matches the pattern. |
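A minimal usage sketch for `_get_span` above; the pattern and inputs are illustrative, not part of the dataset. The helper returns the span of the first named group that participates in a match, or `(-1, -1)` otherwise.

```python
import re

# Hypothetical pattern with one named group (illustration only).
pattern = re.compile(r'prefix-(?P<body>\w+)')

print(_get_span('prefix-hello world', pattern))  # (7, 12), the span of 'hello'
print(_get_span('no match here', pattern))       # (-1, -1)
```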
def _probe_positions(probe, group):
positions = probe['channel_groups'][group]['geometry']
channels = _probe_channels(probe, group)
return np.array([positions[channel] for channel in channels]) | Return the positions of a probe channel group. |
def OnEnterSelectionMode(self, event):
self.grid.sel_mode_cursor = list(self.grid.actions.cursor)
self.grid.EnableDragGridSize(False)
self.grid.EnableEditing(False) | Event handler for entering selection mode, disables cell edits |
def sc_cuts_alg(self, viewer, event, msg=True):
if self.cancut:
direction = self.get_direction(event.direction)
self._cycle_cuts_alg(viewer, msg, direction=direction)
return True | Adjust cuts algorithm interactively. |
def _sanitize_title(self, title):
title = re.sub(self.inside_brackets, "", title)
title = re.sub(self.after_delimiter, "", title)
    return title.strip() | Remove redundant metadata from the title and return it |
def filepath(self) -> str:
return os.path.join(self._dirpath, self.name + '.nc') | The NetCDF file path. |
def create(name, packages=None, user=None):
packages = packages or ''
packages = packages.split(',')
packages.append('pip')
args = packages + ['--yes', '-q']
cmd = _create_conda_cmd('create', args=args, env=name, user=user)
ret = _execcmd(cmd, user=user, return0=True)
if ret['retcode'] == 0:... | Create a conda env |
def on_epoch_end(self, last_metrics, **kwargs):
"Set the final result in `last_metrics`."
return add_metrics(last_metrics, self.val/self.count) | Set the final result in `last_metrics`. |
def list_policies(self, scaling_group):
uri = "/%s/%s/policies" % (self.uri_base, utils.get_id(scaling_group))
resp, resp_body = self.api.method_get(uri)
return [AutoScalePolicy(self, data, scaling_group)
for data in resp_body.get("policies", [])] | Returns a list of all policies defined for the specified scaling group. |
def sha1sum_numpy(np_array):
import hashlib
return hashlib.sha1(np_array.view(np.uint8)).hexdigest() | Return the secure hash digest with sha1 algorithm for a numpy array |
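A short usage sketch for `sha1sum_numpy` above, assuming a C-contiguous array, since `ndarray.view(np.uint8)` requires a compatible memory layout:

```python
import numpy as np

arr = np.arange(4, dtype=np.int32)
digest = sha1sum_numpy(arr)                 # 40-character hex string
assert digest == sha1sum_numpy(arr.copy())  # same bytes -> same digest
```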
def log_metrics(metrics, summ_writer, log_prefix, step, history=None):
rjust_len = max([len(name) for name in metrics])
for name, value in six.iteritems(metrics):
step_log(step, "%s %s | % .8f" % (
log_prefix.ljust(5), name.rjust(rjust_len), value))
full_name = "metrics/" + name
if history:
... | Log metrics to summary writer and history. |
def as_ul(self, current_linkable=False, class_current="active_link",
before_1="", after_1="", before_all="", after_all=""):
return self.__do_menu("as_ul", current_linkable, class_current,
            before_1=before_1, after_1=after_1, before_all=before_all, after_all=after_all) | Returns the menu as an HTML ul element |
def urls(order_by: Optional[str] = None):
url_rules: List[Rule] = current_app.url_map._rules
if order_by == 'view':
url_rules = sorted(url_rules, key=lambda rule: _get_rule_view(rule))
elif order_by != 'priority':
url_rules = sorted(url_rules, key=lambda rule: getattr(rule, order_by))
he... | List all URLs registered with the app. |
def orthologize(self, ortho_species_id, belast):
if (
self.orthologs
and ortho_species_id in self.orthologs
and ortho_species_id != self.species_id
):
self.orthology_species = ortho_species_id
        self.canonical = self.orthologs[ortho_species_id]["... | Set the canonical name to the ortholog for the given species ID |
def cont_c(self, percent=0.9, N=None):
if not hasattr(self, 'G'):
self.fs_c(N=self.rank)
return apply_along_axis(lambda _: _/self.L[:N], 1,
apply_along_axis(lambda _: _*self.c, 0, self.G[:, :N]**2)) | Return the contribution of each column. |
def print_todo(self, p_todo):
todo_str = p_todo.source()
for ppf in self.filters:
todo_str = ppf.filter(todo_str, p_todo)
return TopydoString(todo_str) | Given a todo item, pretty print it. |
def _ProcessEntries(self, fd):
p = config_file.KeyValueParser(kv_sep="{", term="}", sep=None)
data = utils.ReadFileBytesAsUnicode(fd)
entries = p.ParseEntries(data)
for entry in entries:
for section, cfg in iteritems(entry):
if cfg:
cfg = cfg[0].strip()
else:
cf... | Extract entries from the xinetd config files. |
def skull_strip(dset,suffix='_ns',prefix=None,unifize=True):
return available_method('skull_strip')(dset,suffix,prefix,unifize) | attempts to cleanly remove skull from ``dset`` |
def trend_coefficients(self, order=LINEAR):
if not len(self.points):
raise ArithmeticError('Cannot calculate the trend of an empty series')
return LazyImport.numpy().polyfit(self.timestamps, self.values, order) | Calculate trend coefficients for the specified order. |
def start(self):
"start the pool's workers"
for i in xrange(self.size):
scheduler.schedule(self._runner)
self._closing = False | start the pool's workers |
def check_update():
r = requests.get("https://pypi.python.org/pypi/prof/json")
data = r.json()
if versiontuple(data['info']['version']) > versiontuple(__version__):
return True
return False | Return True if an update is available on pypi |
def coinc(self, s0, s1, slide, step):
loglr = - s0 - s1
threshes = [self.fits_by_tid[i]['thresh'] for i in self.ifos]
loglr += sum([t**2. / 2. for t in threshes])
return (2. * loglr) ** 0.5 | Calculate the final coinc ranking statistic |
def proximal(self):
if self.quadratic_coeff < 0:
raise TypeError('`quadratic_coeff` {} must be non-negative'
''.format(self.quadratic_coeff))
return proximal_quadratic_perturbation(
self.functional.proximal,
a=self.quadratic_coeff, u=self.l... | Proximal factory of the quadratically perturbed functional. |
def getCol(self, x):
return [self.getCell(x, i) for i in self.__size_range] | return the x-th column, starting at 0 |
def rglob(dirname, pattern, dirs=False, sort=True):
fns = []
path = str(dirname)
if os.path.isdir(path):
fns = glob(os.path.join(escape(path), pattern))
dns = [fn for fn
in [os.path.join(path, fn)
for fn in os.listdir(path)]
if os... | recursive glob, gets all files that match the pattern within the directory tree |
def spam(self, tag=None, fromdate=None, todate=None):
return self.call("GET", "/stats/outbound/spam", tag=tag, fromdate=fromdate, todate=todate) | Gets a total count of recipients who have marked your email as spam. |
def writefits(self, *args, **kwargs):
old_wave = self.wave
self.wave = self._wavetable
try:
super(UniformTransmission, self).writefits(*args, **kwargs)
finally:
self.wave = old_wave | Write to file using default waveset. |
def cookies(self):
return [line.strip()
for line in self.conn.issue_command("GetCookies").split("\n")
if line.strip()] | Returns a list of all cookies in cookie string format. |
def add_date_facet(self, *args, **kwargs):
    self.facets.append(DateHistogramFacet(*args, **kwargs)) | Add a date histogram facet |
def output(data, **kwargs):
if isinstance(data, Exception):
data = six.text_type(data)
if 'output_indent' in __opts__ and __opts__['output_indent'] >= 0:
return pprint.pformat(data, indent=__opts__['output_indent'])
return pprint.pformat(data) | Print out via pretty print |
def _get_classifier(self, prefix):
with self.name_scope():
classifier = nn.Dense(2, prefix=prefix)
return classifier | Construct a decoder for the next sentence prediction task |
def getReferenceByName(self, name):
if name not in self._referenceNameMap:
raise exceptions.ReferenceNameNotFoundException(name)
return self._referenceNameMap[name] | Returns the reference with the specified name. |
def _request_raw_content(self, url, timeout):
request = Request(url)
if self.referer is not None:
request.add_header('Referer', self.referer)
raw_xml = self._call_geocoder(
request,
timeout=timeout,
deserializer=None
)
return raw_xm... | Send the request to get raw content. |
def runGetReferenceSet(self, id_):
referenceSet = self.getDataRepository().getReferenceSet(id_)
return self.runGetRequest(referenceSet) | Runs a getReferenceSet request for the specified ID. |
def verify_claims(self, claims):
scopes = self.get_settings('OIDC_RP_SCOPES', 'openid email')
if 'email' in scopes.split():
return 'email' in claims
LOGGER.warning('Custom OIDC_RP_SCOPES defined. '
'You need to override `verify_claims` for custom claims verific... | Verify the provided claims to decide if authentication should be allowed. |
def add_option(self, *args, **kwargs):
if self.parseTool == 'argparse':
if args and args[0] == '':
args = args[1:]
return self.parser.add_argument(*args, **kwargs)
else:
return self.parser.add_option(*args, **kwargs) | Add optparse or argparse option depending on CmdHelper initialization. |
def bbox_hflip(bbox, rows, cols):
x_min, y_min, x_max, y_max = bbox
return [1 - x_max, y_min, 1 - x_min, y_max] | Flip a bounding box horizontally around the y-axis. |
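A worked example for `bbox_hflip` above, assuming the box uses normalized [0, 1] coordinates (which is why `rows` and `cols` do not enter the arithmetic):

```python
bbox = [0.1, 0.2, 0.4, 0.5]                  # x_min, y_min, x_max, y_max
print(bbox_hflip(bbox, rows=100, cols=100))  # [0.6, 0.2, 0.9, 0.5]
```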
def update(ctx, migrate=False):
msg = 'Update all dependencies'
if migrate:
msg += ' and migrate data'
header(msg)
info('Updating Python dependencies')
lrun('pip install -r requirements/develop.pip')
lrun('pip install -e .')
info('Updating JavaScript dependencies')
lrun('npm inst... | Perform a development update |
def init_group_read(self, dst_addr=1):
self.init_group(dst_addr)
self.tpci_apci = 0x00
self.data = [0] | Initialize the CEMI frame for a group read operation. |
async def format(self, fstype, *, uuid=None):
self._data = await self._handler.format(
system_id=self.node.system_id, id=self.id,
fstype=fstype, uuid=uuid) | Format this block device. |
async def recv_multipart_expect(self, data: Tuple[bytes, ...]=(b'',)) -> None:
expect_all(await self.recv_multipart(), data) | Waits for the next multipart message and asserts that it contains the given data. |
def timeline(self):
rev = int(self.db.get('site:rev'))
if rev != self.revision:
self.reload_site()
return self._timeline | Get timeline, reloading the site if needed. |
def process(self, sched, coro):
super(AddCoro, self).process(sched, coro)
self.result = sched.add(self.coro, self.args, self.kwargs, self.prio & priority.OP)
if self.prio & priority.CORO:
return self, coro
else:
sched.active.append( (None, coro)) | Add the given coroutine in the scheduler. |
def _raise_response_exceptions(response):
if not response.ok and 'www-authenticate' in response.headers:
msg = response.headers['www-authenticate']
if 'insufficient_scope' in msg:
raise OAuthInsufficientScope('insufficient_scope', response.url)
elif 'invalid_token' in msg:
... | Raise specific errors on some status codes. |
def _get_service_state(service_id: str):
LOG.debug('Getting state of service %s', service_id)
services = get_service_id_list()
service_ids = [s for s in services if service_id in s]
if len(service_ids) != 1:
return 'Service not found! services = {}'.format(str(services))
... | Get the Service state object for the specified id. |
def hmsStrToDeg(ra):
hour, min, sec = ra.split(':')
ra_deg = hmsToDeg(int(hour), int(min), float(sec))
return ra_deg | Convert a string representation of RA into a float in degrees. |
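A worked example for `hmsStrToDeg` above, assuming `hmsToDeg` applies the usual factor of 15 degrees per hour of right ascension:

```python
# 12h 30m 00s -> (12 + 30/60) * 15 = 187.5 degrees
print(hmsStrToDeg("12:30:00"))  # 187.5
```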
def _flatten(self, field, items, transform = None):
values = [item[field] for item in items]
flattened = []
for value in values:
flattened.extend([value] if isinstance(value, str) else value)
return transform(flattened) if transform != None else flattened | Extracts the entry `field` from each item in the supplied iterable, flattening any nested lists |
def _create_product_map(self):
self._product_map = {}
for dep in self._tile.dependencies:
try:
dep_tile = IOTile(os.path.join('build', 'deps', dep['unique_id']))
except (ArgumentError, EnvironmentError):
raise BuildError("Could not find required de... | Create a map of all products produced by this or a dependency. |
def delete_session_entity_type(project_id, session_id,
entity_type_display_name):
import dialogflow_v2 as dialogflow
session_entity_types_client = dialogflow.SessionEntityTypesClient()
session_entity_type_name = (
session_entity_types_client.session_entity_type_path(
... | Delete session entity type with the given entity type display name. |
def step(self):
u = self._u
if u is None:
raise ValueError("the levelset function is not set "
"(use set_levelset)")
data = self.data
inside = (u > 0)
outside = (u <= 0)
c0 = data[outside].sum() / float(outside.sum())
c1 = ... | Perform a single step of the morphological Chan-Vese evolution. |
def constant_image_value(image, crs='EPSG:32613', scale=1):
return getinfo(ee.Image(image).reduceRegion(
reducer=ee.Reducer.first(), scale=scale,
geometry=ee.Geometry.Rectangle([0, 0, 10, 10], crs, False))) | Extract the output value from a calculation done with constant images |
def close_threads(self, parent):
logger.debug("Call ThreadManager's 'close_threads'")
if parent is None:
self.pending_threads = []
threadlist = []
for threads in list(self.started_threads.values()):
threadlist += threads
else:
... | Close threads associated with parent_id |
def _config_win32_search(self, search):
search = str(search)
split_char = self._determine_split_char(search)
search_list = search.split(split_char)
for s in search_list:
if not s in self.search:
self.search.add(dns.name.from_text(s)) | Configure a Search registry entry. |
def add_listener(self, name, listener, priority=0):
if name not in self.listeners:
self.listeners[name] = []
self.listeners[name].append((listener, priority))
self.listeners[name].sort(key=lambda listener: listener[1], reverse=True) | Add a new listener to the dispatch |
def inet_pton(address_family, ip_string):
global __inet_pton
if __inet_pton is None:
if hasattr(socket, 'inet_pton'):
__inet_pton = socket.inet_pton
else:
from ospd import win_socket
__inet_pton = win_socket.inet_pton
return __inet_pton(address_family, ip_... | A platform independent version of inet_pton |
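A quick usage sketch for the `inet_pton` wrapper above, shown with an IPv4 documentation address; on platforms with `socket.inet_pton` it simply delegates to the standard library:

```python
import socket

packed = inet_pton(socket.AF_INET, "192.0.2.1")
print(packed)  # b'\xc0\x00\x02\x01'
```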
def coerce(self, value, **kwargs):
result = []
for v in value:
result.append(self._coercion.coerce(v, **kwargs))
return result | Coerces array items with proper coercion. |
def _populate_and_save_user_profile(self):
try:
app_label, class_name = django.conf.settings.AUTH_PROFILE_MODULE.split('.')
profile_model = apps.get_model(app_label, class_name)
profile, created = profile_model.objects.get_or_create(user=self._user)
save_profile =... | Populates a User profile object with fields from the LDAP directory. |
def load_config(filename):
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
pass | Read contents of config file. |
def _export_signatures(meta_graph):
named_signatures = tf_v1.get_collection(_SIGNATURE_COLLECTION)
if not named_signatures:
raise ValueError("No signatures present. Please call hub.add_signature(...)"
"at least once in the module_fn.")
for key, signature in named_signatures:
meta_grap... | Exports signatures from current graph into a MetaGraphDef. |
def read_name(self):
result = ''
off = self.offset
next = -1
first = off
while 1:
len = ord(self.data[off])
off += 1
if len == 0:
break
t = len & 0xC0
if t == 0x00:
result = ''.join((resul... | Reads a domain name from the packet |
def make_describe_attrs(self):
lst = []
if self.all_groups:
lst.append((NEWLINE, '\n'))
lst.append((INDENT, ''))
for group in self.all_groups:
if group.name:
lst.extend(self.tokens.make_describe_attr(group.kls_name))
return ... | Create tokens for setting is_noy_spec on describes |
def make_id(self):
if self.url_id is None:
self.url_id = select([func.coalesce(func.max(self.__class__.url_id + 1), 1)],
self.__class__.parent == self.parent) | Create a new URL id that is unique to the parent container |
def _run_sequence(self, sequence):
self._check_pause()
self._pre_test()
session_data = self.target.get_session_data()
self._test_info()
resp = None
for edge in sequence:
if edge.callback:
edge.callback(self, edge, resp)
session_data... | Run a single sequence |
def from_callable_parameter(cls, parameter):
if parameter.kind == parameter.KEYWORD_ONLY or \
parameter.kind == parameter.POSITIONAL_OR_KEYWORD and parameter.default is not parameter.empty:
if parameter.annotation is not parameter.empty:
constraint = parameter.annotat... | Produces a parameter from a function or method |
def check_dataset(dataset, mode):
names = [x['name'] for x in dataset.schema]
types = [x['type'] for x in dataset.schema]
if mode == 'train':
if (set(['image_url', 'label']) != set(names) or any(t != 'STRING' for t in types)):
raise ValueError('Invalid dataset. Expect only "image_url,label" STRING colum... | Validate we have a good dataset. |
def install_key(self, key_data):
logger.info(("importing repository signing key {0} "
"{1}".format(self.key_info['fingerprint'],
self.key_info['uids'][0])))
import_result = self.gpg.import_keys(key_data)
logger.debug("import results: {0}".fo... | Install untrusted repo signing key |
def to_csc(self):
self._X_train = csc_matrix(self._X_train)
self._X_test = csc_matrix(self._X_test) | Convert Dataset to scipy's Compressed Sparse Column matrix. |
def _is_charge_balanced(struct):
    return sum(s.specie.oxi_state for s in struct.sites) == 0.0 | checks if the structure object is charge balanced |
def create_install_template_skin(self):
ckan_extension_template(self.name, self.target)
self.install_package_develop('ckanext-' + self.name + 'theme') | Create an example ckan extension for this environment and install it |
def reverse_query(self):
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1 | Changes the coordinates as if the query sequence has been reverse complemented |
def serverdir():
path = join(ROOT_DIR, 'server')
path = normpath(path)
if sys.platform == 'cygwin': path = realpath(path)
return path | Get the location of the server subpackage |
def create_form_groupings(self,
vocabularies,
solr_response,
element_group_dict,
sort_order):
element_list = []
for group_name, group_list in element_group_dict.items():
el... | Create a group object from groupings of element objects. |
def _check_metrics_file(self, bam_name, metrics_ext):
base, _ = os.path.splitext(bam_name)
try:
int(base[-1])
can_glob = False
except ValueError:
can_glob = True
check_fname = "{base}{maybe_glob}.{ext}".format(
base=base, maybe_glob="*" if ... | Check for an existing metrics file for the given BAM. |
def __upload(self, resource, bytes):
headers = {
'x-ton-expires': http_time(self.options.get('x-ton-expires', self._DEFAULT_EXPIRE)),
'content-length': str(self._file_size),
'content-type': self.content_type
}
return Request(self._client, 'post', resource,
... | Performs a single chunk upload. |
def sync_release_files(self):
release_files = []
for release in self.releases.values():
release_files.extend(release)
downloaded_files = set()
deferred_exception = None
for release_file in release_files:
try:
downloaded_file = self.download... | Purge + download files returning files removed + added |
def cb(option, value, parser):
arguments = [value]
for arg in parser.rargs:
if arg[0] != "-":
arguments.append(arg)
else:
del parser.rargs[:len(arguments)]
break
if getattr(parser.values, option.dest):
arguments.extend(getattr(parser.values, option... | Callback function to handle variable number of arguments in optparse |
def _create_session(team, auth):
session = requests.Session()
session.hooks.update(dict(
response=partial(_handle_response, team)
))
session.headers.update({
"Content-Type": "application/json",
"Accept": "application/json",
"User-Agent": "quilt-cli/%s (%s %s) %s/%s" % (
... | Creates a session object to be used for `push`, `install`, etc. |
def database_dsn(self):
if not self._config.library.database:
return 'sqlite:///{root}/library.db'.format(root=self._root)
return self._config.library.database.format(root=self._root) | Substitute the root dir into the database DSN, for Sqlite |
def _defaultErrorHandler(varBinds, **context):
errors = context.get('errors')
if errors:
err = errors[-1]
raise err['error'] | Raise exception on any error if user callback is missing |
def make_sa():
configuration = dict(config.items('coilmq'))
engine = engine_from_config(configuration, 'qstore.sqlalchemy.')
init_model(engine)
store = SAQueue()
return store | Factory to creates a SQLAlchemy queue store, pulling config values from the CoilMQ configuration. |
def apply(cls, args, run):
try:
lvl = int(args)
except ValueError:
lvl = args
run.root_logger.setLevel(lvl) | Adjust the loglevel of the root-logger of this run. |
def ellipsize(o):
r = repr(o)
if len(r) < 800:
return r
r = r[:60] + ' ... ' + r[-15:]
return r | Ellipsize the representation of the given object. |
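A worked example for `ellipsize` above: long reprs collapse to the first 60 characters plus the last 15, joined by ' ... ':

```python
s = "x" * 1000
short = ellipsize(s)
print(len(short))  # 80  (60 + len(' ... ') + 15)
```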
def buy_market_order(self, amount, base="btc", quote="usd"):
data = {'amount': amount}
url = self._construct_url("buy/market/", base, quote)
return self._post(url, data=data, return_json=True, version=2) | Order to buy amount of bitcoins for market price. |
def add_class_button_status(self):
if self.class_form.text() and self.radius_form.value() >= 0:
self.add_class_button.setEnabled(True)
else:
self.add_class_button.setEnabled(False) | Function to enable or disable add class button. |
def create_tomodir(self, directory):
pwd = os.getcwd()
if not os.path.isdir(directory):
os.makedirs(directory)
os.chdir(directory)
directories = (
'config',
'exe',
'grid',
'mod',
'mod/pot',
'mod/sens',
... | Create a tomodir subdirectory structure in the given directory |
def thread(self):
log.info('@{}.thread starting'.format(self.__class__.__name__))
thread = threading.Thread(target=thread_wrapper(self.consume), args=())
thread.daemon = True
thread.start() | Start a thread for this consumer. |
def releasers(cls):
return [
HookReleaser,
VersionFileReleaser,
PythonReleaser,
CocoaPodsReleaser,
NPMReleaser,
CReleaser,
ChangelogReleaser,
GitHubReleaser,
GitReleaser,
] | Returns all of the supported releasers. |
def _plt_gogrouped(self, goids, go2color_usr, **kws):
fout_img = self.get_outfile(kws['outfile'], goids)
sections = read_sections(kws['sections'], exclude_ungrouped=True)
grprobj_cur = self._get_grprobj(goids, sections)
grpcolor = GrouperColors(grprobj_cur)
grp_go2color = grpcolo... | Plot grouped GO IDs. |
def parse_lines(log_parsers, fileinp):
while 1:
logentry = fileinp.readline()
if not logentry:
break
elif not logentry.rstrip():
continue
processed = False
for lp in log_parsers:
if lp.grok(logentry):
processed = True
... | parse lines from the fileinput and send them to the log_parsers |
def load_from_file(self, file_path, format=None):
if format is None:
base_name, file_extension = os.path.splitext(file_path)
if file_extension in (".yaml", ".yml"):
format = "yaml"
        elif file_extension in (".json",):
format = "json"
e... | Return a dict loaded from a config file |