code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
# renew the dashboard config self.submission_data["dashboard_config"] = self.dashboard.get_persistent_config() # write the submission data to the output file self._outputs["submission"].dump(self.submission_data, formatter="json", indent=4)
def dump_submission_data(self)
Dumps the current submission data to the submission file.
9.469769
9.254601
1.02325
task = self.task self._outputs = self.output() # create the job dashboard interface self.dashboard = task.create_job_dashboard() or NoJobDashboard() # read submission data and reset some values submitted = not task.ignore_submission and self._outputs["submissio...
def run(self)
Actual run method that starts the processing of jobs and initiates the status polling, or performs job cancelling or cleaning, depending on the task parameters.
5.578483
5.39614
1.033791
task = self.task # get job ids from submission data job_ids = [ d["job_id"] for d in self.submission_data.jobs.values() if d["job_id"] not in (self.submission_data.dummy_job_id, None) ] if not job_ids: return # cancel jobs ...
def cancel(self)
Cancels running jobs. The job ids are read from the submission file which has to exist for obvious reasons.
3.64399
3.469815
1.050197
task = self.task # get job ids from submission data job_ids = [ d["job_id"] for d in self.submission_data.jobs.values() if d["job_id"] not in (self.submission_data.dummy_job_id, None) ] if not job_ids: return # cleanup jobs ...
def cleanup(self)
Cleans up jobs on the remote run location. The job ids are read from the submission file which has to exist for obvious reasons.
3.351878
3.160005
1.060719
task = self.task # create the parent directory self._outputs["submission"].parent.touch() # get all branch indexes and chunk them by tasks_per_job branch_chunks = list(iter_chunks(task.branch_map.keys(), task.tasks_per_job)) # submission output if not ...
def touch_control_outputs(self)
Creates and saves dummy submission and status files. This method is called in case the collection of branch task outputs exists.
3.806023
3.433909
1.108365
# possible events: # - action.submit # - action.cancel # - status.pending # - status.running # - status.finished # - status.retry # - status.failed # forward to dashboard in any event by default return dashboard.publi...
def forward_dashboard_event(self, dashboard, job_data, event, job_num)
Hook to preprocess and publish dashboard events. By default, every event is passed to the dashboard's :py:meth:`law.job.dashboard.BaseJobDashboard.publish` method unchanged.
4.866822
4.366227
1.114652
if mode not in ("r", "w"): raise Exception("unknown mode '{}', use r or w".format(mode)) # get additional arguments skip_copy = kwargs.pop("skip_copy", False) is_tmp = kwargs.pop("is_tmp", mode == "w") if mode == "r": if is_tmp: ...
def localize(self, mode="r", perm=None, parent_perm=None, **kwargs)
localize(mode="r", perm=None, parent_perm=None, skip_copy=False, is_tmp=None, **kwargs)
3.507147
3.229708
1.085902
parser = sub_parsers.add_parser("index", prog="law index", description="Create or update the" " (human-readable) law task index file ({}). This is only required for the shell" " auto-completion.".format(Config.instance().get("core", "index_file"))) parser.add_argument("--modules", "-m", na...
def setup_parser(sub_parsers)
Sets up the command line parser for the *index* subprogram and adds it to *sub_parsers*.
4.221886
4.123981
1.02374
index_file = Config.instance().get_expanded("core", "index_file") # just print the file location? if args.location: print(index_file) return # just remove the index file? if args.remove: if os.path.exists(index_file): os.remove(index_file) print...
def execute(args)
Executes the *index* subprogram with parsed commandline *args*.
2.890278
2.880226
1.00349
params = [] for cls in luigi.task.Config.__subclasses__(): if config_names and cls.__name__ not in config_names: continue for attr in dir(cls): param = getattr(cls, attr) if not isinstance(param, luigi.Parameter): continue fu...
def get_global_parameters(config_names=("core", "scheduler", "worker", "retcode"))
Returns a list of global, luigi-internal configuration parameters. Each list item is a 4-tuple containing the configuration class, the parameter instance, the parameter name, and the full parameter name in the cli. When *config_names* is set, it should be a list of configuration class names that are exclusi...
3.273565
2.434988
1.344386
anchor = os.path.abspath(os.path.expandvars(os.path.expanduser(anchor))) if os.path.exists(anchor) and os.path.isfile(anchor): anchor = os.path.dirname(anchor) return os.path.normpath(os.path.join(anchor, *paths))
def rel_path(anchor, *paths)
Returns a path made of framgment *paths* relativ to an *anchor* path. When *anchor* is a file, its absolute directory is used instead.
1.930197
1.84798
1.04449
home = os.getenv("LAW_HOME", "$HOME/.law") home = os.path.expandvars(os.path.expanduser(home)) return os.path.normpath(os.path.join(home, *paths))
def law_home_path(*paths)
Returns the law home directory (``$LAW_HOME``) that defaults to ``"$HOME/.law"``, optionally joined with *paths*.
2.502676
2.189403
1.143086
sys.stderr.write(" ".join(str(arg) for arg in args) + "\n") if kwargs.get("flush", False): sys.stderr.flush()
def print_err(*args, **kwargs)
print_err(*args, flush=False) Same as *print*, but outputs to stderr. If *flush* is *True*, stderr is flushed after printing.
2.544551
2.17489
1.169967
if msg is not None: if exitcode in (None, 0): print(msg) else: print_err(msg) sys.exit(exitcode)
def abort(msg=None, exitcode=1)
Aborts the process (*sys.exit*) with an *exitcode*. If *msg* is not *None*, it is printed first to stdout if *exitcode* is 0 or *None*, and to stderr otherwise.
2.952052
2.904878
1.016239
try: if not force and not os.isatty(sys.stdout.fileno()): return msg except: return msg color = colors.get(color, colors["default"]) background = backgrounds.get(background, backgrounds["default"]) if not isinstance(style, (tuple, list, set)): style = (styl...
def colored(msg, color=None, background=None, style=None, force=False)
Return the colored version of a string *msg*. For *color*, *background* and *style* options, see https://misc.flogisoft.com/bash/tip_colors_and_formatting. Unless *force* is *True*, the *msg* string is returned unchanged in case the output is not a tty.
2.33936
2.27722
1.027288
choices = _choices = [str(c) for c in choices] if lower: _choices = [c.lower() for c in choices] if default is not None: if default not in choices: raise Exception("default must be one of the choices") hints = [(choice if choice != default else choice + "*") for choice...
def query_choice(msg, choices, default=None, descriptions=None, lower=True)
Interactively query a choice from the prompt until the input matches one of the *choices*. The prompt can be configured using *msg* and *descriptions*, which, if set, must have the same length as *choices*. When *default* is not *None* it must be one of the choices and is used when the input is empty. When ...
2.59811
2.683998
0.968
if not regex: return mode(fnmatch.fnmatch(name, pattern) for pattern in patterns) else: return mode(re.match(pattern, name) for pattern in patterns)
def multi_match(name, patterns, mode=any, regex=False)
Compares *name* to multiple *patterns* and returns *True* in case of at least one match (*mode* = *any*, the default), or in case all patterns matched (*mode* = *all*). Otherwise, *False* is returned. When *regex* is *True*, *re.match* is used instead of *fnmatch.fnmatch*.
2.072473
2.104476
0.984793
return isinstance(obj, (types.GeneratorType, collections.MappingView, six.moves.range, enumerate))
def is_lazy_iterable(obj)
Returns whether *obj* is iterable lazily, such as generators, range objects, etc.
9.744043
8.428419
1.156094
if isinstance(obj, list): return list(obj) elif is_lazy_iterable(obj): return list(obj) elif isinstance(obj, (tuple, set)) and cast: return list(obj) else: return [obj]
def make_list(obj, cast=True)
Converts an object *obj* to a list and returns it. Objects of types *tuple* and *set* are converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new list.
2.391965
2.670531
0.895689
if isinstance(obj, tuple): return tuple(obj) elif is_lazy_iterable(obj): return tuple(obj) elif isinstance(obj, (list, set)) and cast: return tuple(obj) else: return (obj,)
def make_tuple(obj, cast=True)
Converts an object *obj* to a tuple and returns it. Objects of types *list* and *set* are converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new tuple.
2.465888
2.675809
0.921549
if isinstance(struct, dict): return flatten(struct.values()) elif isinstance(struct, (list, tuple, set)) or is_lazy_iterable(struct): objs = [] for obj in struct: objs.extend(flatten(obj)) return objs else: return [struct]
def flatten(struct)
Flattens and returns a complex structured object *struct*.
2.524174
2.659056
0.949274
# get or infer the class cls = kwargs.get("cls", None) if cls is None: for d in dicts: if isinstance(d, dict): cls = d.__class__ break else: raise TypeError("cannot infer cls as none of the passed objects is of type dict") # s...
def merge_dicts(*dicts, **kwargs)
merge_dicts(*dicts, cls=None) Takes multiple *dicts* and returns a single merged dict. The merging takes place in order of the passed dicts and therefore, values of rear objects have precedence in case of field collisions. The class of the returned merged dict is configurable via *cls*. If it is *None*, the...
3.114847
2.734601
1.13905
executable = lambda path: os.path.isfile(path) and os.access(path, os.X_OK) # prog can also be a path dirname, _ = os.path.split(prog) if dirname: if executable(prog): return prog elif "PATH" in os.environ: for search_path in os.environ["PATH"].split(os.pathsep): ...
def which(prog)
Pythonic ``which`` implementation. Returns the path to an executable *prog* by searching in *PATH*, or *None* when it could not be found.
2.187331
2.209584
0.989929
# default callable if not callable(callback): def callback(i): print(msg.format(i + offset)) results = [] for i, obj in enumerate(seq): results.append(func(obj)) do_call = (start and i == 0) or (i + 1) % every == 0 if do_call: callback(i) ...
def map_verbose(func, seq, msg="{}", every=25, start=True, end=True, offset=0, callback=None)
Same as the built-in map function but prints a *msg* after chunks of size *every* iterations. When *start* (*stop*) is *True*, the *msg* is also printed after the first (last) iteration. Note that *msg* is supposed to be a template string that will be formatted with the current iteration number (starting at...
3.101234
4.333978
0.715563
# interpret generators and views as lists if is_lazy_iterable(struct): struct = list(struct) valid_types = tuple() if map_dict: valid_types += (dict,) if isinstance(map_dict, int) and not isinstance(map_dict, bool): map_dict -= 1 if map_list: valid_t...
def map_struct(func, struct, cls=None, map_dict=True, map_list=True, map_tuple=False, map_set=False)
Applies a function *func* to each value of a complex structured object *struct* and returns the output in the same structure. Example: .. code-block:: python struct = {"foo": [123, 456], "bar": [{"1": 1}, {"2": 2}]} def times_two(i): return i * 2 map_struct(struct, times_two) ...
2.293435
2.32635
0.985851
# interpret lazy iterables lists if is_lazy_iterable(struct): struct = list(struct) # when mask is a bool, or struct is not a dict or sequence, apply the mask immediately if isinstance(mask, bool) or not isinstance(struct, (list, tuple, dict)): return struct if mask else replace ...
def mask_struct(mask, struct, replace=no_value)
Masks a complex structured object *struct* with a *mask* and returns the remaining values. When *replace* is set, masked values are replaced with that value instead of being removed. The *mask* can have a complex structure as well. Examples: .. code-block:: python struct = {"a": [1, 2], "b": [3, [...
2.367987
2.402762
0.985527
fileno, path = tempfile.mkstemp(*args, **kwargs) # create the file with open(path, "w") as f: f.write("") # yield it try: yield fileno, path finally: if os.path.exists(path): os.remove(path)
def tmp_file(*args, **kwargs)
Context manager that generates a temporary file, yields the file descriptor number and temporary path, and eventually removes the files. All *args* and *kwargs* are passed to :py:meth:`tempfile.mkstemp`.
2.734378
2.698531
1.013284
kwargs["preexec_fn"] = os.setsid p = subprocess.Popen(*args, **kwargs) try: out, err = p.communicate() except KeyboardInterrupt: os.killpg(os.getpgid(p.pid), signal.SIGTERM) raise if six.PY3: if out is not None: out = out.decode("utf-8") if...
def interruptable_popen(*args, **kwargs)
Shorthand to :py:class:`Popen` followed by :py:meth:`Popen.communicate`. All *args* and *kwargs* are forwatded to the :py:class:`Popen` constructor. The return code, standard output and standard error are returned in a tuple. The call :py:meth:`Popen.communicate` is interruptable by the user.
1.886663
2.035955
0.926672
# force pipes kwargs["stdout"] = subprocess.PIPE kwargs["stderr"] = subprocess.STDOUT p = subprocess.Popen(*args, **kwargs) for line in iter(lambda: p.stdout.readline(), ""): if six.PY3: line = line.decode("utf-8") yield line.rstrip() # yield the process itsel...
def readable_popen(*args, **kwargs)
Shorthand to :py:class:`Popen` which yields the output live line-by-line. All *args* and *kwargs* are forwatded to the :py:class:`Popen` constructor. When EOF is reached, ``communicate()`` is called on the subprocess and it is yielded. Example: .. code-block:: python for line in readable_popen(["s...
3.602046
4.618535
0.779911
return getattr(hashlib, algo)(six.b(str(inp))).hexdigest()[:l]
def create_hash(inp, l=10, algo="sha256")
Takes an input *inp* and creates a hash based on an algorithm *algo*. For valid algorithms, see python's hashlib. *l* corresponds to the maximum length of the returned hash. Internally, the string representation of *inp* is used.
5.349353
7.024752
0.761501
shutil.copy(src, dst) perm = os.stat(dst).st_mode shutil.copystat(src, dst) os.chmod(dst, perm)
def copy_no_perm(src, dst)
Copies a file from *src* to *dst* including meta data except for permission bits.
2.168556
2.377381
0.912162
if not os.path.exists(path): if perm is None: os.makedirs(path) else: umask = os.umask(0) try: os.makedirs(path, perm) finally: os.umask(umask)
def makedirs_perm(path, perm=None)
Recursively creates directory up to *path*. If *perm* is set, the permissions of all newly created directories are set to its value.
1.707811
1.888043
0.90454
if uid is None: uid = os.getuid() path = os.path.expandvars(os.path.expanduser(path)) return os.stat(path).st_uid == uid
def user_owns_file(path, uid=None)
Returns whether a file located at *path* is owned by the user with *uid*. When *uid* is *None*, the user id of the current process is used.
2.332361
2.252729
1.035349
if isinstance(l, six.integer_types): l = six.moves.range(l) if is_lazy_iterable(l): if size < 1: yield list(l) else: chunk = [] for elem in l: if len(chunk) < size: chunk.append(elem) else: ...
def iter_chunks(l, size)
Returns a generator containing chunks of *size* of a list, integer or generator *l*. A *size* smaller than 1 results in no chunking at all.
2.079466
1.936986
1.073558
if n == 0: idx = 0 elif unit: idx = byte_units.index(unit) else: idx = int(math.floor(math.log(abs(n), 1024))) idx = min(idx, len(byte_units)) return n / 1024. ** idx, byte_units[idx]
def human_bytes(n, unit=None)
Takes a number of bytes *n*, assigns the best matching unit and returns the respective number and unit string in a tuple. When *unit* is set, that unit is used. Example: .. code-block:: python human_bytes(3407872) # -> (3.25, "MB") human_bytes(3407872, "kB") # -> (3328.0, "kB"...
2.700724
2.89642
0.932435
secs = float(datetime.timedelta(*args, **kwargs).total_seconds()) parts = [] for unit, mul in time_units: if secs / mul >= 1 or mul == 1: if mul > 1: n = int(math.floor(secs / mul)) secs -= n * mul else: n = round(secs, 1) ...
def human_time_diff(*args, **kwargs)
Returns a human readable time difference. The largest unit is days. All *args* and *kwargs* are passed to ``datetime.timedelta``. Example: .. code-block:: python human_time_diff(seconds=1233) # -> "20 minutes, 33 seconds" human_time_diff(seconds=90001) # -> "1 day, 1 hour, 1 s...
2.443338
2.650469
0.921851
if six.PY3: return isinstance(e, FileExistsError) # noqa: F821 else: return isinstance(e, OSError) and e.errno == 17
def is_file_exists_error(e)
Returns whether the exception *e* was raised due to an already existing file or directory.
2.647515
2.313026
1.144611
try: server = smtplib.SMTP(smtp_host, smtp_port) except Exception as e: logger = logging.getLogger(__name__) logger.warning("cannot create SMTP server: {}".format(e)) return False header = "From: {}\r\nTo: {}\r\nSubject: {}\r\n\r\n".format(sender, recipient, subject) ...
def send_mail(recipient, sender, subject="", content="", smtp_host="127.0.0.1", smtp_port=25)
Lightweight mail functionality. Sends an mail from *sender* to *recipient* with *subject* and *content*. *smtp_host* and *smtp_port* are forwarded to the ``smtplib.SMTP`` constructor. *True* is returned on success, *False* otherwise.
2.05224
2.093166
0.980448
if six.PY3: return open(*args, **kwargs) else: f = io.open(*args, **kwargs) if f.encoding and f.encoding.lower().replace("-", "") == "utf8": write_orig = f.write def write(data, *args, **kwargs): u = unicode # noqa: F821 if...
def open_compat(*args, **kwargs)
Polyfill for python's ``open`` factory, returning the plain ``open`` in python 3, and ``io.open`` in python 2 with a patched ``write`` method that internally handles unicode conversion of its first argument. All *args* and *kwargs* are forwarded.
2.865545
2.597176
1.103331
orig = getattr(obj, attr, no_value) try: setattr(obj, attr, value) yield obj finally: try: if orig is no_value: delattr(obj, attr) else: setattr(obj, attr, orig) except: pass
def patch_object(obj, attr, value)
Context manager that temporarily patches an object *obj* by replacing its attribute *attr* with *value*. The original value is set again when the context is closed.
2.60791
2.397929
1.087567
for consumer in self.consumers: if not getattr(consumer, "closed", False): consumer.flush()
def _flush(self)
Flushes all registered consumer streams.
5.689994
3.850061
1.477897
for consumer in self.consumers: consumer.write(*args, **kwargs)
def _write(self, *args, **kwargs)
Writes to all registered consumer streams, passing *args* and *kwargs*.
5.784438
2.844203
2.033764
if self.filter_fn(*args, **kwargs): self.stream.write(*args, **kwargs)
def _write(self, *args, **kwargs)
Writes to the consumer stream when *filter_fn* evaluates to *True*, passing *args* and *kwargs*.
5.95285
2.776548
2.143975
if self.has_section(section) and self.has_option(section, option): value = self.get(section, option) if isinstance(value, six.string_types): if expandvars: value = os.path.expandvars(value) if expanduser: va...
def get_default(self, section, option, default=None, type=None, expandvars=False, expanduser=False)
Returns the config value defined by *section* and *option*. When either the section or the option does not exist, the *default* value is returned instead. When *type* is set, it must be either `"str"`, `"int"`, `"float"`, or `"boolean"`. When *expandvars* is *True*, environment variables are exp...
1.792666
1.975633
0.907388
kwargs.setdefault("expandvars", True) kwargs.setdefault("expanduser", True) return self.get_default(*args, **kwargs)
def get_expanded(self, *args, **kwargs)
Same as :py:meth:`get_default`, but *expandvars* and *expanduser* arguments are set to *True* by default.
4.616214
2.082675
2.216483
if overwrite is not None: overwrite_sections = overwrite overwrite_options = overwrite for section, _data in six.iteritems(data): if not self.has_section(section): self.add_section(section) elif not overwrite_sections: ...
def update(self, data, overwrite=None, overwrite_sections=True, overwrite_options=True)
Updates the currently stored configuration with new *data*, given as a dictionary. When *overwrite_sections* is *False*, sections in *data* that are already present in the current config are skipped. When *overwrite_options* is *False*, existing options are not overwritten. When *overwrite* is n...
1.91058
1.882665
1.014828
p = self.__class__(filename, skip_defaults=True, skip_fallbacks=True) self.update(p._sections, *args, **kwargs)
def include(self, filename, *args, **kwargs)
Updates the current configc with the config found in *filename*. All *args* and *kwargs* are forwarded to :py:meth:`update`.
8.518606
6.80381
1.252035
return [key for key, _ in self.items(section) if (not prefix or key.startswith(prefix))]
def keys(self, section, prefix=None)
Returns all keys of a *section* in a list. When *prefix* is set, only keys starting with that prefix are returned
4.112992
3.895564
1.055814
prefix = "luigi_" lparser = luigi.configuration.LuigiConfigParser.instance() if push: for section in self.sections(): if not section.startswith(prefix): continue lsection = section[len(prefix):] if not lpa...
def sync_luigi_config(self, push=True, pull=True, expand=True)
Synchronizes sections starting with ``"luigi_"`` with the luigi configuration parser. First, when *push* is *True*, options that exist in law but **not** in luigi are stored as defaults in the luigi config. Then, when *pull* is *True*, all luigi-related options in the law config are overwritten ...
1.82145
1.788811
1.018246
# test import import telegram # noqa: F401 cfg = Config.instance() # get default token and chat if not token: token = cfg.get_expanded("notifications", "telegram_token") if not chat: chat = cfg.get_expanded("notifications", "telegram_chat") if not token or not chat: ...
def notify_telegram(title, content, token=None, chat=None, mention_user=None, **kwargs)
Sends a telegram notification and returns *True* on success. The communication with the telegram API might have some delays and is therefore handled by a thread.
3.311171
3.233746
1.023943
if self.task.local_workflow_require_branches: return self._has_run else: return super(LocalWorkflowProxy, self).complete()
def complete(self)
When *local_workflow_require_branches* of the task was set to *True*, returns whether the :py:meth:`run` method has been called before. Otherwise, the call is forwarded to the super class.
15.57869
3.668351
4.246783
if not self._has_yielded and not self.task.local_workflow_require_branches: self._has_yielded = True yield list(self.task.get_branch_tasks().values()) self._has_run = True
def run(self)
When *local_workflow_require_branches* of the task was set to *False*, starts all branch tasks via dynamic dependencies by yielding them in a list, or simply does nothing otherwise.
11.358304
4.178712
2.718135
def wrapper(decorator): @functools.wraps(decorator) def wrapper(fn=None, **opts): _opts = default_opts.copy() _opts.update(opts) def wrapper(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): return de...
def factory(**default_opts)
Factory function to create decorators for tasks' run methods. Default options for the decorator function can be given in *default_opts*. The returned decorator can be used with or without function invocation. Example: .. code-block:: python @factory(digits=2) def runtime(fn, opts, task, *a...
2.25539
2.367523
0.952637
_task = get_task(task) log = get_param(_task.log_file, _task.default_log_file) if log == "-" or not log: return fn(task, *args, **kwargs) else: # use the local target functionality to create the parent directory LocalFileTarget(log).parent.touch() with open_compat(l...
def log(fn, opts, task, *args, **kwargs)
log() Wraps a bound method of a task and redirects output of both stdout and stderr to the file defined by the tasks's *log_file* parameter or *default_log_file* attribute. If its value is ``"-"`` or *None*, the output is not redirected.
3.742958
3.393152
1.103092
try: return fn(task, *args, **kwargs) except Exception as e: if opts["skip"] is None or not isinstance(e, opts["skip"]): for outp in luigi.task.flatten(task.output()): outp.remove() raise
def safe_output(fn, opts, task, *args, **kwargs)
safe_output(skip=None) Wraps a bound method of a task and guards its execution. If an exception occurs, and it is not an instance of *skip*, the task's output is removed prior to the actual raising.
4.036779
2.990169
1.350017
if opts["stddev"] <= 0: t = opts["t"] elif opts["pdf"] == "gauss": t = random.gauss(opts["t"], opts["stddev"]) elif opts["pdf"] == "uniform": t = random.uniform(opts["t"], opts["stddev"]) else: raise ValueError("unknown delay decorator pdf '{}'".format(opts["pdf"])) ...
def delay(fn, opts, task, *args, **kwargs)
delay(t=5, stddev=0., pdf="gauss") Wraps a bound method of a task and delays its execution by *t* seconds.
3.201557
2.272633
1.408744
_task = get_task(task) # get notification transports transports = [] for param_name, param in _task.get_params(): if isinstance(param, NotifyParameter) and getattr(_task, param_name): try: transport = param.get_transport() if transport: ...
def notify(fn, opts, task, *args, **kwargs)
notify(on_success=True, on_failure=True, **kwargs) Wraps a bound method of a task and guards its execution. Information about the execution (task name, duration, etc) is collected and dispatched to all notification transports registered on wrapped task via adding :py:class:`law.NotifyParameter` parameters. ...
3.568303
3.458509
1.031746
start_time = time.time() try: return fn(task, *args, **kwargs) finally: duration = human_time_diff(seconds=round(time.time() - start_time, 1)) # log timeit_logger = logger.getChild("timeit") timeit_logger.info("runtime of {}: {}".format(task.task_id, duration)) ...
def timeit(fn, opts, task, *args, **kwargs)
Wraps a bound method of a task and logs its execution time in a human readable format. Logs in info mode. When *publish_message* is *True*, the duration is also published as a task message to the scheduler.
3.776324
3.093481
1.220736
out = _voms_proxy_info(["--identity"])[1].strip() try: return re.match(r".*\/CN\=([^\/]+).*", out.strip()).group(1) except: raise Exception("no valid identity found in voms proxy: {}".format(out))
def get_voms_proxy_user()
Returns the owner of the voms proxy.
6.390141
5.964752
1.071317
valid = _voms_proxy_info(["--exists"], silent=True)[0] == 0 if log and not valid: logger.warning("no valid voms proxy found") return valid
def check_voms_proxy_validity(log=False)
Returns *True* when a valid voms proxy exists, *False* otherwise. When *log* is *True*, a warning will be logged.
6.371104
5.813046
1.096001
with tmp_file() as (_, tmp): with open(tmp, "w") as f: f.write(passwd) cmd = "cat '{}' | voms-proxy-init --valid '{}'".format(tmp, lifetime) if vo: cmd += " -voms '{}'".format(vo) code, out, _ = interruptable_popen(cmd, shell=True, executable="/bin/bash"...
def renew_voms_proxy(passwd="", vo=None, lifetime="196:00")
Renews the voms proxy using a password *passwd*, an optional virtual organization name *vo*, and a default *lifetime* of 8 days. The password is written to a temporary file first and piped into the renewal commad to ensure it is not visible in the process list.
3.192312
3.255139
0.980699
# get the proxy file proxy_file = get_voms_proxy_file() if not os.path.exists(proxy_file): raise Exception("proxy file '{}' does not exist".format(proxy_file)) if cache: if isinstance(cache, six.string_types): cache_file = cache else: cache_file = pr...
def delegate_voms_proxy_glite(endpoint, stdout=None, stderr=None, cache=True)
Delegates the voms proxy via gLite to an *endpoint*, e.g. ``grid-ce.physik.rwth-aachen.de:8443``. *stdout* and *stderr* are passed to the *Popen* constructor for executing the ``glite-ce-delegate-proxy`` command. When *cache* is *True*, a json file is created alongside the proxy file, which stores the deleg...
2.346751
2.187577
1.072763
formatter = FormatterRegister.formatters.get(name) if formatter or silent: return formatter else: raise Exception("cannot find formatter '{}'".format(name))
def get_formatter(name, silent=False)
Returns the formatter class whose name attribute is *name*. When no class could be found and *silent* is *True*, *None* is returned. Otherwise, an exception is raised.
4.518277
4.659156
0.969763
formatters = [f for f in six.itervalues(FormatterRegister.formatters) if f.accepts(path)] if formatters or silent: return formatters else: raise Exception("cannot find formatter for path '{}'".format(path))
def find_formatters(path, silent=True)
Returns a list of formatter classes which would accept the file given by *path*. When no classes could be found and *silent* is *True*, an empty list is returned. Otherwise, an exception is raised.
3.979155
3.800093
1.047121
if name == AUTO_FORMATTER: return find_formatters(path, silent=False)[0] else: return get_formatter(name, silent=False)
def find_formatter(name, path)
Returns the formatter class whose name attribute is *name* when *name* is not *AUTO_FORMATTER*. Otherwise, the first formatter that accepts *path* is returned. Internally, this method simply uses :py:func:`get_formatter` or :py:func:`find_formatters` depending on the value of *name*.
4.873771
3.326536
1.465119
encoded = base64.b64encode(six.b(" ".join(str(v) for v in value) or "-")) return encoded.decode("utf-8") if six.PY3 else encoded
def encode_list(cls, value)
Encodes a list *value* into a string via base64 encoding.
4.005278
3.575444
1.120218
return [ self.task_cls.__module__, self.task_cls.__name__, self.encode_list(self.task_params), self.encode_list(self.branches), self.encode_bool(self.auto_retry), self.encode_list(self.dashboard_data), ]
def get_args(self)
Returns the list of encoded job arguments. The order of this list corresponds to the arguments expected by the job wrapper script.
5.275692
4.656079
1.133076
for pkg in flatten(packages): if pkg in loaded_packages: logger.debug("skip contrib package '{}', already loaded".format(pkg)) continue loaded_packages.append(pkg) mod = __import__("law.contrib.{}".format(pkg), globals(), locals(), [pkg]) logger.debug("l...
def load(*packages)
Loads contrib *packages* and adds members exposed in ``__all__`` to the law main module. Example: .. code-block:: python import law law.contrib.load("numpy") print(law.NumpyFormatter) # -> <class 'law.contrib.numpy.formatter.NumpyFormatter'> It is ensured that packages ar...
2.840257
2.670952
1.063388
cfg = Config.instance() if not recipient: recipient = cfg.get_expanded("notifications", "mail_recipient") if not sender: sender = cfg.get_expanded("notifications", "mail_sender") if not smtp_host: smtp_host = cfg.get_expanded("notifications", "mail_smtp_host") if not sm...
def notify_mail(title, message, recipient=None, sender=None, smtp_host=None, smtp_port=None, **kwargs)
Mail notification method taking a *title* and a string *message*. *recipient*, *sender*, *smtp_host* and *smtp_port* default to the configuration values in the [notifications] section.
1.94847
1.914434
1.017779
global _patched if _patched: return _patched = True patch_default_retcodes() patch_worker_run_task() patch_worker_factory() patch_keepalive_run() patch_cmdline_parser() logger.debug("applied law-specific luigi patches")
def patch_all()
Runs all patches. This function ensures that a second invocation has no effect.
11.502361
10.961604
1.049332
def patch_default_retcodes():
    """
    Sets the default luigi return codes in ``luigi.retcodes.retcode`` to:

    - already_running: 10
    - missing_data: 20
    - not_run: 30
    - task_failed: 40
    - scheduling_error: 50
    - unhandled_exception: 60
    """
    import luigi.retcodes

    retcode = luigi.retcodes.retcode

    # apply the new defaults in a data-driven way
    defaults = [
        ("already_running", 10),
        ("missing_data", 20),
        ("not_run", 30),
        ("task_failed", 40),
        ("scheduling_error", 50),
        ("unhandled_exception", 60),
    ]
    for name, value in defaults:
        getattr(retcode, name)._default = value
3.558859
1.731402
2.055478
_run_task = luigi.worker.Worker._run_task def run_task(self, task_id): task = self._scheduled_tasks[task_id] task._worker_id = self._id task._worker_task = self._first_task try: _run_task(self, task_id) finally: task._worker_id = None ...
def patch_worker_run_task()
Patches the ``luigi.worker.Worker._run_task`` method to store the worker id and the id of its first task in the task. This information is required by the sandboxing mechanism.
4.500735
4.022065
1.119011
def create_worker(self, scheduler, worker_processes, assistant=False): worker = luigi.worker.Worker(scheduler=scheduler, worker_processes=worker_processes, assistant=assistant, worker_id=os.getenv("LAW_SANDBOX_WORKER_ID")) worker._first_task = os.getenv("LAW_SANDBOX_WORKER_TASK") ...
def patch_worker_factory()
Patches the ``luigi.interface._WorkerSchedulerFactory`` to include sandboxing information when creating a worker instance.
4.750007
3.634975
1.306751
def patch_keepalive_run():
    """
    Patches the ``luigi.worker.KeepAliveThread.run`` to immediately stop the keep-alive
    thread when running within a sandbox.
    """
    # keep a reference to the unpatched method
    _orig_run = luigi.worker.KeepAliveThread.run

    def run(self):
        # the keep-alive loop must not run when sandboxed
        sandboxed = os.getenv("LAW_SANDBOX_SWITCHED") == "1"
        if sandboxed:
            self.stop()
        else:
            _orig_run(self)

    luigi.worker.KeepAliveThread.run = run
6.376058
4.328652
1.472989
def patch_cmdline_parser():
    """
    Patches the ``luigi.cmdline_parser.CmdlineParser`` to store the original command line
    arguments for later processing in the :py:class:`law.config.Config`.
    """
    # keep a reference to the unpatched constructor
    _orig_init = luigi.cmdline_parser.CmdlineParser.__init__

    def __init__(self, cmdline_args):
        # run the original constructor first, then remember the raw arguments
        _orig_init(self, cmdline_args)
        self.cmdline_args = cmdline_args

    luigi.cmdline_parser.CmdlineParser.__init__ = __init__
3.314648
3.181491
1.041854
# test import import slackclient # noqa: F401 cfg = Config.instance() # get default token and channel if not token: token = cfg.get_expanded("notifications", "slack_token") if not channel: channel = cfg.get_expanded("notifications", "slack_channel") if not token or n...
def notify_slack(title, content, attachment_color="#4bb543", short_threshold=40, token=None, channel=None, mention_user=None, **kwargs)
Sends a slack notification and returns *True* on success. The communication with the slack API might have some delays and is therefore handled by a thread. The format of the notification depends on *content*. If it is a string, a simple text notification is sent. Otherwise, it should be a dictionary whose f...
3.192994
3.165342
1.008736
def maybe_wait(func):
    """
    Wrapper around run() methods that reads the *slow* flag to decide whether to wait
    some seconds for illustrative purposes. This is very straight forward, so no need for
    functools.wraps here.
    """
    def wrapper(self, *args, **kwargs):
        # pick a random delay only when the slow flag is set
        delay = random.randint(5, 15) if self.slow else 0
        if delay:
            time.sleep(delay)
        return func(self, *args, **kwargs)

    return wrapper
2.730939
2.010565
1.358294
query_data = {} for line in out.strip().split("\n"): parts = line.split() if len(parts) < 6: continue job_id = parts[0] status_flag = parts[2] # map the status status = cls.map_status(status_flag) ...
def parse_query_output(cls, out)
Example output to parse: 141914132 user_name DONE queue_name exec_host b63cee711a job_name Feb 8 14:54
3.252936
3.273132
0.99383
global console_handler # make sure logging is setup only once if console_handler: return # set the handler of the law root logger console_handler = logging.StreamHandler() console_handler.setFormatter(LogFormatter()) logging.getLogger("law").addHandler(console_handler) # ...
def setup_logging()
Sets up the internal logging mechanism, i.e., it creates the :py:attr:`console_handler`, sets its formatting, and mounts it on the main ``"law"`` logger. It also sets the levels of all loggers that are given in the law config.
3.741879
2.948139
1.269234
# just print the file location? if args.location: print(Config.instance().config_file) return # every option below requires the name to be set if not args.name: abort("please give the name of the config in the format <section>[.<option>]") # removal if args.remove:...
def execute(args)
Executes the *config* subprogram with parsed commandline *args*.
6.494103
6.742909
0.963101
def get_config(name, expand=False):
    """
    Returns the config value that corresponds to *name*, which must have the format
    ``<section>[.<option>]``. When an option is given and *expand* is *True*, variables
    are expanded in the returned value.
    """
    cfg = Config.instance()

    # when only a section is given, return all of its keys, one per line
    if "." not in name:
        return "\n".join(cfg.keys(name))

    section, option = name.split(".", 1)
    getter = cfg.get_expanded if expand else cfg.get
    return getter(section, option)
5.239452
5.121593
1.023012
parser = sub_parsers.add_parser("run", prog="law run", description="Run a task with" " configurable parameters. See http://luigi.rtfd.io/en/stable/running_luigi.html for more" " info.") parser.add_argument("task_family", help="a task family registered in the task database file or" ...
def setup_parser(sub_parsers)
Sets up the command line parser for the *run* subprogram and adds it to *sub_parsers*.
6.617741
6.417169
1.031256
task_family = None error = None # try to infer the task module from the passed task family and import it parts = args.task_family.rsplit(".", 1) if len(parts) == 2: modid, cls_name = parts try: mod = __import__(modid, globals(), locals(), [cls_name]) if ...
def execute(args)
Executes the *run* subprogram with parsed commandline *args*.
3.249768
3.24475
1.001547
# read task information from the index file given a task family if index_file is None: index_file = Config.instance().get_expanded("core", "index_file") # open and go through lines with open(index_file, "r") as f: for line in f.readlines(): line = line.strip() ...
def read_task_from_index(task_family, index_file=None)
Returns module id, task family and space-separated parameters in a tuple for a task given by *task_family* from the *index_file*. When *None*, the *index_file* refers to the default as defined in :py:mod:`law.config`. Returns *None* when the task could not be found.
3.910975
3.581975
1.091849
# setup the main parser and sub parsers parser = ArgumentParser(prog="law", description="The law command line tool.") sub_parsers = parser.add_subparsers(help="subcommands", dest="command") # add main arguments parser.add_argument("--version", "-V", action="version", version=law.__version__) ...
def run()
Entry point to the law cli. Sets up all parsers, parses all arguments, and executes the requested subprogram.
3.1058
2.848398
1.090367
@functools.wraps(func) def wrapper(self, job_data, event, job_num, *args, **kwargs): job_id = job_data["job_id"] dashboard_status = self.map_status(job_data.get("status"), event) # nothing to do when the status is invalid or did not change if not dashboard_status or self._l...
def cache_by_status(func)
Decorator for :py:meth:`BaseJobDashboard.publish` (and inheriting classes) that caches the last published status to decide if a new publication is necessary or not. When the status did not change since the last call, the actual publish method is not invoked and *None* is returned.
2.998933
2.919059
1.027363
parser = sub_parsers.add_parser("software", prog="law software", description="Create or update" " the law software cache ({}). This is only required for some sandboxes that need to" " forward software into containers.".format(get_sw_dir())) parser.add_argument("--remove", "-r", action="sto...
def setup_parser(sub_parsers)
Sets up the command line parser for the *software* subprogram and adds it to *sub_parsers*.
6.043494
5.604253
1.078376
def execute(args):
    """
    Executes the *software* subprogram with parsed commandline *args*.
    """
    sw_dir = get_sw_dir()

    if args.location:
        # only print the cache location
        print(sw_dir)
    elif args.remove:
        # only remove the current software cache
        remove_software_cache(sw_dir)
    else:
        # (re)build the software cache
        build_software_cache(sw_dir)
4.521192
4.344383
1.040698
# ensure the cache is empty sw_dir = get_sw_dir(sw_dir) remove_software_cache(sw_dir) os.makedirs(sw_dir) # reload dependencies to find the proper module paths reload_dependencies(force=True) for mod in deps: path = os.path.dirname(mod.__file__) name, ext = os.path.spl...
def build_software_cache(sw_dir=None)
Builds up the software cache directory at *sw_dir* by simply copying all required python modules. *sw_dir* is evaluated with :py:func:`get_sw_dir`.
3.213517
3.121431
1.029501
def remove_software_cache(sw_dir=None):
    """
    Removes the software cache directory at *sw_dir* which is evaluated with
    :py:func:`get_sw_dir`.
    """
    path = get_sw_dir(sw_dir)

    # nothing to do when the cache does not exist
    if not os.path.exists(path):
        return

    shutil.rmtree(path)
2.222472
1.984263
1.120049
def reload_dependencies(force=False):
    """
    Reloads all python modules that law depends on. Currently, this is just *luigi* and
    *six*. Unless *force* is *True*, multiple calls to this function will not have any
    effect.
    """
    global _reloaded_deps

    # skip subsequent invocations unless forced
    if not force and _reloaded_deps:
        return
    _reloaded_deps = True

    for mod in deps:
        six.moves.reload_module(mod)
        logger.debug("reloaded module '{}'".format(mod))
3.820989
3.51325
1.087594
def use_software_cache(sw_dir=None, reload_deps=False):
    """
    Adjusts ``sys.path`` so that the cached software at *sw_dir* is used. *sw_dir* is
    evaluated with :py:func:`get_sw_dir`. When *reload_deps* is *True*,
    :py:func:`reload_dependencies` is invoked.
    """
    path = get_sw_dir(sw_dir)

    # do nothing when the cache does not exist
    if not os.path.exists(path):
        return

    # prepend the cache right after the script directory entry
    sys.path.insert(1, path)

    if reload_deps:
        reload_dependencies()
2.906712
2.971077
0.978336
def get_sw_dir(sw_dir=None):
    """
    Returns the software directory defined in the ``core.software_dir`` config. When
    *sw_dir* is not *None*, it is expanded and returned instead.
    """
    # fall back to the configured value
    path = sw_dir if sw_dir is not None else Config.instance().get("core", "software_dir")

    # expand environment variables and the user home
    return os.path.expandvars(os.path.expanduser(path))
2.981194
2.683127
1.111089
global guarded_tfile_cls if not guarded_tfile_cls: import ROOT class GuardedTFile(ROOT.TFile): def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): if self.IsOpen(): self.Close() ...
def GuardedTFile(*args, **kwargs)
Factory function that lazily creates the guarded TFile class, and creates and returns an instance with all passed *args* and *kwargs*. This is required as we do not want to import ROOT in the global scope.
2.0948
1.918047
1.092153
def workflow_property(func):
    """
    Decorator to declare a property that is stored only on a workflow but makes it also
    accessible from branch tasks. The lookup is always delegated to the workflow instance
    obtained via ``as_workflow()``, so branch tasks and the workflow see the same value.
    """
    @functools.wraps(func)
    def getter(self):
        # always resolve through the workflow so all branches share the value
        return func(self.as_workflow())

    return property(getter)
3.319103
6.313466
0.525718
def wrapper(func): _attr = attr or "_workflow_cached_" + func.__name__ @functools.wraps(func) def getter(self): wf = self.as_workflow() if not hasattr(wf, _attr): setattr(wf, _attr, func(wf)) return getattr(wf, _attr) _setter...
def cached_workflow_property(func=None, attr=None, setter=True)
Decorator to declare an attribute that is stored only on a workflow and also cached for subsequent calls. Therefore, the decorated method is expected to (lazily) provide the value to cache. The resulting value is stored as ``_workflow_cached_<func.__name__>`` on the workflow, which can be overwritten by set...
2.120768
2.432997
0.871669
def complete(self):
    """
    Custom completion check that invokes the task's *workflow_complete* if it is
    callable, or just does the default completion check otherwise.
    """
    workflow_complete = self.task.workflow_complete
    if callable(workflow_complete):
        return workflow_complete()
    # fall back to the default completion check
    return super(BaseWorkflowProxy, self).complete()
5.955139
2.985456
1.994717
def requires(self):
    """
    Returns the default workflow requirements in an ordered dictionary, which is updated
    with the return value of the task's *workflow_requires* method.
    """
    # preserve the insertion order of the task-defined requirements
    return OrderedDict(self.task.workflow_requires())
11.437043
3.805487
3.005409
if self.task.target_collection_cls is not None: cls = self.task.target_collection_cls elif self.task.outputs_siblings: cls = SiblingFileCollection else: cls = TargetCollection targets = luigi.task.getpaths(self.task.get_branch_tasks()) ...
def output(self)
Returns the default workflow outputs in an ordered dictionary. At the moment this is just the collection of outputs of the branch tasks, stored with the key ``"collection"``.
7.837573
5.627209
1.392799
def threshold(self, n=None):
    """
    Returns the threshold number of tasks that need to be complete in order to consider
    the workflow as being complete itself, based on the workflow's *acceptance* value.
    When *n* is *None*, it defaults to the number of entries in the branch map. An
    acceptance of at most 1 is interpreted as a fraction of *n*, larger values as an
    absolute count.
    """
    task = self.task
    if n is None:
        n = len(task.branch_map())

    acceptance = task.acceptance
    # fractional acceptance scales with n, absolute acceptance is used as-is
    return acceptance * n if acceptance <= 1 else acceptance
9.969885
4.898302
2.035376