code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def ignore_logger(name_or_logger, allow_level=None):
    """Ignore a logger during breadcrumb recording.

    :param name_or_logger: logger name or logger instance to ignore
    :param allow_level: if given, records at or above this level are not
        swallowed by the handler (presumably they keep being recorded —
        depends on `register_special_log_handler` semantics)
    """
    def handler(logger, level, msg, args, kwargs):
        # Swallow the record unless it reaches the allowed level.
        allowed = allow_level is not None and level >= allow_level
        return not allowed

    register_special_log_handler(name_or_logger, handler)
Ignores a logger during breadcrumb recording.
def _rm_gos_edges_rel(self, rm_goids, edges_rel): edges_ret = {} for rname, edges_cur in edges_rel.items(): edges_new = self._rm_gos_edges(rm_goids, edges_cur) if edges_new: edges_ret[rname] = edges_new return edges_ret
Remove any relationships that contain user-specified edges.
def _recursive_cleanup(foo): if isinstance(foo, dict): for (key, val) in list(foo.items()): if isinstance(val, dict): _recursive_cleanup(val) if val == "" or val == [] or val == {}: del foo[key]
Aggressively cleans up things that look empty.
def drange(v0, v1, d):
    """Return a discrete range covering [v0, v1] in integer steps of ``d``.

    :param v0: lower bound; must be strictly less than ``v1``
    :param v1: upper bound
    :param d: quantization step applied to both bounds
    :returns: a lazy ``range`` from ``int(v0) // d`` up to (excluding)
        ``int(v1 + d) // d``
    :raises AssertionError: if ``v0 >= v1``
    """
    assert v0 < v1
    # ``range`` replaces the Python-2-only ``xrange``; both are lazy.
    return range(int(v0) // d, int(v1 + d) // d)
Returns a discrete range.
def _check_valid_basic(self, get_params): try: if get_params(self.variable): return self.default except: pass return not self.default
Simple check that the variable is set
def cmd_terrain_check(self, args): if len(args) >= 2: latlon = (float(args[0]), float(args[1])) else: try: latlon = self.module('map').click_position except Exception: print("No map available") return if latl...
check a piece of terrain data
def merge_dict(dict_1, *other, **kw):
    """Merge two or more dicts, plus keyword arguments, into a new dict.

    Later sources win on key collisions; ``kw`` wins over everything.
    None of the inputs are mutated.
    """
    merged = dict_1.copy()
    for source in other + (kw,):
        merged.update(source)
    return merged
Merge two or more dict including kw into result dict.
def create_app(): global QT_APP QT_APP = QApplication.instance() if QT_APP is None: QT_APP = QApplication(sys.argv) return QT_APP
Create a Qt application.
def reverse(self): clone = self._clone() assert self._ordering, "You need to set an ordering for reverse" ordering = [] for order in self._ordering: for k,v in order.items(): if v=="asc": ordering.append({k: "desc"}) else: ...
Reverses the ordering of the QuerySet.
def str_append_hash(*args):
    """Convert each argument to a lowercase string, concatenate, then hash.

    :returns: ``hash()`` of the concatenated lowercase string
    """
    # ''.join avoids the quadratic cost of repeated string +=.
    return hash(''.join(str(arg).lower() for arg in args))
Convert each argument to a lowercase string, concatenate them, then hash the result.
def divide_url(self, url): if 'https://' in url: host = url[8:].split('/')[0] path = url[8 + len(host):] elif 'http://' in url: host = url[7:].split('/')[0] path = url[7 + len(host):] else: host = url.split('/')[0] path = ur...
divide url into host and path two parts
def _pnorm_default(x, p): return np.linalg.norm(x.data.ravel(), ord=p)
Default p-norm implementation.
def remove(self): print 'remove' if self.exists(): print 'cleaning', self.venv run('rm -rf {}'.format(self.venv))
Remove the virtual environment completely
def inherit_doc(cls): for name, func in vars(cls).items(): if name.startswith("_"): continue if not func.__doc__: for parent in cls.__bases__: parent_func = getattr(parent, name, None) if parent_func and getattr(parent_func, "__doc__", None): ...
A decorator that makes a class inherit documentation from its parents.
def execd_run(command, execd_dir=None, die_on_error=True, stderr=subprocess.STDOUT): for submodule_path in execd_submodule_paths(command, execd_dir): try: subprocess.check_output(submodule_path, stderr=stderr, universal_newlines=True) except subprocess...
Run command for each module within execd_dir which defines it.
def create_inquirer_layout( ic: InquirerControl, get_prompt_tokens: Callable[[], List[Tuple[Text, Text]]], **kwargs) -> Layout: ps = PromptSession(get_prompt_tokens, reserve_space_for_menu=0, **kwargs) _fix_unecessary_blank_lines(ps) return Layout(HSplit([ ps.layout.container...
Create a layout combining question and inquirer selection.
def pol2cart(theta, rho):
    """Convert polar coordinates ``(theta, rho)`` to Cartesian ``(x, y)``."""
    return rho * np.cos(theta), rho * np.sin(theta)
Polar to Cartesian coordinates conversion.
def next(self): line = self.filehandle.readline() line = line.decode('utf-8', 'replace') if line == '': raise StopIteration line = line.rstrip('\n') le = LogEvent(line) if self._datetime_format and self._datetime_nextpos is not None: ret = le.set_d...
Get next line, adjust for year rollover and hint datetime format.
def onClose(self, was_clean, code, reason): logger.debug("Connection closed ({peer})".format(peer=self.peer)) self.factory.mease.publisher.publish( message_type=ON_CLOSE, client_id=self._client_id, client_storage=self.storage) self.factory.remove_client(self)
Called when a client closes a websocket connection
def generic_html(self, result, errors): h1 = htmlize(type(result)) out = [] result = pre_process_json(result) if not hasattr(result, 'items'): header = "<tr><th>Value</th></tr>" if type(result) is list: result = htmlize_list(result) els...
Try to display any object in sensible HTML.
def _add_cpu_percent(self, cur_read): for executor_id, cur_data in cur_read.items(): stats = cur_data['statistics'] cpus_limit = stats.get('cpus_limit') cpus_utilisation = stats.get('cpus_utilisation') if cpus_utilisation and cpus_limit != 0: stats...
Compute CPU percent based on the provided utilisation.
def build_archive(cls, **kwargs):
    """Return the singleton `JobArchive` instance, building it if needed.

    The first call constructs ``cls(**kwargs)`` and memoizes it on the
    class; later calls return the cached instance and ignore ``kwargs``.
    """
    archive = cls._archive
    if archive is None:
        archive = cls(**kwargs)
        cls._archive = archive
    return archive
Return the singleton `JobArchive` instance, building it if needed
def version_from_frame(frame): module = getmodule(frame) if module is None: s = "<unknown from {0}:{1}>" return s.format(frame.f_code.co_filename, frame.f_lineno) module_name = module.__name__ variable = "AUTOVERSION_{}".format(module_name.upper()) override = os.environ.get(variable,...
Given a ``frame``, obtain the version number of the module running there.
def strip_spaces(value, sep=None, join=True):
    """Strip outer whitespace and normalize inner separators.

    Splits ``value`` on ``sep`` (runs of whitespace when None), strips
    each piece, and rejoins with ``sep`` (or a single space). When
    ``join`` is False the list of stripped pieces is returned instead.
    """
    pieces = [piece.strip() for piece in value.strip().split(sep)]
    if not join:
        return pieces
    return (sep or ' ').join(pieces)
Cleans leading and trailing whitespace and also collapses runs of whitespace into a single separator.
def from_dbus_fact(fact): return Fact(fact[4], start_time = dt.datetime.utcfromtimestamp(fact[1]), end_time = dt.datetime.utcfromtimestamp(fact[2]) if fact[2] else None, description = fact[3], activity_id = fact[5], category = fact[6],...
unpack the struct into a proper dict
def _linux_memdata(): grains = {'mem_total': 0, 'swap_total': 0} meminfo = '/proc/meminfo' if os.path.isfile(meminfo): with salt.utils.files.fopen(meminfo, 'r') as ifile: for line in ifile: comps = line.rstrip('\n').split(':') if not len(comps) > 1: ...
Return the memory information for Linux-like systems
def transform_dot(self, node, results): module_dot = results.get("bare_with_attr") member = results.get("member") new_name = None if isinstance(member, list): member = member[0] for change in MAPPING[module_dot.value]: if member.value in change[1]: ...
Transform for calls to module members in code.
def allow_unregister(self, plugin_override=True): vals = self._hook_manager.call_hook('course_allow_unregister', course=self, default=self._allow_unregister) return vals[0] if len(vals) and plugin_override else self._allow_unregister
Returns True if students can unregister from course
def make_json_formatter(graph): return { "()": graph.config.logging.json_formatter.formatter, "fmt": graph.config.logging.json_required_keys, }
Create the default json formatter.
def whitelist(ctx, whitelist_account, account): account = Account(account, blockchain_instance=ctx.blockchain) print_tx(account.whitelist(whitelist_account))
Add an account to a whitelist
def hyperparameters(self): hyperparameters = super(Chainer, self).hyperparameters() additional_hyperparameters = {Chainer._use_mpi: self.use_mpi, Chainer._num_processes: self.num_processes, Chainer._process_slots_per_host: self....
Return hyperparameters used by your custom Chainer code during training.
def teardown(self): if self.controller: self.controller.teardown() for monitor in self.monitors: monitor.teardown()
Clean up the target once all tests are completed
def _is_tp(pkt): tp = [SOMEIP.TYPE_TP_REQUEST, SOMEIP.TYPE_TP_REQUEST_NO_RET, SOMEIP.TYPE_TP_NOTIFICATION, SOMEIP.TYPE_TP_RESPONSE, SOMEIP.TYPE_TP_ERROR] if isinstance(pkt, Packet): return pkt.msg_type in tp else: return pkt[15] in tp
Returns true if pkt is using SOMEIP-TP, else returns false.
def _prepare_uimodules(self): for key, value in self._config.get(config.UI_MODULES, {}).iteritems(): self._config[config.UI_MODULES][key] = self._import_class(value) self._config[config.UI_MODULES] = dict(self._config[config.UI_MODULES] or {})
Prepare the UI Modules from a list of namespaced paths.
def _get_firewall_rules(firewall_rules): ret = [] for key, value in six.iteritems(firewall_rules): if 'protocol' not in firewall_rules[key].keys(): raise SaltCloudConfigError( 'The firewall rule \'{0}\' is missing \'protocol\''.format(key) ) ret.append(Fir...
Construct a list of optional firewall rules from the cloud profile.
def _get_data_from_empty_list(source, fields='*', first_row=0, count=-1, schema=None): fields = get_field_list(fields, schema) return {'cols': _get_cols(fields, schema), 'rows': []}, 0
Helper function for _get_data that handles empty lists.
def log_error(self, callback, error=None):
    """Print the error that occurred when running the given callback."""
    lines = (
        "Uncaught error during callback: {}".format(callback),
        "Error: {}".format(error),
    )
    for line in lines:
        print(line)
Log the error that occurred when running the given callback.
def state(anon, obj, field, val): return anon.faker.state(field=field)
Returns a randomly selected US state code
async def save(self, request, response): if isinstance(response, Response) and SESSION_KEY in request and not response.prepared: session = request[SESSION_KEY] if session.save(response.set_cookie): self.app.logger.debug('Session saved: %s', session)
Save session to response cookies.
def changelist_view(self, request, extra_context=None): return super(TrackedLiveAdmin, self).changelist_view( request, dict(extra_context or {}, url_name='admin:%s_%s_tracking_report' % (self.model._meta.app_label, self.model._meta.model_name), per...
Updates the changelist view to include settings from this admin.
def _symmetrize_correlograms(correlograms): n_clusters, _, n_bins = correlograms.shape assert n_clusters == _ correlograms[..., 0] = np.maximum(correlograms[..., 0], correlograms[..., 0].T) sym = correlograms[..., 1:][..., ::-1] sym = np.transpose(sym, (1, 0, 2)...
Return the symmetrized version of the CCG arrays.
def track_download_request(download_url, download_title): from indico_piwik.plugin import PiwikPlugin if not download_url: raise ValueError("download_url can't be empty") if not download_title: raise ValueError("download_title can't be empty") request = PiwikRequest(server_url=PiwikPlugi...
Track a download in Piwik
def scenario(ctx, dependency_name, driver_name, lint_name, provisioner_name, role_name, scenario_name, verifier_name): command_args = { 'dependency_name': dependency_name, 'driver_name': driver_name, 'lint_name': lint_name, 'provisioner_name': provisioner_name, '...
Initialize a new scenario for use with Molecule.
def cache_items(values): import os config_path = os.path.expanduser('~/.config/blockade') file_path = os.path.join(config_path, 'cache.txt') if not os.path.isfile(file_path): file(file_path, 'w').close() written = [x.strip() for x in open(file_path, 'r').readlines()] handle = open(file_p...
Cache indicators that were successfully sent to avoid dups.
def check_exclamations_ppm(text): err = "leonard.exclamation.30ppm" msg = u"More than 30 ppm of exclamations. Keep them under control." regex = r"\w!" count = len(re.findall(regex, text)) num_words = len(text.split(" ")) ppm = (count*1.0 / num_words) * 1e6 if ppm > 30 and count > 1: ...
Make sure that the exclamation ppm is under 30.
def addEdgeToGraph(parentNodeName, childNodeName, graphFileHandle, colour="black", length="10", weight="1", dir="none", label="", style=""): graphFileHandle.write('edge[color=%s,len=%s,weight=%s,dir=%s,label="%s",style=%s];\n' % (colour, length, weight, dir, label, style)) graphFileHandle.write("%s -- %s;\n" % ...
Links two nodes in the graph together.
def register_job_from_link(self, link, key, **kwargs): job_config = kwargs.get('job_config', None) if job_config is None: job_config = link.args status = kwargs.get('status', JobStatus.unknown) job_details = JobDetails(jobname=link.linkname, j...
Register a job in the `JobArchive` from a `Link` object
def cwms_process_text(): if request.method == 'OPTIONS': return {} response = request.body.read().decode('utf-8') body = json.loads(response) text = body.get('text') cp = cwms.process_text(text) return _stmts_from_proc(cp)
Process text with CWMS and return INDRA Statements.
def dateJDN(year, month, day, calendar):
    """Convert a calendar date to its Julian Day Number.

    Uses the standard integer JDN formulas with a March-based year so
    leap days fall at the end of the shifted year.
    """
    shift = (14 - month) // 12
    yy = year + 4800 - shift
    mm = month + 12 * shift - 3
    common = day + (153 * mm + 2) // 5 + 365 * yy + yy // 4
    if calendar == GREGORIAN:
        # The Gregorian rule drops centurial leap years except /400.
        return common - yy // 100 + yy // 400 - 32045
    return common - 32083
Converts date to Julian Day Number.
def _get_source_sum(source_hash, file_path, saltenv): ret = dict() schemes = ('salt', 'http', 'https', 'ftp', 'swift', 's3', 'file') invalid_hash_msg = ("Source hash '{0}' format is invalid. It must be in " "the format <hash type>=<hash>").format(source_hash) source_hash = six.te...
Extract the hash sum, whether it is in a remote hash file, or just a string.
def read_input_data(filename): logging.info('Opening file %s for reading input', filename) input_file = open(filename, 'r') data = [] labels = [] for line in input_file: tokens = line.split(',', 1) labels.append(tokens[0].strip()) data.append(tokens[1].strip()) return lab...
Helper function to get training data
def task_class(self): from scenario_player.tasks.base import get_task_class_for_type root_task_type, _ = self.task task_class = get_task_class_for_type(root_task_type) return task_class
Return the Task class type configured for the scenario.
def __generate_key(self, config): cwd = config.get('ssh_path', self._install_directory()) if config.is_affirmative('create', default="yes"): if not os.path.exists(cwd): os.makedirs(cwd) if not os.path.exists(os.path.join(cwd, config.get('keyname'))): ...
Generate the ssh key, and return the ssh config location
def remove_accounts_from_project(accounts_query, project): query = accounts_query.filter(date_deleted__isnull=True) for account in query: remove_account_from_project(account, project)
Remove accounts from project.
def _checkblk(name): blk = __salt__['cmd.run']('blkid -o value -s TYPE {0}'.format(name), ignore_retcode=True) return '' if not blk else blk
Check if the blk exists and return its fstype if ok
def build_lines_data(self, code_obj): if self.version > 1.4: linestarts = list(self.opc.findlinestarts(code_obj)) else: linestarts = [[0, 1]] self.linestarts = dict(linestarts) lines = [] LineTuple = namedtuple('LineTuple', ['l_no', 'next']) _, pre...
Generate various line-related helper data.
def request_reset(self, event): self.log('Password reset request received:', event.__dict__, lvl=hilight) user_object = objectmodels['user'] email = event.data.get('email', None) email_user = None if email is not None and user_object.count({'mail': email}) > 0: email_...
An anonymous client requests a password reset
def clear(self): self.title = None self.numbers = np.zeros(0, int) self.atom_types = [] self.charges = [] self.names = [] self.molecules = np.zeros(0, int) self.bonds = np.zeros((0, 2), int) self.bends = np.zeros((0, 3), int) self.dihedrals = np.ze...
Clear the contents of the data structure
def getInput(): input = '' if sys.platform == 'win32': import msvcrt if msvcrt.kbhit(): input += msvcrt.getch() print_(input) else: time.sleep(.1) else: sock = sys.stdin.fileno() while len(select.select([sock], [], [], 0.1)[0]) > 0:...
Read the input buffer without blocking the system.
def concat_generator(filename, up_threshold, low_threshold=10): txt = "" for line in tf.gfile.Open(filename): line = line.strip() if len(txt) + len(line) + 1 >= up_threshold: ret = txt txt = "" if len(ret) > low_threshold and len(ret) < up_threshold: yield {"targets": ret} if n...
Generate concatenated lines from file upto up_threshold characters.
def grad(self): from . import _ndarray_cls hdl = NDArrayHandle() check_call(_LIB.MXNDArrayGetGrad(self.handle, ctypes.byref(hdl))) if hdl.value is None: return None return _ndarray_cls(hdl)
Returns gradient buffer attached to this NDArray.
def find_module(self, fullname, path=None): basepaths = [""] + list(sys.path) if fullname.startswith("."): if path is None: return None fullname = fullname[1:] basepaths.insert(0, path) fullpath = os.path.join(*fullname.split(".")) for ...
Searches for a Coconut file of the given name and compiles it.
def read_property(f, endianness="<"): prop_name = types.String.read(f, endianness) prop_data_type = types.tds_data_types[types.Uint32.read(f, endianness)] value = prop_data_type.read(f, endianness) log.debug("Property %s: %r", prop_name, value) return prop_name, value
Read a property from a segment's metadata
def append_payload(self, payload: Payload) -> Payload: encoding = payload.headers.get(CONTENT_ENCODING, '').lower() if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': ...
Adds a new body part to multipart writer.
def drop_streams(streams, kdims, keys): stream_params = stream_parameters(streams) inds, dims = zip(*[(ind, kdim) for ind, kdim in enumerate(kdims) if kdim not in stream_params]) get = operator.itemgetter(*inds) keys = (get(k) for k in keys) return dims, ([wrap_tuple(k) for k ...
Drop any dimensioned streams from the keys and kdims.
def _move_centroids(self): for k in range(self.n_clusters): if k in self.clusters: centroid = np.mean(self._X[self.clusters == k, :], axis=0) self.centroids[k] = centroid else: self.n_clusters-=1 self.centroids = self.centro...
Calculate new centroids as the means of the samples in each cluster
def suppressConsoleOut(meth): @wraps(meth) def decorate(*args, **kwargs): _stdout = sys.stdout fptr = open(os.devnull, 'w') sys.stdout = fptr try: return meth(*args, **kwargs) except Exception as e: raise e finally: sys.stdout =...
Disable console output during the method is run.
def _lookup_style(element, names):
    """Look up style text by element name plus a list of class names.

    The element's own style (keyed ``'_<element>'``) comes first,
    followed by the style of each class name, concatenated in order.
    Unknown keys contribute the empty string.
    """
    parts = [_STYLES.get('_' + element, '')]
    parts.extend(_STYLES.get(name, '') for name in names)
    return ''.join(parts)
Lookup style by either element name or the list of classes
async def release_name_async(self, bus_name, error = None, timeout = DBUS.TIMEOUT_USE_DEFAULT) : "releases a registered bus name." assert self.loop != None, "no event loop to attach coroutine to" return \ await self.connection.bus_release_name_async(bus_name, error = error, timeout =...
releases a registered bus name.
def open(self): self._connection = \ amqp.Connection(host='%s:%s' % (self.hostname, self.port), userid=self.username, password=self.password, virtual_host=self.virtual_host, insist=False) self.channel = self._connection.channel()
Open a connection to the AMQP compliant broker.
def uninstall(self, bug: Bug) -> bool: r = self.__api.post('bugs/{}/uninstall'.format(bug.name)) raise NotImplementedError
Uninstalls the Docker image associated with a given bug.
def create_response(version, status, headers):
    """Create an HTTP response header as bytes.

    ``headers`` is an iterable of ``(name, value)`` string pairs; the
    result ends with the blank line that terminates the header section.
    """
    lines = ['HTTP/{} {}\r\n'.format(version, status)]
    for name, value in headers:
        lines.append(name + ': ' + value + '\r\n')
    lines.append('\r\n')
    return s2b(''.join(lines))
Create a HTTP response header.
def _merge_nbval_coverage_data(cov): if not cov: return suffix = _make_suffix(cov) if suffix is True: return filename = cov.data_files.filename + '.' + suffix nbval_data = coverage.CoverageData(debug=cov.debug) try: nbval_data.read_file(os.path.abspath(filename)) exce...
Merge nbval coverage data into pytest-cov data.
def extern_store_tuple(self, context_handle, vals_ptr, vals_len): c = self._ffi.from_handle(context_handle) return c.to_value(tuple(c.from_value(val[0]) for val in self._ffi.unpack(vals_ptr, vals_len)))
Given storage and an array of Handles, return a new Handle to represent the list.
def visit(self, node):
    """Return a generator walking all children of ``node`` recursively.

    Each child is yielded before its own descendants (pre-order).
    NOTE(review): every child is recursed into, so children must
    themselves be iterable.
    """
    for child in node:
        yield child
        yield from self.visit(child)
Returns a generator that walks all children recursively.
def league_scores(self, total_data, time): data = [] for league, score in self.supported_leagues(total_data): item = {'league': league, 'homeTeamName': score['homeTeamName'], 'goalsHomeTeam': score['result']['goalsHomeTeam'], 'goalsAwayTeam': score['re...
Store output of fixtures based on league and time to a JSON file
def raw_tag(name, value):
    """Create a DMAP tag: UTF-8 name, 4-byte big-endian length, raw payload."""
    header = name.encode('utf-8')
    length = len(value).to_bytes(4, byteorder='big')
    return b''.join((header, length, value))
Create a DMAP tag with raw data.
def _generate_grid(self): grid_axes = [] for _, param in self.tunables: grid_axes.append(param.get_grid_axis(self.grid_width)) return grid_axes
Get the all possible values for each of the tunables.
def from_stream(cls, f, **kwargs): lines = lines_from_stream(f) if 'meta' not in kwargs: kwargs['meta'] = {'from': 'stream'} kwargs['meta']['filepath'] = f.name if hasattr(f, 'name') else None return cls(lines, **kwargs)
Create an editor instance from a file stream.
def _where(self, filter_fn): assert callable(filter_fn), 'filter_fn needs to be callable' return VList(i for i in self if filter_fn(i()))
Use this to filter VLists: simply provide a filter function to filter the currently found objects.
def next(self): self._parse_block() if self._remaining > 0: self._remaining -= 1 return six.next(self._iter_rows)
Get the next row in the page.
def enqueue(self, item_type, item):
    """Append ``item`` to the queue bucket for ``item_type``.

    Serialized by ``self.enlock`` so concurrent producers do not race.
    """
    with self.enlock:
        bucket = self.queue[item_type]
        bucket.append(item)
Queue a new data item, make item iterable
def UndoTransaction(self): from Ucs import ConfigMap self._transactionInProgress = False self._configMap = ConfigMap()
Cancels any running transaction.
def project_drawn(cb, msg): stream = cb.streams[0] old_data = stream.data stream.update(data=msg['data']) element = stream.element stream.update(data=old_data) proj = cb.plot.projection if not isinstance(element, _Element) or element.crs == proj: return None crs = element.crs ...
Projects a drawn element to the declared coordinate system
def load_handler(self):
    """Import and instantiate the configured handler.

    ``self.handler_name`` is a dotted path ``pkg.module.ClassName``; the
    named attribute is imported, called with no arguments, and stored on
    ``self.handler``.
    """
    import importlib

    module_path, _, attr_name = self.handler_name.rpartition(".")
    # importlib.import_module is the documented replacement for the
    # ``__import__(..., fromlist=...)`` trick.
    handler_module = importlib.import_module(module_path)
    self.handler = getattr(handler_module, attr_name)()
Load the detected handler.
def recv_rpc(self, context, payload): logger.debug("Adding RPC payload to ControlBuffer queue: %s", payload) self.buf.put(('rpc', (context, payload))) with self.cv: self.cv.notifyAll()
Call from any thread
def _maybe_connect(self, node_id): with self._lock: conn = self._conns.get(node_id) if conn is None: broker = self.cluster.broker_metadata(node_id) assert broker, 'Broker id %s not in current metadata' % (node_id,) log.debug("Initiating con...
Idempotent non-blocking connection attempt to the given node id.
def uniquenessRatio(self, value):
    """Set private ``_uniqueness`` (validated to [5, 15]) and rebuild the
    block matcher."""
    if not 5 <= value <= 15:
        raise InvalidUniquenessRatioError("Uniqueness ratio must be "
                                          "between 5 and 15.")
    self._uniqueness = value
    self._replace_bm()
Set private ``_uniqueness`` and reset ``_block_matcher``.
def _run_workflow(items, paired, workflow_file, work_dir): utils.remove_safe(os.path.join(work_dir, "workspace")) data = paired.tumor_data if paired else items[0] cmd = [utils.get_program_python("configManta.py"), workflow_file, "-m", "local", "-j", dd.get_num_cores(data)] do.run(cmd, "Run manta SV anal...
Run manta analysis inside prepared workflow directory.
def _get_cibpath(): cibpath = os.path.join(__opts__['cachedir'], 'pcs', __env__) log.trace('cibpath: %s', cibpath) return cibpath
Get the path to the directory on the minion where CIB's are saved
def _format_object(obj, format_type=None): if json_api_settings.FORMAT_KEYS is not None: return format_keys(obj, format_type) return format_field_names(obj, format_type)
Depending on settings calls either `format_keys` or `format_field_names`
def to_bigquery_field(self, name_case=DdlParseBase.NAME_CASE.original): col_name = self.get_name(name_case) mode = self.bigquery_mode if self.array_dimensional <= 1: type = self.bigquery_legacy_data_type else: type = "RECORD" fields = OrderedDict() ...
Generate BigQuery JSON field define
def _distarray_missing(self, xc, xd, cdiffs): cindices = [] dindices = [] for i in range(self._datalen): cindices.append(np.where(np.isnan(xc[i]))[0]) dindices.append(np.where(np.isnan(xd[i]))[0]) if self.n_jobs != 1: dist_array = Parallel(n_jobs=self....
Distance array calculation for data with missing values
def to_ufo_background_image(self, ufo_glyph, layer): image = layer.backgroundImage if image is None: return ufo_image = ufo_glyph.image ufo_image.fileName = image.path ufo_image.transformation = image.transform ufo_glyph.lib[CROP_KEY] = list(image.crop) ufo_glyph.lib[LOCKED_KEY] = im...
Copy the background image from the GSLayer to the UFO Glyph.
def list(gandi, domain, limit): options = {'items_per_page': limit} mailboxes = gandi.mail.list(domain, options) output_list(gandi, [mbox['login'] for mbox in mailboxes]) return mailboxes
List mailboxes created on a domain.
def make_html_page(self, valumap): logger.info('Making an html report using template %r.', self.html_template) fh = open(self.html_template) template = fh.read() fh.close() parts = [] for sr in self.subreports: report_data = [item.html for item in sr.report_da...
Builds the report as html page, using the template page from file.
def CheckRequestsForCompletion(self, requests): subjects = [r.session_id.Add("state") for r in requests] statuses_found = {} for subject, result in self.MultiResolvePrefix(subjects, self.FLOW_STATUS_PREFIX): for predicate, _, _ in result: requ...
Checks if there is a status message queued for a number of requests.
def app(environ, start_response): r = HttpRequestHandler(environ, start_response, Router).dispatch() return r
Function called by the WSGI server.
def __notify(self): if self.__callback is not None: try: self.__callback( self._done_event.data, self._done_event.exception, self.__extra, ) except Exception as ex: self._logger.ex...
Notify the given callback about the result of the execution
def insert_tracking_record(self): if self._insert_tracking_record is None: self._insert_tracking_record = self._prepare_insert( tmpl=self._insert_values_tmpl, placeholder_for_id=True, record_class=self.tracking_record_class, field_names...
SQL statement that inserts tracking records.