text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def verify_signature(self, signedtext, cert_file=None, cert_type='pem', node_name=NODE_NAME, node_id=None, id_attr=''):
    """Verify the signature of an XML document.

    :param signedtext: the XML document as a string
    :param cert_file: public key that was used to sign the document;
        falls back to this instance's configured certificate
    :param cert_type: file type of the certificate
    :param node_name: name of the class that is signed
    :param node_id: identifier of the node
    :param id_attr: attribute name for the identifier, normally one of
        'id', 'Id' or 'ID'; falls back to this instance's setting
    :return: True if the signature was correct, otherwise False
    """
    # Fall back to the instance's own certificate settings when the caller
    # did not supply any.  (Only really useful for testing, since in
    # production you would not receive material signed with your own key.)
    if not cert_file:
        cert_file, cert_type = self.cert_file, self.cert_type
    id_attr = id_attr or self.id_attr
    return self.crypto.validate_signature(
        signedtext,
        cert_file=cert_file,
        cert_type=cert_type,
        node_name=node_name,
        node_id=node_id,
        id_attr=id_attr,
    )
[ "def", "verify_signature", "(", "self", ",", "signedtext", ",", "cert_file", "=", "None", ",", "cert_type", "=", "'pem'", ",", "node_name", "=", "NODE_NAME", ",", "node_id", "=", "None", ",", "id_attr", "=", "''", ")", ":", "# This is only for testing purposes...
42.571429
18.428571
def all_thumbnails(path, recursive=True, prefix=None, subdir=None):
    """
    Return a dictionary referencing all files which match the thumbnail
    format.

    Each key is a source image filename, relative to path.
    Each value is a list of dictionaries as explained in
    `thumbnails_for_file`.

    :param path: directory to scan for thumbnail files
    :param recursive: descend into subdirectories when True
    :param prefix: thumbnail filename prefix; defaults to
        ``settings.THUMBNAIL_PREFIX``
    :param subdir: thumbnail subdirectory name; defaults to
        ``settings.THUMBNAIL_SUBDIR``
    """
    if prefix is None:
        prefix = settings.THUMBNAIL_PREFIX
    if subdir is None:
        subdir = settings.THUMBNAIL_SUBDIR
    thumbnail_files = {}
    # Normalise to a trailing slash so rel_dir slicing below is correct.
    if not path.endswith('/'):
        path = '%s/' % path
    len_path = len(path)
    if recursive:
        # NOTE(review): `all` shadows the builtin; kept as-is since this is
        # a documentation-only change.
        all = os.walk(path)
    else:
        # Emulate a single os.walk() entry for just the top directory.
        files = []
        for file in os.listdir(path):
            if os.path.isfile(os.path.join(path, file)):
                files.append(file)
        all = [(path, [], files)]
    for dir_, subdirs, files in all:
        rel_dir = dir_[len_path:]
        for file in files:
            thumb = re_thumbnail_file.match(file)
            if not thumb:
                continue
            d = thumb.groupdict()
            source_filename = d.pop('source_filename')
            if prefix:
                # Strip the thumbnail prefix from the basename; skip files
                # that do not carry it.
                source_path, source_filename = os.path.split(source_filename)
                if not source_filename.startswith(prefix):
                    continue
                source_filename = os.path.join(
                    source_path, source_filename[len(prefix):])
            d['options'] = d['options'] and d['options'].split('_') or []
            if subdir and rel_dir.endswith(subdir):
                rel_dir = rel_dir[:-len(subdir)]
            # Corner-case bug: if the filename didn't have an extension but did
            # have an underscore, the last underscore will get converted to a
            # '.'.
            m = re.match(r'(.*)_(.*)', source_filename)
            if m:
                source_filename = '%s.%s' % m.groups()
            filename = os.path.join(rel_dir, source_filename)
            thumbnail_file = thumbnail_files.setdefault(filename, [])
            d['filename'] = os.path.join(dir_, file)
            thumbnail_file.append(d)
    return thumbnail_files
[ "def", "all_thumbnails", "(", "path", ",", "recursive", "=", "True", ",", "prefix", "=", "None", ",", "subdir", "=", "None", ")", ":", "if", "prefix", "is", "None", ":", "prefix", "=", "settings", ".", "THUMBNAIL_PREFIX", "if", "subdir", "is", "None", ...
40.235294
16.901961
def valid_url(url):
    """Validate url.

    :param str url: package homepage url.
    :rtype: str
    :return: the url, unchanged, when it looks like a valid http(s) URL
    :raises argparse.ArgumentTypeError: when the url does not match
    """
    pattern = (
        r'^(?:http)s?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
        r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))'
        r'(?:/?|[/?]\S+)$'
    )
    if re.match(pattern, url, re.IGNORECASE) is None:
        raise argparse.ArgumentTypeError('"{0}" is invalid url.'.format(url))
    return url
[ "def", "valid_url", "(", "url", ")", ":", "regex", "=", "re", ".", "compile", "(", "r'^(?:http)s?://'", "r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+'", "r'(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?))'", "r'(?:/?|[/?]\\S+)$'", ",", "re", ".", "IGNORECASE", ")", "if", "not",...
26.875
17.6875
def run(self, config, workflow_id, signal, *, data=None):
    """ Run the dag by calling the tasks in the correct order.

    Args:
        config (Config): Reference to the configuration object from which the
            settings for the dag are retrieved.
        workflow_id (str): The unique ID of the workflow that runs this dag.
        signal (DagSignal): The signal object for dags. It wraps the
            construction and sending of signals into easy to use methods.
        data (MultiTaskData): The initial data that is passed on to the start
            tasks.

    Raises:
        DirectedAcyclicGraphInvalid: If the graph is not a dag (e.g. contains
            loops).
        ConfigNotDefinedError: If the configuration for the dag is empty.
    """
    graph = self.make_graph(self._schema)

    # pre-checks
    self.validate(graph)

    if config is None:
        raise ConfigNotDefinedError()

    # create the celery app for submitting tasks
    celery_app = create_app(config)

    # the task queue for managing the current state of the tasks
    tasks = []
    stopped = False

    # add all tasks without predecessors to the task list
    for task in nx.topological_sort(graph):
        task.workflow_name = self.workflow_name
        task.dag_name = self.name
        if len(list(graph.predecessors(task))) == 0:
            task.state = TaskState.Waiting
            tasks.append(task)

    def set_task_completed(completed_task):
        """ For each completed task, add all successor tasks to the task list.
        If they are not in the task list yet, flag them as 'waiting'. """
        completed_task.state = TaskState.Completed
        for successor in graph.successors(completed_task):
            if successor not in tasks:
                successor.state = TaskState.Waiting
                tasks.append(successor)

    # process the task queue as long as there are tasks in it
    while tasks:
        if not stopped:
            stopped = signal.is_stopped

        # delay the execution by the polling time
        if config.dag_polling_time > 0.0:
            sleep(config.dag_polling_time)

        # iterate backwards so tasks can be removed from the list while
        # looping over it
        for i in range(len(tasks) - 1, -1, -1):
            task = tasks[i]

            # for each waiting task, wait for all predecessor tasks to be
            # completed. Then check whether the task should be skipped by
            # interrogating the predecessor tasks.
            if task.is_waiting:
                if stopped:
                    task.state = TaskState.Stopped
                else:
                    pre_tasks = list(graph.predecessors(task))
                    if all([p.is_completed for p in pre_tasks]):

                        # check whether the task should be skipped
                        run_task = task.has_to_run or len(pre_tasks) == 0
                        for pre in pre_tasks:
                            if run_task:
                                break

                            # predecessor task is skipped and flag should
                            # not be propagated
                            if pre.is_skipped and not pre.propagate_skip:
                                run_task = True

                            # limits of a non-skipped predecessor task
                            if not pre.is_skipped:
                                if pre.celery_result.result.limit is not None:
                                    if task.name in [
                                            n.name if isinstance(n, BaseTask)
                                            else n
                                            for n in pre.celery_result.result.limit]:
                                        run_task = True
                                else:
                                    run_task = True

                        task.is_skipped = not run_task

                        # send the task to celery or, if skipped, mark it as
                        # completed
                        if task.is_skipped:
                            set_task_completed(task)
                        else:
                            # compose the input data from the predecessor tasks
                            # output. Data from skipped predecessor tasks do not
                            # contribute to the input data
                            if len(pre_tasks) == 0:
                                input_data = data
                            else:
                                input_data = MultiTaskData()
                                for pt in [p for p in pre_tasks
                                           if not p.is_skipped]:
                                    slot = graph[pt][task]['slot']
                                    input_data.add_dataset(
                                        pt.name,
                                        pt.celery_result.result.data.default_dataset,
                                        aliases=[slot] if slot is not None else None)

                            task.state = TaskState.Running
                            task.celery_result = celery_app.send_task(
                                JobExecPath.Task,
                                args=(task, workflow_id, input_data),
                                queue=task.queue,
                                routing_key=task.queue
                            )

            # flag task as completed
            elif task.is_running:
                if task.celery_completed:
                    set_task_completed(task)
                elif task.celery_failed:
                    task.state = TaskState.Aborted
                    signal.stop_workflow()

            # cleanup task results that are not required anymore
            elif task.is_completed:
                if all([s.is_completed or s.is_stopped or s.is_aborted
                        for s in graph.successors(task)]):
                    if celery_app.conf.result_expires == 0:
                        task.clear_celery_result()
                    tasks.remove(task)

            # cleanup and remove stopped and aborted tasks
            elif task.is_stopped or task.is_aborted:
                if celery_app.conf.result_expires == 0:
                    task.clear_celery_result()
                tasks.remove(task)
[ "def", "run", "(", "self", ",", "config", ",", "workflow_id", ",", "signal", ",", "*", ",", "data", "=", "None", ")", ":", "graph", "=", "self", ".", "make_graph", "(", "self", ".", "_schema", ")", "# pre-checks", "self", ".", "validate", "(", "graph...
46.657143
22
def relative_path(path, from_file):
    """
    Return the relative path of a file or directory, specified as ``path``
    relative to (the parent directory of) ``from_file``.

    This method is intended to be called with ``__file__`` as second argument.
    The returned path is relative to the current working directory.
    If ``path`` is ``None``, return ``None``.

    Example: ::

        path="res/foo.bar"
        from_file="/root/abc/def/ghi.py"
        cwd="/root"
        => "abc/def/res/foo.bar"

    :param string path: the file path
    :param string from_file: the reference file
    :rtype: string
    """
    if path is None:
        return None
    target = absolute_path(path, from_file)
    cwd = os.getcwd()
    if is_windows():
        # NOTE on Windows, if the two paths are on different drives, the
        # notion of relative path is not defined: return the absolute path
        # of the target instead.
        target_drive = os.path.splitdrive(target)[0]
        cwd_drive = os.path.splitdrive(cwd)[0]
        if target_drive != cwd_drive:
            return target
    return os.path.relpath(target, start=cwd)
[ "def", "relative_path", "(", "path", ",", "from_file", ")", ":", "if", "path", "is", "None", ":", "return", "None", "abs_path_target", "=", "absolute_path", "(", "path", ",", "from_file", ")", "abs_path_cwd", "=", "os", ".", "getcwd", "(", ")", "if", "is...
32.805556
19.194444
def computeAccuracyEnding(predictions, truths, iterations,
                          resets=None, randoms=None, num=None,
                          sequenceCounter=None):
    """
    Compute prediction accuracy at sequence endings only.

    A step i is scored when the next step (i+1) is flagged as a sequence
    boundary (a reset or a random jump).  When neither ``resets`` nor
    ``randoms`` is given, every step with a known truth value is scored.

    :param predictions: per-step predictions; predictions[i] must support
        the ``in`` operator (e.g. a set of predicted values)
    :param truths: per-step ground-truth values; ``None`` entries are skipped
    :param iterations: per-step iteration numbers
    :param resets: optional per-step reset flags (truthy marks a boundary)
    :param randoms: optional per-step random-jump flags
    :param num: optional cap; steps with index > num are ignored
    :param sequenceCounter: optional per-step sequence ids; ``None`` entries
        are recorded when not provided
    :return: tuple (accuracy, numIteration, numSequences)
    """
    accuracy = []
    numIteration = []
    numSequences = []
    # range() instead of the Python-2-only xrange(); stop one short because
    # the boundary test looks at index i + 1.
    for i in range(len(predictions) - 1):
        if num is not None and i > num:
            continue
        if truths[i] is None:
            continue

        # identify the end of sequence
        if resets is not None or randoms is not None:
            # Bug fix: guard each flag list separately.  Previously both
            # lists were indexed unconditionally, raising TypeError when
            # only one of resets/randoms was supplied.
            atEnd = ((resets is not None and resets[i + 1]) or
                     (randoms is not None and randoms[i + 1]))
            if not atEnd:
                continue

        # truths[i] is known to be non-None here, so test membership only.
        correct = truths[i] in predictions[i]
        accuracy.append(correct)
        # Bug fix: sequenceCounter defaults to None but was always indexed;
        # record None when it is absent instead of crashing.
        numSequences.append(
            sequenceCounter[i] if sequenceCounter is not None else None)
        numIteration.append(iterations[i])

    return (accuracy, numIteration, numSequences)
[ "def", "computeAccuracyEnding", "(", "predictions", ",", "truths", ",", "iterations", ",", "resets", "=", "None", ",", "randoms", "=", "None", ",", "num", "=", "None", ",", "sequenceCounter", "=", "None", ")", ":", "accuracy", "=", "[", "]", "numIteration"...
26.714286
16.571429
def validate(self, value):
    """
    Apply the validation criteria: ``value`` must be parseable as an
    angle in this instance's unit.

    Returns the value unchanged when valid, otherwise ``None``.
    """
    try:
        coord.Angle(value, unit=self.unit)
    except ValueError:
        return None
    return value
[ "def", "validate", "(", "self", ",", "value", ")", ":", "try", ":", "coord", ".", "Angle", "(", "value", ",", "unit", "=", "self", ".", "unit", ")", "return", "value", "except", "ValueError", ":", "return", "None" ]
27.2
10.8
def _get_table_rows(parent_table, table_name, row_name):
    """
    Extract the row entries nested under a TABLE_*/ROW_* pair, normalising
    the inconsistent NX-OS JSON layouts::

        {'TABLE_intf': [{'ROW_intf': {            # list of tables
        {'TABLE_mac_address': {'ROW_mac_address': [{   # table with row list
        {'TABLE_vrf': {'ROW_vrf': {'TABLE_adj': {'ROW_adj': {  # nested dicts

    Always returns a list (possibly empty).
    """
    if parent_table is None:
        return []
    table = parent_table.get(table_name)
    if isinstance(table, list):
        # One ROW_* entry per table element.
        return [entry.get(row_name) for entry in table]
    if isinstance(table, dict):
        rows = table.get(row_name)
        # A single row comes back as a dict; wrap it for a uniform shape.
        return rows if isinstance(rows, list) else [rows]
    return []
[ "def", "_get_table_rows", "(", "parent_table", ",", "table_name", ",", "row_name", ")", ":", "if", "parent_table", "is", "None", ":", "return", "[", "]", "_table", "=", "parent_table", ".", "get", "(", "table_name", ")", "_table_rows", "=", "[", "]", "if",...
35.4
11.9
def pack_sources(sources, normalize=True):
    '''
    Accepts list of dicts (or a string representing a list of dicts) and packs
    the key/value pairs into a single dict.

    ``'[{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}]'`` would become
    ``{"foo": "salt://foo.rpm", "bar": "salt://bar.rpm"}``

    normalize : True
        Normalize the package name by removing the architecture, if the
        architecture of the package is different from the architecture of the
        operating system. The ability to disable this behavior is useful for
        poorly-created packages which include the architecture as an actual
        part of the name, such as kernel modules which match a specific kernel
        version.

        .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' pkg_resource.pack_sources '[{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}]'
    '''
    # Use the platform's normalizer only when requested and available;
    # otherwise names pass through unchanged.
    if normalize and 'pkg.normalize_name' in __salt__:
        _normalize_name = __salt__['pkg.normalize_name']
    else:
        _normalize_name = lambda pkgname: pkgname

    # A string argument is parsed as YAML (superset of the JSON shown in
    # the CLI example); parse failures return an empty dict.
    if isinstance(sources, six.string_types):
        try:
            sources = salt.utils.yaml.safe_load(sources)
        except salt.utils.yaml.parser.ParserError as err:
            log.error(err)
            return {}
    ret = {}
    for source in sources:
        if (not isinstance(source, dict)) or len(source) != 1:
            # Any malformed element invalidates the whole input.
            log.error('Invalid input: %s', pprint.pformat(sources))
            log.error('Input must be a list of 1-element dicts')
            return {}
        else:
            key = next(iter(source))
            ret[_normalize_name(key)] = source[key]
    return ret
[ "def", "pack_sources", "(", "sources", ",", "normalize", "=", "True", ")", ":", "if", "normalize", "and", "'pkg.normalize_name'", "in", "__salt__", ":", "_normalize_name", "=", "__salt__", "[", "'pkg.normalize_name'", "]", "else", ":", "_normalize_name", "=", "l...
36.644444
25.355556
def list(declared, undeclared):
    """List configured queues."""
    queues = current_queues.queues.values()
    if declared:
        queues = [q for q in queues if q.exists]
    elif undeclared:
        queues = [q for q in queues if not q.exists]
    # Print the routing keys in sorted order, one per line.
    for routing_key in sorted(q.routing_key for q in queues):
        click.secho(routing_key)
[ "def", "list", "(", "declared", ",", "undeclared", ")", ":", "queues", "=", "current_queues", ".", "queues", ".", "values", "(", ")", "if", "declared", ":", "queues", "=", "filter", "(", "lambda", "queue", ":", "queue", ".", "exists", ",", "queues", ")...
36.272727
14.363636
def obtain_to(filename):
    """
    Return the digital elevation map projected to the lat/lon matrix
    coordinates of the given file.

    Keyword arguments:
    filename -- the name of a netcdf file.
    """
    root, _ = nc.open(filename)
    lat = nc.getvar(root, 'lat')[0, :]
    lon = nc.getvar(root, 'lon')[0, :]
    nc.close(root)
    return obtain(lat, lon)
[ "def", "obtain_to", "(", "filename", ")", ":", "root", ",", "_", "=", "nc", ".", "open", "(", "filename", ")", "lat", ",", "lon", "=", "nc", ".", "getvar", "(", "root", ",", "'lat'", ")", "[", "0", ",", ":", "]", ",", "nc", ".", "getvar", "("...
30
17.454545
def reports_list(request):
    '''Lists all of the reports currently available.'''
    # Build one entry per registered report, sorted by report name.
    reports = sorted(
        (
            {
                "name": report.__name__,
                "url": reverse(report),
                "description": report.__doc__,
            }
            for report in get_all_reports()
        ),
        key=lambda entry: entry["name"],
    )
    ctx = {
        "reports": reports,
    }
    return render(request, "registrasion/reports_list.html", ctx)
[ "def", "reports_list", "(", "request", ")", ":", "reports", "=", "[", "]", "for", "report", "in", "get_all_reports", "(", ")", ":", "reports", ".", "append", "(", "{", "\"name\"", ":", "report", ".", "__name__", ",", "\"url\"", ":", "reverse", "(", "re...
23.210526
22.578947
def validate_id(request):
    """Validate request id.

    A JSON-RPC id, when present, must be a string, an integer, or null.

    :param request: the decoded request dict
    :raises AssertionError: when the id has an unsupported type
    """
    if 'id' in request:
        # Bug fix: ``None`` is not a type, so isinstance() raised
        # TypeError for any non-matching id instead of failing the
        # assertion; use type(None) to accept null ids correctly.
        correct_id = isinstance(
            request['id'],
            (string_types, int, type(None)),
        )
        error = 'Incorrect identifier'
        assert correct_id, error
[ "def", "validate_id", "(", "request", ")", ":", "if", "'id'", "in", "request", ":", "correct_id", "=", "isinstance", "(", "request", "[", "'id'", "]", ",", "(", "string_types", ",", "int", ",", "None", ")", ",", ")", "error", "=", "'Incorrect identifier'...
25.3
13.7
def cmd_ADSB(self, args):
    '''adsb command parser'''
    usage = "usage: adsb <set>"
    if not args:
        print(usage)
        return
    command = args[0]
    if command == "status":
        # Summary line, then one line per tracked threat vehicle.
        print("total threat count: %u  active threat count: %u" %
              (len(self.threat_vehicles), len(self.active_threat_ids)))
        for id in self.threat_vehicles.keys():
            print("id: %s  distance: %.2f m callsign: %s  alt: %.2f" %
                  (id,
                   self.threat_vehicles[id].distance,
                   self.threat_vehicles[id].state['callsign'],
                   self.threat_vehicles[id].state['altitude']))
    elif command == "set":
        self.ADSB_settings.command(args[1:])
    else:
        print(usage)
[ "def", "cmd_ADSB", "(", "self", ",", "args", ")", ":", "usage", "=", "\"usage: adsb <set>\"", "if", "len", "(", "args", ")", "==", "0", ":", "print", "(", "usage", ")", "return", "if", "args", "[", "0", "]", "==", "\"status\"", ":", "print", "(", "...
48.894737
27.947368
def detail_dict(self):
    """A more detailed dict that includes the descriptions, sub descriptions,
    table and columns."""
    d = self.dict

    def aug_col(c):
        # Augment a column's dict with its per-stat dicts.
        # NOTE(review): this writes 'stats' into c.dict in place — safe only
        # if c.dict is a property returning a fresh dict each call; confirm.
        d = c.dict
        d['stats'] = [s.dict for s in c.stats]
        return d

    d['table'] = self.table.dict
    d['table']['columns'] = [aug_col(c) for c in self.table.columns]

    return d
[ "def", "detail_dict", "(", "self", ")", ":", "d", "=", "self", ".", "dict", "def", "aug_col", "(", "c", ")", ":", "d", "=", "c", ".", "dict", "d", "[", "'stats'", "]", "=", "[", "s", ".", "dict", "for", "s", "in", "c", ".", "stats", "]", "r...
26.133333
21.466667
def _ge_from_le(self, other):
    'Return a >= b.  Computed by @total_ordering from (not a <= b) or (a == b).'
    le_result = self.__le__(other)
    if le_result is NotImplemented:
        # Propagate so Python can try the reflected operation.
        return le_result
    return (not le_result) or (self == other)
[ "def", "_ge_from_le", "(", "self", ",", "other", ")", ":", "op_result", "=", "self", ".", "__le__", "(", "other", ")", "if", "op_result", "is", "NotImplemented", ":", "return", "NotImplemented", "return", "not", "op_result", "or", "self", "==", "other" ]
42
5.666667
def get_ips_from_steamids(self, server_steam_ids, timeout=30):
    """Resolve IPs from SteamIDs

    :param server_steam_ids: a list of steamids
    :type server_steam_ids: list
    :param timeout: (optional) timeout for request in seconds
    :type timeout: int
    :return: map of ips to steamids
    :rtype: dict
    :raises: :class:`.UnifiedMessageError`

    Sample response:

    .. code:: python

        {SteamID(id=123456, type='AnonGameServer', universe='Public',
                 instance=1234): '1.2.3.4:27060'}
    """
    # Blocking unified-message round trip to the game server directory.
    resp, error = self._um.send_and_wait("GameServers.GetServerIPsBySteamID#1",
                                         {"server_steamids": server_steam_ids},
                                         timeout=timeout,
                                         )
    if error:
        raise error
    if resp is None:
        # Timed out / no response body: signal "unknown" rather than empty.
        return None
    return {SteamID(server.steamid): server.addr for server in resp.servers}
[ "def", "get_ips_from_steamids", "(", "self", ",", "server_steam_ids", ",", "timeout", "=", "30", ")", ":", "resp", ",", "error", "=", "self", ".", "_um", ".", "send_and_wait", "(", "\"GameServers.GetServerIPsBySteamID#1\"", ",", "{", "\"server_steamids\"", ":", ...
36.592593
23.148148
def compute(self, *inputs, **kwargs):
    """
    Compute based on NeuralVariable.

    :type inputs: list of NeuralVariable
    :return: NeuralVariable, or a list of them when compute_tensor yields
        multiple outputs
    :raises SystemError: if the first input is not a NeuralVariable
    """
    # Imported lazily to avoid a circular import at module load time.
    from deepy.core.neural_var import NeuralVariable
    from deepy.core.graph import graph
    if type(inputs[0]) != NeuralVariable:
        raise SystemError("The input of `compute` must be NeuralVar")
    # Initialize the layer with the input dimensionality (singular keyword
    # for one input, plural for several).
    dims = [t.dim() for t in inputs]
    if len(inputs) == 1:
        self.init(input_dim=dims[0])
    else:
        self.init(input_dims=dims)
    # Check block: attach parameters to the default block if not linked yet.
    if self.parameters and not self._linked_block:
        self.belongs_to(graph.default_block())
    # convert kwargs to theano-compatible variables
    train_kwargs, _, _ = convert_to_theano_var(kwargs)

    output = self.compute_tensor(*[t.tensor for t in inputs], **train_kwargs)

    # Wrap raw tensor output(s) back into NeuralVariable(s).
    if type(output) != list and type(output) != tuple:
        return NeuralVariable(output, dim=self.output_dim)
    else:
        return [NeuralVariable(*item) for item in zip(output, self.output_dims)]
[ "def", "compute", "(", "self", ",", "*", "inputs", ",", "*", "*", "kwargs", ")", ":", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "from", "deepy", ".", "core", ".", "graph", "import", "graph", "if", "type", "(", "inputs...
37.678571
16.535714
def write_output_files(self, traj):
    """
    Write per-trajectory CSV output files for the pi-stacking analysis
    (the docstring of the original mentioned hydrogen bonds; the filenames
    and data written here are pi-stacking counts per frame).

    Files are created under ./analysis/pistacking/ relative to the current
    working directory; the working directory is changed and restored via
    relative chdir calls.

    NOTE(review): opening CSV files in 'wb' mode with csv.writer is a
    Python-2 idiom; under Python 3 this raises TypeError — confirm the
    project's target interpreter.
    """
    # Enter (creating if needed) the analysis output directory.
    try:
        os.chdir("analysis")
    except Exception as e:
        os.mkdir("analysis")
        os.chdir("analysis")
    # NOTE(review): mkdir fails if 'pistacking' already exists (e.g. on a
    # second run) — confirm intended single-run usage.
    os.mkdir("pistacking")
    os.chdir("pistacking")
    # Total pi-stacking contacts per frame for this trajectory.
    with open('pistacking_data_total_'+str(traj)+'.csv', 'wb') as outfile:
        hwriter = csv.writer(outfile, delimiter=' ')
        for time in self.pistacking_by_time[traj]:
            hwriter.writerow([time[0],time[1]])
    # One presence/absence time series per drawable pi contact.
    for bond in self.pistacking_by_type[traj]:
        if bond in self.pi_contacts_for_drawing.keys():
            with open("pi_contact_"+str(traj)+".csv","wb") as outfile:
                hwriter = csv.writer(outfile, delimiter=' ')
                for time in self.timesteps:
                    result = [1 if x[0]==time and x["acceptor_idx"]==bond["acceptor_idx"] else 0 for x in self.timeseries][0]
                    hwriter.writerow([time,result])
    # Restore the original working directory.
    os.chdir("../../")
[ "def", "write_output_files", "(", "self", ",", "traj", ")", ":", "try", ":", "os", ".", "chdir", "(", "\"analysis\"", ")", "except", "Exception", "as", "e", ":", "os", ".", "mkdir", "(", "\"analysis\"", ")", "os", ".", "chdir", "(", "\"analysis\"", ")"...
46.666667
17.583333
def ReleaseClick(cls):
    '''Release a held-down mouse button on the current element.'''
    target = cls._element()
    chain = ActionChains(Web.driver)
    chain.release(target)
    chain.perform()
[ "def", "ReleaseClick", "(", "cls", ")", ":", "element", "=", "cls", ".", "_element", "(", ")", "action", "=", "ActionChains", "(", "Web", ".", "driver", ")", "action", ".", "release", "(", "element", ")", "action", ".", "perform", "(", ")" ]
28
12.857143
def lastname(random=random, *args, **kwargs):
    """
    Return a last name!

    >>> mock_random.seed(0)
    >>> lastname(random=mock_random)
    'chimp'
    >>> mock_random.seed(1)
    >>> lastname(random=mock_random, capitalize=True)
    'Wonderful'
    >>> mock_random.seed(2)
    >>> lastname(random=mock_random, slugify=True)
    'poopbritches'
    >>> [lastname(random=mock_random) for x in range(0,10)]
    ['wonderful', 'chimp', 'onionmighty', 'magnificentslap', 'smellmouse', 'secretbale', 'boatbenchtwirl', 'spectacularmice', 'incrediblebritches', 'poopbritches']
    """
    patterns = [
        "{noun}",
        "{adjective}",
        "{noun}{second_noun}",
        "{adjective}{noun}",
        "{adjective}{plural}",
        "{noun}{verb}",
        "{noun}{container}",
        "{verb}{noun}",
        "{adjective}{verb}",
        "{noun}{adjective}",
        "{noun}{firstname}",
        "{noun}{title}",
        "{adjective}{title}",
        "{adjective}-{noun}",
        "{adjective}-{plural}",
    ]
    # Pick the pattern first, then generate the words, so the random stream
    # is consumed in the same order as the original implementation.
    pattern = random.choice(patterns)
    return pattern.format(noun=noun(random=random),
                          second_noun=noun(random=random),
                          adjective=adjective(random=random),
                          plural=plural(random=random),
                          container=container(random=random),
                          verb=verb(random=random),
                          firstname=firstname(random=random),
                          title=title(random=random))
[ "def", "lastname", "(", "random", "=", "random", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "types", "=", "[", "\"{noun}\"", ",", "\"{adjective}\"", ",", "\"{noun}{second_noun}\"", ",", "\"{adjective}{noun}\"", ",", "\"{adjective}{plural}\"", ",", "\...
34.2
22.333333
def render_latex_sub_super(
        name, subs=None, supers=None, translate_symbols=True, sep=','):
    r'''Assemble a string from the primary name and the given sub- and
    superscripts::

        >>> render_latex_sub_super(name='alpha', subs=['mu', 'nu'], supers=[2])
        '\\alpha_{\\mu,\\nu}^{2}'

        >>> render_latex_sub_super(
        ...     name='alpha', subs=['1', '2'], supers=['(1)'], sep='')
        '\\alpha_{12}^{(1)}'

    Args:
        name (str): the string without the subscript/superscript
        subs (list or None): list of subscripts
        supers (list or None): list of superscripts
        translate_symbols (bool): If True, try to translate (Greek) symbols
            in `name`, `subs`, and `supers` to unicode
        sep (str): Separator to use if there are multiple
            subscripts/superscripts
    '''
    subs = [] if subs is None else subs
    supers = [] if supers is None else supers
    if translate_symbols:
        # Same call order as before: superscripts, subscripts, then name.
        supers = [_translate_symbols(str(sup)) for sup in supers]
        subs = [_translate_symbols(str(sub)) for sub in subs]
        name = _translate_symbols(name)
    res = name
    joined_sub = sep.join(subs)
    joined_sup = sep.join(supers)
    if joined_sub:
        res += "_{%s}" % joined_sub
    if joined_sup:
        res += "^{%s}" % joined_sup
    return res
[ "def", "render_latex_sub_super", "(", "name", ",", "subs", "=", "None", ",", "supers", "=", "None", ",", "translate_symbols", "=", "True", ",", "sep", "=", "','", ")", ":", "if", "subs", "is", "None", ":", "subs", "=", "[", "]", "if", "supers", "is",...
34.378378
20.972973
def holtWintersConfidenceBands(requestContext, seriesList, delta=3):
    """
    Performs a Holt-Winters forecast using the series as input data and plots
    upper and lower bands with the predicted forecast deviations.

    :param delta: number of deviations the bands are placed away from the
        forecast.
    """
    # A week of extra history is fetched so the model has data to train on
    # before the requested window starts.
    previewSeconds = 7 * 86400  # 7 days

    # ignore original data and pull new, including our preview
    newContext = requestContext.copy()
    newContext['startTime'] = (requestContext['startTime'] -
                               timedelta(seconds=previewSeconds))
    previewList = evaluateTokens(newContext, requestContext['args'][0])
    results = []
    for series in previewList:
        analysis = holtWintersAnalysis(series)

        # Trim the preview window off the predictions so the returned
        # series aligns with the originally requested time range.
        data = analysis['predictions']
        windowPoints = previewSeconds // data.step
        forecast = TimeSeries(data.name, data.start + previewSeconds,
                              data.end, data.step, data[windowPoints:])
        forecast.pathExpression = data.pathExpression

        # Same trimming for the deviations.
        data = analysis['deviations']
        windowPoints = previewSeconds // data.step
        deviation = TimeSeries(data.name, data.start + previewSeconds,
                               data.end, data.step, data[windowPoints:])
        deviation.pathExpression = data.pathExpression

        seriesLength = len(forecast)
        i = 0
        upperBand = list()
        lowerBand = list()
        while i < seriesLength:
            forecast_item = forecast[i]
            deviation_item = deviation[i]
            i = i + 1
            if forecast_item is None or deviation_item is None:
                # Gaps in either input propagate as gaps in both bands.
                upperBand.append(None)
                lowerBand.append(None)
            else:
                scaled_deviation = delta * deviation_item
                upperBand.append(forecast_item + scaled_deviation)
                lowerBand.append(forecast_item - scaled_deviation)

        upperName = "holtWintersConfidenceUpper(%s)" % series.name
        lowerName = "holtWintersConfidenceLower(%s)" % series.name
        upperSeries = TimeSeries(upperName, forecast.start, forecast.end,
                                 forecast.step, upperBand)
        lowerSeries = TimeSeries(lowerName, forecast.start, forecast.end,
                                 forecast.step, lowerBand)
        upperSeries.pathExpression = series.pathExpression
        lowerSeries.pathExpression = series.pathExpression
        results.append(lowerSeries)
        results.append(upperSeries)
    return results
[ "def", "holtWintersConfidenceBands", "(", "requestContext", ",", "seriesList", ",", "delta", "=", "3", ")", ":", "previewSeconds", "=", "7", "*", "86400", "# 7 days", "# ignore original data and pull new, including our preview", "newContext", "=", "requestContext", ".", ...
44.296296
18.407407
def find_modules(import_path, include_packages=False, recursive=False):
    """Finds all the modules below a package.  This can be useful to
    automatically import all views / controllers so that their metaclasses /
    function decorators have a chance to register themselves on the
    application.

    Packages are not returned unless `include_packages` is `True`.  This can
    also recursively list modules but in that case it will import all the
    packages to get the correct load path of that module.

    :param import_path: the dotted name for the package to find child
                        modules.
    :param include_packages: set to `True` if packages should be returned,
                             too.
    :param recursive: set to `True` if recursion should happen.
    :return: generator
    """
    module = import_string(import_path)
    path = getattr(module, "__path__", None)
    if path is None:
        raise ValueError("%r is not a package" % import_path)
    prefix = module.__name__ + "."
    for _importer, shortname, is_package in pkgutil.iter_modules(path):
        fullname = prefix + shortname
        if not is_package:
            yield fullname
            continue
        if include_packages:
            yield fullname
        if recursive:
            for nested in find_modules(fullname, include_packages, True):
                yield nested
[ "def", "find_modules", "(", "import_path", ",", "include_packages", "=", "False", ",", "recursive", "=", "False", ")", ":", "module", "=", "import_string", "(", "import_path", ")", "path", "=", "getattr", "(", "module", ",", "\"__path__\"", ",", "None", ")",...
43.266667
20.766667
def cli(env, keyword, package_type):
    """List packages that can be ordered via the placeOrder API.

    ::

        # List out all packages for ordering
        slcli order package-list


        # List out all packages with "server" in the name
        slcli order package-list --keyword server

        # Select only specifict package types
        slcli order package-list --package_type BARE_METAL_CPU
    """
    manager = ordering.OrderingManager(env.client)
    table = formatting.Table(COLUMNS)

    # Exclude Bluemix service packages by default; narrow further by
    # keyword and/or explicit package type when requested.
    _filter = {'type': {'keyName': {'operation': '!= BLUEMIX_SERVICE'}}}
    if keyword:
        _filter['name'] = {'operation': '*= %s' % keyword}
    if package_type:
        _filter['type'] = {'keyName': {'operation': package_type}}

    for package in manager.list_packages(filter=_filter):
        table.add_row([
            package['id'],
            package['name'],
            package['keyName'],
            package['type']['keyName'],
        ])
    env.fout(table)
[ "def", "cli", "(", "env", ",", "keyword", ",", "package_type", ")", ":", "manager", "=", "ordering", ".", "OrderingManager", "(", "env", ".", "client", ")", "table", "=", "formatting", ".", "Table", "(", "COLUMNS", ")", "_filter", "=", "{", "'type'", "...
28.676471
20.176471
def close(self, code: int = None, reason: str = None) -> None:
    """Closes the WebSocket connection.

    :param code: numeric close status code; defaults to 1000 ("normal
        closure") when a ``reason`` is supplied, otherwise the close frame
        carries no payload.
    :param reason: optional human-readable close reason, UTF-8 encoded
        into the close frame payload.
    """
    if not self.server_terminated:
        if not self.stream.closed():
            if code is None and reason is not None:
                code = 1000  # "normal closure" status code
            if code is None:
                close_data = b""
            else:
                # Close frame payload: 2-byte big-endian status code,
                # optionally followed by the UTF-8 reason text.
                close_data = struct.pack(">H", code)
                if reason is not None:
                    close_data += utf8(reason)
            try:
                # Opcode 0x8 is the WebSocket close frame.
                self._write_frame(True, 0x8, close_data)
            except StreamClosedError:
                self._abort()
        self.server_terminated = True
    if self.client_terminated:
        # Both sides have sent close frames: tear down the stream now and
        # cancel any pending forced-abort timeout.
        if self._waiting is not None:
            self.stream.io_loop.remove_timeout(self._waiting)
            self._waiting = None
        self.stream.close()
    elif self._waiting is None:
        # Give the client a few seconds to complete a clean shutdown,
        # otherwise just close the connection.
        self._waiting = self.stream.io_loop.add_timeout(
            self.stream.io_loop.time() + 5, self._abort
        )
[ "def", "close", "(", "self", ",", "code", ":", "int", "=", "None", ",", "reason", ":", "str", "=", "None", ")", "->", "None", ":", "if", "not", "self", ".", "server_terminated", ":", "if", "not", "self", ".", "stream", ".", "closed", "(", ")", ":...
43.75
11.607143
def set_vm_status(status, name=None, vmid=None):
    '''
    Convenience function for setting VM status.

    :param status: the status verb appended to the proxmox API URL
        (e.g. used in nodes/<node>/<type>/<vmid>/status/<status>)
    :param name: VM name; used for lookup when ``vmid`` is not given
    :param vmid: VM id; preferred lookup key when provided
    :return: True when the API reported a usable result, otherwise False
    :raises SaltCloudExecutionTimeout: if the VM cannot be resolved to a
        complete node/type/vmid record
    '''
    log.debug('Set status to %s for %s (%s)', status, name, vmid)

    # Resolve the VM record either by id (preferred) or by name.
    if vmid is not None:
        log.debug('set_vm_status: via ID - VMID %s (%s): %s', vmid, name, status)
        vmobj = _get_vm_by_id(vmid)
    else:
        log.debug('set_vm_status: via name - VMID %s (%s): %s', vmid, name, status)
        vmobj = _get_vm_by_name(name)

    # All three keys are required to build the API endpoint below.
    if not vmobj or 'node' not in vmobj or 'type' not in vmobj or 'vmid' not in vmobj:
        log.error('Unable to set status %s for %s (%s)', status, name, vmid)
        raise SaltCloudExecutionTimeout

    log.debug("VM_STATUS: Has desired info (%s). Setting status..", vmobj)
    data = query('post', 'nodes/{0}/{1}/{2}/status/{3}'.format(
        vmobj['node'], vmobj['type'], vmobj['vmid'], status))

    result = _parse_proxmox_upid(data, vmobj)

    # Any parsed result other than False/None counts as success.
    if result is not False and result is not None:
        log.debug('Set_vm_status action result: %s', result)
        return True

    return False
[ "def", "set_vm_status", "(", "status", ",", "name", "=", "None", ",", "vmid", "=", "None", ")", ":", "log", ".", "debug", "(", "'Set status to %s for %s (%s)'", ",", "status", ",", "name", ",", "vmid", ")", "if", "vmid", "is", "not", "None", ":", "log"...
34.903226
21.741935
def get_contact(self, msisdn):
    """ Returns the WhatsApp ID for the given MSISDN """
    url = urllib_parse.urljoin(self.api_url, "/v1/contacts")
    payload = {"blocking": "wait", "contacts": [msisdn]}
    response = self.session.post(url, json=payload)
    response.raise_for_status()
    # The API echoes one contact entry per requested MSISDN.
    whatsapp_id = response.json()["contacts"][0].get("wa_id")
    if not whatsapp_id:
        self.fire_failed_contact_lookup(msisdn)
    return whatsapp_id
[ "def", "get_contact", "(", "self", ",", "msisdn", ")", ":", "response", "=", "self", ".", "session", ".", "post", "(", "urllib_parse", ".", "urljoin", "(", "self", ".", "api_url", ",", "\"/v1/contacts\"", ")", ",", "json", "=", "{", "\"blocking\"", ":", ...
36.692308
12.846154
def get_body(name):
    """Retrieve the Body structure of a JPL .bsp file object

    Args:
        name (str)
    Return:
        :py:class:`~beyond.constants.Body`
    """
    result = Pck()[name]
    # Attach a propagator closure so the body can be evaluated at any date.
    result.propagate = lambda date: get_orbit(name, date)
    return result
[ "def", "get_body", "(", "name", ")", ":", "body", "=", "Pck", "(", ")", "[", "name", "]", "body", ".", "propagate", "=", "lambda", "date", ":", "get_orbit", "(", "name", ",", "date", ")", "return", "body" ]
21.5
20.25
def handle_symbol_search(self, call_id, payload):
    """Handler for symbol search results"""
    self.log.debug('handle_symbol_search: in %s', Pretty(payload))

    quickfix_items = []
    for symbol in payload["syms"]:
        pos = symbol.get("pos")
        # Symbols without a position cannot be shown in the quickfix list.
        if not pos:
            continue
        quickfix_items.append(self.editor.to_quickfix_item(
            str(pos["file"]),
            pos["line"],
            str(symbol["name"]),
            "info"))

    self.editor.write_quickfix_list(quickfix_items, "Symbol Search")
[ "def", "handle_symbol_search", "(", "self", ",", "call_id", ",", "payload", ")", ":", "self", ".", "log", ".", "debug", "(", "'handle_symbol_search: in %s'", ",", "Pretty", "(", "payload", ")", ")", "syms", "=", "payload", "[", "\"syms\"", "]", "qfList", "...
42.8
19
def get(context, request, username=None):
    """Plone users route
    """
    # Don't allow anonymous users to query a user other than themselves.
    if api.is_anonymous():
        username = "current"

    if username is None:
        # No filter given: list every member.
        user_ids = api.get_member_ids()
    elif username == "current":
        user_ids = [api.get_current_user().getId()]
    else:
        user_ids = [username]

    # Prepare the batch window for the requested page.
    size = req.get_batch_size()
    start = req.get_batch_start()
    batch = api.make_batch(user_ids, size, start)

    # Resolve the user info records for the ids in the current batch.
    users = map(get_user_info, batch.get_batch())

    return {
        "pagesize": batch.get_pagesize(),
        "next": batch.make_next_url(),
        "previous": batch.make_prev_url(),
        "page": batch.get_pagenumber(),
        "pages": batch.get_numpages(),
        "count": batch.get_sequence_length(),
        "items": users,
    }
[ "def", "get", "(", "context", ",", "request", ",", "username", "=", "None", ")", ":", "user_ids", "=", "[", "]", "# Don't allow anonymous users to query a user other than themselves", "if", "api", ".", "is_anonymous", "(", ")", ":", "username", "=", "\"current\"",...
28.657143
15.142857
def _regexp(filename):
    """Get a list of patterns from a file and make a regular expression."""
    content = _get_resource_content(filename).decode('utf-8')
    # One pattern per line; combine them into a single alternation.
    return re.compile('|'.join(content.splitlines()))
[ "def", "_regexp", "(", "filename", ")", ":", "lines", "=", "_get_resource_content", "(", "filename", ")", ".", "decode", "(", "'utf-8'", ")", ".", "splitlines", "(", ")", "return", "re", ".", "compile", "(", "'|'", ".", "join", "(", "lines", ")", ")" ]
51.75
13
def apply(f, args):
    """Apply function f to the arguments provided.

    The last argument must always be coercible to a Seq. Intermediate
    arguments are not modified. For example:
    (apply max [1 2 3]) ;=> 3
    (apply max 4 [1 2 3]) ;=> 4"""
    call_args = list(args[:-1])
    try:
        tail = args[-1]
    except TypeError as e:
        logger.debug("Ignored %s: %s", type(e).__name__, e)
    # Splice the trailing sequence (if coercible) onto the argument list.
    seq = to_seq(tail)
    if seq is not None:
        call_args.extend(seq)
    return f(*call_args)
[ "def", "apply", "(", "f", ",", "args", ")", ":", "final", "=", "list", "(", "args", "[", ":", "-", "1", "]", ")", "try", ":", "last", "=", "args", "[", "-", "1", "]", "except", "TypeError", "as", "e", ":", "logger", ".", "debug", "(", "\"Igno...
25.526316
19.947368
def intersection(self, other, sort=False):
    """
    Form the intersection of two MultiIndex objects.

    Parameters
    ----------
    other : MultiIndex or array / Index of tuples
    sort : False or None, default False
        Sort the resulting MultiIndex if possible

        .. versionadded:: 0.24.0

        .. versionchanged:: 0.24.1

           Changed the default from ``True`` to ``False``, to match
           behaviour from before 0.24.0

    Returns
    -------
    Index
    """
    self._validate_sort_keyword(sort)
    self._assert_can_do_setop(other)
    other, result_names = self._convert_can_do_setop(other)

    # Fast path: identical indexes intersect to themselves.
    if self.equals(other):
        return self

    # Fall back to set intersection over the flattened tuple values; this
    # loses ordering, hence the optional sort below.
    self_tuples = self._ndarray_values
    other_tuples = other._ndarray_values
    uniq_tuples = set(self_tuples) & set(other_tuples)
    if sort is None:
        uniq_tuples = sorted(uniq_tuples)

    if len(uniq_tuples) == 0:
        # Empty intersection: preserve the original levels so level dtypes
        # survive even though no codes remain.
        return MultiIndex(levels=self.levels,
                          codes=[[]] * self.nlevels,
                          names=result_names, verify_integrity=False)
    else:
        return MultiIndex.from_arrays(lzip(*uniq_tuples), sortorder=0,
                                      names=result_names)
[ "def", "intersection", "(", "self", ",", "other", ",", "sort", "=", "False", ")", ":", "self", ".", "_validate_sort_keyword", "(", "sort", ")", "self", ".", "_assert_can_do_setop", "(", "other", ")", "other", ",", "result_names", "=", "self", ".", "_conver...
31.071429
19.309524
async def connect(self, next_time=None):
    """Connect with store

    :return: a coroutine and therefore it must be awaited
    """
    # Only attempt a connection from a state that allows it.
    if self.status not in can_connect:
        return
    event_loop = self._loop
    if event_loop.is_running():
        self.status = StatusType.connecting
        await self._connect(next_time)
[ "async", "def", "connect", "(", "self", ",", "next_time", "=", "None", ")", ":", "if", "self", ".", "status", "in", "can_connect", ":", "loop", "=", "self", ".", "_loop", "if", "loop", ".", "is_running", "(", ")", ":", "self", ".", "status", "=", "...
33.8
9.8
def uniquify(model):
    '''
    Remove all duplicate relationships
    '''
    seen_links = set()
    duplicate_ids = set()
    for index, (origin, rel, target, attrs) in model:
        # A link is identified by its endpoints, relation and (sorted)
        # attribute items, so attribute order does not matter.
        key = (origin, rel, target) + tuple(sorted(attrs.items()))
        if key in seen_links:
            duplicate_ids.add(index)
        seen_links.add(key)
    model.remove(duplicate_ids)
[ "def", "uniquify", "(", "model", ")", ":", "seen", "=", "set", "(", ")", "to_remove", "=", "set", "(", ")", "for", "ix", ",", "(", "o", ",", "r", ",", "t", ",", "a", ")", "in", "model", ":", "hashable_link", "=", "(", "o", ",", "r", ",", "t...
24.133333
18.533333
def escapeUnderscores(self):
    """ Escape underscores so that the markdown is correct """
    escaped = []
    for metric in self.metrics:
        # r"\_" avoids the invalid "\_" escape sequence (a SyntaxWarning on
        # modern Python); the replacement text is unchanged: backslash + "_".
        metric['name'] = metric['name'].replace("_", r"\_")
        escaped.append(metric)
    self.metrics = escaped
[ "def", "escapeUnderscores", "(", "self", ")", ":", "new_metrics", "=", "[", "]", "for", "m", "in", "self", ".", "metrics", ":", "m", "[", "'name'", "]", "=", "m", "[", "'name'", "]", ".", "replace", "(", "\"_\"", ",", "\"\\_\"", ")", "new_metrics", ...
31.222222
9
def handle_event(self, packet):
    """Handle incoming packet from rflink gateway."""
    command = packet.get('command')
    # Only command packets require an acknowledgement round-trip.
    if command:
        ack_task = self.send_command_ack(packet['id'], command)
        self.loop.create_task(ack_task)
[ "def", "handle_event", "(", "self", ",", "packet", ")", ":", "if", "packet", ".", "get", "(", "'command'", ")", ":", "task", "=", "self", ".", "send_command_ack", "(", "packet", "[", "'id'", "]", ",", "packet", "[", "'command'", "]", ")", "self", "."...
46.6
10
def getSectionByOffset(self, offset):
    """
    Given an offset in the file, tries to determine the section this offset
    belong to.

    @type offset: int
    @param offset: Offset value.

    @rtype: int
    @return: An index, starting at 0, that represents the section the given
    offset belongs to. Returns -1 when no section contains the offset.
    """
    # Return the first section whose raw-data range ends past the offset.
    for position, section in enumerate(self.sectionHeaders):
        raw_end = (section.pointerToRawData.value +
                   section.sizeOfRawData.value)
        if offset < raw_end:
            return position
    return -1
[ "def", "getSectionByOffset", "(", "self", ",", "offset", ")", ":", "index", "=", "-", "1", "for", "i", "in", "range", "(", "len", "(", "self", ".", "sectionHeaders", ")", ")", ":", "if", "(", "offset", "<", "self", ".", "sectionHeaders", "[", "i", ...
37.0625
23.5625
def _get_dns_cname(name, link=False):
    """
    Looks for associated domain zone, nameservers and linked record name
    until no more linked record name was found for the given fully qualified
    record name or the CNAME lookup was disabled, and then returns the
    parameters as a tuple (domain, nameservers, cname-or-None).
    """
    resolver = dns.resolver.Resolver()
    resolver.lifetime = 1  # fail fast: one second per DNS query
    domain = dns.resolver.zone_for_name(name, resolver=resolver).to_text(True)
    nameservers = Provider._get_nameservers(domain)
    cname = None
    # Cap chain length to guard against CNAME loops / excessive chains.
    links, max_links = 0, 5
    while link:
        if links >= max_links:
            LOGGER.error('Hetzner => Record %s has more than %d linked CNAME '
                         'records. Reduce the amount of CNAME links!',
                         name, max_links)
            raise AssertionError
        # Follow the most recently discovered target; start from `name`.
        qname = cname if cname else name
        rrset = Provider._dns_lookup(qname, 'CNAME', nameservers)
        if rrset:
            links += 1
            cname = rrset[0].to_text()
            # The target may live in a different zone; if so, re-resolve the
            # zone and its authoritative nameservers.
            qdomain = dns.resolver.zone_for_name(cname,
                                                 resolver=resolver).to_text(True)
            if domain != qdomain:
                domain = qdomain
                nameservers = Provider._get_nameservers(qdomain)
        else:
            # No further CNAME: the chain ends here.
            link = False
    if cname:
        LOGGER.info('Hetzner => Record %s has CNAME %s', name, cname)
    return domain, nameservers, cname
[ "def", "_get_dns_cname", "(", "name", ",", "link", "=", "False", ")", ":", "resolver", "=", "dns", ".", "resolver", ".", "Resolver", "(", ")", "resolver", ".", "lifetime", "=", "1", "domain", "=", "dns", ".", "resolver", ".", "zone_for_name", "(", "nam...
46
18.5625
def merge(cls, edge1, edge2):
    """ Merges multi-color information from two supplied :class:`BGEdge` instances into a new :class:`BGEdge`

    Since :class:`BGEdge` represents an undirected edge, created edge's vertices are assign according to the order
    in first supplied edge. Accounts for subclassing.

    :param edge1: first out of two edge information from which is to be merged into a new one
    :param edge2: second out of two edge information from which is to be merged into a new one
    :return: a new undirected with multi-color information merged from two supplied :class:`BGEdge` objects
    :raises: ``ValueError``
    """
    # Both edges must connect the same pair of vertices (in either order).
    if edge1.vertex1 not in (edge2.vertex1, edge2.vertex2):
        raise ValueError("Edges to be merged do not connect same vertices")
    same_orientation = edge1.vertex1 == edge2.vertex1
    expected_second = edge2.vertex2 if same_orientation else edge2.vertex1
    if edge1.vertex2 != expected_second:
        raise ValueError("Edges to be merged do not connect same vertices")
    # Vertex order of the first edge is kept; multicolors are combined.
    return cls(vertex1=edge1.vertex1, vertex2=edge1.vertex2,
               multicolor=edge1.multicolor + edge2.multicolor)
[ "def", "merge", "(", "cls", ",", "edge1", ",", "edge2", ")", ":", "if", "edge1", ".", "vertex1", "!=", "edge2", ".", "vertex1", "and", "edge1", ".", "vertex1", "!=", "edge2", ".", "vertex2", ":", "raise", "ValueError", "(", "\"Edges to be merged do not con...
62.65
35.15
def doc_open():
    """Build the HTML docs and open them in a web browser."""
    doc_index = os.path.join(DOCS_DIRECTORY, 'build', 'html', 'index.html')
    if sys.platform == 'darwin':
        # Mac OS X
        subprocess.check_call(['open', doc_index])
    elif sys.platform == 'win32':
        # Windows
        subprocess.check_call(['start', doc_index], shell=True)
    elif sys.platform.startswith('linux'):
        # All freedesktop-compatible desktops.  Python 2 reported 'linux2'
        # here but Python 3 reports 'linux', so match the prefix to cover
        # both (the old '== linux2' check sent Linux to the failure branch
        # on Python 3).
        subprocess.check_call(['xdg-open', doc_index])
    else:
        print_failure_message(
            "Unsupported platform. Please open `{0}' manually.".format(
                doc_index))
[ "def", "doc_open", "(", ")", ":", "doc_index", "=", "os", ".", "path", ".", "join", "(", "DOCS_DIRECTORY", ",", "'build'", ",", "'html'", ",", "'index.html'", ")", "if", "sys", ".", "platform", "==", "'darwin'", ":", "# Mac OS X", "subprocess", ".", "che...
39.625
16.4375
def fold(self, strand, temp=37.0, dangles=2, nolp=False, nogu=False,
         noclosinggu=False, constraints=None, canonicalbponly=False,
         partition=False, pfscale=None, imfeelinglucky=False, gquad=False):
    '''Run the RNAfold command and retrieve the result in a dictionary.

    :param strand: The DNA or RNA sequence on which to run RNAfold.
    :type strand: coral.DNA or coral.RNA
    :param temp: Temperature at which to run the calculations.
    :type temp: float
    :param dangles: How to treat dangling end energies. Set to 0 to ignore
                    dangling ends. Set to 1 to limit unpaired bases to at
                    most one dangling end (default for MFE calc). Set to 2
                    (the default) to remove the limit in 1. Set to 3 to
                    allow coaxial stacking of adjacent helices in
                    multi-loops.
    :type dangles: int
    :param nolp: Produce structures without lonely pairs (isolated single
                 base pairs).
    :type nolp: bool
    :param nogu: Do not allow GU pairs.
    :type nogu: bool
    :param noclosinggu: Do not allow GU pairs at the end of helices.
    :type noclosinggu: bool
    :param constraints: Any structural constraints to use. Format is
                        defined at
                        http://www.tbi.univie.ac.at/RNA/RNAfold.1.html
    :type constraints: str
    :param canonicalbponly: Remove non-canonical base pairs from the
                            structure constraint (if applicable).
    :type canonicalbponly: bool
    :param partition: Generates the partition function, generating a coarse
                      grain structure ('coarse') in the format described at
                      http://www.itc.univie.ac.at/~ivo/RNA/RNAlib/PF-Fold.html,
                      the ensemble free energy ('ensemble'), the centroid
                      structure ('centroid'), the free energy of the
                      centroid structure ('centroid_fe'), and its distance
                      from the ensemble ('centroid_d').
    :type partition: int
    :param pfscale: Scaling factor for the partition function.
    :type pfscale: float
    :param imfeelinglucky: Returns the one secondary structure from the
                           Boltzmann equilibrium according to its
                           probability in the ensemble.
    :type imfeelinglucky: bool
    :param gquad: Incorporate G-Quadruplex formation into the structure
                  prediction.
    :type gquad: bool
    :returns: Dictionary of calculated values, defaulting to values of MFE
              ('mfe': float) and dotbracket structure ('dotbracket': str).
              More keys are added depending on keyword arguments.
    :rtype: dict
    '''
    # Translate keyword arguments into RNAfold CLI flags / options.
    cmd_args = []
    cmd_kwargs = {'--temp=': str(temp)}
    cmd_kwargs['--dangles='] = dangles
    if nolp:
        cmd_args.append('--noLP')
    if nogu:
        cmd_args.append('--noGU')
    if noclosinggu:
        cmd_args.append('--noClosingGU')
    if constraints is not None:
        cmd_args.append('--constraint')
    if canonicalbponly:
        cmd_args.append('--canonicalBPonly')
    if partition:
        cmd_args.append('--partfunc')
    if pfscale is not None:
        # NOTE(review): this key lacks the '--' / '=' decoration used by the
        # other cmd_kwargs entries — confirm _run handles it as intended.
        cmd_kwargs['pfScale'] = float(pfscale)
    if gquad:
        cmd_args.append('--gquad')

    # The sequence goes on stdin; an optional constraint string follows it.
    inputs = [str(strand)]
    if constraints is not None:
        inputs.append(constraints)

    if strand.circular:
        cmd_args.append('--circ')

    rnafold_output = self._run('RNAfold', inputs, cmd_args, cmd_kwargs)

    # Process the output
    output = {}
    lines = rnafold_output.splitlines()
    # Line 1 is the sequence as RNA
    lines.pop(0)
    # Line 2 is the dotbracket + mfe
    line2 = lines.pop(0)
    output['dotbracket'] = self._lparse(line2, '^(.*) \(')
    output['mfe'] = float(self._lparse(line2, ' \((.*)\)$'))
    # Optional outputs
    if partition:
        # Line 3 is 'a coarse representation of the pair probabilities' and
        # the ensemble free energy
        line3 = lines.pop(0)
        output['coarse'] = self._lparse(line3, '^(.*) \[')
        output['ensemble'] = float(self._lparse(line3, ' \[(.*)\]$'))
        # Line 4 is the centroid structure, its free energy, and distance
        # to the ensemble
        line4 = lines.pop(0)
        output['centroid'] = self._lparse(line4, '^(.*) \{')
        output['centroid_fe'] = float(self._lparse(line4, '^.*{(.*) d'))
        output['centroid_d'] = float(self._lparse(line4, 'd=(.*)}$'))
    return output
[ "def", "fold", "(", "self", ",", "strand", ",", "temp", "=", "37.0", ",", "dangles", "=", "2", ",", "nolp", "=", "False", ",", "nogu", "=", "False", ",", "noclosinggu", "=", "False", ",", "constraints", "=", "None", ",", "canonicalbponly", "=", "Fals...
45.613208
21.009434
def apply_one_hot_encoding(self, one_hot_encoding):
    """Apply one hot encoding to generate a specific config.

    Arguments:
        one_hot_encoding (list): A list of one hot encodings, 1 for each
            parameter. The shape of each encoding should match that
            ``ParameterSpace``

    Returns:
        A dict config with specific <name, value> pair
    """
    # For each parameter, the hot index selects the concrete choice.
    return {
        spec.name: spec.choices[np.argmax(encoding)]
        for spec, encoding in zip(self.param_list, one_hot_encoding)
    }
[ "def", "apply_one_hot_encoding", "(", "self", ",", "one_hot_encoding", ")", ":", "config", "=", "{", "}", "for", "ps", ",", "one_hot", "in", "zip", "(", "self", ".", "param_list", ",", "one_hot_encoding", ")", ":", "index", "=", "np", ".", "argmax", "(",...
34.705882
19.470588
def instantiate(self, **extra_args):
    """ Instantiate the model """
    return QRainbowModel(
        input_block=self.input_block.instantiate(),
        backbone=self.backbone.instantiate(**extra_args),
        action_space=extra_args['action_space'],
        vmin=self.vmin,
        vmax=self.vmax,
        atoms=self.atoms,
        initial_std_dev=self.initial_std_dev,
        factorized_noise=self.factorized_noise
    )
[ "def", "instantiate", "(", "self", ",", "*", "*", "extra_args", ")", ":", "input_block", "=", "self", ".", "input_block", ".", "instantiate", "(", ")", "backbone", "=", "self", ".", "backbone", ".", "instantiate", "(", "*", "*", "extra_args", ")", "retur...
34.733333
13.2
def _check_attributes(self, attributes, extra=None): """Check if attributes given to the constructor can be used to instanciate a valid node.""" extra = extra or () unknown_keys = set(attributes) - set(self._possible_attributes) - set(extra) if unknown_keys: logger.warning('%s got unknown attributes: %s' % (self.__class__.__name__, unknown_keys))
[ "def", "_check_attributes", "(", "self", ",", "attributes", ",", "extra", "=", "None", ")", ":", "extra", "=", "extra", "or", "(", ")", "unknown_keys", "=", "set", "(", "attributes", ")", "-", "set", "(", "self", ".", "_possible_attributes", ")", "-", ...
52.625
16.625
def _get_variant_regions(items):
    """Retrieve variant regions defined in any of the input items.
    """
    # Whole-genome items carry no usable variant_regions; skip them, then
    # drop entries where no region file was configured at all.
    candidates = [tz.get_in(("config", "algorithm", "variant_regions"), data)
                  for data in items
                  if tz.get_in(["config", "algorithm", "coverage_interval"],
                               data) != "genome"]
    return [region for region in candidates if region is not None]
[ "def", "_get_variant_regions", "(", "items", ")", ":", "return", "list", "(", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "[", "tz", ".", "get_in", "(", "(", "\"config\"", ",", "\"algorithm\"", ",", "\"variant_regions\"", ")", ",", ...
54.142857
17.428571
def build_docker_build_command(configuration):
    """ Translate a declarative docker `configuration` to a `docker build` command.

    Parameters
    ----------
    configuration : dict
        configuration

    Returns
    -------
    args : list
        sequence of command line arguments to build an image
    """
    command = configuration.pop('docker', 'docker').split()
    command.append('build')

    build = configuration.pop('build')
    # Resolve the build context and Dockerfile relative to the workspace.
    build['path'] = os.path.join(configuration['workspace'], build['path'])
    build['file'] = os.path.join(build['path'], build['file'])

    command.extend(build_parameter_parts(
        build, 'tag', 'file', 'no-cache', 'quiet', 'cpu-shares', 'memory'))
    command.extend(build_dict_parameter_parts(build, 'build-arg'))
    # The positional context path comes last.
    command.append(build.pop('path'))
    return command
[ "def", "build_docker_build_command", "(", "configuration", ")", ":", "parts", "=", "configuration", ".", "pop", "(", "'docker'", ",", "'docker'", ")", ".", "split", "(", ")", "parts", ".", "append", "(", "'build'", ")", "build", "=", "configuration", ".", ...
27.413793
23.965517
def next_code_is_indented(lines):
    """Is the next unescaped line indented?"""
    for candidate in lines:
        # Blank and comment-only lines carry no indentation information.
        if _BLANK_LINE.match(candidate) or _PY_COMMENT.match(candidate):
            continue
        return _PY_INDENTED.match(candidate)
    return False
[ "def", "next_code_is_indented", "(", "lines", ")", ":", "for", "line", "in", "lines", ":", "if", "_BLANK_LINE", ".", "match", "(", "line", ")", "or", "_PY_COMMENT", ".", "match", "(", "line", ")", ":", "continue", "return", "_PY_INDENTED", ".", "match", ...
34
13.142857
def xml(self):
    """
    Get xml representation of the object.
    @return: The root node.
    @rtype: L{Element}
    """
    node = Element('Security', ns=wssens)
    node.set('mustUnderstand', str(self.mustUnderstand).lower())
    # Append the XML form of every contained security token.
    for token in self.tokens:
        node.append(token.xml())
    return node
[ "def", "xml", "(", "self", ")", ":", "root", "=", "Element", "(", "'Security'", ",", "ns", "=", "wssens", ")", "root", ".", "set", "(", "'mustUnderstand'", ",", "str", "(", "self", ".", "mustUnderstand", ")", ".", "lower", "(", ")", ")", "for", "t"...
30.090909
11.545455
def error(self, s, pos):
    """Show text and a caret under that. For example:
x = 2y + z
       ^
"""
    print("Lexical error:")
    # Show up to 10 characters of trailing context after the error point.
    print(s[:pos + 10])
    print(" " * (pos - 1) + "^")
    for token in self.rv:
        print(token)
    raise SystemExit
[ "def", "error", "(", "self", ",", "s", ",", "pos", ")", ":", "print", "(", "\"Lexical error:\"", ")", "print", "(", "\"%s\"", "%", "s", "[", ":", "pos", "+", "10", "]", ")", "# + 10 for trailing context", "print", "(", "\"%s^\"", "%", "(", "\" \"", "...
28.6
13.6
def btc_script_serialize(_script):
    """
    Given a deserialized script (i.e. an array of Nones, ints, and strings),
    or an existing script, turn it back into a hex script

    Based on code from pybitcointools (https://github.com/vbuterin/pybitcointools)
    by Vitalik Buterin
    """
    script = _script
    if encoding.json_is_base(_script, 16):
        # hex-to-bin all hex strings in this script
        script = encoding.json_changebase(
            _script, lambda unit: binascii.unhexlify(unit))

    # Serialize each unit, concatenate, and hexlify the whole script.
    serialized = map(_btc_script_serialize_unit, script)
    return encoding.safe_hexlify(''.join(serialized))
[ "def", "btc_script_serialize", "(", "_script", ")", ":", "script", "=", "_script", "if", "encoding", ".", "json_is_base", "(", "_script", ",", "16", ")", ":", "# hex-to-bin all hex strings in this script", "script", "=", "encoding", ".", "json_changebase", "(", "_...
41.333333
23.066667
def destroy_venv(env_path, venvscache=None):
    """Destroy a venv."""
    logger.debug("Destroying virtualenv at: %s", env_path)
    # Remove the on-disk tree; ignore_errors tolerates an already-gone venv.
    shutil.rmtree(env_path, ignore_errors=True)

    # Drop the cache entry too, when a cache was provided.
    if venvscache is not None:
        venvscache.remove(env_path)
[ "def", "destroy_venv", "(", "env_path", ",", "venvscache", "=", "None", ")", ":", "# remove the venv itself in disk", "logger", ".", "debug", "(", "\"Destroying virtualenv at: %s\"", ",", "env_path", ")", "shutil", ".", "rmtree", "(", "env_path", ",", "ignore_errors...
33.666667
11.111111
def create_timeline(self, timeline, scope_identifier, hub_name, plan_id):
    """CreateTimeline.
    :param :class:`<Timeline> <azure.devops.v5_0.task.models.Timeline>` timeline:
    :param str scope_identifier: The project GUID to scope the request
    :param str hub_name: The name of the server hub: "build" for the Build server or "rm" for the Release Management server
    :param str plan_id:
    :rtype: :class:`<Timeline> <azure.devops.v5_0.task.models.Timeline>`
    """
    # Build the URL route values, skipping parameters that were not given.
    route_values = {}
    for route_key, param_name, value in (
            ('scopeIdentifier', 'scope_identifier', scope_identifier),
            ('hubName', 'hub_name', hub_name),
            ('planId', 'plan_id', plan_id)):
        if value is not None:
            route_values[route_key] = self._serialize.url(param_name, value, 'str')

    content = self._serialize.body(timeline, 'Timeline')
    response = self._send(http_method='POST',
                          location_id='83597576-cc2c-453c-bea6-2882ae6a1653',
                          version='5.0',
                          route_values=route_values,
                          content=content)
    return self._deserialize('Timeline', response)
[ "def", "create_timeline", "(", "self", ",", "timeline", ",", "scope_identifier", ",", "hub_name", ",", "plan_id", ")", ":", "route_values", "=", "{", "}", "if", "scope_identifier", "is", "not", "None", ":", "route_values", "[", "'scopeIdentifier'", "]", "=", ...
58.909091
24.954545
def _format_results(_keywords, combined_keywords, split, scores):
    """
    :param keywords:dict of keywords:scores
    :param combined_keywords:list of word/s
    """
    # Rank combined keywords by their average component score, best first.
    combined_keywords.sort(
        key=lambda word: _get_average_score(word, _keywords), reverse=True)

    if scores:
        return [(word, _get_average_score(word, _keywords))
                for word in combined_keywords]
    if split:
        return combined_keywords
    return "\n".join(combined_keywords)
[ "def", "_format_results", "(", "_keywords", ",", "combined_keywords", ",", "split", ",", "scores", ")", ":", "combined_keywords", ".", "sort", "(", "key", "=", "lambda", "w", ":", "_get_average_score", "(", "w", ",", "_keywords", ")", ",", "reverse", "=", ...
40.090909
17.363636
def manage_view(request, semester, profile=None):
    """
    View all members' preferences. This view also includes forms to create an
    entire semester's worth of weekly workshifts.
    """
    page_name = "Manage Workshift"
    pools = WorkshiftPool.objects.filter(semester=semester)
    full_management = utils.can_manage(request.user, semester=semester)
    edit_semester_form = None
    close_semester_form = None
    open_semester_form = None

    if not full_management:
        # Non-admins only see pools they manage; bail out if there are none.
        pools = pools.filter(managers__incumbent__user=request.user)
        if not pools.count():
            messages.add_message(request, messages.ERROR,
                                 MESSAGES["ADMINS_ONLY"])
            return HttpResponseRedirect(semester.get_view_url())
    else:
        # Admins get the semester-level forms; each form binds to POST data
        # only when its own submit button was pressed.
        edit_semester_form = FullSemesterForm(
            data=request.POST if "edit_semester" in request.POST else None,
            instance=semester,
        )
        if semester.current:
            close_semester_form = CloseSemesterForm(
                data=request.POST if "close_semester" in request.POST else None,
                semester=semester,
            )
        else:
            open_semester_form = OpenSemesterForm(
                data=request.POST if "open_semester" in request.POST else None,
                semester=semester
            )

    if edit_semester_form and edit_semester_form.is_valid():
        semester = edit_semester_form.save()
        messages.add_message(
            request,
            messages.INFO,
            "Semester successfully updated.",
        )
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))

    if close_semester_form and close_semester_form.is_valid():
        close_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester closed.")
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))

    if open_semester_form and open_semester_form.is_valid():
        open_semester_form.save()
        messages.add_message(request, messages.INFO, "Semester reopened.")
        return HttpResponseRedirect(wurl(
            "workshift:manage",
            sem_url=semester.sem_url,
        ))

    pools = pools.order_by("-is_primary", "title")
    workshifters = WorkshiftProfile.objects.filter(semester=semester)
    # One row of pool-hour records per workshifter, one column per pool.
    pool_hours = [
        [
            workshifter.pool_hours.get(pool=pool)
            for pool in pools
        ]
        for workshifter in workshifters
    ]

    return render_to_response("manage.html", {
        "page_name": page_name,
        "pools": pools,
        "full_management": full_management,
        "edit_semester_form": edit_semester_form,
        "close_semester_form": close_semester_form,
        "open_semester_form": open_semester_form,
        "workshifters": zip(workshifters, pool_hours),
    }, context_instance=RequestContext(request))
[ "def", "manage_view", "(", "request", ",", "semester", ",", "profile", "=", "None", ")", ":", "page_name", "=", "\"Manage Workshift\"", "pools", "=", "WorkshiftPool", ".", "objects", ".", "filter", "(", "semester", "=", "semester", ")", "full_management", "=",...
35.839506
17.691358
def discover(url, timeout=None):
    """Discover the hub url and topic url of a given url.

    Firstly, by inspecting the page's headers, secondarily by inspecting
    the content for link tags.

    timeout determines how long to wait for the url to load. It defaults
    to 3.
    """
    response = get_content({'REQUEST_TIMEOUT': timeout}, url)

    link_parser = LinkParser()
    # Seed the parser with whatever the HTTP Link headers already told us.
    link_parser.hub_url = (response.links.get('hub') or {}).get('url')
    link_parser.topic_url = (response.links.get('self') or {}).get('url')
    try:
        link_parser.updated()
        # Stream the body through the parser looking for <link> tags; the
        # parser raises Finished once both urls are known.
        chunks = response.iter_content(chunk_size=None, decode_unicode=True)
        for chunk in chunks:
            link_parser.feed(chunk)
        link_parser.close()
    except Finished:
        return {'hub_url': link_parser.hub_url,
                'topic_url': link_parser.topic_url}
    raise DiscoveryError("Could not find hub url in topic page")
[ "def", "discover", "(", "url", ",", "timeout", "=", "None", ")", ":", "resp", "=", "get_content", "(", "{", "'REQUEST_TIMEOUT'", ":", "timeout", "}", ",", "url", ")", "parser", "=", "LinkParser", "(", ")", "parser", ".", "hub_url", "=", "(", "resp", ...
38
24.238095
def unsubscribe_all(self, callback=False):
    """Send an unsubscribe for all active subscriptions"""
    # Materialize the matches: the original genexp made `if futures:`
    # always true (a generator object is truthy even when empty), so the
    # guard never did anything.
    futures = [(f, r) for f, r in self._requests.items()
               if isinstance(r, Subscribe)
               and f not in self._pending_unsubscribes]
    for future, request in futures:
        if callback:
            log.warn("Unsubscribing from %s", request.path)
            # Defer to the cothread event loop instead of calling inline.
            cothread.Callback(self.unsubscribe, future)
        else:
            self.unsubscribe(future)
[ "def", "unsubscribe_all", "(", "self", ",", "callback", "=", "False", ")", ":", "futures", "=", "(", "(", "f", ",", "r", ")", "for", "f", ",", "r", "in", "self", ".", "_requests", ".", "items", "(", ")", "if", "isinstance", "(", "r", ",", "Subscr...
46.166667
13
def change_dir(directory):
    """
    Wraps a function to run in a given directory.

    The original working directory is restored even if the wrapped function
    raises, and the wrapped function's return value is passed through
    (previously it was discarded and the cwd leaked on exceptions).
    """
    def cd_decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            org_path = os.getcwd()
            os.chdir(directory)
            try:
                return func(*args, **kwargs)
            finally:
                # Always restore the caller's working directory.
                os.chdir(org_path)
        return wrapper
    return cd_decorator
[ "def", "change_dir", "(", "directory", ")", ":", "def", "cd_decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "org_path", "=", "os", ".", "getcwd", "(", ")", ...
21.428571
14.571429
def generate_bytecode_from_obb(obb: object, previous: bytes) -> bytes:
    """
    Generates a bytecode from an object.

    :param obb: The object to generate.
    :param previous: The previous bytecode to use when generating subobjects.
    :return: The generated bytecode.
    """
    # Dispatch on the object's type: ops and comparators serialize
    # themselves against the previous bytecode, validators emit a load,
    # ints become minimal little-endian bytes, raw bytes pass through.
    if isinstance(obb, pyte.superclasses._PyteOp):
        return obb.to_bytes(previous)
    if isinstance(obb, (pyte.superclasses._PyteAugmentedComparator,
                        pyte.superclasses._PyteAugmentedValidator._FakeMathematicalOP)):
        return obb.to_bytes(previous)
    if isinstance(obb, pyte.superclasses._PyteAugmentedValidator):
        obb.validate()
        return obb.to_load()
    if isinstance(obb, int):
        byte_length = (obb.bit_length() + 7) // 8
        return obb.to_bytes(byte_length, byteorder="little") or b''
    if isinstance(obb, bytes):
        return obb
    raise TypeError("`{}` was not a valid bytecode-encodable item".format(obb))
[ "def", "generate_bytecode_from_obb", "(", "obb", ":", "object", ",", "previous", ":", "bytes", ")", "->", "bytes", ":", "# Generates bytecode from a specified object, be it a validator or an int or bytes even.", "if", "isinstance", "(", "obb", ",", "pyte", ".", "superclas...
44.521739
20.608696
def status(self, obj):
    """Get the wifi interface status."""
    reply = self._send_cmd_to_wpas(obj['name'], 'STATUS', True)
    state = ''
    # The STATUS reply is one key=value pair per line; keep the last
    # wpa_state entry seen.
    for line in reply.split('\n'):
        if line.startswith('wpa_state='):
            state = line[10:]
    return status_dict[state.lower()]
[ "def", "status", "(", "self", ",", "obj", ")", ":", "reply", "=", "self", ".", "_send_cmd_to_wpas", "(", "obj", "[", "'name'", "]", ",", "'STATUS'", ",", "True", ")", "result", "=", "reply", ".", "split", "(", "'\\n'", ")", "status", "=", "''", "fo...
30.272727
17.181818
async def get_my_did_with_meta(wallet_handle: int, did: str) -> str:
    """
    Get DID metadata and verkey stored in the wallet.

    :param wallet_handle: wallet handler (created by open_wallet).
    :param did: The DID to retrieve metadata.
    :return: DID with verkey and metadata.
    """
    logger = logging.getLogger(__name__)
    logger.debug("get_my_did_with_meta: >>> wallet_handle: %r, did: %r",
                 wallet_handle,
                 did)

    # Create the C callback once and cache it on the function object so the
    # same callback instance is reused (and kept alive) across calls.
    if not hasattr(get_my_did_with_meta, "cb"):
        logger.debug("get_my_did_with_meta: Creating callback")
        get_my_did_with_meta.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p))

    # Marshal Python arguments into C types for the libindy FFI call.
    c_wallet_handle = c_int32(wallet_handle)
    c_did = c_char_p(did.encode('utf-8'))

    did_with_meta = await do_call('indy_get_my_did_with_meta',
                                  c_wallet_handle,
                                  c_did,
                                  get_my_did_with_meta.cb)
    res = did_with_meta.decode()
    logger.debug("get_my_did_with_meta: <<< res: %r", res)
    return res
[ "async", "def", "get_my_did_with_meta", "(", "wallet_handle", ":", "int", ",", "did", ":", "str", ")", "->", "str", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "logger", ".", "debug", "(", "\"get_my_did_with_meta: >>> wallet_handle: %r...
34.966667
19.966667
def endpoint(self):
    """The endpoint for this route.

    Resolution order: an explicit endpoint, then one derived from the
    controller class, then the blueprint, then the view function's module.
    """
    if self._endpoint:
        return self._endpoint
    if self._controller_cls:
        base = f'{snake_case(self._controller_cls.__name__)}.{self.method_name}'
        if self.bp_name:
            return f'{self.bp_name}.{base}'
        return base
    if self.bp_name:
        return f'{self.bp_name}.{self.method_name}'
    return f'{self.view_func.__module__}.{self.method_name}'
[ "def", "endpoint", "(", "self", ")", ":", "if", "self", ".", "_endpoint", ":", "return", "self", ".", "_endpoint", "elif", "self", ".", "_controller_cls", ":", "endpoint", "=", "f'{snake_case(self._controller_cls.__name__)}.{self.method_name}'", "return", "endpoint", ...
40.333333
16.166667
def UNas(self, to='name_short'):
    """
    Return UN member states in the specified classification

    Parameters
    ----------
    to : str, optional
        Output classification (valid str for an index of country_data file),
        default: name_short

    Returns
    -------
    Pandas DataFrame
    """
    # A single classification name is promoted to a one-element column list.
    columns = [to] if isinstance(to, str) else to
    is_member = self.data.UNmember > 0
    return self.data[is_member][columns]
[ "def", "UNas", "(", "self", ",", "to", "=", "'name_short'", ")", ":", "if", "isinstance", "(", "to", ",", "str", ")", ":", "to", "=", "[", "to", "]", "return", "self", ".", "data", "[", "self", ".", "data", ".", "UNmember", ">", "0", "]", "[", ...
24.888889
19.222222
def build_byte_align_buff(bits):
    """Pad the left side of a bitarray with 0s to align its length with byte boundaries.

    Args:
        bits: A bitarray to be padded and aligned.

    Returns:
        A newly aligned bitarray.
    """
    remainder = len(bits) % 8
    if remainder:
        padding = bitarray(8 - remainder)
        padding.setall(False)
    else:
        # Already byte-aligned: prepend an empty bitarray.
        padding = bitarray()
    return padding + bits
[ "def", "build_byte_align_buff", "(", "bits", ")", ":", "bitmod", "=", "len", "(", "bits", ")", "%", "8", "if", "bitmod", "==", "0", ":", "rdiff", "=", "bitarray", "(", ")", "else", ":", "#KEEP bitarray", "rdiff", "=", "bitarray", "(", "8", "-", "bitm...
24.294118
17.764706
def get_resource_pools(service_instance, resource_pool_names, datacenter_name=None,
                       get_all_resource_pools=False):
    '''
    Retrieves resource pool objects

    service_instance
        The service instance object to query the vCenter

    resource_pool_names
        Resource pool names

    datacenter_name
        Name of the datacenter where the resource pool is available

    get_all_resource_pools
        Boolean

    return
        Resourcepool managed object reference
    '''
    properties = ['name']
    if not resource_pool_names:
        resource_pool_names = []
    # Scope the search to the datacenter when given, otherwise the whole root.
    if datacenter_name:
        container_ref = get_datacenter(service_instance, datacenter_name)
    else:
        container_ref = get_root_folder(service_instance)

    resource_pools = get_mors_with_properties(service_instance,
                                              vim.ResourcePool,
                                              container_ref=container_ref,
                                              property_list=properties)

    selected_pools = []
    for pool in resource_pools:
        if get_all_resource_pools or (pool['name'] in resource_pool_names):
            selected_pools.append(pool['object'])
    if not selected_pools:
        # BUG FIX: the original interpolated the (empty) selected_pools list
        # into the message instead of the requested names, yielding
        # "names=[]" for every failure.  Report what was actually asked for.
        raise salt.exceptions.VMwareObjectRetrievalError(
            'The resource pools with properties '
            'names={} get_all={} could not be found'.format(
                resource_pool_names, get_all_resource_pools))
    return selected_pools
[ "def", "get_resource_pools", "(", "service_instance", ",", "resource_pool_names", ",", "datacenter_name", "=", "None", ",", "get_all_resource_pools", "=", "False", ")", ":", "properties", "=", "[", "'name'", "]", "if", "not", "resource_pool_names", ":", "resource_po...
33.511111
24.888889
def _special_value_maxLength(em, newValue=NOT_PROVIDED):
    '''
        _special_value_maxLength - Handle the special "maxLength" property

        @param em <AdvancedTag> - The tag element

        @param newValue - Default NOT_PROVIDED, if provided will use that value instead of the
            current .getAttribute value on the tag.

            This is because this method can be used for both validation and getting/setting
    '''
    if newValue is NOT_PROVIDED:
        # Read path: a tag without the attribute reports -1, and an invalid
        # stored value silently falls back to -1 as well.
        if not em.hasAttribute('maxlength'):
            return -1
        candidate = em.getAttribute('maxlength', '-1')
        onInvalid = -1
    else:
        # Write/validate path: an invalid value must raise.
        candidate = newValue
        onInvalid = IndexSizeErrorException

    return convertToIntRange(candidate, minValue=0, maxValue=None,
                             emptyValue='0', invalidDefault=onInvalid)
[ "def", "_special_value_maxLength", "(", "em", ",", "newValue", "=", "NOT_PROVIDED", ")", ":", "if", "newValue", "is", "NOT_PROVIDED", ":", "if", "not", "em", ".", "hasAttribute", "(", "'maxlength'", ")", ":", "return", "-", "1", "curValue", "=", "em", ".",...
35.142857
30.428571
def attach(self, engine, metric_names=None, output_transform=None,
           event_name=Events.ITERATION_COMPLETED,
           closing_event_name=Events.EPOCH_COMPLETED):
    """
    Attaches the progress bar to an engine object.

    Args:
        engine (Engine): engine object.
        metric_names (list, optional): list of the metrics names to log as the bar progresses
        output_transform (callable, optional): a function to select what you want to print from the engine's
            output. This function may return either a dictionary with entries in the format of ``{name: value}``,
            or a single scalar, which will be displayed with the default name `output`.
        event_name: event's name on which the progress bar advances. Valid events are from
            :class:`~ignite.engine.Events`.
        closing_event_name: event's name on which the progress bar is closed. Valid events are from
            :class:`~ignite.engine.Events`.

    Raises:
        ValueError: if either event is not an ignite event, or if the
            advancing event does not precede the closing event.
    """
    desc = self.tqdm_kwargs.get("desc", "Epoch")

    # Both events must be members of the ignite Events enum.
    if not (event_name in Events and closing_event_name in Events):
        raise ValueError("Logging and closing events should be only ignite.engine.Events")

    # The advancing event must fire before the closing event within a run.
    if not self._compare_lt(event_name, closing_event_name):
        raise ValueError("Logging event {} should be called before closing event {}"
                         .format(event_name, closing_event_name))

    log_handler = _OutputHandler(desc, metric_names, output_transform,
                                 event_name=event_name,
                                 closing_event_name=closing_event_name)
    # Register the update handler on the advancing event and close the bar
    # on the closing event.
    super(ProgressBar, self).attach(engine, log_handler, event_name)
    engine.add_event_handler(closing_event_name, self._close)
[ "def", "attach", "(", "self", ",", "engine", ",", "metric_names", "=", "None", ",", "output_transform", "=", "None", ",", "event_name", "=", "Events", ".", "ITERATION_COMPLETED", ",", "closing_event_name", "=", "Events", ".", "EPOCH_COMPLETED", ")", ":", "desc...
58.064516
32
def remove_class(self, cls):
    """
    Remove a XSO class `cls` from parsing.

    This method raises :class:`KeyError` with the classes
    :attr:`TAG` attribute as argument if removing fails because the class
    is not registered.
    """
    # The tag map is popped first so an unregistered class raises
    # KeyError(cls.TAG) before the class map is touched.
    self._tag_map.pop(cls.TAG)
    self._class_map.pop(cls)
[ "def", "remove_class", "(", "self", ",", "cls", ")", ":", "del", "self", ".", "_tag_map", "[", "cls", ".", "TAG", "]", "del", "self", ".", "_class_map", "[", "cls", "]" ]
39.875
13.625
def _left_motion(self, event):
    """Function bound to move event for marker canvas.

    Drags the currently selected marker: clamps/extends the timeline,
    applies overlap protection, optional category (row) changes, optional
    tick snapping, then redraws the rectangle/text and schedules the
    move callback.
    """
    iid = self.current_iid
    if iid is None:
        return
    marker = self._markers[iid]
    if marker["move"] is False:
        return
    # Marker width in time units; preserved while dragging.
    delta = marker["finish"] - marker["start"]
    # Limit x to 0
    x = max(self._timeline.canvasx(event.x), 0)
    # Check if the timeline needs to be extended
    limit = self.get_time_position(self._finish - delta)
    if self._extend is False:
        x = min(x, limit)
    elif x > limit:  # self._extend is True
        # Grow the timeline by 10% past the marker's new end.
        self.configure(finish=(self.get_position_time(x) + (marker["finish"] - marker["start"])) * 1.1)
    # Get the new start value
    start = self.get_position_time(x)
    finish = start + (marker["finish"] - marker["start"])
    rectangle_id, text_id = marker["rectangle_id"], marker["text_id"]
    if rectangle_id not in self._timeline.find_all():
        # Canvas item no longer exists; nothing to move.
        return
    x1, y1, x2, y2 = self._timeline.coords(rectangle_id)
    # Overlap protection
    allow_overlap = marker["allow_overlap"]
    allow_overlap = self._marker_allow_overlap if allow_overlap == "default" else allow_overlap
    if allow_overlap is False:
        for marker_dict in self.markers.values():
            if marker_dict["allow_overlap"] is True:
                continue
            # Only markers in the same category (row) can collide.
            if marker["iid"] != marker_dict["iid"] and marker["category"] == marker_dict["category"]:
                if marker_dict["start"] < start < marker_dict["finish"]:
                    # New start falls inside another marker: push past it.
                    start = marker_dict["finish"] if start < marker_dict["finish"] else marker_dict["start"]
                    finish = start + (marker["finish"] - marker["start"])
                    x = self.get_time_position(start)
                    break
                if marker_dict["start"] < finish < marker_dict["finish"]:
                    # New finish falls inside another marker: pull back before it.
                    finish = marker_dict["finish"] if finish > marker_dict["finish"] else marker_dict["start"]
                    start = finish - (marker_dict["finish"] - marker_dict["start"])
                    x = self.get_time_position(start)
                    break
    # Vertical movement
    if marker["change_category"] is True or \
            (marker["change_category"] == "default" and self._marker_change_category):
        y = max(self._timeline.canvasy(event.y), 0)
        # Snap to the row whose top edge is closest to the cursor.
        category = min(self._rows.keys(), key=lambda category: abs(self._rows[category][0] - y))
        marker["category"] = category
        y1, y2 = self._rows[category]
    # Snapping to ticks
    if marker["snap_to_ticks"] is True or (marker["snap_to_ticks"] == "default" and self._marker_snap_to_ticks):
        # Start is prioritized over finish
        for tick in self._ticks:
            tick = self.get_time_position(tick)
            # Start
            if abs(x - tick) < self._snap_margin:
                x = tick
                break
            # Finish
            x_finish = x + delta
            if abs(x_finish - tick) < self._snap_margin:
                delta = self.get_time_position(marker["finish"] - marker["start"])
                x = tick - delta
                break
    # Redraw the rectangle at the new position, preserving its width.
    rectangle_coords = (x, y1, x2 + (x - x1), y2)
    self._timeline.coords(rectangle_id, *rectangle_coords)
    if text_id is not None:
        text_x, text_y = TimeLine.calculate_text_coords(rectangle_coords)
        self._timeline.coords(text_id, text_x, text_y)
    # Debounce the move callback: cancel any pending one and schedule anew.
    if self._after_id is not None:
        self.after_cancel(self._after_id)
    args = (iid, (marker["start"], marker["finish"]), (start, finish))
    self._after_id = self.after(10, self._after_handler(iid, "move_callback", args))
    marker["start"] = start
    marker["finish"] = finish
[ "def", "_left_motion", "(", "self", ",", "event", ")", ":", "iid", "=", "self", ".", "current_iid", "if", "iid", "is", "None", ":", "return", "marker", "=", "self", ".", "_markers", "[", "iid", "]", "if", "marker", "[", "\"move\"", "]", "is", "False"...
51.64
20.653333
def get_deps_manager(self, *args, **kwargs):
    """
    Return instance of the dependancies manager using given args and kwargs
    Add 'silent_key_error' option in kwargs if not given.
    """
    # Fall back to the instance-level setting unless the caller overrode it.
    kwargs.setdefault('silent_key_error', self.silent_key_error)
    return self.deps_manager(*args, **kwargs)
[ "def", "get_deps_manager", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'silent_key_error'", "not", "in", "kwargs", ":", "kwargs", "[", "'silent_key_error'", "]", "=", "self", ".", "silent_key_error", "return", "self", ".", "dep...
40.111111
15.444444
def random_get_instance() -> tcod.random.Random:
    """Return the default Random instance.

    Returns:
        Random: A Random instance using the default random number generator.
    """
    cdata = ffi.cast("mersenne_data_t*", lib.TCOD_random_get_instance())
    return tcod.random.Random._new_from_cdata(cdata)
[ "def", "random_get_instance", "(", ")", "->", "tcod", ".", "random", ".", "Random", ":", "return", "tcod", ".", "random", ".", "Random", ".", "_new_from_cdata", "(", "ffi", ".", "cast", "(", "\"mersenne_data_t*\"", ",", "lib", ".", "TCOD_random_get_instance", ...
33.777778
20.111111
def read_samples(self, sr=None, offset=0, duration=None):
    """
    Read the samples of the utterance.

    Args:
        sr (int): If None uses the sampling rate given by the track,
                  otherwise resamples to the given sampling rate.
        offset (float): Offset in seconds to read samples from.
        duration (float): If not ``None`` read only this number of seconds
                          in maximum.

    Returns:
        np.ndarray: A numpy array containing the samples
                    as a floating point (numpy.float32) time series.
    """
    # Start from the utterance duration (may be None = unknown) and shrink
    # it by the offset and the caller-requested maximum.
    remaining = self.duration
    if offset > 0 and remaining is not None:
        remaining -= offset
    if duration is not None:
        remaining = duration if remaining is None else min(duration, remaining)
    return self.track.read_samples(sr=sr,
                                   offset=self.start + offset,
                                   duration=remaining)
[ "def", "read_samples", "(", "self", ",", "sr", "=", "None", ",", "offset", "=", "0", ",", "duration", "=", "None", ")", ":", "read_duration", "=", "self", ".", "duration", "if", "offset", ">", "0", "and", "read_duration", "is", "not", "None", ":", "r...
33.0625
19.5
def from_inline(cls: Type[WS2PEndpointType], inline: str) -> WS2PEndpointType:
    """
    Return WS2PEndpoint instance from endpoint string

    :param inline: Endpoint string
    :return: the parsed endpoint
    :raises MalformedDocumentError: if the string does not match the format
    """
    match = WS2PEndpoint.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError(WS2PEndpoint.API)
    ws2pid = match.group(1)
    server = match.group(2)
    port = int(match.group(3))
    # An absent path group is normalized to the empty string.
    path = match.group(4) or ""
    return cls(ws2pid, server, port, path)
[ "def", "from_inline", "(", "cls", ":", "Type", "[", "WS2PEndpointType", "]", ",", "inline", ":", "str", ")", "->", "WS2PEndpointType", ":", "m", "=", "WS2PEndpoint", ".", "re_inline", ".", "match", "(", "inline", ")", "if", "m", "is", "None", ":", "rai...
31.411765
15.411765
def discard(self, element):
    """Remove an element.  Do not raise an exception if absent."""
    # pop with a default never raises, matching discard semantics.
    self._elements.pop(self._transform(element), None)
[ "def", "discard", "(", "self", ",", "element", ")", ":", "key", "=", "self", ".", "_transform", "(", "element", ")", "if", "key", "in", "self", ".", "_elements", ":", "del", "self", ".", "_elements", "[", "key", "]" ]
40.6
5.4
def bin2hexline(data, add_addr=True, width=16):
    """
    Format binary data to a Hex-Editor like format.

    Each returned line is: a 4-digit decimal address (optional), ``width``
    two-digit hex bytes, then an ASCII dump where non-printable bytes are
    shown as ``.``.

    e.g. bin2hexline(b"MZ\\x90\\x00", width=4) ->
        ['0000 4d 5a 90 00  MZ..']

    Args:
        data: the bytes to dump.
        add_addr: prepend the decimal offset of each line.
        width: number of bytes per line.

    Returns:
        list of formatted str lines (empty list for empty input).
    """
    assert isinstance(data, binary_type), (
        "is type: %s and not bytes/str: %s" % (type(data), repr(data))
    )
    # Width of the address+hex columns; the ASCII dump is appended after it.
    line_width = 4 + (width * 3) + 1
    total = len(data)
    lines = []
    addr = 0
    # BUG FIX: the original read data[addr] before any bounds check, raising
    # IndexError for empty input; the loop condition now guards every read,
    # so empty data yields [].
    while addr < total:
        cells = ["%04i" % addr] if add_addr else []
        ascii_block = ""
        for _ in range(width):
            b = data[addr]
            if PY2:
                # On Python 2 indexing bytes yields a 1-char str.
                b = ord(b)
            ascii_block += chr(b) if chr(b) in string.printable else "."
            cells.append("%02x" % b)
            addr += 1
            if addr >= total:
                break
        lines.append(" ".join(cells).ljust(line_width) + ascii_block)
    return lines
[ "def", "bin2hexline", "(", "data", ",", "add_addr", "=", "True", ",", "width", "=", "16", ")", ":", "assert", "isinstance", "(", "data", ",", "binary_type", ")", ",", "(", "\"is type: %s and not bytes/str: %s\"", "%", "(", "type", "(", "data", ")", ",", ...
30.896552
21.586207
def get_all_attribute_value(
        self, tag_name, attribute, format_value=True, **attribute_filter):
    """
    Yields all the attribute values in xml files which match with the tag name and the specific attribute

    :param str tag_name: specify the tag name
    :param str attribute: specify the attribute
    :param bool format_value: specify if the value needs to be formatted with packagename
    """
    for tag in self.find_tags(tag_name, **attribute_filter):
        # Try the plain attribute first, then the namespaced variant.
        value = tag.get(attribute) or tag.get(self._ns(attribute))
        if value is None:
            continue
        yield self._format_value(value) if format_value else value
[ "def", "get_all_attribute_value", "(", "self", ",", "tag_name", ",", "attribute", ",", "format_value", "=", "True", ",", "*", "*", "attribute_filter", ")", ":", "tags", "=", "self", ".", "find_tags", "(", "tag_name", ",", "*", "*", "attribute_filter", ")", ...
41.722222
21.055556
def DoxyEmitter(source, target, env):
    """Doxygen Doxyfile emitter"""
    # Output format -> (default GENERATE_* value, default output sub-directory).
    output_formats = {
        "HTML": ("YES", "html"),
        "LATEX": ("YES", "latex"),
        "RTF": ("NO", "rtf"),
        "MAN": ("YES", "man"),
        "XML": ("NO", "xml"),
    }

    data = DoxyfileParse(source[0].get_contents())

    # Resolve OUTPUT_DIRECTORY relative to the Doxyfile's directory.
    out_dir = data.get("OUTPUT_DIRECTORY", ".")
    if not os.path.isabs(out_dir):
        conf_dir = os.path.dirname(str(source[0]))
        out_dir = os.path.join(conf_dir, out_dir)

    targets = []
    # One target directory per enabled output format.
    for fmt, (default_flag, default_subdir) in output_formats.items():
        if data.get("GENERATE_" + fmt, default_flag) == "YES":
            targets.append(env.Dir(
                os.path.join(out_dir, data.get(fmt + "_OUTPUT", default_subdir))))

    # The tag file, if requested, is also a target (resolved like out_dir).
    tagfile = data.get("GENERATE_TAGFILE", "")
    if tagfile != "":
        if not os.path.isabs(tagfile):
            conf_dir = os.path.dirname(str(source[0]))
            tagfile = os.path.join(conf_dir, tagfile)
        targets.append(env.File(tagfile))

    # Don't clobber the outputs, and clean each generated node with itself.
    for node in targets:
        env.Precious(node)
        env.Clean(node, node)

    return (targets, source)
[ "def", "DoxyEmitter", "(", "source", ",", "target", ",", "env", ")", ":", "# possible output formats and their default values and output locations", "output_formats", "=", "{", "\"HTML\"", ":", "(", "\"YES\"", ",", "\"html\"", ")", ",", "\"LATEX\"", ":", "(", "\"YES...
29.536585
17.512195
def _average_precision(self, rec, prec):
    """
    calculate average precision

    Params:
    ----------
    rec : numpy.array
        cumulated recall
    prec : numpy.array
        cumulated precision
    Returns:
    ----------
    ap as float
    """
    # Sentinel values close the PR curve at recall 0 and 1.
    mrec = np.concatenate(([0.], rec, [1.]))
    mpre = np.concatenate(([0.], prec, [0.]))

    # Sweep right-to-left so precision becomes monotonically non-increasing.
    for idx in range(mpre.size - 1, 0, -1):
        mpre[idx - 1] = np.maximum(mpre[idx - 1], mpre[idx])

    # Indices where recall changes; integrate the step function there:
    # sum (delta recall) * precision.
    change_points = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[change_points + 1] - mrec[change_points]) * mpre[change_points + 1])
[ "def", "_average_precision", "(", "self", ",", "rec", ",", "prec", ")", ":", "# append sentinel values at both ends", "mrec", "=", "np", ".", "concatenate", "(", "(", "[", "0.", "]", ",", "rec", ",", "[", "1.", "]", ")", ")", "mpre", "=", "np", ".", ...
27.857143
15.428571
def handleOneClientMsg(self, wrappedMsg):
    """
    Validate and process a client message

    :param wrappedMsg: a message from a client (a (msg, frm) pair)
    """
    try:
        vmsg = self.validateClientMsg(wrappedMsg)
        if vmsg:
            self.unpackClientMsg(*vmsg)
    except BlowUp:
        # BlowUp is fatal by design — never swallow it.
        raise
    except Exception as ex:
        msg, frm = wrappedMsg
        friendly = friendlyEx(ex)
        # Suspicious clients get reported before the generic invalid-message
        # handling runs.
        if isinstance(ex, SuspiciousClient):
            self.reportSuspiciousClient(frm, friendly)

        self.handleInvalidClientMsg(ex, wrappedMsg)
[ "def", "handleOneClientMsg", "(", "self", ",", "wrappedMsg", ")", ":", "try", ":", "vmsg", "=", "self", ".", "validateClientMsg", "(", "wrappedMsg", ")", "if", "vmsg", ":", "self", ".", "unpackClientMsg", "(", "*", "vmsg", ")", "except", "BlowUp", ":", "...
30.894737
13.736842
def load_hdf5(path):
    """Load data from a HDF5 file.

    Args:
        path (str): A path to the HDF5 format file containing data.

    Returns:
        Data matrix X (sparse CSR if the file's 'issparse' flag is set,
        dense otherwise) and target vector y
    """
    with h5py.File(path, 'r') as f:
        if f['issparse'][...]:
            # Rebuild the CSR matrix from its three stored component arrays.
            X = sparse.csr_matrix(
                (f['data'][...], f['indices'][...], f['indptr'][...]),
                shape=tuple(f['shape'][...]))
        else:
            X = f['data'][...]
        y = f['target'][...]
    return X, y
[ "def", "load_hdf5", "(", "path", ")", ":", "with", "h5py", ".", "File", "(", "path", ",", "'r'", ")", "as", "f", ":", "is_sparse", "=", "f", "[", "'issparse'", "]", "[", "...", "]", "if", "is_sparse", ":", "shape", "=", "tuple", "(", "f", "[", ...
28.307692
19.807692
def send_response(self, transaction):
    """
    Handles the Blocks option in a outgoing response.

    :type transaction: Transaction
    :param transaction: the transaction that owns the response
    :rtype : Transaction
    :return: the edited transaction
    """
    host, port = transaction.request.source
    # Per-exchange key: source endpoint + request token.
    key_token = hash(str(host) + str(port) + str(transaction.request.token))
    # Slice the payload when a block2 transfer is already in progress for
    # this key, or when the payload exceeds the maximum block size.
    if (key_token in self._block2_receive and transaction.response.payload is not None) or \
            (transaction.response.payload is not None and
             len(transaction.response.payload) > defines.MAX_PAYLOAD):
        if key_token in self._block2_receive:
            # Resume the ongoing transfer from its saved state.
            byte = self._block2_receive[key_token].byte
            size = self._block2_receive[key_token].size
            num = self._block2_receive[key_token].num
        else:
            # First block of a new transfer.
            byte = 0
            num = 0
            size = defines.MAX_PAYLOAD
            m = 1

            self._block2_receive[key_token] = BlockItem(byte, num, m, size)

        # "more" flag: 1 while payload remains beyond this block.
        if len(transaction.response.payload) > (byte + size):
            m = 1
        else:
            m = 0
        # Replace the full payload with the current block's slice.
        transaction.response.payload = transaction.response.payload[byte:byte + size]
        del transaction.response.block2
        transaction.response.block2 = (num, m, size)

        # Advance the saved state; drop it once the last block was sent.
        self._block2_receive[key_token].byte += size
        self._block2_receive[key_token].num += 1
        if m == 0:
            del self._block2_receive[key_token]

    return transaction
[ "def", "send_response", "(", "self", ",", "transaction", ")", ":", "host", ",", "port", "=", "transaction", ".", "request", ".", "source", "key_token", "=", "hash", "(", "str", "(", "host", ")", "+", "str", "(", "port", ")", "+", "str", "(", "transac...
38.487805
22.634146
def docx_preprocess(docx, batch=False):
    """
    Load docx files from local filepath if not already b64 encoded.

    :param docx: a filesystem path, an already-b64-encoded payload, or
        (with ``batch=True``) a list of either.
    :param batch: when True, process each element of ``docx`` individually.
    :return: b64-encoded bytes for paths that exist on disk, the input
        unchanged otherwise (or a list thereof for batch mode).
    """
    if batch:
        return [docx_preprocess(item, batch=False) for item in docx]
    if not os.path.isfile(docx):
        # Not a path on disk: assume the payload is already b64 encoded.
        return docx
    # BUG FIX: the original `open(docx, 'rb').read()` leaked the file
    # handle; the context manager guarantees it is closed.
    with open(docx, 'rb') as handle:
        return b64encode(handle.read())
[ "def", "docx_preprocess", "(", "docx", ",", "batch", "=", "False", ")", ":", "if", "batch", ":", "return", "[", "docx_preprocess", "(", "doc", ",", "batch", "=", "False", ")", "for", "doc", "in", "docx", "]", "if", "os", ".", "path", ".", "isfile", ...
30.384615
15.769231
def query_recent_edited(timstamp, kind='1'):
    '''
    Query posts recently update.
    '''
    # Posts of the given kind updated strictly after the timestamp,
    # newest first.
    recently_updated = (TabPost.kind == kind) & (TabPost.time_update > timstamp)
    return TabPost.select().where(recently_updated).order_by(
        TabPost.time_update.desc())
[ "def", "query_recent_edited", "(", "timstamp", ",", "kind", "=", "'1'", ")", ":", "return", "TabPost", ".", "select", "(", ")", ".", "where", "(", "(", "TabPost", ".", "kind", "==", "kind", ")", "&", "(", "TabPost", ".", "time_update", ">", "timstamp",...
28.6
13
def reconstruct_mds(edm, all_points, completion='optspace', mask=None,
                    method='geometric', print_out=False, n=1):
    """ Reconstruct point set using MDS and matrix completion algorithms.

    :param edm: Euclidean distance matrix (possibly incomplete).
    :param all_points: ground-truth points used for the Procrustes alignment.
    :param completion: matrix-completion algorithm, 'optspace' or 'alternate'.
    :param mask: if given, entry-wise mask of observed EDM entries; triggers
        completion of the masked EDM before MDS.
    :param method: MDS variant passed through to MDS().
    :param print_out: print the relative completion error.
    :param n: number of leading points excluded from the Procrustes fit
        (anchors, presumably — TODO confirm against callers).
    """
    from .point_set import dm_from_edm
    from .mds import MDS
    # NOTE(review): N and dm_from_edm are unused here.
    N = all_points.shape[0]
    d = all_points.shape[1]
    if mask is not None:
        # Zero out unobserved entries, then complete to rank d+2.
        edm_missing = np.multiply(edm, mask)
        if completion == 'optspace':
            from .edm_completion import optspace
            edm_complete = optspace(edm_missing, d + 2)
        elif completion == 'alternate':
            from .edm_completion import rank_alternation
            edm_complete, errs = rank_alternation(
                edm_missing, d + 2, print_out=False, edm_true=edm)
        else:
            raise NameError('Unknown completion method {}'.format(completion))
        if (print_out):
            # Relative squared Frobenius error of the completed EDM.
            err = np.linalg.norm(edm_complete - edm)**2 / \
                np.linalg.norm(edm)**2
            print('{}: relative error:{}'.format(completion, err))
        edm = edm_complete
    # Embed with MDS, then align to the reference points (skipping the
    # first n) via Procrustes.
    Xhat = MDS(edm, d, method, False).T
    Y, R, t, c = procrustes(all_points[n:], Xhat, True)
    #Y, R, t, c = procrustes(all_points, Xhat, True)
    return Y
[ "def", "reconstruct_mds", "(", "edm", ",", "all_points", ",", "completion", "=", "'optspace'", ",", "mask", "=", "None", ",", "method", "=", "'geometric'", ",", "print_out", "=", "False", ",", "n", "=", "1", ")", ":", "from", ".", "point_set", "import", ...
44
15.407407
def events_this_week(context):
    """
    Displays a week's worth of events.   Starts week with Monday,
    unless today is Sunday.
    """
    request = context['request']
    root = request.site.root_page
    calendar = CalendarPage.objects.live().descendant_of(root).first()
    today = dt.date.today()
    # Week begins on the most recent Monday, except when today is Sunday
    # (weekday() == 6): then the week begins today.
    begin = today.toordinal()
    if today.weekday() != 6:
        begin -= today.weekday()
    date_from = dt.date.fromordinal(begin)
    date_to = dt.date.fromordinal(begin + 6)
    if calendar:
        events = calendar._getEventsByDay(request, date_from, date_to)
    else:
        events = getAllEventsByDay(request, date_from, date_to)
    return {'request': request,
            'today': today,
            'calendarUrl': calendar.get_url(request) if calendar else None,
            'calendarName': calendar.title if calendar else None,
            'events': events}
[ "def", "events_this_week", "(", "context", ")", ":", "request", "=", "context", "[", "'request'", "]", "home", "=", "request", ".", "site", ".", "root_page", "cal", "=", "CalendarPage", ".", "objects", ".", "live", "(", ")", ".", "descendant_of", "(", "h...
37.038462
11.961538
def _determine_filtered_package_requirements(self):
    """
    Parse the configuration file for [blacklist]packages

    Returns
    -------
    list of packaging.requirements.Requirement
        For all PEP440 package specifiers
    """
    try:
        raw_lines = self.configuration["blacklist"]["packages"].split("\n")
    except KeyError:
        # No blacklist section configured: nothing to filter.
        raw_lines = []

    # Build a set so duplicate specifier lines collapse, skipping blanks
    # and comment lines.
    requirements = {
        Requirement(stripped)
        for stripped in (line.strip() for line in raw_lines)
        if stripped and not stripped.startswith("#")
    }
    return list(requirements)
[ "def", "_determine_filtered_package_requirements", "(", "self", ")", ":", "filtered_requirements", "=", "set", "(", ")", "try", ":", "lines", "=", "self", ".", "configuration", "[", "\"blacklist\"", "]", "[", "\"packages\"", "]", "package_lines", "=", "lines", "...
35.809524
14.095238
def _distributeCells(numCellsPop):
    ''' distribute cells across compute nodes using round-robin'''
    from .. import sim

    hostCells = {host: [] for host in range(sim.nhosts)}
    for gid in range(numCellsPop):
        hostCells[sim.nextHost].append(gid)
        # Advance the shared round-robin pointer, wrapping at nhosts.
        sim.nextHost = (sim.nextHost + 1) % sim.nhosts

    if sim.cfg.verbose:
        print(("Distributed population of %i cells on %s hosts: %s, next: %s"%(numCellsPop,sim.nhosts,hostCells,sim.nextHost)))

    return hostCells
[ "def", "_distributeCells", "(", "numCellsPop", ")", ":", "from", ".", ".", "import", "sim", "hostCells", "=", "{", "}", "for", "i", "in", "range", "(", "sim", ".", "nhosts", ")", ":", "hostCells", "[", "i", "]", "=", "[", "]", "for", "i", "in", "...
30.722222
21.944444
def add_listener(self, event_name: str, listener: Callable):
    """Add a listener.

    Returns ``self`` so registrations can be chained.
    """
    registered = self.listeners[event_name]
    registered.append(listener)
    return self
[ "def", "add_listener", "(", "self", ",", "event_name", ":", "str", ",", "listener", ":", "Callable", ")", ":", "self", ".", "listeners", "[", "event_name", "]", ".", "append", "(", "listener", ")", "return", "self" ]
39.75
13
def is_writeable(value, **kwargs):
    """Indicate whether ``value`` is a writeable file.

    .. caution::

      This validator does **NOT** work correctly on a Windows file system and
      raises :class:`NotImplementedError() <python:NotImplementedError>` there.

    .. caution::

      **Use of this validator is an anti-pattern and should be used with
      caution.** Checking writability before writing exposes your code to
      `TOCTOU <https://en.wikipedia.org/wiki/Time_of_check_to_time_of_use>`_
      bugs; prefer EAFP — attempt the write inside ``try ... except``.

    .. note::

      This validator relies on :func:`os.access() <python:os.access>` via the
      underlying ``validators.writeable``, which ignores file locking and only
      reflects local operating system permissions.

    :param value: The value to evaluate.
    :type value: Path-like object

    :returns: ``True`` if ``value`` is valid, ``False`` if it is not.
    :rtype: :class:`bool <python:bool>`

    :raises NotImplementedError: if called on a Windows system
    :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters
      or duplicates keyword parameters passed to the underlying validator
    """
    if sys.platform in ('win32', 'cygwin'):
        raise NotImplementedError('not supported on Windows')

    try:
        validators.writeable(value, allow_empty=False, **kwargs)
    except SyntaxError:
        # Programming errors in the keyword arguments must surface.
        raise
    except Exception:
        return False

    return True
[ "def", "is_writeable", "(", "value", ",", "*", "*", "kwargs", ")", ":", "if", "sys", ".", "platform", "in", "[", "'win32'", ",", "'cygwin'", "]", ":", "raise", "NotImplementedError", "(", "'not supported on Windows'", ")", "try", ":", "validators", ".", "w...
36.493151
27.109589
def simplex_compute_potentials(self, t, root):
    '''
    API:
        simplex_compute_potentials(self, t, root)
    Description:
        Computes node potentials for a minimum cost flow problem and stores
        them as node attribute 'potential'. Based on pseudocode given in
        Network Flows by Ahuja et al.
    Pre:
        (1) Assumes a directed graph in which each arc has a 'cost'
        attribute.
        (2) Uses 'thread' and 'pred' attributes of nodes.
    Input:
        t: Current spanning tree solution, its type is Graph.
        root: root node of the tree.
    Post:
        Keeps the node potentials as 'potential' attribute.
    '''
    self.get_node(root).set_attr('potential', 0)
    # Walk the tree in thread (preorder) order; each node's potential is
    # derived from its predecessor's, so parents are always set first.
    # NOTE(review): `j is not root` compares identity — assumes node labels
    # are the same objects (e.g. small ints / interned strings); confirm.
    j = t.get_node(root).get_attr('thread')
    while j is not root:
        i = t.get_node(j).get_attr('pred')
        potential_i = self.get_node(i).get_attr('potential')
        # Forward arc (i,j): potential decreases by the arc cost.
        if (i,j) in self.edge_attr:
            c_ij = self.edge_attr[(i,j)]['cost']
            self.get_node(j).set_attr('potential', potential_i-c_ij)
        # Reverse arc (j,i): potential increases by the arc cost.
        # NOTE(review): if both arcs exist, the (j,i) assignment wins.
        if (j,i) in self.edge_attr:
            c_ji = self.edge_attr[(j,i)]['cost']
            self.get_node(j).set_attr('potential', potential_i+c_ji)
        j = t.get_node(j).get_attr('thread')
[ "def", "simplex_compute_potentials", "(", "self", ",", "t", ",", "root", ")", ":", "self", ".", "get_node", "(", "root", ")", ".", "set_attr", "(", "'potential'", ",", "0", ")", "j", "=", "t", ".", "get_node", "(", "root", ")", ".", "get_attr", "(", ...
44.033333
18.633333
def sg_cast(tensor, opt):
    r"""Casts a tensor to a new type.

    See `tf.cast()` in tensorflow.

    Args:
      tensor: A `Tensor` or `SparseTensor` (automatically given by chain).
      opt:
        dtype : The destination type.
        name : If provided, it replaces current tensor's name

    Returns:
      A `Tensor` or `SparseTensor` with same shape as `tensor`.

    Raises:
      AssertionError: if ``opt.dtype`` is not supplied.
    """
    assert opt.dtype is not None, 'dtype is mandatory.'
    # Thin wrapper around tf.cast with the option-bag calling convention.
    return tf.cast(tensor, opt.dtype, name=opt.name)
[ "def", "sg_cast", "(", "tensor", ",", "opt", ")", ":", "assert", "opt", ".", "dtype", "is", "not", "None", ",", "'dtype is mandatory.'", "return", "tf", ".", "cast", "(", "tensor", ",", "opt", ".", "dtype", ",", "name", "=", "opt", ".", "name", ")" ]
30
20.875
def p_expression_Xnor(self, p):
    'expression : expression XNOR expression'
    # NOTE: the docstring above is the PLY grammar rule for this production
    # — it is consumed by the parser generator and must not be edited.
    # Build an Xnor AST node from the two operand expressions, carrying the
    # source line number of the left operand.
    p[0] = Xnor(p[1], p[3], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
[ "def", "p_expression_Xnor", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "Xnor", "(", "p", "[", "1", "]", ",", "p", "[", "3", "]", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")", "p", ".", "set_lineno", "(", "0", ...
41.75
8.25
def xml_to_bytes(element, pretty_print=False):
    """Serialize *element* to UTF-8 bytes with an XML declaration.

    Wrapper for etree.tostring that papers over the stdlib
    ElementTree's lack of a ``pretty_print`` option and guarantees an
    encoding header is prepended.
    """
    if not use_lxml:
        # Stdlib ElementTree: no pretty-print support, and it may omit
        # the XML declaration, so add one ourselves if missing.
        xml = etree.tostring(element, encoding="UTF-8")
        if not xml.startswith(b"<?xml "):
            xml = b'<?xml version="1.0" encoding="utf-8" ?>\n' + xml
    else:
        xml = etree.tostring(
            element, encoding="UTF-8", xml_declaration=True, pretty_print=pretty_print
        )
    # Either path must have produced an encoding header by now.
    assert xml.startswith(b"<?xml ")
    return xml
[ "def", "xml_to_bytes", "(", "element", ",", "pretty_print", "=", "False", ")", ":", "if", "use_lxml", ":", "xml", "=", "etree", ".", "tostring", "(", "element", ",", "encoding", "=", "\"UTF-8\"", ",", "xml_declaration", "=", "True", ",", "pretty_print", "=...
40.928571
21.071429
def __emulate_axis(self, axis_changes, timeval=None):
    """Make the axis events use the Linux style format."""
    # Map each raw axis change to its (code, value) pair and wrap it in
    # an "Absolute" event object, preserving input order.
    return [
        self.create_event_object("Absolute", code, value, timeval=timeval)
        for code, value in (self.__map_axis(axis) for axis in axis_changes)
    ]
[ "def", "__emulate_axis", "(", "self", ",", "axis_changes", ",", "timeval", "=", "None", ")", ":", "events", "=", "[", "]", "for", "axis", "in", "axis_changes", ":", "code", ",", "value", "=", "self", ".", "__map_axis", "(", "axis", ")", "event", "=", ...
34.5
11.5
def set_hint_style(self, hint_style):
    """Change the :ref:`HINT_STYLE` used by this font options object.

    Hint style controls whether font outlines are fitted to the pixel
    grid and, if so, whether the fit favors fidelity or contrast.
    """
    pointer = self._pointer
    cairo.cairo_font_options_set_hint_style(pointer, hint_style)
    # Raise if the underlying cairo object entered an error state.
    self._check_status()
[ "def", "set_hint_style", "(", "self", ",", "hint_style", ")", ":", "cairo", ".", "cairo_font_options_set_hint_style", "(", "self", ".", "_pointer", ",", "hint_style", ")", "self", ".", "_check_status", "(", ")" ]
44
17.75
def request_timeout(self, timeout):
    """
    Context manager implements opportunity to change request timeout
    in current context

    :param timeout:
    :return:
    """
    # Normalize the caller-supplied timeout, then push it onto the
    # context variable; the token lets us restore the previous value.
    prepared = self._prepare_timeout(timeout)
    reset_token = self._ctx_timeout.set(prepared)
    try:
        yield
    finally:
        # Always restore the prior timeout, even if the body raised.
        self._ctx_timeout.reset(reset_token)
[ "def", "request_timeout", "(", "self", ",", "timeout", ")", ":", "timeout", "=", "self", ".", "_prepare_timeout", "(", "timeout", ")", "token", "=", "self", ".", "_ctx_timeout", ".", "set", "(", "timeout", ")", "try", ":", "yield", "finally", ":", "self"...
28.307692
17.538462
def read_instances(self):
    """
    Read `.dsb/instances.yaml` and populate `self.cluster`.

    If the file does not exist, `self.cluster` is left untouched.
    """
    logger.debug('Reading instances from: %s', self.instances_path)
    if os.path.exists(self.instances_path):
        with open(self.instances_path, 'r') as f:
            # yaml.safe_load instead of yaml.load: yaml.load without an
            # explicit Loader is deprecated and can construct arbitrary
            # Python objects; the instances file only holds plain data.
            # safe_load also accepts the stream directly, so no read().
            list_ = yaml.safe_load(f)
        self.cluster = Cluster.from_list(list_, settings=self.settings)
[ "def", "read_instances", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Reading instances from: %s'", ",", "self", ".", "instances_path", ")", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "instances_path", ")", ":", "with", "open", "(",...
40.6
12.2
def set_org_processor(request):
    """
    Simple context processor that automatically sets 'org' on the context
    if it is present in the request.
    """
    org = getattr(request, "org", None)
    if not org:
        # No org on the request (or a falsy one): contribute nothing.
        return dict()

    def _latest_background(bg_type):
        # Most recently created active background of the given type.
        qs = org.backgrounds.filter(is_active=True, background_type=bg_type)
        return qs.order_by("-pk").first()

    return dict(
        org=org,
        pattern_bg=_latest_background("P"),
        banner_bg=_latest_background("B"),
    )
[ "def", "set_org_processor", "(", "request", ")", ":", "if", "getattr", "(", "request", ",", "\"org\"", ",", "None", ")", ":", "org", "=", "request", ".", "org", "pattern_bg", "=", "org", ".", "backgrounds", ".", "filter", "(", "is_active", "=", "True", ...
39
20.333333
def resources_from_resource_arguments(default_num_cpus, default_num_gpus,
                                      default_resources, runtime_num_cpus,
                                      runtime_num_gpus, runtime_resources):
    """Determine a task's resource requirements.

    Runtime (per-invocation) values always take precedence over the
    defaults declared on the function or actor method.

    Args:
        default_num_cpus: The default number of CPUs required by this
            function or actor method.
        default_num_gpus: The default number of GPUs required by this
            function or actor method.
        default_resources: The default custom resources required by this
            function or actor method.
        runtime_num_cpus: The number of CPUs requested when the task was
            invoked.
        runtime_num_gpus: The number of GPUs requested when the task was
            invoked.
        runtime_resources: The custom resources requested when the task
            was invoked.

    Returns:
        A dictionary of the resource requirements for the task.

    Raises:
        ValueError: If the custom resources dictionary contains the
            reserved keys 'CPU' or 'GPU'.
    """
    # Start from the runtime custom resources, falling back to the
    # defaults; copy so the caller's dict is never mutated.
    if runtime_resources is not None:
        resources = dict(runtime_resources)
    elif default_resources is not None:
        resources = dict(default_resources)
    else:
        resources = {}

    # CPU/GPU must come through the dedicated arguments, not the
    # custom-resources dict.
    if "CPU" in resources or "GPU" in resources:
        raise ValueError("The resources dictionary must not "
                         "contain the key 'CPU' or 'GPU'")

    assert default_num_cpus is not None
    resources["CPU"] = (runtime_num_cpus
                        if runtime_num_cpus is not None
                        else default_num_cpus)

    # GPU is optional: only set when either value was provided.
    if runtime_num_gpus is not None:
        resources["GPU"] = runtime_num_gpus
    elif default_num_gpus is not None:
        resources["GPU"] = default_num_gpus

    return resources
[ "def", "resources_from_resource_arguments", "(", "default_num_cpus", ",", "default_num_gpus", ",", "default_resources", ",", "runtime_num_cpus", ",", "runtime_num_gpus", ",", "runtime_resources", ")", ":", "if", "runtime_resources", "is", "not", "None", ":", "resources", ...
38.511628
20.767442