text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def str_numerator(self):
    """Return the numerator scaled to a byte unit, e.g. ``"1.5 MB"``.

    Values >= 10 are formatted without decimals; smaller values keep one
    decimal place. Uses ``locale.format_string`` because ``locale.format``
    was deprecated (and removed in Python 3.12) for single-specifier use.
    """
    unit_numerator, unit = UnitByte(self.numerator).auto_no_thousands
    # One decimal only matters for small magnitudes.
    if unit_numerator >= 10:
        formatter = '%d'
    else:
        formatter = '%0.1f'
    return '{0} {1}'.format(
        locale.format_string(formatter, unit_numerator, grouping=False), unit)
[ "def", "str_numerator", "(", "self", ")", ":", "unit_numerator", ",", "unit", "=", "UnitByte", "(", "self", ".", "numerator", ")", ".", "auto_no_thousands", "if", "unit_numerator", ">=", "10", ":", "formatter", "=", "'%d'", "else", ":", "formatter", "=", "...
43.5
20
def as_string(self, forsigning=False):
    """Serialize this message and return the resulting bytes.

    >>> len(OmapiMessage().as_string(True)) >= 24
    True

    @type forsigning: bool
    @rtype: bytes
    @raises OmapiSizeLimitError:
    """
    out = OutBuffer()
    self.serialize(out, forsigning)
    return out.getvalue()
[ "def", "as_string", "(", "self", ",", "forsigning", "=", "False", ")", ":", "ret", "=", "OutBuffer", "(", ")", "self", ".", "serialize", "(", "ret", ",", "forsigning", ")", "return", "ret", ".", "getvalue", "(", ")" ]
20.416667
14.916667
def add_output_list_opt(self, opt, outputs):
    """Add an option that determines a list of outputs."""
    # Register the option itself, then each output both as an option
    # value and as a tracked output.
    self.add_opt(opt)
    for output in outputs:
        self.add_opt(output)
        self._add_output(output)
[ "def", "add_output_list_opt", "(", "self", ",", "opt", ",", "outputs", ")", ":", "self", ".", "add_opt", "(", "opt", ")", "for", "out", "in", "outputs", ":", "self", ".", "add_opt", "(", "out", ")", "self", ".", "_add_output", "(", "out", ")" ]
32.571429
7.142857
def servo_config(self, pin, min_pulse=544, max_pulse=2400):
    """Configure the Arduino for servo operation.

    :param pin: Servo control pin
    :param min_pulse: Minimum pulse width
    :param max_pulse: Maximum pulse width
    :returns: No return value
    """
    # Wrap the core coroutine in a task and block until it completes.
    future = asyncio.ensure_future(
        self.core.servo_config(pin, min_pulse, max_pulse))
    self.loop.run_until_complete(future)
[ "def", "servo_config", "(", "self", ",", "pin", ",", "min_pulse", "=", "544", ",", "max_pulse", "=", "2400", ")", ":", "task", "=", "asyncio", ".", "ensure_future", "(", "self", ".", "core", ".", "servo_config", "(", "pin", ",", "min_pulse", ",", "max_...
32.8
19.333333
def post_process(self):
    """Apply the final 2D transforms: flush pixels, then rotate 90 degrees."""
    self.image.putdata(self.pixels)
    rotated = self.image.transpose(Image.ROTATE_90)
    self.image = rotated
[ "def", "post_process", "(", "self", ")", ":", "self", ".", "image", ".", "putdata", "(", "self", ".", "pixels", ")", "self", ".", "image", "=", "self", ".", "image", ".", "transpose", "(", "Image", ".", "ROTATE_90", ")" ]
39.75
9
def get_form(self, form_class=None):
    """Return the form, propagating any view-defined group layout.

    Preference order: ``self.form_groups`` (wrapped in a callable when
    given as a plain list), then ``self.__groups__``.
    """
    formobj = super(GenModify, self).get_form(form_class)
    # Set requested group to this form
    selfgroups = getattr(self, "form_groups", None)
    if selfgroups:
        # isinstance instead of `type(...) == list`: idiomatic and also
        # accepts list subclasses.
        if isinstance(selfgroups, list):
            formobj.__groups__ = lambda: selfgroups
        else:
            formobj.__groups__ = selfgroups
    else:
        selfgroups = getattr(self, "__groups__", None)
        if selfgroups:
            formobj.__groups__ = selfgroups
    # Return the new updated form
    return formobj
[ "def", "get_form", "(", "self", ",", "form_class", "=", "None", ")", ":", "formobj", "=", "super", "(", "GenModify", ",", "self", ")", ".", "get_form", "(", "form_class", ")", "# Set requested group to this form", "selfgroups", "=", "getattr", "(", "self", "...
33.5
18
def node_labels(node_labels, node_indices):
    """Validate that there is a label for each node."""
    mismatched = len(node_labels) != len(node_indices)
    if mismatched:
        message = "Labels {0} must label every node {1}.".format(
            node_labels, node_indices)
        raise ValueError(message)
    duplicated = len(set(node_labels)) != len(node_labels)
    if duplicated:
        raise ValueError("Labels {0} must be unique.".format(node_labels))
[ "def", "node_labels", "(", "node_labels", ",", "node_indices", ")", ":", "if", "len", "(", "node_labels", ")", "!=", "len", "(", "node_indices", ")", ":", "raise", "ValueError", "(", "\"Labels {0} must label every node {1}.\"", ".", "format", "(", "node_labels", ...
47.125
15.5
def append_to_file(file_name, line_data):
    """Append a line of text (plus a trailing newline) to a UTF-8 file."""
    with open(file_name, mode='a', encoding='utf-8') as handle:
        handle.write(line_data)
        handle.write("\n")
[ "def", "append_to_file", "(", "file_name", ",", "line_data", ")", ":", "with", "open", "(", "file_name", ",", "mode", "=", "'a'", ",", "encoding", "=", "'utf-8'", ")", "as", "f1", ":", "f1", ".", "write", "(", "line_data", ")", "f1", ".", "write", "(...
38
10.2
def getCanonicalRep(record_cluster):
    """
    Given a list of records within a duplicate cluster, constructs a
    canonical representation of the cluster by finding canonical
    values for each field
    """
    canonical_rep = {}
    for key in record_cluster[0].keys():
        # Non-empty values are always preferred over empty ones when
        # picking the canonical value for a field.
        key_values = [record[key] for record in record_cluster if record[key]]
        if key_values:
            canonical_rep[key] = getCentroid(key_values, comparator)
        else:
            canonical_rep[key] = ''
    return canonical_rep
[ "def", "getCanonicalRep", "(", "record_cluster", ")", ":", "canonical_rep", "=", "{", "}", "keys", "=", "record_cluster", "[", "0", "]", ".", "keys", "(", ")", "for", "key", "in", "keys", ":", "key_values", "=", "[", "]", "for", "record", "in", "record...
28.375
18.375
def energy_at_conditions(self, pH, V):
    """
    Get free energy for a given pH and V

    Args:
        pH (float): pH at which to evaluate free energy
        V (float): voltage at which to evaluate free energy

    Returns:
        free energy at conditions
    """
    ph_term = self.npH * PREFAC * pH
    potential_term = self.nPhi * V
    return self.energy + ph_term + potential_term
[ "def", "energy_at_conditions", "(", "self", ",", "pH", ",", "V", ")", ":", "return", "self", ".", "energy", "+", "self", ".", "npH", "*", "PREFAC", "*", "pH", "+", "self", ".", "nPhi", "*", "V" ]
29.916667
17.416667
def on_connection_unblocked(self, method_frame):
    """When RabbitMQ indicates the connection is unblocked, set the state
    appropriately.

    :param pika.amqp_object.Method method_frame: Unblocked method frame
    """
    LOGGER.debug('Connection unblocked: %r', method_frame)
    self.state = self.STATE_READY
    # Notify the registered ready callback, when one is set.
    callback = self.on_ready
    if callback:
        callback(self)
[ "def", "on_connection_unblocked", "(", "self", ",", "method_frame", ")", ":", "LOGGER", ".", "debug", "(", "'Connection unblocked: %r'", ",", "method_frame", ")", "self", ".", "state", "=", "self", ".", "STATE_READY", "if", "self", ".", "on_ready", ":", "self"...
35.272727
17.272727
def read_var_uint32(self):
    """Read a varint from the stream and return it as an unsigned 32-bit
    integer, raising DecodeError when it does not fit."""
    value = self.read_var_uint64()
    if value > wire_format.UINT32_MAX:
        raise errors.DecodeError('Value out of range for uint32: %d' % value)
    return value
[ "def", "read_var_uint32", "(", "self", ")", ":", "i", "=", "self", ".", "read_var_uint64", "(", ")", "if", "i", ">", "wire_format", ".", "UINT32_MAX", ":", "raise", "errors", ".", "DecodeError", "(", "'Value out of range for uint32: %d'", "%", "i", ")", "ret...
41.375
13.375
def _reaction_to_dicts(reaction): """Convert a reaction to reduced left, right dictionaries. Returns a pair of (left, right) dictionaries mapping compounds to normalized integer stoichiometric values. If a compound occurs multiple times on one side, the occurences are combined into a single entry in the dictionary. """ def dict_from_iter_sum(it, div): d = {} for k, v in it: if k not in d: d[k] = 0 d[k] += int(v / div) return d div = reduce(gcd, (abs(v) for _, v in reaction.compounds), 0) if div == 0: raise ValueError('Empty reaction') left = dict_from_iter_sum(reaction.left, div) right = dict_from_iter_sum(reaction.right, div) return left, right
[ "def", "_reaction_to_dicts", "(", "reaction", ")", ":", "def", "dict_from_iter_sum", "(", "it", ",", "div", ")", ":", "d", "=", "{", "}", "for", "k", ",", "v", "in", "it", ":", "if", "k", "not", "in", "d", ":", "d", "[", "k", "]", "=", "0", "...
31.416667
20.458333
def _import_ucsmsdk(self):
    """Dynamically import and return the Ucsm SDK module.

    The SDK is not installed as part of the normal Neutron
    distributions; importing it dynamically here lets tests mock the
    import so UcsSdk need not be installed.
    """
    # Warn the operator when SSL certificate checking is disabled.
    if not CONF.ml2_cisco_ucsm.ucsm_https_verify:
        LOG.warning(const.SSL_WARNING)
    # Monkey patch the UCS sdk version of urllib2 so that https
    # verification can be disabled when required.
    from networking_cisco.ml2_drivers.ucsm import ucs_urllib2
    ucsmsdkhandle = importutils.import_module('UcsSdk.UcsHandle')
    ucsmsdkhandle.urllib2 = ucs_urllib2
    return importutils.import_module('UcsSdk')
[ "def", "_import_ucsmsdk", "(", "self", ")", ":", "# Check if SSL certificate checking has been disabled.", "# If so, warn the user before proceeding.", "if", "not", "CONF", ".", "ml2_cisco_ucsm", ".", "ucsm_https_verify", ":", "LOG", ".", "warning", "(", "const", ".", "SS...
39.954545
19.681818
def read_seg(self, parc_type='aparc'):
    """Read the MRI segmentation.

    Parameters
    ----------
    parc_type : str
        'aparc' or 'aparc.a2009s'

    Returns
    -------
    numpy.ndarray
        3d matrix with values
    numpy.ndarray
        4x4 affine matrix
    """
    seg_path = self.dir / 'mri' / (parc_type + '+aseg.mgz')
    mri = load(seg_path)
    # NOTE(review): nibabel's get_data() is deprecated in favor of
    # get_fdata(); kept as-is here to preserve the returned dtype.
    return mri.get_data(), mri.affine
[ "def", "read_seg", "(", "self", ",", "parc_type", "=", "'aparc'", ")", ":", "seg_file", "=", "self", ".", "dir", "/", "'mri'", "/", "(", "parc_type", "+", "'+aseg.mgz'", ")", "seg_mri", "=", "load", "(", "seg_file", ")", "seg_aff", "=", "seg_mri", ".",...
25.5
15.2
def _wrapped(self):
    """
    Wrap this udf with a plain function and attach the docstring from func.
    """
    # A callable without __name__ and/or __module__ (e.g. a
    # functools.partial) may be wrapped here. Exclude those attributes
    # from functools.wraps and assign them manually afterwards.
    assigned = tuple(
        attr for attr in functools.WRAPPER_ASSIGNMENTS
        if attr != '__name__' and attr != '__module__')

    @functools.wraps(self.func, assigned=assigned)
    def wrapper(*args):
        return self(*args)

    wrapper.__name__ = self._name
    if hasattr(self.func, '__module__'):
        wrapper.__module__ = self.func.__module__
    else:
        wrapper.__module__ = self.func.__class__.__module__
    wrapper.func = self.func
    wrapper.returnType = self.returnType
    wrapper.evalType = self.evalType
    wrapper.deterministic = self.deterministic
    wrapper.asNondeterministic = functools.wraps(
        self.asNondeterministic)(lambda: self.asNondeterministic()._wrapped())
    return wrapper
[ "def", "_wrapped", "(", "self", ")", ":", "# It is possible for a callable instance without __name__ attribute or/and", "# __module__ attribute to be wrapped here. For example, functools.partial. In this case,", "# we should avoid wrapping the attributes from the wrapped function to the wrapper", ...
45.357143
24.928571
def lock():
    '''
    Attempts an exclusive lock on the candidate configuration. This
    is a non-blocking call.

    .. note::
        When locking, it is important to remember to call
        :py:func:`junos.unlock <salt.modules.junos.unlock>` once finished. If
        locking during orchestration, remember to include a step in the
        orchestration job to unlock.

    CLI Example:

    .. code-block:: bash

        salt 'device_name' junos.lock
    '''
    conn = __proxy__['junos.conn']()
    ret = {'out': True}
    try:
        conn.cu.lock()
        ret['message'] = "Successfully locked the configuration."
    except jnpr.junos.exception.LockError as exception:
        ret['message'] = 'Could not gain lock due to : "{0}"'.format(exception)
        ret['out'] = False
    return ret
[ "def", "lock", "(", ")", ":", "conn", "=", "__proxy__", "[", "'junos.conn'", "]", "(", ")", "ret", "=", "{", "}", "ret", "[", "'out'", "]", "=", "True", "try", ":", "conn", ".", "cu", ".", "lock", "(", ")", "ret", "[", "'message'", "]", "=", ...
28.142857
25.5
def get_probability_masks(self, non_valid_value=0):
    """
    Get probability maps of areas for each available date. The pixels
    without valid data are assigned non_valid_value.

    :param non_valid_value: Value to be assigned to non valid data pixels
    :type non_valid_value: float
    :return: Probability map of shape `(times, height, width)` and `dtype=numpy.float64`
    :rtype: numpy.ndarray
    """
    # Compute lazily and cache the result on first use.
    if self.probability_masks is None:
        self.get_data()
        masks = self.cloud_detector.get_cloud_probability_maps(self.bands)
        masks[~self.valid_data] = non_valid_value
        self.probability_masks = masks
    return self.probability_masks
[ "def", "get_probability_masks", "(", "self", ",", "non_valid_value", "=", "0", ")", ":", "if", "self", ".", "probability_masks", "is", "None", ":", "self", ".", "get_data", "(", ")", "self", ".", "probability_masks", "=", "self", ".", "cloud_detector", ".", ...
43.9375
23.4375
def from_list(lst):
    """Build a linked list from a Python list.

    Iterative back-to-front construction instead of the original
    recursion over ``lst[1:]``, which copied the tail at every level
    (O(n^2) time overall) and hit Python's recursion limit on long
    inputs.

    :param lst: list of elements
    :return: head Node of the LinkedList, or None for an empty list
    """
    if not lst:
        return None
    head = None
    # Walk backwards so each new node can link to the already-built tail.
    for value in reversed(lst):
        head = Node(value, head)
    return head
[ "def", "from_list", "(", "lst", ")", ":", "if", "not", "lst", ":", "return", "None", "head", "=", "Node", "(", "lst", "[", "0", "]", ",", "None", ")", "if", "len", "(", "lst", ")", "==", "1", ":", "return", "head", "head", ".", "next_node", "="...
20.5
18.8125
def removeComponent(self, component):
    """
    Remove ``component`` from the glyph.

        >>> glyph.removeComponent(component)

    ``component`` may be a :ref:`BaseComponent` or an
    :ref:`type-int` representing a component index.
    """
    # Resolve a component object to its index when needed.
    if isinstance(component, int):
        index = component
    else:
        index = self._getComponentIndex(component)
    index = normalizers.normalizeIndex(index)
    if index >= self._len__components():
        raise ValueError("No component located at index %d." % index)
    self._removeComponent(index)
[ "def", "removeComponent", "(", "self", ",", "component", ")", ":", "if", "isinstance", "(", "component", ",", "int", ")", ":", "index", "=", "component", "else", ":", "index", "=", "self", ".", "_getComponentIndex", "(", "component", ")", "index", "=", "...
35.235294
13.588235
def add_for_targets(self, targets, products):
    """Update the products for the given targets, adding to existing entries.

    :API: public
    """
    # TODO: This is a temporary helper for use until the classpath has been split.
    for tgt in targets:
        self.add_for_target(tgt, products)
[ "def", "add_for_targets", "(", "self", ",", "targets", ",", "products", ")", ":", "# TODO: This is a temporary helper for use until the classpath has been split.", "for", "target", "in", "targets", ":", "self", ".", "add_for_target", "(", "target", ",", "products", ")" ...
37.125
16
def export(self, output, tight=False, concat=True, close_pdf=None,
           use_time=False, **kwargs):
    """Exports the figures of the project to one or more image files

    Parameters
    ----------
    output: str, iterable or matplotlib.backends.backend_pdf.PdfPages
        if string or list of strings, those define the names of the
        output files. Otherwise you may provide an instance of
        :class:`matplotlib.backends.backend_pdf.PdfPages` to save the
        figures in it. If string (or iterable of strings), attribute
        names in the xarray.DataArray.attrs attribute as well as index
        dimensions are replaced by the respective value (see examples
        below). Furthermore a single format string without key (e.g.
        %i, %s, %d, etc.) is replaced by a counter.
    tight: bool
        If True, it is tried to figure out the tight bbox of the figure
        (same as bbox_inches='tight')
    concat: bool
        if True and the output format is `pdf`, all figures are
        concatenated into one single pdf
    close_pdf: bool or None
        If True and the figures are concatenated into one single pdf,
        the resulting pdf instance is closed. If False it remains open.
        If None and `output` is a string, it is the same as
        ``close_pdf=True``, if None and `output` is neither a string
        nor an iterable, it is the same as ``close_pdf=False``
    use_time: bool
        If True, formatting strings for the
        :meth:`datetime.datetime.strftime` are expected to be found in
        `output` (e.g. ``'%m'``, ``'%Y'``, etc.). If so, other
        formatting strings must be escaped by double ``'%'``
        (e.g. ``'%%i'`` instead of (``'%i'``))
    ``**kwargs``
        Any valid keyword for the :func:`matplotlib.pyplot.savefig`
        function

    Returns
    -------
    matplotlib.backends.backend_pdf.PdfPages or None
        a PdfPages instance if output is a string and close_pdf is
        False, otherwise None

    Examples
    --------
    Simply save all figures into one single pdf::

        >>> p = psy.gcp()
        >>> p.export('my_plots.pdf')

    Save all figures into separate pngs with increasing numbers
    (e.g. ``'my_plots_1.png'``)::

        >>> p.export('my_plots_%i.png')

    Save all figures into separate pngs with the name of the variables
    shown in each figure (e.g. ``'my_plots_t2m.png'``)::

        >>> p.export('my_plots_%(name)s.png')

    Save all figures into separate pngs with the name of the variables
    shown in each figure and with increasing numbers
    (e.g. ``'my_plots_1_t2m.png'``)::

        >>> p.export('my_plots_%i_%(name)s.png')

    Specify the names for each figure directly via a list::

        >>> p.export(['my_plots1.pdf', 'my_plots2.pdf'])
    """
    from matplotlib.backends.backend_pdf import PdfPages
    if tight:
        kwargs['bbox_inches'] = 'tight'
    if use_time:
        # Substitute strftime patterns in the output name with the value
        # of the (single) time dimension, when exactly one exists.
        def insert_time(s, attrs):
            time = attrs[tname]
            try:
                # assume a valid datetime.datetime instance
                s = pd.to_datetime(time).strftime(s)
            except ValueError:
                pass
            return s
        tnames = self._get_tnames()
        tname = next(iter(tnames)) if len(tnames) == 1 else None
    else:
        # No time substitution requested: use a no-op.
        def insert_time(s, attrs):
            return s
        tname = None
    if isinstance(output, six.string_types):  # a single string
        # NOTE(review): the closing parenthesis looks misplaced here --
        # when 'format' IS in kwargs, ``kwargs.pop(...)`` returns that
        # string and ``[1][1:]`` then slices it, which looks unintended.
        # Probably meant:
        # ``kwargs.pop('format', os.path.splitext(output)[1][1:])``
        # -- TODO confirm against callers.
        out_fmt = kwargs.pop('format', os.path.splitext(output))[1][1:]
        if out_fmt.lower() == 'pdf' and concat:
            # One single pdf collecting all figures.
            attrs = self.joined_attrs('-')
            if tname is not None and tname in attrs:
                output = insert_time(output, attrs)
            pdf = PdfPages(safe_modulo(output, attrs))

            def save(fig):
                pdf.savefig(fig, **kwargs)

            def close():
                # close_pdf=None defaults to True for string output
                if close_pdf is None or close_pdf:
                    pdf.close()
                    return
                return pdf
        else:
            # One file per figure, derived from the single name template.
            def save(fig):
                attrs = self.figs[fig].joined_attrs('-')
                out = output
                if tname is not None and tname in attrs:
                    out = insert_time(out, attrs)
                try:
                    # substitute the figure counter ``i`` (set by the
                    # enumerate loop at the bottom)
                    out = safe_modulo(out, i, print_warning=False)
                except TypeError:
                    pass
                fig.savefig(safe_modulo(out, attrs), **kwargs)

            def close():
                pass
    elif isinstance(output, Iterable):  # a list of strings
        # Cycle through the given names, one per figure.
        output = cycle(output)

        def save(fig):
            attrs = self.figs[fig].joined_attrs('-')
            out = next(output)
            if tname is not None and tname in attrs:
                out = insert_time(out, attrs)
            try:
                # NOTE(review): this calls ``next(output)`` a second
                # time, consuming an extra name per figure and
                # discarding the time-substituted ``out`` from above --
                # presumably meant to be
                # ``safe_modulo(out, i, print_warning=False)``.
                out = safe_modulo(next(output), i, print_warning=False)
            except TypeError:
                pass
            fig.savefig(safe_modulo(out, attrs), **kwargs)

        def close():
            pass
    else:
        # an instance of matplotlib.backends.backend_pdf.PdfPages
        def save(fig):
            output.savefig(fig, **kwargs)

        def close():
            # close_pdf=None defaults to False for a PdfPages instance
            if close_pdf:
                output.close()
    for i, fig in enumerate(self.figs, 1):
        save(fig)
    return close()
[ "def", "export", "(", "self", ",", "output", ",", "tight", "=", "False", ",", "concat", "=", "True", ",", "close_pdf", "=", "None", ",", "use_time", "=", "False", ",", "*", "*", "kwargs", ")", ":", "from", "matplotlib", ".", "backends", ".", "backend...
38.331081
21.054054
def _remove_api_url_from_link(link):
    '''Remove the API URL from the link if it is there'''
    # Strip the plain API prefix first, then the mirror prefix.
    for prefix in (_api_url(), _api_url(mirror=True)):
        if link.startswith(prefix):
            link = link[len(prefix):]
    return link
[ "def", "_remove_api_url_from_link", "(", "link", ")", ":", "if", "link", ".", "startswith", "(", "_api_url", "(", ")", ")", ":", "link", "=", "link", "[", "len", "(", "_api_url", "(", ")", ")", ":", "]", "if", "link", ".", "startswith", "(", "_api_ur...
39.142857
9.714286
def iter_variants_by_names(self, names):
    """Iterate over the genotypes for variants using a list of names.

    Args:
        names (list): The list of names for variant extraction.
    """
    for variant_name in names:
        # A single name may yield several results.
        for genotype in self.get_variant_by_name(variant_name):
            yield genotype
[ "def", "iter_variants_by_names", "(", "self", ",", "names", ")", ":", "for", "name", "in", "names", ":", "for", "result", "in", "self", ".", "get_variant_by_name", "(", "name", ")", ":", "yield", "result" ]
31.6
17.7
def encode_json_body(data):
    """
    Return prettified JSON `data`, set ``response.content_type`` to
    ``application/json; charset=utf-8``.

    Args:
        data (any): Any basic python data structure.

    Returns:
        str: Data converted to prettified JSON.
    """
    # Pass file-like objects (anything exposing .read) through untouched.
    if hasattr(data, "read"):
        return data
    response.content_type = "application/json; charset=utf-8"
    return json.dumps(data, indent=4, separators=(',', ': '))
[ "def", "encode_json_body", "(", "data", ")", ":", "# support for StringIO / file - like objects", "if", "hasattr", "(", "data", ",", "\"read\"", ")", ":", "return", "data", "response", ".", "content_type", "=", "\"application/json; charset=utf-8\"", "return", "json", ...
23.272727
20.545455
def get_channels(self, condensed=False):
    '''Grabs all channels in the slack team

    Args:
        condensed (bool): if true triggers list condensing functionality

    Returns:
        dic: Dict of channels in Slack team.
        See also: https://api.slack.com/methods/channels.list
    '''
    channel_list = self.slack_client.api_call('channels.list')
    if not channel_list.get('ok'):
        return None
    if not condensed:
        return channel_list
    # Condensed form: keep only id and name of every channel.
    return [{'id': chan.get('id'), 'name': chan.get('name')}
            for chan in channel_list.get('channels')]
[ "def", "get_channels", "(", "self", ",", "condensed", "=", "False", ")", ":", "channel_list", "=", "self", ".", "slack_client", ".", "api_call", "(", "'channels.list'", ")", "if", "not", "channel_list", ".", "get", "(", "'ok'", ")", ":", "return", "None", ...
32.190476
23.333333
def to_str(self, s):
    '''
    In py2 converts a unicode to str (bytes) using utf-8. -- in py3
    raises an error if it's not str already.
    '''
    if s.__class__ == str:
        return s
    if not IS_PY3K:
        return s.encode('utf-8')
    raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (s, s.__class__))
[ "def", "to_str", "(", "self", ",", "s", ")", ":", "if", "s", ".", "__class__", "!=", "str", ":", "if", "not", "IS_PY3K", ":", "s", "=", "s", ".", "encode", "(", "'utf-8'", ")", "else", ":", "raise", "AssertionError", "(", "'Expected to have str on Pyth...
35.909091
23.363636
def get_referenced_object(referring_object, fieldname):
    """
    Get an object referred to by a field in another object.

    For example an object of type Construction has fields for each layer,
    each of which refers to a Material. This function allows the object
    representing a Material to be fetched using the name of the layer.

    Returns the first item found since if there is more than one matching
    item, it is a malformed IDF.

    Parameters
    ----------
    referring_object : EpBunch
        The object which contains a reference to another object,
    fieldname : str
        The name of the field in the referring object which contains the
        reference to another object.

    Returns
    -------
    EpBunch
    """
    idf = referring_object.theidf
    object_list = referring_object.getfieldidd_item(fieldname, u'object-list')
    references = set(object_list)  # loop-invariant, build once
    for obj_type in idf.idfobjects:
        for candidate in idf.idfobjects[obj_type]:
            valid_object_lists = candidate.getfieldidd_item("Name", u'reference')
            if not references.intersection(set(valid_object_lists)):
                continue
            if candidate.Name == referring_object[fieldname]:
                return candidate
[ "def", "get_referenced_object", "(", "referring_object", ",", "fieldname", ")", ":", "idf", "=", "referring_object", ".", "theidf", "object_list", "=", "referring_object", ".", "getfieldidd_item", "(", "fieldname", ",", "u'object-list'", ")", "for", "obj_type", "in"...
36.69697
23.545455
def draw(self, img, pixmapper, bounds):
    '''Draw grid lines covering ``bounds`` on the image.'''
    if self.hidden:
        return
    (x, y, w, h) = bounds
    # Pick a spacing that yields between 2 and 50 lines across the
    # diagonal of the bounded area.
    spacing = 1000
    while True:
        start = mp_util.latlon_round((x, y), spacing)
        dist = mp_util.gps_distance(x, y, x + w, y + h)
        count = int(dist / spacing)
        if count < 2:
            spacing /= 10
        elif count > 50:
            spacing *= 10
        else:
            break
    for idx in range(count * 2 + 2):
        # Line offset east of `start`, extended along bearing 0 (north).
        line_start = mp_util.gps_newpos(start[0], start[1], 90, idx * spacing)
        line_end = mp_util.gps_newpos(line_start[0], line_start[1], 0,
                                      3 * count * spacing)
        self.draw_line(img, pixmapper, line_start, line_end,
                       self.colour, self.linewidth)
        # Line offset north of `start`, extended along bearing 90 (east).
        line_start = mp_util.gps_newpos(start[0], start[1], 0, idx * spacing)
        line_end = mp_util.gps_newpos(line_start[0], line_start[1], 90,
                                      3 * count * spacing)
        self.draw_line(img, pixmapper, line_start, line_end,
                       self.colour, self.linewidth)
[ "def", "draw", "(", "self", ",", "img", ",", "pixmapper", ",", "bounds", ")", ":", "if", "self", ".", "hidden", ":", "return", "(", "x", ",", "y", ",", "w", ",", "h", ")", "=", "bounds", "spacing", "=", "1000", "while", "True", ":", "start", "=...
39.12
20.8
def complete(self):
    """
    Returns whether or not this manager has reached a "completed" state.
    """
    techniques = self._techniques
    if not techniques:
        return False
    # Only techniques that override complete() participate.
    overriders = [t for t in techniques if t._is_overriden('complete')]
    if not overriders:
        return False
    return self.completion_mode(t.complete(self) for t in overriders)
[ "def", "complete", "(", "self", ")", ":", "if", "not", "self", ".", "_techniques", ":", "return", "False", "if", "not", "any", "(", "tech", ".", "_is_overriden", "(", "'complete'", ")", "for", "tech", "in", "self", ".", "_techniques", ")", ":", "return...
44
24
def _ScopesFromMetadataServer(self, scopes):
    """Returns instance scopes based on GCE metadata server."""
    if not util.DetectGce():
        raise exceptions.ResourceUnavailableError(
            'GCE credentials requested outside a GCE instance')
    if not self.GetServiceAccount(self.__service_account_name):
        raise exceptions.ResourceUnavailableError(
            'GCE credentials requested but service account '
            '%s does not exist.' % self.__service_account_name)
    if not scopes:
        # Nothing requested explicitly: use whatever the instance has.
        return self.GetInstanceScopes()
    # Requested scopes must be a subset of the instance's scopes.
    scope_ls = util.NormalizeScopes(scopes)
    instance_scopes = self.GetInstanceScopes()
    if scope_ls > instance_scopes:
        raise exceptions.CredentialsError(
            'Instance did not have access to scopes %s' % (
                sorted(list(scope_ls - instance_scopes)),))
    return scopes
[ "def", "_ScopesFromMetadataServer", "(", "self", ",", "scopes", ")", ":", "if", "not", "util", ".", "DetectGce", "(", ")", ":", "raise", "exceptions", ".", "ResourceUnavailableError", "(", "'GCE credentials requested outside a GCE instance'", ")", "if", "not", "self...
49.684211
16.263158
def find_phase(self, obj):
    r"""
    Find the Phase associated with a given object.

    Parameters
    ----------
    obj : OpenPNM Object
        Can either be a Physics or Algorithm object

    Returns
    -------
    An OpenPNM Phase object.

    Raises
    ------
    If no Phase object can be found, then an Exception is raised.
    """
    # A phase is its own associated phase.
    if obj._isa('phase'):
        return obj
    # Algorithms and similar objects record their phase in settings.
    if 'phase' in obj.settings.keys():
        return self.phases()[obj.settings['phase']]
    # Bottom-up search: find the phase holding this object's data keys.
    for candidate in self.phases().values():
        if ('pore.' + obj.name in candidate) or ('throat.' + obj.name in candidate):
            return candidate
    raise Exception('Cannot find a phase associated with ' + obj.name)
[ "def", "find_phase", "(", "self", ",", "obj", ")", ":", "# If received phase, just return self", "if", "obj", ".", "_isa", "(", "'phase'", ")", ":", "return", "obj", "# If phase happens to be in settings (i.e. algorithm), look it up", "if", "'phase'", "in", "obj", "."...
33.9
19.6
def write_sub_file(self):
    """Write a submit file for this Condor job.

    Uses Python-3-compatible ``raise Exc(msg)`` calls (the original used
    the Python-2-only ``raise Exc, msg`` statement form), ``is None``
    identity checks, and a narrowed ``except Exception`` around the
    ``open`` call instead of a bare ``except:``.

    Raises
    ------
    CondorSubmitError
        If a required file path or grid setting is missing, or the
        submit file cannot be opened.
    """
    # All output locations must be configured before writing anything.
    if not self.__log_file:
        raise CondorSubmitError("Log file not specified.")
    if not self.__err_file:
        raise CondorSubmitError("Error file not specified.")
    if not self.__out_file:
        raise CondorSubmitError("Output file not specified.")
    if not self.__sub_file_path:
        raise CondorSubmitError('No path for submit file.')
    try:
        subfile = open(self.__sub_file_path, 'w')
    except Exception:
        raise CondorSubmitError("Cannot open file " + self.__sub_file_path)
    # Grid universes need a grid type and server (plus a scheduler for gt4).
    if self.__universe == 'grid':
        if self.__grid_type is None:
            raise CondorSubmitError('No grid type specified.')
        elif self.__grid_type == 'gt2':
            if self.__grid_server is None:
                raise CondorSubmitError('No server specified for grid resource.')
        elif self.__grid_type == 'gt4':
            if self.__grid_server is None:
                raise CondorSubmitError('No server specified for grid resource.')
            if self.__grid_scheduler is None:
                raise CondorSubmitError('No scheduler specified for grid resource.')
        else:
            raise CondorSubmitError('Unsupported grid resource.')
    subfile.write('universe = ' + self.__universe + '\n')
    subfile.write('executable = ' + self.__executable + '\n')
    if self.__universe == 'grid':
        if self.__grid_type == 'gt2':
            subfile.write('grid_resource = %s %s\n' % (
                self.__grid_type, self.__grid_server))
        if self.__grid_type == 'gt4':
            subfile.write('grid_resource = %s %s %s\n' % (
                self.__grid_type, self.__grid_server, self.__grid_scheduler))
        subfile.write('when_to_transfer_output = ON_EXIT\n')
        subfile.write('transfer_output_files = $(macrooutput)\n')
        subfile.write('transfer_input_files = $(macroinput)\n')
    # Long options, short options and positional arguments are packed
    # into a single quoted `arguments` line.
    if self.__options.keys() or self.__short_options.keys() or self.__arguments:
        subfile.write('arguments = "')
        for opt in self.__options.keys():
            if self.__options[opt]:
                subfile.write(' --' + opt + ' ' + self.__options[opt])
            else:
                subfile.write(' --' + opt)
        for opt in self.__short_options.keys():
            if self.__short_options[opt]:
                subfile.write(' -' + opt + ' ' + self.__short_options[opt])
            else:
                subfile.write(' -' + opt)
        for arg in self.__arguments:
            subfile.write(' ' + arg)
        subfile.write(' "\n')
    for cmd in self.__condor_cmds.keys():
        subfile.write(str(cmd) + " = " + str(self.__condor_cmds[cmd]) + '\n')
    subfile.write('log = ' + self.__log_file + '\n')
    if self.__in_file is not None:
        subfile.write('input = ' + self.__in_file + '\n')
    subfile.write('error = ' + self.__err_file + '\n')
    subfile.write('output = ' + self.__out_file + '\n')
    if self.__notification:
        subfile.write('notification = ' + self.__notification + '\n')
    subfile.write('queue ' + str(self.__queue) + '\n')
    subfile.close()
[ "def", "write_sub_file", "(", "self", ")", ":", "if", "not", "self", ".", "__log_file", ":", "raise", "CondorSubmitError", ",", "\"Log file not specified.\"", "if", "not", "self", ".", "__err_file", ":", "raise", "CondorSubmitError", ",", "\"Error file not specified...
38.584416
18.142857
def list_of_matching(self, tup_tree, matched):
    """
    Parse only the children of particular types defined in the
    list/tuple `matched` under `tup_tree`. Other children are ignored
    rather than giving an error.
    """
    return [self.parse_any(child) for child in kids(tup_tree)
            if name(child) in matched]
[ "def", "list_of_matching", "(", "self", ",", "tup_tree", ",", "matched", ")", ":", "result", "=", "[", "]", "for", "child", "in", "kids", "(", "tup_tree", ")", ":", "if", "name", "(", "child", ")", "not", "in", "matched", ":", "continue", "result", "...
26.8125
19.0625
def write(cls, filename, samples, write_params=None, static_args=None, **metadata): """Writes the injection samples to the given hdf file. Parameters ---------- filename : str The name of the file to write to. samples : io.FieldArray FieldArray of parameters. write_params : list, optional Only write the given parameter names. All given names must be keys in ``samples``. Default is to write all parameters in ``samples``. static_args : dict, optional Dictionary mapping static parameter names to values. These are written to the ``attrs``. \**metadata : All other keyword arguments will be written to the file's attrs. """ with h5py.File(filename, 'w') as fp: # write metadata if static_args is None: static_args = {} fp.attrs["static_args"] = static_args.keys() fp.attrs['injtype'] = cls.injtype for key, val in metadata.items(): fp.attrs[key] = val if write_params is None: write_params = samples.fieldnames for arg, val in static_args.items(): fp.attrs[arg] = val for field in write_params: fp[field] = samples[field]
[ "def", "write", "(", "cls", ",", "filename", ",", "samples", ",", "write_params", "=", "None", ",", "static_args", "=", "None", ",", "*", "*", "metadata", ")", ":", "with", "h5py", ".", "File", "(", "filename", ",", "'w'", ")", "as", "fp", ":", "# ...
40.757576
12.969697
def load_xml_attrs(self): """ Load XML attributes as object attributes. :returns: List of parsed attributes. :rtype: list """ attrs_list = list() if hasattr(self, 'xml_element'): xml_attrs = self.xml_element.attrib for variable, value in iter(xml_attrs.items()): uri, tag = Element.get_namespace_and_tag(variable) tag = tag.replace('-', '_') attrs_list.append(tag) setattr(self, tag, value) self.attrs = attrs_list return self.attrs
[ "def", "load_xml_attrs", "(", "self", ")", ":", "attrs_list", "=", "list", "(", ")", "if", "hasattr", "(", "self", ",", "'xml_element'", ")", ":", "xml_attrs", "=", "self", ".", "xml_element", ".", "attrib", "for", "variable", ",", "value", "in", "iter",...
26.409091
17.227273
def init_app(self, app, router=None, realm=None, in_twisted=None): """Configure and call the :meth:`AutobahnSync.start` method :param app: Flask app to configure :param router: WAMP router to connect to :param realm: WAMP realm to connect to :param in_twisted: Is the code is going to run inside a Twisted application .. Note:: The config provided as argument will overwrite the one privided by ``app.config`` """ router = router or app.config.get('AUTHOBAHN_ROUTER') realm = realm or app.config.get('AUTHOBAHN_REALM') in_twisted = in_twisted or app.config.get('AUTHOBAHN_IN_TWISTED') if router: self.config['router'] = router if realm: self.config['realm'] = realm if in_twisted: self.run_in_twisted(url=self.config['router'], realm=self.config['realm']) else: self.run(url=self.config['router'], realm=self.config['realm'])
[ "def", "init_app", "(", "self", ",", "app", ",", "router", "=", "None", ",", "realm", "=", "None", ",", "in_twisted", "=", "None", ")", ":", "router", "=", "router", "or", "app", ".", "config", ".", "get", "(", "'AUTHOBAHN_ROUTER'", ")", "realm", "="...
46.047619
22.333333
def _set_child_joined_alias_using_join_map(child, join_map, alias_map): """ Set the joined alias on the child, for Django <= 1.7.x. :param child: :param join_map: :param alias_map: """ for lhs, table, join_cols in join_map: if lhs is None: continue if lhs == child.alias: relevant_alias = child.related_alias elif lhs == child.related_alias: relevant_alias = child.alias else: continue join_info = alias_map[relevant_alias] if join_info.join_type is None: continue if join_info.lhs_alias in [child.alias, child.related_alias]: child.set_joined_alias(relevant_alias) break
[ "def", "_set_child_joined_alias_using_join_map", "(", "child", ",", "join_map", ",", "alias_map", ")", ":", "for", "lhs", ",", "table", ",", "join_cols", "in", "join_map", ":", "if", "lhs", "is", "None", ":", "continue", "if", "lhs", "==", "child", ".", "a...
33.416667
15.75
def _FormatSocketUnixToken(self, token_data): """Formats an Unix socket token as a dictionary of values. Args: token_data (bsm_token_data_sockunix): AUT_SOCKUNIX token data. Returns: dict[str, str]: token values. """ protocol = bsmtoken.BSM_PROTOCOLS.get(token_data.socket_family, 'UNKNOWN') return { 'protocols': protocol, 'family': token_data.socket_family, 'path': token_data.socket_path}
[ "def", "_FormatSocketUnixToken", "(", "self", ",", "token_data", ")", ":", "protocol", "=", "bsmtoken", ".", "BSM_PROTOCOLS", ".", "get", "(", "token_data", ".", "socket_family", ",", "'UNKNOWN'", ")", "return", "{", "'protocols'", ":", "protocol", ",", "'fami...
31.428571
18.357143
def get_conversations(self): """ Returns list of Conversation objects """ cs = self.data["data"] res = [] for c in cs: res.append(Conversation(c)) return res
[ "def", "get_conversations", "(", "self", ")", ":", "cs", "=", "self", ".", "data", "[", "\"data\"", "]", "res", "=", "[", "]", "for", "c", "in", "cs", ":", "res", ".", "append", "(", "Conversation", "(", "c", ")", ")", "return", "res" ]
24.222222
10.444444
def is_extension_supported(request, extension_alias): """Check if a specified extension is supported. :param request: django request object :param extension_alias: neutron extension alias """ extensions = list_extensions(request) for extension in extensions: if extension['alias'] == extension_alias: return True else: return False
[ "def", "is_extension_supported", "(", "request", ",", "extension_alias", ")", ":", "extensions", "=", "list_extensions", "(", "request", ")", "for", "extension", "in", "extensions", ":", "if", "extension", "[", "'alias'", "]", "==", "extension_alias", ":", "retu...
31.416667
12.583333
def generate(ast_tree: ast.Tree, model_name: str): """ :param ast_tree: AST to generate from :param model_name: class to generate :return: sympy source code for model """ component_ref = ast.ComponentRef.from_string(model_name) ast_tree_new = copy.deepcopy(ast_tree) ast_walker = TreeWalker() flat_tree = flatten(ast_tree_new, component_ref) gen = XmlGenerator() ast_walker.walk(gen, flat_tree) return etree.tostring(gen.xml[flat_tree], pretty_print=True).decode('utf-8')
[ "def", "generate", "(", "ast_tree", ":", "ast", ".", "Tree", ",", "model_name", ":", "str", ")", ":", "component_ref", "=", "ast", ".", "ComponentRef", ".", "from_string", "(", "model_name", ")", "ast_tree_new", "=", "copy", ".", "deepcopy", "(", "ast_tree...
39
9
def set_dataset_date(self, dataset_date, dataset_end_date=None, date_format=None): # type: (str, Optional[str], Optional[str]) -> None """Set dataset date from string using specified format. If no format is supplied, the function will guess. For unambiguous formats, this should be fine. Args: dataset_date (str): Dataset date string dataset_end_date (Optional[str]): Dataset end date string date_format (Optional[str]): Date format. If None is given, will attempt to guess. Defaults to None. Returns: None """ parsed_date = self._parse_date(dataset_date, date_format) if dataset_end_date is None: self.set_dataset_date_from_datetime(parsed_date) else: parsed_end_date = self._parse_date(dataset_end_date, date_format) self.set_dataset_date_from_datetime(parsed_date, parsed_end_date)
[ "def", "set_dataset_date", "(", "self", ",", "dataset_date", ",", "dataset_end_date", "=", "None", ",", "date_format", "=", "None", ")", ":", "# type: (str, Optional[str], Optional[str]) -> None", "parsed_date", "=", "self", ".", "_parse_date", "(", "dataset_date", ",...
48.631579
25.842105
def cmdline_params(self, distance_matrix_file_name='distance.matrix', remote_folder_path=None): """Synthesize command line parameters e.g. [ ['--output-file', 'out.barcode'], ['distance_matrix.file']] :param distance_matrix_file_name: Name of distance matrix file :param remote_folder_path: Path to remote folder containing distance matrix file """ parameters = [] pm_dict = self.get_dict() for k, v in pm_dict.iteritems(): parameters += ['--' + k, v] # distance matrix can be provided via remote folder if remote_folder_path is None: parameters += [distance_matrix_file_name] else: parameters += [remote_folder_path + distance_matrix_file_name] return map(str, parameters)
[ "def", "cmdline_params", "(", "self", ",", "distance_matrix_file_name", "=", "'distance.matrix'", ",", "remote_folder_path", "=", "None", ")", ":", "parameters", "=", "[", "]", "pm_dict", "=", "self", ".", "get_dict", "(", ")", "for", "k", ",", "v", "in", ...
34.75
22
def is_editable(self, request): """ Restrict in-line editing to the objects's owner and superusers. """ return request.user.is_superuser or request.user.id == self.user_id
[ "def", "is_editable", "(", "self", ",", "request", ")", ":", "return", "request", ".", "user", ".", "is_superuser", "or", "request", ".", "user", ".", "id", "==", "self", ".", "user_id" ]
39.8
15
def setlist(self, key, new_list): # type: (Hashable, List[Any]) -> None """ Remove the old values for a key and add new ones. Note that the list you pass the values in will be shallow-copied before it is inserted in the dictionary. >>> d = MultiValueDict() >>> d.setlist('foo', ['1', '2']) >>> d['foo'] '1' >>> d.getlist('foo') ['1', '2'] :param key: The key for which the values are set. :param new_list: An iterable with the new values for the key. Old values are removed first. """ dict.__setitem__(self, key, list(new_list))
[ "def", "setlist", "(", "self", ",", "key", ",", "new_list", ")", ":", "# type: (Hashable, List[Any]) -> None", "dict", ".", "__setitem__", "(", "self", ",", "key", ",", "list", "(", "new_list", ")", ")" ]
38.764706
15.705882
def list(self, **kwargs): """Retrieve a list of objects. Args: all (bool): If True, return all the items, without pagination per_page (int): Number of items to retrieve per request page (int): ID of the page to return (starts with page 1) as_list (bool): If set to False and no pagination option is defined, return a generator instead of a list **kwargs: Extra options to send to the server (e.g. sudo) Returns: list: The list of objects, or a generator if `as_list` is False Raises: GitlabAuthenticationError: If authentication is not correct GitlabListError: If the server cannot perform the request """ path = self._compute_path('/projects/%(project_id)s/forks') return ListMixin.list(self, path=path, **kwargs)
[ "def", "list", "(", "self", ",", "*", "*", "kwargs", ")", ":", "path", "=", "self", ".", "_compute_path", "(", "'/projects/%(project_id)s/forks'", ")", "return", "ListMixin", ".", "list", "(", "self", ",", "path", "=", "path", ",", "*", "*", "kwargs", ...
41.238095
26.619048
def _apply_policy_config(policy_spec, policy_dict): '''Applies a policy dictionary to a policy spec''' log.trace('policy_dict = %s', policy_dict) if policy_dict.get('name'): policy_spec.name = policy_dict['name'] if policy_dict.get('description'): policy_spec.description = policy_dict['description'] if policy_dict.get('subprofiles'): # Incremental changes to subprofiles and capabilities are not # supported because they would complicate updates too much # The whole configuration of all sub-profiles is expected and applied policy_spec.constraints = pbm.profile.SubProfileCapabilityConstraints() subprofiles = [] for subprofile_dict in policy_dict['subprofiles']: subprofile_spec = \ pbm.profile.SubProfileCapabilityConstraints.SubProfile( name=subprofile_dict['name']) cap_specs = [] if subprofile_dict.get('force_provision'): subprofile_spec.forceProvision = \ subprofile_dict['force_provision'] for cap_dict in subprofile_dict['capabilities']: prop_inst_spec = pbm.capability.PropertyInstance( id=cap_dict['id'] ) setting_type = cap_dict['setting']['type'] if setting_type == 'set': prop_inst_spec.value = pbm.capability.types.DiscreteSet() prop_inst_spec.value.values = cap_dict['setting']['values'] elif setting_type == 'range': prop_inst_spec.value = pbm.capability.types.Range() prop_inst_spec.value.max = cap_dict['setting']['max'] prop_inst_spec.value.min = cap_dict['setting']['min'] elif setting_type == 'scalar': prop_inst_spec.value = cap_dict['setting']['value'] cap_spec = pbm.capability.CapabilityInstance( id=pbm.capability.CapabilityMetadata.UniqueId( id=cap_dict['id'], namespace=cap_dict['namespace']), constraint=[pbm.capability.ConstraintInstance( propertyInstance=[prop_inst_spec])]) cap_specs.append(cap_spec) subprofile_spec.capability = cap_specs subprofiles.append(subprofile_spec) policy_spec.constraints.subProfiles = subprofiles log.trace('updated policy_spec = %s', policy_spec) return policy_spec
[ "def", "_apply_policy_config", "(", "policy_spec", ",", "policy_dict", ")", ":", "log", ".", "trace", "(", "'policy_dict = %s'", ",", "policy_dict", ")", "if", "policy_dict", ".", "get", "(", "'name'", ")", ":", "policy_spec", ".", "name", "=", "policy_dict", ...
53.638298
17.723404
def query_topology_db(self, dict_convert=False, **req): """Query an entry to the topology DB. """ session = db.get_session() with session.begin(subtransactions=True): try: # Check if entry exists. topo_disc = session.query(DfaTopologyDb).filter_by(**req).all() except orm_exc.NoResultFound: LOG.info("No Topology results found for %s", req) return None if dict_convert: return self._convert_topo_obj_dict(topo_disc) return topo_disc
[ "def", "query_topology_db", "(", "self", ",", "dict_convert", "=", "False", ",", "*", "*", "req", ")", ":", "session", "=", "db", ".", "get_session", "(", ")", "with", "session", ".", "begin", "(", "subtransactions", "=", "True", ")", ":", "try", ":", ...
43.076923
13.923077
def _submitQuery(self, gitquery, gitvars={}, verbose=False, rest=False): """Send a curl request to GitHub. Args: gitquery (str): The query or endpoint itself. Examples: query: 'query { viewer { login } }' endpoint: '/user' gitvars (Optional[Dict]): All query variables. Defaults to empty. verbose (Optional[bool]): If False, stderr prints will be suppressed. Defaults to False. rest (Optional[bool]): If True, uses the REST API instead of GraphQL. Defaults to False. Returns: { 'statusNum' (int): The HTTP status code. 'headDict' (Dict[str]): The response headers. 'linkDict' (Dict[int]): Link based pagination data. 'result' (str): The body of the response. } """ errOut = DEVNULL if not verbose else None authhead = 'Authorization: bearer ' + self.__githubApiToken bashcurl = 'curl -iH TMPauthhead -X POST -d TMPgitquery https://api.github.com/graphql' if not rest \ else 'curl -iH TMPauthhead https://api.github.com' + gitquery bashcurl_list = bashcurl.split() bashcurl_list[2] = authhead if not rest: gitqueryJSON = json.dumps({'query': gitquery, 'variables': json.dumps(gitvars)}) bashcurl_list[6] = gitqueryJSON fullResponse = check_output(bashcurl_list, stderr=errOut).decode() _vPrint(verbose, "\n" + fullResponse) fullResponse = fullResponse.split('\r\n\r\n') heads = fullResponse[0].split('\r\n') if len(fullResponse) > 1: result = fullResponse[1] else: result = "" http = heads[0].split() statusNum = int(http[1]) # Parse headers into a useful dictionary headDict = {} headDict["http"] = heads[0] for header in heads[1:]: h = header.split(': ') headDict[h[0]] = h[1] # Parse any Link headers even further linkDict = None if "Link" in headDict: linkProperties = headDict["Link"].split(', ') propDict = {} for item in linkProperties: divided = re.split(r'<https://api.github.com|>; rel="|"', item) propDict[divided[2]] = divided[1] linkDict = propDict return {'statusNum': statusNum, 'headDict': headDict, 'linkDict': linkDict, 'result': result}
[ "def", "_submitQuery", "(", "self", ",", "gitquery", ",", "gitvars", "=", "{", "}", ",", "verbose", "=", "False", ",", "rest", "=", "False", ")", ":", "errOut", "=", "DEVNULL", "if", "not", "verbose", "else", "None", "authhead", "=", "'Authorization: bea...
39.28125
19.8125
def parse_raw_token(self, raw_token): "Parse token and secret from raw token response." if raw_token is None: return (None, None) # Load as json first then parse as query string try: token_data = json.loads(raw_token) except ValueError: qs = parse_qs(raw_token) token = qs.get('access_token', [None])[0] else: token = token_data.get('access_token', None) return (token, None)
[ "def", "parse_raw_token", "(", "self", ",", "raw_token", ")", ":", "if", "raw_token", "is", "None", ":", "return", "(", "None", ",", "None", ")", "# Load as json first then parse as query string", "try", ":", "token_data", "=", "json", ".", "loads", "(", "raw_...
36.846154
13.461538
def simplified_edges(self): """ A generator for getting all of the edges without consuming extra memory. """ for group, edgelist in self.edges.items(): for u, v, d in edgelist: yield (u, v)
[ "def", "simplified_edges", "(", "self", ")", ":", "for", "group", ",", "edgelist", "in", "self", ".", "edges", ".", "items", "(", ")", ":", "for", "u", ",", "v", ",", "d", "in", "edgelist", ":", "yield", "(", "u", ",", "v", ")" ]
31.25
12
def fromapi(_class, apiresponse): """Create a bulletin object from an API response (dict), containing `sbj`, etc.""" for resp in apiresponse['sb']: # Extract details from dict _id = "n/a" or resp.get("nm") subject = resp.get("sbj") text = resp.get('dtl') + "\n" + resp.get('brf') priority = "n/a" or resp.get('prty') for_stops, for_routes = [], [] svc = resp.get('srvc') # Create list of affected routes/stops, if there are any if svc: has_stop = 'stpid' in svc or 'stpnm' in svc has_rt = 'rt' in svc or 'rtdir' in svc if has_stop: aff = _class.affected_service('stop', svc.get('stpid'), svc.get('stpnm')) for_stops.append(aff) if has_rt: aff = _class.affected_service('route', svc.get('rt'), svc.get('rtdir')) for_routes.append(aff) yield _class(_id, subject, text, priority, for_stops, for_routes)
[ "def", "fromapi", "(", "_class", ",", "apiresponse", ")", ":", "for", "resp", "in", "apiresponse", "[", "'sb'", "]", ":", "# Extract details from dict", "_id", "=", "\"n/a\"", "or", "resp", ".", "get", "(", "\"nm\"", ")", "subject", "=", "resp", ".", "ge...
45
16.666667
def _insert_layer_between(self, src, snk, new_layer, new_keras_layer): """ Insert the new_layer before layer, whose position is layer_idx. The new layer's parameter is stored in a Keras layer called new_keras_layer """ if snk is None: insert_pos = self.layer_list.index(src) + 1 else: insert_pos = self.layer_list.index(snk) # insert position self.layer_list.insert(insert_pos, new_layer) self.keras_layer_map[new_layer] = new_keras_layer if src is None: # snk is an input layer self._add_edge(new_layer, snk) elif snk is None: # src is an output layer self._add_edge(src, new_layer) else: self._add_edge(src, new_layer) self._add_edge(new_layer, snk) self._remove_edge(src, snk)
[ "def", "_insert_layer_between", "(", "self", ",", "src", ",", "snk", ",", "new_layer", ",", "new_keras_layer", ")", ":", "if", "snk", "is", "None", ":", "insert_pos", "=", "self", ".", "layer_list", ".", "index", "(", "src", ")", "+", "1", "else", ":",...
43.894737
14.526316
def verify(self, obj): """Verify that the object conforms to this verifier's schema Args: obj (object): A python object to verify Raises: ValidationError: If there is a problem verifying the dictionary, a ValidationError is thrown with at least the reason key set indicating the reason for the lack of validation. """ if obj != self._literal: raise ValidationError("Object is not equal to literal", reason='%s is not equal to %s' % (str(obj), str(self._literal)), object=obj) return obj
[ "def", "verify", "(", "self", ",", "obj", ")", ":", "if", "obj", "!=", "self", ".", "_literal", ":", "raise", "ValidationError", "(", "\"Object is not equal to literal\"", ",", "reason", "=", "'%s is not equal to %s'", "%", "(", "str", "(", "obj", ")", ",", ...
36.705882
27.352941
def init_registry(mongo, model_defs, clear_collection=False): """Initialize a model registry with a list of model definitions in Json format. Parameters ---------- mongo : scodata.MongoDBFactory Connector for MongoDB model_defs : list() List of model definitions in Json-like format clear_collection : boolean If true, collection will be dropped before models are created """ # Create model registry registry = DefaultModelRegistry(mongo) # Drop collection if clear flag is set to True if clear_collection: registry.clear() for i in range(len(model_defs)): model = registry.from_json(model_defs[i]) registry.register_model( model.identifier, model.properties, model.parameters, model.outputs )
[ "def", "init_registry", "(", "mongo", ",", "model_defs", ",", "clear_collection", "=", "False", ")", ":", "# Create model registry", "registry", "=", "DefaultModelRegistry", "(", "mongo", ")", "# Drop collection if clear flag is set to True", "if", "clear_collection", ":"...
31.730769
14.807692
def modify(self, **params): """https://developers.coinbase.com/api#modify-an-account""" data = self.api_client.update_account(self.id, **params) self.update(data) return data
[ "def", "modify", "(", "self", ",", "*", "*", "params", ")", ":", "data", "=", "self", ".", "api_client", ".", "update_account", "(", "self", ".", "id", ",", "*", "*", "params", ")", "self", ".", "update", "(", "data", ")", "return", "data" ]
40.4
14.6
def fishers_method(pvals): """Fisher's method for combining independent p-values.""" pvals = np.asarray(pvals) degrees_of_freedom = 2 * pvals.size chisq_stat = np.sum(-2*np.log(pvals)) fishers_pval = stats.chi2.sf(chisq_stat, degrees_of_freedom) return fishers_pval
[ "def", "fishers_method", "(", "pvals", ")", ":", "pvals", "=", "np", ".", "asarray", "(", "pvals", ")", "degrees_of_freedom", "=", "2", "*", "pvals", ".", "size", "chisq_stat", "=", "np", ".", "sum", "(", "-", "2", "*", "np", ".", "log", "(", "pval...
40.428571
9.714286
def splits(cls, fields, root=".data", train="train.txt", test="test.txt", validation_frac=0.1, **kwargs): """Downloads and loads the CoNLL 2000 Chunking dataset. NOTE: There is only a train and test dataset so we use 10% of the train set as validation """ train, test = super(CoNLL2000Chunking, cls).splits( fields=fields, root=root, train=train, test=test, separator=' ', **kwargs) # HACK: Saving the sort key function as the split() call removes it sort_key = train.sort_key # Now split the train set # Force a random seed to make the split deterministic random.seed(0) train, val = train.split(1 - validation_frac, random_state=random.getstate()) # Reset the seed random.seed() # HACK: Set the sort key train.sort_key = sort_key val.sort_key = sort_key return train, val, test
[ "def", "splits", "(", "cls", ",", "fields", ",", "root", "=", "\".data\"", ",", "train", "=", "\"train.txt\"", ",", "test", "=", "\"test.txt\"", ",", "validation_frac", "=", "0.1", ",", "*", "*", "kwargs", ")", ":", "train", ",", "test", "=", "super", ...
36.153846
19.461538
def gated_linear_unit_layer(x, name=None): """Gated linear unit layer. Paper: Language Modeling with Gated Convolutional Networks. Link: https://arxiv.org/abs/1612.08083 x = Wx * sigmoid(W'x). Args: x: A tensor name: A string Returns: A tensor of the same shape as x. """ with tf.variable_scope(name, default_name="glu_layer", values=[x]): depth = shape_list(x)[-1] x = layers().Dense(depth * 2, activation=None)(x) x, gating_x = tf.split(x, 2, axis=-1) return x * tf.nn.sigmoid(gating_x)
[ "def", "gated_linear_unit_layer", "(", "x", ",", "name", "=", "None", ")", ":", "with", "tf", ".", "variable_scope", "(", "name", ",", "default_name", "=", "\"glu_layer\"", ",", "values", "=", "[", "x", "]", ")", ":", "depth", "=", "shape_list", "(", "...
27.210526
17.315789
def find_course_and_crosslistings(self, partial): """Returns the given course and all other courses it is crosslisted with. """ course = self.find_course(partial) crosslisted = self.crosslisted_with(course.crn) return (course,) + tuple(map(self.find_course_by_crn, crosslisted))
[ "def", "find_course_and_crosslistings", "(", "self", ",", "partial", ")", ":", "course", "=", "self", ".", "find_course", "(", "partial", ")", "crosslisted", "=", "self", ".", "crosslisted_with", "(", "course", ".", "crn", ")", "return", "(", "course", ",", ...
45.714286
10.857143
def redirect_to_url(req, url, redirection_type=None, norobot=False): """ Redirect current page to url. @param req: request as received from apache @param url: url to redirect to @param redirection_type: what kind of redirection is required: e.g.: apache.HTTP_MULTIPLE_CHOICES = 300 apache.HTTP_MOVED_PERMANENTLY = 301 apache.HTTP_MOVED_TEMPORARILY = 302 apache.HTTP_SEE_OTHER = 303 apache.HTTP_NOT_MODIFIED = 304 apache.HTTP_USE_PROXY = 305 apache.HTTP_TEMPORARY_REDIRECT = 307 The default is apache.HTTP_MOVED_TEMPORARILY @param norobot: wether to instruct crawlers and robots such as GoogleBot not to index past this point. @see: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3 """ url = url.strip() if redirection_type is None: redirection_type = apache.HTTP_MOVED_TEMPORARILY from flask import redirect r = redirect(url, code=redirection_type) raise apache.SERVER_RETURN(r) # FIXME enable code bellow del req.headers_out["Cache-Control"] req.headers_out["Cache-Control"] = "no-cache, private, no-store, " \ "must-revalidate, post-check=0, pre-check=0, max-age=0" req.headers_out["Pragma"] = "no-cache" if norobot: req.headers_out[ "X-Robots-Tag"] = "noarchive, nosnippet, noindex, nocache" user_agent = req.headers_in.get('User-Agent', '') if 'Microsoft Office Existence Discovery' in user_agent or 'ms-office' in user_agent: # HACK: this is to workaround Microsoft Office trying to be smart # when users click on URLs in Office documents that require # authentication. Office will check the validity of the URL # but will pass the browser the redirected URL rather than # the original one. This is incompatible with e.g. Shibboleth # based SSO since the referer would be lost. 
# See: http://support.microsoft.com/kb/899927 req.status = 200 req.content_type = 'text/html' if req.method != 'HEAD': req.write(""" <html> <head> <title>Intermediate page for URLs clicked on MS Office Documents</title> <meta http-equiv="REFRESH" content="5;url=%(url)s"></meta> </head> <body> <p>You are going to be redirected to the desired content within 5 seconds. If the redirection does not happen automatically please click on <a href="%(url)s">%(url_ok)s</a>.</p> </body> </html>""" % { 'url': escape(req.unparsed_uri, True), 'url_ok': escape(req.unparsed_uri) }) raise apache.SERVER_RETURN(apache.DONE) req.headers_out["Location"] = url if req.response_sent_p: raise IOError("Cannot redirect after headers have already been sent.") req.status = redirection_type req.write('<p>Please go to <a href="%s">here</a></p>\n' % url) raise apache.SERVER_RETURN(apache.DONE)
[ "def", "redirect_to_url", "(", "req", ",", "url", ",", "redirection_type", "=", "None", ",", "norobot", "=", "False", ")", ":", "url", "=", "url", ".", "strip", "(", ")", "if", "redirection_type", "is", "None", ":", "redirection_type", "=", "apache", "."...
41.791667
21.555556
def _kl_laplace_laplace(a, b, name=None): """Calculate the batched KL divergence KL(a || b) with a and b Laplace. Args: a: instance of a Laplace distribution object. b: instance of a Laplace distribution object. name: (optional) Name to use for created operations. default is "kl_laplace_laplace". Returns: Batchwise KL(a || b) """ with tf.name_scope(name or "kl_laplace_laplace"): # Consistent with # http://www.mast.queensu.ca/~communications/Papers/gil-msc11.pdf, page 38 distance = tf.abs(a.loc - b.loc) ratio = a.scale / b.scale return (-tf.math.log(ratio) - 1 + distance / b.scale + ratio * tf.exp(-distance / a.scale))
[ "def", "_kl_laplace_laplace", "(", "a", ",", "b", ",", "name", "=", "None", ")", ":", "with", "tf", ".", "name_scope", "(", "name", "or", "\"kl_laplace_laplace\"", ")", ":", "# Consistent with", "# http://www.mast.queensu.ca/~communications/Papers/gil-msc11.pdf, page 38...
33.65
17.25
def build_or_install_bokehjs(): ''' Build a new BokehJS (and install it) or install a previously build BokehJS. If no options ``--build-js`` or ``--install-js`` are detected, the user is prompted for what to do. If ``--existing-js`` is detected, then this setup.py is being run from a packaged sdist, no action is taken. Note that ``-build-js`` is only compatible with the following ``setup.py`` commands: install, develop, sdist, egg_info, build Returns: str : one of 'built', 'installed', 'packaged' How (or if) BokehJS was installed into the python source tree ''' # This happens when building from inside a published, pre-packaged sdist # The --existing-js option is not otherwise documented if '--existing-js' in sys.argv: sys.argv.remove('--existing-js') return "packaged" if '--build-js' not in sys.argv and '--install-js' not in sys.argv: jsbuild = jsbuild_prompt() elif '--build-js' in sys.argv: jsbuild = True sys.argv.remove('--build-js') # must be "--install-js" else: jsbuild = False sys.argv.remove('--install-js') jsbuild_ok = ('install', 'develop', 'sdist', 'egg_info', 'build') if jsbuild and not any(arg in sys.argv for arg in jsbuild_ok): print("Error: Option '--build-js' only valid with 'install', 'develop', 'sdist', or 'build', exiting.") sys.exit(1) if jsbuild: build_js() install_js() return "built" else: install_js() return "installed"
[ "def", "build_or_install_bokehjs", "(", ")", ":", "# This happens when building from inside a published, pre-packaged sdist", "# The --existing-js option is not otherwise documented", "if", "'--existing-js'", "in", "sys", ".", "argv", ":", "sys", ".", "argv", ".", "remove", "("...
31.44898
25.244898
def _create_embedded_unclaimed_draft_with_template(self, test_mode=False, client_id=None, is_for_embedded_signing=False, template_id=None, template_ids=None, requester_email_address=None, title=None, subject=None, message=None, signers=None, ccs=None, signing_redirect_url=None, requesting_redirect_url=None, metadata=None, custom_fields=None, allow_decline=False): ''' Helper method for creating unclaimed drafts from templates See public function for params. ''' #single params payload = { "test_mode": self._boolean(test_mode), "client_id": client_id, "is_for_embedded_signing": self._boolean(is_for_embedded_signing), "template_id": template_id, "requester_email_address": requester_email_address, "title": title, "subject": subject, "message": message, "signing_redirect_url": signing_redirect_url, "requesting_redirect_url": requesting_redirect_url, "allow_decline": self._boolean(allow_decline) } #format multi params template_ids_payload = HSFormat.format_param_list(template_ids, 'template_ids') signers_payload = HSFormat.format_dict_list(signers, 'signers', 'role_name') ccs_payload = HSFormat.format_dict_list(ccs, 'ccs', 'role_name') metadata_payload = HSFormat.format_single_dict(metadata, 'metadata') custom_fields_payload = HSFormat.format_custom_fields(custom_fields) #assemble payload data = {} data.update(payload) data.update(template_ids_payload) data.update(signers_payload) data.update(ccs_payload) data.update(metadata_payload) data.update(custom_fields_payload) data = HSFormat.strip_none_values(data) #send call url = self.UNCLAIMED_DRAFT_CREATE_EMBEDDED_WITH_TEMPLATE_URL request = self._get_request() response = request.post(url, data=data) return response
[ "def", "_create_embedded_unclaimed_draft_with_template", "(", "self", ",", "test_mode", "=", "False", ",", "client_id", "=", "None", ",", "is_for_embedded_signing", "=", "False", ",", "template_id", "=", "None", ",", "template_ids", "=", "None", ",", "requester_emai...
46.302326
27.465116
def principal_curve(data, basis='pca', n_comps=4, clusters_list=None, copy=False): """Computes the principal curve Arguments --------- data: :class:`~anndata.AnnData` Annotated data matrix. basis: `str` (default: `'pca'`) Basis to use for computing the principal curve. n_comps: `int` (default: 4) Number of pricipal components to be used. copy: `bool`, (default: `False`) Return a copy instead of writing to adata. Returns ------- Returns or updates `adata` with the attributes principal_curve: `.uns` dictionary containing `projections`, `ixsort` and `arclength` """ adata = data.copy() if copy else data import rpy2.robjects as robjects from rpy2.robjects.packages import importr if clusters_list is not None: cell_subset = np.array([label in clusters_list for label in adata.obs['clusters']]) X_emb = adata[cell_subset].obsm['X_' + basis][:, :n_comps] else: cell_subset = None X_emb = adata.obsm['X_' + basis][:, :n_comps] n_obs, n_dim = X_emb.shape # convert array to R matrix xvec = robjects.FloatVector(X_emb.T.reshape((X_emb.size))) X_R = robjects.r.matrix(xvec, nrow=n_obs, ncol=n_dim) fit = importr("princurve").principal_curve(X_R) adata.uns['principal_curve'] = dict() adata.uns['principal_curve']['ixsort'] = ixsort = np.array(fit[1])-1 adata.uns['principal_curve']['projections'] = np.array(fit[0])[ixsort] adata.uns['principal_curve']['arclength'] = np.array(fit[2]) adata.uns['principal_curve']['cell_subset'] = cell_subset return adata if copy else None
[ "def", "principal_curve", "(", "data", ",", "basis", "=", "'pca'", ",", "n_comps", "=", "4", ",", "clusters_list", "=", "None", ",", "copy", "=", "False", ")", ":", "adata", "=", "data", ".", "copy", "(", ")", "if", "copy", "else", "data", "import", ...
35.326087
20.108696
def serialize(obj, no_dump=False): """ Serialize an object. Returns a dict containing an `_error` property if a MemoryError happens during the object serialization. See #369. :param obj: the object to serialize :type obj: alignak.objects.item.Item | dict | list | str :param no_dump: if True return dict, otherwise return a json :type no_dump: bool :return: dict or json dumps dict with the following structure :: {'__sys_python_module__': "%s.%s" % (o_cls.__module__, o_cls.__name__) 'content' : obj.serialize()} :rtype: dict | str """ # print("Serialize (%s): %s" % (no_dump, obj)) if hasattr(obj, "serialize") and isinstance(obj.serialize, collections.Callable): o_dict = { '__sys_python_module__': "%s.%s" % (obj.__class__.__module__, obj.__class__.__name__), 'content': obj.serialize() } elif isinstance(obj, dict): o_dict = {} for key, value in list(obj.items()): o_dict[key] = serialize(value, True) elif isinstance(obj, (list, set)): o_dict = [serialize(item, True) for item in obj] else: o_dict = obj if no_dump: return o_dict result = None try: result = json.dumps(o_dict, ensure_ascii=False) except MemoryError: return {'_error': 'Not enough memory on this computer to correctly manage Alignak ' 'objects serialization! ' 'Sorry for this, please log an issue in the project repository.'} return result
[ "def", "serialize", "(", "obj", ",", "no_dump", "=", "False", ")", ":", "# print(\"Serialize (%s): %s\" % (no_dump, obj))", "if", "hasattr", "(", "obj", ",", "\"serialize\"", ")", "and", "isinstance", "(", "obj", ".", "serialize", ",", "collections", ".", "Calla...
31.9375
24.354167
def _object_instance_content(obj): """ Returns consistant content for a action class or an instance thereof :Parameters: - `obj` Should be either and action class or an instance thereof :Returns: bytearray or bytes representing the obj suitable for generating a signature from. """ retval = bytearray() if obj is None: return b'N.' if isinstance(obj, SCons.Util.BaseStringTypes): return SCons.Util.to_bytes(obj) inst_class = obj.__class__ inst_class_name = bytearray(obj.__class__.__name__,'utf-8') inst_class_module = bytearray(obj.__class__.__module__,'utf-8') inst_class_hierarchy = bytearray(repr(inspect.getclasstree([obj.__class__,])),'utf-8') # print("ICH:%s : %s"%(inst_class_hierarchy, repr(obj))) properties = [(p, getattr(obj, p, "None")) for p in dir(obj) if not (p[:2] == '__' or inspect.ismethod(getattr(obj, p)) or inspect.isbuiltin(getattr(obj,p))) ] properties.sort() properties_str = ','.join(["%s=%s"%(p[0],p[1]) for p in properties]) properties_bytes = bytearray(properties_str,'utf-8') methods = [p for p in dir(obj) if inspect.ismethod(getattr(obj, p))] methods.sort() method_contents = [] for m in methods: # print("Method:%s"%m) v = _function_contents(getattr(obj, m)) # print("[%s->]V:%s [%s]"%(m,v,type(v))) method_contents.append(v) retval = bytearray(b'{') retval.extend(inst_class_name) retval.extend(b":") retval.extend(inst_class_module) retval.extend(b'}[[') retval.extend(inst_class_hierarchy) retval.extend(b']]{{') retval.extend(bytearray(b",").join(method_contents)) retval.extend(b"}}{{{") retval.extend(properties_bytes) retval.extend(b'}}}') return retval
[ "def", "_object_instance_content", "(", "obj", ")", ":", "retval", "=", "bytearray", "(", ")", "if", "obj", "is", "None", ":", "return", "b'N.'", "if", "isinstance", "(", "obj", ",", "SCons", ".", "Util", ".", "BaseStringTypes", ")", ":", "return", "SCon...
34.215686
23.078431
def lgammln(xx): """ Returns the gamma function of xx. Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt. (Adapted from: Numerical Recipies in C.) Usage: lgammln(xx) """ coeff = [76.18009173, -86.50532033, 24.01409822, -1.231739516, 0.120858003e-2, -0.536382e-5] x = xx - 1.0 tmp = x + 5.5 tmp = tmp - (x+0.5)*math.log(tmp) ser = 1.0 for j in range(len(coeff)): x = x + 1 ser = ser + coeff[j]/x return -tmp + math.log(2.50662827465*ser)
[ "def", "lgammln", "(", "xx", ")", ":", "coeff", "=", "[", "76.18009173", ",", "-", "86.50532033", ",", "24.01409822", ",", "-", "1.231739516", ",", "0.120858003e-2", ",", "-", "0.536382e-5", "]", "x", "=", "xx", "-", "1.0", "tmp", "=", "x", "+", "5.5...
25.789474
15.789474
def adapt(self, d, x): """ Adapt weights according one desired value and its input. **Args:** * `d` : desired value (float) * `x` : input array (1-dimensional array) """ y = np.dot(self.w, x) e = d - y nu = self.mu / (self.eps + np.dot(x, x)) self.w += nu * x * e**3
[ "def", "adapt", "(", "self", ",", "d", ",", "x", ")", ":", "y", "=", "np", ".", "dot", "(", "self", ".", "w", ",", "x", ")", "e", "=", "d", "-", "y", "nu", "=", "self", ".", "mu", "/", "(", "self", ".", "eps", "+", "np", ".", "dot", "...
24
17.714286
def stop(self): """Stop the progress bar.""" if self._progressing: self._progressing = False self._thread.join()
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "_progressing", ":", "self", ".", "_progressing", "=", "False", "self", ".", "_thread", ".", "join", "(", ")" ]
29.6
9.6
def find(self, table_name, constraints=None, *, columns=None, order_by=None): """Returns the first record that matches the given criteria. :table_name: the name of the table to search on :constraints: is any construct that can be parsed by SqlWriter.parse_constraints. :columns: either a string or a list of column names :order_by: the order by clause """ query_string, params = self.sql_writer.get_find_all_query( table_name, constraints, columns=columns, order_by=order_by) query_string += " limit 1;" return self.execute(query_string, params).fetchone()
[ "def", "find", "(", "self", ",", "table_name", ",", "constraints", "=", "None", ",", "*", ",", "columns", "=", "None", ",", "order_by", "=", "None", ")", ":", "query_string", ",", "params", "=", "self", ".", "sql_writer", ".", "get_find_all_query", "(", ...
49
18.916667
def set_process_type(self, value): """ Setter for 'process_type' field. :param value - a new value of 'process_type' field. """ if value is None or not isinstance(value, str): raise TypeError("ProcessType must be set to a String") elif value not in Process.__process_type_list: raise ValueError("ProcessType must be one of specified values: 'None', 'Public', 'Private'") else: self.__process_type = value
[ "def", "set_process_type", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", "or", "not", "isinstance", "(", "value", ",", "str", ")", ":", "raise", "TypeError", "(", "\"ProcessType must be set to a String\"", ")", "elif", "value", "not", "in...
44.181818
15.636364
def args(self) -> str: """Provides arguments for the command.""" return '{}{}{}{}{}'.format( to_ascii_hex(self._index, 2), to_ascii_hex(self._group_number, 2), to_ascii_hex(self._unit_number, 2), to_ascii_hex(int(self._enable_status), 4), to_ascii_hex(int(self._switches), 4))
[ "def", "args", "(", "self", ")", "->", "str", ":", "return", "'{}{}{}{}{}'", ".", "format", "(", "to_ascii_hex", "(", "self", ".", "_index", ",", "2", ")", ",", "to_ascii_hex", "(", "self", ".", "_group_number", ",", "2", ")", ",", "to_ascii_hex", "(",...
43.125
7.75
def _filter_validate(filepath, location, values, validate): """Generator for validate() results called against all given values. On errors, fields are warned about and ignored, unless strict mode is set in which case a compiler error is raised. """ for value in values: if not isinstance(value, dict): warn_invalid(filepath, location, value, '(expected a dict)') continue try: yield validate(**value) except dbt.exceptions.JSONValidationException as exc: # we don't want to fail the full run, but we do want to fail # parsing this file warn_invalid(filepath, location, value, '- ' + exc.msg) continue
[ "def", "_filter_validate", "(", "filepath", ",", "location", ",", "values", ",", "validate", ")", ":", "for", "value", "in", "values", ":", "if", "not", "isinstance", "(", "value", ",", "dict", ")", ":", "warn_invalid", "(", "filepath", ",", "location", ...
44.5625
16.8125
def max_repetition_level(self, path): """Get the max repetition level for the given schema path.""" max_level = 0 for part in path: element = self.schema_element(part) if element.repetition_type == parquet_thrift.FieldRepetitionType.REQUIRED: max_level += 1 return max_level
[ "def", "max_repetition_level", "(", "self", ",", "path", ")", ":", "max_level", "=", "0", "for", "part", "in", "path", ":", "element", "=", "self", ".", "schema_element", "(", "part", ")", "if", "element", ".", "repetition_type", "==", "parquet_thrift", "....
42.375
14.5
def schema(self, shex: Optional[Union[str, ShExJ.Schema]]) -> None: """ Set the schema to be used. Schema can either be a ShExC or ShExJ string or a pre-parsed schema. :param shex: Schema """ self.pfx = None if shex is not None: if isinstance(shex, ShExJ.Schema): self._schema = shex else: shext = shex.strip() loader = SchemaLoader() if ('\n' in shex or '\r' in shex) or shext[0] in '#<_: ': self._schema = loader.loads(shex) else: self._schema = loader.load(shex) if isinstance(shex, str) else shex if self._schema is None: raise ValueError("Unable to parse shex file") self.pfx = PrefixLibrary(loader.schema_text)
[ "def", "schema", "(", "self", ",", "shex", ":", "Optional", "[", "Union", "[", "str", ",", "ShExJ", ".", "Schema", "]", "]", ")", "->", "None", ":", "self", ".", "pfx", "=", "None", "if", "shex", "is", "not", "None", ":", "if", "isinstance", "(",...
44.052632
16
def tofile(self, f): """Serialize this ScalableBloomFilter into the file-object `f'.""" f.write(pack(self.FILE_FMT, self.scale, self.ratio, self.initial_capacity, self.error_rate)) # Write #-of-filters f.write(pack(b'<l', len(self.filters))) if len(self.filters) > 0: # Then each filter directly, with a header describing # their lengths. headerpos = f.tell() headerfmt = b'<' + b'Q'*(len(self.filters)) f.write(b'.' * calcsize(headerfmt)) filter_sizes = [] for filter in self.filters: begin = f.tell() filter.tofile(f) filter_sizes.append(f.tell() - begin) f.seek(headerpos) f.write(pack(headerfmt, *filter_sizes))
[ "def", "tofile", "(", "self", ",", "f", ")", ":", "f", ".", "write", "(", "pack", "(", "self", ".", "FILE_FMT", ",", "self", ".", "scale", ",", "self", ".", "ratio", ",", "self", ".", "initial_capacity", ",", "self", ".", "error_rate", ")", ")", ...
35.695652
14.608696
def pop(self, key, *args, **kwargs): """Remove and return the value associated with case-insensitive ``key``.""" return super(CaseInsensitiveDict, self).pop(CaseInsensitiveStr(key))
[ "def", "pop", "(", "self", ",", "key", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "super", "(", "CaseInsensitiveDict", ",", "self", ")", ".", "pop", "(", "CaseInsensitiveStr", "(", "key", ")", ")" ]
65
13.333333
def scatter_plot(data, index_x, index_y, percent=100.0, seed=1, size=50, title=None, outfile=None, wait=True): """ Plots two attributes against each other. TODO: click events http://matplotlib.org/examples/event_handling/data_browser.html :param data: the dataset :type data: Instances :param index_x: the 0-based index of the attribute on the x axis :type index_x: int :param index_y: the 0-based index of the attribute on the y axis :type index_y: int :param percent: the percentage of the dataset to use for plotting :type percent: float :param seed: the seed value to use for subsampling :type seed: int :param size: the size of the circles in point :type size: int :param title: an optional title :type title: str :param outfile: the (optional) file to save the generated plot to. The extension determines the file format. :type outfile: str :param wait: whether to wait for the user to close the plot :type wait: bool """ if not plot.matplotlib_available: logger.error("Matplotlib is not installed, plotting unavailable!") return # create subsample data = plot.create_subsample(data, percent=percent, seed=seed) # collect data x = [] y = [] if data.class_index == -1: c = None else: c = [] for i in range(data.num_instances): inst = data.get_instance(i) x.append(inst.get_value(index_x)) y.append(inst.get_value(index_y)) if c is not None: c.append(inst.get_value(inst.class_index)) # plot data fig, ax = plt.subplots() if c is None: ax.scatter(x, y, s=size, alpha=0.5) else: ax.scatter(x, y, c=c, s=size, alpha=0.5) ax.set_xlabel(data.attribute(index_x).name) ax.set_ylabel(data.attribute(index_y).name) if title is None: title = "Attribute scatter plot" if percent != 100: title += " (%0.1f%%)" % percent ax.set_title(title) ax.plot(ax.get_xlim(), ax.get_ylim(), ls="--", c="0.3") ax.grid(True) fig.canvas.set_window_title(data.relationname) plt.draw() if outfile is not None: plt.savefig(outfile) if wait: plt.show()
[ "def", "scatter_plot", "(", "data", ",", "index_x", ",", "index_y", ",", "percent", "=", "100.0", ",", "seed", "=", "1", ",", "size", "=", "50", ",", "title", "=", "None", ",", "outfile", "=", "None", ",", "wait", "=", "True", ")", ":", "if", "no...
32.537313
20.089552
def query_pager_by_slug(slug, current_page_num=1, tag='', order=False): ''' Query pager via category slug. ''' cat_rec = MCategory.get_by_slug(slug) if cat_rec: cat_id = cat_rec.uid else: return None # The flowing code is valid. if cat_id.endswith('00'): # The first level category, using the code bellow. cat_con = TabPost2Tag.par_id == cat_id else: cat_con = TabPost2Tag.tag_id == cat_id if tag: condition = { 'def_tag_arr': [tag] } recs = TabPost.select().join( TabPost2Tag, on=((TabPost.uid == TabPost2Tag.post_id) & (TabPost.valid == 1)) ).where( cat_con & TabPost.extinfo.contains(condition) ).order_by( TabPost.time_update.desc() ).paginate(current_page_num, CMS_CFG['list_num']) elif order: recs = TabPost.select().join( TabPost2Tag, on=((TabPost.uid == TabPost2Tag.post_id) & (TabPost.valid == 1)) ).where( cat_con ).order_by( TabPost.order.asc() ) else: recs = TabPost.select().join( TabPost2Tag, on=((TabPost.uid == TabPost2Tag.post_id) & (TabPost.valid == 1)) ).where( cat_con ).order_by( TabPost.time_update.desc() ).paginate(current_page_num, CMS_CFG['list_num']) return recs
[ "def", "query_pager_by_slug", "(", "slug", ",", "current_page_num", "=", "1", ",", "tag", "=", "''", ",", "order", "=", "False", ")", ":", "cat_rec", "=", "MCategory", ".", "get_by_slug", "(", "slug", ")", "if", "cat_rec", ":", "cat_id", "=", "cat_rec", ...
32.326531
18.612245
def get_handlers(self, event: str) -> T.List[T.Callable]: """Returns a list of handlers registered for the given event.""" return list(self._events.get(event, []))
[ "def", "get_handlers", "(", "self", ",", "event", ":", "str", ")", "->", "T", ".", "List", "[", "T", ".", "Callable", "]", ":", "return", "list", "(", "self", ".", "_events", ".", "get", "(", "event", ",", "[", "]", ")", ")" ]
44.25
16.25
def grab_sub_repo(repositories, repos): """ Grab SUB_REPOSITORY """ for i, repo in enumerate(repositories): if repos in repo: sub = repositories[i].replace(repos, "") repositories[i] = repos return sub return ""
[ "def", "grab_sub_repo", "(", "repositories", ",", "repos", ")", ":", "for", "i", ",", "repo", "in", "enumerate", "(", "repositories", ")", ":", "if", "repos", "in", "repo", ":", "sub", "=", "repositories", "[", "i", "]", ".", "replace", "(", "repos", ...
26.6
9.8
def check_methods(self, resource): '''Iteratively check all methods (endpoints) in the Resource.''' checker = ResourceMethodChecker() errors = [] for callback in resource.callbacks: new_errors = checker(callback) if new_errors: errors.extend(new_errors) return errors
[ "def", "check_methods", "(", "self", ",", "resource", ")", ":", "checker", "=", "ResourceMethodChecker", "(", ")", "errors", "=", "[", "]", "for", "callback", "in", "resource", ".", "callbacks", ":", "new_errors", "=", "checker", "(", "callback", ")", "if"...
37.666667
11
def topic_present(name, subscriptions=None, attributes=None, region=None, key=None, keyid=None, profile=None): ''' Ensure the SNS topic exists. name Name of the SNS topic. subscriptions List of SNS subscriptions. Each subscription is a dictionary with a protocol and endpoint key: .. code-block:: yaml subscriptions: - Protocol: https Endpoint: https://www.example.com/sns-endpoint - Protocol: sqs Endpoint: arn:aws:sqs:us-west-2:123456789012:MyQueue Additional attributes which may be set on a subscription are: - DeliveryPolicy - FilterPolicy - RawMessageDelivery If provided, they should be passed as key/value pairs within the same dictionaries. E.g. .. code-block:: yaml subscriptions: - Protocol: sqs Endpoint: arn:aws:sqs:us-west-2:123456789012:MyQueue RawMessageDelivery: True attributes Dictionary of attributes to set on the SNS topic Valid attribute keys are: - Policy: the JSON serialization of the topic's access control policy - DisplayName: the human-readable name used in the "From" field for notifications to email and email-json endpoints - DeliveryPolicy: the JSON serialization of the topic's delivery policy region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. 
''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} something_changed = False current = __salt__['boto3_sns.describe_topic'](name, region, key, keyid, profile) if current: ret['comment'] = 'AWS SNS topic {0} present.'.format(name) TopicArn = current['TopicArn'] else: if __opts__['test']: ret['comment'] = 'AWS SNS topic {0} would be created.'.format(name) ret['result'] = None return ret else: TopicArn = __salt__['boto3_sns.create_topic'](name, region=region, key=key, keyid=keyid, profile=profile) if TopicArn: ret['comment'] = 'AWS SNS topic {0} created with ARN {1}.'.format(name, TopicArn) something_changed = True else: ret['comment'] = 'Failed to create AWS SNS topic {0}'.format(name) log.error(ret['comment']) ret['result'] = False return ret ### Update any explicitly defined attributes want_attrs = attributes if attributes else {} # Freshen these in case we just created it above curr_attrs = __salt__['boto3_sns.get_topic_attributes'](TopicArn, region=region, key=key, keyid=keyid, profile=profile) for attr in ['DisplayName', 'Policy', 'DeliveryPolicy']: curr_val = curr_attrs.get(attr) want_val = want_attrs.get(attr) # Some get default values if not set, so it's not safe to enforce absense if they're # not provided at all. This implies that if you want to clear a value, you must explicitly # set it to an empty string. 
if want_val is None: continue if _json_objs_equal(want_val, curr_val): continue if __opts__['test']: ret['comment'] += ' Attribute {} would be updated on topic {}.'.format(attr, TopicArn) ret['result'] = None continue want_val = want_val if isinstance(want_val, six.string_types) else salt.utils.json.dumps(want_val) if __salt__['boto3_sns.set_topic_attributes'](TopicArn, attr, want_val, region=region, key=key, keyid=keyid, profile=profile): ret['comment'] += ' Attribute {0} set to {1} on topic {2}.'.format(attr, want_val, TopicArn) something_changed = True else: ret['comment'] += ' Failed to update {0} on topic {1}.'.format(attr, TopicArn) ret['result'] = False return ret ### Add / remove subscriptions mutable_attrs = ('DeliveryPolicy', 'FilterPolicy', 'RawMessageDelivery') want_subs = subscriptions if subscriptions else [] want_subs = [{k: v for k, v in c.items() if k in ('Protocol', 'Endpoint') or k in mutable_attrs} for c in want_subs] curr_subs = current.get('Subscriptions', []) subscribe = [] unsubscribe = [] want_obfuscated = [] for sub in want_subs: # If the subscription contains inline digest auth, AWS will obfuscate the password # with '****'. Thus we need to do the same with ours to permit 1-to-1 comparison. # Example: https://user:****@my.endpoiint.com/foo/bar endpoint = sub['Endpoint'] matches = re.search(r'http[s]?://(?P<user>\w+):(?P<pass>\w+)@', endpoint) if matches is not None: sub['Endpoint'] = endpoint.replace(':' + matches.groupdict()['pass'], ':****') want_obfuscated += [{'Protocol': sub['Protocol'], 'Endpoint': sub['Endpoint']}] if sub not in curr_subs: sub['obfuscated'] = sub['Endpoint'] sub['Endpoint'] = endpoint # Set it back to the unobfuscated value. 
subscribe += [sub] for sub in curr_subs: if {'Protocol': sub['Protocol'], 'Endpoint': sub['Endpoint']} not in want_obfuscated: if sub['SubscriptionArn'].startswith('arn:aws:sns:'): unsubscribe += [sub['SubscriptionArn']] for sub in subscribe: ret = _create_or_update_subscription(ret, sub, curr_subs, mutable_attrs, TopicArn, region, key, keyid, profile) if ret.pop('something_changed', False) is True: something_changed = True for sub in unsubscribe: if __opts__['test']: msg = ' Subscription {} would be removed from topic {}.'.format(sub, TopicArn) ret['comment'] += msg ret['result'] = None continue unsubbed = __salt__['boto3_sns.unsubscribe'](sub, region=region, key=key, keyid=keyid, profile=profile) if unsubbed: ret['comment'] += ' Subscription {0} removed from topic {1}.'.format(sub, TopicArn) something_changed = True else: msg = ' Failed to remove subscription {0} from topic {1}.'.format(sub, TopicArn) ret['comment'] += msg ret['result'] = False return ret if something_changed: ret['changes']['old'] = current ret['changes']['new'] = __salt__['boto3_sns.describe_topic'](name, region, key, keyid, profile) return ret
[ "def", "topic_present", "(", "name", ",", "subscriptions", "=", "None", ",", "attributes", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "{", "'name'", ...
43.067485
25.840491
def get_filter_qobj(self, keys=None): """ Return a copy of this Query object with additional where clauses for the keys in the argument """ # only care about columns in aggregates right? cols = set() for agg in self.select.aggregates: cols.update(agg.cols) sels = [SelectExpr(col, [col], col, None) for col in cols] select = Select(sels) where = list(self.where) if keys: keys = list(keys) keys = map(sqlize, list(keys)) expr = self.select.nonaggs[0].expr clause = [] if None in keys: clause.append("%s is null" % expr) if len([k for k in keys if k is not None]) > 0: clause.append("%s in %%s" % expr) clause = " or ".join(clause) where.append(clause) else: where.append( '%s = %%s' % (self.select.nonaggs[0].expr ) ) q = Query(self.db, select, self.fr, where) return q
[ "def", "get_filter_qobj", "(", "self", ",", "keys", "=", "None", ")", ":", "# only care about columns in aggregates right?", "cols", "=", "set", "(", ")", "for", "agg", "in", "self", ".", "select", ".", "aggregates", ":", "cols", ".", "update", "(", "agg", ...
31.357143
14.571429
def _parse_docstring(docstring): """ Using the sphinx RSTParse to parse __doc__ for argparse `parameters`, `help`, and `description`. The first rst paragraph encountered it treated as the argparse help text. Any param fields are treated as argparse arguments. Any other text is combined and added to the argparse description. example: \""" this will be the summary :param name: describe the parameter called name. this will be the descriptions * more description * more description This will also be in the description \""" :param str docstring: :return: :rtype: dict """ settings = OptionParser(components=(RSTParser,)).get_default_values() rstparser = RSTParser() document = utils.new_document(' ', settings) rstparser.parse(docstring, document) if document.children[0].tagname != 'block_quote': logger.warning("The first line of the docstring must be blank.") else: document = document.children[0] def get_params(field_list_node, params): for field in field_list_node.children: name = field.children[0].rawsource.split(' ') if 'param' == name[0]: params[name[-1]] = field.children[1].astext() method_args = {'summary': '', 'params': dict(), 'description': ''} for node in document.children: if node.tagname is 'paragraph' and method_args['summary'] == '': method_args['summary'] = node.astext() elif node.tagname is 'field_list': get_params(node, method_args['params']) else: method_args['description'] += '\n' + node.astext() return method_args
[ "def", "_parse_docstring", "(", "docstring", ")", ":", "settings", "=", "OptionParser", "(", "components", "=", "(", "RSTParser", ",", ")", ")", ".", "get_default_values", "(", ")", "rstparser", "=", "RSTParser", "(", ")", "document", "=", "utils", ".", "n...
34.979167
20.854167
def _convert_json(obj): ''' Converts from the JSON output provided by ovs-vsctl into a usable Python object tree. In particular, sets and maps are converted from lists to actual sets or maps. Args: obj: Object that shall be recursively converted. Returns: Converted version of object. ''' if isinstance(obj, dict): return {_convert_json(key): _convert_json(val) for (key, val) in six.iteritems(obj)} elif isinstance(obj, list) and len(obj) == 2: first = obj[0] second = obj[1] if first == 'set' and isinstance(second, list): return [_convert_json(elem) for elem in second] elif first == 'map' and isinstance(second, list): for elem in second: if not isinstance(elem, list) or len(elem) != 2: return obj return {elem[0]: _convert_json(elem[1]) for elem in second} else: return obj elif isinstance(obj, list): return [_convert_json(elem) for elem in obj] else: return obj
[ "def", "_convert_json", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "{", "_convert_json", "(", "key", ")", ":", "_convert_json", "(", "val", ")", "for", "(", "key", ",", "val", ")", "in", "six", ".", "iter...
34.354839
21.064516
def idle_task(self): '''called rapidly by mavproxy''' now = time.time() if now-self.last_bored > self.boredom_interval: self.last_bored = now message = self.boredom_message() self.say("%s: %s" % (self.name,message)) # See if whatever we're connected to would like to play: self.master.mav.statustext_send(mavutil.mavlink.MAV_SEVERITY_NOTICE, message)
[ "def", "idle_task", "(", "self", ")", ":", "now", "=", "time", ".", "time", "(", ")", "if", "now", "-", "self", ".", "last_bored", ">", "self", ".", "boredom_interval", ":", "self", ".", "last_bored", "=", "now", "message", "=", "self", ".", "boredom...
46.9
15.3
def app_template_global(self, name: Optional[str]=None) -> Callable: """Add an application wide template global. This is designed to be used as a decorator, and has the same arguments as :meth:`~quart.Quart.template_global`. An example usage, .. code-block:: python blueprint = Blueprint(__name__) @blueprint.app_template_global() def global(value): ... """ def decorator(func: Callable) -> Callable: self.add_app_template_global(func, name=name) return func return decorator
[ "def", "app_template_global", "(", "self", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Callable", ":", "def", "decorator", "(", "func", ":", "Callable", ")", "->", "Callable", ":", "self", ".", "add_app_template_global", "(", ...
34.941176
18.823529
def _get_concatenation(extractors, text, *, ignore_whitespace=True): """Returns a concatenation ParseNode whose children are the nodes returned by each of the methods in the extractors enumerable. If ignore_whitespace is True, whitespace will be ignored and then attached to the child it preceeded. """ ignored_ws, use_text = _split_ignored(text, ignore_whitespace) extractor, *remaining = extractors child = _call_extractor(extractor, use_text) child.add_ignored(ignored_ws) # TODO: Should I set node.position = -len(text) for the case that ignored whitespace will cause # the first child's position to not be the whitespace, and therefore the concatenation's # position will be the first non-whitespace? I think not, but I'm adding this note in # case that causes an issue I'm not seeing at the moment. node = ParseNode(ParseNodeType.concatenation, children=[child]) if remaining: # child.consumed will include ignored whitespace, so we base the text we pass on on text rather # than use_text. return node.merged(_get_concatenation(remaining, text[child.consumed:], ignore_whitespace=ignore_whitespace)) else: return node
[ "def", "_get_concatenation", "(", "extractors", ",", "text", ",", "*", ",", "ignore_whitespace", "=", "True", ")", ":", "ignored_ws", ",", "use_text", "=", "_split_ignored", "(", "text", ",", "ignore_whitespace", ")", "extractor", ",", "", "*", "remaining", ...
44.678571
28.75
def render(self): '''Render a matplotlib figure from the analyzer result Return the figure, use fig.show() to display if neeeded ''' fig, ax = plt.subplots() self.data_object._render_plot(ax) return fig
[ "def", "render", "(", "self", ")", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "self", ".", "data_object", ".", "_render_plot", "(", "ax", ")", "return", "fig" ]
27.444444
23.444444
def list_current_orders(self, bet_ids=None, market_ids=None, order_projection=None, customer_order_refs=None, customer_strategy_refs=None, date_range=time_range(), order_by=None, sort_dir=None, from_record=None, record_count=None, session=None, lightweight=None): """ Returns a list of your current orders. :param list bet_ids: If you ask for orders, restricts the results to orders with the specified bet IDs :param list market_ids: One or more market ids :param str order_projection: Optionally restricts the results to the specified order status :param list customer_order_refs: Optionally restricts the results to the specified customer order references :param list customer_strategy_refs: Optionally restricts the results to the specified customer strategy references :param dict date_range: Optionally restricts the results to be from/to the specified date, these dates are contextual to the orders being returned and therefore the dates used to filter on will change to placed, matched, voided or settled dates depending on the orderBy :param str order_by: Specifies how the results will be ordered. If no value is passed in, it defaults to BY_BET :param str sort_dir: Specifies the direction the results will be sorted in :param int from_record: Specifies the first record that will be returned :param int record_count: Specifies how many records will be returned from the index position 'fromRecord' :param requests.session session: Requests session object :param bool lightweight: If True will return dict not a resource :rtype: resources.CurrentOrders """ params = clean_locals(locals()) method = '%s%s' % (self.URI, 'listCurrentOrders') (response, elapsed_time) = self.request(method, params, session) return self.process_response(response, resources.CurrentOrders, elapsed_time, lightweight)
[ "def", "list_current_orders", "(", "self", ",", "bet_ids", "=", "None", ",", "market_ids", "=", "None", ",", "order_projection", "=", "None", ",", "customer_order_refs", "=", "None", ",", "customer_strategy_refs", "=", "None", ",", "date_range", "=", "time_range...
72.107143
41.607143
def generate(self): """ Fetch all rows associated with this experiment. This will generate a huge .csv. """ exp_name = self.exp_name() fname = os.path.basename(self.out_path) fname = "{exp}_{prefix}_{name}{ending}".format( exp=exp_name, prefix=os.path.splitext(fname)[0], ending=os.path.splitext(fname)[-1], name="full") first = True for chunk in self.report(): print("Writing chunk to :'{0}'".format(fname)) chunk.to_csv(fname, header=first, mode='a') first = False
[ "def", "generate", "(", "self", ")", ":", "exp_name", "=", "self", ".", "exp_name", "(", ")", "fname", "=", "os", ".", "path", ".", "basename", "(", "self", ".", "out_path", ")", "fname", "=", "\"{exp}_{prefix}_{name}{ending}\"", ".", "format", "(", "exp...
31.894737
13.789474
def write_config(path, config): """ Write the config with a little post-converting formatting. """ config_as_string = to_nice_yaml(config) config_as_string = "---\n" + config_as_string string_to_file(path, config_as_string)
[ "def", "write_config", "(", "path", ",", "config", ")", ":", "config_as_string", "=", "to_nice_yaml", "(", "config", ")", "config_as_string", "=", "\"---\\n\"", "+", "config_as_string", "string_to_file", "(", "path", ",", "config_as_string", ")" ]
26.777778
13.888889
def path_to_str(path): """ Convert pathlib.Path objects to str; return other objects as-is. """ try: from pathlib import Path as _Path except ImportError: # Python < 3.4 class _Path: pass if isinstance(path, _Path): return str(path) return path
[ "def", "path_to_str", "(", "path", ")", ":", "try", ":", "from", "pathlib", "import", "Path", "as", "_Path", "except", "ImportError", ":", "# Python < 3.4", "class", "_Path", ":", "pass", "if", "isinstance", "(", "path", ",", "_Path", ")", ":", "return", ...
29.2
14.6
def get_extract_method(path): """Returns `ExtractMethod` to use on resource at path. Cannot be None.""" info_path = _get_info_path(path) info = _read_info(info_path) fname = info.get('original_fname', path) if info else path return _guess_extract_method(fname)
[ "def", "get_extract_method", "(", "path", ")", ":", "info_path", "=", "_get_info_path", "(", "path", ")", "info", "=", "_read_info", "(", "info_path", ")", "fname", "=", "info", ".", "get", "(", "'original_fname'", ",", "path", ")", "if", "info", "else", ...
44.166667
8.333333
def fetch_task_to_run(self): """ Returns the first task that is ready to run or None if no task can be submitted at present" Raises: `StopIteration` if all tasks are done. """ # All the tasks are done so raise an exception # that will be handled by the client code. if all(task.is_completed for task in self): raise StopIteration("All tasks completed.") for task in self: if task.can_run: return task # No task found, this usually happens when we have dependencies. # Beware of possible deadlocks here! logger.warning("Possible deadlock in fetch_task_to_run!") return None
[ "def", "fetch_task_to_run", "(", "self", ")", ":", "# All the tasks are done so raise an exception", "# that will be handled by the client code.", "if", "all", "(", "task", ".", "is_completed", "for", "task", "in", "self", ")", ":", "raise", "StopIteration", "(", "\"All...
33.857143
17.380952
def is_running(self) -> bool: """Return True if ffmpeg is running.""" if self._proc is None or self._proc.returncode is not None: return False return True
[ "def", "is_running", "(", "self", ")", "->", "bool", ":", "if", "self", ".", "_proc", "is", "None", "or", "self", ".", "_proc", ".", "returncode", "is", "not", "None", ":", "return", "False", "return", "True" ]
37.2
15
def same_page_choosen(form, field): """Check that we are not trying to assign list page itself as a child.""" if form._obj is not None: if field.data.id == form._obj.list_id: raise ValidationError( _('You cannot assign list page itself as a child.'))
[ "def", "same_page_choosen", "(", "form", ",", "field", ")", ":", "if", "form", ".", "_obj", "is", "not", "None", ":", "if", "field", ".", "data", ".", "id", "==", "form", ".", "_obj", ".", "list_id", ":", "raise", "ValidationError", "(", "_", "(", ...
48.166667
9.333333