text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def main(args=sys.argv):
    """Entry point for the manifest CLI.

    Dispatches to the sub-command whose name starts with ``args[1]``
    (prefix abbreviations are accepted, e.g. ``cr`` for ``create``).
    Returns the result of the sub-command, or of ``usage()`` when the
    command is missing or unknown.
    """
    if len(args) < 2:
        return usage("Command expected")
    command, rest = args[1], args[2:]
    # Checked in declaration order, so an ambiguous (or empty) prefix
    # resolves to the first match, exactly as the original if/elif chain.
    if "create".startswith(command):
        return cli_create(rest)
    if "query".startswith(command):
        return cli_query(rest)
    if "verify".startswith(command):
        return cli_verify(rest)
    return usage("Unknown command: %s" % command)
[ "def", "main", "(", "args", "=", "sys", ".", "argv", ")", ":", "if", "len", "(", "args", ")", "<", "2", ":", "return", "usage", "(", "\"Command expected\"", ")", "command", "=", "args", "[", "1", "]", "rest", "=", "args", "[", "2", ":", "]", "i...
23.421053
14.578947
def _unmarshal_parts(pkg_reader, package, part_factory): """ Return a dictionary of |Part| instances unmarshalled from *pkg_reader*, keyed by partname. Side-effect is that each part in *pkg_reader* is constructed using *part_factory*. """ parts = {} for partname, content_type, reltype, blob in pkg_reader.iter_sparts(): parts[partname] = part_factory( partname, content_type, reltype, blob, package ) return parts
[ "def", "_unmarshal_parts", "(", "pkg_reader", ",", "package", ",", "part_factory", ")", ":", "parts", "=", "{", "}", "for", "partname", ",", "content_type", ",", "reltype", ",", "blob", "in", "pkg_reader", ".", "iter_sparts", "(", ")", ":", "parts", "[", ...
42.25
18.583333
def _scan(
    self,
    fs,  # type: FS
    dir_path,  # type: Text
    namespaces=None,  # type: Optional[Collection[Text]]
):
    # type: (...) -> Iterator[Info]
    """Yield `Info` objects for the resources directly under *dir_path*.

    Arguments:
        fs (FS): Filesystem instance to scan.
        dir_path (str): Directory path on *fs*.
        namespaces (list): Additional namespaces to include in the
            `Info` objects.

    Yields:
        ~collections.Iterator: `Info` objects for resources within the
        given path.

    Any `FSError` is handed to ``self.on_error``; if that handler
    returns a falsy value the original exception is re-raised.
    """
    try:
        entries = fs.scandir(dir_path, namespaces=namespaces)
        for entry in entries:
            yield entry
    except FSError as error:
        if not self.on_error(dir_path, error):
            six.reraise(type(error), error)
[ "def", "_scan", "(", "self", ",", "fs", ",", "# type: FS", "dir_path", ",", "# type: Text", "namespaces", "=", "None", ",", "# type: Optional[Collection[Text]]", ")", ":", "# type: (...) -> Iterator[Info]", "try", ":", "for", "info", "in", "fs", ".", "scandir", ...
33.038462
18.846154
def update_subnetpool(self, subnetpool, body=None):
    """Update the subnetpool identified by *subnetpool* via HTTP PUT."""
    path = self.subnetpool_path % (subnetpool)
    return self.put(path, body=body)
[ "def", "update_subnetpool", "(", "self", ",", "subnetpool", ",", "body", "=", "None", ")", ":", "return", "self", ".", "put", "(", "self", ".", "subnetpool_path", "%", "(", "subnetpool", ")", ",", "body", "=", "body", ")" ]
52.333333
14
def dataframe(self):
    """
    Returns a pandas DataFrame containing all other class properties
    and values. The index for the DataFrame is the string URI that
    is used to instantiate the class, such as '201806070VEG'.

    Returns ``None`` when neither team's goal total has been parsed,
    which indicates no boxscore data was loaded for this game.
    """
    # No parsed data at all -> nothing meaningful to frame.
    if self._away_goals is None and self._home_goals is None:
        return None
    # One column per property; values are whatever each property returns
    # (types are defined by the individual property implementations).
    fields_to_include = {
        'arena': self.arena,
        'attendance': self.attendance,
        'away_assists': self.away_assists,
        'away_even_strength_assists': self.away_even_strength_assists,
        'away_even_strength_goals': self.away_even_strength_goals,
        'away_game_winning_goals': self.away_game_winning_goals,
        'away_goals': self.away_goals,
        'away_penalties_in_minutes': self.away_penalties_in_minutes,
        'away_points': self.away_points,
        'away_power_play_assists': self.away_power_play_assists,
        'away_power_play_goals': self.away_power_play_goals,
        'away_save_percentage': self.away_save_percentage,
        'away_saves': self.away_saves,
        'away_shooting_percentage': self.away_shooting_percentage,
        'away_short_handed_assists': self.away_short_handed_assists,
        'away_short_handed_goals': self.away_short_handed_goals,
        'away_shots_on_goal': self.away_shots_on_goal,
        'away_shutout': self.away_shutout,
        'date': self.date,
        'duration': self.duration,
        'home_assists': self.home_assists,
        'home_even_strength_assists': self.home_even_strength_assists,
        'home_even_strength_goals': self.home_even_strength_goals,
        'home_game_winning_goals': self.home_game_winning_goals,
        'home_goals': self.home_goals,
        'home_penalties_in_minutes': self.home_penalties_in_minutes,
        'home_points': self.home_points,
        'home_power_play_assists': self.home_power_play_assists,
        'home_power_play_goals': self.home_power_play_goals,
        'home_save_percentage': self.home_save_percentage,
        'home_saves': self.home_saves,
        'home_shooting_percentage': self.home_shooting_percentage,
        'home_short_handed_assists': self.home_short_handed_assists,
        'home_short_handed_goals': self.home_short_handed_goals,
        'home_shots_on_goal': self.home_shots_on_goal,
        'home_shutout': self.home_shutout,
        'losing_abbr': self.losing_abbr,
        'losing_name': self.losing_name,
        'time': self.time,
        'winner': self.winner,
        'winning_abbr': self.winning_abbr,
        'winning_name': self.winning_name
    }
    # Single-row frame indexed by the boxscore URI used to build the class.
    return pd.DataFrame([fields_to_include], index=[self._uri])
[ "def", "dataframe", "(", "self", ")", ":", "if", "self", ".", "_away_goals", "is", "None", "and", "self", ".", "_home_goals", "is", "None", ":", "return", "None", "fields_to_include", "=", "{", "'arena'", ":", "self", ".", "arena", ",", "'attendance'", "...
52.056604
17.490566
def common_values_dict():
    """Return the base field values shared by every create method.

    Every resource carries the same subset of values (id, timestamps,
    etag); building them here keeps that logic in exactly one place.
    """
    timestamp = datetime.datetime.utcnow().isoformat()
    etag = utils.gen_etag()
    return {
        'id': utils.gen_uuid(),
        'created_at': timestamp,
        'updated_at': timestamp,
        'etag': etag,
    }
[ "def", "common_values_dict", "(", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", "etag", "=", "utils", ".", "gen_etag", "(", ")", "values", "=", "{", "'id'", ":", "utils", ".", "gen_uuid", "(",...
27.470588
20.058824
def printed_out(self, name):
    """Render this APIObject and its child actions as an ASCII tree."""
    id_suffix = '[{}]'.format(self._id_variable) if self._id_variable else ''
    lines = ['|\n', '|---{}{}\n'.format(name, id_suffix)]
    if self._description:
        lines.append('| | {}\n'.format(self._description))
    for action_name, action in self._actions.items():
        lines.append(action.printed_out(action_name))
    return ''.join(lines)
[ "def", "printed_out", "(", "self", ",", "name", ")", ":", "out", "=", "''", "out", "+=", "'|\\n'", "if", "self", ".", "_id_variable", ":", "subs", "=", "'[{}]'", ".", "format", "(", "self", ".", "_id_variable", ")", "else", ":", "subs", "=", "''", ...
31.875
13.75
def uniq_to_level_ipix(uniq):
    """Convert HEALPix cell uniq number(s) to their (level, ipix) form.

    A uniq number packs a cell into one 64-bit integer equal to
    ``ipix + 4 * (4 ** level)``; see the IVOA MOC 1.0 recommendation
    (http://ivoa.net/documents/MOC/20140602/REC-MOC-1.0-20140602.pdf).

    Parameters
    ----------
    uniq : int
        The uniq number of a HEALPix cell.

    Returns
    -------
    level, ipix : int, int
        The level and index of the HEALPix cell computed from ``uniq``.
    """
    uniq = np.asarray(uniq, dtype=np.int64)
    # floor(log4(uniq / 4)) expressed via log2: log2(x)/2, floored.
    level = ((np.log2(uniq // 4)) // 2).astype(np.int64)
    _validate_level(level)
    # Strip the 4 * 4**level offset (== 1 << 2*(level+1)) to get ipix.
    ipix = uniq - (1 << 2 * (level + 1))
    _validate_npix(level, ipix)
    return level, ipix
[ "def", "uniq_to_level_ipix", "(", "uniq", ")", ":", "uniq", "=", "np", ".", "asarray", "(", "uniq", ",", "dtype", "=", "np", ".", "int64", ")", "level", "=", "(", "np", ".", "log2", "(", "uniq", "//", "4", ")", ")", "//", "2", "level", "=", "le...
26.892857
21.821429
def describe_volumes(self, xml_bytes):
    """Parse the XML returned by the C{DescribeVolumes} function.

    @param xml_bytes: XML bytes with a C{DescribeVolumesResponse} root
        element.
    @return: A list of L{Volume} instances, each carrying its
        L{Attachment} records.

    TODO: attachementSetItemResponseType#deleteOnTermination
    """
    time_format = "%Y-%m-%dT%H:%M:%S"
    root = XML(xml_bytes)
    volumes = []
    for volume_node in root.find("volumeSet"):
        # Timestamps carry sub-second/zone suffixes; keep only the
        # first 19 chars (YYYY-mm-ddTHH:MM:SS) before parsing.
        create_time = datetime.strptime(
            volume_node.findtext("createTime")[:19], time_format)
        volume = model.Volume(
            volume_node.findtext("volumeId"),
            int(volume_node.findtext("size")),
            volume_node.findtext("status"),
            create_time,
            volume_node.findtext("availabilityZone"),
            volume_node.findtext("snapshotId"))
        volumes.append(volume)
        for attachment_node in volume_node.find("attachmentSet"):
            attach_time = datetime.strptime(
                attachment_node.findtext("attachTime")[:19], time_format)
            attachment = model.Attachment(
                attachment_node.findtext("instanceId"),
                attachment_node.findtext("device"),
                attachment_node.findtext("status"),
                attach_time)
            volume.attachments.append(attachment)
    return volumes
[ "def", "describe_volumes", "(", "self", ",", "xml_bytes", ")", ":", "root", "=", "XML", "(", "xml_bytes", ")", "result", "=", "[", "]", "for", "volume_data", "in", "root", ".", "find", "(", "\"volumeSet\"", ")", ":", "volume_id", "=", "volume_data", ".",...
47.057143
17.228571
def buildconfig_update(orig, new, remove_nonexistent_keys=False):
    """Merge BuildConfig *new* into BuildConfig *orig* in place.

    Both BuildConfigs are plain ``dict``s.  This function:
    - adds to *orig* every key/value pair it is missing from *new*
    - replaces values in *orig* for keys present in both
    - removes keys from *orig* that are absent from *new*, but only in
      dicts nested inside the ``strategy`` key (see
      https://github.com/projectatomic/osbs-client/pull/273#issuecomment-148038314)
    """
    if not (isinstance(orig, dict) and isinstance(new, dict)):
        return
    clean_triggers(orig, new)
    if remove_nonexistent_keys:
        for stale_key in set(orig.keys()) - set(new.keys()):
            orig.pop(stale_key)
    for key, value in new.items():
        if key == 'strategy':
            # Everything nested under 'strategy' is pruned to match *new*.
            remove_nonexistent_keys = True
        if isinstance(orig.get(key), dict) and isinstance(value, dict):
            buildconfig_update(orig[key], value, remove_nonexistent_keys)
        else:
            orig[key] = value
[ "def", "buildconfig_update", "(", "orig", ",", "new", ",", "remove_nonexistent_keys", "=", "False", ")", ":", "if", "isinstance", "(", "orig", ",", "dict", ")", "and", "isinstance", "(", "new", ",", "dict", ")", ":", "clean_triggers", "(", "orig", ",", "...
45.791667
17.833333
def OnTextColorDialog(self, event):
    """Event handler for launching text color dialog"""
    dlg = wx.ColourDialog(self.main_window)
    # Request the full colour picker rather than the abbreviated one.
    dlg.GetColourData().SetChooseFull(True)
    if dlg.ShowModal() == wx.ID_OK:
        chosen = dlg.GetColourData().GetColour().GetRGB()
        post_command_event(self.main_window, self.main_window.TextColorMsg,
                           color=chosen)
    dlg.Destroy()
[ "def", "OnTextColorDialog", "(", "self", ",", "event", ")", ":", "dlg", "=", "wx", ".", "ColourDialog", "(", "self", ".", "main_window", ")", "# Ensure the full colour dialog is displayed,", "# not the abbreviated version.", "dlg", ".", "GetColourData", "(", ")", "....
30.210526
18.578947
def isBridgeFiltered(self):
    """Check for an IEEE 802.1D MAC Bridge Filtered MAC Group Address.

    The range is 01-80-C2-00-00-00 to 01-80-C2-00-00-0F; frames whose
    destination falls in it are not relayed by 802.1D-conformant
    bridges.
    """
    fixed_prefix = (0x01, 0x80, 0xC2, 0x00, 0x00)
    prefix_matches = all(
        self.__value[i] == expected for i, expected in enumerate(fixed_prefix))
    return prefix_matches and (self.__value[5] <= 0x0F)
[ "def", "isBridgeFiltered", "(", "self", ")", ":", "return", "(", "(", "self", ".", "__value", "[", "0", "]", "==", "0x01", ")", "and", "(", "self", ".", "__value", "[", "1", "]", "==", "0x80", ")", "and", "(", "self", ".", "__value", "[", "2", ...
39.785714
12.642857
def watts2pascal(watts, cfm, fan_tot_eff):
    """Convert fan power in watts into E+ inputs (pascal and m3/s)."""
    return bhp2pascal(watts2bhp(watts), cfm, fan_tot_eff)
[ "def", "watts2pascal", "(", "watts", ",", "cfm", ",", "fan_tot_eff", ")", ":", "bhp", "=", "watts2bhp", "(", "watts", ")", "return", "bhp2pascal", "(", "bhp", ",", "cfm", ",", "fan_tot_eff", ")" ]
43.25
5
def process_document(self, doc):
    """Run every extractor in ``self.e_list`` over the target_text segment.

    Each extraction result is stored back on *doc* under the extractor's
    name.  Returns an empty list, as the processing API expects.
    """
    segment = doc.select_segments("target_text")[0]
    for extractor in self.e_list:
        doc.store(doc.extract(extractor, segment), extractor.name)
    return []
[ "def", "process_document", "(", "self", ",", "doc", ")", ":", "segment", "=", "doc", ".", "select_segments", "(", "\"target_text\"", ")", "[", "0", "]", "for", "e", "in", "self", ".", "e_list", ":", "res", "=", "doc", ".", "extract", "(", "e", ",", ...
25.727273
13.545455
def remove(self, rel_path, propagate=False):
    '''Delete the file from the cache, and from the upstream'''
    if not self.upstream:
        raise Exception("CompressionCache must have an upstream")
    # This cache is really just a filter, so removal always propagates.
    self.upstream.remove(self._rename(rel_path), propagate)
    # Also drop the uncompressed copy a previous get() may have left.
    self.upstream.remove(os.path.join('uncompressed', rel_path))
[ "def", "remove", "(", "self", ",", "rel_path", ",", "propagate", "=", "False", ")", ":", "if", "not", "self", ".", "upstream", ":", "raise", "Exception", "(", "\"CompressionCache must have an upstream\"", ")", "# Must always propagate, since this is really just a filter...
36.846154
23.461538
def disk(x, y, height, gaussian_width):
    """Circular disk with a Gaussian fall-off outside the solid
    central region of diameter *height*."""
    radius = height / 2.0
    distance_outside = np.sqrt(x ** 2 + y ** 2) - radius
    sigma_sq = gaussian_width * gaussian_width
    if sigma_sq == 0.0:
        # Degenerate Gaussian: zero everywhere outside the disk
        # (keeps the array shape of x).
        falloff = x * 0.0
    else:
        with float_error_ignore():
            falloff = np.exp(np.divide(
                -distance_outside * distance_outside, 2 * sigma_sq))
    return np.where(distance_outside <= 0, 1.0, falloff)
[ "def", "disk", "(", "x", ",", "y", ",", "height", ",", "gaussian_width", ")", ":", "disk_radius", "=", "height", "/", "2.0", "distance_from_origin", "=", "np", ".", "sqrt", "(", "x", "**", "2", "+", "y", "**", "2", ")", "distance_outside_disk", "=", ...
31.944444
18.611111
def dict_merge(dct, merge_dct):
    """Recursively merge ``merge_dct`` into ``dct`` in place.

    Unlike :meth:`dict.update`, nested dicts are descended into and
    merged key-by-key at arbitrary depth instead of being replaced
    wholesale.

    :param dct: dict onto which the merge is executed
    :param merge_dct: dct merged into dct
    :return: None
    """
    for key, value in merge_dct.items():
        both_are_dicts = (key in dct
                          and isinstance(dct[key], dict)
                          and isinstance(value, dict))
        if both_are_dicts:
            dict_merge(dct[key], value)
        else:
            dct[key] = value
[ "def", "dict_merge", "(", "dct", ",", "merge_dct", ")", ":", "for", "k", ",", "v", "in", "merge_dct", ".", "items", "(", ")", ":", "if", "(", "k", "in", "dct", "and", "isinstance", "(", "dct", "[", "k", "]", ",", "dict", ")", "and", "isinstance",...
40.357143
16.928571
def pre_save(self, instance, add):
    """Auto-generate the slug if needed."""
    # Slug as currently entered on the instance.
    current = self.value_from_object(instance)
    slug = None
    # Auto-populate unless the form already supplied a value.  There is
    # no unique_with logic here (use django-autoslug for that); this
    # field only accepts parameters the form widget also accepts.
    if self.populate_from and (self.always_update or not current):
        current = getattr(instance, self.populate_from)
    if current:
        # Slugify even manually entered input, and honour max_length.
        slug = self.slugify(force_text(current))
        if self.max_length < len(slug):
            slug = slug[:self.max_length]
    # Expose the updated slug as an instance attribute as well.
    setattr(instance, self.name, slug)
    return slug
[ "def", "pre_save", "(", "self", ",", "instance", ",", "add", ")", ":", "# get currently entered slug", "value", "=", "self", ".", "value_from_object", "(", "instance", ")", "slug", "=", "None", "# auto populate (if the form didn't do that already).", "# If you want uniq...
37.68
16
def sliding_tensor(mv_time_series, width, step, order='F'):
    '''Segment a multivariate time series with a sliding window.

    Parameters
    ----------
    mv_time_series : array like shape [n_samples, n_variables]
        multivariate time series or sequence
    width : int > 0
        segment width in samples
    step : int > 0
        step size for sliding in samples
    order : str
        memory-layout flag forwarded to ``sliding_window``

    Returns
    -------
    data : array like shape [n_segments, width, n_variables]
        segmented multivariate time series data
    '''
    n_variables = mv_time_series.shape[1]
    # Window each variable independently, then stack along a new axis.
    per_variable = [
        sliding_window(mv_time_series[:, col], width, step, order)
        for col in range(n_variables)
    ]
    return np.stack(per_variable, axis=2)
[ "def", "sliding_tensor", "(", "mv_time_series", ",", "width", ",", "step", ",", "order", "=", "'F'", ")", ":", "D", "=", "mv_time_series", ".", "shape", "[", "1", "]", "data", "=", "[", "sliding_window", "(", "mv_time_series", "[", ":", ",", "j", "]", ...
31.047619
21.904762
def add_link(app, pagename, templatename, context, doctree):
    """Add the slides link to the HTML context."""
    # The link is only shown when it is enabled in config AND the
    # builder can resolve an output filename.
    can_link = (app.config.slide_link_html_to_slides
                and hasattr(app.builder, 'get_outfilename'))
    context['show_slidelink'] = can_link
    if can_link:
        context['slide_path'] = slide_path(app.builder, pagename)
[ "def", "add_link", "(", "app", ",", "pagename", ",", "templatename", ",", "context", ",", "doctree", ")", ":", "# we can only show the slidelink if we can resolve the filename", "context", "[", "'show_slidelink'", "]", "=", "(", "app", ".", "config", ".", "slide_lin...
37.090909
19.636364
def show(close=None):
    """Show all figures as SVG/PNG payloads sent to the IPython clients.

    Parameters
    ----------
    close : bool, optional
        If true, a ``plt.close('all')`` call is automatically issued
        after sending all the figures.  If this is set, the figures are
        entirely removed from the internal list of figures.
    """
    if close is None:
        close = InlineBackend.instance().close_figures
    try:
        for manager in Gcf.get_all_fig_managers():
            send_figure(manager.canvas.figure)
    finally:
        # Reset the draw queue even if sending a figure failed.
        show._to_draw = []
        if close:
            matplotlib.pyplot.close('all')
[ "def", "show", "(", "close", "=", "None", ")", ":", "if", "close", "is", "None", ":", "close", "=", "InlineBackend", ".", "instance", "(", ")", ".", "close_figures", "try", ":", "for", "figure_manager", "in", "Gcf", ".", "get_all_fig_managers", "(", ")",...
33.473684
18.894737
def _multihop_xml(self, **kwargs): """Build BGP multihop XML. Do not use this method directly. You probably want ``multihop``. Args: rbridge_id (str): The rbridge ID of the device on which BGP will be configured in a VCS fabric. neighbor (ipaddress.ip_interface): `ip_interface` object containing peer IP address (IPv4 or IPv6). count (str): Number of hops to allow. (1-255) Returns: ``ElementTree``: XML for configuring BGP multihop. Raises: KeyError: if any arg is not specified. Examples: >>> import pynos.device >>> from ipaddress import ip_interface >>> conn = ('10.24.39.230', '22') >>> auth = ('admin', 'password') >>> with pynos.device.Device(conn=conn, auth=auth) as dev: ... dev.bgp._multihop_xml(neighbor=ip_interface(unicode( ... '10.10.10.10')), count='5', vrf='default', rbridge_id='1') ... dev.bgp._multihop_xml( ... ip='10.10.10.10') # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): KeyError """ ip_addr = kwargs.pop('neighbor') ip = str(ip_addr.ip) rbr_ns = 'urn:brocade.com:mgmt:brocade-rbridge' bgp_ns = 'urn:brocade.com:mgmt:brocade-bgp' config = ET.Element('config') ele = ET.SubElement(config, 'rbridge-id', xmlns=rbr_ns) ET.SubElement(ele, 'rbridge-id').text = kwargs.pop('rbridge_id') ele = ET.SubElement(ele, 'router') ele = ET.SubElement(ele, 'router-bgp', xmlns=bgp_ns) ele = ET.SubElement(ele, 'router-bgp-attributes') ele = ET.SubElement(ele, 'neighbor') if ip_addr.version == 4: ele = ET.SubElement(ele, 'neighbor-ips') ele = ET.SubElement(ele, 'neighbor-addr') ET.SubElement(ele, 'router-bgp-neighbor-address').text = ip else: ele = ET.SubElement(ele, 'neighbor-ipv6s') ele = ET.SubElement(ele, 'neighbor-ipv6-addr') ET.SubElement(ele, 'router-bgp-neighbor-ipv6-address').text = ip ele = ET.SubElement(ele, 'ebgp-multihop') ET.SubElement(ele, 'ebgp-multihop-count').text = kwargs.pop('count') return config
[ "def", "_multihop_xml", "(", "self", ",", "*", "*", "kwargs", ")", ":", "ip_addr", "=", "kwargs", ".", "pop", "(", "'neighbor'", ")", "ip", "=", "str", "(", "ip_addr", ".", "ip", ")", "rbr_ns", "=", "'urn:brocade.com:mgmt:brocade-rbridge'", "bgp_ns", "=", ...
43.566038
19.283019
def srcmaps(self, **kwargs):
    """Return the name of a source map file."""
    opts = self.base_dict.copy()
    opts.update(**kwargs)
    # Fall back to the computed dataset/component when not given.
    opts['dataset'] = kwargs.get('dataset', self.dataset(**kwargs))
    opts['component'] = kwargs.get('component', self.component(**kwargs))
    self._replace_none(opts)
    localpath = NameFactory.srcmaps_format.format(**opts)
    if kwargs.get('fullpath', False):
        return self.fullpath(localpath=localpath)
    return localpath
[ "def", "srcmaps", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs_copy", "=", "self", ".", "base_dict", ".", "copy", "(", ")", "kwargs_copy", ".", "update", "(", "*", "*", "kwargs", ")", "kwargs_copy", "[", "'dataset'", "]", "=", "kwargs", "....
44.076923
10.615385
def on_for_degrees(self, speed, degrees, brake=True, block=True):
    """Rotate the motor at ``speed`` for ``degrees``.

    ``speed`` can be a percentage or a :class:`ev3dev2.motor.SpeedValue`
    object, enabling use of other units.
    """
    native_speed = self._speed_native_units(speed)
    self._set_rel_position_degrees_and_speed_sp(degrees, native_speed)
    self._set_brake(brake)
    self.run_to_rel_pos()
    if not block:
        return
    # Wait for the motion to start, then for it to finish.
    self.wait_until('running', timeout=WAIT_RUNNING_TIMEOUT)
    self.wait_until_not_moving()
[ "def", "on_for_degrees", "(", "self", ",", "speed", ",", "degrees", ",", "brake", "=", "True", ",", "block", "=", "True", ")", ":", "speed_sp", "=", "self", ".", "_speed_native_units", "(", "speed", ")", "self", ".", "_set_rel_position_degrees_and_speed_sp", ...
37.6
18
def applyColorMap(gray, cmap='flame'):
    '''
    Like ``cv2.applyColorMap(im_gray, cv2.COLORMAP_*)`` but with
    different color maps.

    Parameters
    ----------
    gray : ndarray
        Two-dimensional grey-level image; assumed uint8 — TODO confirm
        (the ``mx = 256`` comment suggests uint16 was once considered).
    cmap : str
        Colour-map name; only ``'flame'`` is implemented.

    Returns
    -------
    ndarray
        BGR image of shape ``gray.shape + (3,)``, dtype uint8.

    Raises
    ------
    NotImplementedError
        If *cmap* is anything other than ``'flame'``.
    '''
    # TODO: implement more cmaps
    if cmap != 'flame':
        # BUG FIX: previously `raise NotImplemented`, which raises a
        # TypeError (NotImplemented is a constant, not an exception).
        raise NotImplementedError
    # TODO: make better
    mx = 256  # if gray.dtype==np.uint8 else 65535
    lut = np.empty(shape=(256, 3))
    cmap = (
        # taken from pyqtgraph GradientEditorItem
        (0, (0, 0, 0)),
        (0.2, (7, 0, 220)),
        (0.5, (236, 0, 134)),
        (0.8, (246, 246, 0)),
        (1.0, (255, 255, 255)))
    # Build the lookup table by linear interpolation between the
    # gradient stops above:
    lastval, lastcol = cmap[0]
    for step, col in cmap[1:]:
        val = int(step * mx)
        for i in range(3):
            lut[lastval:val, i] = np.linspace(
                lastcol[i], col[i], val - lastval)
        lastcol = col
        lastval = val
    s0, s1 = gray.shape
    out = np.empty(shape=(s0, s1, 3), dtype=np.uint8)
    # Apply the table channel by channel.
    for i in range(3):
        out[..., i] = cv2.LUT(gray, lut[:, i])
    return out
[ "def", "applyColorMap", "(", "gray", ",", "cmap", "=", "'flame'", ")", ":", "# TODO:implement more cmaps\r", "if", "cmap", "!=", "'flame'", ":", "raise", "NotImplemented", "# TODO: make better\r", "mx", "=", "256", "# if gray.dtype==np.uint8 else 65535\r", "lut", "=",...
28.428571
17.4
def ParseOptions(cls, options, configuration_object):
    """Parses and validates options.

    Args:
        options (argparse.Namespace): parser options.
        configuration_object (CLITool): object to be configured by the
            argument helper.

    Raises:
        BadConfigObject: when the configuration object is of the wrong
            type.
        BadConfigOption: if the required artifact definitions are not
            defined.
    """
    if not isinstance(configuration_object, tools.CLITool):
        raise errors.BadConfigObject(
            'Configuration object is not an instance of CLITool')
    artifacts_path = getattr(options, 'artifact_definitions_path', None)
    data_location = getattr(configuration_object, '_data_location', None)
    # NOTE(review): the nesting of the probing branches below was
    # reconstructed from upstream plaso layout — confirm against the
    # original file; the flattened source did not preserve indentation.
    if ((not artifacts_path or not os.path.exists(artifacts_path)) and
            data_location):
        # Probe well-known install locations, most specific first.
        artifacts_path = os.path.dirname(data_location)
        artifacts_path = os.path.join(artifacts_path, 'artifacts')
        if not os.path.exists(artifacts_path) and 'VIRTUAL_ENV' in os.environ:
            artifacts_path = os.path.join(
                os.environ['VIRTUAL_ENV'], 'share', 'artifacts')
        if not os.path.exists(artifacts_path):
            artifacts_path = os.path.join(sys.prefix, 'share', 'artifacts')
        if not os.path.exists(artifacts_path):
            artifacts_path = os.path.join(
                sys.prefix, 'local', 'share', 'artifacts')
        # Fall back to system-wide locations when not installed in /usr.
        if sys.prefix != '/usr':
            if not os.path.exists(artifacts_path):
                artifacts_path = os.path.join('/usr', 'share', 'artifacts')
            if not os.path.exists(artifacts_path):
                artifacts_path = os.path.join(
                    '/usr', 'local', 'share', 'artifacts')
        if not os.path.exists(artifacts_path):
            artifacts_path = None
    if not artifacts_path or not os.path.exists(artifacts_path):
        raise errors.BadConfigOption(
            'Unable to determine path to artifact definitions.')
    custom_artifacts_path = getattr(
        options, 'custom_artifact_definitions_path', None)
    if custom_artifacts_path and not os.path.isfile(custom_artifacts_path):
        raise errors.BadConfigOption(
            'No such artifacts filter file: {0:s}.'.format(
                custom_artifacts_path))
    if custom_artifacts_path:
        logger.info(
            'Custom artifact filter file: {0:s}'.format(custom_artifacts_path))
    registry = artifacts_registry.ArtifactDefinitionsRegistry()
    reader = artifacts_reader.YamlArtifactsReader()
    logger.info(
        'Determined artifact definitions path: {0:s}'.format(artifacts_path))
    # Load the standard definitions; a parse failure is a config error.
    try:
        registry.ReadFromDirectory(reader, artifacts_path)
    except (KeyError, artifacts_errors.FormatError) as exception:
        raise errors.BadConfigOption((
            'Unable to read artifact definitions from: {0:s} with error: '
            '{1!s}').format(artifacts_path, exception))
    # Every preprocessing plugin must have a matching definition.
    for name in preprocessors_manager.PreprocessPluginsManager.GetNames():
        if not registry.GetDefinitionByName(name):
            raise errors.BadConfigOption(
                'Missing required artifact definition: {0:s}'.format(name))
    # Custom definitions are layered on top of the standard set.
    if custom_artifacts_path:
        try:
            registry.ReadFromFile(reader, custom_artifacts_path)
        except (KeyError, artifacts_errors.FormatError) as exception:
            raise errors.BadConfigOption((
                'Unable to read artifact definitions from: {0:s} with error: '
                '{1!s}').format(custom_artifacts_path, exception))
    setattr(configuration_object, '_artifact_definitions_path', artifacts_path)
    setattr(
        configuration_object, '_custom_artifacts_path', custom_artifacts_path)
[ "def", "ParseOptions", "(", "cls", ",", "options", ",", "configuration_object", ")", ":", "if", "not", "isinstance", "(", "configuration_object", ",", "tools", ".", "CLITool", ")", ":", "raise", "errors", ".", "BadConfigObject", "(", "'Configuration object is not ...
39.579545
25.204545
def _set_cell_attr(self, selection, table, attr):
    """Set a cell attribute for the key cell and flag the grid dirty.

    Parameters
    ----------
    attr : dict
        Cell attribute mapping; keys include "borderwidth_bottom",
        "borderwidth_right", "bordercolor_bottom", "bordercolor_right",
        "bgcolor", "textfont", "pointsize", "fontweight", "fontstyle",
        "textcolor", "underline", "strikethrough", "angle",
        "column-width", "row-height", "vertical_align",
        "justification", "frozen", "merge_area".
    """
    # The grid is marked as changed even when no selection is supplied.
    post_command_event(self.main_window, self.ContentChangedMsg)
    if selection is None:
        return
    self.code_array.cell_attributes.append((selection, table, attr))
[ "def", "_set_cell_attr", "(", "self", ",", "selection", ",", "table", ",", "attr", ")", ":", "# Mark content as changed", "post_command_event", "(", "self", ".", "main_window", ",", "self", ".", "ContentChangedMsg", ")", "if", "selection", "is", "not", "None", ...
35.727273
22
def pass_bucket(f):
    """Decorator that resolves ``bucket_id`` into a ``Bucket`` kwarg.

    The wrapped view receives ``bucket`` instead of ``bucket_id``;
    the request is aborted with a 404 when the bucket does not exist.
    """
    @wraps(f)
    def decorate(*args, **kwargs):
        bucket = Bucket.get(as_uuid(kwargs.pop('bucket_id')))
        if not bucket:
            abort(404, 'Bucket does not exist.')
        return f(bucket=bucket, *args, **kwargs)
    return decorate
[ "def", "pass_bucket", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "decorate", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "bucket_id", "=", "kwargs", ".", "pop", "(", "'bucket_id'", ")", "bucket", "=", "Bucket", ".", "get", "...
33.3
11.9
def add(self, logical_id, property, value):
    """
    Record that the resource with the given `logical_id` supports
    `property`, and that a reference to ``logical_id.property``
    resolves to `value`.

    Example: "MyApi.Deployment" -> "MyApiDeployment1234567890"

    :param logical_id: Logical ID of the resource (Ex: MyLambdaFunction)
    :param property: Property on the resource that can be referenced (Ex: Alias)
    :param value: Value that this reference resolves to.
    :return: nothing
    :raises ValueError: on empty inputs, non-string values, or a
        duplicate reference for the same logical_id/property pair.
    """
    if not logical_id or not property:
        raise ValueError("LogicalId and property must be a non-empty string")
    if not value or not isinstance(value, string_types):
        raise ValueError("Property value must be a non-empty string")
    properties = self._refs.setdefault(logical_id, {})
    if property in properties:
        raise ValueError("Cannot add second reference value to {}.{} property".format(logical_id, property))
    properties[property] = value
[ "def", "add", "(", "self", ",", "logical_id", ",", "property", ",", "value", ")", ":", "if", "not", "logical_id", "or", "not", "property", ":", "raise", "ValueError", "(", "\"LogicalId and property must be a non-empty string\"", ")", "if", "not", "value", "or", ...
39.392857
27.25
def is_iterable(obj,
                forbid_literals=(str, bytes),
                minimum_length=None,
                maximum_length=None,
                **kwargs):
    """Indicate whether ``obj`` is iterable.

    :param forbid_literals: A collection of literals considered invalid
        even though they are (actually) iterable.  Defaults to
        ``(str, bytes)``.
    :type forbid_literals: iterable

    :param minimum_length: If supplied, the minimum number of members
        needed to be valid.
    :type minimum_length: :class:`int <python:int>`

    :param maximum_length: If supplied, the maximum number of members
        needed to be valid.
    :type maximum_length: :class:`int <python:int>`

    :returns: ``True`` if ``obj`` is a valid iterable, ``False`` if not.
    :rtype: :class:`bool <python:bool>`

    :raises SyntaxError: if ``kwargs`` contains duplicate keyword
        parameters or duplicates keyword parameters passed to the
        underlying validator
    """
    if obj is None or obj in forbid_literals:
        return False
    # Delegate the actual validation; anything but a SyntaxError means
    # "not a valid iterable" rather than a caller error.
    try:
        validators.iterable(obj,
                            allow_empty=True,
                            forbid_literals=forbid_literals,
                            minimum_length=minimum_length,
                            maximum_length=maximum_length,
                            **kwargs)
    except SyntaxError as error:
        raise error
    except Exception:
        return False
    return True
[ "def", "is_iterable", "(", "obj", ",", "forbid_literals", "=", "(", "str", ",", "bytes", ")", ",", "minimum_length", "=", "None", ",", "maximum_length", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "obj", "is", "None", ":", "return", "False", ...
35.021739
23.130435
def offsets(self):
    """Yield a ``(path, offset)`` tuple for every tailed file.

    Uses ``dict.items()`` rather than the Python 2-only ``iteritems()``
    so the generator also works on Python 3 (``items()`` is equally
    valid, if slightly less lazy, on Python 2).
    """
    for path, tailedfile in self._tailedfiles.items():
        yield path, tailedfile._offset
[ "def", "offsets", "(", "self", ")", ":", "for", "path", ",", "tailedfile", "in", "self", ".", "_tailedfiles", ".", "iteritems", "(", ")", ":", "yield", "path", ",", "tailedfile", ".", "_offset" ]
48.75
10.75
def derivatives_ctrlpts(**kwargs): """ Computes the control points of all derivative surfaces up to and including the {degree}-th derivative. Output is PKL[k][l][i][j], i,j-th control point of the surface differentiated k times w.r.t to u and l times w.r.t v. """ r1 = kwargs.get('r1') # minimum span on the u-direction r2 = kwargs.get('r2') # maximum span on the u-direction s1 = kwargs.get('s1') # minimum span on the v-direction s2 = kwargs.get('s2') # maximum span on the v-direction deriv_order = kwargs.get('deriv_order') ctrlpts_size = kwargs.get('ctrlpts_size') degree = kwargs.get('degree') knotvector = kwargs.get('knotvector') ctrlpts = kwargs.get('ctrlpts') dimension = kwargs.get('dimension') PKL = [[[[[None for _ in range(dimension)] for _ in range(ctrlpts_size[1])] for _ in range(ctrlpts_size[0])] for _ in range(deriv_order + 1)] for _ in range(deriv_order + 1)] du = min(degree[0], deriv_order) dv = min(degree[1], deriv_order) r = r2 - r1 s = s2 - s1 # Control points of the U derivatives of every U-curve for j in range(s1, s2 + 1): PKu = CurveEvaluator2.derivatives_ctrlpts(r1=r1, r2=r2, degree=degree[0], knotvector=knotvector[0], ctrlpts=[ctrlpts[j + (ctrlpts_size[1] * i)] for i in range(ctrlpts_size[0])], dimension=dimension, deriv_order=du) # Copy into output as the U partial derivatives for k in range(0, du + 1): for i in range(0, r - k + 1): PKL[k][0][i][j - s1] = PKu[k][i] # Control points of the V derivatives of every U-differentiated V-curve for k in range(0, du): for i in range(0, r - k + 1): dd = min(deriv_order - k, dv) PKuv = CurveEvaluator2.derivatives_ctrlpts(r1=0, r2=s, degree=degree[1], knotvector=knotvector[1][s1:], ctrlpts=PKL[k][0][i], dimension=dimension, deriv_order=dd) # Copy into output for l in range(1, dd + 1): for j in range(0, s - l + 1): PKL[k][l][i][j] = PKuv[l][j] return PKL
[ "def", "derivatives_ctrlpts", "(", "*", "*", "kwargs", ")", ":", "r1", "=", "kwargs", ".", "get", "(", "'r1'", ")", "# minimum span on the u-direction", "r2", "=", "kwargs", ".", "get", "(", "'r2'", ")", "# maximum span on the u-direction", "s1", "=", "kwargs"...
46.316667
24
def get_best_auth(self, family, address, dispno, types = ( b"MIT-MAGIC-COOKIE-1", )): """Find an authentication entry matching FAMILY, ADDRESS and DISPNO. The name of the auth scheme must match one of the names in TYPES. If several entries match, the first scheme in TYPES will be choosen. If an entry is found, the tuple (name, data) is returned, otherwise XNoAuthError is raised. """ num = str(dispno).encode() matches = {} for efam, eaddr, enum, ename, edata in self.entries: if efam == family and eaddr == address and num == enum: matches[ename] = edata for t in types: try: return (t, matches[t]) except KeyError: pass raise error.XNoAuthError((family, address, dispno))
[ "def", "get_best_auth", "(", "self", ",", "family", ",", "address", ",", "dispno", ",", "types", "=", "(", "b\"MIT-MAGIC-COOKIE-1\"", ",", ")", ")", ":", "num", "=", "str", "(", "dispno", ")", ".", "encode", "(", ")", "matches", "=", "{", "}", "for",...
29.862069
21.896552
def dataReceived(self, data): """ Do not overwrite this method. Instead implement `on_...` methods for the registered typenames to handle incomming packets. """ self._unprocessed_data.enqueue(data) while True: if len(self._unprocessed_data) < self._header.size: return # not yet enough data hdr_data = self._unprocessed_data.peek(self._header.size) packet_length, typekey = self._header.unpack(hdr_data) total_length = self._header.size + packet_length if len(self._unprocessed_data) < total_length: return # not yet enough data self._unprocessed_data.drop(self._header.size) packet = self._unprocessed_data.dequeue(packet_length) self._start_receive = None typename = self._type_register.get(typekey, None) if typename is None: self.on_unregistered_type(typekey, packet) else: self.packet_received(typename, packet)
[ "def", "dataReceived", "(", "self", ",", "data", ")", ":", "self", ".", "_unprocessed_data", ".", "enqueue", "(", "data", ")", "while", "True", ":", "if", "len", "(", "self", ".", "_unprocessed_data", ")", "<", "self", ".", "_header", ".", "size", ":",...
37.433333
19.433333
def reboot(self, comment=None): """ Send reboot command to this node. :param str comment: comment to audit :raises NodeCommandFailed: reboot failed with reason :return: None """ self.make_request( NodeCommandFailed, method='update', resource='reboot', params={'comment': comment})
[ "def", "reboot", "(", "self", ",", "comment", "=", "None", ")", ":", "self", ".", "make_request", "(", "NodeCommandFailed", ",", "method", "=", "'update'", ",", "resource", "=", "'reboot'", ",", "params", "=", "{", "'comment'", ":", "comment", "}", ")" ]
28.692308
10.692308
def getWifiInfo(self, wifiInterfaceId=1, timeout=1): """Execute GetInfo action to get Wifi basic information's. :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed :return: the basic informations :rtype: WifiBasicInfo """ namespace = Wifi.getServiceType("getWifiInfo") + str(wifiInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetInfo", timeout=timeout) return WifiBasicInfo(results)
[ "def", "getWifiInfo", "(", "self", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"getWifiInfo\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getC...
40.785714
20.285714
def to_protobuf(self) -> LinkItemProto: """ Create protobuf item. :return: protobuf structure :rtype: ~unidown.plugin.protobuf.link_item_pb2.LinkItemProto """ result = LinkItemProto() result.name = self._name result.time.CopyFrom(datetime_to_timestamp(self._time)) return result
[ "def", "to_protobuf", "(", "self", ")", "->", "LinkItemProto", ":", "result", "=", "LinkItemProto", "(", ")", "result", ".", "name", "=", "self", ".", "_name", "result", ".", "time", ".", "CopyFrom", "(", "datetime_to_timestamp", "(", "self", ".", "_time",...
31
13
def atype_view_asset(self, ): """View the project of the current assettype :returns: None :rtype: None :raises: None """ if not self.cur_atype: return i = self.atype_asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if isinstance(asset, djadapter.models.Asset): self.view_asset(asset)
[ "def", "atype_view_asset", "(", "self", ",", ")", ":", "if", "not", "self", ".", "cur_atype", ":", "return", "i", "=", "self", ".", "atype_asset_treev", ".", "currentIndex", "(", ")", "item", "=", "i", ".", "internalPointer", "(", ")", "if", "item", ":...
27.3125
14.875
def RetrievePluginAsset(self, plugin_name, asset_name): """Return the contents of a given plugin asset. Args: plugin_name: The string name of a plugin. asset_name: The string name of an asset. Returns: The string contents of the plugin asset. Raises: KeyError: If the asset is not available. """ return plugin_asset_util.RetrieveAsset(self.path, plugin_name, asset_name)
[ "def", "RetrievePluginAsset", "(", "self", ",", "plugin_name", ",", "asset_name", ")", ":", "return", "plugin_asset_util", ".", "RetrieveAsset", "(", "self", ".", "path", ",", "plugin_name", ",", "asset_name", ")" ]
29.142857
20.428571
def letter2num(letters, zbase=False): """A = 1, C = 3 and so on. Convert spreadsheet style column enumeration to a number. Answers: A = 1, Z = 26, AA = 27, AZ = 52, ZZ = 702, AMJ = 1024 >>> from channelpack.pullxl import letter2num >>> letter2num('A') == 1 True >>> letter2num('Z') == 26 True >>> letter2num('AZ') == 52 True >>> letter2num('ZZ') == 702 True >>> letter2num('AMJ') == 1024 True >>> letter2num('AMJ', zbase=True) == 1023 True >>> letter2num('A', zbase=True) == 0 True """ letters = letters.upper() res = 0 weight = len(letters) - 1 assert weight >= 0, letters for i, c in enumerate(letters): assert 65 <= ord(c) <= 90, c # A-Z res += (ord(c) - 64) * 26**(weight - i) if not zbase: return res return res - 1
[ "def", "letter2num", "(", "letters", ",", "zbase", "=", "False", ")", ":", "letters", "=", "letters", ".", "upper", "(", ")", "res", "=", "0", "weight", "=", "len", "(", "letters", ")", "-", "1", "assert", "weight", ">=", "0", ",", "letters", "for"...
22.861111
19.138889
def submit_reading(basename, pmid_list_filename, readers, start_ix=None, end_ix=None, pmids_per_job=3000, num_tries=2, force_read=False, force_fulltext=False, project_name=None): """Submit an old-style pmid-centered no-database s3 only reading job. This function is provided for the sake of backward compatibility. It is preferred that you use the object-oriented PmidSubmitter and the submit_reading job going forward. """ sub = PmidSubmitter(basename, readers, project_name) sub.set_options(force_read, force_fulltext) sub.submit_reading(pmid_list_filename, start_ix, end_ix, pmids_per_job, num_tries) return sub.job_list
[ "def", "submit_reading", "(", "basename", ",", "pmid_list_filename", ",", "readers", ",", "start_ix", "=", "None", ",", "end_ix", "=", "None", ",", "pmids_per_job", "=", "3000", ",", "num_tries", "=", "2", ",", "force_read", "=", "False", ",", "force_fulltex...
50.571429
20.142857
def find_executable(executable, path=None): '''Try to find 'executable' in the directories listed in 'path' (a string listing directories separated by 'os.pathsep'; defaults to os.environ['PATH']).''' if path is None: path = os.environ['PATH'] paths = path.split(os.pathsep) extlist = [''] if os.name == 'os2': ext = os.path.splitext(executable) # executable files on OS/2 can have an arbitrary extension, but # .exe is automatically appended if no dot is present in the name if not ext: executable = executable + ".exe" elif sys.platform == 'win32': pathext = os.environ['PATHEXT'].lower().split(os.pathsep) ext = os.path.splitext(executable) if ext not in pathext: extlist = pathext for ext in extlist: execname = executable + ext if os.path.isfile(execname): return execname else: for pth in paths: fil = os.path.join(pth, execname) if os.path.isfile(fil): return fil break else: return None
[ "def", "find_executable", "(", "executable", ",", "path", "=", "None", ")", ":", "if", "path", "is", "None", ":", "path", "=", "os", ".", "environ", "[", "'PATH'", "]", "paths", "=", "path", ".", "split", "(", "os", ".", "pathsep", ")", "extlist", ...
35.870968
14.967742
def index(self): ''' Index funtion. ''' self.render('ext_excel/index.html', userinfo=self.userinfo, cfg=CMS_CFG, kwd={}, )
[ "def", "index", "(", "self", ")", ":", "self", ".", "render", "(", "'ext_excel/index.html'", ",", "userinfo", "=", "self", ".", "userinfo", ",", "cfg", "=", "CMS_CFG", ",", "kwd", "=", "{", "}", ",", ")" ]
20.7
20.5
def notebook_merge(local, base, remote, check_modified=False): """Unify three notebooks into a single notebook with merge metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the merge. Parameters ---------- local : dict The local branch's version of the notebook. base : dict The last common ancestor of local and remote. remote : dict The remote branch's version of the notebook. Returns ------- nb : A valid notebook containing merge metadata. """ local_cells = get_cells(local) base_cells = get_cells(base) remote_cells = get_cells(remote) rows = [] current_row = [] empty_cell = lambda: { 'cell_type': 'code', 'language': 'python', 'outputs': [], 'prompt_number': 1, 'text': ['Placeholder'], 'metadata': {'state': 'empty'} } diff_of_diffs = merge(local_cells, base_cells, remote_cells) # For each item in the higher-order diff, create a "row" that # corresponds to a row in the NBDiff interface. A row contains: # | LOCAL | BASE | REMOTE | for item in diff_of_diffs: state = item['state'] cell = copy.deepcopy(diff_result_to_cell(item['value'])) if state == 'deleted': # This change is between base and local branches. # It can be an addition or a deletion. if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue cell['metadata']['side'] = 'local' remote_cell = empty_cell() remote_cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': base_cell = copy.deepcopy(cell) else: base_cell = empty_cell() base_cell['metadata']['side'] = 'base' # This change is on the right. current_row = [ cell, base_cell, remote_cell, ] elif state == 'added': # This change is between base and remote branches. # It can be an addition or a deletion. 
cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue if cell['metadata']['state'] == 'deleted': base_cell = copy.deepcopy(cell) base_cell['metadata']['state'] = 'unchanged' local_cell = copy.deepcopy(cell) local_cell['metadata']['state'] = 'unchanged' else: base_cell = empty_cell() local_cell = empty_cell() base_cell['metadata']['side'] = 'base' local_cell['metadata']['side'] = 'local' current_row = [ local_cell, base_cell, cell, ] elif state == 'unchanged': # The same item occurs between base-local and base-remote. # This happens if both branches made the same change, whether # that is an addition or deletion. If neither branches # changed a given cell, that cell shows up here too. cell1 = copy.deepcopy(cell) cell3 = copy.deepcopy(cell) if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': # If the change is a deletion, the cell-to-be-deleted # should in the base as 'unchanged'. The user will # choose to make it deleted. cell2 = copy.deepcopy(cell) cell2['metadata']['state'] = 'unchanged' else: # If the change is an addition, it should not # show in the base; the user must add it to the merged version. cell2 = empty_cell() cell1['metadata']['side'] = 'local' cell2['metadata']['side'] = 'base' cell3['metadata']['side'] = 'remote' current_row = [ cell1, cell2, cell3, ] rows.append(current_row) # Chain all rows together; create a flat array from the nested array. # Use the base notebook's notebook-level metadata (title, version, etc.) result_notebook = local if len(result_notebook['worksheets']) == 0: result_notebook['worksheets'] = [nbformat.new_worksheet()] new_cell_array = list(it.chain.from_iterable(rows)) result_notebook['worksheets'][0]['cells'] = new_cell_array result_notebook['metadata']['nbdiff-type'] = 'merge' return result_notebook
[ "def", "notebook_merge", "(", "local", ",", "base", ",", "remote", ",", "check_modified", "=", "False", ")", ":", "local_cells", "=", "get_cells", "(", "local", ")", "base_cells", "=", "get_cells", "(", "base", ")", "remote_cells", "=", "get_cells", "(", "...
37.727273
18.719697
def getWaveletData(eda): ''' This function computes the wavelet coefficients INPUT: data: DataFrame, index is a list of timestamps at 8Hz, columns include EDA, filtered_eda OUTPUT: wave1Second: DateFrame, index is a list of timestamps at 1Hz, columns include OneSecond_feature1, OneSecond_feature2, OneSecond_feature3 waveHalfSecond: DateFrame, index is a list of timestamps at 2Hz, columns include HalfSecond_feature1, HalfSecond_feature2 ''' # Create wavelet dataframes oneSecond = halfSecond = # Compute wavelets cA_n, cD_3, cD_2, cD_1 = pywt.wavedec(eda, 'Haar', level=3) #3 = 1Hz, 2 = 2Hz, 1=4Hz # Wavelet 1 second window N = int(len(eda)/sampling_rate) coeff1 = np.max(abs(np.reshape(cD_1[0:4*N],(N,4))), axis=1) coeff2 = np.max(abs(np.reshape(cD_2[0:2*N],(N,2))), axis=1) coeff3 = abs(cD_3[0:N]) wave1Second = pd.DataFrame({'OneSecond_feature1':coeff1,'OneSecond_feature2':coeff2,'OneSecond_feature3':coeff3}) wave1Second.index = oneSecond[:len(wave1Second)] # Wavelet Half second window N = int(np.floor((len(data)/8.0)*2)) coeff1 = np.max(abs(np.reshape(cD_1[0:2*N],(N,2))),axis=1) coeff2 = abs(cD_2[0:N]) waveHalfSecond = pd.DataFrame({'HalfSecond_feature1':coeff1,'HalfSecond_feature2':coeff2}) waveHalfSecond.index = halfSecond[:len(waveHalfSecond)] return wave1Second,waveHalfSecond
[ "def", "getWaveletData", "(", "eda", ")", ":", "# Create wavelet dataframes", "oneSecond", "=", "halfSecond", "=", "# Compute wavelets", "cA_n", ",", "cD_3", ",", "cD_2", ",", "cD_1", "=", "pywt", ".", "wavedec", "(", "eda", ",", "'Haar'", ",", "level", "=",...
42.454545
30.939394
def _is_valid_index(self, index): """ Return ``True`` if and only if the given ``index`` is valid. """ if isinstance(index, int): return (index >= 0) and (index < len(self)) if isinstance(index, list): valid = True for i in index: valid = valid or self._is_valid_index(i) return valid return False
[ "def", "_is_valid_index", "(", "self", ",", "index", ")", ":", "if", "isinstance", "(", "index", ",", "int", ")", ":", "return", "(", "index", ">=", "0", ")", "and", "(", "index", "<", "len", "(", "self", ")", ")", "if", "isinstance", "(", "index",...
31.153846
11.923077
def _mainthread_accept_clients(self): """Accepts new clients and sends them to the to _handle_accepted within a subthread """ try: if self._accept_selector.select(timeout=self.block_time): client = self._server_socket.accept() logging.info('Client connected: {}'.format(client[1])) self._threads_limiter.start_thread(target=self._subthread_handle_accepted, args=(client,)) except socket.error: pass
[ "def", "_mainthread_accept_clients", "(", "self", ")", ":", "try", ":", "if", "self", ".", "_accept_selector", ".", "select", "(", "timeout", "=", "self", ".", "block_time", ")", ":", "client", "=", "self", ".", "_server_socket", ".", "accept", "(", ")", ...
45.25
21.25
def authenticate(self, req_data, identifier: Optional[str]=None, signature: Optional[str]=None, threshold: Optional[int] = None, verifier: Verifier=DidVerifier): """ Prepares the data to be serialised for signing and then verifies the signature :param req_data: :param identifier: :param signature: :param verifier: :return: """ to_serialize = {k: v for k, v in req_data.items() if k not in self.excluded_from_signing} if req_data.get(f.SIG.nm) is None and \ req_data.get(f.SIGS.nm) is None and \ signature is None: raise MissingSignature if req_data.get(f.IDENTIFIER.nm) and (req_data.get(f.SIG.nm) or signature): try: # if not identifier: identifier = identifier or self._extract_identifier(req_data) # if not signature: signature = signature or self._extract_signature(req_data) signatures = {identifier: signature} except Exception as ex: if ex in (MissingSignature, EmptySignature, MissingIdentifier, EmptyIdentifier): ex = ex(req_data.get(f.IDENTIFIER.nm), req_data.get(f.SIG.nm)) raise ex else: signatures = req_data.get(f.SIGS.nm, None) return self.authenticate_multi(to_serialize, signatures=signatures, threshold=threshold, verifier=verifier)
[ "def", "authenticate", "(", "self", ",", "req_data", ",", "identifier", ":", "Optional", "[", "str", "]", "=", "None", ",", "signature", ":", "Optional", "[", "str", "]", "=", "None", ",", "threshold", ":", "Optional", "[", "int", "]", "=", "None", "...
43.756757
20.513514
def _max_weight_state(states: Iterable[TensorProductState]) -> Union[None, TensorProductState]: """Construct a TensorProductState by taking the single-qubit state at each qubit position. This function will return ``None`` if the input states are not compatible For example, the max_weight_state of ["(+X, q0)", "(-Z, q1)"] is "(+X, q0; -Z q1)". Asking for the max weight state of something like ["(+X, q0)", "(+Z, q0)"] will return None. """ mapping = dict() # type: Dict[int, _OneQState] for state in states: for oneq_state in state.states: if oneq_state.qubit in mapping: if mapping[oneq_state.qubit] != oneq_state: return None else: mapping[oneq_state.qubit] = oneq_state return TensorProductState(list(mapping.values()))
[ "def", "_max_weight_state", "(", "states", ":", "Iterable", "[", "TensorProductState", "]", ")", "->", "Union", "[", "None", ",", "TensorProductState", "]", ":", "mapping", "=", "dict", "(", ")", "# type: Dict[int, _OneQState]", "for", "state", "in", "states", ...
46.111111
22.5
def create_project(args): """ Create a new django project using the longclaw template """ # Make sure given name is not already in use by another python package/module. try: __import__(args.project_name) except ImportError: pass else: sys.exit("'{}' conflicts with the name of an existing " "Python module and cannot be used as a project " "name. Please try another name.".format(args.project_name)) # Get the longclaw template path template_path = path.join(path.dirname(longclaw.__file__), 'project_template') utility = ManagementUtility(( 'django-admin.py', 'startproject', '--template={}'.format(template_path), '--extension=html,css,js,py,txt', args.project_name )) utility.execute() print("{} has been created.".format(args.project_name))
[ "def", "create_project", "(", "args", ")", ":", "# Make sure given name is not already in use by another python package/module.", "try", ":", "__import__", "(", "args", ".", "project_name", ")", "except", "ImportError", ":", "pass", "else", ":", "sys", ".", "exit", "(...
32.259259
21.074074
def create_autoscale_rule(subscription_id, resource_group, vmss_name, metric_name, operator, threshold, direction, change_count, time_grain='PT1M', time_window='PT5M', cool_down='PT1M'): '''Create a new autoscale rule - pass the output in a list to create_autoscale_setting(). Args: subscription_id (str): Azure subscription id. resource_group (str): Azure resource group name. vmss_name (str): Name of scale set to apply scale events to. metric_name (str): Name of metric being evaluated. operator (str): Operator to evaluate. E.g. "GreaterThan". threshold (str): Threshold to trigger action. direction (str): Direction of action. E.g. Increase. change_count (str): How many to increase or decrease by. time_grain (str): Optional. Measurement granularity. Default 'PT1M'. time_window (str): Optional. Range of time to collect data over. Default 'PT5M'. cool_down (str): Optional. Time to wait after last scaling action. ISO 8601 format. Default 'PT1M'. Returns: HTTP response. JSON body of autoscale setting. ''' metric_trigger = {'metricName': metric_name} metric_trigger['metricNamespace'] = '' metric_trigger['metricResourceUri'] = '/subscriptions/' + subscription_id + \ '/resourceGroups/' + resource_group + \ '/providers/Microsoft.Compute/virtualMachineScaleSets/' + vmss_name metric_trigger['timeGrain'] = time_grain metric_trigger['statistic'] = 'Average' metric_trigger['timeWindow'] = time_window metric_trigger['timeAggregation'] = 'Average' metric_trigger['operator'] = operator metric_trigger['threshold'] = threshold scale_action = {'direction': direction} scale_action['type'] = 'ChangeCount' scale_action['value'] = str(change_count) scale_action['cooldown'] = cool_down new_rule = {'metricTrigger': metric_trigger} new_rule['scaleAction'] = scale_action return new_rule
[ "def", "create_autoscale_rule", "(", "subscription_id", ",", "resource_group", ",", "vmss_name", ",", "metric_name", ",", "operator", ",", "threshold", ",", "direction", ",", "change_count", ",", "time_grain", "=", "'PT1M'", ",", "time_window", "=", "'PT5M'", ",",...
51.435897
19.948718
def string(self) -> bytes: """The capabilities string without the enclosing square brackets.""" if self._raw is not None: return self._raw self._raw = raw = BytesFormat(b' ').join( [b'CAPABILITY', b'IMAP4rev1'] + self.capabilities) return raw
[ "def", "string", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_raw", "is", "not", "None", ":", "return", "self", ".", "_raw", "self", ".", "_raw", "=", "raw", "=", "BytesFormat", "(", "b' '", ")", ".", "join", "(", "[", "b'CAPABILITY'"...
41.714286
12.285714
def parse_docs(docs, marks): """ Parse YAML syntax content from docs If docs is None, return {} If docs has no YAML content, return {"$desc": docs} Else, parse YAML content, return {"$desc": docs, YAML} Args: docs (str): docs to be parsed marks (list): list of which indicate YAML content starts Returns: A dict contains information of docs """ if docs is None: return {} indexs = [] for mark in marks: i = docs.find(mark) if i >= 0: indexs.append(i) if not indexs: return {"$desc": textwrap.dedent(docs).strip()} start = min(indexs) start = docs.rfind("\n", 0, start) yamltext = textwrap.dedent(docs[start + 1:]) meta = yaml.load(yamltext) meta["$desc"] = textwrap.dedent(docs[:start]).strip() return meta
[ "def", "parse_docs", "(", "docs", ",", "marks", ")", ":", "if", "docs", "is", "None", ":", "return", "{", "}", "indexs", "=", "[", "]", "for", "mark", "in", "marks", ":", "i", "=", "docs", ".", "find", "(", "mark", ")", "if", "i", ">=", "0", ...
28.275862
16.344828
def prepare_special_info_about_entry(i): """ Input: { } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 dict - dict with info } """ # Add control info d={'engine':'CK', 'version':cfg['version']} if cfg.get('default_developer','')!='': d['author']=cfg['default_developer'] if cfg.get('default_developer_email','')!='': d['author_email']=cfg['default_developer_email'] if cfg.get('default_developer_webpage','')!='': d['author_webpage']=cfg['default_developer_webpage'] if cfg.get('default_license','')!='': d['license']=cfg['default_license'] if cfg.get('default_copyright','')!='': d['copyright']=cfg['default_copyright'] r=get_current_date_time({}) d['iso_datetime']=r['iso_datetime'] return {'return':0, 'dict': d}
[ "def", "prepare_special_info_about_entry", "(", "i", ")", ":", "# Add control info", "d", "=", "{", "'engine'", ":", "'CK'", ",", "'version'", ":", "cfg", "[", "'version'", "]", "}", "if", "cfg", ".", "get", "(", "'default_developer'", ",", "''", ")", "!="...
26.162162
18.702703
def getMessage(self): """Returns a colorized log message based on the log level. If the platform is windows the original message will be returned without colorization windows escape codes are crazy. :returns: ``str`` """ msg = str(self.msg) if self.args: msg = msg % self.args if platform.system().lower() == 'windows' or self.levelno < 10: return msg elif self.levelno >= 50: return utils.return_colorized(msg, 'critical') elif self.levelno >= 40: return utils.return_colorized(msg, 'error') elif self.levelno >= 30: return utils.return_colorized(msg, 'warn') elif self.levelno >= 20: return utils.return_colorized(msg, 'info') else: return utils.return_colorized(msg, 'debug')
[ "def", "getMessage", "(", "self", ")", ":", "msg", "=", "str", "(", "self", ".", "msg", ")", "if", "self", ".", "args", ":", "msg", "=", "msg", "%", "self", ".", "args", "if", "platform", ".", "system", "(", ")", ".", "lower", "(", ")", "==", ...
35.25
17.875
def typed_fields(cls): """Return a tuple of this entity's TypedFields.""" # Checking cls._typed_fields could return a superclass _typed_fields # value. So we check our class __dict__ which does not include # inherited attributes. klassdict = cls.__dict__ try: return klassdict["_typed_fields"] except KeyError: fields = cls.typed_fields_with_attrnames() cls._typed_fields = tuple(field for _, field in fields) return cls._typed_fields
[ "def", "typed_fields", "(", "cls", ")", ":", "# Checking cls._typed_fields could return a superclass _typed_fields", "# value. So we check our class __dict__ which does not include", "# inherited attributes.", "klassdict", "=", "cls", ".", "__dict__", "try", ":", "return", "klassdi...
37.357143
19.928571
def service_group(self, service_name): """ Args: service_name: the name of the service in the service registry Returns: the name of the group the service is in, or None of the service was not found """ for group in EFConfig.SERVICE_GROUPS: if self.services(group).has_key(service_name): return group return None
[ "def", "service_group", "(", "self", ",", "service_name", ")", ":", "for", "group", "in", "EFConfig", ".", "SERVICE_GROUPS", ":", "if", "self", ".", "services", "(", "group", ")", ".", "has_key", "(", "service_name", ")", ":", "return", "group", "return", ...
31.909091
17.181818
def raise_enter_downtime_log_entry(self): """Raise CONTACT DOWNTIME ALERT entry (info level) Format is : "CONTACT DOWNTIME ALERT: *get_name()*;STARTED; Contact has entered a period of scheduled downtime" Example : "CONTACT DOWNTIME ALERT: test_contact;STARTED; Contact has entered a period of scheduled downtime" :return: None """ brok = make_monitoring_log( 'info', "CONTACT DOWNTIME ALERT: %s;STARTED; " "Contact has entered a period of scheduled downtime" % self.get_name() ) self.broks.append(brok)
[ "def", "raise_enter_downtime_log_entry", "(", "self", ")", ":", "brok", "=", "make_monitoring_log", "(", "'info'", ",", "\"CONTACT DOWNTIME ALERT: %s;STARTED; \"", "\"Contact has entered a period of scheduled downtime\"", "%", "self", ".", "get_name", "(", ")", ")", "self",...
44.857143
20.5
def dfa_word_acceptance(dfa: dict, word: list) -> bool: """ Checks if a given **word** is accepted by a DFA, returning True/false. The word w is accepted by a DFA if DFA has an accepting run on w. Since A is deterministic, :math:`w ∈ L(A)` if and only if :math:`ρ(s_0 , w) ∈ F` . :param dict dfa: input DFA; :param list word: list of actions ∈ dfa['alphabet']. :return: *(bool)*, True if the word is accepted, False in the other case. """ current_state = dfa['initial_state'] for action in word: if (current_state, action) in dfa['transitions']: current_state = dfa['transitions'][current_state, action] else: return False if current_state in dfa['accepting_states']: return True else: return False
[ "def", "dfa_word_acceptance", "(", "dfa", ":", "dict", ",", "word", ":", "list", ")", "->", "bool", ":", "current_state", "=", "dfa", "[", "'initial_state'", "]", "for", "action", "in", "word", ":", "if", "(", "current_state", ",", "action", ")", "in", ...
33.291667
18.75
def configure(level=logging.WARNING, handler=None, formatter=None): """Configure Logr @param handler: Logger message handler @type handler: logging.Handler or None @param formatter: Logger message Formatter @type formatter: logging.Formatter or None """ if formatter is None: formatter = LogrFormatter() if handler is None: handler = logging.StreamHandler() handler.setFormatter(formatter) handler.setLevel(level) Logr.handler = handler
[ "def", "configure", "(", "level", "=", "logging", ".", "WARNING", ",", "handler", "=", "None", ",", "formatter", "=", "None", ")", ":", "if", "formatter", "is", "None", ":", "formatter", "=", "LogrFormatter", "(", ")", "if", "handler", "is", "None", ":...
29.722222
14.944444
def prune_non_existent_outputs(compound_match_query):
    """Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.

    Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse
    blocks. For each of these, remove the outputs (that have been implicitly pruned away)
    from each corresponding ConstructResult block.

    Args:
        compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
                              (see convert_optional_traversals_to_compound_match_query)

    Returns:
        CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery
        objects

    Raises:
        AssertionError: if the CompoundMatchQuery is empty, if a non-optional or folded
                        output references a missing location, or if an output expression
                        has an unexpected type.
    """
    if len(compound_match_query.match_queries) == 1:
        # Nothing was pruned away; all outputs are still valid.
        return compound_match_query
    elif len(compound_match_query.match_queries) == 0:
        raise AssertionError(u'Received CompoundMatchQuery with '
                             u'an empty list of MatchQuery objects.')
    else:
        match_queries = []
        for match_query in compound_match_query.match_queries:
            match_traversals = match_query.match_traversals
            output_block = match_query.output_block
            present_locations_tuple = _get_present_locations(match_traversals)
            present_locations, present_non_optional_locations = present_locations_tuple

            new_output_fields = {}
            for output_name, expression in six.iteritems(output_block.fields):
                if isinstance(expression, OutputContextField):
                    # An OutputContextField as an output Expression indicates that we are not
                    # within an @optional scope. Therefore, the location this output uses must
                    # be in present_locations, and the output is never pruned.
                    location_name, _ = expression.location.get_location_name()
                    if location_name not in present_locations:
                        raise AssertionError(u'Non-optional output location {} was not found in '
                                             u'present_locations: {}'
                                             .format(expression.location, present_locations))
                    new_output_fields[output_name] = expression
                elif isinstance(expression, FoldedContextField):
                    # A FoldedContextField as an output Expression indicates that we are not
                    # within an @optional scope. Therefore, the location this output uses must
                    # be in present_locations, and the output is never pruned.
                    base_location = expression.fold_scope_location.base_location
                    location_name, _ = base_location.get_location_name()
                    if location_name not in present_locations:
                        # BUG FIX: the message previously read "was found in
                        # present_locations" even though this branch fires precisely
                        # when the location was NOT found.
                        raise AssertionError(u'Folded output location {} was not found in '
                                             u'present_locations: {}'
                                             .format(base_location, present_locations))
                    new_output_fields[output_name] = expression
                elif isinstance(expression, TernaryConditional):
                    # A TernaryConditional indicates that this output is within some optional
                    # scope. This may be pruned away based on the contents of present_locations.
                    location_name, _ = expression.if_true.location.get_location_name()
                    if location_name in present_locations:
                        if location_name in present_non_optional_locations:
                            # The location is guaranteed present: keep only the "true" branch.
                            new_output_fields[output_name] = expression.if_true
                        else:
                            new_output_fields[output_name] = expression
                else:
                    raise AssertionError(u'Invalid expression of type {} in output block: '
                                         u'{}'.format(type(expression).__name__, output_block))

            match_queries.append(
                MatchQuery(
                    match_traversals=match_traversals,
                    folds=match_query.folds,
                    output_block=ConstructResult(new_output_fields),
                    where_block=match_query.where_block,
                )
            )
        return CompoundMatchQuery(match_queries=match_queries)
[ "def", "prune_non_existent_outputs", "(", "compound_match_query", ")", ":", "if", "len", "(", "compound_match_query", ".", "match_queries", ")", "==", "1", ":", "return", "compound_match_query", "elif", "len", "(", "compound_match_query", ".", "match_queries", ")", ...
58.256757
31.432432
def predict(self, features):
    """Use the optimized pipeline to predict the target for a feature set.

    Parameters
    ----------
    features: array-like {n_samples, n_features}
        Feature matrix

    Returns
    ----------
    array-like: {n_samples}
        Predicted target for the samples in the feature matrix

    Raises
    ----------
    RuntimeError
        If no pipeline has been optimized yet.
    """
    if not self.fitted_pipeline_:
        raise RuntimeError('A pipeline has not yet been optimized. Please call fit() first.')

    # Validate/normalize the input before delegating to the fitted pipeline.
    checked_features = self._check_dataset(features, target=None, sample_weight=None)
    return self.fitted_pipeline_.predict(checked_features)
[ "def", "predict", "(", "self", ",", "features", ")", ":", "if", "not", "self", ".", "fitted_pipeline_", ":", "raise", "RuntimeError", "(", "'A pipeline has not yet been optimized. Please call fit() first.'", ")", "features", "=", "self", ".", "_check_dataset", "(", ...
31.5
23.95
def get_opt(key, config, section, booleans, repeatable):
    """Get one value from config file.

    :raise DocoptcfgFileError: If an option is the wrong type.

    :param str key: Option long name (e.g. --config).
    :param ConfigParser config: ConfigParser instance with config file data already loaded.
    :param str section: Section in config file to focus on.
    :param iter booleans: Option long names of boolean/flag types.
    :param iter repeatable: Option long names of repeatable options.

    :return: Value to set in the defaults dict.
    """
    option = key[2:]  # strip the leading "--"
    is_boolean = key in booleans

    if key in repeatable:
        if not is_boolean:
            # Repeatable non-boolean (e.g. --file=a.txt --file=b.txt):
            # one value per line in the config file.
            return config.get(section, option).strip('\n').splitlines()
        # Repeatable boolean: stored as a count.
        try:
            return config.getint(section, option)
        except ValueError as exc:
            raise DocoptcfgFileError('Repeatable boolean option "{0}" invalid.'.format(option), str(exc))

    if is_boolean:
        try:
            return config.getboolean(section, option)
        except ValueError as exc:
            raise DocoptcfgFileError('Boolean option "{0}" invalid.'.format(option), str(exc))

    # Everything else is returned as a plain string.
    return str(config.get(section, option))
[ "def", "get_opt", "(", "key", ",", "config", ",", "section", ",", "booleans", ",", "repeatable", ")", ":", "# Handle repeatable non-boolean options (e.g. --file=file1.txt --file=file2.txt).", "if", "key", "in", "repeatable", "and", "key", "not", "in", "booleans", ":",...
40.848485
23.424242
def start(self, port):
    """
    Start the Tornado web server and block on its IO loop.

    :param port: TCP port to listen on
    :return: None
    """
    app = tornado.web.Application(self.views,
                                  template_path=self.templatePath,
                                  static_path=self.staticPath)
    self.application = app
    app.listen(port)
    # Blocks until the IO loop is stopped.
    self.ioloop.start()
[ "def", "start", "(", "self", ",", "port", ")", ":", "self", ".", "application", "=", "tornado", ".", "web", ".", "Application", "(", "self", ".", "views", ",", "template_path", "=", "self", ".", "templatePath", ",", "static_path", "=", "self", ".", "st...
28.818182
12.454545
def strategyKLogN(kls, n, k=4):
    """Return the directory names to preserve under the KLogN purge strategy.

    Keeps exponentially-sparser ranges of indices below ``n`` (one range per
    power of ``k``) and returns them as a set of decimal strings, dropping
    any negative indices.
    """
    assert(k > 1)
    keep = {n}
    exponent = 0
    while k ** exponent <= n:
        step = k ** exponent
        keep.update(range(n, n - k * step, -step))
        exponent += 1
        # Align n down to the next power-of-k boundary.
        n -= n % (k ** exponent)
    return {str(value) for value in keep if value >= 0}
[ "def", "strategyKLogN", "(", "kls", ",", "n", ",", "k", "=", "4", ")", ":", "assert", "(", "k", ">", "1", ")", "s", "=", "set", "(", "[", "n", "]", ")", "i", "=", "0", "while", "k", "**", "i", "<=", "n", ":", "s", ".", "update", "(", "r...
23
21.5
def to_gds(self, multiplier, timestamp=None):
    """
    Convert this cell to a GDSII structure.

    Parameters
    ----------
    multiplier : number
        A number that multiplies all dimensions written in the GDSII
        structure.
    timestamp : datetime object
        Sets the GDSII timestamp. If ``None``, the current time is
        used.

    Returns
    -------
    out : string
        The GDSII binary string that represents this cell.
    """
    # Same timestamp is written twice below (presumably creation and
    # modification times of the structure record — TODO confirm against
    # the GDSII spec).
    now = datetime.datetime.today() if timestamp is None else timestamp
    name = self.name
    # Pad the name to an even byte count, as required by GDSII records.
    if len(name) % 2 != 0:
        name = name + '\0'
    # Header: 16 big-endian shorts (record length 28, record type 0x0502,
    # two 6-field timestamps), then the name record (type 0x0606), the
    # serialized elements and labels, and a 4-byte end record (0x0700).
    return struct.pack(
        '>16h', 28, 0x0502, now.year, now.month, now.day, now.hour,
        now.minute, now.second, now.year, now.month, now.day, now.hour,
        now.minute, now.second, 4 + len(name),
        0x0606) + name.encode('ascii') + b''.join(
            element.to_gds(multiplier) for element in self.elements) + b''.join(
                label.to_gds(multiplier) for label in self.labels) + struct.pack('>2h', 4, 0x0700)
[ "def", "to_gds", "(", "self", ",", "multiplier", ",", "timestamp", "=", "None", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "today", "(", ")", "if", "timestamp", "is", "None", "else", "timestamp", "name", "=", "self", ".", "name", "if", "...
37.483871
18.83871
def to_result(self):
    """Convert to the Linter.run return value"""
    # Append the optional note to the message with a " - " separator.
    message = self.text
    if self.note:
        message = ' - '.join((self.text, self.note))

    result = {'lnum': self.line_num, 'col': self.column}
    result['text'] = message
    result['type'] = self.types.get(self.message_type, '')
    return result
[ "def", "to_result", "(", "self", ")", ":", "text", "=", "[", "self", ".", "text", "]", "if", "self", ".", "note", ":", "text", ".", "append", "(", "self", ".", "note", ")", "return", "{", "'lnum'", ":", "self", ".", "line_num", ",", "'col'", ":",...
28.083333
15.75
def power_law(target, X, A1='', A2='', A3=''):
    r"""
    Calculates the rate, as well as slope and intercept of the following
    function at the given value of *X*:

        .. math::
            r = A_{1}   x^{A_{2}} + A_{3}

    Parameters
    ----------
    A1 -> A3 : string
        The dictionary keys on the target object containing the coefficients
        values to be used in the source term model
    X : string
        The dictionary key on the target objecxt containing the the quantity
        of interest

    Returns
    -------
    A dictionary containing 'rate' (the function value at X), 'S1' (slope)
    and 'S2' (intercept), which linearize the source term about the current
    value of X as :math:`rate = S_{1}   X + S_{2}`.
    """
    a = _parse_args(target=target, key=A1, default=1.0)
    b = _parse_args(target=target, key=A2, default=1.0)
    c = _parse_args(target=target, key=A3, default=0.0)
    x = target[X]

    rate = a * x ** b + c
    slope = a * b * x ** (b - 1)
    intercept = a * x ** b * (1 - b) + c
    return {'S1': slope, 'S2': intercept, 'rate': rate}
[ "def", "power_law", "(", "target", ",", "X", ",", "A1", "=", "''", ",", "A2", "=", "''", ",", "A3", "=", "''", ")", ":", "A", "=", "_parse_args", "(", "target", "=", "target", ",", "key", "=", "A1", ",", "default", "=", "1.0", ")", "B", "=", ...
29.066667
24.022222
def has_data(d, fullname):
    """Test if any of the `keys` of the `d` dictionary starts
    with `fullname`.
    """
    prefix = r'%s-' % (fullname, )
    return any(key.startswith(prefix) for key in d)
[ "def", "has_data", "(", "d", ",", "fullname", ")", ":", "fullname", "=", "r'%s-'", "%", "(", "fullname", ",", ")", "for", "k", "in", "d", ":", "if", "not", "k", ".", "startswith", "(", "fullname", ")", ":", "continue", "return", "True", "return", "...
28.333333
12.222222
def get_query_param(request, key):
    """Get query parameter uniformly for GET and POST requests.

    :param request: request object exposing ``query_params`` and ``data`` mappings
    :param key: parameter name to look up
    :return: the parameter value from ``query_params``, falling back to ``data``
    :raises KeyError: if the parameter is present in neither mapping
    """
    # NOTE(review): ``or`` means a falsy query param (e.g. empty string) also
    # falls through to request.data — preserved here as existing callers may
    # rely on it, but worth confirming this is intended.
    value = request.query_params.get(key) or request.data.get(key)
    if value is None:
        # BUG FIX: previously raised a bare ``KeyError()`` with no message;
        # include the missing key so the error is actionable.
        raise KeyError(key)
    return value
[ "def", "get_query_param", "(", "request", ",", "key", ")", ":", "value", "=", "request", ".", "query_params", ".", "get", "(", "key", ")", "or", "request", ".", "data", ".", "get", "(", "key", ")", "if", "value", "is", "None", ":", "raise", "KeyError...
37.833333
15.166667
def _get_oauth_session(self):
    """Creates a new OAuth session

    :return:
        - OAuth2Session object
    """
    refresh_kwargs = {
        "client_id": self.client_id,
        "client_secret": self.client_secret
    }
    session = OAuth2Session(
        client_id=self.client_id,
        token=self.token,
        token_updater=self.token_updater,
        auto_refresh_url=self.token_url,
        auto_refresh_kwargs=refresh_kwargs
    )
    return self._get_session(session)
[ "def", "_get_oauth_session", "(", "self", ")", ":", "return", "self", ".", "_get_session", "(", "OAuth2Session", "(", "client_id", "=", "self", ".", "client_id", ",", "token", "=", "self", ".", "token", ",", "token_updater", "=", "self", ".", "token_updater"...
28.263158
14.421053
def as_dict(self):
    """
    JSON-serializable dict representation of this Gaussian output.
    """
    structure = self.final_structure
    comp = structure.composition
    comp_dict = comp.as_dict()
    unique_symbols = sorted(comp_dict.keys())
    nsites = len(structure)

    # Input parameters that produced this run.
    vin = {"route": self.route_parameters,
           "functional": self.functional,
           "basis_set": self.basis_set,
           "nbasisfunctions": self.num_basis_func,
           "pcm_parameters": self.pcm}

    # Computed results.
    vout = {
        "energies": self.energies,
        "final_energy": self.final_energy,
        "final_energy_per_atom": self.final_energy / nsites,
        "molecule": structure.as_dict(),
        "stationary_type": self.stationary_type,
        "corrections": self.corrections
    }

    return {
        "has_gaussian_completed": self.properly_terminated,
        "nsites": nsites,
        "unit_cell_formula": comp_dict,
        "reduced_cell_formula": Composition(comp.reduced_formula).as_dict(),
        "pretty_formula": comp.reduced_formula,
        "is_pcm": self.is_pcm,
        "errors": self.errors,
        "Mulliken_charges": self.Mulliken_charges,
        "elements": unique_symbols,
        "nelements": len(unique_symbols),
        "charge": self.charge,
        "spin_multiplicity": self.spin_multiplicity,
        "input": vin,
        "output": vout,
        "@module": self.__class__.__module__,
        "@class": self.__class__.__name__,
    }
[ "def", "as_dict", "(", "self", ")", ":", "structure", "=", "self", ".", "final_structure", "d", "=", "{", "\"has_gaussian_completed\"", ":", "self", ".", "properly_terminated", ",", "\"nsites\"", ":", "len", "(", "structure", ")", "}", "comp", "=", "structur...
34.681818
16.090909
def remove_action(self, action_name, action_id):
    """
    Remove an existing action.

    action_name -- name of the action
    action_id -- ID of the action

    Returns a boolean indicating the presence of the action.
    """
    target = self.get_action(action_name, action_id)
    found = target is not None
    if found:
        # Cancel before dropping it from the registry.
        target.cancel()
        self.actions[action_name].remove(target)
    return found
[ "def", "remove_action", "(", "self", ",", "action_name", ",", "action_id", ")", ":", "action", "=", "self", ".", "get_action", "(", "action_name", ",", "action_id", ")", "if", "action", "is", "None", ":", "return", "False", "action", ".", "cancel", "(", ...
27.625
15.875
def reportTimes(self):
    """ Print out a formatted summary of the elapsed times for all
    the performed steps.
    """
    self.end = _ptime()
    total_time = 0
    print(ProcSteps.__report_header)
    for step in self.order:
        step_info = self.steps[step]
        # Steps that never ran have no 'elapsed' entry; count them as 0.
        _time = step_info['elapsed'] if 'elapsed' in step_info else 0.0
        total_time += _time
        print('   %20s          %0.4f sec.' % (step, _time))
    print('   %20s          %s' % ('=' * 20, '=' * 20))
    print('   %20s          %0.4f sec.' % ('Total', total_time))
[ "def", "reportTimes", "(", "self", ")", ":", "self", ".", "end", "=", "_ptime", "(", ")", "total_time", "=", "0", "print", "(", "ProcSteps", ".", "__report_header", ")", "for", "step", "in", "self", ".", "order", ":", "if", "'elapsed'", "in", "self", ...
32.631579
16.631579
def get_config():
    """
    Prepare and return alembic config
    These configurations used to live in alembic config initialiser,
    but that just tight coupling. Ideally we should move that to userspace
    and find a way to pass these into alembic commands.
    @todo: think about it
    """
    from boiler.migrations.config import MigrationsConfig

    # Human-readable names used in error messages.
    error_names = dict(
        path='MIGRATIONS_PATH',
        db_url='SQLALCHEMY_DATABASE_URI',
        metadata='SQLAlchemy metadata'
    )

    app = bootstrap.get_app()
    params = dict(
        path=app.config.get(error_names['path'], 'migrations'),
        db_url=app.config.get(error_names['db_url']),
        metadata=db.metadata,
    )

    # Every parameter must be defined; fail loudly otherwise.
    for param, value in params.items():
        if not value:
            msg = 'Configuration error: [{}] is undefined'
            raise Exception(msg.format(error_names[param]))

    return MigrationsConfig(**params)
[ "def", "get_config", "(", ")", ":", "from", "boiler", ".", "migrations", ".", "config", "import", "MigrationsConfig", "# used for errors", "map", "=", "dict", "(", "path", "=", "'MIGRATIONS_PATH'", ",", "db_url", "=", "'SQLALCHEMY_DATABASE_URI'", ",", "metadata", ...
29.709677
18.354839
def choose_directory(message='Choose a directory', path="", parent=None):
    "Show a dialog to choose a directory"
    return dialogs.directoryDialog(parent, message, path).path
[ "def", "choose_directory", "(", "message", "=", "'Choose a directory'", ",", "path", "=", "\"\"", ",", "parent", "=", "None", ")", ":", "result", "=", "dialogs", ".", "directoryDialog", "(", "parent", ",", "message", ",", "path", ")", "return", "result", "...
49.5
18.5
def get_drug(drug_name: str,
             name_is_generic: bool = False,
             include_categories: bool = False) -> Optional[Drug]:
    """
    Converts a drug name to a :class:`.Drug` class.

    If you already have the generic name, you can get the Drug more
    efficiently by setting ``name_is_generic=True``.

    Set ``include_categories=True`` to include drug categories (such as
    tricyclics) as well as individual drugs.
    """
    lookup = drug_name.strip().lower()
    if not name_is_generic:
        # Slow path: scan all drugs for any matching name/alias.
        for candidate in DRUGS:
            if candidate.name_matches(lookup):
                return candidate
        return None
    # Fast path: direct lookup by generic name.
    drug = DRUGS_BY_GENERIC_NAME.get(lookup)  # type: Optional[Drug]
    if drug is None:
        return None
    if drug.category_not_drug and not include_categories:
        return None
    return drug
[ "def", "get_drug", "(", "drug_name", ":", "str", ",", "name_is_generic", ":", "bool", "=", "False", ",", "include_categories", ":", "bool", "=", "False", ")", "->", "Optional", "[", "Drug", "]", ":", "drug_name", "=", "drug_name", ".", "strip", "(", ")",...
35.73913
18.782609
def _download_py2(link, path, __hdr__):
    """Download a file from a link in Python 2.

    :param link: URL to fetch
    :param path: local filesystem path to write the response body to
    :param __hdr__: dict of HTTP headers to send with the request
    :raises Exception: if opening the URL fails
    """
    try:
        req = urllib2.Request(link, headers=__hdr__)
        u = urllib2.urlopen(req)
    except Exception as e:
        raise Exception(' Download failed with the error:\n{}'.format(e))
    # BUG FIX: the response handle was previously closed only on the success
    # path, leaking the connection if opening or writing the file failed.
    try:
        with open(path, 'wb') as outf:
            for chunk in u:
                outf.write(chunk)
    finally:
        u.close()
[ "def", "_download_py2", "(", "link", ",", "path", ",", "__hdr__", ")", ":", "try", ":", "req", "=", "urllib2", ".", "Request", "(", "link", ",", "headers", "=", "__hdr__", ")", "u", "=", "urllib2", ".", "urlopen", "(", "req", ")", "except", "Exceptio...
30.916667
17.416667
def chunked(iterable, n):
    """Break an iterable into lists of a given length::

        >>> list(chunked([1, 2, 3, 4, 5, 6, 7], 3))
        [[1, 2, 3], [4, 5, 6], [7]]

    If the length of ``iterable`` is not evenly divisible by ``n``, the last
    returned list will be shorter.

    This is useful for splitting up a computation on a large number of keys
    into batches, to be pickled and sent off to worker processes. One example
    is operations on rows in MySQL, which does not implement server-side
    cursors properly and would otherwise load the entire dataset into RAM on
    the client.

    Taken from more_itertools
    """
    source = iter(iterable)
    # Repeatedly grab the next n items until an empty batch signals exhaustion.
    grab_batch = functools.partial(take, n, source)
    return iter(grab_batch, [])
[ "def", "chunked", "(", "iterable", ",", "n", ")", ":", "return", "iter", "(", "functools", ".", "partial", "(", "take", ",", "n", ",", "iter", "(", "iterable", ")", ")", ",", "[", "]", ")" ]
36.526316
24.631579
def file_list(*packages, **kwargs):  # pylint: disable=unused-argument
    '''
    List the files that belong to a package. Not specifying any packages will
    return a list of _every_ file on the system's package database (not
    generally recommended).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.file_list httpd
        salt '*' pkg.file_list httpd postfix
        salt '*' pkg.file_list
    '''
    output = file_dict(*packages)
    # Flatten the per-package file lists into one list.
    all_files = []
    for package_files in output['packages'].values():
        all_files.extend(package_files)
    return {'errors': output['errors'], 'files': all_files}
[ "def", "file_list", "(", "*", "packages", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=unused-argument", "output", "=", "file_dict", "(", "*", "packages", ")", "files", "=", "[", "]", "for", "package", "in", "list", "(", "output", "[", "'packages'...
31.263158
22.421053
def _draw_multiclass(self): """ Draw the precision-recall curves in the multiclass case """ # TODO: handle colors better with a mapping and user input if self.per_class: for cls in self.classes_: precision = self.precision_[cls] recall = self.recall_[cls] label = "PR for class {} (area={:0.2f})".format(cls, self.score_[cls]) self._draw_pr_curve(recall, precision, label=label) if self.micro: precision = self.precision_[MICRO] recall = self.recall_[MICRO] self._draw_pr_curve(recall, precision) self._draw_ap_score(self.score_[MICRO])
[ "def", "_draw_multiclass", "(", "self", ")", ":", "# TODO: handle colors better with a mapping and user input", "if", "self", ".", "per_class", ":", "for", "cls", "in", "self", ".", "classes_", ":", "precision", "=", "self", ".", "precision_", "[", "cls", "]", "...
36.263158
17
def commitVersion(self):
    ''' return a GithubComponentVersion object for a specific commit if valid
    '''
    import re
    # A spec that is 7-40 hex digits is treated as a commit hash.
    if re.match('^[a-f0-9]{7,40}$', self.tagOrBranchSpec(), re.I):
        return GithubComponentVersion(
            '',
            '',
            _getCommitArchiveURL(self.repo, self.tagOrBranchSpec()),
            self.name,
            cache_key=None
        )
    return None
[ "def", "commitVersion", "(", "self", ")", ":", "import", "re", "commit_match", "=", "re", ".", "match", "(", "'^[a-f0-9]{7,40}$'", ",", "self", ".", "tagOrBranchSpec", "(", ")", ",", "re", ".", "I", ")", "if", "commit_match", ":", "return", "GithubComponen...
34.833333
30.166667
def get_errors(self):
    """Verify that this MAR file is well formed.

    Returns:
        A list of strings describing errors in the MAR file
        None if this MAR file appears well formed.
    """
    problems = []
    # Collect every category of validation error in order.
    for collect in (self._get_signature_errors,
                    self._get_additional_errors,
                    self._get_entry_errors):
        problems.extend(collect())
    return problems or None
[ "def", "get_errors", "(", "self", ")", ":", "errors", "=", "[", "]", "errors", ".", "extend", "(", "self", ".", "_get_signature_errors", "(", ")", ")", "errors", ".", "extend", "(", "self", ".", "_get_additional_errors", "(", ")", ")", "errors", ".", "...
30.5
18
def create(self, name, *args, **kwargs):
    """
    Standard task creation, but first check for the existence of the
    containers, and raise an exception if they don't exist.
    """
    cont = kwargs.get("cont")
    if cont:
        # Verify that it exists. If it doesn't, a NoSuchContainer exception
        # will be raised.
        region = self.api.region_name
        swift_client = self.api.identity.object_store[region].client
        swift_client.get_container(cont)
    return super(ImageTasksManager, self).create(name, *args, **kwargs)
[ "def", "create", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "cont", "=", "kwargs", ".", "get", "(", "\"cont\"", ")", "if", "cont", ":", "# Verify that it exists. If it doesn't, a NoSuchContainer exception", "# will be raised."...
41.142857
15.142857
def _init_transformer(cls, data):
    """Convert input into a QuantumChannel subclass object or Operator object.

    :param data: a QuantumChannel, an object exposing a
        ``to_quantumchannel``/``to_channel`` conversion hook, or anything
        ``Operator`` can interpret as a matrix.
    :return: a QuantumChannel subclass instance or an Operator.
    """
    # If the input is already a QuantumChannel subclass, return it unchanged.
    if isinstance(data, QuantumChannel):
        return data
    if hasattr(data, 'to_quantumchannel'):
        # Preferred hook: an arbitrary object may define its own conversion
        # to any quantum channel subclass.
        # BUG FIX: this branch previously called ``data.to_channel()`` even
        # though it checked for (and documented) 'to_quantumchannel'.
        return data.to_quantumchannel()
    if hasattr(data, 'to_channel'):
        # Legacy hook used by older Aer versions; kept for compatibility
        # until callers are updated to `to_quantumchannel`.
        return data.to_channel()
    # Otherwise try to initialize the input as a regular matrix Operator,
    # which can be converted into a QuantumChannel.
    return Operator(data)
[ "def", "_init_transformer", "(", "cls", ",", "data", ")", ":", "# This handles common conversion for all QuantumChannel subclasses.", "# If the input is already a QuantumChannel subclass it will return", "# the original object", "if", "isinstance", "(", "data", ",", "QuantumChannel",...
55.26087
18.391304
def indent(indent_str=None):
    """
    A complete, standalone indent ruleset.

    Arguments:

    indent_str
        The string used for indentation. Defaults to None, which will
        defer the value used to the one provided by the Dispatcher.

    Returns a zero-argument rule function producing a 'layout_handlers'
    mapping.
    """
    # The rule is a closure so each invocation builds a fresh Indentator
    # (its indentation state is per-use, not shared).
    def indentation_rule():
        inst = Indentator(indent_str)
        return {'layout_handlers': {
            # Block and statement delimiters.
            OpenBlock: layout_handler_openbrace,
            CloseBlock: layout_handler_closebrace,
            EndStatement: layout_handler_semicolon,
            # Spacing tokens.
            Space: layout_handler_space_imply,
            OptionalSpace: layout_handler_space_optional_pretty,
            RequiredSpace: layout_handler_space_imply,
            # Indentation/newline tokens are delegated to the stateful
            # Indentator instance created above.
            Indent: inst.layout_handler_indent,
            Dedent: inst.layout_handler_dedent,
            Newline: inst.layout_handler_newline,
            OptionalNewline: inst.layout_handler_newline_optional,
            # Tuple keys: handlers for specific token sequences.
            (Space, OpenBlock): NotImplemented,
            (Space, EndStatement): layout_handler_semicolon,
            (OptionalSpace, EndStatement): layout_handler_semicolon,
            (Indent, Newline, Dedent): rule_handler_noop,
        }}
    return indentation_rule
[ "def", "indent", "(", "indent_str", "=", "None", ")", ":", "def", "indentation_rule", "(", ")", ":", "inst", "=", "Indentator", "(", "indent_str", ")", "return", "{", "'layout_handlers'", ":", "{", "OpenBlock", ":", "layout_handler_openbrace", ",", "CloseBlock...
38
16.666667
def apply_weight_drop(block, local_param_regex, rate, axes=(),
                      weight_dropout_mode='training'):
    """Apply weight drop to the parameter of a block.

    Parameters
    ----------
    block : Block or HybridBlock
        The block whose parameter is to be applied weight-drop.
    local_param_regex : str
        The regex for parameter names used in the self.params.get(), such as 'weight'.
    rate : float
        Fraction of the input units to drop. Must be a number between 0 and 1.
    axes : tuple of int, default ()
        The axes on which dropout mask is shared. If empty, regular dropout is applied.
    weight_dropout_mode : {'training', 'always'}, default 'training'
        Whether the weight dropout should be applied only at training time, or always be applied.
    """
    if not rate:
        # A zero/None rate is a no-op.
        return

    existing_params = _find_params(block, local_param_regex)
    for (local_param_name, param), \
            (ref_params_list, ref_reg_params_list) in existing_params.items():
        # Wrap the parameter so reads go through weight dropout.
        dropped_param = WeightDropParameter(param, rate, weight_dropout_mode, axes)
        # Rebind the wrapped parameter everywhere the original was referenced.
        for ref_params in ref_params_list:
            ref_params[param.name] = dropped_param
        for ref_reg_params in ref_reg_params_list:
            ref_reg_params[local_param_name] = dropped_param

        if hasattr(block, local_param_name):
            local_attr = getattr(block, local_param_name)
            if local_attr == param:
                local_attr = dropped_param
            elif isinstance(local_attr, (list, tuple)):
                if isinstance(local_attr, tuple):
                    local_attr = list(local_attr)
                for i, v in enumerate(local_attr):
                    if v == param:
                        local_attr[i] = dropped_param
            elif isinstance(local_attr, dict):
                # BUG FIX: iterating a dict directly yields keys only, so the
                # previous ``for k, v in local_attr:`` raised on unpacking.
                for k, v in local_attr.items():
                    if v == param:
                        local_attr[k] = dropped_param
            else:
                continue
            if local_attr:
                # Bypass Block.__setattr__ bookkeeping when rebinding the
                # attribute on the block instance.
                super(Block, block).__setattr__(local_param_name, local_attr)
[ "def", "apply_weight_drop", "(", "block", ",", "local_param_regex", ",", "rate", ",", "axes", "=", "(", ")", ",", "weight_dropout_mode", "=", "'training'", ")", ":", "if", "not", "rate", ":", "return", "existing_params", "=", "_find_params", "(", "block", ",...
49.886076
24.594937
def _get_corresponding_parsers(self, func): """Get the parser that has been set up by the given `function`""" if func in self._used_functions: yield self if self._subparsers_action is not None: for parser in self._subparsers_action.choices.values(): for sp in parser._get_corresponding_parsers(func): yield sp
[ "def", "_get_corresponding_parsers", "(", "self", ",", "func", ")", ":", "if", "func", "in", "self", ".", "_used_functions", ":", "yield", "self", "if", "self", ".", "_subparsers_action", "is", "not", "None", ":", "for", "parser", "in", "self", ".", "_subp...
48.25
11.625
def _split_stock_code(self, code):
    """Split a full stock code such as 'HK.00700' into (market, symbol).

    Returns (RET_OK, (market_str, partial_stock_str)) on success, or
    (RET_ERROR, error_str) when the code is malformed.
    """
    text = str(code)
    dot = text.find(".")
    # Do not use str.split here: some codes (e.g. 'US..DJI') contain a dot
    # as part of the symbol itself, so only the first dot is treated as the
    # separator, and only when the prefix is a known market.
    if 0 <= dot < len(text) - 1 and text[0:dot] in MKT_MAP:
        return RET_OK, (text[0:dot], text[dot + 1:])

    error_str = ERROR_STR_PREFIX + "format of %s is wrong. (US.AAPL, HK.00700, SZ.000001)" % text
    return RET_ERROR, error_str
[ "def", "_split_stock_code", "(", "self", ",", "code", ")", ":", "stock_str", "=", "str", "(", "code", ")", "split_loc", "=", "stock_str", ".", "find", "(", "\".\"", ")", "if", "0", "<=", "split_loc", "<", "len", "(", "stock_str", ")", "-", "1", "and"...
46.4375
23.1875
def get_event_stream(self):
    """Get the event stream associated with this WVA

    Note that this event stream is shared across all users of this
    WVA device as the WVA only supports a single event stream.

    :return: a new :class:`WVAEventStream` instance
    """
    # Lazily create the shared stream on first access.
    stream = self._event_stream
    if stream is None:
        stream = WVAEventStream(self._http_client)
        self._event_stream = stream
    return stream
[ "def", "get_event_stream", "(", "self", ")", ":", "if", "self", ".", "_event_stream", "is", "None", ":", "self", ".", "_event_stream", "=", "WVAEventStream", "(", "self", ".", "_http_client", ")", "return", "self", ".", "_event_stream" ]
38.363636
18.090909
def remove(self, pools):
    """
    Remove Pools Running Script And Update to Not Created

    :param ids: List of ids

    :return: None on success

    :raise ScriptRemovePoolException
    :raise InvalidIdPoolException
    :raise NetworkAPIException
    """
    payload = {"pools": pools}
    return self.delete("api/pools/v2/", payload)
[ "def", "remove", "(", "self", ",", "pools", ")", ":", "data", "=", "dict", "(", ")", "data", "[", "\"pools\"", "]", "=", "pools", "uri", "=", "\"api/pools/v2/\"", "return", "self", ".", "delete", "(", "uri", ",", "data", ")" ]
22.157895
17.947368
def main(ctx, debug, base_config, env_file):  # pragma: no cover
    """
    \b
     _____     _             _
    |     |___| |___ ___ _ _| |___
    | | | | . | | -_|  _| | | | -_|
    |_|_|_|___|_|___|___|___|_|___|

    Molecule aids in the development and testing of Ansible roles.

    Enable autocomplete issue:

      eval "$(_MOLECULE_COMPLETE=source molecule)"
    """
    # Stash the global CLI options on the click context for subcommands.
    ctx.obj = {
        'args': {
            'debug': debug,
            'base_config': base_config,
            'env_file': env_file,
        }
    }
[ "def", "main", "(", "ctx", ",", "debug", ",", "base_config", ",", "env_file", ")", ":", "# pragma: no cover", "ctx", ".", "obj", "=", "{", "}", "ctx", ".", "obj", "[", "'args'", "]", "=", "{", "}", "ctx", ".", "obj", "[", "'args'", "]", "[", "'de...
27.894737
16
def make_server(host, port, app=None, server_class=AsyncWsgiServer, handler_class=AsyncWsgiHandler, ws_handler_class=None, ws_path='/ws'): """Create server instance with an optional WebSocket handler For pure WebSocket server ``app`` may be ``None`` but an attempt to access any path other than ``ws_path`` will cause server error. :param host: hostname or IP :type host: str :param port: server port :type port: int :param app: WSGI application :param server_class: WSGI server class, defaults to AsyncWsgiServer :param handler_class: WSGI handler class, defaults to AsyncWsgiHandler :param ws_handler_class: WebSocket hanlder class, defaults to ``None`` :param ws_path: WebSocket path on the server, defaults to '/ws' :type ws_path: str, optional :return: initialized server instance """ handler_class.ws_handler_class = ws_handler_class handler_class.ws_path = ws_path httpd = server_class((host, port), RequestHandlerClass=handler_class) httpd.set_app(app) return httpd
[ "def", "make_server", "(", "host", ",", "port", ",", "app", "=", "None", ",", "server_class", "=", "AsyncWsgiServer", ",", "handler_class", "=", "AsyncWsgiHandler", ",", "ws_handler_class", "=", "None", ",", "ws_path", "=", "'/ws'", ")", ":", "handler_class", ...
40.62963
16.962963
def uncompress(payload): """ Given a compressed ec key in bytes, uncompress it using math and return (x, y) """ payload = hexlify(payload) even = payload[:2] == b"02" x = int(payload[2:], 16) beta = pow(int(x ** 3 + A * x + B), int((P + 1) // 4), int(P)) y = (P-beta) if even else beta return x, y
[ "def", "uncompress", "(", "payload", ")", ":", "payload", "=", "hexlify", "(", "payload", ")", "even", "=", "payload", "[", ":", "2", "]", "==", "b\"02\"", "x", "=", "int", "(", "payload", "[", "2", ":", "]", ",", "16", ")", "beta", "=", "pow", ...
32.4
14.6
def multiplication_circuit(nbit, vartype=dimod.BINARY): """Multiplication circuit constraint satisfaction problem. A constraint satisfaction problem that represents the binary multiplication :math:`ab=p`, where the multiplicands are binary variables of length `nbit`; for example, :math:`a_0 + 2a_1 + 4a_2 +... +2^ma_{nbit}`. The square below shows a graphic representation of the circuit:: ________________________________________________________________________________ | and20 and10 and00 | | | | | | | and21 add11──and11 add01──and01 | | | |β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜| | | | and22 add12──and12 add02──and02 | | | | |β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜| | | | | add13─────────add03 | | | | | β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜| | | | | | | p5 p4 p3 p2 p1 p0 | -------------------------------------------------------------------------------- Args: nbit (int): Number of bits in the multiplicands. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} Returns: CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when variables :math:`a,b,p` are assigned values that correctly solve binary multiplication :math:`ab=p`. Examples: This example creates a multiplication circuit CSP that multiplies two 3-bit numbers, which is then formulated as a binary quadratic model (BQM). It fixes the multiplacands as :math:`a=5, b=6` (:math:`101` and :math:`110`) and uses a simulated annealing sampler to find the product, :math:`p=30` (:math:`111100`). 
>>> import dwavebinarycsp >>> from dwavebinarycsp.factories.csp.circuits import multiplication_circuit >>> import neal >>> csp = multiplication_circuit(3) >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.fix_variable('a0', 1); bqm.fix_variable('a1', 0); bqm.fix_variable('a2', 1) >>> bqm.fix_variable('b0', 1); bqm.fix_variable('b1', 1); bqm.fix_variable('b2', 0) >>> sampler = neal.SimulatedAnnealingSampler() >>> response = sampler.sample(bqm) >>> p = next(response.samples(n=1, sorted_by='energy')) >>> print(p['p0'], p['p1'], p['p2'], p['p3'], p['p4'], p['p5']) # doctest: +SKIP 1 1 1 1 0 0 """ if nbit < 1: raise ValueError("num_multiplier_bits, num_multiplicand_bits must be positive integers") num_multiplier_bits = num_multiplicand_bits = nbit # also checks the vartype argument csp = ConstraintSatisfactionProblem(vartype) # throughout, we will use the following convention: # i to refer to the bits of the multiplier # j to refer to the bits of the multiplicand # k to refer to the bits of the product # create the variables corresponding to the input and output wires for the circuit a = {i: 'a%d' % i for i in range(nbit)} b = {j: 'b%d' % j for j in range(nbit)} p = {k: 'p%d' % k for k in range(nbit + nbit)} # we will want to store the internal variables somewhere AND = defaultdict(dict) # the output of the AND gate associated with ai, bj is stored in AND[i][j] SUM = defaultdict(dict) # the sum of the ADDER gate associated with ai, bj is stored in SUM[i][j] CARRY = defaultdict(dict) # the carry of the ADDER gate associated with ai, bj is stored in CARRY[i][j] # we follow a shift adder for i in range(num_multiplier_bits): for j in range(num_multiplicand_bits): ai = a[i] bj = b[j] if i == 0 and j == 0: # in this case there are no inputs from lower bits, so our only input is the AND # gate. 
And since we only have one bit to add, we don't need an adder, no have a # carry out andij = AND[i][j] = p[0] gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij)) csp.add_constraint(gate) continue # we always need an AND gate andij = AND[i][j] = 'and%s,%s' % (i, j) gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij)) csp.add_constraint(gate) # the number of inputs will determine the type of adder inputs = [andij] # determine if there is a carry in if i - 1 in CARRY and j in CARRY[i - 1]: inputs.append(CARRY[i - 1][j]) # determine if there is a sum in if i - 1 in SUM and j + 1 in SUM[i - 1]: inputs.append(SUM[i - 1][j + 1]) # ok, add create adders if necessary if len(inputs) == 1: # we don't need an adder and we don't have a carry SUM[i][j] = andij elif len(inputs) == 2: # we need a HALFADDER so we have a sum and a carry if j == 0: sumij = SUM[i][j] = p[i] else: sumij = SUM[i][j] = 'sum%d,%d' % (i, j) carryij = CARRY[i][j] = 'carry%d,%d' % (i, j) name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumij, carryij) gate = halfadder_gate([inputs[0], inputs[1], sumij, carryij], vartype=vartype, name=name) csp.add_constraint(gate) else: assert len(inputs) == 3, 'unexpected number of inputs' # we need a FULLADDER so we have a sum and a carry if j == 0: sumij = SUM[i][j] = p[i] else: sumij = SUM[i][j] = 'sum%d,%d' % (i, j) carryij = CARRY[i][j] = 'carry%d,%d' % (i, j) name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumij, carryij) gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumij, carryij], vartype=vartype, name=name) csp.add_constraint(gate) # now we have a final row of full adders for col in range(nbit - 1): inputs = [CARRY[nbit - 1][col], SUM[nbit - 1][col + 1]] if col == 0: sumout = p[nbit + col] carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col) name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumout, carryout) gate = 
halfadder_gate([inputs[0], inputs[1], sumout, carryout], vartype=vartype, name=name) csp.add_constraint(gate) continue inputs.append(CARRY[nbit][col - 1]) sumout = p[nbit + col] if col < nbit - 2: carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col) else: carryout = p[2 * nbit - 1] name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumout, carryout) gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumout, carryout], vartype=vartype, name=name) csp.add_constraint(gate) return csp
[ "def", "multiplication_circuit", "(", "nbit", ",", "vartype", "=", "dimod", ".", "BINARY", ")", ":", "if", "nbit", "<", "1", ":", "raise", "ValueError", "(", "\"num_multiplier_bits, num_multiplicand_bits must be positive integers\"", ")", "num_multiplier_bits", "=", "...
43.395349
28.680233
def _insertFont(self, fontname, bfname, fontfile, fontbuffer, set_simple, idx, wmode, serif, encoding, ordering): """_insertFont(self, fontname, bfname, fontfile, fontbuffer, set_simple, idx, wmode, serif, encoding, ordering) -> PyObject *""" return _fitz.Page__insertFont(self, fontname, bfname, fontfile, fontbuffer, set_simple, idx, wmode, serif, encoding, ordering)
[ "def", "_insertFont", "(", "self", ",", "fontname", ",", "bfname", ",", "fontfile", ",", "fontbuffer", ",", "set_simple", ",", "idx", ",", "wmode", ",", "serif", ",", "encoding", ",", "ordering", ")", ":", "return", "_fitz", ".", "Page__insertFont", "(", ...
127.666667
55.666667
def get_price(self): """ Shortcut to self.get_ticks(lookback=1, as_dict=True)['last'] """ tick = self.get_ticks(lookback=1, as_dict=True) return None if tick is None else tick['last']
[ "def", "get_price", "(", "self", ")", ":", "tick", "=", "self", ".", "get_ticks", "(", "lookback", "=", "1", ",", "as_dict", "=", "True", ")", "return", "None", "if", "tick", "is", "None", "else", "tick", "[", "'last'", "]" ]
51
12
def get_structure_by_material_id(self, material_id, final=True, conventional_unit_cell=False): """ Get a Structure corresponding to a material_id. Args: material_id (str): Materials Project material_id (a string, e.g., mp-1234). final (bool): Whether to get the final structure, or the initial (pre-relaxation) structure. Defaults to True. conventional_unit_cell (bool): Whether to get the standard conventional unit cell Returns: Structure object. """ prop = "final_structure" if final else "initial_structure" data = self.get_data(material_id, prop=prop) if conventional_unit_cell: data[0][prop] = SpacegroupAnalyzer(data[0][prop]). \ get_conventional_standard_structure() return data[0][prop]
[ "def", "get_structure_by_material_id", "(", "self", ",", "material_id", ",", "final", "=", "True", ",", "conventional_unit_cell", "=", "False", ")", ":", "prop", "=", "\"final_structure\"", "if", "final", "else", "\"initial_structure\"", "data", "=", "self", ".", ...
41.318182
19.5
def function_arguments(func): """ This returns a list of all arguments :param func: callable object :return: list of str of the arguments for the function """ if getattr(inspect, 'signature', None) is None: return list(inspect.getargspec(func).args) else: return list(inspect.signature(func).parameters.keys())
[ "def", "function_arguments", "(", "func", ")", ":", "if", "getattr", "(", "inspect", ",", "'signature'", ",", "None", ")", "is", "None", ":", "return", "list", "(", "inspect", ".", "getargspec", "(", "func", ")", ".", "args", ")", "else", ":", "return"...
34.5
11.1
def _maybe_coerce_values(self, values): """Input validation for values passed to __init__. Ensure that we have datetime64TZ, coercing if necessary. Parametetrs ----------- values : array-like Must be convertible to datetime64 Returns ------- values : DatetimeArray """ if not isinstance(values, self._holder): values = self._holder(values) if values.tz is None: raise ValueError("cannot create a DatetimeTZBlock without a tz") return values
[ "def", "_maybe_coerce_values", "(", "self", ",", "values", ")", ":", "if", "not", "isinstance", "(", "values", ",", "self", ".", "_holder", ")", ":", "values", "=", "self", ".", "_holder", "(", "values", ")", "if", "values", ".", "tz", "is", "None", ...
27.85
18.4