text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def clean_error(err):
    """Turn raw MicroPython stderr bytes into a short, human-readable message.

    Args:
        err (bytes): Raw stderr captured from the device; may be empty or None.

    Returns:
        str: The last meaningful traceback line when one can be extracted,
        the whole decoded text as a fallback, or a generic message when
        there is no output at all.
    """
    if err:
        # errors='replace' prevents a crash on non-UTF-8 bytes from the device.
        decoded = err.decode('utf-8', errors='replace')
        try:
            # Tracebacks end with '...\r\n<error line>\r\n'; the second-to-last
            # element of the split is the actual error line.
            return decoded.split('\r\n')[-2]
        except IndexError:
            # Output had no '\r\n'-terminated lines; return it verbatim.
            return decoded
    return 'There was an error.'
[ "def", "clean_error", "(", "err", ")", ":", "if", "err", ":", "decoded", "=", "err", ".", "decode", "(", "'utf-8'", ")", "try", ":", "return", "decoded", ".", "split", "(", "'\\r\\n'", ")", "[", "-", "2", "]", "except", "Exception", ":", "return", ...
26.916667
13.416667
def list(cls, params=None):
    """
    Retrieves a list of the model

    :param params: params as dictionary
    :type params: dict
    :return: the list of the parsed xml objects
    :rtype: list
    """
    response = cls._get_request(params=params)
    root = cls.element_from_string(response.text)
    return fields.ListField(name=cls.ENDPOINT, init_class=cls).decode(root)
[ "def", "list", "(", "cls", ",", "params", "=", "None", ")", ":", "return", "fields", ".", "ListField", "(", "name", "=", "cls", ".", "ENDPOINT", ",", "init_class", "=", "cls", ")", ".", "decode", "(", "cls", ".", "element_from_string", "(", "cls", "....
31.833333
16.833333
def _init_dflt(self):
    """Build one namedtuple per (item id, GO id) annotation pair."""
    ntobj = cx.namedtuple('ntanno', self.flds)
    return [
        ntobj(DB_ID=itemid, GO_ID=goid)
        for itemid, gos in self.id2gos.items()
        for goid in gos
    ]
[ "def", "_init_dflt", "(", "self", ")", ":", "nts", "=", "[", "]", "ntobj", "=", "cx", ".", "namedtuple", "(", "'ntanno'", ",", "self", ".", "flds", ")", "for", "itemid", ",", "gos", "in", "self", ".", "id2gos", ".", "items", "(", ")", ":", "for",...
38
14.125
def update_contact_of_client(self, contact_id, contact_dict):
    """
    Updates a contact

    :param contact_id: the id of the contact
    :param contact_dict: dict
    :return: dict
    """
    return self._create_put_request(
        resource=CONTACTS,
        billomat_id=contact_id,
        send_data=contact_dict,
    )
[ "def", "update_contact_of_client", "(", "self", ",", "contact_id", ",", "contact_dict", ")", ":", "return", "self", ".", "_create_put_request", "(", "resource", "=", "CONTACTS", ",", "billomat_id", "=", "contact_id", ",", "send_data", "=", "contact_dict", ")" ]
35.111111
19.555556
def snapshot_table(
    self,
    name,
    cluster,
    snapshot_id,
    description,
    ttl=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Creates a new snapshot in the specified cluster from the specified
    source table. The cluster and the table must be in the same instance.

    Note: This is a private alpha release of Cloud Bigtable snapshots. This
    feature is not currently available to most Cloud Bigtable customers. This
    feature might be changed in backward-incompatible ways and is not
    recommended for production use. It is not subject to any SLA or
    deprecation policy.

    Args:
        name (str): The unique name of the table to snapshot, of the form
            ``projects/<project>/instances/<instance>/tables/<table>``.
        cluster (str): The cluster where the snapshot will be created, of the
            form ``projects/<project>/instances/<instance>/clusters/<cluster>``.
        snapshot_id (str): The ID of the new snapshot within the parent
            cluster, e.g. ``mysnapshot`` (``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``).
        description (str): Description of the snapshot.
        ttl (Union[dict, ~google.cloud.bigtable_admin_v2.types.Duration]):
            How long the snapshot stays active before being deleted;
            defaults to 24 hours when unset, maximum 7 days. A dict must
            match the ``Duration`` protobuf message.
        retry (Optional[google.api_core.retry.Retry]): Retry policy for the
            request; ``None`` disables retrying.
        timeout (Optional[float]): Time in seconds to wait for the request;
            applies per attempt when ``retry`` is set.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture`
        instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed
            for any reason.
        google.api_core.exceptions.RetryError: If the request failed due to a
            retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    # The wrapped callable is cached so the wrapping happens only once.
    if "snapshot_table" not in self._inner_api_calls:
        self._inner_api_calls[
            "snapshot_table"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.snapshot_table,
            default_retry=self._method_configs["SnapshotTable"].retry,
            default_timeout=self._method_configs["SnapshotTable"].timeout,
            client_info=self._client_info,
        )

    request = bigtable_table_admin_pb2.SnapshotTableRequest(
        name=name,
        cluster=cluster,
        snapshot_id=snapshot_id,
        description=description,
        ttl=ttl,
    )
    # Copy caller metadata so the routing header can be appended safely.
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    # Attach a gRPC routing header derived from the resource name so the
    # backend can route the request to the right location.
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    operation = self._inner_api_calls["snapshot_table"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
    # Wrap the raw long-running operation in a future that resolves to a
    # Snapshot message.
    return google.api_core.operation.from_gapic(
        operation,
        self.transport._operations_client,
        table_pb2.Snapshot,
        metadata_type=bigtable_table_admin_pb2.SnapshotTableMetadata,
    )
[ "def", "snapshot_table", "(", "self", ",", "name", ",", "cluster", ",", "snapshot_id", ",", "description", ",", "ttl", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google...
43.728
25.36
def make_request(url, data, on_complete):
    """Fire an asynchronous POST request and register a completion callback.

    Args:
        url (str): URL.
        data (dict): Dictionary with POST data.
        on_complete (ref): Reference to function / method invoked once the
            request finishes.
    """
    request = ajax.ajax()
    request.bind('complete', on_complete)
    request.open('POST', url, True)
    request.set_header('content-type', 'application/x-www-form-urlencoded')
    request.send(data)
[ "def", "make_request", "(", "url", ",", "data", ",", "on_complete", ")", ":", "req", "=", "ajax", ".", "ajax", "(", ")", "req", ".", "bind", "(", "'complete'", ",", "on_complete", ")", "req", ".", "open", "(", "'POST'", ",", "url", ",", "True", ")"...
32.9375
16.6875
async def initialize(bot: Bot, host, password, rest_port, ws_port, timeout=30):
    """
    Initializes the websocket connection to the lavalink player.

    .. important::

        This function must only be called AFTER the bot has received its
        "on_ready" event!

    Parameters
    ----------
    bot : Bot
        An instance of a discord.py `Bot` object.
    host : str
        The hostname or IP address of the Lavalink node.
    password : str
        The password of the Lavalink node.
    rest_port : int
        The port of the REST API on the Lavalink node.
    ws_port : int
        The websocket port on the Lavalink Node.
    timeout : int
        Amount of time to allow retries to occur, ``None`` is considered forever.
    """
    global _loop
    _loop = bot.loop

    # Wire the player manager and internal event/update handlers to the bot.
    player_manager.user_id = bot.user.id
    player_manager.channel_finder_func = bot.get_channel
    register_event_listener(_handle_event)
    register_update_listener(_handle_update)

    shard_total = 1 if bot.shard_count is None else bot.shard_count
    created_node = node.Node(
        _loop,
        dispatch,
        bot._connection._get_websocket,
        host,
        password,
        port=ws_port,
        rest=rest_port,
        user_id=player_manager.user_id,
        num_shards=shard_total,
    )
    await created_node.connect(timeout=timeout)

    bot.add_listener(node.on_socket_response)
    bot.add_listener(_on_guild_remove, name="on_guild_remove")
    return created_node
[ "async", "def", "initialize", "(", "bot", ":", "Bot", ",", "host", ",", "password", ",", "rest_port", ",", "ws_port", ",", "timeout", "=", "30", ")", ":", "global", "_loop", "_loop", "=", "bot", ".", "loop", "player_manager", ".", "user_id", "=", "bot"...
28.32
21.36
def add_item(self, sqla_col_type, item, key_specific=None):
    """Register *item* under *sqla_col_type*, optionally inside a
    key-specific sub-registry."""
    if key_specific is None:
        self[sqla_col_type] = item
    else:
        self.setdefault(key_specific, {})[sqla_col_type] = item
[ "def", "add_item", "(", "self", ",", "sqla_col_type", ",", "item", ",", "key_specific", "=", "None", ")", ":", "if", "key_specific", "is", "not", "None", ":", "self", ".", "setdefault", "(", "key_specific", ",", "{", "}", ")", "[", "sqla_col_type", "]", ...
33.75
10.5
def wrap_str(self, text, strchar, multiline=False):
    """Wrap a string."""
    delim = strchar * 3 if multiline else strchar
    return strwrapper + self.add_ref("str", (text, delim)) + unwrapper
[ "def", "wrap_str", "(", "self", ",", "text", ",", "strchar", ",", "multiline", "=", "False", ")", ":", "if", "multiline", ":", "strchar", "*=", "3", "return", "strwrapper", "+", "self", ".", "add_ref", "(", "\"str\"", ",", "(", "text", ",", "strchar", ...
40
16.4
def resolved_path(path, base=None):
    """
    Args:
        path (str | unicode | None): Path to resolve
        base (str | unicode | None): Base path to use to resolve relative paths (default: current working dir)

    Returns:
        (str): Absolute path
    """
    # Empty/None paths and symbolic temp markers pass through untouched.
    if not path or path.startswith(SYMBOLIC_TMP):
        return path
    expanded = os.path.expanduser(path)
    if base and not os.path.isabs(expanded):
        # Resolve relative paths against the (recursively resolved) base.
        return os.path.join(resolved_path(base), expanded)
    return os.path.abspath(expanded)
[ "def", "resolved_path", "(", "path", ",", "base", "=", "None", ")", ":", "if", "not", "path", "or", "path", ".", "startswith", "(", "SYMBOLIC_TMP", ")", ":", "return", "path", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "if",...
28.764706
19.705882
def for_web(self, data):
    """
    Convert data to web output (raster only).

    Parameters
    ----------
    data : array

    Returns
    -------
    web data : array
    """
    prepared = prepare_array(
        data, masked=True, nodata=self.nodata, dtype=self.profile()["dtype"]
    )
    return memory_file(prepared, self.profile()), "image/tiff"
[ "def", "for_web", "(", "self", ",", "data", ")", ":", "return", "memory_file", "(", "prepare_array", "(", "data", ",", "masked", "=", "True", ",", "nodata", "=", "self", ".", "nodata", ",", "dtype", "=", "self", ".", "profile", "(", ")", "[", "\"dtyp...
22.5
20.166667
def create_layout_params(self, child, layout):
    """ Override as there is no (width, height) constructor.

    """
    from .android_fragment import AndroidFragment
    if isinstance(child, AndroidFragment):
        return super(AndroidViewPager, self).create_layout_params(child, layout)

    # Only applies to decor views: the params object has no (w, h)
    # constructor, so each field is assigned individually.
    dp = self.dp
    width = coerce_size(layout.get('width', 'match_parent'))
    height = coerce_size(layout.get('height', 'wrap_content'))
    # Negative values are special Android constants; only scale real sizes.
    if width >= 0:
        width = int(width * dp)
    if height >= 0:
        height = int(height * dp)
    params = ViewPagerLayoutParams()
    params.width = width
    params.height = height
    params.isDecor = True
    return params
[ "def", "create_layout_params", "(", "self", ",", "child", ",", "layout", ")", ":", "from", ".", "android_fragment", "import", "AndroidFragment", "if", "isinstance", "(", "child", ",", "AndroidFragment", ")", ":", "return", "super", "(", "AndroidViewPager", ",", ...
40.7
13.95
def fw_rule_delete(self, data, fw_name=None):
    """Top level rule delete function.

    Logs the request and delegates the actual removal to the internal
    ``_fw_rule_delete`` helper.

    :param data: rule payload to delete (schema defined by the helper --
        TODO confirm against _fw_rule_delete)
    :param fw_name: optional firewall name, passed through to the helper
    """
    LOG.debug("FW Rule delete %s", data)
    self._fw_rule_delete(fw_name, data)
[ "def", "fw_rule_delete", "(", "self", ",", "data", ",", "fw_name", "=", "None", ")", ":", "LOG", ".", "debug", "(", "\"FW Rule delete %s\"", ",", "data", ")", "self", ".", "_fw_rule_delete", "(", "fw_name", ",", "data", ")" ]
44.5
3
def requirements(ctx):
    """Write the `requirements-agent-release.txt` file at the root of the repo
    listing all the Agent-based integrations pinned at the version they
    currently have in HEAD.
    """
    echo_info('Freezing check releases')
    checks = get_valid_checks()
    checks.remove('datadog_checks_dev')

    entries = []
    for check in checks:
        # Integrations that only ship with Agent 5 are excluded.
        if check in AGENT_V5_ONLY:
            echo_info('Check `{}` is only shipped with Agent 5, skipping'.format(check))
            continue
        try:
            version = get_version_string(check)
            entries.append('{}\n'.format(get_agent_requirement_line(check, version)))
        except Exception as e:
            echo_failure('Error generating line: {}'.format(e))
            continue

    req_file = get_agent_release_requirements()
    write_file_lines(req_file, sorted(entries))
    echo_success('Successfully wrote to `{}`!'.format(req_file))
[ "def", "requirements", "(", "ctx", ")", ":", "echo_info", "(", "'Freezing check releases'", ")", "checks", "=", "get_valid_checks", "(", ")", "checks", ".", "remove", "(", "'datadog_checks_dev'", ")", "entries", "=", "[", "]", "for", "check", "in", "checks", ...
34.407407
20.222222
def instruction_BSR_JSR(self, opcode, ea): """ Program control is transferred to the effective address after storing the return address on the hardware stack. A return from subroutine (RTS) instruction is used to reverse this process and must be the last instruction executed in a subroutine. source code forms: BSR dd; LBSR DDDD; JSR EA CC bits "HNZVC": ----- """ # log.info("%x|\tJSR/BSR to $%x \t| %s" % ( # self.last_op_address, # ea, self.cfg.mem_info.get_shortest(ea) # )) self.push_word(self.system_stack_pointer, self.program_counter.value) self.program_counter.set(ea)
[ "def", "instruction_BSR_JSR", "(", "self", ",", "opcode", ",", "ea", ")", ":", "# log.info(\"%x|\\tJSR/BSR to $%x \\t| %s\" % (", "# self.last_op_address,", "# ea, self.cfg.mem_info.get_shortest(ea)", "# ))", "self", ".", "push_word", "(", "sel...
37.722222
19.722222
def fetch(self):
    """Fetch the recent refs from the remotes.

    Unless ``git-up.fetch.all`` is set to true, only remotes with locally
    existent branches will be fetched.
    """
    kwargs = {'multiple': True}
    if self.is_prune():
        kwargs['prune'] = True

    args = []
    if self.settings['fetch.all']:
        kwargs['all'] = True
    else:
        if '.' in self.remotes:
            self.remotes.remove('.')
        if not self.remotes:
            # Only local target branches,
            # `git fetch --multiple` will fail
            return
        args.append(self.remotes)

    try:
        self.git.fetch(*args, **kwargs)
    except GitError as error:
        error.message = "`git fetch` failed"
        raise error
[ "def", "fetch", "(", "self", ")", ":", "fetch_kwargs", "=", "{", "'multiple'", ":", "True", "}", "fetch_args", "=", "[", "]", "if", "self", ".", "is_prune", "(", ")", ":", "fetch_kwargs", "[", "'prune'", "]", "=", "True", "if", "self", ".", "settings...
29.129032
15.645161
def levels(self):
    """
    Get the factor levels.

    :returns: A list of lists, one list per column, of levels.
    """
    frame = H2OFrame._expr(expr=ExprNode("levels", self)).as_data_frame(False)
    frame.pop(0)  # drop the column-header row
    columns = zip(*frame)
    # Strip empty-string padding from each column's level list.
    return [[lvl for lvl in column if lvl != ''] for column in columns]
[ "def", "levels", "(", "self", ")", ":", "lol", "=", "H2OFrame", ".", "_expr", "(", "expr", "=", "ExprNode", "(", "\"levels\"", ",", "self", ")", ")", ".", "as_data_frame", "(", "False", ")", "lol", ".", "pop", "(", "0", ")", "# Remove column headers", ...
34.5
17.1
def normalize_timedelta(timedelta):
    """
    Given a string like "1w" or "-5d", convert it to an integer in
    milliseconds. Integers without a suffix are interpreted as seconds.

    Note: not related to the datetime timedelta class.

    :param timedelta: a value such as ``"90"``, ``"1w"`` or ``"-5d"``
    :return: the duration in milliseconds (int)
    :raises ValueError: if the suffix is not one of s/m/h/d/w/M/y or the
        numeric part is not an integer
    """
    suffix_multipliers = {'s': 1000, 'm': 1000 * 60, 'h': 1000 * 60 * 60,
                          'd': 1000 * 60 * 60 * 24,
                          'w': 1000 * 60 * 60 * 24 * 7,
                          'M': 1000 * 60 * 60 * 24 * 30,
                          'y': 1000 * 60 * 60 * 24 * 365}
    try:
        # No suffix: the whole string is an integer number of seconds.
        return int(timedelta) * 1000
    except ValueError:
        amount, suffix = timedelta[:-1], timedelta[-1:]
        if suffix not in suffix_multipliers:
            # The original raised a bare ValueError() with no message.
            raise ValueError("invalid timedelta suffix in %r" % (timedelta,))
        return int(amount) * suffix_multipliers[suffix]
[ "def", "normalize_timedelta", "(", "timedelta", ")", ":", "try", ":", "return", "int", "(", "timedelta", ")", "*", "1000", "except", "ValueError", "as", "e", ":", "t", ",", "suffix", "=", "timedelta", "[", ":", "-", "1", "]", ",", "timedelta", "[", "...
44.933333
17.866667
def _set_labels(node, apiserver_url, labels):
    '''Replace labels dict by a new one'''
    url = "{0}/api/v1/nodes/{1}".format(apiserver_url, node)
    # JSON-patch payload replacing the whole labels object at once.
    payload = [{"op": "replace", "path": "/metadata/labels", "value": labels}]
    ret = _kpatch(url, payload)
    if ret.get("status") == 404:
        return "Node {0} doesn't exist".format(node)
    return ret
[ "def", "_set_labels", "(", "node", ",", "apiserver_url", ",", "labels", ")", ":", "# Prepare URL", "url", "=", "\"{0}/api/v1/nodes/{1}\"", ".", "format", "(", "apiserver_url", ",", "node", ")", "# Prepare data", "data", "=", "[", "{", "\"op\"", ":", "\"replace...
36.454545
17
def resize_lazy(image, width=None, height=None, crop=False, force=False,
                namespace="resized", storage=default_storage, as_url=False):
    """
    Returns the name of the resized file, or its url when as_url is True.
    """
    # Normalize the params to determine which file variant is wanted.
    width, height, crop = _normalize_params(image, width, height, crop)
    # Name the resized image would have; used for the existence check below.
    name = _get_resized_name(image, width, height, crop, namespace)
    # Prefer the image's own storage when it defines one.
    try:
        storage = image.storage
    except AttributeError:
        pass

    if force or not storage.exists(name):
        resized_image = None
        try:
            resized_image = resize(image, width, height, crop)
            name = storage.save(name, resized_image)
        finally:
            # Close the intermediate image even when resize/save fails.
            if resized_image is not None:
                resized_image.close()

    return storage.url(name) if as_url else name
[ "def", "resize_lazy", "(", "image", ",", "width", "=", "None", ",", "height", "=", "None", ",", "crop", "=", "False", ",", "force", "=", "False", ",", "namespace", "=", "\"resized\"", ",", "storage", "=", "default_storage", ",", "as_url", "=", "False", ...
33.129032
20.419355
def queue(self, queue, message, params=None, uids=None):
    """
    Queue a job in Rhumba

    :param queue: name of the queue
    :param message: message identifier for the job
    :param params: optional dict of job parameters
    :param uids: optional list of specific worker uids to target
    :return: (via the deferred) the generated job id
    """
    # BUG FIX: the defaults were the mutable literals {} and []; a shared
    # default dict/list would leak state between calls. Use None sentinels.
    if params is None:
        params = {}
    if uids is None:
        uids = []
    d = {
        'id': uuid.uuid1().get_hex(),
        'version': 1,
        'message': message,
        'params': params
    }
    if uids:
        # Targeted delivery: one direct-queue push per requested uid.
        for uid in uids:
            yield self.client.lpush('rhumba.dq.%s.%s' % (
                uid, queue), json.dumps(d))
    else:
        yield self.client.lpush('rhumba.q.%s' % queue, json.dumps(d))
    defer.returnValue(d['id'])
[ "def", "queue", "(", "self", ",", "queue", ",", "message", ",", "params", "=", "{", "}", ",", "uids", "=", "[", "]", ")", ":", "d", "=", "{", "'id'", ":", "uuid", ".", "uuid1", "(", ")", ".", "get_hex", "(", ")", ",", "'version'", ":", "1", ...
27.473684
17.263158
def do_set_logical_switch_config(self, line):
    """set_logical_switch_config <peer> <logical switch> <key> <value>
    eg. set_logical_switch_config sw1 running LogicalSwitch7 \
    lost-connection-behavior failStandaloneMode
    """
    def f(p, args):
        # Exactly four arguments are expected; catch only the unpacking
        # errors (the original bare `except:` also swallowed SystemExit
        # and KeyboardInterrupt).
        try:
            target, lsw, key, value = args
        except (TypeError, ValueError):
            print("argument error")
            return

        # get switch id
        o = p.get_config(target)
        capable_switch_id = o.id

        try:
            capable_switch = ofc.OFCapableSwitchType(
                id=capable_switch_id,
                logical_switches=ofc.OFCapableSwitchLogicalSwitchesType(
                    switch=[ofc.OFLogicalSwitchType(
                        id=lsw,
                        **{key: value}
                    )]
                )
            )
        except TypeError:
            # An unknown <key> makes the constructor reject the kwarg.
            print("argument error")
            return
        try:
            p.edit_config(target, capable_switch)
        except Exception as e:
            print(e)

    self._request(line, f)
[ "def", "do_set_logical_switch_config", "(", "self", ",", "line", ")", ":", "def", "f", "(", "p", ",", "args", ")", ":", "try", ":", "target", ",", "lsw", ",", "key", ",", "value", "=", "args", "except", ":", "print", "(", "\"argument error\"", ")", "...
31.583333
15.5
def wrap_tuple_streams(unwrapped, kdims, streams):
    """
    Fills in tuple keys with dimensioned stream values as appropriate.
    """
    # Flattened (parameter name, stream) pairs across all streams.
    pairs = [(name, stream)
             for stream in streams
             for name in stream.contents.keys()]

    substituted = []
    for pos, el in enumerate(wrap_tuple(unwrapped)):
        if el is None and pos < len(kdims):
            matches = [(n, s) for (n, s) in pairs if n == kdims[pos].name]
            # Substitute only when the match is unambiguous.
            if len(matches) == 1:
                name, stream = matches[0]
                el = stream.contents[name]
        substituted.append(el)
    return tuple(substituted)
[ "def", "wrap_tuple_streams", "(", "unwrapped", ",", "kdims", ",", "streams", ")", ":", "param_groups", "=", "[", "(", "s", ".", "contents", ".", "keys", "(", ")", ",", "s", ")", "for", "s", "in", "streams", "]", "pairs", "=", "[", "(", "name", ",",...
42.4
13.2
def set_router_id(self, value=None, default=False, disable=False):
    """Controls the router id property for the OSPF Process

    Args:
        value (str): The router-id value
        default (bool): Controls the use of the default keyword
        disable (bool): Controls the use of the no keyword

    Returns:
        bool: True if the commands are completed successfully
    """
    command = self.command_builder('router-id', value=value,
                                   default=default, disable=disable)
    return self.configure_ospf(command)
[ "def", "set_router_id", "(", "self", ",", "value", "=", "None", ",", "default", "=", "False", ",", "disable", "=", "False", ")", ":", "cmd", "=", "self", ".", "command_builder", "(", "'router-id'", ",", "value", "=", "value", ",", "default", "=", "defa...
45.615385
19.230769
def _build_vars_dict(vars_file='', variables=None): """Merge variables into a single dictionary Applies to CLI provided variables only """ repex_vars = {} if vars_file: with open(vars_file) as varsfile: repex_vars = yaml.safe_load(varsfile.read()) for var in variables: key, value = var.split('=') repex_vars.update({str(key): str(value)}) return repex_vars
[ "def", "_build_vars_dict", "(", "vars_file", "=", "''", ",", "variables", "=", "None", ")", ":", "repex_vars", "=", "{", "}", "if", "vars_file", ":", "with", "open", "(", "vars_file", ")", "as", "varsfile", ":", "repex_vars", "=", "yaml", ".", "safe_load...
31.538462
12.461538
def save_svg(string, parent=None):
    """ Prompts the user to save an SVG document to disk.

    Parameters:
    -----------
    string : basestring
        A Python string containing a SVG document.

    parent : QWidget, optional
        The parent to use for the file dialog.

    Returns:
    --------
    The name of the file to which the document was saved, or None if the
    save was cancelled.
    """
    # Qt expects bytes when writing; encode unicode input up front.
    # (This module targets Python 2 -- `unicode` is the py2 text type.)
    if isinstance(string, unicode):
        string = string.encode('utf-8')

    dialog = QtGui.QFileDialog(parent, 'Save SVG Document')
    dialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
    dialog.setDefaultSuffix('svg')
    dialog.setNameFilter('SVG document (*.svg)')
    if dialog.exec_():
        filename = dialog.selectedFiles()[0]
        # Context manager replaces the original manual try/finally and
        # guarantees the handle is closed even if the write fails.
        with open(filename, 'w') as f:
            f.write(string)
        return filename
    return None
[ "def", "save_svg", "(", "string", ",", "parent", "=", "None", ")", ":", "if", "isinstance", "(", "string", ",", "unicode", ")", ":", "string", "=", "string", ".", "encode", "(", "'utf-8'", ")", "dialog", "=", "QtGui", ".", "QFileDialog", "(", "parent",...
27.375
18.25
def get_queryset(self):
    """
    This view should return a list of all the addresses the identity has
    for the supplied query parameters.

    Currently only supports address_type and default params.
    Always excludes addresses with optedout = True
    """
    identity_id = self.kwargs["identity_id"]
    address_type = self.kwargs["address_type"]
    default_only = "default" in self.request.query_params

    if "use_communicate_through" in self.request.query_params:
        identity = Identity.objects.select_related("communicate_through").get(
            id=identity_id
        )
        # Follow the communicate_through link when one is set.
        if identity.communicate_through is not None:
            identity = identity.communicate_through
    else:
        identity = Identity.objects.get(id=identity_id)

    return [
        Address(addr)
        for addr in identity.get_addresses_list(address_type, default_only)
    ]
[ "def", "get_queryset", "(", "self", ")", ":", "identity_id", "=", "self", ".", "kwargs", "[", "\"identity_id\"", "]", "address_type", "=", "self", ".", "kwargs", "[", "\"address_type\"", "]", "use_ct", "=", "\"use_communicate_through\"", "in", "self", ".", "re...
45.857143
18.428571
def _extalg(xarr, alpha=100, axis=None): '''Given an array xarr of values, smoothly return the max/min''' return (np.sum(xarr * np.exp(alpha*xarr), axis=axis, keepdims=True)/ np.sum(np.exp(alpha*xarr), axis=axis, keepdims=True))
[ "def", "_extalg", "(", "xarr", ",", "alpha", "=", "100", ",", "axis", "=", "None", ")", ":", "return", "(", "np", ".", "sum", "(", "xarr", "*", "np", ".", "exp", "(", "alpha", "*", "xarr", ")", ",", "axis", "=", "axis", ",", "keepdims", "=", ...
62.25
22.25
def execute_request(server_url, creds, namespace, classname):
    """ Open a connection with the server_url and creds, and enumerate
        instances defined by the functions namespace and classname arguments.
        Displays either the error return or the mof for instances returned.
    """
    print('Requesting url=%s, ns=%s, class=%s' %
          (server_url, namespace, classname))

    try:
        # Create a connection
        conn = WBEMConnection(server_url, creds,
                              default_namespace=namespace,
                              no_verification=True)

        # Issue the request to EnumerateInstances on the defined class
        instances = conn.EnumerateInstances(classname)

        # Display of characteristics of the result object
        print('instances type=%s len=%s' % (type(instances), len(instances)))
        # display the mof output
        for inst in instances:
            print('path=%s\n' % inst.path)
            print(inst.tomof())
    # handle any exception
    except Error as err:
        # If CIMError, display CIMError attributes
        if isinstance(err, CIMError):
            print('Operation Failed: CIMError: code=%s, Description=%s' %
                  (err.status_code_name, err.status_description))
        else:
            print("Operation failed: %s" % err)
        sys.exit(1)
[ "def", "execute_request", "(", "server_url", ",", "creds", ",", "namespace", ",", "classname", ")", ":", "print", "(", "'Requesting url=%s, ns=%s, class=%s'", "%", "(", "server_url", ",", "namespace", ",", "classname", ")", ")", "try", ":", "# Create a connection"...
37.324324
18.594595
def _dbus_get_object(bus_name, object_name):
    """
    Fetches DBUS proxy object given the specified parameters.

    `bus_name`
        Name of the bus interface.
    `object_name`
        Object path related to the interface.

    Returns object or ``None``.
    """
    try:
        return dbus.SessionBus().get_object(bus_name, object_name)
    except (NameError, dbus.exceptions.DBusException):
        # NameError covers an unavailable dbus module; DBusException any
        # failure reaching the session bus or the requested object.
        return None
[ "def", "_dbus_get_object", "(", "bus_name", ",", "object_name", ")", ":", "try", ":", "bus", "=", "dbus", ".", "SessionBus", "(", ")", "obj", "=", "bus", ".", "get_object", "(", "bus_name", ",", "object_name", ")", "return", "obj", "except", "(", "NameEr...
25.666667
18.333333
def artifact_mime_type(instance):
    """Ensure the 'mime_type' property of artifact objects comes from the
    Template column in the IANA media type registry.

    Yields a JSONError for each artifact whose mime_type fails validation.
    """
    for key, obj in instance['objects'].items():
        is_artifact = 'type' in obj and obj['type'] == 'artifact'
        if not (is_artifact and 'mime_type' in obj):
            continue

        if enums.media_types():
            # Registry available: the value must be an exact member.
            if obj['mime_type'] not in enums.media_types():
                yield JSONError("The 'mime_type' property of object '%s' "
                                "('%s') must be an IANA registered MIME "
                                "Type of the form 'type/subtype'."
                                % (key, obj['mime_type']), instance['id'])
        else:
            # Offline fallback: only validate the general shape.
            info("Can't reach IANA website; using regex for mime types.")
            mime_re = re.compile(r'^(application|audio|font|image|message|model'
                                 '|multipart|text|video)/[a-zA-Z0-9.+_-]+')
            if not mime_re.match(obj['mime_type']):
                yield JSONError("The 'mime_type' property of object '%s' "
                                "('%s') should be an IANA MIME Type of the"
                                " form 'type/subtype'."
                                % (key, obj['mime_type']), instance['id'])
[ "def", "artifact_mime_type", "(", "instance", ")", ":", "for", "key", ",", "obj", "in", "instance", "[", "'objects'", "]", ".", "items", "(", ")", ":", "if", "(", "'type'", "in", "obj", "and", "obj", "[", "'type'", "]", "==", "'artifact'", "and", "'m...
59.090909
25.909091
def _render_bar(self, bar, value, max_value, label_width, bar_width, **settings):
    """
    Render single chart bar.
    """
    fill_ratio = value / max_value
    filled_len = int(bar_width * fill_ratio)

    filled = str(settings[self.SETTING_BAR_CHAR]) * filled_len
    # Formatting options: the plain flag plus any bar-specific formatting.
    fmt_opts = {k: settings[k] for k in (self.SETTING_FLAG_PLAIN,)}
    fmt_opts.update(settings[self.SETTING_BAR_FORMATING])
    filled = self.fmt_text(filled, **fmt_opts)

    # Pad the unfilled remainder of the bar with spaces.
    rendered = filled + ' ' * int(bar_width - filled_len)
    template = "{:" + str(label_width) + "s} [{:s}]"
    return template.format(bar.get('label'), rendered)
[ "def", "_render_bar", "(", "self", ",", "bar", ",", "value", ",", "max_value", ",", "label_width", ",", "bar_width", ",", "*", "*", "settings", ")", ":", "percent", "=", "value", "/", "max_value", "barstr", "=", "\"\"", "barstr", "+=", "str", "(", "set...
46.307692
16
def cal_g_bm3(p, g, k):
    """
    calculate shear modulus at given pressure

    :param p: pressure
    :param g: [g0, g0p]
    :param k: [v0, k0, k0p]
    :return: shear modulus at high pressure
    """
    v = cal_v_bm3(p, k)
    v0, k0, kp = k[0], k[1], k[2]
    g0, gp = g[0], g[1]
    # Eulerian finite strain corresponding to the compressed volume.
    f = 0.5 * ((v / v0) ** (-2. / 3.) - 1.)
    linear = (3. * k0 * gp - 5. * g0) * f
    quadratic = (6. * k0 * gp - 24. * k0 - 14. * g0
                 + 9. / 2. * k0 * kp) * f ** 2.
    return (1. + 2. * f) ** (5. / 2.) * (g0 + linear + quadratic)
[ "def", "cal_g_bm3", "(", "p", ",", "g", ",", "k", ")", ":", "v", "=", "cal_v_bm3", "(", "p", ",", "k", ")", "v0", "=", "k", "[", "0", "]", "k0", "=", "k", "[", "1", "]", "kp", "=", "k", "[", "2", "]", "g0", "=", "g", "[", "0", "]", ...
28.578947
19.105263
def augmentTextWithCONLLstr( conll_str_array, text ):
    ''' Augments given Text object with the information from Maltparser's
        output. More specifically, adds information about SYNTAX_LABEL,
        SYNTAX_HEAD and DEPREL to each token in the Text object;
    '''
    j = 0
    for sentence in text.divide( layer=WORDS, by=SENTENCES ):
        sentence = __sort_analyses(sentence)
        for estnltkToken in sentence:
            maltparserToken = conll_str_array[j]
            if len( maltparserToken ) > 1:
                conll_fields = maltparserToken.split('\t')
                if estnltkToken[TEXT] != conll_fields[1]:
                    raise Exception("A misalignment between Text and Maltparser's output: ",\
                                    estnltkToken, maltparserToken )
                # Syntactic relation: label and head index
                estnltkToken[SYNTAX_LABEL] = conll_fields[0]
                estnltkToken[SYNTAX_HEAD] = conll_fields[6]
                # Name of the surface syntactic relation
                estnltkToken[DEPREL] = conll_fields[7]
            j += 1
        # extra advance per sentence -- presumably skips the blank
        # separator line in the CONLL output; TODO confirm
        j += 1
[ "def", "augmentTextWithCONLLstr", "(", "conll_str_array", ",", "text", ")", ":", "j", "=", "0", "for", "sentence", "in", "text", ".", "divide", "(", "layer", "=", "WORDS", ",", "by", "=", "SENTENCES", ")", ":", "sentence", "=", "__sort_analyses", "(", "s...
51.166667
22.25
def api_secret(self, api_secret):
    """
    Sets the api_secret of this GlobalSignCredentials.
    API Secret matching the API key (provided by GlobalSign).

    :param api_secret: The api_secret of this GlobalSignCredentials.
    :type: str
    :raises ValueError: if api_secret is None or longer than 250 characters
    """
    if api_secret is None:
        raise ValueError("Invalid value for `api_secret`, must not be `None`")
    # The None case already raised above, so (unlike the original) only
    # the length constraint needs checking here.
    if len(api_secret) > 250:
        raise ValueError("Invalid value for `api_secret`, length must be less than or equal to `250`")

    self._api_secret = api_secret
[ "def", "api_secret", "(", "self", ",", "api_secret", ")", ":", "if", "api_secret", "is", "None", ":", "raise", "ValueError", "(", "\"Invalid value for `api_secret`, must not be `None`\"", ")", "if", "api_secret", "is", "not", "None", "and", "len", "(", "api_secret...
41.714286
23.285714
def to_tnw(orbit): """In the TNW Local Orbital Reference Frame, x is oriented along the velocity vector, z along the angular momentum, and y complete the frame. Args: orbit (list): Array of length 6 Return: numpy.ndarray: matrix to convert from inertial frame to TNW. >>> delta_tnw = [1, 0, 0] >>> p = [-6142438.668, 3492467.560, -25767.25680] >>> v = [505.8479685, 942.7809215, 7435.922231] >>> pv = p + v >>> mat = to_tnw(pv).T >>> delta_inert = mat @ delta_tnw >>> all(delta_inert == v / norm(v)) True """ pos, vel = _split(orbit) t = vel / norm(vel) w = np.cross(pos, vel) / (norm(pos) * norm(vel)) n = np.cross(w, t) return np.array([t, n, w])
[ "def", "to_tnw", "(", "orbit", ")", ":", "pos", ",", "vel", "=", "_split", "(", "orbit", ")", "t", "=", "vel", "/", "norm", "(", "vel", ")", "w", "=", "np", ".", "cross", "(", "pos", ",", "vel", ")", "/", "(", "norm", "(", "pos", ")", "*", ...
27.538462
19.461538
def tmDiff(tm1, tm2, verbosity = 0, relaxSegmentTests =True): """ Given two TM instances, list the difference between them and returns False if there is a difference. This function checks the major parameters. If this passes (and checkLearn is true) it checks the number of segments on each cell. If this passes, checks each synapse on each segment. When comparing C++ and Py, the segments are usually in different orders in the cells. tmDiff ignores segment order when comparing TM's. """ # First check basic parameters. If we fail here, don't continue if sameTMParams(tm1, tm2) == False: print "Two TM's have different parameters" return False result = True # Compare states at t first, they usually diverge before the structure of the # cells starts diverging if (tm1.activeState['t'] != tm2.activeState['t']).any(): print 'Active states diverge', numpy.where(tm1.activeState['t'] != tm2.activeState['t']) result = False if (tm1.predictedState['t'] - tm2.predictedState['t']).any(): print 'Predicted states diverge', numpy.where(tm1.predictedState['t'] != tm2.predictedState['t']) result = False # TODO: check confidence at T (confT) # Now check some high level learned parameters. if tm1.getNumSegments() != tm2.getNumSegments(): print "Number of segments are different", tm1.getNumSegments(), tm2.getNumSegments() result = False if tm1.getNumSynapses() != tm2.getNumSynapses(): print "Number of synapses are different", tm1.getNumSynapses(), tm2.getNumSynapses() tm1.printCells() tm2.printCells() result = False # Check that each cell has the same number of segments and synapses for c in xrange(tm1.numberOfCols): for i in xrange(tm2.cellsPerColumn): if tm1.getNumSegmentsInCell(c, i) != tm2.getNumSegmentsInCell(c, i): print "Num segments different in cell:",c,i, print tm1.getNumSegmentsInCell(c, i), tm2.getNumSegmentsInCell(c, i) result = False # If the above tests pass, then check each segment and report differences # Note that segments in tm1 can be in a different order than tm2. 
Here we # make sure that, for each segment in tm1, there is an identical segment # in tm2. if result == True and not relaxSegmentTests: for c in xrange(tm1.numberOfCols): for i in xrange(tm2.cellsPerColumn): nSegs = tm1.getNumSegmentsInCell(c, i) for segIdx in xrange(nSegs): tm1seg = tm1.getSegmentOnCell(c, i, segIdx) # Loop through all segments in tm2seg and see if any of them match tm1seg res = False for tm2segIdx in xrange(nSegs): tm2seg = tm2.getSegmentOnCell(c, i, tm2segIdx) if sameSegment(tm1seg, tm2seg) == True: res = True break if res == False: print "\nSegments are different for cell:",c,i if verbosity >= 1: print "C++" tm1.printCell(c, i) print "Py" tm2.printCell(c, i) result = False if result == True and (verbosity > 1): print "TM's match" return result
[ "def", "tmDiff", "(", "tm1", ",", "tm2", ",", "verbosity", "=", "0", ",", "relaxSegmentTests", "=", "True", ")", ":", "# First check basic parameters. If we fail here, don't continue", "if", "sameTMParams", "(", "tm1", ",", "tm2", ")", "==", "False", ":", "print...
37.839506
23.246914
def get_value(self, dictionary): """ Given the input dictionary, return the field value. """ # We override the default field access in order to support # lists in HTML forms. if html.is_html_input(dictionary): return html.parse_html_list(dictionary, prefix=self.field_name) return dictionary.get(self.field_name, empty)
[ "def", "get_value", "(", "self", ",", "dictionary", ")", ":", "# We override the default field access in order to support", "# lists in HTML forms.", "if", "html", ".", "is_html_input", "(", "dictionary", ")", ":", "return", "html", ".", "parse_html_list", "(", "diction...
42.111111
12.555556
def remove_incomplete_upload(self, bucket_name, object_name): """ Remove all in-complete uploads for a given bucket_name and object_name. :param bucket_name: Bucket to drop incomplete uploads :param object_name: Name of object to remove incomplete uploads :return: None """ is_valid_bucket_name(bucket_name) is_non_empty_string(object_name) recursive = True uploads = self._list_incomplete_uploads(bucket_name, object_name, recursive, is_aggregate_size=False) for upload in uploads: if object_name == upload.object_name: self._remove_incomplete_upload(bucket_name, object_name, upload.upload_id)
[ "def", "remove_incomplete_upload", "(", "self", ",", "bucket_name", ",", "object_name", ")", ":", "is_valid_bucket_name", "(", "bucket_name", ")", "is_non_empty_string", "(", "object_name", ")", "recursive", "=", "True", "uploads", "=", "self", ".", "_list_incomplet...
44.105263
20.315789
def stop(self): """Stop the sensor. """ # Check that everything is running if not self._running: logging.warning('PhoXi not running. Aborting stop') return False # Stop the subscribers self._color_im_sub.unregister() self._depth_im_sub.unregister() self._normal_map_sub.unregister() # Disconnect from the camera rospy.ServiceProxy('phoxi_camera/disconnect_camera', Empty)() self._running = False return True
[ "def", "stop", "(", "self", ")", ":", "# Check that everything is running", "if", "not", "self", ".", "_running", ":", "logging", ".", "warning", "(", "'PhoXi not running. Aborting stop'", ")", "return", "False", "# Stop the subscribers", "self", ".", "_color_im_sub",...
27
16.578947
def add_query_params(self, query_params): '''Overwrite the given query parameters. This is the same as :meth:`Queryable.set_query_params`, except it overwrites existing parameters individually whereas ``set_query_params`` deletes all existing key in ``query_params``. ''' query_params = as_multi_dict(query_params) for k in query_params: self.query_params.pop(k, None) for v in query_params.getlist(k): self.query_params[k] = v self.apply_param_schema() return self
[ "def", "add_query_params", "(", "self", ",", "query_params", ")", ":", "query_params", "=", "as_multi_dict", "(", "query_params", ")", "for", "k", "in", "query_params", ":", "self", ".", "query_params", ".", "pop", "(", "k", ",", "None", ")", "for", "v", ...
38.133333
14.4
def from_header(cls, header, beam=None, lat=None): """ Create a new WCSHelper class from the given header. Parameters ---------- header : `astropy.fits.HDUHeader` or string The header to be used to create the WCS helper beam : :class:`AegeanTools.fits_image.Beam` or None The synthesized beam. If the supplied beam is None then one is constructed form the header. lat : float The latitude of the telescope. Returns ------- obj : :class:`AegeanTools.wcs_helpers.WCSHelper` A helper object. """ try: wcs = pywcs.WCS(header, naxis=2) except: # TODO: figure out what error is being thrown wcs = pywcs.WCS(str(header), naxis=2) if beam is None: beam = get_beam(header) else: beam = beam if beam is None: logging.critical("Cannot determine beam information") _, pixscale = get_pixinfo(header) refpix = (header['CRPIX1'], header['CRPIX2']) return cls(wcs, beam, pixscale, refpix, lat)
[ "def", "from_header", "(", "cls", ",", "header", ",", "beam", "=", "None", ",", "lat", "=", "None", ")", ":", "try", ":", "wcs", "=", "pywcs", ".", "WCS", "(", "header", ",", "naxis", "=", "2", ")", "except", ":", "# TODO: figure out what error is bein...
30.833333
21.111111
def register_timer(period, target=None): """Add timer. Can be used as a decorator: .. code-block:: python @register_timer(3) def repeat(): do() :param int period: The interval (seconds) at which to raise the signal. :param int|Signal|str|unicode target: Existing signal to raise or Signal Target to register signal implicitly. Available targets: * ``workers`` - run the signal handler on all the workers * ``workerN`` - run the signal handler only on worker N * ``worker``/``worker0`` - run the signal handler on the first available worker * ``active-workers`` - run the signal handlers on all the active [non-cheaped] workers * ``mules`` - run the signal handler on all of the mules * ``muleN`` - run the signal handler on mule N * ``mule``/``mule0`` - run the signal handler on the first available mule * ``spooler`` - run the signal on the first available spooler * ``farmN/farm_XXX`` - run the signal handler in the mule farm N or named XXX :rtype: bool|callable :raises ValueError: If unable to add timer. """ return _automate_signal(target, func=lambda sig: uwsgi.add_timer(int(sig), period))
[ "def", "register_timer", "(", "period", ",", "target", "=", "None", ")", ":", "return", "_automate_signal", "(", "target", ",", "func", "=", "lambda", "sig", ":", "uwsgi", ".", "add_timer", "(", "int", "(", "sig", ")", ",", "period", ")", ")" ]
36.6
28.657143
def get_default(parser, section, option, default): """helper to get config settings with a default if not present""" try: result = parser.get(section, option) except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): result = default return result
[ "def", "get_default", "(", "parser", ",", "section", ",", "option", ",", "default", ")", ":", "try", ":", "result", "=", "parser", ".", "get", "(", "section", ",", "option", ")", "except", "(", "ConfigParser", ".", "NoSectionError", ",", "ConfigParser", ...
40.142857
16.285714
def via_scan(): """ IP scan - now implemented """ import socket import ipaddress import httpfind bridges_from_scan = [] hosts = socket.gethostbyname_ex(socket.gethostname())[2] for host in hosts: bridges_from_scan += httpfind.survey( # TODO: how do we determine subnet configuration? ipaddress.ip_interface(host+'/24').network, path='description.xml', pattern='(P|p)hilips') logger.info('Scan on %s', host) logger.info('Scan returned %d Hue bridges(s).', len(bridges_from_scan)) # Confirm Scan gave an accessible bridge device by reading from the returned # location. Should look like: http://192.168.0.1/description.xml found_bridges = {} for bridge in bridges_from_scan: serial, bridge_info = parse_description_xml(bridge) if serial: found_bridges[serial] = bridge_info logger.debug('%s', found_bridges) if found_bridges: return found_bridges else: raise DiscoveryError('Scan returned nothing')
[ "def", "via_scan", "(", ")", ":", "import", "socket", "import", "ipaddress", "import", "httpfind", "bridges_from_scan", "=", "[", "]", "hosts", "=", "socket", ".", "gethostbyname_ex", "(", "socket", ".", "gethostname", "(", ")", ")", "[", "2", "]", "for", ...
37.107143
17.357143
def all_minutes(self): """ Returns a DatetimeIndex representing all the minutes in this calendar. """ opens_in_ns = self._opens.values.astype( 'datetime64[ns]', ).view('int64') closes_in_ns = self._closes.values.astype( 'datetime64[ns]', ).view('int64') return DatetimeIndex( compute_all_minutes(opens_in_ns, closes_in_ns), tz=UTC, )
[ "def", "all_minutes", "(", "self", ")", ":", "opens_in_ns", "=", "self", ".", "_opens", ".", "values", ".", "astype", "(", "'datetime64[ns]'", ",", ")", ".", "view", "(", "'int64'", ")", "closes_in_ns", "=", "self", ".", "_closes", ".", "values", ".", ...
27.5
18.25
def strings(self): """ Return lat/lon as strings. """ return [ toString(self.lat, LAT), toString(self.lon, LON) ]
[ "def", "strings", "(", "self", ")", ":", "return", "[", "toString", "(", "self", ".", "lat", ",", "LAT", ")", ",", "toString", "(", "self", ".", "lon", ",", "LON", ")", "]" ]
26
14.333333
def persist(self, container: Container, image_name: str) -> None: """ Persists the state of a given container as a Docker image on the server. Parameters: container: the container that should be persisted. image_name: the name of the Docker image that should be created. Raises: ContainerNotFound: if the given container does not exist on the server. ImageAlreadyExists: if the given image name is already in use by another Docker image on the server. """ logger.debug("attempting to persist container (%s) to image (%s).", container.id, image_name) path = "containers/{}/persist/{}".format(container.id, image_name) r = self.__api.put(path) if r.status_code == 204: logger.debug("persisted container (%s) to image (%s).", container.id, image_name) return try: self.__api.handle_erroneous_response(r) except Exception: logger.exception("failed to persist container (%s) to image (%s).", # noqa: pycodestyle container.id, image_name) raise
[ "def", "persist", "(", "self", ",", "container", ":", "Container", ",", "image_name", ":", "str", ")", "->", "None", ":", "logger", ".", "debug", "(", "\"attempting to persist container (%s) to image (%s).\"", ",", "container", ".", "id", ",", "image_name", ")",...
40.5
20.5625
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml): """ Build up the library of all the components Parameters ---------- diffuse_yaml : str Name of the yaml file with the library of diffuse component definitions catalog_yaml : str Name of the yaml file width the library of catalog split definitions binning_yaml : str Name of the yaml file with the binning definitions """ ret_dict = {} #catalog_dict = yaml.safe_load(open(catalog_yaml)) components_dict = Component.build_from_yamlfile(binning_yaml) diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm, DiffuseModelManager=self._dmm, library=diffuse_yaml, components=components_dict) catalog_ret_dict = make_catalog_comp_dict(library=catalog_yaml, CatalogSourceManager=self._csm) ret_dict.update(diffuse_ret_dict['comp_info_dict']) ret_dict.update(catalog_ret_dict['comp_info_dict']) self._library.update(ret_dict) return ret_dict
[ "def", "make_library", "(", "self", ",", "diffuse_yaml", ",", "catalog_yaml", ",", "binning_yaml", ")", ":", "ret_dict", "=", "{", "}", "#catalog_dict = yaml.safe_load(open(catalog_yaml))", "components_dict", "=", "Component", ".", "build_from_yamlfile", "(", "binning_y...
49
25.269231
def set(context="notebook", style="darkgrid", palette="deep", font="sans-serif", font_scale=1, color_codes=False, rc=None): """Set aesthetic parameters in one step. Each set of parameters can be set directly or temporarily, see the referenced functions below for more information. Parameters ---------- context : string or dict Plotting context parameters, see :func:`plotting_context` style : string or dict Axes style parameters, see :func:`axes_style` palette : string or sequence Color palette, see :func:`color_palette` font : string Font family, see matplotlib font manager. font_scale : float, optional Separate scaling factor to independently scale the size of the font elements. color_codes : bool If ``True`` and ``palette`` is a seaborn palette, remap the shorthand color codes (e.g. "b", "g", "r", etc.) to the colors from this palette. rc : dict or None Dictionary of rc parameter mappings to override the above. """ mpl.rcParams = {} set_context(context, font_scale) set_style(style, rc={"font.family": font}) if rc is not None: mpl.rcParams.update(rc) return mpl.rcParams
[ "def", "set", "(", "context", "=", "\"notebook\"", ",", "style", "=", "\"darkgrid\"", ",", "palette", "=", "\"deep\"", ",", "font", "=", "\"sans-serif\"", ",", "font_scale", "=", "1", ",", "color_codes", "=", "False", ",", "rc", "=", "None", ")", ":", ...
40.566667
17.466667
def state_probability(self, direction, repertoire, purview,): """Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimension are already singular and thus receive 0 as the conditioning index. Returns: float: A single probabilty. """ purview_state = self.purview_state(direction) index = tuple(node_state if node in purview else 0 for node, node_state in enumerate(purview_state)) return repertoire[index]
[ "def", "state_probability", "(", "self", ",", "direction", ",", "repertoire", ",", "purview", ",", ")", ":", "purview_state", "=", "self", ".", "purview_state", "(", "direction", ")", "index", "=", "tuple", "(", "node_state", "if", "node", "in", "purview", ...
40.375
21.1875
def update_state(self): """Find out whether the media box is turned on/off.""" sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(self._timeout) try: if sock.connect_ex((self._ip, self._port['state'])) == 0: self.state = True else: self.state = False sock.close() except socket.error: raise
[ "def", "update_state", "(", "self", ")", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "settimeout", "(", "self", ".", "_timeout", ")", "try", ":", "if", "sock", ".", "c...
35.083333
15.583333
def delete_countries_geo_zone_by_id(cls, countries_geo_zone_id, **kwargs): """Delete CountriesGeoZone Delete an instance of CountriesGeoZone by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.delete_countries_geo_zone_by_id(countries_geo_zone_id, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to delete. (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._delete_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, **kwargs) else: (data) = cls._delete_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, **kwargs) return data
[ "def", "delete_countries_geo_zone_by_id", "(", "cls", ",", "countries_geo_zone_id", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async'", ")", ":", "return", "cls", ".", "_...
46.952381
25.190476
def banner(text, border='=', width=80): """Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be """ text_padding = '{0:^%d}' % (width) LOG.info(border * width) LOG.info(text_padding.format(text)) LOG.info(border * width)
[ "def", "banner", "(", "text", ",", "border", "=", "'='", ",", "width", "=", "80", ")", ":", "text_padding", "=", "'{0:^%d}'", "%", "(", "width", ")", "LOG", ".", "info", "(", "border", "*", "width", ")", "LOG", ".", "info", "(", "text_padding", "."...
32.833333
9.833333
def plot(self): """ Visualize the state. :return: The generated figure. :rtype: matplotlib.Figure """ width = 10 # The pleasing golden ratio. height = width / 1.618 f = plt.figure(figsize=(width, height)) ax = f.add_subplot(111, projection="3d") self.plot_state_histogram(ax) return f
[ "def", "plot", "(", "self", ")", ":", "width", "=", "10", "# The pleasing golden ratio.", "height", "=", "width", "/", "1.618", "f", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "width", ",", "height", ")", ")", "ax", "=", "f", ".", "add_subp...
24.533333
13.6
def rm_crypttab(name, config='/etc/crypttab'): ''' Remove the named mapping from the crypttab. If the described entry does not exist, nothing is changed, but the command succeeds by returning ``'absent'``. If a line is removed, it returns ``'change'``. CLI Example: .. code-block:: bash salt '*' cryptdev.rm_crypttab foo ''' modified = False criteria = _crypttab_entry(name=name) # For each line in the config that does not match the criteria, add it to # the list. At the end, re-create the config from just those lines. lines = [] try: with salt.utils.files.fopen(config, 'r') as ifile: for line in ifile: line = salt.utils.stringutils.to_unicode(line) try: if criteria.match(line): modified = True else: lines.append(line) except _crypttab_entry.ParseError: lines.append(line) except (IOError, OSError) as exc: msg = 'Could not read from {0}: {1}' raise CommandExecutionError(msg.format(config, exc)) if modified: try: with salt.utils.files.fopen(config, 'w+') as ofile: ofile.writelines((salt.utils.stringutils.to_str(line) for line in lines)) except (IOError, OSError) as exc: msg = 'Could not write to {0}: {1}' raise CommandExecutionError(msg.format(config, exc)) # If we reach this point, the changes were successful return 'change' if modified else 'absent'
[ "def", "rm_crypttab", "(", "name", ",", "config", "=", "'/etc/crypttab'", ")", ":", "modified", "=", "False", "criteria", "=", "_crypttab_entry", "(", "name", "=", "name", ")", "# For each line in the config that does not match the criteria, add it to", "# the list. At th...
34.8
21.777778
def onRightUp(self, event=None): """ right button up: put back to cursor mode""" if event is None: return self.cursor_mode_action('rightup', event=event) self.ForwardEvent(event=event.guiEvent)
[ "def", "onRightUp", "(", "self", ",", "event", "=", "None", ")", ":", "if", "event", "is", "None", ":", "return", "self", ".", "cursor_mode_action", "(", "'rightup'", ",", "event", "=", "event", ")", "self", ".", "ForwardEvent", "(", "event", "=", "eve...
38.666667
11.166667
def _get_connection(self): """ Returns our cached LDAPObject, which may or may not be bound. """ if self._connection is None: uri = self.settings.SERVER_URI if callable(uri): uri = uri() self._connection = self.backend.ldap.initialize(uri) for opt, value in self.settings.CONNECTION_OPTIONS.items(): self._connection.set_option(opt, value) if self.settings.START_TLS: logger.debug("Initiating TLS") self._connection.start_tls_s() return self._connection
[ "def", "_get_connection", "(", "self", ")", ":", "if", "self", ".", "_connection", "is", "None", ":", "uri", "=", "self", ".", "settings", ".", "SERVER_URI", "if", "callable", "(", "uri", ")", ":", "uri", "=", "uri", "(", ")", "self", ".", "_connecti...
31.736842
17.105263
def hysteresis_magic2(path_to_file='.', hyst_file="rmag_hysteresis.txt", save=False, save_folder='.', fmt="svg", plots=True): """ Calculates hysteresis parameters, saves them in rmag_hysteresis format file. If selected, this function also plots hysteresis loops, delta M curves, d (Delta M)/dB curves, and IRM backfield curves. Parameters (defaults are used if not specified) ---------- path_to_file : path to directory that contains files (default is current directory, '.') hyst_file : hysteresis file (default is 'rmag_hysteresis.txt') save : boolean argument to save plots (default is False) save_folder : relative directory where plots will be saved (default is current directory, '.') fmt : format of saved figures (default is 'pdf') plots: whether or not to display the plots (default is true) """ user, meas_file, rmag_out, rmag_file = "", "agm_measurements.txt", "rmag_hysteresis.txt", "" pltspec = "" dir_path = save_folder verbose = pmagplotlib.verbose version_num = pmag.get_version() rmag_out = save_folder + '/' + rmag_out meas_file = path_to_file + '/' + hyst_file rmag_rem = save_folder + "/rmag_remanence.txt" # # meas_data, file_type = pmag.magic_read(meas_file) if file_type != 'magic_measurements': print(hysteresis_magic.__doc__) print('bad file') return # initialize some variables # define figure numbers for hyst,deltaM,DdeltaM curves HystRecs, RemRecs = [], [] HDD = {} HDD['hyst'], HDD['deltaM'], HDD['DdeltaM'] = 1, 2, 3 experiment_names, sids = [], [] for rec in meas_data: meths = rec['magic_method_codes'].split(':') methods = [] for meth in meths: methods.append(meth.strip()) if 'LP-HYS' in methods: if 'er_synthetic_name' in list(rec.keys()) and rec['er_synthetic_name'] != "": rec['er_specimen_name'] = rec['er_synthetic_name'] if rec['magic_experiment_name'] not in experiment_names: experiment_names.append(rec['magic_experiment_name']) if rec['er_specimen_name'] not in sids: sids.append(rec['er_specimen_name']) # fignum = 1 sample_num = 0 # initialize 
variables to record some bulk info in first loop first_dcd_rec, first_rec, first_imag_rec = 1, 1, 1 while sample_num < len(sids): sample = sids[sample_num] print(sample, sample_num + 1, 'out of ', len(sids)) # B,M for hysteresis, Bdcd,Mdcd for irm-dcd data B, M, Bdcd, Mdcd = [], [], [], [] Bimag, Mimag = [], [] # Bimag,Mimag for initial magnetization curves for rec in meas_data: methcodes = rec['magic_method_codes'].split(':') meths = [] for meth in methcodes: meths.append(meth.strip()) if rec['er_specimen_name'] == sample and "LP-HYS" in meths: B.append(float(rec['measurement_lab_field_dc'])) M.append(float(rec['measurement_magn_moment'])) if first_rec == 1: e = rec['magic_experiment_name'] HystRec = {} first_rec = 0 if "er_location_name" in list(rec.keys()): HystRec["er_location_name"] = rec["er_location_name"] locname = rec['er_location_name'].replace('/', '-') if "er_sample_name" in list(rec.keys()): HystRec["er_sample_name"] = rec["er_sample_name"] if "er_site_name" in list(rec.keys()): HystRec["er_site_name"] = rec["er_site_name"] if "er_synthetic_name" in list(rec.keys()) and rec['er_synthetic_name'] != "": HystRec["er_synthetic_name"] = rec["er_synthetic_name"] else: HystRec["er_specimen_name"] = rec["er_specimen_name"] if rec['er_specimen_name'] == sample and "LP-IRM-DCD" in meths: Bdcd.append(float(rec['treatment_dc_field'])) Mdcd.append(float(rec['measurement_magn_moment'])) if first_dcd_rec == 1: RemRec = {} irm_exp = rec['magic_experiment_name'] first_dcd_rec = 0 if "er_location_name" in list(rec.keys()): RemRec["er_location_name"] = rec["er_location_name"] if "er_sample_name" in list(rec.keys()): RemRec["er_sample_name"] = rec["er_sample_name"] if "er_site_name" in list(rec.keys()): RemRec["er_site_name"] = rec["er_site_name"] if "er_synthetic_name" in list(rec.keys()) and rec['er_synthetic_name'] != "": RemRec["er_synthetic_name"] = rec["er_synthetic_name"] else: RemRec["er_specimen_name"] = rec["er_specimen_name"] if rec['er_specimen_name'] == 
sample and "LP-IMAG" in meths: if first_imag_rec == 1: imag_exp = rec['magic_experiment_name'] first_imag_rec = 0 Bimag.append(float(rec['measurement_lab_field_dc'])) Mimag.append(float(rec['measurement_magn_moment'])) if len(B) > 0: hmeths = [] for meth in meths: hmeths.append(meth) # fignum = 1 fig = plt.figure(figsize=(8, 8)) hpars, deltaM, Bdm, B, Mnorm, MadjN = iplot_hys(1, B, M, sample) ax1 = fig.add_subplot(2, 2, 1) ax1.axhline(0, color='k') ax1.axvline(0, color='k') ax1.plot(B, Mnorm, 'r') ax1.plot(B, MadjN, 'b') ax1.set_xlabel('B (T)') ax1.set_ylabel("M/Msat") # ax1.set_title(sample) ax1.set_xlim(-1, 1) ax1.set_ylim(-1, 1) bounds = ax1.axis() n4 = 'Ms: ' + \ '%8.2e' % (float(hpars['hysteresis_ms_moment'])) + ' Am^2' ax1.text(bounds[1] - .9 * bounds[1], -.9, n4, fontsize=9) n1 = 'Mr: ' + \ '%8.2e' % (float(hpars['hysteresis_mr_moment'])) + ' Am^2' ax1.text(bounds[1] - .9 * bounds[1], -.7, n1, fontsize=9) n2 = 'Bc: ' + '%8.2e' % (float(hpars['hysteresis_bc'])) + ' T' ax1.text(bounds[1] - .9 * bounds[1], -.5, n2, fontsize=9) if 'hysteresis_xhf' in list(hpars.keys()): n3 = r'Xhf: ' + \ '%8.2e' % (float(hpars['hysteresis_xhf'])) + ' m^3' ax1.text(bounds[1] - .9 * bounds[1], -.3, n3, fontsize=9) # plt.subplot(1,2,2) # plt.subplot(1,3,3) DdeltaM = [] Mhalf = "" for k in range(2, len(Bdm)): # differnential DdeltaM.append( old_div(abs(deltaM[k] - deltaM[k - 2]), (Bdm[k] - Bdm[k - 2]))) for k in range(len(deltaM)): if old_div(deltaM[k], deltaM[0]) < 0.5: Mhalf = k break try: Bhf = Bdm[Mhalf - 1:Mhalf + 1] Mhf = deltaM[Mhalf - 1:Mhalf + 1] # best fit line through two bounding points poly = polyfit(Bhf, Mhf, 1) Bcr = old_div((.5 * deltaM[0] - poly[1]), poly[0]) hpars['hysteresis_bcr'] = '%8.3e' % (Bcr) hpars['magic_method_codes'] = "LP-BCR-HDM" if HDD['deltaM'] != 0: ax2 = fig.add_subplot(2, 2, 2) ax2.plot(Bdm, deltaM, 'b') ax2.set_xlabel('B (T)') ax2.set_ylabel('Delta M') linex = [0, Bcr, Bcr] liney = [old_div(deltaM[0], 2.), old_div(deltaM[0], 2.), 0] 
ax2.plot(linex, liney, 'r') # ax2.set_title(sample) ax3 = fig.add_subplot(2, 2, 3) ax3.plot(Bdm[(len(Bdm) - len(DdeltaM)):], DdeltaM, 'b') ax3.set_xlabel('B (T)') ax3.set_ylabel('d (Delta M)/dB') # ax3.set_title(sample) ax4 = fig.add_subplot(2, 2, 4) ax4.plot(Bdcd, Mdcd) ax4.yaxis.set_major_formatter(mtick.FormatStrFormatter('%.2e')) ax4.axhline(0, color='k') ax4.axvline(0, color='k') ax4.set_xlabel('B (T)') ax4.set_ylabel('M/Mr') except: print("not doing it") hpars['hysteresis_bcr'] = '0' hpars['magic_method_codes'] = "" plt.gcf() plt.gca() plt.tight_layout() if save: plt.savefig(save_folder + '/' + sample + '_hysteresis.' + fmt) plt.show() sample_num += 1
[ "def", "hysteresis_magic2", "(", "path_to_file", "=", "'.'", ",", "hyst_file", "=", "\"rmag_hysteresis.txt\"", ",", "save", "=", "False", ",", "save_folder", "=", "'.'", ",", "fmt", "=", "\"svg\"", ",", "plots", "=", "True", ")", ":", "user", ",", "meas_fi...
45.827225
17.722513
def describe_snapshots(self, *snapshot_ids): """Describe available snapshots. TODO: ownerSet, restorableBySet """ snapshot_set = {} for pos, snapshot_id in enumerate(snapshot_ids): snapshot_set["SnapshotId.%d" % (pos + 1)] = snapshot_id query = self.query_factory( action="DescribeSnapshots", creds=self.creds, endpoint=self.endpoint, other_params=snapshot_set) d = query.submit() return d.addCallback(self.parser.snapshots)
[ "def", "describe_snapshots", "(", "self", ",", "*", "snapshot_ids", ")", ":", "snapshot_set", "=", "{", "}", "for", "pos", ",", "snapshot_id", "in", "enumerate", "(", "snapshot_ids", ")", ":", "snapshot_set", "[", "\"SnapshotId.%d\"", "%", "(", "pos", "+", ...
39.461538
13.230769
def _sigma_pi_loE(self, Tp): """ inclusive cross section for Tth < Tp < 2 GeV Fit from experimental data """ m_p = self._m_p m_pi = self._m_pi Mres = 1.1883 # GeV Gres = 0.2264 # GeV s = 2 * m_p * (Tp + 2 * m_p) # center of mass energy gamma = np.sqrt(Mres ** 2 * (Mres ** 2 + Gres ** 2)) K = np.sqrt(8) * Mres * Gres * gamma K /= np.pi * np.sqrt(Mres ** 2 + gamma) fBW = m_p * K fBW /= ( (np.sqrt(s) - m_p) ** 2 - Mres ** 2 ) ** 2 + Mres ** 2 * Gres ** 2 mu = np.sqrt( (s - m_pi ** 2 - 4 * m_p ** 2) ** 2 - 16 * m_pi ** 2 * m_p ** 2 ) mu /= 2 * m_pi * np.sqrt(s) sigma0 = 7.66e-3 # mb sigma1pi = sigma0 * mu ** 1.95 * (1 + mu + mu ** 5) * fBW ** 1.86 # two pion production sigma2pi = 5.7 # mb sigma2pi /= 1 + np.exp(-9.3 * (Tp - 1.4)) E2pith = 0.56 # GeV sigma2pi[np.where(Tp < E2pith)] = 0.0 return (sigma1pi + sigma2pi) * 1e-27
[ "def", "_sigma_pi_loE", "(", "self", ",", "Tp", ")", ":", "m_p", "=", "self", ".", "_m_p", "m_pi", "=", "self", ".", "_m_pi", "Mres", "=", "1.1883", "# GeV", "Gres", "=", "0.2264", "# GeV", "s", "=", "2", "*", "m_p", "*", "(", "Tp", "+", "2", "...
28.888889
18.222222
def _process_patch_operation(cls, operation, obj, state): """ Args: operation (dict): one patch operation in RFC 6902 format. obj (object): an instance which is needed to be patched. state (dict): inter-operations state storage Returns: processing_status (bool): True if operation was handled, otherwise False. """ field_operaion = operation['op'] if field_operaion == cls.OP_REPLACE: return cls.replace(obj, operation['field_name'], operation['value'], state=state) elif field_operaion == cls.OP_TEST: return cls.test(obj, operation['field_name'], operation['value'], state=state) elif field_operaion == cls.OP_ADD: return cls.add(obj, operation['field_name'], operation['value'], state=state) elif field_operaion == cls.OP_MOVE: return cls.move(obj, operation['field_name'], operation['value'], state=state) elif field_operaion == cls.OP_COPY: return cls.copy(obj, operation['field_name'], operation['value'], state=state) elif field_operaion == cls.OP_REMOVE: return cls.remove(obj, operation['field_name'], state=state) return False
[ "def", "_process_patch_operation", "(", "cls", ",", "operation", ",", "obj", ",", "state", ")", ":", "field_operaion", "=", "operation", "[", "'op'", "]", "if", "field_operaion", "==", "cls", ".", "OP_REPLACE", ":", "return", "cls", ".", "replace", "(", "o...
39.677419
26.774194
def csv_tolist(path_to_file, **kwargs): """ Parse the csv file to a list of rows. """ result = [] encoding = kwargs.get('encoding', 'utf-8') delimiter = kwargs.get('delimiter', ',') dialect = kwargs.get('dialect', csv.excel) _, _ext = path_to_file.split('.', 1) try: file = codecs.open(path_to_file, 'r', encoding) items_file = io.TextIOWrapper(file, encoding=encoding) result = list( csv.reader(items_file, delimiter=delimiter, dialect=dialect)) items_file.close() file.close() except Exception as ex: result = [] logger.error('Fail parsing csv to list of rows - {}'.format(ex)) return result
[ "def", "csv_tolist", "(", "path_to_file", ",", "*", "*", "kwargs", ")", ":", "result", "=", "[", "]", "encoding", "=", "kwargs", ".", "get", "(", "'encoding'", ",", "'utf-8'", ")", "delimiter", "=", "kwargs", ".", "get", "(", "'delimiter'", ",", "','",...
24.5
21.642857
def Read(self): """Reads a string from the input. Returns: str: input. """ encoded_string = self._file_object.readline() if isinstance(encoded_string, py2to3.UNICODE_TYPE): return encoded_string try: string = codecs.decode(encoded_string, self._encoding, self._errors) except UnicodeDecodeError: if self._errors == 'strict': logger.error( 'Unable to properly read input due to encoding error. ' 'Switching to error tolerant encoding which can result in ' 'non Basic Latin (C0) characters to be replaced with "?" or ' '"\\ufffd".') self._errors = 'replace' string = codecs.decode(encoded_string, self._encoding, self._errors) return string
[ "def", "Read", "(", "self", ")", ":", "encoded_string", "=", "self", ".", "_file_object", ".", "readline", "(", ")", "if", "isinstance", "(", "encoded_string", ",", "py2to3", ".", "UNICODE_TYPE", ")", ":", "return", "encoded_string", "try", ":", "string", ...
29.8
23.4
def main(): """ Run autosub as a command-line program. """ parser = argparse.ArgumentParser() parser.add_argument('source_path', help="Path to the video or audio file to subtitle", nargs='?') parser.add_argument('-C', '--concurrency', help="Number of concurrent API requests to make", type=int, default=DEFAULT_CONCURRENCY) parser.add_argument('-o', '--output', help="Output path for subtitles (by default, subtitles are saved in \ the same directory and name as the source path)") parser.add_argument('-F', '--format', help="Destination subtitle format", default=DEFAULT_SUBTITLE_FORMAT) parser.add_argument('-S', '--src-language', help="Language spoken in source file", default=DEFAULT_SRC_LANGUAGE) parser.add_argument('-D', '--dst-language', help="Desired language for the subtitles", default=DEFAULT_DST_LANGUAGE) parser.add_argument('-K', '--api-key', help="The Google Translate API key to be used. \ (Required for subtitle translation)") parser.add_argument('--list-formats', help="List all available subtitle formats", action='store_true') parser.add_argument('--list-languages', help="List all available source/destination languages", action='store_true') args = parser.parse_args() if args.list_formats: print("List of formats:") for subtitle_format in FORMATTERS: print("{format}".format(format=subtitle_format)) return 0 if args.list_languages: print("List of all languages:") for code, language in sorted(LANGUAGE_CODES.items()): print("{code}\t{language}".format(code=code, language=language)) return 0 if not validate(args): return 1 try: subtitle_file_path = generate_subtitles( source_path=args.source_path, concurrency=args.concurrency, src_language=args.src_language, dst_language=args.dst_language, api_key=args.api_key, subtitle_file_format=args.format, output=args.output, ) print("Subtitles file created at {}".format(subtitle_file_path)) except KeyboardInterrupt: return 1 return 0
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'source_path'", ",", "help", "=", "\"Path to the video or audio file to subtitle\"", ",", "nargs", "=", "'?'", ")", "parser", ".", "a...
41.275862
21.413793
def write_journal(self, journal_file_path): """Write the constructed journal in to the provided file. Args: journal_file_path (str): full path to output journal file """ # TODO: assert the extension is txt and not other with open(journal_file_path, "w") as jrn_file: jrn_file.write(self._journal_contents)
[ "def", "write_journal", "(", "self", ",", "journal_file_path", ")", ":", "# TODO: assert the extension is txt and not other", "with", "open", "(", "journal_file_path", ",", "\"w\"", ")", "as", "jrn_file", ":", "jrn_file", ".", "write", "(", "self", ".", "_journal_co...
40.222222
15.555556
def NoExclusions(self): """Determine that there are no exclusion criterion in play :return: True if there is no real boundary specification of any kind. Simple method allowing parsers to short circuit the determination of missingness, which can be moderately compute intensive. """ if len(self.start_bounds) + len(self.target_rs) + len(self.ignored_rs) == 0: return BoundaryCheck.chrom == -1 return False
[ "def", "NoExclusions", "(", "self", ")", ":", "if", "len", "(", "self", ".", "start_bounds", ")", "+", "len", "(", "self", ".", "target_rs", ")", "+", "len", "(", "self", ".", "ignored_rs", ")", "==", "0", ":", "return", "BoundaryCheck", ".", "chrom"...
42.181818
23.727273
def make_writeable(filename): """ Make sure that the file is writeable. Useful if our source is read-only. """ if not os.access(filename, os.W_OK): st = os.stat(filename) new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR os.chmod(filename, new_permissions)
[ "def", "make_writeable", "(", "filename", ")", ":", "if", "not", "os", ".", "access", "(", "filename", ",", "os", ".", "W_OK", ")", ":", "st", "=", "os", ".", "stat", "(", "filename", ")", "new_permissions", "=", "stat", ".", "S_IMODE", "(", "st", ...
33.333333
5.777778
def _start_local_queue_process(self): """ Starts the interchange process locally Starts the interchange process locally and uses an internal command queue to get the worker task and result ports that the interchange has bound to. """ comm_q = Queue(maxsize=10) self.queue_proc = Process(target=interchange.starter, args=(comm_q,), kwargs={"client_ports": (self.outgoing_q.port, self.incoming_q.port, self.command_client.port), "worker_ports": self.worker_ports, "worker_port_range": self.worker_port_range, "logdir": "{}/{}".format(self.run_dir, self.label), "suppress_failure": self.suppress_failure, "heartbeat_threshold": self.heartbeat_threshold, "poll_period": self.poll_period, "logging_level": logging.DEBUG if self.worker_debug else logging.INFO }, ) self.queue_proc.start() try: (worker_task_port, worker_result_port) = comm_q.get(block=True, timeout=120) except queue.Empty: logger.error("Interchange has not completed initialization in 120s. Aborting") raise Exception("Interchange failed to start") self.worker_task_url = "tcp://{}:{}".format(self.address, worker_task_port) self.worker_result_url = "tcp://{}:{}".format(self.address, worker_result_port)
[ "def", "_start_local_queue_process", "(", "self", ")", ":", "comm_q", "=", "Queue", "(", "maxsize", "=", "10", ")", "self", ".", "queue_proc", "=", "Process", "(", "target", "=", "interchange", ".", "starter", ",", "args", "=", "(", "comm_q", ",", ")", ...
59.5
31.733333
def get_repository_config(namespace, config, snapshot_id): """Get a method configuration from the methods repository. Args: namespace (str): Methods namespace config (str): config name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/getMethodRepositoryConfiguration """ uri = "configurations/{0}/{1}/{2}".format(namespace, config, snapshot_id) return __get(uri)
[ "def", "get_repository_config", "(", "namespace", ",", "config", ",", "snapshot_id", ")", ":", "uri", "=", "\"configurations/{0}/{1}/{2}\"", ".", "format", "(", "namespace", ",", "config", ",", "snapshot_id", ")", "return", "__get", "(", "uri", ")" ]
35.384615
21.615385
def __deprecate_defaults( self, new_func: str, bg_blend: Any, alignment: Any = ..., clear: Any = ..., ) -> None: """Return the parameters needed to recreate the current default state. """ if not __debug__: return fg = self.default_fg # type: Any bg = self.default_bg # type: Any if bg_blend == tcod.constants.BKGND_NONE: bg = None if bg_blend == tcod.constants.BKGND_DEFAULT: bg_blend = self.default_bg_blend else: bg_blend = None if bg_blend == tcod.constants.BKGND_NONE: bg = None bg_blend = None if bg_blend == tcod.constants.BKGND_SET: bg_blend = None if alignment is None: alignment = self.default_alignment if alignment == tcod.constants.LEFT: alignment = None else: alignment = None if clear is not ...: fg = None params = [] if clear is True: params.append('ch=ord(" ")') if clear is False: params.append("ch=0") if fg is not None: params.append("fg=%s" % (fg,)) if bg is not None: params.append("bg=%s" % (bg,)) if bg_blend is not None: params.append("bg_blend=%s" % (self.__BG_BLEND_LOOKUP[bg_blend],)) if alignment is not None: params.append( "alignment=%s" % (self.__ALIGNMENT_LOOKUP[alignment],) ) param_str = ", ".join(params) if not param_str: param_str = "." else: param_str = " and add the following parameters:\n%s" % (param_str,) warnings.warn( "Console functions using default values have been deprecated.\n" "Replace this method with `Console.%s`%s" % (new_func, param_str), DeprecationWarning, stacklevel=3, )
[ "def", "__deprecate_defaults", "(", "self", ",", "new_func", ":", "str", ",", "bg_blend", ":", "Any", ",", "alignment", ":", "Any", "=", "...", ",", "clear", ":", "Any", "=", "...", ",", ")", "->", "None", ":", "if", "not", "__debug__", ":", "return"...
32.881356
15.508475
def template(ctx, url, no_input, role_name): # pragma: no cover """ Initialize a new role from a Cookiecutter URL. """ command_args = { 'role_name': role_name, 'subcommand': __name__, 'url': url, 'no_input': no_input, } t = Template(command_args) t.execute()
[ "def", "template", "(", "ctx", ",", "url", ",", "no_input", ",", "role_name", ")", ":", "# pragma: no cover", "command_args", "=", "{", "'role_name'", ":", "role_name", ",", "'subcommand'", ":", "__name__", ",", "'url'", ":", "url", ",", "'no_input'", ":", ...
27.454545
18.545455
def load_cz2010_hourly_temp_data( self, start, end, read_from_cache=True, write_to_cache=True ): """ Load hourly CZ2010 temperature data from start date to end date (inclusive). This is the primary convenience method for loading hourly CZ2010 temperature data. Parameters ---------- start : datetime.datetime The earliest date from which to load data. end : datetime.datetime The latest date until which to load data. read_from_cache : bool Whether or not to load data from cache. write_to_cache : bool Whether or not to write newly loaded data to cache. """ return load_cz2010_hourly_temp_data( self.usaf_id, start, end, read_from_cache=read_from_cache, write_to_cache=write_to_cache, )
[ "def", "load_cz2010_hourly_temp_data", "(", "self", ",", "start", ",", "end", ",", "read_from_cache", "=", "True", ",", "write_to_cache", "=", "True", ")", ":", "return", "load_cz2010_hourly_temp_data", "(", "self", ".", "usaf_id", ",", "start", ",", "end", ",...
34.92
17.68
def _m2crypto_sign(message, ssldir=None, certname=None, **config): """ Insert two new fields into the message dict and return it. Those fields are: - 'signature' - the computed RSA message digest of the JSON repr. - 'certificate' - the base64 X509 certificate of the sending host. """ if ssldir is None or certname is None: error = "You must set the ssldir and certname keyword arguments." raise ValueError(error) message['crypto'] = 'x509' certificate = M2Crypto.X509.load_cert( "%s/%s.crt" % (ssldir, certname)).as_pem() # Opening this file requires elevated privileges in stg/prod. rsa_private = M2Crypto.RSA.load_key( "%s/%s.key" % (ssldir, certname)) digest = M2Crypto.EVP.MessageDigest('sha1') digest.update(fedmsg.encoding.dumps(message)) signature = rsa_private.sign(digest.digest()) # Return a new dict containing the pairs in the original message as well # as the new authn fields. return dict(message.items() + [ ('signature', signature.encode('base64').decode('ascii')), ('certificate', certificate.encode('base64').decode('ascii')), ])
[ "def", "_m2crypto_sign", "(", "message", ",", "ssldir", "=", "None", ",", "certname", "=", "None", ",", "*", "*", "config", ")", ":", "if", "ssldir", "is", "None", "or", "certname", "is", "None", ":", "error", "=", "\"You must set the ssldir and certname key...
37.064516
20.967742
async def stoplisten(self, connmark = -1): ''' Can call without delegate ''' if connmark is None: connmark = self.connmark self.scheduler.emergesend(ConnectionControlEvent(self, ConnectionControlEvent.STOPLISTEN, True, connmark))
[ "async", "def", "stoplisten", "(", "self", ",", "connmark", "=", "-", "1", ")", ":", "if", "connmark", "is", "None", ":", "connmark", "=", "self", ".", "connmark", "self", ".", "scheduler", ".", "emergesend", "(", "ConnectionControlEvent", "(", "self", "...
39.285714
22.428571
def variables(self, value): """ Setter for **self.__variables** attribute. :param value: Attribute value. :type value: dict """ if value is not None: assert type(value) is dict, "'{0}' attribute: '{1}' type is not 'dict'!".format("variables", value) for key, element in value.iteritems(): assert type(key) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format( "variables", key) assert type(element) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format( "variables", element) self.__variables = value
[ "def", "variables", "(", "self", ",", "value", ")", ":", "if", "value", "is", "not", "None", ":", "assert", "type", "(", "value", ")", "is", "dict", ",", "\"'{0}' attribute: '{1}' type is not 'dict'!\"", ".", "format", "(", "\"variables\"", ",", "value", ")"...
41.625
21.75
def write(self, data): """Write single record. Args: data: record data to write as string, byte array or byte sequence. """ block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE if block_remaining < _HEADER_LENGTH: # Header won't fit into remainder self.__writer.write('\x00' * block_remaining) self.__position += block_remaining block_remaining = _BLOCK_SIZE if block_remaining < len(data) + _HEADER_LENGTH: first_chunk = data[:block_remaining - _HEADER_LENGTH] self.__write_record(_RECORD_TYPE_FIRST, first_chunk) data = data[len(first_chunk):] while True: block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE if block_remaining >= len(data) + _HEADER_LENGTH: self.__write_record(_RECORD_TYPE_LAST, data) break else: chunk = data[:block_remaining - _HEADER_LENGTH] self.__write_record(_RECORD_TYPE_MIDDLE, chunk) data = data[len(chunk):] else: self.__write_record(_RECORD_TYPE_FULL, data)
[ "def", "write", "(", "self", ",", "data", ")", ":", "block_remaining", "=", "_BLOCK_SIZE", "-", "self", ".", "__position", "%", "_BLOCK_SIZE", "if", "block_remaining", "<", "_HEADER_LENGTH", ":", "# Header won't fit into remainder", "self", ".", "__writer", ".", ...
34.8
18.4
def save_metadata(self, phase, data_name): """ Save metadata associated with the phase, such as the name of the pipeline, the name of the phase and the name of the data being fit """ with open("{}/.metadata".format(make_path(phase)), "w+") as f: f.write("pipeline={}\nphase={}\ndata={}".format(self.pipeline_name, phase.phase_name, data_name))
[ "def", "save_metadata", "(", "self", ",", "phase", ",", "data_name", ")", ":", "with", "open", "(", "\"{}/.metadata\"", ".", "format", "(", "make_path", "(", "phase", ")", ")", ",", "\"w+\"", ")", "as", "f", ":", "f", ".", "write", "(", "\"pipeline={}\...
56
26
def set_default_viewport(self): """ Calculates the viewport based on the configured aspect ratio. Will add black borders and center the viewport if the window do not match the configured viewport. If aspect ratio is None the viewport will be scaled to the entire window size regardless of size. """ if self.aspect_ratio: expected_width = int(self.buffer_height * self.aspect_ratio) expected_height = int(expected_width / self.aspect_ratio) if expected_width > self.buffer_width: expected_width = self.buffer_width expected_height = int(expected_width / self.aspect_ratio) blank_space_x = self.buffer_width - expected_width blank_space_y = self.buffer_height - expected_height self.ctx.viewport = ( blank_space_x // 2, blank_space_y // 2, expected_width, expected_height, ) else: self.ctx.viewport = (0, 0, self.buffer_width, self.buffer_height)
[ "def", "set_default_viewport", "(", "self", ")", ":", "if", "self", ".", "aspect_ratio", ":", "expected_width", "=", "int", "(", "self", ".", "buffer_height", "*", "self", ".", "aspect_ratio", ")", "expected_height", "=", "int", "(", "expected_width", "/", "...
39.75
19.964286
def get_perf_task(task_name, auth, url): """ function takes the a str object containing the name of an existing performance tasks and issues a RESTFUL call to the IMC REST service. It will return a list :param task_name: str containing the name of the performance task :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: 204 :rtype: dict >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.perf import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> selected_task = get_perf_task('Cisco_Temperature', auth.creds, auth.url) >>> assert type(selected_task) is dict >>> assert 'taskName' in selected_task """ get_perf_task_url = "/imcrs/perf/task?name=" + task_name + "&orderBy=taskId&desc=false" f_url = url + get_perf_task_url response = requests.get(f_url, auth=auth, headers=HEADERS) try: if response.status_code == 200: perf_task_info = (json.loads(response.text)) if 'task' in perf_task_info: perf_task_info = (json.loads(response.text))['task'] else: perf_task_info = "Task Doesn't Exist" return perf_task_info except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + ' get_perf_task: An Error has occured'
[ "def", "get_perf_task", "(", "task_name", ",", "auth", ",", "url", ")", ":", "get_perf_task_url", "=", "\"/imcrs/perf/task?name=\"", "+", "task_name", "+", "\"&orderBy=taskId&desc=false\"", "f_url", "=", "url", "+", "get_perf_task_url", "response", "=", "requests", ...
37.625
26.625
def delete(name, purge=False, force=False): ''' Remove a user from the minion Args: name (str): The name of the user to delete purge (bool, optional): Boolean value indicating that the user profile should also be removed when the user account is deleted. If set to True the profile will be removed. Default is False. force (bool, optional): Boolean value indicating that the user account should be deleted even if the user is logged in. True will log the user out and delete user. Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' user.delete name ''' if six.PY2: name = _to_unicode(name) # Check if the user exists try: user_info = win32net.NetUserGetInfo(None, name, 4) except win32net.error as exc: log.error('User not found: %s', name) log.error('nbr: %s', exc.winerror) log.error('ctx: %s', exc.funcname) log.error('msg: %s', exc.strerror) return False # Check if the user is logged in # Return a list of logged in users try: sess_list = win32ts.WTSEnumerateSessions() except win32ts.error as exc: log.error('No logged in users found') log.error('nbr: %s', exc.winerror) log.error('ctx: %s', exc.funcname) log.error('msg: %s', exc.strerror) # Is the user one that is logged in logged_in = False session_id = None for sess in sess_list: if win32ts.WTSQuerySessionInformation(None, sess['SessionId'], win32ts.WTSUserName) == name: session_id = sess['SessionId'] logged_in = True # If logged in and set to force, log the user out and continue # If logged in and not set to force, return false if logged_in: if force: try: win32ts.WTSLogoffSession(win32ts.WTS_CURRENT_SERVER_HANDLE, session_id, True) except win32ts.error as exc: log.error('User not found: %s', name) log.error('nbr: %s', exc.winerror) log.error('ctx: %s', exc.funcname) log.error('msg: %s', exc.strerror) return False else: log.error('User %s is currently logged in.', name) return False # Remove the User Profile directory if purge: try: sid = getUserSid(name) 
win32profile.DeleteProfile(sid) except pywintypes.error as exc: (number, context, message) = exc.args if number == 2: # Profile Folder Not Found pass else: log.error('Failed to remove profile for %s', name) log.error('nbr: %s', exc.winerror) log.error('ctx: %s', exc.funcname) log.error('msg: %s', exc.strerror) return False # And finally remove the user account try: win32net.NetUserDel(None, name) except win32net.error as exc: log.error('Failed to delete user %s', name) log.error('nbr: %s', exc.winerror) log.error('ctx: %s', exc.funcname) log.error('msg: %s', exc.strerror) return False return True
[ "def", "delete", "(", "name", ",", "purge", "=", "False", ",", "force", "=", "False", ")", ":", "if", "six", ".", "PY2", ":", "name", "=", "_to_unicode", "(", "name", ")", "# Check if the user exists", "try", ":", "user_info", "=", "win32net", ".", "Ne...
32.24
19.04
def export_to_file(request): """ Export a part of this source site's page tree to a JSON file on this user's filesystem for subsequent import in a destination site's Wagtail Admin """ if request.method == 'POST': form = ExportForm(request.POST) if form.is_valid(): payload = export_pages(form.cleaned_data['root_page'], export_unpublished=True) response = JsonResponse(payload) response['Content-Disposition'] = 'attachment; filename="export.json"' return response else: form = ExportForm() return render(request, 'wagtailimportexport/export_to_file.html', { 'form': form, })
[ "def", "export_to_file", "(", "request", ")", ":", "if", "request", ".", "method", "==", "'POST'", ":", "form", "=", "ExportForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "payload", "=", "export_pages", "(", "for...
35.473684
20
def translate(patterns, *, flags=0): """Translate `fnmatch` pattern.""" flags = _flag_transform(flags) return _wcparse.translate(_wcparse.split(patterns, flags), flags)
[ "def", "translate", "(", "patterns", ",", "*", ",", "flags", "=", "0", ")", ":", "flags", "=", "_flag_transform", "(", "flags", ")", "return", "_wcparse", ".", "translate", "(", "_wcparse", ".", "split", "(", "patterns", ",", "flags", ")", ",", "flags"...
35.4
15.8
def query_disease(): """ Returns list of diseases by query parameters --- tags: - Query functions parameters: - name: identifier in: query type: string required: false description: Disease identifier default: DI-03832 - name: ref_id in: query type: string required: false description: reference identifier default: 104300 - name: ref_type in: query type: string required: false description: Reference type default: MIM - name: name in: query type: string required: false description: Disease name default: Alzheimer disease - name: acronym in: query type: string required: false description: Disease acronym default: AD - name: description in: query type: string required: false description: Description of disease default: '%neurodegenerative disorder%' - name: limit in: query type: integer required: false description: limit of results numbers default: 10 """ allowed_str_args = ['identifier', 'ref_id', 'ref_type', 'name', 'acronym', 'description'] args = get_args( request_args=request.args, allowed_str_args=allowed_str_args ) return jsonify(query.disease(**args))
[ "def", "query_disease", "(", ")", ":", "allowed_str_args", "=", "[", "'identifier'", ",", "'ref_id'", ",", "'ref_type'", ",", "'name'", ",", "'acronym'", ",", "'description'", "]", "args", "=", "get_args", "(", "request_args", "=", "request", ".", "args", ",...
20.573529
20.779412
def ensure_dir(path): """ :param path: path to directory to be created Create a directory if it does not already exist. """ if not os.path.exists(path): # path does not exist, create the directory os.mkdir(path) else: # The path exists, check that it is not a file if os.path.isfile(path): raise Exception("Path %s already exists, and it is a file, not a directory" % path)
[ "def", "ensure_dir", "(", "path", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "# path does not exist, create the directory", "os", ".", "mkdir", "(", "path", ")", "else", ":", "# The path exists, check that it is not a file", ...
33.076923
17.230769
def best_diff(img1, img2, opts): """Find the best alignment of two images that minimizes the differences. Returns (diff, alignments) where ``diff`` is a difference map, and ``alignments`` is a tuple ((x1, y2), (x2, y2)). See ``diff()`` for the description of the alignment numbers. """ w1, h1 = img1.size w2, h2 = img2.size w, h = min(w1, w2), min(h1, h2) best = None best_value = 255 * w * h + 1 xr = abs(w1 - w2) + 1 yr = abs(h1 - h2) + 1 p = Progress(xr * yr, timeout=opts.timeout) for x in range(xr): if w1 > w2: x1, x2 = x, 0 else: x1, x2 = 0, x for y in range(yr): if h1 > h2: y1, y2 = y, 0 else: y1, y2 = 0, y p.next() this = diff(img1, img2, (x1, y1), (x2, y2)) this_value = diff_badness(this) if this_value < best_value: best = this best_value = this_value best_pos = (x1, y1), (x2, y2) return best, best_pos
[ "def", "best_diff", "(", "img1", ",", "img2", ",", "opts", ")", ":", "w1", ",", "h1", "=", "img1", ".", "size", "w2", ",", "h2", "=", "img2", ".", "size", "w", ",", "h", "=", "min", "(", "w1", ",", "w2", ")", ",", "min", "(", "h1", ",", "...
29.055556
16.305556
def get( self, instance, **kwargs ): """Return an attribute from an object using the Ref path. instance The object instance to traverse. """ target = instance for attr in self._path: target = getattr( target, attr ) return target
[ "def", "get", "(", "self", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "target", "=", "instance", "for", "attr", "in", "self", ".", "_path", ":", "target", "=", "getattr", "(", "target", ",", "attr", ")", "return", "target" ]
29.3
11.9
def Sens_m2(poly, dist, **kws): """ Variance-based decomposition/Sobol' indices. Second order sensitivity indices. Args: poly (Poly): Polynomial to find second order Sobol indices on. dist (Dist): The distributions of the input used in ``poly``. Returns: (numpy.ndarray): First order sensitivity indices for each parameters in ``poly``, with shape ``(len(dist), len(dist)) + poly.shape``. Examples: >>> x, y = chaospy.variable(2) >>> poly = chaospy.Poly([1, x*y, x*x*y*y, x*y*y*y]) >>> dist = chaospy.Iid(chaospy.Uniform(0, 1), 2) >>> indices = chaospy.Sens_m2(poly, dist) >>> print(indices) [[[0. 0. 0. 0. ] [0. 0.14285714 0.28571429 0.20930233]] <BLANKLINE> [[0. 0.14285714 0.28571429 0.20930233] [0. 0. 0. 0. ]]] """ dim = len(dist) if poly.dim<dim: poly = chaospy.poly.setdim(poly, len(dist)) zero = [0]*dim out = numpy.zeros((dim, dim) + poly.shape) mean = E(poly, dist) V_total = Var(poly, dist) E_cond_i = [None]*dim V_E_cond_i = [None]*dim for i in range(dim): zero[i] = 1 E_cond_i[i] = E_cond(poly, zero, dist, **kws) V_E_cond_i[i] = Var(E_cond_i[i], dist, **kws) zero[i] = 0 for i in range(dim): zero[i] = 1 for j in range(i+1, dim): zero[j] = 1 E_cond_ij = E_cond(poly, zero, dist, **kws) out[i, j] = ((Var(E_cond_ij, dist, **kws)-V_E_cond_i[i] - V_E_cond_i[j]) / (V_total+(V_total == 0))*(V_total != 0)) out[j, i] = out[i, j] zero[j] = 0 zero[i] = 0 return out
[ "def", "Sens_m2", "(", "poly", ",", "dist", ",", "*", "*", "kws", ")", ":", "dim", "=", "len", "(", "dist", ")", "if", "poly", ".", "dim", "<", "dim", ":", "poly", "=", "chaospy", ".", "poly", ".", "setdim", "(", "poly", ",", "len", "(", "dis...
29.180328
21.04918
def atlasdb_get_zonefile_bits( zonefile_hash, con=None, path=None ): """ What bit(s) in a zonefile inventory does a zonefile hash correspond to? Return their indexes in the bit field. """ with AtlasDBOpen(con=con, path=path) as dbcon: sql = "SELECT inv_index FROM zonefiles WHERE zonefile_hash = ?;" args = (zonefile_hash,) cur = dbcon.cursor() res = atlasdb_query_execute( cur, sql, args ) # NOTE: zero-indexed ret = [] for r in res: ret.append( r['inv_index'] - 1 ) return ret
[ "def", "atlasdb_get_zonefile_bits", "(", "zonefile_hash", ",", "con", "=", "None", ",", "path", "=", "None", ")", ":", "with", "AtlasDBOpen", "(", "con", "=", "con", ",", "path", "=", "path", ")", "as", "dbcon", ":", "sql", "=", "\"SELECT inv_index FROM zo...
29.263158
20.315789
def record_magic(dct, magic_kind, magic_name, func): """Utility function to store a function as a magic of a specific kind. Parameters ---------- dct : dict A dictionary with 'line' and 'cell' subdicts. magic_kind : str Kind of magic to be stored. magic_name : str Key to store the magic as. func : function Callable object to store. """ if magic_kind == 'line_cell': dct['line'][magic_name] = dct['cell'][magic_name] = func else: dct[magic_kind][magic_name] = func
[ "def", "record_magic", "(", "dct", ",", "magic_kind", ",", "magic_name", ",", "func", ")", ":", "if", "magic_kind", "==", "'line_cell'", ":", "dct", "[", "'line'", "]", "[", "magic_name", "]", "=", "dct", "[", "'cell'", "]", "[", "magic_name", "]", "="...
25.190476
19.52381
def set_center_freq(self, center_freq): """Set center frequency and clear averaged PSD data""" psd_state = { 'repeats': 0, 'freq_array': self._base_freq_array + self._lnb_lo + center_freq, 'pwr_array': None, 'update_lock': threading.Lock(), 'futures': [], } return psd_state
[ "def", "set_center_freq", "(", "self", ",", "center_freq", ")", ":", "psd_state", "=", "{", "'repeats'", ":", "0", ",", "'freq_array'", ":", "self", ".", "_base_freq_array", "+", "self", ".", "_lnb_lo", "+", "center_freq", ",", "'pwr_array'", ":", "None", ...
35.7
14.7
def _converttobitstring(cls, bs, offset=0, cache={}):
    """Convert bs to a bitstring and return it.

    offset gives the suggested bit offset of first significant bit, to
    optimise append etc.

    NOTE: the mutable default ``cache={}`` is deliberate -- it memoises
    string-to-bitstring parses across calls, bounded by CACHE_SIZE.
    """
    # Already a Bits: return it unchanged (no copy).
    if isinstance(bs, Bits):
        return bs
    try:
        # Fast path: reuse a previously parsed (string, offset) result.
        return cache[(bs, offset)]
    except KeyError:
        if isinstance(bs, basestring):  # Python 2 str/unicode
            b = cls()
            try:
                _, tokens = tokenparser(bs)
            except ValueError as e:
                # Surface parse failures as the library's own error type.
                raise CreationError(*e.args)
            if tokens:
                # First token establishes the datastore, which is then
                # shifted to the requested bit offset; remaining tokens
                # are appended after it.
                b._append(Bits._init_with_token(*tokens[0]))
                b._datastore = offsetcopy(b._datastore, offset)
                for token in tokens[1:]:
                    b._append(Bits._init_with_token(*token))
            assert b._assertsanity()
            assert b.len == 0 or b._offset == offset
            # Memoise only while the cache is under its size bound.
            if len(cache) < CACHE_SIZE:
                cache[(bs, offset)] = b
            return b
    except TypeError:
        # Unhashable type
        pass
    # Fallback: any other auto-initialiser (list, file, etc.) is handled
    # by the normal constructor.
    return cls(bs)
[ "def", "_converttobitstring", "(", "cls", ",", "bs", ",", "offset", "=", "0", ",", "cache", "=", "{", "}", ")", ":", "if", "isinstance", "(", "bs", ",", "Bits", ")", ":", "return", "bs", "try", ":", "return", "cache", "[", "(", "bs", ",", "offset...
35.25
14
def incver(self):
    """Return a new Dataset copy of this one with its revision bumped by one.

    Identity fields (vid, vname, fqname, version, cache_key) are skipped
    so the new Dataset derives them from the incremented revision.
    """
    skipped = ('vid', 'vname', 'fqname', 'version', 'cache_key')
    kwargs = {}
    for attr in self.__mapper__.attrs:
        key = attr.key
        if key in skipped:
            continue
        # The revision is bumped; every other mapped column is copied verbatim.
        kwargs[key] = self.revision + 1 if key == 'revision' else getattr(self, key)
    return Dataset(**kwargs)
[ "def", "incver", "(", "self", ")", ":", "d", "=", "{", "}", "for", "p", "in", "self", ".", "__mapper__", ".", "attrs", ":", "if", "p", ".", "key", "in", "[", "'vid'", ",", "'vname'", ",", "'fqname'", ",", "'version'", ",", "'cache_key'", "]", ":"...
28.642857
18.357143
def _docx_to_epub(
        self):
    """*convert docx file to epub*

    Converts ``self.urlOrPath`` (a docx file) to intermediate HTML with
    pandoc, then converts that HTML to epub3, optionally splicing in the
    rendered header/footer HTML fragments.

    **Return:**
        - path to the generated epub file, or ``None`` if either pandoc
          step failed to produce its output file.
    """
    self.log.debug('starting the ``_docx_to_epub`` method')

    # Optional header/footer fragments are rendered to temp HTML files
    # and shell-quoted so they can be spliced into the pandoc command.
    if self.footer:
        footer = self._tmp_html_file(self.footer)
        footer = '"%(footer)s"' % locals()
    else:
        footer = ""
    if self.header:
        header = self._tmp_html_file(self.header)
        header = '"%(header)s"' % locals()
    else:
        header = ""

    # FIRST CONVERT THE DOC TO HTML
    docx = self.urlOrPath
    if self.title:
        title = self.title.replace(".html", "")
        html = "/tmp/" + self.title.replace(".html", "") + ".html"
    else:
        title = os.path.basename(docx).replace(
            ".docx", "").replace("_", " ")
        html = "/tmp/" + os.path.basename(docx).replace(".docx", ".html")
    pandoc = self.settings["executables"]["pandoc"]

    # TMP IMAGE DIR -- pandoc extracts embedded media here; removed on
    # both the success and failure paths below.
    now = datetime.now()
    now = now.strftime("%Y%m%dt%H%M%S")
    imageDir = "/tmp/%(now)s" % locals()
    if not os.path.exists(imageDir):
        os.makedirs(imageDir)

    cmd = """%(pandoc)s --extract-media=%(imageDir)s -t html -f docx '%(docx)s' -o '%(html)s' """ % locals()
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    stdout, stderr = p.communicate()
    self.log.debug('output: %(stdout)s' % locals())

    # Verify pandoc actually produced the intermediate HTML.
    try:
        with open(html):
            pass
    except IOError:
        self.log.error(
            "the html %s does not exist on this machine, here is the failure message: %s" % (html, stderr))
        # Best-effort cleanup. FIX: was a bare ``except:`` which also
        # swallowed SystemExit/KeyboardInterrupt.
        try:
            shutil.rmtree(imageDir)
        except Exception:
            pass
        return None

    # The original guarded this with an always-true ``if fileExists:``
    # (the failure path above already returned); the flag was dead code.
    if self.outputDirectory:
        epub = self.outputDirectory + "/" + \
            os.path.basename(html).replace(".html", ".epub")
    else:
        epub = docx.replace(".docx", ".epub")
    # FIX: removed a duplicate lookup of the pandoc executable here.
    cmd = """%(pandoc)s --metadata=title:'%(title)s' -S -s -f html -t epub3 %(header)s '%(html)s' %(footer)s -o '%(epub)s' """ % locals(
    )
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    stdout, stderr = p.communicate()
    self.log.debug('output: %(stdout)s' % locals())

    # Best-effort cleanup of intermediates (media dir + html).
    try:
        shutil.rmtree(imageDir)
        os.remove(html)
    except Exception:
        pass

    # Verify the epub was produced.
    try:
        with open(epub):
            pass
    except IOError:
        self.log.error(
            "the epub %s does not exist on this machine, here is the failure message: %s" % (epub, stderr))
        return None

    self.log.debug('completed the ``_docx_to_epub`` method')
    return epub
[ "def", "_docx_to_epub", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "'starting the ``_docx_to_epub`` method'", ")", "if", "self", ".", "footer", ":", "footer", "=", "self", ".", "_tmp_html_file", "(", "self", ".", "footer", ")", "footer", ...
34.149425
19.08046
def handle_get_account(self, req):
    """Handles the GET v2/<account> call for getting account information.
    Can only be called by an account .admin.

    On success, a JSON dictionary will be returned containing the keys
    `account_id`, `services`, and `users`. The `account_id` is the value
    used when creating service accounts. The `services` value is a dict as
    described in the :func:`handle_get_token` call. The `users` value is a
    list of dicts, each dict representing a user and currently only
    containing the single key `name`. For example::

        {"account_id": "AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162",
         "services": {"storage": {"default": "local",
                                  "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}},
         "users": [{"name": "tester"}, {"name": "tester3"}]}

    :param req: The swob.Request to process.
    :returns: swob.Response, 2xx on success with a JSON dictionary as
              explained above.
    """
    account = req.path_info_pop()
    # Reject trailing path segments, empty names, and reserved
    # '.'-prefixed (internal) account names.
    if req.path_info or not account or account[0] == '.':
        return HTTPBadRequest(request=req)
    # Only this account's .admin (or higher) may read its information.
    if not self.is_account_admin(req, account):
        return self.denied_response(req)
    # Fetch the .services object stored in this account's container
    # inside the auth account.
    path = quote('/v1/%s/%s/.services' % (self.auth_account, account))
    resp = self.make_pre_authed_request(
        req.environ, 'GET', path).get_response(self.app)
    if resp.status_int == 404:
        return HTTPNotFound(request=req)
    if resp.status_int // 100 != 2:
        raise Exception('Could not obtain the .services object: %s %s' %
                        (path, resp.status))
    services = json.loads(resp.body)
    # Page through the account container's object listing using markers
    # until an empty page is returned.
    listing = []
    marker = ''
    while True:
        path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % (
            self.auth_account, account)), quote(marker))
        resp = self.make_pre_authed_request(
            req.environ, 'GET', path).get_response(self.app)
        if resp.status_int == 404:
            return HTTPNotFound(request=req)
        if resp.status_int // 100 != 2:
            raise Exception('Could not list in main auth account: %s %s' %
                            (path, resp.status))
        # Every listing response carries the account id header; the value
        # from the last page read is the one returned below.
        account_id = resp.headers['X-Container-Meta-Account-Id']
        sublisting = json.loads(resp.body)
        if not sublisting:
            break
        for obj in sublisting:
            # '.'-prefixed objects (e.g. .services) are internal, not users.
            if obj['name'][0] != '.':
                listing.append({'name': obj['name']})
        # Next page starts after the last name seen (marker must be bytes
        # here -- this is Python 2 era swift code).
        marker = sublisting[-1]['name'].encode('utf-8')
    return Response(content_type=CONTENT_TYPE_JSON,
                    body=json.dumps({'account_id': account_id,
                                     'services': services,
                                     'users': listing}))
[ "def", "handle_get_account", "(", "self", ",", "req", ")", ":", "account", "=", "req", ".", "path_info_pop", "(", ")", "if", "req", ".", "path_info", "or", "not", "account", "or", "account", "[", "0", "]", "==", "'.'", ":", "return", "HTTPBadRequest", ...
49.758621
17.931034
def matlab_compatible(name):
    """ make a channel name compatible with Matlab variable naming

    Characters outside ALLOWED_MATLAB_CHARS are replaced with "_", and an
    "M_" prefix is added when the result does not start with an ASCII
    letter (Matlab identifiers must begin with a letter).

    Parameters
    ----------
    name : str
        channel name

    Returns
    -------
    compatible_name : str
        channel name compatible with Matlab
    """
    compatible_name = "".join(
        ch if ch in ALLOWED_MATLAB_CHARS else "_" for ch in name
    )

    # FIX: guard the empty string -- the original indexed [0] and raised
    # IndexError on name == ""; now "" becomes "M_".
    if not compatible_name or compatible_name[0] not in string.ascii_letters:
        compatible_name = "M_" + compatible_name

    # max variable name is 63 and 3 chars are reserved
    # for get_unique_name in case of multiple channel name occurence
    return compatible_name[:60]
[ "def", "matlab_compatible", "(", "name", ")", ":", "compatible_name", "=", "[", "ch", "if", "ch", "in", "ALLOWED_MATLAB_CHARS", "else", "\"_\"", "for", "ch", "in", "name", "]", "compatible_name", "=", "\"\"", ".", "join", "(", "compatible_name", ")", "if", ...
26.416667
22.708333
def read(self, length, skip=False):
    """Consumes the first ``length`` bytes from the accumulator.

    When ``skip`` is True the bytes are discarded and an empty element
    is produced in their place; otherwise the consumed data is returned
    (as text when ``is_unicode`` is set, else as the raw data type).
    Raises IndexError if fewer than ``length`` bytes are buffered.
    """
    if length > self.__size:
        raise IndexError(
            'Cannot pop %d bytes, %d bytes in buffer queue' % (length, self.__size))
    # Account for the consumption up front; the loop below only moves data.
    self.position += length
    self.__size -= length

    segments = self.__segments
    offset = self.__offset
    data = self.__data_cls()
    while length > 0:
        segment = segments[0]
        segment_off = offset
        segment_len = len(segment)
        segment_rem = segment_len - segment_off
        segment_read_len = min(segment_rem, length)
        if segment_off == 0 and segment_read_len == segment_rem:
            # consume an entire segment
            if skip:
                segment_slice = self.__element_type()
            else:
                # Whole segment consumed from its start: reuse it without copying.
                segment_slice = segment
        else:
            # Consume a part of the segment.
            if skip:
                segment_slice = self.__element_type()
            else:
                segment_slice = segment[segment_off:segment_off + segment_read_len]
        # Only the first segment can start mid-way; subsequent ones start at 0.
        offset = 0
        segment_off += segment_read_len
        if segment_off == segment_len:
            # Segment fully drained: drop it and reset the queue offset.
            segments.popleft()
            self.__offset = 0
        else:
            self.__offset = segment_off
        if length <= segment_rem and len(data) == 0:
            # Fast path: the request fit inside a single segment and no
            # accumulator was started -- return the slice directly.
            return segment_slice
        data.extend(segment_slice)
        length -= segment_read_len
    if self.is_unicode:
        return data.as_text()
    else:
        return data
[ "def", "read", "(", "self", ",", "length", ",", "skip", "=", "False", ")", ":", "if", "length", ">", "self", ".", "__size", ":", "raise", "IndexError", "(", "'Cannot pop %d bytes, %d bytes in buffer queue'", "%", "(", "length", ",", "self", ".", "__size", ...
36.021739
14.26087
# Truth-value spellings, mirroring ``distutils.util.strtobool`` (which is
# deprecated and removed in Python 3.12 -- the logic is inlined here).
_TRUTHY = frozenset({"y", "yes", "t", "true", "on", "1"})
_FALSY = frozenset({"n", "no", "f", "false", "off", "0"})


def boolean(value):
    """
    Configuration-friendly boolean type converter.

    Supports both boolean-valued and string-valued inputs (e.g. from env vars).

    :param value: a ``bool``, or a string such as "true"/"0"/"yes"/"off"
        (case-insensitive)
    :returns: ``bool`` -- the original returned ``strtobool``'s 0/1 ints;
        truthiness and comparisons against True/False are unchanged
    :raises ValueError: if ``value`` is a string that spells no truth value
    """
    if isinstance(value, bool):
        return value
    if value == "":
        return False
    lowered = value.lower()
    if lowered in _TRUTHY:
        return True
    if lowered in _FALSY:
        return False
    # Same exception type and message style as distutils.util.strtobool.
    raise ValueError("invalid truth value %r" % (value,))
[ "def", "boolean", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "bool", ")", ":", "return", "value", "if", "value", "==", "\"\"", ":", "return", "False", "return", "strtobool", "(", "value", ")" ]
19.928571
22.357143
def _fetch_all(self):
    """
    Internal helper that fetches the ring from Redis, including any
    inactive nodes/replicas.

    Returns a list of tuples (start, replica, heartbeat, expired), where

    * start: start of the range for which the replica is responsible
    * replica: name of the replica
    * heartbeat: unix time stamp of the last heartbeat
    * expired: boolean denoting whether this replica is inactive
    """
    # Heartbeats older than this cutoff mark a replica as expired.
    cutoff = time.time() - NODE_TIMEOUT
    members = self.conn.zrange(self.key, 0, -1, withscores=True)
    ring = []
    for member, heartbeat in members:
        # Members are encoded as "<start>:<replica-name>".
        range_start, replica_name = member.split(':', 1)
        ring.append(
            (int(range_start), replica_name, heartbeat, heartbeat < cutoff))
    # Order by range start so callers can walk/bisect the ring.
    ring.sort(key=operator.itemgetter(0))
    return ring
[ "def", "_fetch_all", "(", "self", ")", ":", "now", "=", "time", ".", "time", "(", ")", "expiry_time", "=", "now", "-", "NODE_TIMEOUT", "data", "=", "self", ".", "conn", ".", "zrange", "(", "self", ".", "key", ",", "0", ",", "-", "1", ",", "withsc...
34.16
22.56
def milestones(self):
    '''Array of all milestones, fetched once and then served from cache.'''
    cached = self.cache['milestones']
    if cached:
        return cached
    xml_body = self.bc.list_milestones(self.id)
    root = ET.fromstring(xml_body)
    parsed = [Milestone(node) for node in root.findall("milestone")]
    # Ascending sort then reverse (kept as two steps: with a stable sort
    # this orders equal milestones differently than sort(reverse=True)).
    parsed.sort()
    parsed.reverse()
    self.cache['milestones'] = parsed
    return parsed
[ "def", "milestones", "(", "self", ")", ":", "if", "self", ".", "cache", "[", "'milestones'", "]", ":", "return", "self", ".", "cache", "[", "'milestones'", "]", "milestone_xml", "=", "self", ".", "bc", ".", "list_milestones", "(", "self", ".", "id", ")...
38.166667
16