code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_current_line(self):
    """Return a SourceLine of the current line, or None.

    Returns None when there is no space (per has_space()) or when the
    current line turns out to be empty.

    NOTE(review): reconstructed from a collapsed one-liner; the trailing
    `else` is read as a while-else clause — confirm against the original
    layout.
    """
    if not self.has_space():
        return None
    # Start of the current line: rewind from the absolute position by the
    # column offset.
    pos = self.pos - self.col
    string = self.string
    end = self.length
    output = []
    # Collect characters up to the next newline or the logical end.
    while pos < len(string) and string[pos] != '\n':
        output.append(string[pos])
        pos += 1
        if pos == end:
            break
    else:
        # Loop ended without hitting `end`: append the character at `pos`
        # (presumably the terminating newline) — TODO confirm this cannot
        # index past the end of `string`.
        output.append(string[pos])
    if not output:
        return None
    return SourceLine(''.join(output), self.row)
Return a SourceLine of the current line.
def get_json_files(files, recursive=False):
    """Return a list of JSON file paths to validate from `files`.

    Directory members contribute their ``.json`` children (descending into
    subdirectories when ``recursive`` is true); other members are kept when
    they look like JSON files.

    Args:
        files: A list of file paths and/or directory paths.
        recursive: If true, descend into subdirectories of input dirs.

    Returns:
        A list of file paths to validate (empty when `files` is empty).

    Raises:
        NoJSONFileFoundError: when `files` was non-empty but nothing
            JSON-like was found.
    """
    if not files:
        return []
    json_files = []
    for path in files:
        if os.path.isdir(path):
            json_files.extend(list_json_files(path, recursive))
        elif is_json(path):
            json_files.append(path)
    if not json_files:
        raise NoJSONFileFoundError("No JSON files found!")
    return json_files
Return a list of files to validate from `files`. If a member of `files` is a directory, its children with a ``.json`` extension will be added to the return value. Args: files: A list of file paths and/or directory paths. recursive: If ``true``, this will descend into any subdirectories of input directories. Returns: A list of file paths to validate.
def new(self, log_block_size):
    """Create a new Version Volume Descriptor (one extent of zero bytes).

    Parameters:
     log_block_size - The size of one extent.
    Returns:
     Nothing.
    """
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('This Version Volume Descriptor is already initialized')
    # bytes(n) yields n zero bytes — identical to b'\x00' * n.
    self._data = bytes(log_block_size)
    self._initialized = True
Create a new Version Volume Descriptor. Parameters: log_block_size - The size of one extent. Returns: Nothing.
def reset(self):
    """Reset saved state in every layer (both directions when bidirectional).

    If your convolutional window is greater than 1 and you save previous
    xs, you must call this at the beginning of each new sequence.
    """
    groups = [self.layers]
    if self.bidirectional:
        groups.append(self.layers_bwd)
    for group in groups:
        for layer in group:
            layer.reset()
If your convolutional window is greater than 1 and you save previous xs, you must reset at the beginning of each new sequence.
def right(self, speed=1):
    """Make the robot turn right by running the left motor forward and the
    right motor backward.

    :param float speed: Speed at which to drive the motors, between
        0 (stopped) and 1 (full speed). Default is 1.
    """
    self.left_motor.forward(speed)
    self.right_motor.backward(speed)
Make the robot turn right by running the left motor forward and right motor backward. :param float speed: Speed at which to drive the motors, as a value between 0 (stopped) and 1 (full speed). The default is 1.
def _GenerateCRCTable(): poly = 0xedb88320 table = [0] * 256 for i in range(256): crc = i for j in range(8): if crc & 1: crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly else: crc = ((crc >> 1) & 0x7FFFFFFF) table[i] = crc return table
Generate a CRC-32 table. ZIP encryption uses the CRC32 one-byte primitive for scrambling some internal keys. We noticed that a direct implementation is faster than relying on binascii.crc32().
def fetch(self):
    """Fetch this TaskQueue from the Twilio API.

    :returns: Fetched TaskQueueInstance
    """
    params = values.of({})
    payload = self._version.fetch(
        'GET',
        self._uri,
        params=params,
    )
    return TaskQueueInstance(
        self._version,
        payload,
        workspace_sid=self._solution['workspace_sid'],
        sid=self._solution['sid'],
    )
Fetch a TaskQueueInstance :returns: Fetched TaskQueueInstance :rtype: twilio.rest.taskrouter.v1.workspace.task_queue.TaskQueueInstance
def update_port(self, port_information, id_or_uri, timeout=-1):
    """Updates an interconnect port.

    Args:
        port_information (dict): Port object to update.
        id_or_uri: Either the interconnect id or the interconnect uri.
        timeout: Timeout in seconds; waits for task completion by default.
            Timing out does not abort the operation in OneView, it only
            stops waiting for its completion.

    Returns:
        dict: The updated interconnect.
    """
    uri = self._client.build_uri(id_or_uri) + "/ports"
    return self._client.update(port_information, uri, timeout)
Updates an interconnect port. Args: id_or_uri: Can be either the interconnect id or the interconnect uri. port_information (dict): object to update timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: dict: The interconnect.
def masktorgb(mask, color='lightgreen', alpha=1.0):
    """Convert a 2D boolean mask into an RGBImage for canvas overlay.

    Masked (True) pixels receive the given color at the given opacity;
    unmasked pixels stay fully transparent.

    Parameters
    ----------
    mask : ndarray
        2D boolean mask.
    color : str
        Color name accepted by Ginga.
    alpha : float
        Opacity for masked pixels.

    Raises
    ------
    ValueError
        If the mask is not 2D.
    """
    mask = np.asarray(mask)
    if mask.ndim != 2:
        raise ValueError('ndim={0} is not supported'.format(mask.ndim))
    ht, wd = mask.shape
    r, g, b = colors.lookup_color(color)
    rgbobj = RGBImage(data_np=np.zeros((ht, wd, 4), dtype=np.uint8))
    rc = rgbobj.get_slice('R')
    gc = rgbobj.get_slice('G')
    bc = rgbobj.get_slice('B')
    ac = rgbobj.get_slice('A')
    # Fully transparent everywhere by default, opaque only where masked.
    ac[:] = 0
    rc[mask] = int(r * 255)
    gc[mask] = int(g * 255)
    bc[mask] = int(b * 255)
    ac[mask] = int(alpha * 255)
    return rgbobj
Convert boolean mask to RGB image object for canvas overlay. Parameters ---------- mask : ndarray Boolean mask to overlay. 2D image only. color : str Color name accepted by Ginga. alpha : float Opacity. Unmasked data are always transparent. Returns ------- rgbobj : RGBImage RGB image for canvas Image object. Raises ------ ValueError Invalid mask dimension.
def do_genesis_block_audit(genesis_block_path=None, key_id=None):
    """Load and audit the genesis block, optionally with an alternative key.

    When key_id is given, the key is exported via gpg2 and becomes the sole
    signing key for the audit. Returns True when the audit passes, False
    otherwise.
    """
    signing_keys = GENESIS_BLOCK_SIGNING_KEYS
    if genesis_block_path is not None:
        genesis_block_load(genesis_block_path)
    if key_id is not None:
        gpg2_path = find_gpg2()
        assert gpg2_path, 'You need to install gpg2'
        # Export the ASCII-armored public key for the requested key id.
        p = subprocess.Popen([gpg2_path, '-a', '--export', key_id], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            log.error('Failed to load key {}\n{}'.format(key_id, err))
            return False
        signing_keys = { key_id: out.strip() }
    res = genesis_block_audit(get_genesis_block_stages(), key_bundle=signing_keys)
    if not res:
        log.error('Genesis block is NOT signed by {}'.format(', '.join(signing_keys.keys())))
        return False
    return True
Loads and audits the genesis block, optionally using an alternative key
def save_pdf(self, save_model=True):
    """Render and save this receipt as a PDF related to this model.

    The related Receipt must be validated first.

    :param bool save_model: If True, immediately save this model instance.
    :raises exceptions.DjangoAfipException: when the receipt is not
        validated.
    """
    # Imported here, presumably to avoid a circular import — confirm.
    from django_afip.views import ReceiptPDFView
    if not self.receipt.is_validated:
        raise exceptions.DjangoAfipException(
            _('Cannot generate pdf for non-authorized receipt')
        )
    self.pdf_file = File(BytesIO(), name='{}.pdf'.format(uuid.uuid4().hex))
    render_pdf(
        template='receipts/code_{}.html'.format(
            self.receipt.receipt_type.code,
        ),
        file_=self.pdf_file,
        context=ReceiptPDFView.get_context_for_pk(self.receipt_id),
    )
    if save_model:
        self.save()
Save the receipt as a PDF related to this model. The related :class:`~.Receipt` should be validated first, of course. :param bool save_model: If True, immediately save this model instance.
def start(self, timeout=None):
    """Start the server (and its handler thread, if any) in a new thread.

    Parameters
    ----------
    timeout : float or None, optional
        Time in seconds to wait for the server thread to start.

    Raises
    ------
    RuntimeError
        If the message handler thread is already running.
    """
    # BUG FIX: Thread.isAlive() was removed in Python 3.9; is_alive() is
    # the supported name (available since Python 2.6).
    if self._handler_thread and self._handler_thread.is_alive():
        raise RuntimeError('Message handler thread already started')
    self._server.start(timeout)
    self.ioloop = self._server.ioloop
    if self._handler_thread:
        self._handler_thread.set_ioloop(self.ioloop)
        self._handler_thread.start(timeout)
Start the server in a new thread. Parameters ---------- timeout : float or None, optional Time in seconds to wait for server thread to start.
def add_term_occurrence(self, term, document):
    """Record one occurrence of *term* in *document*.

    NOTE(review): when the index is frozen and *term* is unseen, the
    method still registers *document* with a zero count before bailing
    out — this mirrors the original behavior.
    """
    self._documents.setdefault(document, 0)
    if term not in self._terms:
        if self._freeze:
            return
        self._terms[term] = collections.Counter()
    self._terms[term].setdefault(document, 0)
    self._documents[document] += 1
    self._terms[term][document] += 1
Adds an occurrence of the term in the specified document.
def change_score_for(self, member, delta, member_data=None):
    """Change a member's score in the default leaderboard by a delta
    (positive or negative); delegates to change_score_for_member_in.

    @param member [String] Member name.
    @param delta [float] Score change.
    @param member_data [String] Optional member data.
    """
    self.change_score_for_member_in(self.leaderboard_name, member, delta, member_data)
Change the score for a member in the leaderboard by a score delta which can be positive or negative. @param member [String] Member name. @param delta [float] Score change. @param member_data [String] Optional member data.
def setValues(self, values):
    """Set the tuples in this set (valid only for non-indexed sets).

    Lists/sets are dispatched by element type: any string element makes
    everything a string value; all-number lists become doubles; all-tuple
    lists become a tuple array; anything else raises TypeError. numpy
    arrays are converted through DataFrame; other inputs fall through to
    Entity.setValues.

    NOTE(review): uses Python-2 `basestring`; indentation reconstructed
    from collapsed source — confirm the ndarray branch returns before the
    Entity.setValues fallback.
    """
    if isinstance(values, (list, set)):
        if any(isinstance(value, basestring) for value in values):
            values = list(map(str, values))
            self._impl.setValuesStr(values, len(values))
        elif all(isinstance(value, Real) for value in values):
            values = list(map(float, values))
            self._impl.setValuesDbl(values, len(values))
        elif all(isinstance(value, tuple) for value in values):
            self._impl.setValues(Utils.toTupleArray(values), len(values))
        else:
            raise TypeError
    else:
        if np is not None and isinstance(values, np.ndarray):
            self.setValues(DataFrame.fromNumpy(values).toList())
            return
        Entity.setValues(self, values)
Set the tuples in this set. Valid only for non-indexed sets. Args: values: A list of tuples or a :class:`~amplpy.DataFrame`. In the case of a :class:`~amplpy.DataFrame`, the number of indexing columns of the must be equal to the arity of the set. In the case of a list of tuples, the arity of each tuple must be equal to the arity of the set. For example, considering the following AMPL entities and corresponding Python objects: .. code-block:: ampl set A := 1..2; param p{i in A} := i+10; set AA; The following is valid: .. code-block:: python A, AA = ampl.getSet('A'), ampl.getSet('AA') AA.setValues(A.getValues()) # AA has now the members {1, 2}
def shutdown(self):
    """Shut down the guest OS, falling back to a hard power-off when guest
    tools are unavailable or the shutdown times out.
    """
    vm = self.get_vm_failfast(self.config['name'])
    if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOff:
        print("%s already poweredOff" % vm.name)
    else:
        if self.guestToolsRunning(vm):
            timeout_minutes = 10
            print("waiting for %s to shutdown "
                  "(%s minutes before forced powerOff)" % (
                      vm.name, str(timeout_minutes)
                  ))
            vm.ShutdownGuest()
            if self.WaitForVirtualMachineShutdown(vm, timeout_minutes * 60):
                print("shutdown complete")
                print("%s poweredOff" % vm.name)
            else:
                # Graceful shutdown didn't finish in time: force it.
                print("%s has not shutdown after %s minutes:"
                      "will powerOff" % (vm.name, str(timeout_minutes)))
                self.powerOff()
        else:
            print("GuestTools not running or not installed: will powerOff")
            self.powerOff()
Shutdown guest fallback to power off if guest tools aren't installed
def _x_get_physical_path(self):
    """Generate the physical path, prefixing the portal path when missing.

    NOTE(review): this uses a substring containment test
    (`portal_path not in path`) rather than a prefix check — a path that
    merely *contains* the portal path anywhere is returned unchanged;
    confirm that is intended.
    """
    path = self.context.getPath()
    portal_path = api.get_path(api.get_portal())
    if portal_path not in path:
        return "{}/{}".format(portal_path, path)
    return path
Generate the physical path
def get(self, run_id):
    """Get a single run from the database.

    :param run_id: The ID of the run.
    :return: The whole run document from the database.
    :raise NotFoundError: when no run with that id exists.
    """
    id = self._parse_id(run_id)
    run = self.generic_dao.find_record(self.collection_name, {"_id": id})
    if run is None:
        raise NotFoundError("Run %s not found." % run_id)
    return run
Get a single run from the database. :param run_id: The ID of the run. :return: The whole object from the database. :raise NotFoundError when not found
def _validate_date_like_dtype(dtype): try: typ = np.datetime_data(dtype)[0] except ValueError as e: raise TypeError('{error}'.format(error=e)) if typ != 'generic' and typ != 'ns': msg = '{name!r} is too specific of a frequency, try passing {type!r}' raise ValueError(msg.format(name=dtype.name, type=dtype.type.__name__))
Check whether the dtype is a date-like dtype. Raises an error if invalid. Parameters ---------- dtype : dtype, type The dtype to check. Raises ------ TypeError : The dtype could not be cast to a date-like dtype. ValueError : The dtype is an illegal date-like dtype (e.g. the frequency provided is too specific)
def access_list(package):
    """Print the list of users who can access *package*, one per line."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    lookup_url = "{url}/api/access/{owner}/{pkg}/".format(
        url=get_registry_url(team), owner=owner, pkg=pkg)
    data = session.get(lookup_url).json()
    print('\n'.join(data['users']))
Print list of users who can access a package.
def translations(self, lang):
    """Return a {fieldname: translation} dict for *lang*, using the cache.

    @type lang: string
    @param lang: name of the language
    @return: dict mapping each translatable field name to its translation
    """
    key = self._get_translations_cache_key(lang)
    trans_dict = cache.get(key, {})
    # Make sure the slug field is part of the translatable fields.
    if self.translatable_slug is not None:
        if self.translatable_slug not in self.translatable_fields:
            self.translatable_fields = self.translatable_fields + (self.translatable_slug,)
    # Cache miss: build the dict and store it.
    if not trans_dict:
        for field in self.translatable_fields:
            trans_dict[field] = self.get_translation(lang, field)
        cache.set(key, trans_dict)
    return trans_dict
Return the translation strings of a Translatable instance in dictionary form @type lang: string @param lang: a string with the name of the language @rtype: python Dictionary @return: Returns all fieldname / translation (key / value) pairs
def _parse_error_tree(error):
    """Parse an error ElementTree node into an ErrorInfo object.

    :param error: The ElementTree error node.
    :return: ErrorInfo with the error id and a message taken from the
        node's text, or from its ./desc child when the text is absent.
    """
    info = ErrorInfo(error.get('id'), None)
    if error.text is not None:
        info.message = error.text
    else:
        description = error.find('./desc')
        if description is not None:
            info.message = description.text
    return info
Parse an error ElementTree Node to create an ErrorInfo object :param error: The ElementTree error node :return: An ErrorInfo object containing the error ID and the message.
def commit_config(self):
    """Commit the loaded candidate configuration over SSH.

    Netmiko is used for the commit because it handles results better than
    pan-python.

    Raises:
        ReplaceConfigException: when no config is loaded, or when a
            replace-style commit fails.
        MergeConfigException: when a merge-style commit fails.
    """
    # Guard clause instead of wrapping the whole body in `if self.loaded`.
    if not self.loaded:
        raise ReplaceConfigException('No config loaded.')
    if self.ssh_connection is False:
        self._open_ssh()
    try:
        self.ssh_device.commit()
        time.sleep(3)
        self.loaded = False
        self.changed = True
    except Exception:  # BUG FIX: narrowed from a bare `except:`, which
        # also swallowed KeyboardInterrupt/SystemExit.
        # Typo fix in messages: 'commiting' -> 'committing'.
        if self.merge_config:
            raise MergeConfigException('Error while committing config')
        else:
            raise ReplaceConfigException('Error while committing config')
Netmiko is being used to commit the configuration because it takes better care of results compared to pan-python.
def _handle_result(self):
    """Process one upload result, blocking until one is available.

    On success, records (index, md5) and decrements the pending-chunk
    counter; on failure, re-raises the worker's stored exception.

    NOTE(review): `raise result.traceback` assumes the traceback attribute
    holds an exception instance — confirm against the producer side.
    """
    result = self.inbox.get()
    if result.success:
        if self._verbosity >= VERB_PROGRESS:
            sys.stderr.write("\nuploaded chunk {} \n".format(result.index))
        self.results.append((result.index, result.md5))
        self._pending_chunks -= 1
    else:
        raise result.traceback
Process one result. Block until one is available.
def list(self, count=10):
    """List models under the current project in a table view.

    Args:
        count: upper limit of the number of models to list.
    Raises:
        Exception if called in a non-IPython environment.
    """
    import IPython
    data = []
    # zip() against range() caps the iteration at `count` models.
    for _, model in zip(range(count), self.get_iterator()):
        element = {'name': model['name']}
        if 'defaultVersion' in model:
            version_short_name = model['defaultVersion']['name'].split('/')[-1]
            element['defaultVersion'] = version_short_name
        data.append(element)
    IPython.display.display(
        datalab.utils.commands.render_dictionary(data, ['name', 'defaultVersion']))
List models under the current project in a table view. Args: count: upper limit of the number of models to list. Raises: Exception if it is called in a non-IPython environment.
def require(predicate):
    """Decorator factory for methods guarded by a predicate.

    When the predicate is falsy at call time, the wrapped method is
    skipped and None is returned.

    :param predicate: A callable returning a truth value.
    :returns: Method decorator.
    """
    def decorator(method):
        @functools.wraps(method)
        def guarded(*args, **kwargs):
            return method(*args, **kwargs) if predicate() else None
        return guarded
    return decorator
Decorator factory for methods requiring a predicate. If the predicate is not fulfilled during a method call, the method call is skipped and None is returned. :param predicate: A callable returning a truth value :returns: Method decorator .. seealso:: :py:class:`internet`
def set_nodelay(self, value: bool) -> None:
    """Set the TCP no-delay flag on the underlying websocket stream.

    See `.BaseIOStream.set_nodelay` for details; call once the websocket
    connection is established.

    .. versionadded:: 3.1
    """
    assert self.ws_connection is not None
    self.ws_connection.set_nodelay(value)
Set the no-delay flag for this stream. By default, small messages may be delayed and/or combined to minimize the number of packets sent. This can sometimes cause 200-500ms delays due to the interaction between Nagle's algorithm and TCP delayed ACKs. To reduce this delay (at the expense of possibly increasing bandwidth usage), call ``self.set_nodelay(True)`` once the websocket connection is established. See `.BaseIOStream.set_nodelay` for additional details. .. versionadded:: 3.1
def _GetDictFromStringsTable(self, parser_mediator, table): if not table: return {} record_values = {} for record in table.records: if parser_mediator.abort: break if record.get_number_of_values() != 2: continue identification = self._GetRecordValue(record, 0) filename = self._GetRecordValue(record, 1) if not identification: continue record_values[identification] = filename return record_values
Build a dictionary of the value in the strings table. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. table (pyesedb.table): strings table. Returns: dict[str,object]: values per column name.
def Wp(self):
    """Total energy in protons, returned as a Quantity in erg.

    Integrates Ep * J(Ep) over the proton energy grid with a log-log
    trapezoidal rule; the integral is tagged with GeV units before
    conversion.
    """
    Wp = trapz_loglog(self._Ep * self._J, self._Ep) * u.GeV
    return Wp.to("erg")
Total energy in protons
def check_qt():
    """Check Qt binding requirements for Spyder.

    Shows a warning when qtpy cannot be imported or the detected PyQt
    version is older than required.
    """
    qt_infos = dict(pyqt5=("PyQt5", "5.6"))
    try:
        import qtpy
        package_name, required_ver = qt_infos[qtpy.API]
        actual_ver = qtpy.PYQT_VERSION
        if LooseVersion(actual_ver) < LooseVersion(required_ver):
            show_warning("Please check Spyder installation requirements:\n"
                         "%s %s+ is required (found v%s)." % (package_name, required_ver, actual_ver))
    except ImportError:
        show_warning("Failed to import qtpy.\n"
                     "Please check Spyder installation requirements:\n\n"
                     "qtpy 1.2.0+ and\n"
                     "%s %s+\n\n"
                     "are required to run Spyder." % (qt_infos['pyqt5']))
Check Qt binding requirements
def make_as(self, klass, name, **attributes):
    """Create an instance of the given model class and type.

    :param klass: The model class.
    :param name: The type name.
    :param attributes: The instance attributes.
    :return: the instance built by of(klass, name).make(**attributes)
    """
    return self.of(klass, name).make(**attributes)
Create an instance of the given model and type. :param klass: The class :type klass: class :param name: The type :type name: str :param attributes: The instance attributes :type attributes: dict :return: mixed
def fetchChildren(self):
    """Fetch and return the children of this item.

    The actual work is done by _fetchAllChildren; descendant classes
    should typically override that method instead of this one. The
    _canFetchChildren flag is cleared even when fetching raises.
    """
    assert self._canFetchChildren, "canFetchChildren must be True"
    try:
        return self._fetchAllChildren()
    finally:
        self._canFetchChildren = False
Fetches children. The actual work is done by _fetchAllChildren. Descendant classes should typically override that method instead of this one.
def lock(self, back=None, remote=None):
    """Set the update lock on the given fileserver backends.

    ``remote`` can either be a dict of repo configuration, or a string
    pattern; when a pattern, only remotes whose URL matches are locked.

    Returns a (locked, errors) pair of lists.
    """
    back = self.backends(back)
    locked = []
    errors = []
    for fsb in back:
        fstr = '{0}.lock'.format(fsb)
        if fstr in self.servers:
            msg = 'Setting update lock for {0} remotes'.format(fsb)
            if remote:
                # Only string patterns are accepted here; anything else is
                # reported and the backend is skipped.
                if not isinstance(remote, six.string_types):
                    errors.append(
                        'Badly formatted remote pattern \'{0}\''
                        .format(remote)
                    )
                    continue
                else:
                    msg += ' matching {0}'.format(remote)
            log.debug(msg)
            good, bad = self.servers[fstr](remote=remote)
            locked.extend(good)
            errors.extend(bad)
    return locked, errors
``remote`` can either be a dictionary containing repo configuration information, or a pattern. If the latter, then remotes for which the URL matches the pattern will be locked.
def count_by_tag(stack, descriptor):
    """Return the count of EC2 instances matching the given stack and
    descriptor tags that pass state_filter (e.g. running or pending).

    :param stack: value for the 'stack' tag filter
    :param descriptor: value for the 'descriptor' tag filter
    :return: int count of matching instances
    """
    ec2_conn = boto.ec2.connection.EC2Connection()
    reservations = ec2_conn.get_all_instances(
        filters={
            'tag:stack': stack,
            'tag:descriptor': descriptor
        })
    # Flatten reservations -> instances with a plain comprehension instead
    # of the original side-effecting nested list comprehension.
    all_instances = [
        instance
        for reservation in reservations
        for instance in reservation.instances
    ]
    return sum(1 for instance in all_instances if state_filter(instance))
Returns the count of currently running or pending instances that match the given stack and deployer combo
def clear(self):
    """Clear the Merkle tree by releasing the root and each leaf's
    parent/sibling/side references; the rest should be garbage collected.

    Useful when you want to keep an existing tree, change the leaves, and
    leave it uncalculated for a while without stale node references.
    """
    self.root = None
    for leaf in self.leaves:
        leaf.p = None
        leaf.sib = None
        leaf.side = None
Clears the Merkle Tree by releasing the Merkle root and each leaf's references, the rest should be garbage collected. This may be useful for situations where you want to take an existing tree, make changes to the leaves, but leave it uncalculated for some time, without node references that are no longer correct still hanging around. Usually it is better just to make a new tree.
def template(self):
    """Return the method configuration template (JSON) for this method."""
    r = fapi.get_config_template(self.namespace, self.name, self.snapshot_id, self.api_url)
    fapi._check_response_code(r, 200)
    return r.json()
Return a method template for this method.
def cmd_serve(self, *args):
    """Serve self.bin_dir over HTTP on SIMPLE_HTTP_SERVER_PORT until
    interrupted (Ctrl+C).
    """
    try:
        # Python 3 module locations first, Python 2 names as fallback.
        from http.server import SimpleHTTPRequestHandler
        from socketserver import TCPServer
    except ImportError:
        from SimpleHTTPServer import SimpleHTTPRequestHandler
        from SocketServer import TCPServer
    os.chdir(self.bin_dir)
    handler = SimpleHTTPRequestHandler
    httpd = TCPServer(("", SIMPLE_HTTP_SERVER_PORT), handler)
    print("Serving via HTTP at port {}".format(SIMPLE_HTTP_SERVER_PORT))
    print("Press Ctrl+c to quit serving.")
    httpd.serve_forever()
Serve the bin directory via SimpleHTTPServer
def build_arch(self, arch):
    """Create the expected build dir and symlink the locally installed
    system Python (matching python3crystax's version) as 'hostpython'.

    Raises:
        OSError: when that Python version is not installed locally.
    """
    self.ctx.hostpython = '/usr/bin/false'
    sub_build_dir = join(self.get_build_dir(), 'build')
    shprint(sh.mkdir, '-p', sub_build_dir)
    python3crystax = self.get_recipe('python3crystax', self.ctx)
    system_python = sh.which("python" + python3crystax.version)
    if system_python is None:
        raise OSError(
            ('Trying to use python3crystax=={} but this Python version '
             'is not installed locally.').format(python3crystax.version))
    link_dest = join(self.get_build_dir(), 'hostpython')
    shprint(sh.ln, '-sf', system_python, link_dest)
Creates expected build and symlinks system Python version.
def ResolveClientFlowURN(self, client_id, token=None):
    """Resolve the URN of the flow with this id on the given client.

    Non-nested ids (no slash) map directly to the client's flows. For
    nested ids, if the root flow is a symlink the remainder is appended to
    the symlink target; otherwise the whole id is used directly. May need
    a datastore roundtrip.

    Args:
      client_id: Id of the client where this flow should be found.
      token: Credentials token.
    Returns:
      RDFURN pointing to the flow.
    Raises:
      ValueError: if this flow id is empty/uninitialized.
    """
    if not self._value:
        raise ValueError("Can't call ResolveClientFlowURN on an empty client id.")
    components = self.Split()
    if len(components) == 1:
        return self._FlowIdToUrn(self._value, client_id)
    else:
        root_urn = self._FlowIdToUrn(components[0], client_id)
        try:
            flow_symlink = aff4.FACTORY.Open(
                root_urn,
                aff4_type=aff4.AFF4Symlink,
                follow_symlinks=False,
                token=token)
            return flow_symlink.Get(flow_symlink.Schema.SYMLINK_TARGET).Add(
                "/".join(components[1:]))
        except aff4.InstantiationError:
            # Root flow is not a symlink: use the whole id directly.
            return self._FlowIdToUrn(self._value, client_id)
Resolve a URN of a flow with this id belonging to a given client. Note that this may need a roundtrip to the datastore. Resolving algorithm is the following: 1. If the flow id doesn't contain slashes (flow is not nested), we just append it to the <client id>/flows. 2. If the flow id has slashes (flow is nested), we check if the root flow pointed to by <client id>/flows/<flow id> is a symlink. 2a. If it's a symlink, we append the rest of the flow id to the symlink target. 2b. If it's not a symlink, we just append the whole id to <client id>/flows (meaning we do the same as in 1). Args: client_id: Id of a client where this flow is supposed to be found on. token: Credentials token. Returns: RDFURN pointing to a flow identified by this flow id and client id. Raises: ValueError: if this flow id is not initialized.
def extract_consensus_op(self, opcode, op_data, processed_op_data, current_block_number):
    """Extract the consensus and quirk fields for an operation.

    Using the raw parsed operation (*op_data*) and the checked, processed
    operation (*processed_op_data*), return a dict containing (1) all of
    the consensus fields needed to snapshot this operation and (2) all of
    the quirk fields that must be stored on the name record.

    Aborts the process when a required field is missing, since that is a
    bug in operation processing.
    """
    consensus_fields = op_get_consensus_fields(opcode)
    quirk_fields = op_get_quirk_fields(opcode)
    required_fields = consensus_fields + quirk_fields

    missing = [field for field in required_fields if field not in processed_op_data]
    if missing:
        log.error("FATAL: BUG: missing consensus fields {}".format(missing))
        log.error("op_data:\n{}".format(json.dumps(op_data, indent=4, sort_keys=True)))
        # BUG FIX: this log line previously dumped op_data again instead of
        # processed_op_data, hiding the data that was actually missing fields.
        log.error("processed_op_data:\n{}".format(json.dumps(processed_op_data, indent=4, sort_keys=True)))
        os.abort()

    return {field: processed_op_data[field] for field in required_fields}
Using the operation data extracted from parsing the virtualchain operation (@op_data), and the checked, processed operation (@processed_op_data), return a dict that contains (1) all of the consensus fields to snapshot this operation, and (2) all of the data fields that we need to store for the name record (i.e. quirk fields)
def all_control_flow_elements_count(bpmn_graph):
    """Return the total count of all control flow elements (gateways plus
    events) in the BpmnDiagramGraph instance.

    :param bpmn_graph: an instance of BpmnDiagramGraph.
    :return: total count of control flow elements.
    """
    combined_counts = dict(get_gateway_counts(bpmn_graph))
    combined_counts.update(get_events_counts(bpmn_graph))
    return sum(combined_counts.values())
Returns the total count of all control flow elements in the BPMNDiagramGraph instance. :param bpmn_graph: an instance of BpmnDiagramGraph representing BPMN model. :return: total count of the control flow elements in the BPMNDiagramGraph instance
def prefix(*kinds):
    """Decorate a method as handling prefix tokens of the given kinds.

    The kinds accumulate on the function's ``prefix_kinds`` attribute, so
    stacked decorators extend the list.
    """
    def attach(fn):
        existing = getattr(fn, 'prefix_kinds', None)
        if existing is None:
            fn.prefix_kinds = list(kinds)
        else:
            existing.extend(kinds)
        return fn
    return attach
Decorate a method as handling prefix tokens of the given kinds
def get_filter_value(self, column_name):
    """Return the filter value applied to *column_name*.

    :param column_name: name of the column whose value is wanted
    :return: the filter value, or None when the column is not filtered
    """
    pairs = zip(self.filters, self.values)
    return next(
        (value for flt, value in pairs if flt.column_name == column_name),
        None,
    )
Returns the filtered value for a certain column :param column_name: The name of the column that we want the value from :return: the filter value of the column
def can_use_cached_output(self, contentitem):
    """Tell whether the code should try reading cached output.

    True only when the global FLUENT_CONTENTS_CACHE_OUTPUT setting is on,
    the item's plugin enables cache_output, and the item is saved (has a
    pk). Note: returns a truthy/falsy value, not necessarily a bool.
    """
    plugin = contentitem.plugin
    return appsettings.FLUENT_CONTENTS_CACHE_OUTPUT and plugin.cache_output and contentitem.pk
Tell whether the code should try reading cached output
def get_camera_info(self, camera_id):
    """Retrieve camera information for *camera_id*.

    Returns the first entry of the response's 'camera' list, or [] (after
    logging an error) when the response has no such entry.
    """
    response = api.request_camera_info(self.blink, self.network_id, camera_id)
    try:
        return response['camera'][0]
    except (TypeError, KeyError):
        _LOGGER.error("Could not extract camera info: %s", response, exc_info=True)
        return []
Retrieve camera information.
def _detect(self):
    """Detect suicidal functions across all contracts.

    Returns a list of JSON results, one per function that allows anyone to
    destruct the contract.
    """
    results = []
    for c in self.contracts:
        functions = self.detect_suicidal(c)
        for func in functions:
            txt = "{}.{} ({}) allows anyone to destruct the contract\n"
            info = txt.format(func.contract.name, func.name, func.source_mapping_str)
            json = self.generate_json_result(info)
            self.add_function_to_json(func, json)
            results.append(json)
    return results
Detect the suicidal functions
def _base_type(self): type_class = self._dimension_dict["type"]["class"] if type_class == "categorical": return "categorical" if type_class == "enum": subclass = self._dimension_dict["type"]["subtype"]["class"] return "enum.%s" % subclass raise NotImplementedError("unexpected dimension type class '%s'" % type_class)
Return str like 'enum.numeric' representing dimension type. This string is a 'type.subclass' concatenation of the str keys used to identify the dimension type in the cube response JSON. The '.subclass' suffix only appears where a subtype is present.
def collection_get_options(collection_name, **kwargs):
    """Get a SolrCloud collection's options as a dict.

    Additional parameters (kwargs) are proxied to the underlying
    cluster_status / http.query call.

    CLI Example:

    .. code-block:: bash

        salt '*' solrcloud.collection_get_options collection_name
    """
    cluster = cluster_status(**kwargs)
    collection = cluster["collections"][collection_name]
    options = {
        "collection.configName": collection["configName"],
        "router.name": collection["router"]["name"],
        "replicationFactor": int(collection["replicationFactor"]),
        "maxShardsPerNode": int(collection["maxShardsPerNode"]),
        "autoAddReplicas": collection["autoAddReplicas"] is True,
    }
    if 'rule' in collection:
        options['rule'] = collection['rule']
    if 'snitch' in collection:
        # BUG FIX: previously copied the 'rule' entry into 'snitch'.
        options['snitch'] = collection['snitch']
    return options
Get collection options Additional parameters (kwargs) may be passed, they will be proxied to http.query CLI Example: .. code-block:: bash salt '*' solrcloud.collection_get_options collection_name
def read(fname):
    """Return the UTF-8 decoded contents of *fname*, resolved relative to
    the directory where setup.py resides (SETUP_DIRNAME)."""
    file_path = os.path.join(SETUP_DIRNAME, fname)
    with codecs.open(file_path, encoding='utf-8') as rfh:
        return rfh.read()
Read a file from the directory where setup.py resides
def retry_on_ec2_error(self, func, *args, **kwargs):
    """Call func(*args, **kwargs), retrying on EC2ResponseError/SSLError.

    Up to 6 attempts with a 5 second pause between them; the final failure
    is re-raised.

    :param func: Function to call
    :type func: function
    """
    exception_retry_count = 6
    while True:
        try:
            return func(*args, **kwargs)
        except (boto.exception.EC2ResponseError, ssl.SSLError) as msg:
            exception_retry_count -= 1
            if exception_retry_count <= 0:
                raise msg
            time.sleep(5)
Call the given method with the given arguments, retrying if the call failed due to an EC2ResponseError. This method will wait at most 30 seconds and perform up to 6 retries. If the method still fails, it will propagate the error. :param func: Function to call :type func: function
def ext_pillar(minion_id, pillar, conf):
    """Evaluate varstack data (configured by *conf*) against the minion's
    grains and return the resulting pillar data."""
    vs = varstack.Varstack(config_filename=conf)
    return vs.evaluate(__grains__)
Parse varstack data and return the result
def view_modifier(parser, token):
    """Template tag: output a view modifier.

    Expects exactly one argument; raises TemplateSyntaxError otherwise.
    """
    try:
        tag_name, view_modifier = token.split_contents()
    except ValueError:
        raise template.TemplateSyntaxError('view_modifier tag requires 1 argument (view_modifier), %s given' % (len(token.split_contents()) - 1))
    return ViewModifierNode(view_modifier)
Output view modifier.
def default_values_of(func):
    """Return the names of *func*'s parameters that either carry a default
    value or are not plain positional-or-keyword parameters (i.e. *args,
    keyword-only, **kwargs)."""
    names = []
    for name, param in inspect.signature(func).parameters.items():
        has_default = param.default is not inspect.Parameter.empty
        if has_default or param.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD:
            names.append(name)
    return names
Return the defaults of the function `func`.
def sign(self, request, authheaders, response_body, secret):
    """Return the base64 HMAC response signature for the given request.

    Keyword arguments:
    request -- A request object consumable by this API.
    authheaders -- String-indexable headers for this signature version.
    response_body -- The response body (string or bytes-like).
    secret -- The base64-encoded secret key for the HMAC authorization.

    Raises KeyError when the nonce or X-Authorization-Timestamp header is
    missing/empty, and binascii.Error when the secret is not valid base64.
    """
    if "nonce" not in authheaders or authheaders["nonce"] == '':
        raise KeyError("nonce required in authorization headers.")
    if request.get_header('x-authorization-timestamp') == '':
        raise KeyError("X-Authorization-Timestamp is required.")
    try:
        # Strict base64 validation first; presumably older Pythons raise
        # TypeError here (no `validate` kwarg) — handled below.
        mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True), digestmod=self.digest)
    except TypeError:
        s = secret.encode('utf-8')
        # Re-implement strict validation by hand before decoding.
        if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
            raise binascii.Error('Non-base64 digit found')
        mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest)
    mac.update(self.signable(request, authheaders, response_body).encode('utf-8'))
    digest = mac.digest()
    return base64.b64encode(digest).decode('utf-8')
Returns the response signature for the response to the request. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. secret -- The base64-encoded secret key for the HMAC authorization.
def zscore(arr):
    """Return *arr* centered to mean 0 and, unless constant, scaled to unit
    variance.

    If the input has 0 variance, the centered (all-zero-variance) array is
    returned unscaled.

    Parameters
    ----------
    arr : array-like

    Returns
    -------
    zscore : array-like
    """
    centered = arr - np.mean(arr)
    spread = np.std(centered)
    return centered / spread if spread != 0 else centered
Return arr normalized with mean 0 and unit variance. If the input has 0 variance, the result will also have 0 variance. Parameters ---------- arr : array-like Returns ------- zscore : array-like Examples -------- Compute the z score for a small array: >>> result = zscore([1, 0]) >>> result array([ 1., -1.]) >>> np.mean(result) 0.0 >>> np.std(result) 1.0 Does not re-scale in case the input is constant (has 0 variance): >>> zscore([1, 1]) array([ 0., 0.])
def main():
    """Execute the "bokeh" command line program with sys.argv."""
    import sys
    from bokeh.command.bootstrap import main as _main
    _main(sys.argv)
Execute the "bokeh" command line program.
def toil_get_file(file_store, index, existing, file_store_id):
    """Get a path/URL to an input file from the Toil jobstore.

    Ids without the "toilfs:" prefix are imported and exposed via a public
    URL; "toilfs:" ids are read to a local file, recorded in *index* and
    *existing* (both mutated), and returned as a file:// URI.
    """
    if not file_store_id.startswith("toilfs:"):
        return file_store.jobStore.getPublicUrl(file_store.jobStore.importFile(file_store_id))
    # len("toilfs:") == 7 — strip the scheme prefix.
    src_path = file_store.readGlobalFile(file_store_id[7:])
    index[src_path] = file_store_id
    existing[file_store_id] = src_path
    return schema_salad.ref_resolver.file_uri(src_path)
Get path to input file from Toil jobstore.
def configure_root(self, config, incremental=False):
    """Configure the root logger from a config dictionary."""
    root = logging.getLogger()
    self.common_logger_config(root, config, incremental)
Configure a root logger from a dictionary.
def return_obj(cols, df, return_cols=False):
    """Wrap (cols, df) in a DataFrameHolder and return either the holder
    or just the DataFrame, depending on *return_cols*."""
    df_holder = DataFrameHolder(cols=cols, df=df)
    return df_holder.return_self(return_cols=return_cols)
Construct a DataFrameHolder and then return either that or the DataFrame.
def solve(self):
    """Find a set of dependencies matching the root package's constraints,
    or raise an error if no such set is available.

    Always logs the solving time and number of attempted solutions.
    """
    start = time.time()
    root_dependency = Dependency(self._root.name, self._root.version)
    root_dependency.is_root = True
    self._add_incompatibility(
        Incompatibility([Term(root_dependency, False)], RootCause())
    )
    try:
        next = self._root.name
        while next is not None:
            self._propagate(next)
            next = self._choose_package_version()
        return self._result()
    except Exception:
        # NOTE(review): no-op handler — exceptions propagate unchanged;
        # only the `finally` below does real work.
        raise
    finally:
        self._log(
            "Version solving took {:.3f} seconds.\n"
            "Tried {} solutions.".format(
                time.time() - start, self._solution.attempted_solutions
            )
        )
Finds a set of dependencies that match the root package's constraints, or raises an error if no such set is available.
def add_module(self, module):
    """Add configuration parameters from a Python module.

    Copies every attribute of *module* whose name does not start with
    '__' onto this object.
    """
    # BUG FIX: dict.iteritems() is Python-2 only — use items(); and the
    # keyword-argument call self.__setattr__(attr=..., value=...) fails on
    # the default object.__setattr__ signature — use setattr() instead.
    for key, value in module.__dict__.items():
        if not key.startswith('__'):
            setattr(self, key, value)
Adds configuration parameters from a Python module.
def get_create_security_group_commands(self, sg_id, sg_rules):
    """Return the EOS CLI commands that create the ingress and egress
    dynamic ACLs for the given security group."""
    ingress_rules, egress_rules = self._format_rules_for_eos(sg_rules)
    cmds = []
    for direction, rules in (
            (n_const.INGRESS_DIRECTION, ingress_rules),
            (n_const.EGRESS_DIRECTION, egress_rules)):
        cmds.append("ip access-list %s dynamic" % self._acl_name(sg_id, direction))
        cmds.extend(rules)
        cmds.append("exit")
    return cmds
Commands for creating ACL
def image_info(call=None, kwargs=None):
    """Retrieve information for a given OpenNebula image.

    Either ``name`` or ``image_id`` must be supplied via kwargs;
    ``image_id`` takes precedence when both are given. Must be called with
    -f/--function.

    .. versionadded:: 2016.3.0
    """
    if call != 'function':
        raise SaltCloudSystemExit(
            'The image_info function must be called with -f or --function.'
        )
    if kwargs is None:
        kwargs = {}
    name = kwargs.get('name', None)
    image_id = kwargs.get('image_id', None)
    if image_id:
        if name:
            log.warning(
                'Both the \'image_id\' and \'name\' arguments were provided. '
                '\'image_id\' will take precedence.'
            )
    elif name:
        image_id = get_image_id(kwargs={'name': name})
    else:
        raise SaltCloudSystemExit(
            'The image_info function requires either a \'name or an \'image_id\' '
            'to be provided.'
        )
    server, user, password = _get_xml_rpc()
    auth = ':'.join([user, password])
    info = {}
    response = server.one.image.info(auth, int(image_id))[1]
    tree = _get_xml(response)
    info[tree.find('NAME').text] = _xml_to_dict(tree)
    return info
Retrieves information for a given image. Either a name or an image_id must be supplied. .. versionadded:: 2016.3.0 name The name of the image for which to gather information. Can be used instead of ``image_id``. image_id The ID of the image for which to gather information. Can be used instead of ``name``. CLI Example: .. code-block:: bash salt-cloud -f image_info opennebula name=my-image salt-cloud --function image_info opennebula image_id=5
def generate_anomaly(self, input_word, list_of_dict_words, num):
    """Generate ``num`` anomalies: dictionary words that are not the input.

    Words are drawn pseudo-randomly from ``list_of_dict_words``;
    duplicates and the input word itself are rejected, and drawing
    continues until ``num`` unique words are collected (or an attempt
    budget is exhausted, so a tiny dictionary cannot loop forever).

    Returns:
        dict: {'input': input_word, 'results': [...], 'category': 'anomaly'}
    """
    results = []
    seen = set()
    # Bug fix: the old loop ran exactly ``num`` iterations and silently
    # dropped collisions (its ``i = i + 1`` was a no-op), so it could
    # return fewer than ``num`` results.  Keep drawing, with a bound.
    max_attempts = max(10 * num, 10 * len(list_of_dict_words), 1)
    attempts = 0
    while len(results) < num and attempts < max_attempts:
        attempts += 1
        name = list_of_dict_words[randint(0, len(list_of_dict_words) - 1)]
        if name != input_word and name not in seen:
            seen.add(name)
            results.append(PataLib().strip_underscore(name))
    return {'input': input_word, 'results': results, 'category': 'anomaly'}
Generate an anomaly. Candidate words are drawn via a pseudo-random number generator.
def addUrlScheme(self, url):
    """Register a url scheme on this endpoint.

    Builds an OEmbedUrlScheme from ``url`` and stores it keyed by the raw
    url string; re-adding an already-known scheme is a no-op.

    Args:
        url: The url string that represents the scheme to add.

    Raises:
        TypeError: when ``url`` is not a string.
    """
    if not isinstance(url, str):
        raise TypeError('url must be a string value')
    if url not in self._urlSchemes:
        self._urlSchemes[url] = OEmbedUrlScheme(url)
Add a url scheme to this endpoint. It takes a url string and create the OEmbedUrlScheme object internally. Args: url: The url string that represents a url scheme to add.
def add(self, source_id, auth, validate=True):
    """Attach authorization credential sets to a Managed Source.

    Issues a POST to the ``add`` endpoint carrying the target source id,
    the credential sets, and the ``validate`` flag, and returns the
    decoded API response.
    """
    payload = {
        'id': source_id,
        'auth': auth,
        'validate': validate,
    }
    return self.request.post('add', payload)
Add one or more sets of authorization credentials to a Managed Source Uses API documented at http://dev.datasift.com/docs/api/rest-api/endpoints/sourceauthadd :param source_id: target Source ID :type source_id: str :param auth: An array of the source-specific authorization credential sets that you're adding. :type auth: array of strings :param validate: Allows you to suppress the validation of the authorization credentials, defaults to true. :type validate: bool :return: dict of REST API output with headers attached :rtype: :class:`~datasift.request.DictResponse` :raises: :class:`~datasift.exceptions.DataSiftApiException`, :class:`requests.exceptions.HTTPError`
def doSolve(fitsfn: Path, args: str=None):
    """Run Astrometry.net's ``solve-field`` on a FITS file.

    Extra command-line options may be given as a single space-separated
    string in ``args``.

    Raises:
        RuntimeError: when solve-field reports it did not solve the field.
    """
    extra = args.split(' ') if args else []
    cmd = ['solve-field', '--overwrite', str(fitsfn)] + extra
    print('\n', ' '.join(cmd), '\n')
    output = subprocess.check_output(cmd, universal_newlines=True)
    print(output)
    if 'Did not solve' in output:
        raise RuntimeError(f'could not solve {fitsfn}')
    print('\n\n *** done with astrometry.net ***\n ')
Solve astrometry for a FITS file by shelling out to Astrometry.net's ``solve-field``. Astrometry.net from at least version 0.67 is OK with Python 3.
def top_charts(self):
    """Return the default top charts listing from the API."""
    return self._call(mc_calls.BrowseTopChart).body
Get a listing of the default top charts.
def get_metadata(self, lcid):
    """Return the fit parameters (table 2 of Sesar 2010) for ``lcid``.

    The metadata table is fetched lazily on first use and cached on the
    instance.

    Raises:
        ValueError: when ``lcid`` is not present in the table.
    """
    if self._metadata is None:
        self._metadata = fetch_rrlyrae_lc_params()
    matches = np.where(self._metadata['id'] == lcid)[0]
    if len(matches) == 0:
        raise ValueError("invalid lcid: {0}".format(lcid))
    return self._metadata[matches[0]]
Get the parameters derived from the fit for the given id. This is table 2 of Sesar 2010
def _validate_num_units(num_units, service_name, add_error): if num_units is None: return 0 try: num_units = int(num_units) except (TypeError, ValueError): add_error( 'num_units for service {} must be a digit'.format(service_name)) return if num_units < 0: add_error( 'num_units {} for service {} must be a positive digit' ''.format(num_units, service_name)) return return num_units
Check that the given num_units is valid. Use the given service name to describe possible errors. Use the given add_error callable to register validation error. If no errors are encountered, return the number of units as an integer. Return None otherwise.
def top_parent(self):
    """Return the root of the parent chain, or ``self`` when parentless.

    @type: declaration_t
    """
    node = self
    while node.parent is not None:
        node = node.parent
    return node
Reference to top parent declaration. @type: declaration_t
def controller(url_prefix_or_controller_cls: Union[str, Type[Controller]],
               controller_cls: Optional[Type[Controller]] = None,
               *,
               rules: Optional[Iterable[Union[Route, RouteGenerator]]] = None,
               ) -> RouteGenerator:
    """Register a controller class's routes.

    Example usage::

        routes = lambda: [
            controller(SiteController),
        ]

    Or with the optional prefix argument::

        routes = lambda: [
            controller('/products', ProductController),
        ]

    Specify ``rules`` to only include those routes from the controller.

    :param url_prefix_or_controller_cls: The controller class, or a url
        prefix for all of the rules from the controller class passed as
        the second argument
    :param controller_cls: If a url prefix was given as the first argument,
        then the controller class must be passed as the second argument
    :param rules: An optional list of rules to limit/customize the routes
        included from the controller
    """
    # Support both call forms: controller(cls) and controller(prefix, cls).
    url_prefix, controller_cls = _normalize_args(
        url_prefix_or_controller_cls, controller_cls, _is_controller_cls)
    # Fall back to the prefix declared on the controller's Meta.
    url_prefix = url_prefix or controller_cls.Meta.url_prefix
    routes = []
    controller_routes = getattr(controller_cls, CONTROLLER_ROUTES_ATTR)
    if rules is None:
        # No filter given: take every route declared on the controller.
        routes = controller_routes.values()
    else:
        for route in _reduce_routes(rules):
            existing = controller_routes.get(route.method_name)
            if existing:
                # Merge options from the route already declared on the
                # controller into the explicitly supplied rule.
                routes.append(_inherit_route_options(route, existing[0]))
            else:
                routes.append(route)
    yield from _normalize_controller_routes(routes, controller_cls,
                                            url_prefix=url_prefix)
This function is used to register a controller class's routes. Example usage:: routes = lambda: [ controller(SiteController), ] Or with the optional prefix argument:: routes = lambda: [ controller('/products', ProductController), ] Specify ``rules`` to only include those routes from the controller:: routes = lambda: [ controller(SecurityController, rules=[ rule('/login', SecurityController.login), rule('/logout', SecurityController.logout), rule('/sign-up', SecurityController.register), ]), ] :param url_prefix_or_controller_cls: The controller class, or a url prefix for all of the rules from the controller class passed as the second argument :param controller_cls: If a url prefix was given as the first argument, then the controller class must be passed as the second argument :param rules: An optional list of rules to limit/customize the routes included from the controller
def makePublicDir(dirName):
    """Create ``dirName`` if needed and make it world-writable (0777).

    Existing directories are left untouched.  Returns ``dirName``.
    """
    if os.path.exists(dirName):
        return dirName
    os.mkdir(dirName)
    os.chmod(dirName, 0o777)
    return dirName
Makes a given subdirectory if it doesn't already exist, making sure it is public.
def extract_aiml(path='aiml-en-us-foundation-alice.v1-9'):
    """Extract an aiml .zip file (if not already extracted) and return a
    list of aiml file paths.

    If ``path`` resolves to a directory, its entries are listed directly;
    otherwise it is treated as a zip archive and extracted into
    BIGDATA_PATH.
    """
    # Prefer the packaged data location when one exists.
    path = find_data_path(path) or path
    if os.path.isdir(path):
        paths = os.listdir(path)
        paths = [os.path.join(path, p) for p in paths]
    else:
        zf = zipfile.ZipFile(path)
        paths = []
        for name in zf.namelist():
            # Skip Mercurial metadata bundled inside some archives.
            if '.hg/' in name:
                continue
            paths.append(zf.extract(name, path=BIGDATA_PATH))
    return paths
Extract an aiml.zip file if it hasn't been already and return a list of aiml file paths
def export(user, directory=None, warnings=True):
    """Build a directory containing the visualization for ``user``.

    Copies the dashboard assets next to a JSON export of the user's data.
    Returns the local path where files have been written.

    Examples
    --------
    >>> bandicoot.visualization.export(U)
    Successfully exported the visualization to /tmp/tmpsIyncS
    """
    # The dashboard sources live alongside this module.
    current_file = os.path.realpath(__file__)
    current_path = os.path.dirname(current_file)
    dashboard_path = os.path.join(current_path, 'dashboard_src')
    if directory:
        dirpath = directory
    else:
        # No target given: export into a fresh temporary directory.
        dirpath = tempfile.mkdtemp()
    copy_tree(dashboard_path + '/public', dirpath, update=1)
    data = user_data(user)
    bc.io.to_json(data, dirpath + '/data/bc_export.json', warnings=False)
    if warnings:
        print("Successfully exported the visualization to %s" % dirpath)
    return dirpath
Build a temporary directory with the visualization. Returns the local path where files have been written. Examples -------- >>> bandicoot.visualization.export(U) Successfully exported the visualization to /tmp/tmpsIyncS
def history(ctx, archive_name):
    """Print the history of the named archive."""
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    click.echo(pprint.pformat(archive.get_history()))
Get archive history
def bids_to_you(self):
    """Scrape the exchange-market page and return the bids made to you.

    @return: [[player, owner, team, money, date, datechange, status], ...]
    """
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain",
               'Referer': 'http://'+self.domain+'/team_news.phtml',
               "User-Agent": user_agent}
    req = self.session.get('http://'+self.domain+'/exchangemarket.phtml?viewoffers_x=',
                           headers=headers).content
    soup = BeautifulSoup(req)
    table = []
    # NOTE(review): {'class','tablecontent03'} is a *set*, not a dict —
    # this was likely meant to be {'class': 'tablecontent03'}; confirm the
    # scrape still finds the right table before changing it.
    # The first <tr> is the header row, hence the [1:] slice.
    for i in soup.find('table', {'class', 'tablecontent03'}).find_all('tr')[1:]:
        player, owner, team, price, bid_date, trans_date, status = self._parse_bid_table(i)
        table.append([player, owner, team, price, bid_date, trans_date, status])
    return table
Get bids made to you @return: [[player,owner,team,money,date,datechange,status],]
def bits_to_dict(bits):
    """Convert a Django template tag's kwargs into a dict of Python values.

    Each bit looks like ``key='value'`` with an optional trailing comma.
    Values are parsed with ast.literal_eval; the special quoted strings
    'true'/'false' become real booleans.

    from: ["style='monokai'", "num=0,", "boolean='true',"]
    to:   {'style': 'monokai', 'num': 0, 'boolean': True}
    """
    cleaned_bits = [bit[:-1] if bit.endswith(',') else bit for bit in bits]
    # Split on the first '=' only so values may themselves contain '='.
    options = dict(bit.split('=', 1) for bit in cleaned_bits)
    for key in options:
        if options[key] == "'true'" or options[key] == "'false'":
            # Bug fix: the old code title-cased the *quoted* string, so
            # literal_eval returned the string 'True' instead of the bool
            # True.  Strip the quotes first so literal_eval sees True/False.
            options[key] = options[key][1:-1].title()
        options[key] = ast.literal_eval(options[key])
    return options
Convert a Django template tag's kwargs into a dictionary of Python types. The only necessary types are number, boolean, list, and string. http://pygments.org/docs/formatters/#HtmlFormatter from: ["style='monokai'", "cssclass='cssclass',", "boolean='true',", 'num=0,', "list='[]'"] to: {'style': 'monokai', 'cssclass': 'cssclass', 'boolean': True, 'num': 0, 'list': [],}
def tco_return_handle(tokens):
    """Process tail-call-optimizable return statements.

    ``tokens`` holds (function_expression, call_text), where the call
    text still includes its opening parenthesis.
    """
    internal_assert(len(tokens) == 2, "invalid tail-call-optimizable return statement tokens", tokens)
    if tokens[1].startswith("()"):
        # No arguments: drop the empty "()" and keep whatever follows it.
        return "return _coconut_tail_call(" + tokens[0] + ")" + tokens[1][2:]
    else:
        # Has arguments: replace the opening "(" with ", " so the callee
        # becomes the first argument of _coconut_tail_call.
        return "return _coconut_tail_call(" + tokens[0] + ", " + tokens[1][1:]
Process tail-call-optimizable return statements.
def get_class(class_string):
    """Resolve a dotted string ('pkg.module.Class') to the named object.

    Returns the resolved attribute (or the top-level module when the
    string contains no dots), or None when the path cannot be imported
    or resolved.
    """
    # Bug fix: the old code called .encode('ascii') first, which returns
    # bytes on Python 3 and made .split('.') raise TypeError.
    split_string = class_string.split('.')
    import_path = '.'.join(split_string[:-1])
    class_name = split_string[-1]
    if class_name:
        try:
            if import_path:
                mod = __import__(import_path, globals(), {}, [class_name])
                cls = getattr(mod, class_name)
            else:
                cls = __import__(class_name, globals(), {})
            if cls:
                return cls
        except (ImportError, AttributeError):
            pass
    return None
Get a class from a dotted string
def reply(self, messageId, *messageParts):
    """Send a reply to the request identified by ``messageId``.

    The routing frames saved for that id are popped and sent, followed
    by the id itself, an empty delimiter frame, and the reply payload.

    :param messageId: message uuid
    :type messageId: str
    :param messageParts: message data
    :type messageParts: list
    """
    frames = self._routingInfo.pop(messageId)
    frames = frames + [messageId, b'']
    frames += list(messageParts)
    self.send(frames)
Send reply to request with specified ``messageId``. :param messageId: message uuid :type messageId: str :param messageParts: message data :type messageParts: list
def _getFieldsInDB(self, tablename): SQL = 'SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.Columns where TABLE_NAME="%s"' % tablename array_data = self.execQuery(SQL) return [x[0] for x in array_data]
get all the fields from a specific table
def open_assignments(self):
    """Return the list of open assignments from the viewpoint of this user.

    An assignment is open when its hard deadline lies in the future (or
    is unset), it belongs to one of the user's courses, and the user has
    no non-withdrawn submission for it.  Assignments without a soft
    deadline come first in the returned list.
    """
    # Hard deadline still in the future, or no hard deadline at all.
    qs = Assignment.objects.filter(hard_deadline__gt=timezone.now(
    )) | Assignment.objects.filter(hard_deadline__isnull=True)
    if not self.can_see_future():
        # Regular users only see already-published assignments.
        qs = qs.filter(publish_at__lt=timezone.now())
    qs = qs.filter(course__in=self.user_courses())
    qs = qs.order_by('soft_deadline', '-gradingScheme', 'title')
    # Assignments the user already acted on (live, non-withdrawn submission).
    waiting_for_action = [subm.assignment for subm in self.user.authored.all(
    ).exclude(state=Submission.WITHDRAWN)]
    qs_without_soft_deadline = qs.filter(soft_deadline__isnull=True)
    qs_with_soft_deadline = qs.filter(soft_deadline__isnull=False)
    # No-soft-deadline assignments first, then the dated ones.
    ass_list = [
        ass for ass in qs_without_soft_deadline if ass not in waiting_for_action]
    ass_list += [
        ass for ass in qs_with_soft_deadline if ass not in waiting_for_action]
    return ass_list
Returns the list of open assignments from the viewpoint of this user.
def load_config(cls, opts, path=None, profile=None):
    """Load a configuration file into an options object.

    ``path`` may point at an extra config file or a directory to search.
    Values from the ``default`` section are applied first, then those of
    the requested profile section (if any).  Returns the last set of
    values applied.
    """
    if path and os.path.exists(path):
        # Directories extend the search path; files are read directly.
        target = cls.config_searchpath if os.path.isdir(path) else cls.config_files
        target.insert(0, path)
    config = cls.read_config()
    sections = ["default"]
    if profile and profile != "default":
        sections.append("profile:%s" % profile)
    values = {}
    for section in sections:
        values = config.get(section, {})
        cls._load_values_into_opts(opts, values)
    return values
Load a configuration file into an options object.
def prep_image(image, tile_size):
    """Crop ``image`` so both dimensions are multiples of ``tile_size``.

    Returns the image unchanged when it already divides evenly,
    otherwise a copy cropped (anchored at the top-left) to the largest
    evenly-divisible size.
    """
    w, h = image.size
    # Bug fix: use floor division.  With true division the products
    # always equal the original size (e.g. 105 / 10 * 10 == 105.0), so
    # the crop never happened — and the crop box would have been
    # float-valued anyway.
    new_w = (w // tile_size) * tile_size
    new_h = (h // tile_size) * tile_size
    if new_w == w and new_h == h:
        return image
    return image.crop((0, 0, new_w, new_h))
Takes an image and a tile size and returns a possibly cropped version of the image that is evenly divisible in both dimensions by the tile size.
def stop(self):
    """Disconnect from the device, discarding any pending request state."""
    pending = len(self._outstanding)
    if pending:
        _LOGGER.warning('There were %d outstanding requests', pending)
    self._initial_message_sent = False
    self._outstanding = {}
    self._one_shots = {}
    self.connection.close()
Disconnect from device.
def queryset(self, request, queryset):
    """Filter the admin queryset by the date range from our filter form.

    That's the trick - we create self.form when django tries to get our
    queryset.  This allows to create unbound and bound form in the
    single place.

    NOTE(review): when the form is invalid or both dates are empty this
    returns None, which Django treats as "no filtering" — confirm that
    is the intended behaviour.
    """
    form = self.get_form(request)
    # Keep a reference so the template can render the same bound form.
    self.form = form
    start_date = form.start_date()
    end_date = form.end_date()
    if form.is_valid() and (start_date or end_date):
        args = self.__get_filterargs(
            start=start_date,
            end=end_date,
        )
        return queryset.filter(**args)
That's the trick - we create self.form when django tries to get our queryset. This allows to create unbount and bound form in the single place.
def _nonzero_intersection(m, m_hat): n_features, _ = m.shape m_no_diag = m.copy() m_no_diag[np.diag_indices(n_features)] = 0 m_hat_no_diag = m_hat.copy() m_hat_no_diag[np.diag_indices(n_features)] = 0 m_hat_nnz = len(np.nonzero(m_hat_no_diag.flat)[0]) m_nnz = len(np.nonzero(m_no_diag.flat)[0]) intersection_nnz = len( np.intersect1d(np.nonzero(m_no_diag.flat)[0], np.nonzero(m_hat_no_diag.flat)[0]) ) return m_nnz, m_hat_nnz, intersection_nnz
Count the number of nonzeros in and between m and m_hat. Returns ---------- m_nnz : number of nonzeros in m (w/o diagonal) m_hat_nnz : number of nonzeros in m_hat (w/o diagonal) intersection_nnz : number of nonzeros in intersection of m/m_hat (w/o diagonal)
def grad_dot(dy, x1, x2):
    """Gradient of NumPy dot product w.r.t. the left hand side.

    Args:
        dy: The gradient with respect to the output.
        x1: The left hand side of the `numpy.dot` function.
        x2: The right hand side.

    Returns:
        The gradient with respect to `x1`, i.e. `x2.dot(dy.T)` with all
        the broadcasting involved.
    """
    if len(numpy.shape(x1)) == 1:
        # Vector-matrix case: promote dy to 2-D so the matmul below works.
        dy = numpy.atleast_2d(dy)
    elif len(numpy.shape(x2)) == 1:
        # Matrix-vector case: promote dy and x2 to column form.
        dy = numpy.transpose(numpy.atleast_2d(dy))
        x2 = numpy.transpose(numpy.atleast_2d(x2))
    # Sum x2 over its leading broadcast dimensions, then transpose so it
    # can be right-multiplied with dy.
    x2_t = numpy.transpose(numpy.atleast_2d(
        numpy.sum(x2, axis=tuple(numpy.arange(numpy.ndim(x2) - 2)))))
    # Likewise sum dy over the dimensions that matched x2's broadcast axes.
    dy_x2 = numpy.sum(dy, axis=tuple(-numpy.arange(numpy.ndim(x2) - 2) - 2))
    # Reshape back to x1's shape to undo the atleast_2d promotions above.
    return numpy.reshape(numpy.dot(dy_x2, x2_t), numpy.shape(x1))
Gradient of NumPy dot product w.r.t. to the left hand side. Args: dy: The gradient with respect to the output. x1: The left hand side of the `numpy.dot` function. x2: The right hand side Returns: The gradient with respect to `x1` i.e. `x2.dot(dy.T)` with all the broadcasting involved.
def get_staged_files():
    """Get all files staged for the current commit.

    Parses the output of ``git status --porcelain`` with the
    module-level ``modified_re`` pattern.

    NOTE(review): ``out`` is bytes on Python 3 (no text mode is set), so
    ``modified_re`` is presumably a compiled *bytes* pattern — confirm.
    """
    proc = subprocess.Popen(('git', 'status', '--porcelain'),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    # Each porcelain line is "XY <path>"; the regex extracts the paths of
    # interest (presumably the staged entries).
    staged_files = modified_re.findall(out)
    return staged_files
Get all files staged for the current commit.
def save_headers(cls, filename: str, response: HTTPResponse):
    """Prepend the HTTP response header to the file.

    The header followed by the body is written to a temporary
    ``<filename>-new`` file, which then replaces the original.

    Args:
        filename: The path of the file.
        response: Response whose header and body are written.
    """
    new_filename = filename + '-new'
    # Bug fix: the old code did open('wb'), which opened a file literally
    # named "wb" (in read mode) instead of opening new_filename for
    # writing.
    with open(new_filename, 'wb') as new_file:
        new_file.write(response.header())
        # Restore the body's file offset after copying it out.
        with wpull.util.reset_file_offset(response.body):
            response.body.seek(0)
            shutil.copyfileobj(response.body, new_file)
    os.remove(filename)
    os.rename(new_filename, filename)
Prepend the HTTP response header to the file. Args: filename: The path of the file response: Response
def get_cash_asset_class(self) -> AssetClass:
    """Find the cash asset class by name ("cash", case-insensitive).

    Returns None when no such class exists.
    """
    return next(
        (ac for ac in self.asset_classes if ac.name.lower() == "cash"),
        None,
    )
Find the cash asset class by name.
def run(
    target,
    target_type,
    tags=None,
    ruleset_name=None,
    ruleset_file=None,
    ruleset=None,
    logging_level=logging.WARNING,
    checks_paths=None,
    pull=None,
    insecure=False,
    skips=None,
    timeout=None,
):
    """Run the sanity checks for the target.

    :param timeout: timeout per-check (in seconds)
    :param skips: name of checks to skip
    :param target: str (image name, ostree or dockertar) or ImageTarget
        or path/file-like object for dockerfile
    :param target_type: string, either image, dockerfile, dockertar
    :param tags: list of str (if not None, the checks will be filtered by tags)
    :param ruleset_name: str (e.g. fedora; if None, default would be used)
    :param ruleset_file: fileobj instance holding ruleset configuration
    :param ruleset: dict, content of a ruleset file
    :param logging_level: logging level (default logging.WARNING)
    :param checks_paths: list of str, directories where the checks are present
    :param pull: bool, pull the image from registry
    :param insecure: bool, pull from an insecure registry (HTTP/invalid TLS)
    :return: Results instance
    """
    _set_logging(level=logging_level)
    logger.debug("Checking started.")
    # Resolve the target object (may pull an image or open a dockerfile).
    target = Target.get_instance(
        target=target,
        logging_level=logging_level,
        pull=pull,
        target_type=target_type,
        insecure=insecure,
    )
    # Select the checks applicable to this target type / ruleset / tags.
    checks_to_run = _get_checks(
        target_type=target.__class__,
        tags=tags,
        ruleset_name=ruleset_name,
        ruleset_file=ruleset_file,
        ruleset=ruleset,
        checks_paths=checks_paths,
        skips=skips,
    )
    result = go_through_checks(target=target, checks=checks_to_run, timeout=timeout)
    return result
Runs the sanity checks for the target. :param timeout: timeout per-check (in seconds) :param skips: name of checks to skip :param target: str (image name, ostree or dockertar) or ImageTarget or path/file-like object for dockerfile :param target_type: string, either image, dockerfile, dockertar :param tags: list of str (if not None, the checks will be filtered by tags.) :param ruleset_name: str (e.g. fedora; if None, default would be used) :param ruleset_file: fileobj instance holding ruleset configuration :param ruleset: dict, content of a ruleset file :param logging_level: logging level (default logging.WARNING) :param checks_paths: list of str, directories where the checks are present :param pull: bool, pull the image from registry :param insecure: bool, pull from an insecure registry (HTTP/invalid TLS) :return: Results instance
def from_request(request=None) -> dict:
    """Fetch the arguments for the current Flask application request.

    JSON body arguments are preferred, then query/form values; an empty
    dict is returned when neither source is available.

    :param request: optional request object; defaults to the active
        flask request proxy.
    """
    request = request if request else flask_request
    try:
        json_args = request.get_json(silent=True)
    except Exception:
        # Outside a request context (or unparsable body) — ignore.
        json_args = None
    try:
        get_args = request.values
    except Exception:
        get_args = None
    # First non-None source wins; the trailing {} guarantees a result.
    arg_sources = list(filter(
        lambda arg: arg is not None, [json_args, get_args, {}]
    ))
    return arg_sources[0]
Fetches the arguments for the current Flask application request
def preview(ident):
    """Perform a dry-run harvest for the given source and return the results."""
    source = get_source(ident)
    backend_cls = backends.get(current_app, source.backend)
    limit = current_app.config['HARVEST_PREVIEW_MAX_ITEMS']
    backend = backend_cls(source, dryrun=True, max_items=limit)
    return backend.harvest()
Preview an harvesting for a given source
def step(self, step_size: Timedelta=None):
    """Advance the simulation one step.

    Parameters
    ----------
    step_size
        An optional size of step to take.  Must be the same type as the
        simulation clock's step size (usually a pandas.Timedelta).

    Raises
    ------
    ValueError
        If ``step_size`` is not an instance of the clock's step size type.
    """
    # Remember the clock's configured step size so it can be restored.
    old_step_size = self.clock.step_size
    if step_size is not None:
        if not isinstance(step_size, type(self.clock.step_size)):
            raise ValueError(f"Provided time must be an instance of {type(self.clock.step_size)}")
        # Temporarily override the (private) step size for this one step.
        self.clock._step_size = step_size
    super().step()
    self.clock._step_size = old_step_size
Advance the simulation one step. Parameters ---------- step_size An optional size of step to take. Must be the same type as the simulation clock's step size (usually a pandas.Timedelta).
def are_none(sequences: Sequence[Sized]) -> bool:
    """Return True when ``sequences`` is empty/falsy or every entry is None."""
    if not sequences:
        return True
    for item in sequences:
        if item is not None:
            return False
    return True
Returns True if all sequences are None.
def save_state(self):
    """Save current state of GUI to configuration file."""
    set_setting('lastSourceDir', self.source_directory.text())
    set_setting('lastOutputDir', self.output_directory.text())
    # Whether the "use scenario directory" radio button is selected.
    set_setting(
        'useDefaultOutputDir', self.scenario_directory_radio.isChecked())
Save current state of GUI to configuration file.
def derive_value(self, value):
    """Derives a new event from this one setting the ``value`` attribute.

    Args:
        value: (any): The value associated with the derived event.

    Returns:
        IonEvent: The newly generated non-thunk event.
    """
    # All other fields are carried over from this event unchanged.
    return IonEvent(
        self.event_type, self.ion_type, value, self.field_name,
        self.annotations, self.depth
    )
Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event.
def draw_rect(grid, attr, dc, rect):
    """Draw a filled rectangle on the given wx device context.

    ``grid`` and ``attr`` are part of the renderer call signature but are
    unused here.
    """
    # Spring-green solid fill with a 1px blue outline.
    dc.SetBrush(wx.Brush(wx.Colour(15, 255, 127), wx.SOLID))
    dc.SetPen(wx.Pen(wx.BLUE, 1, wx.SOLID))
    dc.DrawRectangleRect(rect)
Draws a rect