code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def auto_instantiate(*classes): def decorator(f): sig = signature(f) @wraps(f) def _(*args, **kwargs): bvals = sig.bind(*args, **kwargs) for varname, val in bvals.arguments.items(): anno = sig.parameters[varname].annotation if anno in c...
Creates a decorator that will instantiate objects based on function parameter annotations. The decorator will check every argument passed into ``f``. If ``f`` has an annotation for the specified parameter and the annotation is found in ``classes``, the parameter value passed in will be used to construc...
def flatten_all_but_last(a):
    """Flatten all dimensions of tensor *a* except the last.

    :param a: a TensorFlow tensor of rank >= 1
    :return: a rank-2 tensor of shape (-1, last_dim)
    """
    ret = tf.reshape(a, [-1, tf.shape(a)[-1]])
    if not tf.executing_eagerly():
        # In graph mode reshape loses the static shape; restore the known
        # last dimension so downstream ops can rely on it.
        ret.set_shape([None] + a.get_shape().as_list()[-1:])
    return ret
Flatten all dimensions of a except the last.
def _apply_Create(self, change): ar = _AzureRecord(self._resource_group, change.new) create = self._dns_client.record_sets.create_or_update create(resource_group_name=ar.resource_group, zone_name=ar.zone_name, relative_record_set_name=ar.relative_record_set_name, ...
A record from change must be created. :param change: a change object :type change: octodns.record.Change :rtype: None
def minor_extent(self) -> complex:
    """Return the smaller deviation of the two extremes from the null point."""
    upper = self.max() - self.null
    lower = self.null - self.min()
    return min(upper, lower)
Minimum deviation from null.
def load_configs(self, conf_file):
    """Read a section-less config file into a synthetic ``[global]`` section.

    The file is assumed to contain no section headers, so a ``[global]``
    header is prepended before feeding the stream to the parser.

    :param conf_file: path of the config file to read
    :return: the parsed ``global`` section proxy
    """
    with open(conf_file) as stream:
        prefixed = itertools.chain(("[global]",), stream)
        self._config.read_file(prefixed)
    return self._config['global']
Assumes that the config file does not have any sections, so throw it all in global
def run_command( host, command, username=None, key_path=None, noisy=True ): with HostSession(host, username, key_path, noisy) as s: if noisy: print("\n{}{} $ {}\n".format(shakedown.fchr('>>'), host, command)) s.run(command) ec, output = s.get_r...
Run a command via SSH, proxied through the mesos master :param host: host or IP of the machine to execute the command on :type host: str :param command: the command to execute :type command: str :param username: SSH username :type username: str :param key_path: p...
def not_query(expression):
    """Apply the logical not operator to a query expression.

    :param expression: the query expression to negate (compiled eagerly)
    :return: a callable(index) yielding every key NOT matched by it
    """
    compiled_expression = compile_query(expression)
    def _not(index, expression=compiled_expression):
        # Complement: all keys in the index minus those the inner
        # expression selects.
        all_keys = index.get_all_keys()
        returned_keys = expression(index)
        return [key for key in all_keys if key not in returned_keys]
    return _not
Apply logical not operator to expression.
def parse_multiple_json(json_file, offset=None): json_info_list = [] if not os.path.exists(json_file): return json_info_list try: with open(json_file, "r") as f: if offset: f.seek(offset) for line in f: if line[-1] != "\n": ...
Parse multiple json records from the given file. Seek to the offset as the start point before parsing if offset set. return empty list if the json file does not exists or exception occurs. Args: json_file (str): File path to be parsed. offset (int): Initial seek position of the file. ...
def flat_map(self, func=None, name=None): if func is None: func = streamsx.topology.runtime._identity if name is None: name = 'flatten' sl = _SourceLocation(_source_info(), 'flat_map') _name = self.topology.graph._requested_name(name, action='flat_map', fun...
Maps and flatterns each tuple from this stream into 0 or more tuples. For each tuple on this stream ``func(tuple)`` is called. If the result is not `None` then the the result is iterated over with each value from the iterator that is not `None` will be submitted to the return stream. ...
def unregister_message_handler(self, target_or_handler):
    """Unregister an mpv script message handler.

    :param target_or_handler: either the script message target name (str)
        under which the handler was registered, or the handler callable
        itself, in which case every registration pointing at it is removed.
    """
    if isinstance(target_or_handler, str):
        del self._message_handlers[target_or_handler]
    else:
        # Collect matching keys first: deleting entries while iterating a
        # dict's .items() view raises RuntimeError on Python 3.
        stale = [key for key, val in self._message_handlers.items()
                 if val == target_or_handler]
        for key in stale:
            del self._message_handlers[key]
Unregister a mpv script message handler for the given script message target name. You can also call the ``unregister_mpv_messages`` function attribute set on the handler function when it is registered.
def getTokensEndLoc(): import inspect fstack = inspect.stack() try: for f in fstack[2:]: if f[3] == "_parseNoCache": endloc = f[0].f_locals["loc"] return endloc else: raise ParseFatalException("incorrect usage of getTokensEndLo...
Method to be called from within a parse action to determine the end location of the parsed tokens.
def respond(self, code):
    """Start a response for this request.

    :param code: integer standard HTTP status code
    :return: an HttpResponse bound to this request's connection
    """
    resp = HttpResponse(code, self.connection)
    resp.request = self
    # Propagate the request's HTTP version when one was parsed.
    if hasattr(self, 'version'):
        resp.version = self.version
    return resp
Starts a response. ``code`` is an integer standing for standard HTTP status code. This method will automatically adjust the response to adapt to request parameters, such as "Accept-Encoding" and "TE".
def packagePlugin(self, dir=None, extraArgs=None):
    """Package a build of the Unreal plugin in the specified directory,
    suitable for use as a prebuilt Engine module.

    :param dir: plugin root directory; defaults to the current working
        directory at call time (not import time).
    :param extraArgs: optional list of extra arguments appended to the
        UAT invocation.
    """
    # `os.getcwd()` as a default parameter would be frozen at import time,
    # and a `[]` default would be shared (and mutable) across calls --
    # resolve both lazily instead.
    if dir is None:
        dir = os.getcwd()
    if extraArgs is None:
        extraArgs = []
    distDir = os.path.join(os.path.abspath(dir), 'dist')
    self.runUAT([
        'BuildPlugin',
        '-Plugin=' + self.getPluginDescriptor(dir),
        '-Package=' + distDir
    ] + extraArgs)
Packages a build of the Unreal plugin in the specified directory, suitable for use as a prebuilt Engine module
def defaultMachine(use_rpm_default=True): if use_rpm_default: try: rmachine = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip() rmachine = SCons.Util.to_str(rmachine) except Exception as e: return defaultMachine(False) else: ...
Return the canonicalized machine name.
def parse_value(cell):
    """Extract the value of an Excel cell as text.

    Strings are stripped of surrounding whitespace; datetimes are
    rendered in ISO-8601 form; other values pass through unchanged.
    """
    value = cell.value
    if isinstance(value, string_types):
        value = value.strip()
    if isinstance(value, (datetime)):
        value = value.isoformat()
    return value
Extract the value of an Excel cell as text.
def _load_rules(self): for ruleset in self.active_rulesets: section_name = 'sweep_rules_' + ruleset.lower() try: ruledefs = getattr(self.config, section_name) except AttributeError: raise error.UserError("There is no [{}] section in your config...
Load rule definitions from config.
def cookies(self):
    """Return the response's ``Set-Cookie`` header parsed into a dict.

    :return: mapping of cookie name -> cookie value
    """
    jar = Cookie.SimpleCookie(self.getheader('set-cookie'))
    return {morsel.key: morsel.value for morsel in jar.values()}
Return the response's cookies as a dict of name -> value.
def Run(self): if not data_store.AFF4Enabled(): return try: filestore = aff4.FACTORY.Create( FileStore.PATH, FileStore, mode="rw", token=aff4.FACTORY.root_token) filestore.Close() hash_filestore = aff4.FACTORY.Create( HashFileStore.PATH, HashFileStore, ...
Create FileStore and HashFileStore namespaces.
def enable_autocuts(self, option):
    """Set ``autocuts`` behavior.

    Parameters
    ----------
    option : str
        Option for auto-cut behavior (case-insensitive); must be one of
        ``self.autocuts_options``.

    Raises
    ------
    ImageViewError
        If `option` is not an acceptable autocuts option.
    """
    option = option.lower()
    # Raise explicitly instead of `assert`: asserts are stripped under -O,
    # and the original format string had two %s placeholders but only one
    # argument, so the error path itself raised TypeError.
    if option not in self.autocuts_options:
        raise ImageViewError("Bad autocuts option '%s': must be one of %s" % (
            option, str(self.autocuts_options)))
    self.t_.set(autocuts=option)
Set ``autocuts`` behavior. Parameters ---------- option : {'on', 'override', 'once', 'off'} Option for auto-cut behavior. A list of acceptable options can also be obtained by :meth:`get_autocuts_options`. Raises ------ ginga.ImageView.ImageViewEr...
def readCell(self, row, col): try: if self.__sheet is None: self.openSheet(super(ExcelRead, self).DEFAULT_SHEET) return self.__sheet.cell(row, col).value except BaseException as excp: raise UfException(Errors.UNKNOWN_ERROR, "Unknown Error in Exce...
Read the value of a single cell from the sheet, opening the default sheet first if none is open.
def truncate(self, table):
    """Empty a table (or several) by deleting all of its rows.

    :param table: a single table name, or a list/set/tuple of names,
        each of which is truncated in turn.
    """
    tables = table if isinstance(table, (list, set, tuple)) else [table]
    for name in tables:
        self._truncate(name)
Empty a table by deleting all of its rows.
def is_seq(obj):
    """Return True if *obj* is iterable but is not a string.

    :param obj: any object
    :return: True for non-string iterables, False otherwise
    """
    if not hasattr(obj, '__iter__'):
        return False
    # `basestring` exists only on Python 2 and raised NameError here on
    # Python 3; (str, bytes) covers text and byte strings on Python 3.
    if isinstance(obj, (str, bytes)):
        return False
    return True
Returns True if object is not a string but is iterable
def _update_device_from_fs(self, device): try: directory_entries = listdir(device["mount_point"]) lowercase_directory_entries = [e.lower() for e in directory_entries] if self.MBED_HTM_NAME.lower() in lowercase_directory_entries: self._update_device_from_htm(de...
Updates the device information based on files from its 'mount_point' @param device Dictionary containing device information
def add_new_devices_callback(self, callback): self._new_devices_callbacks.append(callback) _LOGGER.debug('Added new devices callback to %s', callback)
Register as callback for when new devices are added.
def get_absolute_path(cls, roots, path):
    """Resolve ``path`` against the first root that contains it.

    :param roots: candidate base directories, tried in order
    :param path: relative path to resolve
    :return: the absolute path under one of the roots, or the sentinel
        string ``'file-not-found'`` when no root holds an existing entry
    """
    for root in roots:
        candidate = os.path.abspath(os.path.join(root, path))
        # Reject candidates that escape the root (e.g. via "..") and
        # require the target to actually exist.
        if candidate.startswith(root) and os.path.exists(candidate):
            return candidate
    return 'file-not-found'
Returns the absolute location of ``path`` relative to one of the ``roots``. ``roots`` is the path configured for this `StaticFileHandler` (in most cases the ``static_path`` `Application` setting).
def get_learning_objectives(self): mgr = self._get_provider_manager('LEARNING') lookup_session = mgr.get_objective_lookup_session(proxy=getattr(self, "_proxy", None)) lookup_session.use_federated_objective_bank_view() return lookup_session.get_objectives_by_ids(self.get_learning_objectiv...
This method also mirrors that in the Item.
def init_pipette(): global session pipette_info = set_current_mount(session.adapter, session) pipette = pipette_info['pipette'] res = {} if pipette: session.current_model = pipette_info['model'] if not feature_flags.use_protocol_api_v2(): mount = pipette.mount ...
Finds pipettes attached to the robot currently and chooses the correct one to add to the session. :return: The pipette type and mount chosen for deck calibration
def namedb_get_all_names( cur, current_block, offset=None, count=None, include_expired=False ): unexpired_query = "" unexpired_args = () if not include_expired: unexpired_query, unexpired_args = namedb_select_where_unexpired_names( current_block ) unexpired_query = 'WHERE {}'.format(unexpire...
Get a list of all names in the database, optionally paginated with offset and count. Exclude expired names. Include revoked names.
def cat(self, paths, check_crc=False): if not isinstance(paths, list): raise InvalidInputException("Paths should be a list") if not paths: raise InvalidInputException("cat: no path given") processor = lambda path, node, check_crc=check_crc: self._handle_cat(path, node, ch...
Fetch all files that match the source file pattern and display their content on stdout. :param paths: Paths to display :type paths: list of strings :param check_crc: Check for checksum errors :type check_crc: boolean :returns: a generator that yields strings
def close_statement(self, connection_id, statement_id):
    """Close a statement via the protobuf RPC layer.

    :param connection_id: ID of the current connection.
    :param statement_id: ID of the statement to close.
    """
    request = requests_pb2.CloseStatementRequest()
    request.connection_id = connection_id
    request.statement_id = statement_id
    self._apply(request)
Closes a statement. :param connection_id: ID of the current connection. :param statement_id: ID of the statement to close.
def format(self, fmt): val = '' for x in fmt: if x == 'd': val += self._driv elif x == 'p': val += self._path elif x == 'n': val += self._name elif x == 'x': val += self._ext elif ...
Returns string representing the items specified in the format string The format string can contain: .. code:: d - drive letter p - path n - name x - extension z - file size t - file time in seconds And, you can string th...
def send(self, **kwargs):
    """Create and send a single request, returning its matching response.

    For example: ``send(ping=sc_pb.RequestPing())`` returns the ``ping``
    field of the response.

    Args:
        **kwargs: exactly one kwarg naming the Request field to fill in.

    Returns:
        The sub-message of the Response with the same name as the kwarg.
    """
    assert len(kwargs) == 1, "Must make a single request."
    res = self.send_req(sc_pb.Request(**kwargs))
    # The response field carries the same name as the request kwarg.
    return getattr(res, list(kwargs.keys())[0])
Create and send a specific request, and return the response. For example: send(ping=sc_pb.RequestPing()) => sc_pb.ResponsePing Args: **kwargs: A single kwarg with the name and value to fill in to Request. Returns: The Response corresponding to your request.
def unhumanize_class(my_classes): result = [] interval = my_classes[-1] - my_classes[-2] min_value = 0 for max_value in my_classes: result.append((format_decimal(interval, min_value), format_decimal(interval, max_value))) min_value = max_value return result
Return class as interval without formatting.
def restore(self):
    """Reset the active WCS keywords to the values stored in the backup.

    Does nothing when no backup has been recorded. The 'WCSCDATE'
    timestamp entry is deliberately left alone.
    """
    if len(list(self.backup.keys())) == 0:
        return
    for key in self.backup.keys():
        if key != 'WCSCDATE':
            self.__dict__[self.wcstrans[key]] = self.orig_wcs[self.backup[key]]
    self.update()
Reset the active WCS keywords to values stored in the backup keywords.
def max_repetition_level(self, path):
    """Get the max repetition level for the given schema path.

    :param path: iterable of schema element names from root to leaf
    :return: count of REQUIRED elements along the path
    """
    # NOTE(review): despite the name, this counts REQUIRED elements;
    # parquet repetition levels normally count REPEATED ones -- confirm
    # against the format spec.
    max_level = 0
    for part in path:
        element = self.schema_element(part)
        if element.repetition_type == parquet_thrift.FieldRepetitionType.REQUIRED:
            max_level += 1
    return max_level
Get the max repetition level for the given schema path.
def Validate(self, problems=default_problem_reporter): found_problem = False found_problem = ((not util.ValidateRequiredFieldsAreNotEmpty( self, self._REQUIRED_FIELD_NAMES, problems)) or found_problem) found_problem = self.ValidateAgencyUrl(problems) or fo...
Validate attribute values and this object's internal consistency. Returns: True iff all validation checks passed.
def wrap_passthrough(self, text, multiline=True): if not multiline: text = text.lstrip() if multiline: out = "\\" else: out = "\\\\" out += self.add_ref("passthrough", text) + unwrapper if not multiline: out += "\n" return o...
Wrap a passthrough.
def delete_all(self): def delete_action_gen(): scanner = scan(self.es, index=self.index_name, query={'query': {'match_all':{}}}) for v in scanner: yield { '_op_type': 'delete', '_index': self.in...
Delete all books from the index
def capture(self, payment_id, amount, data=None, **kwargs):
    """Capture Payment for the given id.

    Args:
        payment_id: id of the payment to capture
        amount: amount for which the payment is captured
        data: optional dict of extra POST fields; a fresh dict is used
            when omitted (the old ``{}`` default was shared across calls
            and mutated below, leaking 'amount' between requests).

    Returns:
        Payment dict after getting captured.
    """
    if data is None:
        data = {}
    url = "{}/{}/capture".format(self.base_url, payment_id)
    data['amount'] = amount
    return self.post_url(url, data, **kwargs)
Capture Payment for given Id Args: payment_id : Id for which payment object has to be retrieved Amount : Amount for which the payment has to be retrieved Returns: Payment dict after getting captured
def scaledBy(self, scale):
    """Return a deep copy of this Value scaled by *scale*.

    The original object is left untouched.
    """
    scaled = deepcopy(self)
    if type(scaled.value) in (int, float):
        scaled.value *= scale
    elif isinstance(scaled.value, numbers):
        # NOTE(review): `numbers` must be a class here for isinstance to
        # work -- if it is the stdlib `numbers` module this raises
        # TypeError; `numbers.Number` (or a project class) was probably
        # intended. Confirm.
        scaled.value.values = tuple(v * scale for v in scaled.value.values)
    return scaled
Return a new Value scaled by a given number for ints and floats.
def create_wallet(password, api_code, service_url, priv=None, label=None, email=None): params = {'password': password, 'api_code': api_code} if priv is not None: params['priv'] = priv if label is not None: params['label'] = label if email is not None: ...
Create a new Blockchain.info wallet. It can be created containing a pre-generated private key or will otherwise generate a new private key. :param str password: password for the new wallet. At least 10 characters. :param str api_code: API code with create wallets permission :param str...
def is_valid_size(size, chunk_size):
    """Validate *size* against the configured multipart bounds.

    :param size: total object size in bytes
    :param chunk_size: size of each multipart chunk
    :return: True when min-chunksize < size <= chunk_size * max-parts
    """
    min_csize = current_app.config['FILES_REST_MULTIPART_CHUNKSIZE_MIN']
    max_size = chunk_size * current_app.config['FILES_REST_MULTIPART_MAX_PARTS']
    return min_csize < size <= max_size
Validate max theoretical size.
def is_ready(self):
    """Report whether the worker thread exists and its ioloop is ready.

    :returns bool: True only when both conditions hold.
    """
    return bool(self._thread and self._ready.is_set())
Is thread & ioloop ready. :returns bool:
def gid(self):
    """Return the group id that the daemon will run with.

    Resolved lazily and cached: the configured group name is looked up
    via grp, falling back to the current process's gid.

    :rtype: int
    """
    if not self._gid:
        if self.controller.config.daemon.group:
            # NOTE(review): the guard checks self.controller.config but
            # the lookup reads self.config -- confirm both point at the
            # same config object.
            self._gid = grp.getgrnam(self.config.daemon.group).gr_gid
        else:
            self._gid = os.getgid()
    return self._gid
Return the group id that the daemon will run with :rtype: int
def extract_patches(images, patch_shape, samples_per_image=40, seed=0, cycle=True): rs = np.random.RandomState(seed) for Xi in itr.cycle(images): w, h = [Xi.shape[i]-patch_shape[i] for i in range(2)] assert w > 0 and h > 0 indices = np.asarray(list(itr.product(range(w...
Takes a set of images and yields randomly chosen patches of specified size. Parameters ---------- images : iterable The images have to be iterable, and each element must be a Numpy array with at least two spatial 2 dimensions as the first and second axis. patch_shape : tuple, length 2 ...
def mv_normal_cov_like(x, mu, C):
    """Multivariate normal log-likelihood parameterized by a covariance matrix.

    :param x: (n,k) or (k,) observation(s)
    :param mu: (k,) location parameter
    :param C: (k,k) positive-definite covariance matrix
    :return: log-likelihood, summed over the rows of a 2-D *x*
    """
    # Removed a stray bare `R` expression (remnant of an R"""...""" raw
    # docstring prefix) that raised NameError at call time.
    if len(np.shape(x)) > 1:
        return np.sum([flib.cov_mvnorm(r, mu, C) for r in x])
    else:
        return flib.cov_mvnorm(x, mu, C)
R""" Multivariate normal log-likelihood parameterized by a covariance matrix. .. math:: f(x \mid \pi, C) = \frac{1}{(2\pi|C|)^{1/2}} \exp\left\{ -\frac{1}{2} (x-\mu)^{\prime}C^{-1}(x-\mu) \right\} :Parameters: - `x` : (n,k) - `mu` : (k) Location parameter. - `C` : (k,k) Posit...
def _ApplySudsJurkoAppenderPatch(self): def PatchedAppend(self, parent, content): obj = content.value child = self.node(content) parent.append(child) for item in obj: cont = suds.mx.Content(tag=item[0], value=item[1]) suds.mx.appender.Appender.append(self, child, cont) su...
Appends a Monkey Patch to the suds.mx.appender module. This resolves an issue where empty objects are ignored and stripped from the request output. More details can be found on the suds-jurko issue tracker: https://goo.gl/uyYw0C
def vote(self, direction=0): url = self.reddit_session.config['vote'] data = {'id': self.fullname, 'dir': six.text_type(direction)} if self.reddit_session.user: urls = [urljoin(self.reddit_session.user._url, 'disliked'), urljoin(self.reddit_session...
Vote for the given item in the direction specified. Note: votes must be cast by humans. That is, API clients proxying a human's action one-for-one are OK, but bots deciding how to vote on content or amplifying a human's vote are not. See the reddit rules for more details on what constit...
def image_to_file(self, path): _LOGGER.debug("Writing image from %s to %s", self.name, path) response = self._cached_image if response.status_code == 200: with open(path, 'wb') as imgfile: copyfileobj(response.raw, imgfile) else: _LOGGER.error("Can...
Write image to file. :param path: Path to write file
def plotPixel(self, x, y, color="black"): p = Point(x, y) p.fill(color) p.draw(self) p.t = lambda v: v p.tx = lambda v: v p.ty = lambda v: v
Does not use the coordinate system.
def cli(env, volume_id, replicant_id, immediate): block_storage_manager = SoftLayer.BlockStorageManager(env.client) success = block_storage_manager.failover_to_replicant( volume_id, replicant_id, immediate ) if success: click.echo("Failover to replicant is now in progress...
Failover a block volume to the given replicant volume.
def precision_at_proportions(self): return plot.precision_at_proportions(self.y_true, self.y_score, ax=_gen_ax())
Precision at proportions plot
def encoding(self): if hasattr(self, '_encoding'): return self._encoding if isinstance(self.content, six.text_type): return 'unicode' encoding = get_encoding(self.headers, self.content) if not encoding and chardet is not None: encoding = chardet.detect...
encoding of Response.content. if Response.encoding is None, encoding will be guessed by header or content or chardet if available.
def update_settings(self, service_id, version_number, settings=None):
    """Update the settings for a particular service and version.

    :param service_id: id of the service to update
    :param version_number: version of the service to update
    :param settings: optional dict of setting name -> value to PUT
    :return: a FastlySettings wrapping the server response
    """
    # Avoid a mutable default argument; fall back to an empty mapping.
    body = urllib.urlencode(settings if settings is not None else {})
    content = self._fetch("/service/%s/version/%d/settings" % (service_id, version_number),
                          method="PUT", body=body)
    return FastlySettings(self, content)
Update the settings for a particular service and version.
def localize(self, location=None, latitude=None, longitude=None, **kwargs): if location is None: location = Location(latitude, longitude, **kwargs) return LocalizedPVSystem(pvsystem=self, location=location)
Creates a LocalizedPVSystem object using this object and location data. Must supply either location object or latitude, longitude, and any location kwargs Parameters ---------- location : None or Location, default None latitude : None or float, default None longi...
def tokens(self): spans = self.word_tokenizer.span_tokenize(self.text) toks = [Token( text=self.text[span[0]:span[1]], start=span[0] + self.start, end=span[1] + self.start, lexicon=self.lexicon ) for span in spans] return toks
Return a list of token Spans for this sentence.
def add_defaults(self, ctype: ContentType = None) -> "InstanceNode": val = self.value if not (isinstance(val, StructuredValue) and self.is_internal()): return self res = self if isinstance(val, ObjectValue): if val: for mn in self._member_names(): ...
Return the receiver with defaults added recursively to its value. Args: ctype: Content type of the defaults to be added. If it is ``None``, the content type will be the same as receiver's.
def cached(func):
    """Call *func* once and replay its first result on every later call.

    Note the original contract is preserved exactly: arguments of later
    calls are ignored once a result is stored, and a None result is never
    cached (the function is re-invoked until it returns non-None).
    """
    memo = {}

    def call_or_cache(*args, **kwargs):
        if memo.get('value') is None:
            memo['value'] = func(*args, **kwargs)
        return memo['value']

    return call_or_cache
Memoize a function result.
def buy(self, currencyPair, rate, amount, fillOrKill=None, immediateOrCancel=None, postOnly=None): return self._private('buy', currencyPair=currencyPair, rate=rate, amount=amount, fillOrKill=fillOrKill, immediateOrCancel=immediateOrCancel, ...
Places a limit buy order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". If successful, the method will return the order number. You may optionally set "fillOrKill", "immediateOrCancel", "postOnly" to 1. A fill-or-kill order will either fill in its e...
def convert_md_to_rst(md_path, rst_temp_path): command = "pandoc --write=rst --output=%s %s" % (rst_temp_path, md_path) print("converting with pandoc: %s to %s\n-->%s" % (md_path, rst_temp_path, command)) if os.path.exists(rst_temp_path): os.rem...
Convert the contents of a file from Markdown to reStructuredText. Returns the converted text as a Unicode string. Arguments: md_path: a path to a UTF-8 encoded Markdown file to convert. rst_temp_path: a temporary path to which to write the converted contents.
def from_list(cls, values):
    """Construct a tree by inserting each path from *values* in order.

    :param values: iterable of paths to add
    :return: the newly built tree instance
    """
    tree = cls()
    for value in values:
        tree.add(value)
    return tree
Construct a tree from a list with paths.
def get_all_filters(self, server_id): server = self._get_server(server_id) return server.conn.EnumerateInstances('CIM_IndicationFilter', namespace=server.interop_ns)
Return all indication filters in a WBEM server. This function contacts the WBEM server and retrieves the indication filters by enumerating the instances of CIM class "CIM_IndicationFilter" in the Interop namespace of the WBEM server. Parameters: server_id (:term:`string`): ...
def get_named_by_definition(cls, element_list, string_def):
    """Return the value of the first element whose IOOS definition matches.

    :param element_list: iterable of parsed xml elements exposing
        ``.definition`` and ``.value``
    :param string_def: the IOOS definition string to look for
    :return: the matching element's value, or None when nothing matches
        or the elements lack the expected attributes
    """
    try:
        # `next` with a default replaces the blanket try/except around
        # StopIteration; only malformed elements still need catching.
        return next(
            (st.value for st in element_list if st.definition == string_def),
            None,
        )
    except AttributeError:
        # Element without .definition/.value -- preserve the old
        # broad-except behavior of returning None.
        return None
Attempts to get an IOOS definition from a list of xml elements
def html_init(name):
    """Return the HTML report file's opening lines.

    :param name: name of the file, used as the page title
    :type name: str
    :return: html_init as str
    """
    pieces = [
        "<html>\n",
        "<head>\n",
        "<title>" + str(name) + "</title>\n",
        "</head>\n",
        "<body>\n",
        '<h1 style="border-bottom:1px solid '
        'black;text-align:center;">PyCM Report</h1>',
    ]
    return "".join(pieces)
Return HTML report file first lines. :param name: name of file :type name : str :return: html_init as str
def add_lambda_permissions(function='', statement_id='', action='lambda:InvokeFunction', principal='', source_arn='', env='', region='us-east-1'): session...
Add permission to Lambda for the event trigger. Args: function (str): Lambda function name statement_id (str): IAM policy statement (principal) id action (str): Lambda action to allow principal (str): AWS principal to add permissions source_arn (str): ARN of the source of th...
def upload(self, filepath, service_path, remove=False): local = OSFS(os.path.dirname(filepath)) if self.fs.hassyspath(service_path) and ( self.fs.getsyspath(service_path) == local.getsyspath( os.path.basename(filepath))): if remove: os.remove(filep...
"Upload" a file to a service This copies a file from the local filesystem into the ``DataService``'s filesystem. If ``remove==True``, the file is moved rather than copied. If ``filepath`` and ``service_path`` paths are the same, ``upload`` deletes the file if ``remove==True`` and retur...
def run(self, *args): params = self.parser.parse_args(args) sources = params.source code = self.autocomplete(sources) return code
Autocomplete profile information.
def override_if_not_in_args(flag, argument, args):
    """Append *flag* and its *argument* to *args* unless the flag is present.

    ``args`` is mutated in place; nothing is returned.
    """
    if flag in args:
        return
    args.extend([flag, argument])
Checks if flags is in args, and if not it adds the flag to args.
def visit_raise(self, node, parent): newnode = nodes.Raise(node.lineno, node.col_offset, parent) newnode.postinit( _visit_or_none(node, "type", self, newnode), _visit_or_none(node, "inst", self, newnode), _visit_or_none(node, "tback", self, newnode), ) ...
visit a Raise node by returning a fresh instance of it
def _get_cache_key(self, obj): if obj is not None: return '{}-{}'.format(id(self), obj.pk) return "{}-None".format(id(self))
Derive cache key for given object.
def resource_update(sender, instance, created=False, **kwargs): resource = instance try: new_configuration = CostTrackingRegister.get_configuration(resource) except ResourceNotRegisteredError: return models.PriceEstimate.update_resource_estimate( resource, new_configuration, rais...
Update resource consumption details and price estimate if its configuration has changed. Create estimates for previous months if resource was created not in current month.
def get_tags_from_job(user, job_id): job = v1_utils.verify_existence_and_get(job_id, _TABLE) if not user.is_in_team(job['team_id']) and not user.is_read_only_user(): raise dci_exc.Unauthorized() JTT = models.JOIN_JOBS_TAGS query = (sql.select([models.TAGS]) .select_from(JTT.join(mod...
Retrieve all tags attached to a job.
def _pfp__handle_implicit_array(self, name, child): existing_child = self._pfp__children_map[name] if isinstance(existing_child, Array): existing_child.append(child) return existing_child else: cls = child._pfp__class if hasattr(child, "_pfp__class") else chil...
Handle inserting implicit array elements
def _extract(expr, pat, flags=0, group=0):
    """Find a regex group in each string of the sequence *expr*.

    :param expr: string sequence expression to search
    :param pat: pattern or regular expression
    :param flags: flags from the re module, e.g. re.IGNORECASE
    :param group: index of the group to extract (0 = whole match)
    :return: sequence or scalar of extracted strings
    """
    return _string_op(expr, Extract, _pat=pat, _flags=flags, _group=group)
Find group in each string in the Series using passed regular expression. :param expr: :param pat: Pattern or regular expression :param flags: re module, e.g. re.IGNORECASE :param group: if None as group 0 :return: sequence or scalar
def _getModules(self): modules = {} modulesPath = os.path.join("application", "module") moduleList = os.listdir(modulesPath) for moduleName in moduleList: modulePath = os.path.join(modulesPath, moduleName, "module.py") if not os.path.isfile(modulePath): ...
Import and load application modules. :return: <dict>
def create_check(self, label=None, name=None, check_type=None, disabled=False, metadata=None, details=None, monitoring_zones_poll=None, timeout=None, period=None, target_alias=None, target_hostname=None, target_receiver=None, test_only=False, include_debug=False): ...
Creates a check on this entity with the specified attributes. The 'details' parameter should be a dict with the keys as the option name, and the value as the desired setting.
def read(self, size=None): if self.closed: raise ValueError("I/O operation on closed file") buf = b"" if self.buffer: if size is None: buf = self.buffer self.buffer = b"" else: buf = self.buffer[:size] ...
Read at most size bytes from the file. If size is not present or None, read all data until EOF is reached.
def raise_error(self, error):
    """Convert an RPC error response into an exception.

    Raises the exception when :py:attr:`raises_errors` is set; otherwise
    returns it for the caller to handle.

    :param error: the error response received from the server
    :return: the built RPCError (only when not raised)
    """
    ex = exc.RPCError('Error calling remote procedure: %s' % error.error['message'])
    if self.raises_errors:
        raise ex
    return ex
Raises the exception in the client. Called by the client to convert the :py:class:`RPCErrorResponse` into an Exception and raise or return it depending on the :py:attr:`raises_errors` attribute. :param error: The error response received from the server. :type error: :py:class:`RPCRespo...
def get_url_distribution(self, after=None, reports='true', limit=1000, timeout=None): params = {'apikey': self.api_key, 'after': after, 'reports': reports, 'limit': limit} try: response = requests.get(self.base + 'url/distribution', params=params, ...
Get a live feed with the lastest URLs submitted to VirusTotal. Allows you to retrieve a live feed of URLs submitted to VirusTotal, along with their scan reports. This call enables you to stay synced with VirusTotal URL submissions and replicate our dataset. :param after: (optional) Retrieve UR...
def opt(self, x_init, f_fp=None, f=None, fp=None): tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged', 'Maximum number of f evaluations reached', 'Line search failed', 'Function is constant'] assert f_fp != None, "TNC requires f_fp" opt_dict = {} if self.xtol is not...
Run the TNC optimizer
def main(jlink_serial, device): buf = StringIO.StringIO() jlink = pylink.JLink(log=buf.write, detailed_log=buf.write) jlink.open(serial_no=jlink_serial) jlink.set_tif(pylink.enums.JLinkInterfaces.SWD) jlink.connect(device, verbose=True) sys.stdout.write('ARM Id: %d\n' % jlink.core_id()) sys....
Prints the core's information. Args: jlink_serial (str): the J-Link serial number device (str): the target CPU Returns: Always returns ``0``. Raises: JLinkException: on error
def adapt_meta(self, meta):
    """Extract surge-confirmation details from an error response's meta.

    :param meta: mapping holding a ``surge_confirmation`` sub-mapping
    :return: ``(href, surge_id)`` tuple
    """
    confirmation = meta.get('surge_confirmation')
    return (confirmation.get('href'),
            confirmation.get('surge_confirmation_id'))
Convert meta from error response to href and surge_id attributes.
def average(arr):
    """Return the arithmetic mean of *arr*.

    Exits the process (after an error message on stderr) when *arr* is
    empty; a one-element list returns its sole element unchanged.

    :param arr: list of numbers
    :return: the mean
    :rtype: float (the element's own type for single-element input)
    """
    count = len(arr)
    if count == 0:
        sys.stderr.write("ERROR: no content in array to take average\n")
        sys.exit()
    if count == 1:
        return arr[0]
    return float(sum(arr)) / float(count)
average of the values, must have more than 0 entries. :param arr: list of numbers :type arr: number[] a number array :return: average :rtype: float
def _get_timethresh_heuristics(self): if self.length > 1E5: time_thresh = 2.5 elif self.length > 1E4: time_thresh = 2.0 elif self.length > 1E3: time_thresh = 1.0 else: time_thresh = 0.5 return time_thresh
Reasonably decent heuristics for how much time to wait before updating progress.
def update_spec(self): if self.datafile.exists: with self.datafile.reader as r: self.header_lines = r.info['header_rows'] self.comment_lines = r.info['comment_rows'] self.start_line = r.info['data_start_row'] self.end_line = r.info['dat...
Update the source specification with information from the row intuiter, but only if the spec values are not already set.
def image_to_data(image, lang=None, config='', nice=0, output_type=Output.STRING): if get_tesseract_version() < '3.05': raise TSVNotSupported() config = '{} {}'.format('-c tessedit_create_tsv=1', config.strip()).strip() args = [...
Returns string containing box boundaries, confidences, and other information. Requires Tesseract 3.05+
def free(self): LOGGER.debug('Connection %s freeing', self.id) if self.handle.isexecuting(): raise ConnectionBusyError(self) with self._lock: self.used_by = None LOGGER.debug('Connection %s freed', self.id)
Remove the lock on the connection if the connection is not active :raises: ConnectionBusyError
def addQuickElement(self, name, contents=None, attrs=None, escape=True, cdata=False): if attrs is None: attrs = {} self.startElement(name, attrs) if contents is not None: self.characters(contents, escape=escape, cdata=cdata) self.endElement(name)
Convenience method for adding an element with no children.
def BFS(G, start): if start not in G.vertices: raise GraphInsertError("Vertex %s doesn't exist." % (start,)) color = {} pred = {} dist = {} queue = Queue() queue.put(start) for vertex in G.vertices: color[vertex] = 'white' pred[vertex] = None dist[v...
Algorithm for breadth-first searching the vertices of a graph.
def blobs(self, repository_ids=[], reference_names=[], commit_hashes=[]): if not isinstance(repository_ids, list): raise Exception("repository_ids must be a list") if not isinstance(reference_names, list): raise Exception("reference_names must be a list") if not isinstanc...
Retrieves the blobs of a list of repositories, reference names and commit hashes. So the result will be a DataFrame of all the blobs in the given commits that are in the given references that belong to the given repositories. >>> blobs_df = engine.blobs(repo_ids, ref_names, hashes) Cal...
def update_safe(filename: str, **kw: Any) -> Generator[IO, None, None]: with tempfile.NamedTemporaryFile( dir=os.path.dirname(filename), delete=False, prefix=f"{os.path.basename(filename)}.", **kw, ) as tf: if os.path.exists(filename): os.chmod(tf.name, os.sta...
Rewrite a file atomically. Clients are allowed to delete the tmpfile to signal that they don't want to have it updated.
def whois_emails(self, emails): api_name = 'opendns-whois-emails' fmt_url_path = u'whois/emails/{0}' return self._multi_get(api_name, fmt_url_path, emails)
Calls WHOIS Email end point Args: emails: An enumerable of string Emails Returns: A dict of {email: domain_result}
async def make_response(self, result: ResponseReturnValue) -> Response: status_or_headers = None headers = None status = None if isinstance(result, tuple): value, status_or_headers, headers = result + (None,) * (3 - len(result)) else: value = result ...
Make a Response from the result of the route handler. The result itself can either be: - A Response object (or subclass). - A tuple of a ResponseValue and a header dictionary. - A tuple of a ResponseValue, status code and a header dictionary. A ResponseValue is either a R...
def cache(horizon): def cache_step(func): @wraps(func) def cached(*args): try: data = func.__globals__['__data'] assert cached.cache_t == data['time']() assert hasattr(cached, 'cache_val') assert cached.cache_val is not None...
Put a wrapper around a model function Decorators with parameters are tricky, you have to essentially create a decorator that returns a decorator, which itself then returns the function wrapper. Parameters ---------- horizon: string - 'step' means cache just until the next timestep ...
def FromStream(cls, stream):
    """Create a DataStreamSelector matching exactly one DataStream.

    Args:
        stream (DataStream): The data stream to convert; its ``system``
            flag selects a system-only or user-only match specifier.

    Returns:
        DataStreamSelector: the equivalent selector.
    """
    if stream.system:
        specifier = DataStreamSelector.MatchSystemOnly
    else:
        specifier = DataStreamSelector.MatchUserOnly
    return DataStreamSelector(stream.stream_type, stream.stream_id, specifier)
Create a DataStreamSelector from a DataStream. Args: stream (DataStream): The data stream that we want to convert.
def login(self, username, password=None, email=None, registry=None, reauth=False, **kwargs): response = super(DockerClientWrapper, self).login(username, password, email, registry, reauth=reauth, **kwargs) return response.get('Status') == 'Login Succeeded' or response.get('username') == username
Login to a Docker registry server. :param username: User name for login. :type username: unicode | str :param password: Login password; may be ``None`` if blank. :type password: unicode | str :param email: Optional; email address for login. :type email: unicode | str ...
def type(self, s, enter=False, clear=False):
    """Input text via uiautomator (reported flaky on some devices).

    Args:
        s: text to input, preferably unicode
        enter (bool): press ENTER after typing when True
        clear (bool): clear the existing text first when True
    """
    if clear:
        self.clear_text()
    self._uiauto.send_keys(s)
    if enter:
        self.keyevent('KEYCODE_ENTER')
Input some text, this method has been tested not very stable on some device. "Hi world" maybe spell into "H iworld" Args: - s: string (text to input), better to be unicode - enter(bool): input enter at last - next(bool): perform editor action Next - clear...
def delete(self, path):
    """Issue a DELETE for *path* against the Infoblox device.

    :param path: the reference id to delete
    :rtype: requests.Response
    """
    # NOTE(review): verify=False disables TLS certificate verification --
    # confirm this is intentional for the target appliances.
    return self.session.delete(self._request_url(path), auth=self.auth, verify=False)
Call the Infoblox device to delete the ref :param str ref: The reference id :rtype: requests.Response
def add_child(self, node, callback):
    """Register *node* with its *callback* unless it is already a child."""
    if node in self.children:
        return
    self.children.append(ChildNode(node, callback))
Add node and callback to the children set.
def get_all_tasks(conf): db = HamsterDB(conf) fact_list = db.all_facts_id security_days = int(conf.get_option('tasks.security_days')) today = datetime.today() tasks = {} for fact_id in fact_list: ht = HamsterTask(fact_id, conf, db) if ht.end_time: end_time = ht.get_ob...
Returns a list with every task registred on Hamster.