code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _handle_produce_response(self, node_id, send_time, batches, response):
    """Match a broker produce response to its in-flight batches and complete them.

    :param node_id: broker node the request was sent to (used for throttle metrics)
    :param send_time: time the request was sent (kept for interface compatibility)
    :param batches: record batches that were included in the request
    :param response: ProduceResponse, or falsy when no response is expected (acks=0)
    """
    log.debug('Parsing produce response: %r', response)
    if response:
        # Index batches by partition so each response entry finds its batch in O(1).
        # (Was dict([...]) over a list comprehension -- use a dict comprehension.)
        batches_by_partition = {batch.topic_partition: batch for batch in batches}
        for topic, partitions in response.topics:
            for partition_info in partitions:
                if response.API_VERSION < 2:
                    partition, error_code, offset = partition_info
                    ts = None  # log append time only present from v2 onwards
                else:
                    partition, error_code, offset, ts = partition_info
                tp = TopicPartition(topic, partition)
                error = Errors.for_code(error_code)
                batch = batches_by_partition[tp]
                self._complete_batch(batch, error, offset, ts)
        if response.API_VERSION > 0:
            self._sensors.record_throttle_time(response.throttle_time_ms, node=node_id)
    else:
        # acks=0: no broker response, complete every batch optimistically.
        for batch in batches:
            self._complete_batch(batch, None, -1, None)
Handle a produce response.
def update(self, command=None, **kwargs):
    """Set attributes on the root argparser, or on a subcommand's parser.

    e.g. ``command.update(prog="foo")``
    """
    target = self.argparser if command is None else self[command]
    for name, value in kwargs.items():
        setattr(target, name, value)
update data, which is usually passed in ArgumentParser initialization e.g. command.update(prog="foo")
def method_id(self, api_info):
    """Return the computed, fully-qualified method name."""
    parts = [self.__safe_name(api_info.name)]
    if api_info.resource_name:
        parts.append(self.__safe_name(api_info.resource_name))
    parts.append(self.__safe_name(self.name))
    return '.'.join(parts)
Computed method name.
def get_properties(cls_def):
    """Return the subset of a class definition that describes RDF properties.

    An entry counts as a property when its ``rdf_type`` mentions
    ``rdf_Property`` or it carries an ``rdfs_domain``.
    """
    def _is_property(value):
        return 'rdf_Property' in value.get('rdf_type', "") or value.get('rdfs_domain')

    return {prop: value for prop, value in cls_def.items() if _is_property(value)}
cycles through the class definiton and returns all properties
def location_filter(files_with_tags, location, radius):
    """Return photos taken within ``radius`` of a given point.

    :param files_with_tags: mapping of filename -> EXIF tag dict
    :param location: dict with 'lat' and 'long' keys (decimal degrees)
    :param radius: maximum distance from ``location``
    :return: dict of the files (with their tags) inside the radius
    """
    on_location = dict()
    for f, tags in files_with_tags.items():
        if 'GPS GPSLatitude' in tags:
            try:
                lat = convert_to_decimal(str(tags['GPS GPSLatitude']))
                long = convert_to_decimal(str(tags['GPS GPSLongitude']))
            except ValueError:
                print('{0} has invalid gps info'.format(f))
                # BUGFIX: lat/long are undefined (or stale from a previous
                # iteration) here -- skip this file instead of falling through.
                continue
            try:
                if haversine(lat, long, location['lat'], location['long']) < radius:
                    on_location[f] = tags
            except InvalidCoordinate:
                print('{0} has invalid gps info'.format(f))
    return on_location
Get photos taken within the specified radius from a given point.
def _graphite_url(self, query, raw_data=False, graphite_url=None):
    """Build a Graphite render URL for ``query`` over the alert's time window."""
    base = graphite_url or self.reactor.options.get('public_graphite_url')
    url = "{base}/render/?target={query}&from=-{from_time}&until=-{until}".format(
        base=base,
        query=escape.url_escape(query),
        from_time=self.from_time.as_graphite(),
        until=self.until.as_graphite(),
    )
    return "{}&format=raw".format(url) if raw_data else url
Build Graphite URL.
async def send_api(container, targetname, name, params=None):
    """Send a module API call and discard the result.

    :param container: routine container used to dispatch the event
    :param targetname: name of the target module
    :param name: API method name
    :param params: optional dict of call parameters (BUGFIX: was a mutable
        default argument, shared across all calls)
    """
    if params is None:
        params = {}
    handle = object()
    apiEvent = ModuleAPICall(handle, targetname, name, params=params)
    await container.wait_for_send(apiEvent)
Send API and discard the result
def get_by_name(self, name):
    """Get a device by name.

    :param name: Device name as string.
    :return: :class:`devices.Device <devices.Device>` object
    :rtype: devices.Device
    :raises CDRouterError: if no device has the given name
    """
    rs, _ = self.list(filter=field('name').eq(name), limit=1)
    # BUGFIX: was `len(rs) is 0` -- `is` compares object identity and only
    # works for small ints by CPython accident; use truthiness instead.
    if not rs:
        raise CDRouterError('no such device')
    return rs[0]
Get a device by name. :param name: Device name as string. :return: :class:`devices.Device <devices.Device>` object :rtype: devices.Device
def _handle_captcha(captcha_data, message=''):
    """Ask the user to solve a captcha.

    The PNG image is written to a temporary file whose path is shown to the
    user, who then types in the code.

    Args:
        captcha_data: Bytestring of the PNG captcha image.
        message: Optional. A message from Steam service.

    Returns:
        A string containing the solved captcha code.
    """
    from tempfile import NamedTemporaryFile
    with NamedTemporaryFile(suffix='.png') as tmpf:
        tmpf.write(captcha_data)
        tmpf.flush()
        captcha_text = input('Please take a look at the captcha image "%s" and provide the code:' % tmpf.name)
    return captcha_text
Called when a captcha must be solved Writes the image to a temporary file and asks the user to enter the code. Args: captcha_data: Bytestring of the PNG captcha image. message: Optional. A message from Steam service. Returns: A string containing the solved captcha code.
def upgrade_tools_all(call=None):
    """Upgrade VMware Tools on all virtual machines present in the provider.

    CLI Example:

    .. code-block:: bash

        salt-cloud -f upgrade_tools_all my-vmware-config
    """
    if call != 'function':
        raise SaltCloudSystemExit(
            'The upgrade_tools_all function must be called with '
            '-f or --function.'
        )
    vm_list = salt.utils.vmware.get_mors_with_properties(
        _get_si(), vim.VirtualMachine, ["name"])
    return {vm['name']: _upg_tools_helper(vm['object']) for vm in vm_list}
To upgrade VMware Tools on all virtual machines present in the specified provider .. note:: If the virtual machine is running Windows OS, this function will attempt to suppress the automatic reboot caused by a VMware Tools upgrade. CLI Example: .. code-block:: bash salt-cloud -f upgrade_tools_all my-vmware-config
def _replace_match(m, env):
    """Expand one ``{...}`` tox substitution found by the regex match ``m``."""
    name = m.group()[1:-1].strip()
    # A direct attribute on the env wins.
    try:
        return getattr(env, name)
    except AttributeError:
        pass
    # Otherwise try each substitution handler in turn; ValueError means
    # "not mine, try the next one".
    for handler in (_replace_envvar, _replace_config, _replace_posargs):
        try:
            return handler(name, env)
        except ValueError:
            pass
    raise NotImplementedError("{%s} not understood in tox.ini file." % name)
Given a match object, having matched something inside curly braces, replace the contents if matches one of the supported tox-substitutions.
def setup_and_load_epoch(hparams, data_dir, which_epoch_data=None):
    """Load T2TGymEnv with data from one epoch.

    Args:
        hparams: hparams.
        data_dir: data directory.
        which_epoch_data: data from which epoch to load; "last" picks the
            newest epoch, None starts a fresh placeholder epoch.

    Returns:
        env.
    """
    t2t_env = rl_utils.setup_env(
        hparams, batch_size=hparams.real_batch_size,
        max_num_noops=hparams.max_num_noops
    )
    if which_epoch_data is None:
        t2t_env.start_new_epoch(-999)
        return t2t_env
    if which_epoch_data == "last":
        which_epoch_data = infer_last_epoch_num(data_dir)
    assert isinstance(which_epoch_data, int), \
        "{}".format(type(which_epoch_data))
    t2t_env.start_new_epoch(which_epoch_data, data_dir)
    return t2t_env
Load T2TGymEnv with data from one epoch. Args: hparams: hparams. data_dir: data directory. which_epoch_data: data from which epoch to load. Returns: env.
def customer(self):
    """Return the customer for this subscription (None when not linked)."""
    url = self._get_link('customer')
    if not url:
        return None
    resp = self.client.customers.perform_api_call(self.client.customers.REST_READ, url)
    return Customer(resp)
Return the customer for this subscription.
def load(fp, expand_includes=True, include_position=False, include_comments=False, **kwargs):
    """Load a Mapfile from an open file or file-like object (utf-8 encoded).

    Parameters
    ----------
    fp: file
        A file-like object
    expand_includes: boolean
        Load any ``INCLUDE`` files in the MapFile
    include_comments: boolean
        Include or discard comment strings from the Mapfile - *experimental*
    include_position: boolean
        Include the position of the Mapfile tokens in the output

    Returns
    -------
    dict
        A Python dictionary representing the Mapfile in the mappyfile format
    """
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    return transformer.transform(parser.load(fp))
Load a Mapfile from an open file or file-like object. Parameters ---------- fp: file A file-like object - as with all Mapfiles this should be encoded in "utf-8" expand_includes: boolean Load any ``INCLUDE`` files in the MapFile include_comments: boolean Include or discard comment strings from the Mapfile - *experimental* include_position: boolean Include the position of the Mapfile tokens in the output Returns ------- dict A Python dictionary representing the Mapfile in the mappyfile format Example ------- To open a Mapfile from a file and return it as a dictionary object:: with open('mymap.map') as fp: d = mappyfile.load(fp) Notes ----- Partial Mapfiles can also be opened, for example a file containing a ``LAYER`` object.
def all_settings(self, uppercase_keys=False):
    """Return all settings as a `dict` keyed by setting name."""
    return {key: self.get(key) for key in self.all_keys(uppercase_keys)}
Return all settings as a `dict`.
def _read_requires_python(metadata): value = metadata.dictionary.get("requires_python") if value is not None: return value if metadata._legacy: value = metadata._legacy.get("Requires-Python") if value is not None and value != "UNKNOWN": return value return ""
Read wheel metadata to know the value of Requires-Python. This is surprisingly poorly supported in Distlib. This function tries several ways to get this information: * Metadata 2.0: metadata.dictionary.get("requires_python") is not None * Metadata 2.1: metadata._legacy.get("Requires-Python") is not None * Metadata 1.2: metadata._legacy.get("Requires-Python") != "UNKNOWN"
def get_allowed(allow, disallow):
    """Normalize the given string attributes as the collection of allowed vClasses."""
    if allow is None and disallow is None:
        return SUMO_VEHICLE_CLASSES
    if disallow is None:
        return allow.split()
    blocked = disallow.split()
    return tuple([c for c in SUMO_VEHICLE_CLASSES if c not in blocked])
Normalize the given string attributes as a list of all allowed vClasses.
def _mark_quoted_email_splitlines(markers, lines):
    """Re-mark quoted-header lines ('m') as splitlines ('s').

    A line marked 'm' that matches one of the SPLITTER_PATTERNS becomes 's';
    the updated marker string is returned.
    """
    markerlist = list(markers)
    for i, line in enumerate(lines):
        if markerlist[i] != 'm':
            continue
        if any(re.search(pattern, line) for pattern in SPLITTER_PATTERNS):
            markerlist[i] = 's'
    return "".join(markerlist)
When there are headers indented with '>' characters, this method will attempt to identify if the header is a splitline header. If it is, then we mark it with 's' instead of leaving it as 'm' and return the new markers.
def method(func):
    """Wrap ``func`` as an abstract interface method, tagged for later lookup."""
    wrapped = abc.abstractmethod(func)
    wrapped.__imethod__ = True
    return wrapped
Wrap a function as a method.
def _resolve_version(version):
    """Resolve the LATEST sentinel to the newest setuptools release on PyPI."""
    if version is not LATEST:
        return version
    resp = urlopen('https://pypi.python.org/pypi/setuptools/json')
    with contextlib.closing(resp):
        try:
            charset = resp.info().get_content_charset()
        except Exception:
            # Python 2 compatibility / missing charset header.
            charset = 'UTF-8'
        doc = json.load(codecs.getreader(charset)(resp))
    return str(doc['info']['version'])
Resolve LATEST version
def convert_x_www_form_urlencoded_to_dict(post_data):
    """convert x_www_form_urlencoded data to dict

    Args:
        post_data (str): a=1&b=2

    Returns:
        dict: {"a": "1", "b": "2"}; non-str input is returned unchanged.

    Raises:
        Exception: if a pair has no '=' separator at all.
    """
    if not isinstance(post_data, str):
        return post_data
    converted_dict = {}
    for k_v in post_data.split("&"):
        try:
            # BUGFIX: split on the first '=' only, so values that themselves
            # contain '=' (e.g. base64 padding) no longer raise.
            key, value = k_v.split("=", 1)
        except ValueError:
            raise Exception(
                "Invalid x_www_form_urlencoded data format: {}".format(post_data)
            )
        converted_dict[key] = unquote(value)
    return converted_dict
convert x_www_form_urlencoded data to dict Args: post_data (str): a=1&b=2 Returns: dict: {"a":1, "b":2}
def tag_reachable_scripts(cls, scratch):
    """Tag each script with attribute reachable.

    reachable is False for any script that does not begin with a hat block,
    and for any 'when I receive' script whose event name never appears in a
    broadcast reachable from a running script.
    """
    # Idempotent: skip if a previous pass already tagged this project.
    if getattr(scratch, 'hairball_prepared', False):
        return
    reachable = set()
    untriggered_events = {}
    for script in cls.iter_scripts(scratch):
        if not isinstance(script, kurt.Comment):
            starting_type = cls.script_start_type(script)
            if starting_type == cls.NO_HAT:
                script.reachable = False
            elif starting_type == cls.HAT_WHEN_I_RECEIVE:
                # Tentatively unreachable until a matching broadcast is found.
                script.reachable = False
                message = script[0].args[0].lower()
                untriggered_events.setdefault(message, set()).add(script)
            else:
                script.reachable = True
                reachable.add(script)
    # Worklist: broadcasts from reachable scripts may wake 'when I receive'
    # scripts, whose own broadcasts are then processed in turn.
    while reachable:
        for event in cls.get_broadcast_events(reachable.pop()):
            if event in untriggered_events:
                for script in untriggered_events.pop(event):
                    script.reachable = True
                    reachable.add(script)
    scratch.hairball_prepared = True
Tag each script with attribute reachable. The reachable attribute will be set false for any script that does not begin with a hat block. Additionally, any script that begins with a 'when I receive' block whose event-name doesn't appear in a corresponding broadcast block is marked as unreachable.
def operate_on(self, when=None, apply=False, **kwargs):
    """Return the pzone as of time ``when`` with pending operations applied.

    If ``when`` is in the past, the most recent history snapshot replaces the
    data; otherwise every unapplied operation scheduled up to ``when`` is
    applied. If ``apply`` is True and operations were pending, a celery task
    is queued to persist them.
    """
    pzone = self.get(**kwargs)
    now = timezone.now()
    if when is None:
        when = now
    if when < now:
        # Looking into the past: use the newest history snapshot, if any.
        histories = pzone.history.filter(date__lte=when)
        if histories.exists():
            pzone.data = histories[0].data
    else:
        data = pzone.data
        # Cache holds the time of the next scheduled operation; if nothing is
        # due before `when`, skip the query entirely.
        next_operation_time = cache.get('pzone-operation-expiry-' + pzone.name)
        if next_operation_time is None or next_operation_time < when:
            pending_operations = pzone.operations.filter(when__lte=when, applied=False)
            for operation in pending_operations:
                data = operation.apply(data)
            pzone.data = data
            if apply and pending_operations.exists():
                # Persist asynchronously via celery.
                update_pzone.delay(**kwargs)
    return pzone
Do something with operate_on. If apply is True, all transactions will be applied and saved via celery task.
def KillOldFlows(self):
    """Disable the current cron run if it has exceeded CRON_ARGS.lifetime.

    Returns:
        bool: True if the flow was killed.
    """
    if not self.IsRunning():
        return False
    start_time = self.Get(self.Schema.LAST_RUN_TIME)
    lifetime = self.Get(self.Schema.CRON_ARGS).lifetime
    elapsed = rdfvalue.RDFDatetime.Now() - start_time
    if lifetime and elapsed > lifetime:
        self.StopCurrentRun()
        # Record both the timeout occurrence and how long the run lasted.
        stats_collector_instance.Get().IncrementCounter(
            "cron_job_timeout", fields=[self.urn.Basename()])
        stats_collector_instance.Get().RecordEvent(
            "cron_job_latency", elapsed.seconds, fields=[self.urn.Basename()])
        return True
    return False
Disable cron flow if it has exceeded CRON_ARGS.lifetime. Returns: bool: True if the flow was killed.
def response(code, body='', etag=None, last_modified=None, expires=None, **kw):
    """Helper to build an HTTP response.

    Parameters:
        code: integer status code.
        body: response body (see `Response.__init__`).
        etag: ETag value; double quotes are added unless already present.
        last_modified: Last-Modified value (datetime or Unix timestamp).
        expires: Expires value (seconds in the future, timedelta or datetime).
        **kw: other keyword arguments become headers (underscores -> hyphens,
              title-cased, e.g. `x_powered_by` => `X-Powered-By`).
    """
    if etag is not None:
        # Quote the ETag unless the caller already did.
        if not (etag[0] == '"' and etag[-1] == '"'):
            etag = '"%s"' % etag
        kw['etag'] = etag
    if last_modified is not None:
        kw['last_modified'] = datetime_to_httpdate(last_modified)
    if expires is not None:
        if isinstance(expires, datetime):
            kw['expires'] = datetime_to_httpdate(expires)
        else:
            kw['expires'] = timedelta_to_httpdate(expires)
    headers = []
    for key, value in sorted(kw.items()):
        headers.append((key.replace('_', '-').title(), value))
    return Response(code, headers, body)
Helper to build an HTTP response. Parameters: code : An integer status code. body : The response body. See `Response.__init__` for details. etag : A value for the ETag header. Double quotes will be added unless the string starts and ends with a double quote. last_modified : A value for the Last-Modified header as a datetime.datetime object or Unix timestamp. expires : A value for the Expires header as number of seconds, datetime.timedelta or datetime.datetime object. Note: a value of type int or float is interpreted as a number of seconds in the future, *not* as Unix timestamp. **kw : All other keyword arguments are interpreted as response headers. The names will be converted to header names by replacing underscores with hyphens and converting to title case (e.g. `x_powered_by` => `X-Powered-By`).
def parse_footnotes(document, xmlcontent):
    """Parse the footnotes part ('footnotes.xml') into ``document.footnotes``."""
    footnotes = etree.fromstring(xmlcontent)
    document.footnotes = {}
    # Separator-style pseudo-footnotes carry no user content.
    skipped_types = ['separator', 'continuationSeparator', 'continuationNotice']
    for footnote in footnotes.xpath('.//w:footnote', namespaces=NAMESPACES):
        if footnote.attrib.get(_name('{{{w}}}type'), None) in skipped_types:
            continue
        paragraphs = [
            parse_paragraph(document, para)
            for para in footnote.xpath('.//w:p', namespaces=NAMESPACES)
        ]
        document.footnotes[footnote.attrib[_name('{{{w}}}id')]] = paragraphs
Parse footnotes document. Footnotes are defined in file 'footnotes.xml'
def cancel(self):
    """Cancel the consumer and clean up resources associated with it.

    Consumers that are canceled are allowed to finish processing any
    messages before halting.

    Returns:
        defer.Deferred: fires when the consumer has finished any in-flight
        message and has been successfully canceled.
    """
    # Deregister from both the protocol and factory consumer maps; either
    # may already be gone if the connection dropped.
    try:
        del self._protocol._consumers[self.queue]
    except (KeyError, AttributeError):
        pass
    try:
        del self._protocol.factory._consumers[self.queue]
    except (KeyError, AttributeError):
        pass
    self._running = False
    # Let the in-flight read loop drain before cancelling on the broker.
    yield self._read_loop
    try:
        yield self._channel.basic_cancel(consumer_tag=self._tag)
    except pika.exceptions.AMQPChannelError:
        pass
    try:
        yield self._channel.close()
    except pika.exceptions.AMQPChannelError:
        pass
    if not self.result.called:
        self.result.callback(self)
Cancel the consumer and clean up resources associated with it. Consumers that are canceled are allowed to finish processing any messages before halting. Returns: defer.Deferred: A deferred that fires when the consumer has finished processing any message it was in the middle of and has been successfully canceled.
def info(self, msg=None, *args, **kwargs):
    """Similar to DEBUG but at INFO level."""
    level = logging.INFO
    return self._log(level, msg, args, kwargs)
Similar to DEBUG but at INFO level.
def gender(self):
    """Scrape the grammatical gender ('m', 'f' or 'n') from the NN element.

    Returns None when no noun element is present or no gender marker is found.
    """
    element = self._first('NN')
    if element:
        # BUGFIX: the old pattern '([m|f|n)])\.' placed '|' and ')' inside the
        # character class, so '|.' and ').' also matched. Match a standalone
        # m./f./n. marker instead, and reuse the single search result.
        match = re.search(r'\b([mfn])\.', element, re.U)
        if match:
            return match.group(1)
Tries to scrape the gender for a given noun from leo.org.
def _shutdown_cherrypy(self):
    """Shut down cherrypy in one second, if it's running."""
    running = cherrypy.engine.state == cherrypy.engine.states.STARTED
    if running:
        # Delay so the current request can finish before the engine exits.
        threading.Timer(1, cherrypy.engine.exit).start()
Shutdown cherrypy in one second, if it's running
def class_result(classname):
    """Errcheck factory: wrap non-None ctypes results in ``classname``."""
    def wrap_errcheck(result, func, arguments):
        return None if result is None else classname(result)
    return wrap_errcheck
Errcheck function. Returns a function that creates the specified class.
def n_sections(neurites, neurite_type=NeuriteType.all, iterator_type=Tree.ipreorder):
    """Number of sections in a collection of neurites."""
    sections = iter_sections(neurites,
                             iterator_type=iterator_type,
                             neurite_filter=is_type(neurite_type))
    return sum(1 for _ in sections)
Number of sections in a collection of neurites
def stop_change(self):
    """Stop a manual dimmer level change; return True on confirmed success."""
    self.logger.info("Dimmer %s stop_change", self.device_id)
    self.hub.direct_command(self.device_id, '18', '00')
    success = self.hub.check_success(self.device_id, '18', '00')
    if not success:
        self.logger.error("Dimmer %s stop_change: Light did not stop",
                          self.device_id)
        return success
    self.logger.info("Dimmer %s stop_change: Light stopped changing successfully",
                     self.device_id)
    # Command confirmed: drop any cached command state for this device.
    self.hub.clear_device_command_cache(self.device_id)
    return success
Stop changing light level manually
def disable_all(self, disable):
    """Disable all modulation and outputs of the Stanford MW func. generator.

    The command string is always logged; it is only written to the
    instrument when ``disable`` is truthy.
    """
    command_string = '\n'.join(['ENBH 0', 'ENBL 0', 'MODL 0'])
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    if disable:
        self.instr.write(command_string)
Disables all modulation and outputs of the Standford MW func. generator
def tcpip(self, port: "int | str" = 5555) -> None:
    """Restart adb server listening on TCP on PORT.

    :param port: TCP port number (int or numeric string), defaults to 5555.
    """
    # BUGFIX: the annotation was `int or str`, which evaluates to just `int`;
    # a string annotation expresses the union without any new import.
    self._execute('-s', self.device_sn, 'tcpip', str(port))
Restart adb server listening on TCP on PORT.
def parent(self):
    """Return parent resource.

    :rtype: Resource
    :raises ResourceNotFound: parent resource doesn't exist
    :raises ResourceMissing: parent resource is not defined
    """
    try:
        return Resource(self['parent_type'],
                        uuid=self['parent_uuid'],
                        check=True)
    except KeyError:
        # No parent_type/parent_uuid keys on this resource.
        raise ResourceMissing('%s has no parent resource' % self)
Return parent resource :rtype: Resource :raises ResourceNotFound: parent resource doesn't exists :raises ResourceMissing: parent resource is not defined
def from_filename(cls, filename):
    """Class constructor using the path to the corresponding mp3 file.

    The metadata is read from the file's ID3 tags, which must at least
    contain valid artist and title entries. Returns None on any error.
    """
    if not filename:
        logger.error('No filename specified')
        return None
    if not os.path.exists(filename):
        logger.error("Err: File '%s' does not exist", filename)
        return None
    if os.path.isdir(filename):
        logger.error("Err: File '%s' is a directory", filename)
        return None
    try:
        audiofile = eyed3.load(filename)
    except Exception as error:
        # Best-effort: report and bail out rather than propagate.
        print(type(error), error)
        return None
    # File is not a valid audio file.
    if audiofile is None:
        return None
    tags = audiofile.tag
    album = tags.album
    title = tags.title
    lyrics = ''.join([l.text for l in tags.lyrics])
    # Prefer the album artist; fall back to the track artist.
    artist = tags.album_artist
    if not artist:
        artist = tags.artist
    song = cls(artist, title, album, lyrics)
    song.filename = filename
    return song
Class constructor using the path to the corresponding mp3 file. The metadata will be read from this file to create the song object, so it must at least contain valid ID3 tags for artist and title.
def next_batch(self):
    """Query the API for the next batch of objects; store the next URL,
    the total count and the current batch."""
    request = AtlasRequest(
        url_path=self.atlas_url,
        user_agent=self._user_agent,
        server=self.server,
        verify=self.verify,
    )
    is_success, results = request.get()
    if not is_success:
        raise APIResponseError(results)
    self.total_count = results.get("count")
    self.atlas_url = self.build_next_url(results.get("next"))
    self.current_batch = results.get("results", [])
Querying API for the next batch of objects and store next url and batch of objects.
def iget(self, irods_path, attempts=1, pause=15):
    """Add an iget command to retrieve a file from iRODS.

    Parameters
    ----------
    irods_path: str
        Filepath which should be fetched using iget
    attempts: int (default: 1)
        Number of retries, if iRODS access fails
    pause: int (default: 15)
        Pause between two access attempts in seconds
    """
    if attempts > 1:
        # NOTE(review): `cmd` is read here before it is ever assigned -- the
        # multi-line retry-command template string appears to have been lost;
        # as written this branch raises NameError. TODO: restore the template
        # that `.format(attempts, irods_path, pause)` expects.
        cmd = cmd = lstrip(cmd)
        cmd = cmd.format(attempts, irods_path, pause)
        self.add(cmd)
    else:
        self.add('iget -v "{}"'.format(irods_path))
Add an iget command to retrieve a file from iRODS. Parameters ---------- irods_path: str Filepath which should be fetched using iget attempts: int (default: 1) Number of retries, if iRODS access fails pause: int (default: 15) Pause between two access attempts in seconds
def handle_bad_update(operation, ret):
    """Report a failed update and abort the program.

    :param operation: description of what was being attempted
    :param ret: HTTP response object with ``status_code`` and ``text``
    """
    print("Error " + operation)
    message = 'Return code: {} Error: {}'.format(ret.status_code, ret.text)
    sys.exit(message)
report error for bad update
def from_csv(cls, path):
    """Get box vectors from comma-separated values in file `path`.

    The csv file must contain only one line, with either three values
    (orthogonal vectors) or nine values (triclinic box), in nanometers.

    Parameters
    ----------
    path : str
        Path to CSV file

    Returns
    -------
    vectors : simtk.unit.Quantity([3, 3], unit=nanometers)
    """
    with open(path) as f:
        # BUGFIX: map() returns an iterator on Python 3; materialize it so
        # len() and repeated indexing work.
        fields = list(map(float, next(f).split(',')))
    if len(fields) == 3:
        return u.Quantity([[fields[0], 0, 0],
                           [0, fields[1], 0],
                           [0, 0, fields[2]]],
                          unit=u.nanometers)
    elif len(fields) == 9:
        return u.Quantity([fields[0:3], fields[3:6], fields[6:9]],
                          unit=u.nanometers)
    else:
        raise ValueError('This type of CSV is not supported. Please '
                         'provide a comma-separated list of three or nine '
                         'floats in a single-line file.')
Get box vectors from comma-separated values in file `path`. The csv file must contain only one line, which in turn can contain three values (orthogonal vectors) or nine values (triclinic box). The values should be in nanometers. Parameters ---------- path : str Path to CSV file Returns ------- vectors : simtk.unit.Quantity([3, 3], unit=nanometers
def cal(self, opttype, strike, exp1, exp2):
    """Metrics for evaluating a calendar spread.

    Parameters
    ------------
    opttype : str ('call' or 'put')
        Type of option on which to collect data.
    strike : numeric
        Strike price.
    exp1 : date or date str (e.g. '2015-01-01')
        Earlier expiration date.
    exp2 : date or date str (e.g. '2015-01-01')
        Later expiration date.

    Returns
    ------------
    metrics : DataFrame
        Metrics for evaluating spread.
    """
    # A calendar spread requires the near leg to expire before the far leg.
    assert pd.Timestamp(exp1) < pd.Timestamp(exp2)
    _row1 = _relevant_rows(self.data, (strike, exp1, opttype,),
                           "No key for {} strike {} {}".format(exp1, strike, opttype))
    _row2 = _relevant_rows(self.data, (strike, exp2, opttype,),
                           "No key for {} strike {} {}".format(exp2, strike, opttype))
    _price1 = _getprice(_row1)
    _price2 = _getprice(_row2)
    # Underlying price and quote time are taken from the near-leg row.
    _eq = _row1.loc[:, 'Underlying_Price'].values[0]
    _qt = _row1.loc[:, 'Quote_Time'].values[0]
    _index = ['Near', 'Far', 'Debit', 'Underlying_Price', 'Quote_Time']
    _vals = np.array([_price1, _price2, _price2 - _price1, _eq, _qt])
    return pd.DataFrame(_vals, index=_index, columns=['Value'])
Metrics for evaluating a calendar spread. Parameters ------------ opttype : str ('call' or 'put') Type of option on which to collect data. strike : numeric Strike price. exp1 : date or date str (e.g. '2015-01-01') Earlier expiration date. exp2 : date or date str (e.g. '2015-01-01') Later expiration date. Returns ------------ metrics : DataFrame Metrics for evaluating spread.
def get_django_settings(cls, name, default=None):
    """Get params from Django settings, caching the settings object on cls.

    :param name: name of param
    :type name: str,unicode
    :param default: default value of param
    :type default: object
    :return: Param from Django settings or default.
    """
    if hasattr(cls, '__django_settings__'):
        return getattr(cls.__django_settings__, name, default)
    from django.conf import settings
    cls.__django_settings__ = settings
    # BUGFIX: propagate `default` into the recursive call; previously the
    # very first lookup silently dropped the caller's default.
    return cls.get_django_settings(name, default)
Get params from Django settings. :param name: name of param :type name: str,unicode :param default: default value of param :type default: object :return: Param from Django settings or default.
def gendict(cls, *args, **kwargs):
    """Pre-translated key dictionary constructor. See :type:`dict` for more info.

    :returns: dictionary whose keys have been passed through ``cls.genkey``
    :rtype: dict
    """
    translate = cls.genkey
    return {translate(key): value
            for key, value in dict(*args, **kwargs).items()}
Pre-translated key dictionary constructor. See :type:`dict` for more info. :returns: dictionary with uppercase keys :rtype: dict
def get_contacts_of_client_per_page(self, client_id, per_page=1000, page=1):
    """Get contacts of client per page.

    :param client_id: the client id
    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :return: list
    """
    params = {'client_id': client_id}
    return self._get_resource_per_page(resource=CONTACTS,
                                       per_page=per_page,
                                       page=page,
                                       params=params)
Get contacts of client per page :param client_id: the client id :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :return: list
def get_uint32(self):
    """Read the next token and interpret it as a 32-bit unsigned integer.

    @raises dns.exception.SyntaxError:
    @rtype: int
    """
    token = self.get().unescape()
    if not token.is_identifier():
        raise dns.exception.SyntaxError('expecting an identifier')
    if not token.value.isdigit():
        raise dns.exception.SyntaxError('expecting an integer')
    # BUGFIX: Python 3 has no `long` type or `L` literals, and the old bound
    # (`> 4294967296`) wrongly accepted 2**32 itself as a valid value.
    value = int(token.value)
    if value < 0 or value > 4294967295:
        raise dns.exception.SyntaxError('%d is not an unsigned 32-bit integer' % value)
    return value
Read the next token and interpret it as a 32-bit unsigned integer. @raises dns.exception.SyntaxError: @rtype: int
def print_typedefs(self, w=0, **print3opts):
    """Print the types and dict tables.

    *w=0* -- indentation for each line
    *print3opts* -- print options, as in Python 3.0
    """
    for k in _all_kinds:
        # One table per kind; only typedefs matching the current code mode.
        t = [(self._prepr(a), v) for a, v in _items(_typedefs)
             if v.kind == k and (v.both or self._code_)]
        if t:
            self._printf('%s%*d %s type%s: basicsize, itemsize, _len_(), _refs()',
                         linesep, w, len(t), k, _plural(len(t)), **print3opts)
            for a, v in _sorted(t):
                self._printf('%*s %s: %s', w, '', a, v, **print3opts)
    # Summary table of dict-like classes.
    t = _sum([len(v) for v in _values(_dict_classes)])
    if t:
        self._printf('%s%*d dict/-like classes:', linesep, w, t, **print3opts)
        for m, v in _items(_dict_classes):
            self._printf('%*s %s: %s', w, '', m, self._prepr(v), **print3opts)
Print the types and dict tables. *w=0* -- indentation for each line *print3options* -- print options, as in Python 3.0
def disable_code_breakpoint(self, dwProcessId, address):
    """Disables the code breakpoint at the given address.

    @see: L{define_code_breakpoint}, L{has_code_breakpoint},
        L{get_code_breakpoint}, L{enable_code_breakpoint},
        L{enable_one_shot_code_breakpoint}, L{erase_code_breakpoint}

    @type dwProcessId: int
    @param dwProcessId: Process global ID.

    @type address: int
    @param address: Memory address of breakpoint.
    """
    process = self.system.get_process(dwProcessId)
    code_bp = self.get_code_breakpoint(dwProcessId, address)
    if code_bp.is_running():
        # A running breakpoint must be detached from all threads first.
        self.__del_running_bp_from_all_threads(code_bp)
    code_bp.disable(process, None)
Disables the code breakpoint at the given address. @see: L{define_code_breakpoint}, L{has_code_breakpoint}, L{get_code_breakpoint}, L{enable_code_breakpoint} L{enable_one_shot_code_breakpoint}, L{erase_code_breakpoint}, @type dwProcessId: int @param dwProcessId: Process global ID. @type address: int @param address: Memory address of breakpoint.
def is_numeric(value, minimum=None, maximum=None, **kwargs):
    """Indicate whether ``value`` is a numeric value within optional bounds.

    :param value: The value to evaluate.
    :param minimum: If supplied, ``value`` must be >= this value.
    :param maximum: If supplied, ``value`` must be <= this value.
    :returns: ``True`` if ``value`` is valid, ``False`` if it is not.
    :rtype: :class:`bool <python:bool>`
    :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters
    """
    try:
        validators.numeric(value, minimum=minimum, maximum=maximum, **kwargs)
    except SyntaxError as error:
        # Duplicate keyword parameters are a programming error; re-raise.
        raise error
    except Exception:
        return False
    return True
Indicate whether ``value`` is a numeric value. :param value: The value to evaluate. :param minimum: If supplied, will make sure that ``value`` is greater than or equal to this value. :type minimum: numeric :param maximum: If supplied, will make sure that ``value`` is less than or equal to this value. :type maximum: numeric :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator
def _glob(filenames):
    """Glob a filename or list of filenames.

    A pattern that matched nothing is kept as-is, so URLs for remote file
    access are not clobbered.
    """
    if isinstance(filenames, string_types):
        filenames = [filenames]
    matches = []
    for name in filenames:
        matches.extend(glob(name) or [name])
    return matches
Glob a filename or list of filenames but always return the original string if the glob didn't match anything so URLs for remote file access are not clobbered.
async def send_message(self, message, **kwargs):
    """Coroutine to send message to the client.

    If server sends UNARY response, then you should call this coroutine
    only once. If server sends STREAM response, then you can call this
    coroutine as many times as you need.

    :param message: message object
    """
    if 'end' in kwargs:
        # Legacy flag; callers should use send_trailing_metadata instead.
        warnings.warn('"end" argument is deprecated, use '
                      '"stream.send_trailing_metadata" explicitly',
                      stacklevel=2)
    end = kwargs.pop('end', False)
    assert not kwargs, kwargs
    if not self._send_initial_metadata_done:
        # Headers are sent lazily with the first message.
        await self.send_initial_metadata()
    if not self._cardinality.server_streaming:
        # Unary response: a second message is a protocol violation.
        if self._send_message_count:
            raise ProtocolError('Server should send exactly one message '
                                'in response')
    message, = await self._dispatch.send_message(message)
    await send_message(self._stream, self._codec, message, self._send_type)
    self._send_message_count += 1
    if end:
        await self.send_trailing_metadata()
Coroutine to send message to the client. If server sends UNARY response, then you should call this coroutine only once. If server sends STREAM response, then you can call this coroutine as many times as you need. :param message: message object
def _QueryHash(self, digest):
    """Queries the Viper Server for a specific hash.

    Args:
        digest (str): hash to look up.

    Returns:
        dict[str, object]: JSON response or None on error.
    """
    if not self._url:
        # Build the endpoint URL lazily and cache it for later queries.
        self._url = '{0:s}://{1:s}:{2:d}/file/find'.format(
            self._protocol, self._host, self._port)
    request_data = {self.lookup_hash: digest}
    try:
        return self.MakeRequestAndDecodeJSON(self._url, 'POST', data=request_data)
    except errors.ConnectionError as exception:
        logger.error('Unable to query Viper with error: {0!s}.'.format(
            exception))
        return None
Queries the Viper Server for a specfic hash. Args: digest (str): hash to look up. Returns: dict[str, object]: JSON response or None on error.
def draw(self):
    """Renders the class balance chart on the specified axes from support."""
    colors = resolve_colors(len(self.support_))
    if self._mode == BALANCE:
        # Single bar per class.
        positions = np.arange(len(self.support_))
        self.ax.bar(positions, self.support_, color=colors,
                    align='center', width=0.5)
        return self.ax
    # Compare mode: paired train/test bars per class, offset by bar_width.
    bar_width = 0.35
    labels = ["train", "test"]
    for idx, support in enumerate(self.support_):
        index = np.arange(len(self.classes_))
        if idx > 0:
            index = index + bar_width
        self.ax.bar(index, support, bar_width,
                    color=colors[idx], label=labels[idx])
    return self.ax
Renders the class balance chart on the specified axes from support.
def similar(self, similarity):
    """Returns a new Pattern with the specified similarity threshold."""
    new_pattern = Pattern(self.path)
    new_pattern.similarity = similarity
    return new_pattern
Returns a new Pattern with the specified similarity threshold
def max_knob_end_distance(self):
    """Maximum distance between knob_end and each of the hole side-chain centres."""
    return max(distance(self.knob_end, hole_residue)
               for hole_residue in self.hole)
Maximum distance between knob_end and each of the hole side-chain centres.
def prep_hla(work_dir, sample, calls, hlas, normal_bam, tumor_bam):
    """Convert HLAs into ABSOLUTE format for use with LOHHLA.

    LOHHLA hard codes names to hla_a, hla_b, hla_c, so allele names are
    remapped via get_hla_choice. Writes one allele per line into a
    per-sample hlas.txt file and returns its path.
    """
    work_dir = utils.safe_makedir(os.path.join(work_dir, sample, "inputs"))
    hla_file = os.path.join(work_dir, "%s-hlas.txt" % sample)
    with open(calls) as in_handle:
        with open(hla_file, "w") as out_handle:
            # Skip the CSV header line.
            next(in_handle)
            for line in in_handle:
                # Third CSV column holds the two alleles, ';'-separated.
                _, _, a, _, _ = line.strip().split(",")
                a1, a2 = a.split(";")
                out_handle.write(get_hla_choice(name_to_absolute(a1),
                                                hlas, normal_bam, tumor_bam) + "\n")
                out_handle.write(get_hla_choice(name_to_absolute(a2),
                                                hlas, normal_bam, tumor_bam) + "\n")
    return hla_file
Convert HLAs into ABSOLUTE format for use with LOHHLA. LOHHLA hard codes names to hla_a, hla_b, hla_c so need to move
def AuthorizeUser(self, user, subject):
    """Allow given user access to a given subject."""
    if subject not in self.authorized_users:
        self.authorized_users[subject] = set()
    self.authorized_users[subject].add(user)
Allow given user access to a given subject.
def _vars(ftype, name, *dims):
    """Return a new farray filled with Boolean variables."""
    shape = _dims2shape(*dims)
    ranges = [range(i, j) for i, j in shape]
    objs = [_VAR[ftype](name, indices)
            for indices in itertools.product(*ranges)]
    return farray(objs, shape, ftype)
Return a new farray filled with Boolean variables.
def delete(self, reason=None):
    """Delete the user's account from Todoist.

    .. warning:: You cannot recover the user after deletion!

    :param reason: The reason for deletion.
    :type reason: str
    """
    response = API.delete_user(self.api_token, self.password,
                               reason=reason, in_background=0)
    # Raise if Todoist reported an error for this request.
    _fail_if_contains_errors(response)
Delete the user's account from Todoist. .. warning:: You cannot recover the user after deletion! :param reason: The reason for deletion. :type reason: str >>> from pytodoist import todoist >>> user = todoist.login('john.doe@gmail.com', 'password') >>> user.delete() ... # The user token is now invalid and Todoist operations will fail.
def use_plenary_composition_view(self):
    """Pass through to provider CompositionLookupSession.use_plenary_composition_view"""
    self._object_views['composition'] = PLENARY
    # Propagate the view to every provider session that supports it;
    # sessions without the method are simply skipped.
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_plenary_composition_view()
        except AttributeError:
            pass
Pass through to provider CompositionLookupSession.use_plenary_composition_view
def more_search(self, more_page):
    """Fetch additional result pages and append them to the existing result.

    ``more_page`` determines how many extra result pages are requested.
    """
    first = self.current_page + 1
    last = self.current_page + more_page
    for page in range(first, last + 1):
        start = "start={0}".format(str((page - 1) * 10))
        url = "{0}{1}&{2}".format(self.google, self.query, start)
        self._execute_search_request(url)
        self.current_page += 1
Method to add more result to an already exist result. more_page determine how many result page should be added to the current result.
def cli(env, identifier):
    """Retrieve credentials used for generating an AWS signature.  Max of 2."""
    mgr = SoftLayer.ObjectStorageManager(env.client)
    table = formatting.Table(['id', 'password', 'username', 'type_name'])
    for credential in mgr.list_credential(identifier):
        row = [
            credential['id'],
            credential['password'],
            credential['username'],
            credential['type']['name'],
        ]
        table.add_row(row)
    env.fout(table)
Retrieve credentials used for generating an AWS signature. Max of 2.
def _populate_bunch_with_element(element): if 'value' in element.attrib: return element.get('value') current_bunch = Bunch() if element.get('id'): current_bunch['nextra_element_id'] = element.get('id') for subelement in element.getchildren(): current_bunch[subelement.tag] = _populate_bunch_with_element( subelement) return current_bunch
Helper function to recursively populates a Bunch from an XML tree. Returns leaf XML elements as a simple value, branch elements are returned as Bunches containing their subelements as value or recursively generated Bunch members.
def u(data, bits=None, endian=None, target=None):
    """Unpack a signed pointer for a given target.

    Args:
        data(bytes): The data to unpack.
        bits: Override the default word size (falls back to the target's).
        endian: Override the default byte order (falls back to the target's).
        target: Override the default global target.

    Returns:
        int: The pointer value.
    """
    width = _get_bits(bits, target)
    # Dispatch to the width-specific unpacker (u8/u16/u32/u64).
    unpacker = globals()['u%d' % width]
    return unpacker(data, endian=endian, target=target)
Unpack a signed pointer for a given target. Args: data(bytes): The data to unpack. bits(:class:`pwnypack.target.Target.Bits`): Override the default word size. If ``None`` it will look at the word size of ``target``. endian(:class:`~pwnypack.target.Target.Endian`): Override the default byte order. If ``None``, it will look at the byte order of the ``target`` argument. target(:class:`~pwnypack.target.Target`): Override the default byte order. If ``None``, it will look at the byte order of the global :data:`~pwnypack.target.target`. Returns: int: The pointer value.
def checksum(digits, scale):
    """Calculate the "Module 11" checksum of Norwegian personal identity digits.

    Each digit is multiplied by the matching scale entry and summed; the
    checksum is 11 minus the sum modulo 11, with 11 mapped to 0.
    """
    weighted_sum = sum(d * s for d, s in zip(digits, scale))
    check_digit = 11 - (weighted_sum % 11)
    return 0 if check_digit == 11 else check_digit
Calculate checksum of Norwegian personal identity code. Checksum is calculated with "Module 11" method using a scale. The digits of the personal code are multiplied by the corresponding number in the scale and summed; if remainder of module 11 of the sum is less than 10, checksum is the remainder. If remainder is 0, the checksum is 0. https://no.wikipedia.org/wiki/F%C3%B8dselsnummer
def pathparse(value, sep=os.pathsep, os_sep=os.sep):
    """Yield the normalized directories of a PATH-style string.

    Handles backslash escapes (only when backslash is not itself a
    separator), strips a single trailing filesystem separator from each
    entry, and normalizes every path for the target OS flavour.

    :param value: path string, as given by os.environ['PATH']
    :param sep: PATH entry separator, defaults to os.pathsep
    :param os_sep: filesystem path separator, defaults to os.sep
    :yields: every normalized path entry as str
    """
    escapes = []
    # Pick the normalizer matching the filesystem flavour implied by os_sep.
    normpath = ntpath.normpath if os_sep == '\\' else posixpath.normpath
    # Escape handling only applies when backslash is not a separator
    # (i.e. POSIX-style paths); on Windows '\\' is the path separator.
    if '\\' not in (os_sep, sep):
        escapes.extend((
            ('\\\\', '<ESCAPE-ESCAPE>', '\\'),
            ('\\"', '<ESCAPE-DQUOTE>', '"'),
            ('\\\'', '<ESCAPE-SQUOTE>', '\''),
            ('\\%s' % sep, '<ESCAPE-PATHSEP>', sep),
        ))
    # Swap escaped sequences for placeholder tokens so the split below
    # does not break on escaped separators.
    for original, escape, unescape in escapes:
        value = value.replace(original, escape)
    for part in pathsplit(value, sep=sep):
        # Drop one trailing separator, but keep the bare root ("/") intact.
        if part[-1:] == os_sep and part != os_sep:
            part = part[:-1]
        # Restore the escaped characters inside each entry.
        for original, escape, unescape in escapes:
            part = part.replace(escape, unescape)
        yield normpath(fsdecode(part))
Get environment PATH directories as a list. This function handles splitting, escapes and normalization of paths across OSes. :param value: path string, as given by os.environ['PATH'] :type value: str :param sep: PATH separator, defaults to os.pathsep :type sep: str :param os_sep: OS filesystem path separator, defaults to os.sep :type os_sep: str :yields: every path :ytype: str
def get(self, run_id, metric_id):
    """Read a single metric of the given run from the database.

    :param run_id: ID of the run that the metric belongs to.
    :param metric_id: ID of the metric to read.
    :return: the metric as an intermediary object (steps, timestamps,
        values, name, metric_id, run_id).
    :raise NotFoundError: when the metric does not exist.
    """
    parsed_run_id = self._parse_run_id(run_id)
    query = self._build_query(parsed_run_id, metric_id)
    row = self._read_metric_from_db(metric_id, parsed_run_id, query)
    return self._to_intermediary_object(row)
Read a metric of the given id and run. The returned object has the following format (timestamps are datetime objects). .. code:: {"steps": [0,1,20,40,...], "timestamps": [timestamp1,timestamp2,timestamp3,...], "values": [0,1,2,3,4,5,6,...], "name": "name of the metric", "metric_id": "metric_id", "run_id": "run_id"} :param run_id: ID of the Run that the metric belongs to. :param metric_id: The ID of the metric. :return: The whole metric as specified. :raise NotFoundError
def _print_details(extra=None): def print_node_handler(name, node, depth): line = "{0}{1} {2} ({3}:{4})".format(depth, (" " * depth), name, node.line, node.col) if extra is not None: line += " [{0}]".format(extra(node)) sys.stdout.write(line + "\n") return print_node_handler
Return a function that prints node details.
def get_server_alerts(call=None, for_output=True, **kwargs):
    """Return a list of alerts from CLC as reported by their infra.

    :param call: unused; kept for interface compatibility.
    :param for_output: unused; kept for interface compatibility.
    :param kwargs: may contain ``servername``, the server whose alerts
        are requested (defaults to "").
    :returns: the alerts reported by CLC for the server.
    """
    # Bug fix: the original reset ``servername`` to "" on every iteration
    # of kwargs, so any kwarg iterated after 'servername' clobbered it.
    servername = kwargs.get("servername", "")
    creds = get_creds()
    clc.v2.SetCredentials(creds["user"], creds["password"])
    return clc.v2.Server(servername).Alerts()
Return a list of alerts from CLC as reported by their infra
def ajax_login(request):
    """Accept a POST request to log a user in.

    :param request: `django.http.HttpRequest` carrying mandatory POST
        parameters ``username`` and ``password``.
    :returns: a plain-text HttpResponse (200 on success, 403 otherwise).
    """
    user = authenticate(username=request.POST['username'],
                        password=request.POST['password'])
    if user is None:
        return HttpResponse(content='Invalid login',
                            content_type='text/plain', status=403)
    if not user.is_active:
        return HttpResponse(content='Disabled account',
                            content_type='text/plain', status=403)
    login(request, user)
    return HttpResponse(content='Successful login',
                        content_type='text/plain', status=200)
Accept a POST request to login. :param request: `django.http.HttpRequest` object, containing mandatory parameters username and password required.
def update_pipe_channel(self, uid, channel_name, label):
    """Subscribe this consumer to the pipe group for *channel_name*.

    Moves the consumer out of its previous group (if any) for the widget
    identified by ``uid`` and into the group derived from *channel_name*.

    :param uid: identifier of the js widget this subscription belongs to
    :param channel_name: name of the pipe channel to listen on
    :param label: unused in this method -- presumably kept for interface
        compatibility with callers; TODO confirm
    """
    pipe_group_name = _form_pipe_channel_name(channel_name)
    if self.channel_layer:
        current = self.channel_maps.get(uid, None)
        # Only re-subscribe when the target group actually changed.
        if current != pipe_group_name:
            if current:
                # Leave the previous group before joining the new one.
                async_to_sync(self.channel_layer.group_discard)(current, self.channel_name)
            self.channel_maps[uid] = pipe_group_name
            async_to_sync(self.channel_layer.group_add)(pipe_group_name, self.channel_name)
Update this consumer to listen on channel_name for the js widget associated with uid
def indent(indent_str=None):
    """An example indentation ruleset.

    :param indent_str: the indentation string handed to the Indentator.
    :returns: a zero-argument rule factory producing layout handlers.
    """
    def indentation_rule():
        indentator = Indentator(indent_str)
        handlers = {
            Indent: indentator.layout_handler_indent,
            Dedent: indentator.layout_handler_dedent,
            Newline: indentator.layout_handler_newline,
            OptionalNewline: indentator.layout_handler_newline_optional,
            OpenBlock: layout_handler_openbrace,
            CloseBlock: layout_handler_closebrace,
            EndStatement: layout_handler_semicolon,
        }
        return {'layout_handlers': handlers}

    return indentation_rule
An example indentation ruleset.
def _load_torrents_directory(self):
    """Load the torrents directory.

    If it does not exist yet, this request causes the system to create one.
    """
    response = self._req_lixian_get_id(torrent=True)
    self._downloads_directory = self._load_directory(response['cid'])
Load torrents directory If it does not exist yet, this request will cause the system to create one
def get_parent(self, log_info):
    """Resolve the parent container path for the log sink.

    :param log_info: object exposing ``scope_type``/``scope_id``; only
        consulted when the configured scope is 'log'.
    :returns: a parent path such as ``projects/<id>``.
    :raises ValueError: for an invalid scope, or a log scope whose
        ``scope_type`` is not 'projects'.
    """
    scope = self.data.get('scope', 'log')
    if scope == 'log':
        if log_info.scope_type != 'projects':
            raise ValueError("Invalid log subscriber scope")
        return "%s/%s" % (log_info.scope_type, log_info.scope_id)
    if scope == 'project':
        # scope_id falls back to the session's default project.
        scope_id = self.data.get('scope_id',
                                 self.session.get_default_project())
        return 'projects/{}'.format(scope_id)
    prefixes = {
        'organization': 'organizations',
        'folder': 'folders',
        'billing': 'billingAccounts',
    }
    if scope in prefixes:
        return '{}/{}'.format(prefixes[scope], self.data['scope_id'])
    raise ValueError(
        'invalid log subscriber scope %s' % (self.data))
Get the parent container for the log sink
def _generate_cpu_stats():
    """Read the processor name and build the CPU header widgets."""
    try:
        cpu_name = urwid.Text(get_processor_name().strip(), align="center")
    except OSError:
        # Fall back to a placeholder when the name cannot be read.
        logging.info("CPU name not available")
        cpu_name = urwid.Text("CPU Name N/A", align="center")
    return [urwid.Text(('bold text', "CPU Detected"), align="center"),
            cpu_name,
            urwid.Divider()]
Read and display processor name
def root_manifest_id(self, root_manifest_id):
    """Set the root_manifest_id of this UpdateCampaignPutRequest.

    :param root_manifest_id: the new value; at most 32 characters when
        not None.
    :raises ValueError: when the value exceeds 32 characters.
    """
    too_long = root_manifest_id is not None and len(root_manifest_id) > 32
    if too_long:
        raise ValueError("Invalid value for `root_manifest_id`, length must be less than or equal to `32`")
    self._root_manifest_id = root_manifest_id
Sets the root_manifest_id of this UpdateCampaignPutRequest. :param root_manifest_id: The root_manifest_id of this UpdateCampaignPutRequest. :type: str
def get(self, name):
    """Get an image by name.

    Args:
        name (str): the name of the image.

    Returns:
        the prepared Image model.

    Raises:
        docker.errors.ImageNotFound: if the image does not exist.
        docker.errors.APIError: if the server returns an error.
    """
    raw = self.client.api.inspect_image(name)
    return self.prepare_model(raw)
Gets an image. Args: name (str): The name of the image. Returns: (:py:class:`Image`): The image. Raises: :py:class:`docker.errors.ImageNotFound` If the image does not exist. :py:class:`docker.errors.APIError` If the server returns an error.
def create_permission(self):
    """Build and return an instance of the permission-services facade."""
    return Permission(self.networkapi_url,
                      self.user,
                      self.password,
                      self.user_ldap)
Get an instance of permission services facade.
def gnuplot_2d(x, y, filename, title='', x_label='', y_label=''):
    """Produce a general 2D plot via gnuplot.

    Args:
        x (list): x points.
        y (list): y points.
        filename (str): filename of the output image ('.png' is appended
            when missing).
        title (str): title of the plot. Default '' (no title).
        x_label (str): x-axis label.
        y_label (str): y-axis label.
    """
    _, ext = os.path.splitext(filename)
    if ext != '.png':
        filename += '.png'

    # NOTE(review): the original gnuplot command template was lost from the
    # source (a dangling ``gnuplot_cmds = \`` continuation remained, which
    # made the function raise NameError). This is a plausible reconstruction
    # using the substitution keys supplied in args_dict below -- verify
    # against the upstream project.
    gnuplot_cmds = \
        """
        set term png
        set output "$filename"
        unset key
        set title "$title"
        set xlabel "$x_label"
        set ylabel "$y_label"
        plot "$filename_data" with lines
        """

    scr = _GnuplotScriptTemp(gnuplot_cmds)
    data = _GnuplotDataTemp(x, y)

    args_dict = {
        'filename': filename,
        'filename_data': data.name,
        'title': title,
        'x_label': x_label,
        'y_label': y_label
    }
    gnuplot(scr.name, args_dict)
Function to produce a general 2D plot. Args: x (list): x points. y (list): y points. filename (str): Filename of the output image. title (str): Title of the plot. Default is '' (no title). x_label (str): x-axis label. y_label (str): y-axis label.
def get_idle_pc_prop(self):
    """Get the Idle-PC proposals from the hypervisor for this router.

    If the router is not running it is started first (and stopped again
    afterwards), with a 20 second pause so it can settle before the
    measurement.

    :returns: list of Idle-PC proposals
    """
    is_running = yield from self.is_running()
    was_auto_started = False
    if not is_running:
        # The router must be running to measure Idle-PC; remember that we
        # started it ourselves so we can stop it again at the end.
        yield from self.start()
        was_auto_started = True
        # Give the router time to boot before measuring.
        yield from asyncio.sleep(20)

    log.info('Router "{name}" [{id}] has started calculating Idle-PC values'.format(name=self._name, id=self._id))
    begin = time.time()
    idlepcs = yield from self._hypervisor.send('vm get_idle_pc_prop "{}" 0'.format(self._name))
    log.info('Router "{name}" [{id}] has finished calculating Idle-PC values after {time:.4f} seconds'.format(name=self._name, id=self._id, time=time.time() - begin))
    if was_auto_started:
        yield from self.stop()
    return idlepcs
Gets the idle PC proposals. Takes 1000 measurements and records up to 10 idle PC proposals. There is a 10ms wait between each measurement. :returns: list of idle PC proposal
def Dump(self):
    """Write the current prefs to the preferences file as 'key = value' lines."""
    # Fixes: the file handle leaked on write errors (now closed via 'with');
    # the index-based loop over dict items is replaced by direct iteration;
    # the original 'len(items[n]) > 1' guard was vacuous because dict items
    # are always 2-tuples.
    with open(self.prefs_path, 'w') as prefs_file:
        for key, value in self.prefs.items():
            prefs_file.write('{0} = {1}\n'.format(key, value))
Dumps the current prefs to the preferences.txt file
def get_sum(qs, field):
    """Sum *field* over the queryset, returning 0 for an empty/None sum.

    ``qs``: queryset
    ``field``: name of the field to sum
    """
    aggregated = qs.aggregate(Sum(field))
    total = aggregated['%s__sum' % field]
    return total if total else 0
get sum for queryset. ``qs``: queryset ``field``: The field name to sum.
def read_configs(__pkg: str, __name: str = 'config', *, local: bool = True) -> ConfigParser:
    """Process the configuration file stack for *__pkg*.

    Registers jnrbase's time-parsing helpers as ConfigParser converters
    (``.getdatetime()``, ``.gethumandelta()``, ``.gettimedelta()``), reads
    all matching config files (optionally including a local ``.<pkg>rc``),
    and attaches two extra attributes to the parser: ``configs`` (the list
    of files considered) and ``colour`` (the resolved colour preference).

    Args:
        __pkg: Package name to use as base for config files
        __name: File name to search for within config directories
        local: Whether to include config files from the current directory

    Returns:
        Parsed configuration
    """
    configs = get_configs(__pkg, __name)
    if local:
        localrc = path.abspath('.{}rc'.format(__pkg))
        if path.exists(localrc):
            configs.append(localrc)

    cfg = ConfigParser(converters={
        'datetime': parse_datetime,
        'humandelta': parse_timedelta,
        'timedelta': parse_delta,
    })

    cfg.read(configs, 'utf-8')
    # Expose which files were considered, for debugging/reporting.
    cfg.configs = configs

    # Environment variables win over config values; both spellings honoured.
    if 'NO_COLOUR' in environ or 'NO_COLOR' in environ:
        cfg.colour = False
    elif __pkg in cfg:
        if 'colour' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('colour')
        if 'color' in cfg[__pkg]:
            # 'color' deliberately overrides 'colour' when both are set.
            cfg.colour = cfg[__pkg].getboolean('color')
        # NOTE(review): if the section exists but defines neither key,
        # cfg.colour is never set -- TODO confirm this is intended.
    else:
        cfg.colour = True
    return cfg
Process configuration file stack. We export the time parsing functionality of ``jnrbase`` as custom converters for :class:`configparser.ConfigParser`: =================== =========================================== Method Function =================== =========================================== ``.getdatetime()`` :func:`~jnrbase.iso_8601.parse_datetime` ``.gethumantime()`` :func:`~jnrbase.human_time.parse_timedelta` ``.gettimedelta()`` :func:`~jnrbase.iso_8601.parse_delta` =================== =========================================== Args: __pkg: Package name to use as base for config files __name: File name to search for within config directories local: Whether to include config files from current directory Returns: Parsed configuration files
def anonymize(self, value):
    """Set whether this recipe should be anonymized; returns the recipe."""
    assert isinstance(value, bool)
    changed = self._anonymize != value
    if changed:
        # Only flag the recipe dirty when the setting actually changes.
        self.dirty = True
        self._anonymize = value
    return self.recipe
Should this recipe be anonymized
def _backup_bytes(target, offset, length):
    """Copy *length* bytes at *offset* of *target* into '<target>.bytes_backup'.

    :param target: path of the file to read from
    :param offset: byte offset at which to start reading
    :param length: number of bytes to copy
    """
    click.echo('Backup {l} bytes at position {offset} on file {file} to .bytes_backup'.format(
        l=length, offset=offset, file=target))
    # Fixes: the original used the Python 2-only ``xrange`` builtin (a
    # NameError on Python 3), leaked both file handles, and copied one byte
    # per iteration; a single sized read does the same job. The echoed
    # message typo 'byes' is also corrected.
    with open(target, 'rb') as source:
        source.seek(offset)
        payload = source.read(length)
    with open(target + '.bytes_backup', 'wb') as backup:
        backup.write(payload)
Read bytes from one file and write it to a backup file with the .bytes_backup suffix
def getAmbientThreshold(self):
    """Return the ambient temperature threshold in degrees Celsius.

    Returns 0 when the device reports no threshold ('NK').
    """
    response = self.sendCommand('$GO')
    if response[0] == 'NK':
        return 0
    # Device reports tenths of a degree; convert to degrees.
    return float(response[1]) / 10
Returns the ambient temperature threshold in degrees Celsius, or 0 if no threshold is set
def send_is_typing(self, peer_jid: str, is_typing: bool):
    """Update the bot's 'is typing' status during a conversation.

    :param peer_jid: the JID the notification is sent to
    :param is_typing: True while typing, False otherwise
    """
    if self.is_group_jid(peer_jid):
        event = chatting.OutgoingGroupIsTypingEvent(peer_jid, is_typing)
    else:
        event = chatting.OutgoingIsTypingEvent(peer_jid, is_typing)
    return self._send_xmpp_element(event)
Updates the 'is typing' status of the bot during a conversation. :param peer_jid: The JID that the notification will be sent to :param is_typing: If true, indicates that we're currently typing, or False otherwise.
def parse_bytes(self, bytestr, isfinal=True):
    """Parse a byte string with the underlying expat parser.

    For very large inputs, call repeatedly with chunks and
    ``isfinal=False``, then finish with an empty chunk and
    ``isfinal=True``.

    :param bytestr: the bytes to parse
    :param isfinal: whether this is the last chunk
    :returns: the root of the parsed tree
    """
    with self._context():
        # No file backs this parse; clear the name used in error reporting.
        self.filename = None
        self.p.Parse(bytestr, isfinal)
        return self._root
Parse a byte string. If the string is very large, split it in chunks and parse each chunk with isfinal=False, then parse an empty chunk with isfinal=True.
def observe(self, ob):
    """Set the Observe option, replacing any existing one.

    :param ob: observe count
    """
    observe_number = defines.OptionRegistry.OBSERVE.number
    opt = Option()
    opt.number = observe_number
    opt.value = ob
    # Remove any previously-set Observe option before adding the new one.
    self.del_option_by_number(observe_number)
    self.add_option(opt)
Add the Observe option. :param ob: observe count
def stage_import_from_filesystem(self, filepath):
    """Stage an import from a local filesystem path.

    :param filepath: local filesystem path as a string
    :return: the decoded Import object
    """
    schema = ImportSchema()
    response = self.service.post(self.base, params={'path': filepath})
    return self.service.decode(schema, response)
Stage an import from a filesystem path. :param filepath: Local filesystem path as string. :return: :class:`imports.Import <imports.Import>` object
def get_notmuch_setting(self, section, key, fallback=None):
    """Look up a config value from notmuch's config.

    :param section: section the key lives in
    :param key: key to look up
    :param fallback: returned when the key is absent or its value is None
    :returns: the configured value, or *fallback*
    """
    config = self._notmuchconfig
    result = None
    if section in config and key in config[section]:
        result = config[section][key]
    return fallback if result is None else result
look up config values from notmuch's config :param section: key is in :type section: str :param key: key to look up :type key: str :param fallback: fallback returned if key is not present :type fallback: str :returns: config value with type as specified in the spec-file
def get_consumer_groups(self, consumer_group_id=None, names_only=False):
    """Get information on the available consumer groups.

    When *names_only* is true, each group id maps to None; otherwise each
    group id maps to its per-topic/partition offset details.

    :param consumer_group_id: restrict the result to this group, or None
        for all groups under "/consumers".
    :rtype: dict
    """
    if consumer_group_id is None:
        group_ids = self.get_children("/consumers")
    else:
        group_ids = [consumer_group_id]

    if names_only:
        return dict.fromkeys(group_ids)
    return {g_id: self.get_group_offsets(g_id) for g_id in group_ids}
Get information on all the available consumer-groups. If names_only is False, only list of consumer-group ids are sent. If names_only is True, Consumer group offset details are returned for all consumer-groups or given consumer-group if given in dict format as:- { 'group-id': { 'topic': { 'partition': offset-value, ... ... } } } :rtype: dict of consumer-group offset details
def backup_progress(self):
    """Return the status of a cloud backup as a dict, or None.

    Only available on Cloud deployments; Server deployments log a warning
    and return None. The endpoint may answer JSON or XML; XML is parsed
    defensively and its root attributes returned as a dict.
    """
    epoch_time = int(time.time() * 1000)
    if self.deploymentType == 'Cloud':
        # Millisecond timestamp acts as a cache-busting query parameter.
        url = self._options['server'] + '/rest/obm/1.0/getprogress?_=%i' % epoch_time
    else:
        logging.warning(
            'This functionality is not available in Server version')
        return None
    r = self._session.get(
        url, headers=self._options['headers'])
    try:
        return json.loads(r.text)
    except Exception:
        # Non-JSON answer: fall back to XML, parsed with defusedxml to
        # guard against malicious payloads.
        import defusedxml.ElementTree as etree
        progress = {}
        try:
            root = etree.fromstring(r.text)
        except etree.ParseError as pe:
            logging.warning('Unable to find backup info. You probably need to initiate a new backup. %s' % pe)
            return None
        # Expose every attribute of the root element as a dict entry.
        for k in root.keys():
            progress[k] = root.get(k)
        return progress
Return status of cloud backup as a dict. Is there a way to get progress for Server version?
def chmod_r(root: str, permission: int) -> None:
    """Recursively apply *permission* to *root* and everything beneath it.

    Args:
        root: directory to walk down
        permission: mode bits, e.g. ``stat.S_IWUSR``
    """
    os.chmod(root, permission)
    for dirpath, dirnames, filenames in os.walk(root):
        for entry in dirnames + filenames:
            os.chmod(os.path.join(dirpath, entry), permission)
Recursive ``chmod``. Args: root: directory to walk down permission: e.g. ``stat.S_IWUSR``
def requirements(filename):
    """Read non-blank requirement lines from *filename*.

    :param filename: path of the requirements file
    :returns: list of stripped, non-empty lines
    """
    # Fix: the original left the file handle open; 'with' guarantees it is
    # closed even if reading fails.
    with open(filename) as handle:
        return [line.strip() for line in handle if line.strip()]
Reads requirements from a file.
def to_feature_reports(self, debug=False):
    """Split the frame into 8-byte feature reports ready for a YubiKey.

    Each report is 7 payload bytes plus one trailing flag byte carrying
    SLOT_WRITE_FLAG and the sequence number. All-zero payloads in the
    middle of the frame are skipped; the first and last reports are
    always included.

    :param debug: when true, reports are rendered via the debug formatter
    :returns: list of reports
    """
    rest = self.to_string()
    seq = 0
    out = []
    # Chop the payload into 7-byte slices; the 8th byte of every report
    # holds the write flag plus the sequence number.
    while rest:
        this, rest = rest[:7], rest[7:]
        if seq > 0 and rest:
            # Intermediate all-zero reports need not be sent.
            if this != b'\x00\x00\x00\x00\x00\x00\x00':
                this += yubico_util.chr_byte(yubikey_defs.SLOT_WRITE_FLAG + seq)
                out.append(self._debug_string(debug, this))
        else:
            # First and last reports are always emitted.
            this += yubico_util.chr_byte(yubikey_defs.SLOT_WRITE_FLAG + seq)
            out.append(self._debug_string(debug, this))
        seq += 1
    return out
Return the frame as an array of 8-byte parts, ready to be sent to a YubiKey.
def run_step(self):
    """Perform the file in-out rewrite for this step."""
    stream_rewriter = StreamRewriter(self.context.iter_formatted_strings)
    super().run_step(stream_rewriter)
Do the file in-out rewrite.
def publish(self, topic, data, defer=None):
    """Publish a message to the given topic over tcp.

    :param topic: the topic to publish to
    :param data: bytestring data to publish
    :param defer: optional delay in milliseconds before publishing
        (requires nsq 0.3.6)
    """
    if defer is None:
        command = nsq.publish(topic, data)
    else:
        command = nsq.deferpublish(topic, data, defer)
    self.send(command)
Publish a message to the given topic over tcp. :param topic: the topic to publish to :param data: bytestring data to publish :param defer: duration in milliseconds to defer before publishing (requires nsq 0.3.6)
def ip_unnumbered(self, **kwargs):
    """Configure an unnumbered interface that borrows a donor's IP.

    Required kwargs: ``int_type``, ``name``, ``donor_type``,
    ``donor_name``. Optional: ``delete`` (bool, default False), ``get``
    (bool), ``callback``.

    :returns: the current config when ``get=True``, otherwise the return
        value of *callback*.
    :raises KeyError: if a required kwarg is missing.
    :raises ValueError: if ``int_type`` is not a supported interface type.
    """
    # Map the public kwarg names onto the element names expected by the
    # underlying template helpers.
    kwargs['ip_donor_interface_name'] = kwargs.pop('donor_name')
    kwargs['ip_donor_interface_type'] = kwargs.pop('donor_type')
    kwargs['delete'] = kwargs.pop('delete', False)
    callback = kwargs.pop('callback', self._callback)
    valid_int_types = ['gigabitethernet', 'tengigabitethernet',
                       'fortygigabitethernet',
                       'hundredgigabitethernet']
    if kwargs['int_type'] not in valid_int_types:
        raise ValueError('int_type must be one of: %s' %
                         repr(valid_int_types))
    unnumbered_type = self._ip_unnumbered_type(**kwargs)
    unnumbered_name = self._ip_unnumbered_name(**kwargs)
    if kwargs.pop('get', False):
        return self._get_ip_unnumbered(unnumbered_type, unnumbered_name)
    # Merge the two partial XML documents into the final config payload.
    config = pynos.utilities.merge_xml(unnumbered_type, unnumbered_name)
    return callback(config)
Configure an unnumbered interface. Args: int_type (str): Type of interface. (gigabitethernet, tengigabitethernet etc). name (str): Name of interface id. (For interface: 1/0/5, 1/0/10 etc). delete (bool): True is the IP address is added and False if its to be deleted (True, False). Default value will be False if not specified. donor_type (str): Interface type of the donor interface. donor_name (str): Interface name of the donor interface. get (bool): Get config instead of editing config. (True, False) callback (function): A function executed upon completion of the method. The only parameter passed to `callback` will be the ``ElementTree`` `config`. Returns: Return value of `callback`. Raises: KeyError: if `int_type`, `name`, `donor_type`, or `donor_name` is not passed. ValueError: if `int_type`, `name`, `donor_type`, or `donor_name` are invalid. Examples: >>> import pynos.device >>> switches = ['10.24.39.230'] >>> auth = ('admin', 'password') >>> for switch in switches: ... conn = (switch, '22') ... with pynos.device.Device(conn=conn, auth=auth) as dev: ... output = dev.interface.ip_address(int_type='loopback', ... name='1', ip_addr='4.4.4.4/32', rbridge_id='230') ... int_type = 'tengigabitethernet' ... name = '230/0/20' ... donor_type = 'loopback' ... donor_name = '1' ... output = dev.interface.disable_switchport(inter_type= ... int_type, inter=name) ... output = dev.interface.ip_unnumbered(int_type=int_type, ... name=name, donor_type=donor_type, donor_name=donor_name) ... output = dev.interface.ip_unnumbered(int_type=int_type, ... name=name, donor_type=donor_type, donor_name=donor_name, ... get=True) ... output = dev.interface.ip_unnumbered(int_type=int_type, ... name=name, donor_type=donor_type, donor_name=donor_name, ... delete=True) ... output = dev.interface.ip_address(int_type='loopback', ... name='1', ip_addr='4.4.4.4/32', rbridge_id='230', ... delete=True) ... output = dev.interface.ip_unnumbered(int_type='hodor', ... 
donor_name=donor_name, donor_type=donor_type, name=name) ... # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ValueError
def timestampFormat(self, timestampFormat):
    """Set ``_timestampFormat``, the pattern used to build QDateTime values.

    Args:
        timestampFormat (str): formatting string for conversion of
            timestamps to QtCore.QDateTime.

    Raises:
        TypeError: if *timestampFormat* is not a str.
    """
    if isinstance(timestampFormat, str):
        self._timestampFormat = timestampFormat
    else:
        raise TypeError('not of type unicode')
Setter to _timestampFormat. Formatting string for conversion of timestamps to QtCore.QDateTime Raises: TypeError: if timestampFormat is not of type str. Args: timestampFormat (str): assign timestampFormat to _timestampFormat. Formatting string for conversion of timestamps to QtCore.QDateTime. Used in data method.