code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
# Recomputes each non-locked compound's density from its elements' densities and
# stoichiometric ratios via _utilities.get_compound_density, then writes the stack back.
# NOTE(review): newlines were stripped from this dump; statement boundaries/indentation are lost.
def __update_layer_density(self): _stack = self.stack _density_lock = self.density_lock list_compound = _stack.keys() for _key in list_compound: if _density_lock[_key]: continue _list_ratio = _stack[_key]['stoichiometric_ratio'] _list_density = [] for _element in _stack[_key]['elements']: _list_density.append(_stack[_key][_element]['density']['value']) _compound_density = _utilities.get_compound_density(list_density=_list_density, list_ratio=_list_ratio) _stack[_key]['density']['value'] = _compound_density self.stack = _stack
calculate or update the layer density
# Collects source IPs from parsed sshd log lines into self.features['ips'] and maps
# each IP to the process ids that mentioned it (deduplicated per IP).
def _extract_features(self): for parsed_line in self.parsed_lines: if parsed_line.get('program') == 'sshd': result = self._parse_auth_message(parsed_line['message']) if 'ip' in result: self.features['ips'].append(result['ip']) if result['ip'] not in self.ips_to_pids: self.ips_to_pids[result['ip']] = [parsed_line['processid']] else: if parsed_line['processid'] not in self.ips_to_pids[result['ip']]: self.ips_to_pids[result['ip']].append(parsed_line['processid'])
Extracts and sets the feature data from the log file necessary for a reduction
# Runs self.one_update on a single motor command (1-D input) or on each command of a
# batch, optionally resetting first; batch results are stacked into an array.
# NOTE(review): `array` is presumably numpy's — confirm against the file's imports.
def update(self, m_ag, reset=True, log=True): if reset: self.reset() if len(array(m_ag).shape) == 1: s = self.one_update(m_ag, log) else: s = [] for m in m_ag: s.append(self.one_update(m, log)) s = array(s) return s
Computes sensorimotor values from motor orders. :param numpy.array m_ag: a motor command with shape (self.conf.m_ndims, ) or a set of n motor commands of shape (n, self.conf.m_ndims) :param bool log: emit the motor and sensory values for logging purpose (default: True). :returns: an array of shape (self.conf.ndims, ) or (n, self.conf.ndims) according to the shape of the m_ag parameter, containing the motor values (which can be different from m_ag, e.g. bounded according to self.conf.m_bounds) and the corresponding sensory values. .. note:: self.conf.ndims = self.conf.m_ndims + self.conf.s_ndims is the dimensionality of the sensorimotor space (dim of the motor space + dim of the sensory space).
def orthogonal(*args) -> bool:
    """Determine if a set of arrays (or array shapes) are orthogonal.

    Arguments with a ``.shape`` attribute are reduced to their shape; the
    shapes are then zipped position-wise, and each dimension tuple must have
    at most one entry greater than 1 (product equals maximum).

    Parameters
    ----------
    args : array-likes or array shapes

    Returns
    -------
    bool
        Array orthogonality condition.
    """
    # Bug fix: the original assigned into ``args``, but ``*args`` is a tuple and
    # does not support item assignment, so any array argument raised TypeError.
    shapes = [arg.shape if hasattr(arg, "shape") else arg for arg in args]
    for dims in zip(*shapes):
        # np.prod replaces the deprecated/removed np.product alias.
        if np.prod(dims) != max(dims):
            return False
    return True
Determine if a set of arrays are orthogonal. Parameters ---------- args : array-likes or array shapes Returns ------- bool Array orthogonality condition.
# Drops the named MongoDB database; returns True on success, an error string when the
# connection fails or pymongo raises during the drop.
def db_remove(name, user=None, password=None, host=None, port=None, authdb=None): conn = _connect(user, password, host, port, authdb=authdb) if not conn: return 'Failed to connect to mongo database' try: log.info('Removing database %s', name) conn.drop_database(name) except pymongo.errors.PyMongoError as err: log.error('Removing database %s failed with error: %s', name, err) return six.text_type(err) return True
Remove a MongoDB database CLI Example: .. code-block:: bash salt '*' mongodb.db_remove <name> <user> <password> <host> <port>
def is_valid(self, value): if not self.is_array: return self._valid(value) if isinstance(value, (list, set, tuple)): return all([self._valid(item) for item in value]) return self._valid(value)
Validate value before actual instance setting based on type. Args: value (object): The value object for validation. Returns: True if value validation succeeds else False.
def default_number_converter(number_str):
    """Convert a JSON number's string representation to int or float.

    Strings consisting only of digits (with an optional leading minus sign)
    become ``int``; everything else is handed to ``float``.
    """
    digits = number_str[1:] if number_str.startswith('-') else number_str
    if digits.isdigit():
        # Pure integer literal, possibly negative.
        return int(number_str)
    return float(number_str)
Converts the string representation of a json number into its python object equivalent, an int, long, float or whatever type suits.
# Reverses the blog feed URL; the slug-based route is used unless blog_page is the root page.
def get_feeds_url(blog_page, root_page): if root_page == blog_page: return reverse('blog_page_feed') else: blog_path = strip_prefix_and_ending_slash(blog_page.specific.last_url_part) return reverse('blog_page_feed_slug', kwargs={'blog_path': blog_path})
Get the feeds urls for a blog page instance. It will use one url or another depending on whether blog_page is the root page.
# Builds the set of primary keys, unwrapping Model instances (o.pk) and passing bare keys through.
def pks_from_objects(self, objects): return {o.pk if isinstance(o, Model) else o for o in objects}
Extract all the primary key strings from the given objects. Objects may be Versionables, or bare primary keys. :rtype : set
# Maps author-keyword nodes to their string contents (inline tags preserved);
# empty list when the soup has no author keywords.
def full_keywords(soup): "author keywords list including inline tags, such as italic" if not raw_parser.author_keywords(soup): return [] return list(map(node_contents_str, raw_parser.author_keywords(soup)))
author keywords list including inline tags, such as italic
# Removes every AxesImage artist from each axis in self.flat_grid so plots start fresh.
def clear(self): for ax in self.flat_grid: for im_h in ax.findobj(AxesImage): im_h.remove()
Clears all the axes to start fresh.
# Shifts this_year's start/end dates back by one year.
# NOTE(review): relies on project Date/delta helpers not visible here — confirm their semantics.
def last_year(): since, until = Date.this_year() since = since.date - delta(years=1) until = until.date - delta(years=1) return Date(since), Date(until)
Return start and end date of the last fiscal year
# Runs each test once on a fresh LocalhostInstance, recording progress in the Testbatch
# mongo document. NOTE(review): collapsed dump; the with-block's extent is ambiguous here.
def run(self): self.info_log("The test batch is ready.") self.executed_tests = [] for test in self.tests: localhost_instance = LocalhostInstance( runner=self, browser_config=self.browser_config, test_name=test.Test.name ) localhost_instance.startup() with DbSessionContext(BROME_CONFIG['database']['mongo_database_name']) as session: test_batch = session.query(Testbatch)\ .filter(Testbatch.mongo_id == self.test_batch_id).one() test_batch.total_executing_tests = 1 session.save(test_batch, safe=True) test_ = test.Test( runner=self, browser_config=self.browser_config, name=test.Test.name, test_batch_id=self.test_batch_id, localhost_instance=localhost_instance, index=1 ) test_.execute() self.executed_tests.append(test_) localhost_instance.tear_down()
Run the test batch
# Normalizes return_timestamps (None/range/list/scalar/ndarray/Series) to a 1-D numpy array;
# None builds the full initial..final+saveper range. NOTE(review): Series.as_matrix() was
# removed in modern pandas (use .values/.to_numpy()); the range branch's isclass(range) is a no-op guard.
def _format_return_timestamps(self, return_timestamps=None): if return_timestamps is None: return_timestamps_array = np.arange( self.components.initial_time(), self.components.final_time() + self.components.saveper(), self.components.saveper(), dtype=np.float64 ) elif inspect.isclass(range) and isinstance(return_timestamps, range): return_timestamps_array = np.array(return_timestamps, ndmin=1) elif isinstance(return_timestamps, (list, int, float, np.ndarray)): return_timestamps_array = np.array(return_timestamps, ndmin=1) elif isinstance(return_timestamps, _pd.Series): return_timestamps_array = return_timestamps.as_matrix() else: raise TypeError('`return_timestamps` expects a list, array, pandas Series, ' 'or numeric value') return return_timestamps_array
Format the passed in return timestamps value as a numpy array. If no value is passed, build up array of timestamps based upon model start and end times, and the 'saveper' value.
def _gcd(a=None, b=None, terms=None): if terms: return reduce(lambda a, b: _gcd(a, b), terms) else: while b: (a, b) = (b, a % b) return a
Return greatest common divisor using Euclid's Algorithm.
def decorate(self, record):
    """add slack-specific flourishes to responses

    https://api.slack.com/docs/message-attachments

    Args:
        record (:obj:`logging.record`): message to log

    Returns:
        (:obj:`dict`): attachments object for reporting
    """
    attachments = {}
    # Escalate the attachment colour with severity (CRITICAL wins over ERROR).
    if record.levelno >= logging.CRITICAL:
        attachments['color'] = 'danger'
    elif record.levelno >= logging.ERROR:
        attachments['color'] = 'warning'
    text = '{levelname}: {name} {module}.{funcName}:{lineno}'.format(
        levelname=record.levelname,
        name=record.name,
        module=record.module,
        funcName=record.funcName,
        lineno=record.lineno,
    )
    attachments['text'] = text
    attachments['fallback'] = text
    return attachments
add slack-specific flourishes to responses https://api.slack.com/docs/message-attachments Args: record (:obj:`logging.record`): message to log Returns: (:obj:`dict`): attachments object for reporting
# Translates a PCAN error code to text; if the combined code cannot be translated,
# decomposes it into individual bits and joins the per-bit messages, one line each.
def _get_formatted_error(self, error): def bits(n): while n: b = n & (~n+1) yield b n ^= b stsReturn = self.m_objPCANBasic.GetErrorText(error, 0) if stsReturn[0] != PCAN_ERROR_OK: strings = [] for b in bits(error): stsReturn = self.m_objPCANBasic.GetErrorText(b, 0) if stsReturn[0] != PCAN_ERROR_OK: text = "An error occurred. Error-code's text ({0:X}h) couldn't be retrieved".format(error) else: text = stsReturn[1].decode('utf-8', errors='replace') strings.append(text) complete_text = '\n'.join(strings) else: complete_text = stsReturn[1].decode('utf-8', errors='replace') return complete_text
Gets the text using the GetErrorText API function. If the function call succeeds, the translated error is returned. If it fails, a text describing the current error is returned. Multiple errors may be present in which case their individual messages are included in the return string, one line per error.
# Maps the CDF file's endianness vs. sys.byteorder to a struct byte-order prefix:
# '>' or '<' when they differ, '=' (native) when they match.
def _convert_option(self): if sys.byteorder == 'little' and self._endian() == 'big-endian': order = '>' elif sys.byteorder == 'big' and self._endian() == 'little-endian': order = '<' else: order = '=' return order
Determines how to convert CDF byte ordering to the system byte ordering.
# Checks every policy key against ACCEPTED_SECURITY_TYPES and enforces an exact type()
# match on the value (deliberately strict: subclasses like bool-for-int are rejected).
def validate(policy): for param, value in policy.items(): if param not in ACCEPTED_SECURITY_TYPES.keys(): raise SecurityError('Invalid Security Parameter: {}'.format(param)) if type(value) != ACCEPTED_SECURITY_TYPES[param]: raise SecurityError('Invalid Parameter Data Type for {}, ' 'Expecting: {} Received: {}'.format( param, ACCEPTED_SECURITY_TYPES[param], type(value)))
Validates a policy and its parameters and raises an error if invalid
# Fetches and deletes the replication document; a missing id raises
# CloudantReplicatorException(404).
def stop_replication(self, repl_id): try: repl_doc = self.database[repl_id] except KeyError: raise CloudantReplicatorException(404, repl_id) repl_doc.fetch() repl_doc.delete()
Stops a replication based on the provided replication id by deleting the replication document from the replication database. The replication can only be stopped if it has not yet completed. If it has already completed then the replication document is still deleted from replication database. :param str repl_id: Replication id used to identify the replication to stop.
def instances(exp=".*"): "Filter list of machines matching an expression" expression = re.compile(exp) instances = [] for node in ec2_instances(): if node.tags and ip(node): try: if expression.match(node.tags.get("Name")): instances.append(node) except TypeError: pass return instances
Filter list of machines matching an expression
# Scans C:\ for 'Program Files*'\SEGGER\JLink* directories and yields every path to the
# appropriate J-Link DLL that exists there.
def find_library_windows(cls): dll = cls.get_appropriate_windows_sdk_name() + '.dll' root = 'C:\\' for d in os.listdir(root): dir_path = os.path.join(root, d) if d.startswith('Program Files') and os.path.isdir(dir_path): dir_path = os.path.join(dir_path, 'SEGGER') if not os.path.isdir(dir_path): continue ds = filter(lambda x: x.startswith('JLink'), os.listdir(dir_path)) for jlink_dir in ds: lib_path = os.path.join(dir_path, jlink_dir, dll) if os.path.isfile(lib_path): yield lib_path
Loads the SEGGER DLL from the windows installation directory. On Windows, these are found either under: - ``C:\\Program Files\\SEGGER\\JLink`` - ``C:\\Program Files (x86)\\SEGGER\\JLink``. Args: cls (Library): the ``Library`` class Returns: The paths to the J-Link library files in the order that they are found.
# Flattens an optional leading list/tuple of ids, drops ids for clusters that no longer
# exist, and forwards the selection to the cluster view.
def select(self, *cluster_ids): if cluster_ids and isinstance(cluster_ids[0], (tuple, list)): cluster_ids = list(cluster_ids[0]) + list(cluster_ids[1:]) cluster_ids = self._keep_existing_clusters(cluster_ids) self.cluster_view.select(cluster_ids)
Select a list of clusters.
# QGIS-style expression helper: returns the capitalized analysis title header string
# from the definitions table; feature/parent are required by the API but unused.
def analysis_title_header_element(feature, parent): _ = feature, parent header = analysis_title_header['string_format'] return header.capitalize()
Retrieve analysis title header string from definitions.
def subtract(self, number1, number2): return self._format_result(int(number1) - int(number2))
Subtracts number2 from number1
# Creates the dotted package's folder chain under sourcefolder (project root by default)
# and adds an __init__.py to the leaf folder; returns the created Folder resource.
def create_package(project, name, sourcefolder=None): if sourcefolder is None: sourcefolder = project.root packages = name.split('.') parent = sourcefolder for package in packages[:-1]: parent = parent.get_child(package) made_packages = parent.create_folder(packages[-1]) made_packages.create_file('__init__.py') return made_packages
Creates a package and returns a `rope.base.resources.Folder`
# Returns the static URL with a ?v=<first 7 hex digits of the file's MD5> cache-buster;
# falls back to the bare URL (with a warning) when the file cannot be found on disk.
def versioned_static(file_path): full_path = find(file_path) url = static(file_path) if type(full_path) is list and len(full_path) > 0: full_path = full_path[0] if not full_path: msg = 'Could not find static file: {0}'.format(file_path) logger.warning(msg) return url file_hash = md5() with open(full_path, "rb") as file_contents: for chunk in iter(lambda: file_contents.read(4096), b""): file_hash.update(chunk) return url + '?v=' + file_hash.hexdigest()[:7]
Given the path for a static file, output a url path with a hex hash query string for versioning.
# Writes min(norms) to Tensorboard via self._add_gradient_scalar.
# NOTE(review): `norms:[]` is not a meaningful annotation — List[float] was likely intended.
def _write_min_norm(self, norms:[])->None: "Writes the minimum norm of the gradients to Tensorboard." min_norm = min(norms) self._add_gradient_scalar('min_norm', scalar_value=min_norm)
Writes the minimum norm of the gradients to Tensorboard.
# Appends terminator zero bits: the terminator length for `ver`, clipped to the
# remaining symbol capacity.
def write_terminator(buff, capacity, ver, length): buff.extend([0] * min(capacity - length, consts.TERMINATOR_LENGTH[ver]))
\ Writes the terminator. :param buff: The byte buffer. :param capacity: Symbol capacity. :param ver: ``None`` if a QR Code is written, "M1", "M2", "M3", or "M4" if a Micro QR Code is written. :param length: Length of the data bit stream.
# Decorator factory: derives a per-request secret key via secret_key_func and gates
# access on validate_signature through request_passes_test.
def signature_required(secret_key_func): def actual_decorator(obj): def test_func(request, *args, **kwargs): secret_key = secret_key_func(request, *args, **kwargs) return validate_signature(request, secret_key) decorator = request_passes_test(test_func) return wrap_object(obj, decorator) return actual_decorator
Requires that the request contain a valid signature to gain access to a specified resource.
# Parses a string to int honoring lowercase 0o/0x prefixes (uppercase 0O/0X fall back to
# base 10 and would raise); integer inputs pass through unchanged; other types raise TypeError.
def convert(self, argument): if _is_integer_type(argument): return argument elif isinstance(argument, six.string_types): base = 10 if len(argument) > 2 and argument[0] == '0': if argument[1] == 'o': base = 8 elif argument[1] == 'x': base = 16 return int(argument, base) else: raise TypeError('Expect argument to be a string or int, found {}'.format( type(argument)))
Returns the int value of argument.
# Guard + delegate: raises errors.Unimplemented unless grade entry lookup is supported,
# otherwise constructs the session with the gradebook id, proxy and runtime.
def get_grade_entry_lookup_session_for_gradebook(self, gradebook_id, proxy): if not self.supports_grade_entry_lookup(): raise errors.Unimplemented() return sessions.GradeEntryLookupSession(gradebook_id, proxy, self._runtime)
Gets the ``OsidSession`` associated with the grade entry lookup service for the given gradebook. arg: gradebook_id (osid.id.Id): the ``Id`` of the gradebook arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradeEntryLookupSession) - ``a GradeEntryLookupSession`` raise: NotFound - ``gradebook_id`` not found raise: NullArgument - ``gradebook_id`` or ``proxy`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_grade_entry_lookup()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_grade_entry_lookup()`` and ``supports_visible_federation()`` are ``true``.*
# Thin SWIG wrapper: delegates string measurement to the native _fitz binding.
def measure_string(self, text, fontname, fontsize, encoding=0): return _fitz.Tools_measure_string(self, text, fontname, fontsize, encoding)
Measure length of a string for a Base14 font.
# Draws each prediction's bounding box and a filled name label on the image with PIL,
# then displays it; names are UTF-8 encoded for Pillow's draw.text.
def show_prediction_labels_on_image(img_path, predictions): pil_image = Image.open(img_path).convert("RGB") draw = ImageDraw.Draw(pil_image) for name, (top, right, bottom, left) in predictions: draw.rectangle(((left, top), (right, bottom)), outline=(0, 0, 255)) name = name.encode("UTF-8") text_width, text_height = draw.textsize(name) draw.rectangle(((left, bottom - text_height - 10), (right, bottom)), fill=(0, 0, 255), outline=(0, 0, 255)) draw.text((left + 6, bottom - text_height - 5), name, fill=(255, 255, 255, 255)) del draw pil_image.show()
Shows the face recognition results visually. :param img_path: path to image to be recognized :param predictions: results of the predict function :return:
# Guard + delegate: raises errors.Unimplemented unless grade system lookup is supported,
# otherwise constructs the session with the gradebook id, proxy and runtime.
def get_grade_system_lookup_session_for_gradebook(self, gradebook_id, proxy): if not self.supports_grade_system_lookup(): raise errors.Unimplemented() return sessions.GradeSystemLookupSession(gradebook_id, proxy, self._runtime)
Gets the ``OsidSession`` associated with the grade system lookup service for the given gradebook. arg: gradebook_id (osid.id.Id): the ``Id`` of the gradebook arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradeSystemLookupSession) - ``a GradeSystemLookupSession`` raise: NotFound - ``gradebook_id`` not found raise: NullArgument - ``gradebook_id`` or ``proxy`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_grade_system_lookup()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_grade_system_lookup()`` and ``supports_visible_federation()`` are ``true``.*
def get_editor_query(sql):
    """Return the query part of an editor command, with ``\\e`` markers removed."""
    query = sql.strip()
    # Repeatedly shave a literal "\e" token off either end of the string.
    marker = re.compile(r'(^\\e|\\e$)')
    while marker.search(query):
        query = marker.sub('', query)
    return query
Get the query part of an editor command.
# Validates GO id formats: raises RuntimeError on the first invalid id, or — when
# raise_except is False — returns that id instead (None when all are valid).
def chk_goids(goids, msg=None, raise_except=True): for goid in goids: if not goid_is_valid(goid): if raise_except: raise RuntimeError("BAD GO({GO}): {MSG}".format(GO=goid, MSG=msg)) else: return goid
check that all GO IDs have the proper format.
# Posts an aaaLogin XML payload to the CIMC device, parses the reply element's
# attributes, and returns the outCookie session token (raising if it is absent).
def logon(): content = {} payload = "<aaaLogin inName='{0}' inPassword='{1}'></aaaLogin>".format(DETAILS['username'], DETAILS['password']) r = __utils__['http.query'](DETAILS['url'], data=payload, method='POST', decode_type='plain', decode=True, verify_ssl=False, raise_error=False, status=True, headers=DETAILS['headers']) _validate_response_code(r['status']) answer = re.findall(r'(<[\s\S.]*>)', r['text'])[0] items = ET.fromstring(answer) for item in items.attrib: content[item] = items.attrib[item] if 'outCookie' not in content: raise salt.exceptions.CommandExecutionError("Unable to log into proxy device.") return content['outCookie']
Logs into the cimc device and returns the session cookie.
def hull_points(obj, qhull_options='QbB Pp'):
    """Try to extract a convex set of points from multiple input formats.

    Parameters
    ---------
    obj: Trimesh object
         (n,d) points
         (m,) Trimesh objects

    Returns
    --------
    points: (o,d) convex set of points
    """
    # Trimesh-like inputs already expose their convex hull vertices.
    if hasattr(obj, 'convex_hull'):
        return obj.convex_hull.vertices
    points = np.asanyarray(obj, dtype=np.float64)
    if len(points.shape) != 2:
        raise ValueError('points must be (n, dimension)!')
    hull = spatial.ConvexHull(points, qhull_options=qhull_options)
    return hull.points[hull.vertices]
Try to extract a convex set of points from multiple input formats. Parameters --------- obj: Trimesh object (n,d) points (m,) Trimesh objects Returns -------- points: (o,d) convex set of points
# Resolution order: a child_<segment> shortcut method, then each sessionless site-root
# plugin (new-style sessionlessProduceResource, else legacy resourceFactory), then guard.
def locateChild(self, context, segments): shortcut = getattr(self, 'child_' + segments[0], None) if shortcut: res = shortcut(context) if res is not None: return res, segments[1:] req = IRequest(context) for plg in self.siteStore.powerupsFor(ISessionlessSiteRootPlugin): spr = getattr(plg, 'sessionlessProduceResource', None) if spr is not None: childAndSegments = spr(req, segments) else: childAndSegments = plg.resourceFactory(segments) if childAndSegments is not None: return childAndSegments return self.guardedRoot.locateChild(context, segments)
Return a statically defined child or a child defined by a sessionless site root plugin or an avatar from guard.
# Prints a red "generated by uwsgiconf v<version> on <timestamp>" stamp and returns
# self to allow chaining.
def print_stamp(self): from . import VERSION print_out = partial(self.print_out, format_options='red') print_out('This configuration was automatically generated using') print_out('uwsgiconf v%s on %s' % ('.'.join(map(str, VERSION)), datetime.now().isoformat(' '))) return self
Prints out a stamp containing useful information, such as what and when has generated this configuration.
# Returns visible entries directly; hidden entries only for users holding
# zinnia.can_view_all or listed among the entry's authors — otherwise Http404.
def get_object(self, queryset=None): obj = super(EntryPreviewMixin, self).get_object(queryset) if obj.is_visible: return obj if (self.request.user.has_perm('zinnia.can_view_all') or self.request.user.pk in [ author.pk for author in obj.authors.all()]): return obj raise Http404(_('No entry found matching the query'))
If the status of the entry is not PUBLISHED, a preview is requested, so we check if the user has the 'zinnia.can_view_all' permission or if it's an author of the entry.
def calculate_partial_digest(username, realm, password):
    """Calculate a partial HTTP Digest (MD5 of ``username:realm:password``).

    The hex digest may be stored and used to authenticate future HTTP
    Digest sessions without retaining the plain password.
    """
    # Fix: the original used the Python-2-only ``md5`` module and interpolated
    # pre-encoded bytes into a text template (broken on Python 3). Encoding the
    # joined string once yields the same bytes for the Python 2 inputs.
    import hashlib
    a1 = "%s:%s:%s" % (username, realm, password)
    return hashlib.md5(a1.encode('utf-8')).hexdigest()
Calculate a partial digest that may be stored and used to authenticate future HTTP Digest sessions.
# Sends the IO pin command for pin_number with the validated setting; GPIO_SETTINGS and
# IO_PIN_COMMANDS are project-defined constant tables.
def set_gpio_pin(self, pin_number, setting, dest_addr_long=None): assert setting in const.GPIO_SETTINGS.values() self._send_and_wait( command=const.IO_PIN_COMMANDS[pin_number], parameter=setting.value, dest_addr_long=dest_addr_long)
Set a gpio pin setting.
# Decorator variant of get_summary: registers the wrapped function's docstring summary
# (empty string when undocumented) and returns the function unchanged.
def get_summaryf(self, *args, **kwargs): def func(f): doc = f.__doc__ self.get_summary(doc or '', *args, **kwargs) return f return func
Extract the summary from a function docstring Parameters ---------- ``*args`` and ``**kwargs`` See the :meth:`get_summary` method. Note, that the first argument will be the docstring of the specified function Returns ------- function Wrapper that takes a function as input and registers its summary via the :meth:`get_summary` method
# True when the artifact registry holds any record for this distribution's name;
# `get` is a project registry accessor, not builtins.
def has_calmjs_artifact_declarations(cmd, registry_name='calmjs.artifacts'): return any(get(registry_name).iter_records_for( cmd.distribution.get_name()))
For a distutils command to verify that the artifact build step is possible.
def Sample(self, n):
    """Generates a random sample from this Beta distribution.

    n: int sample size
    """
    return numpy.random.beta(self.alpha, self.beta, size=(n,))
Generates a random sample from this distribution. n: int sample size
def set_foreign_key(self, parent_table, parent_column, child_table, child_column):
    """Create a Foreign Key constraint on a column from a table."""
    statement = 'ALTER TABLE {0} ADD FOREIGN KEY ({1}) REFERENCES {2}({3})'.format(
        parent_table, parent_column, child_table, child_column)
    self.execute(statement)
Create a Foreign Key constraint on a column from a table.
# Processes the train/valid/test lists with shared x/y processors, then surfaces any
# dataset-attached warnings; returns self for chaining.
def process(self): "Process the inner datasets." xp,yp = self.get_processors() for ds,n in zip(self.lists, ['train','valid','test']): ds.process(xp, yp, name=n) for ds in self.lists: if getattr(ds, 'warn', False): warn(ds.warn) return self
Process the inner datasets.
# Streams trace data to the BLE client in <=20-byte notifications, deferring itself for
# the next chunk; a 'Rejected' status retries the same chunk after 50 ms, other BLE
# errors are audited and logged.
def _send_trace(self, chunk=None): self._trace_sm_running = True if chunk is None: chunk = self._next_tracing_chunk(20) if chunk is None or len(chunk) == 0: self._trace_sm_running = False return try: self._send_notification(TracingChar.value_handle, chunk) self._defer(self._send_trace) except bable_interface.BaBLEException as err: if err.packet.status == 'Rejected': time.sleep(0.05) self._defer(self._send_trace, [chunk]) else: self._audit('ErrorStreamingTrace') self._logger.exception("Error while tracing data")
Stream tracing data to the ble client in 20 byte chunks Args: chunk (bytearray): A chunk that should be sent instead of requesting a new chunk from the pending reports.
# Merges schema validation messages into self.errors for known properties, replacing
# non-list (nested-schema) results with a placeholder marker and extending existing lists.
def set_errors(self, result): errors = result.get_messages() for property_name in errors: if not hasattr(self, property_name): continue prop_errors = errors[property_name] if type(prop_errors) is not list: prop_errors = ['<Nested schema result following...>'] if property_name in self.errors: self.errors[property_name].extend(prop_errors) else: self.errors[property_name] = prop_errors
Populate field errors with errors from schema validation
# Returns 1/5/15-minute load averages; AIX is routed to a CLI fallback, and platforms
# without os.getloadavg raise CommandExecutionError (note the 'loadavag' typo in that message).
def loadavg(): if __grains__['kernel'] == 'AIX': return _aix_loadavg() try: load_avg = os.getloadavg() except AttributeError: raise salt.exceptions.CommandExecutionError('status.loadavag is not available on your platform') return {'1-min': load_avg[0], '5-min': load_avg[1], '15-min': load_avg[2]}
Return the load averages for this minion .. versionchanged:: 2016.11.4 Added support for AIX CLI Example: .. code-block:: bash salt '*' status.loadavg :raises CommandExecutionError: If the system cannot report loadaverages to Python
# Resolves an attribute (by name if given a string), enforces read mode for attributes
# not locally staged, and returns the first stored value — or the attribute's default.
# NOTE(review): collapsed dump; the for/return nesting cannot be reconstructed with certainty.
def Get(self, attribute, default=None): if attribute is None: return default elif isinstance(attribute, str): attribute = Attribute.GetAttributeByName(attribute) if "r" not in self.mode and (attribute not in self.new_attributes and attribute not in self.synced_attributes): raise IOError("Fetching %s from object not opened for reading." % attribute) for result in self.GetValuesForAttribute(attribute, only_one=True): try: result.attribute_instance = attribute except AttributeError: pass return result return attribute.GetDefault(self, default)
Gets the attribute from this object.
# Would convert CNVkit normalized bins into a cobalt ratio TSV; deliberately raises
# NotImplementedError after writing the header (kept as a starting point, per its docstring).
def _cobalt_ratio_file(paired, work_dir): cobalt_dir = utils.safe_makedir(os.path.join(work_dir, "cobalt")) out_file = os.path.join(cobalt_dir, "%s.cobalt" % dd.get_sample_name(paired.tumor_data)) if not utils.file_exists(out_file): cnr_file = tz.get_in(["depth", "bins", "normalized"], paired.tumor_data) with file_transaction(paired.tumor_data, out_file) as tx_out_file: with open(tx_out_file, "w") as out_handle: writer = csv.writer(out_handle, delimiter="\t") writer.writerow(["Chromosome", "Position", "ReferenceReadCount", "TumorReadCount", "ReferenceGCRatio", "TumorGCRatio", "ReferenceGCDiploidRatio"]) raise NotImplementedError return out_file
Convert CNVkit binning counts into cobalt ratio output. This contains read counts plus normalization for GC, from section 7.2 "Determine read depth ratios for tumor and reference genomes" https://www.biorxiv.org/content/biorxiv/early/2018/09/20/415133.full.pdf Since CNVkit cnr files already have GC bias correction, we re-center the existing log2 ratios to be around 1, rather than zero, which matches the cobalt expectations. XXX This doesn't appear to be a worthwhile direction since PURPLE requires 1000bp even binning. We'll leave this here as a starting point for future work but work on using cobalt directly.
def new(self, text):
    """Create a new file or text identifier.

    Parameters:
     text - The text to store into the identifier; must be exactly 128
            characters long.
    Returns:
     Nothing.
    """
    # Re-initialization is an internal error; bad length is caller input error.
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('This File or Text identifier is already initialized')
    required_length = 128
    if len(text) != required_length:
        raise pycdlibexception.PyCdlibInvalidInput('Length of text must be 128')
    self.text = text
    self._initialized = True
Create a new file or text identifier. Parameters: text - The text to store into the identifier. Returns: Nothing.
def tenure(self):
    """Calculate board tenure in years, rounded to two decimals.

    Measures from ``start_date`` to ``end_date`` when the term has ended,
    otherwise up to today. Fixes the original's ``date.end_date`` reference
    (an AttributeError on the ``date`` class) which should have been
    ``self.end_date``.
    """
    end = self.end_date if self.end_date else date.today()
    return round((end - self.start_date).days / 365., 2)
Calculates board tenure in years
# Returns the starting column of a tabular mention's cell, or None when the mention's
# sentence is not tabular.
def get_min_col_num(mention): span = _to_span(mention) if span.sentence.is_tabular(): return span.sentence.cell.col_start else: return None
Return the lowest column number that a Mention occupies. :param mention: The Mention to evaluate. If a candidate is given, default to its first Mention. :rtype: integer or None
# Builds the GetRecentlyPlayedGames request URL for steamID/count (0 = all recent games)
# and returns the retrieved response in the requested format.
def get_recently_played_games(self, steamID, count=0, format=None): parameters = {'steamid' : steamID, 'count' : count} if format is not None: parameters['format'] = format url = self.create_request_url(self.interface, 'GetRecentlyPlayedGames', 1, parameters) data = self.retrieve_request(url) return self.return_data(data, format=format)
Request a list of recently played games by a given steam id. steamID: The users ID count: Number of games to return. (0 is all recent games.) format: Return format. None defaults to json. (json, xml, vdf)
# Wires the behaviour to this agent (including each FSM state for FSMBehaviour
# subclasses), attaches the message template, registers it, and starts it if the agent
# is already alive.
def add_behaviour(self, behaviour, template=None): behaviour.set_agent(self) if issubclass(type(behaviour), FSMBehaviour): for _, state in behaviour.get_states().items(): state.set_agent(self) behaviour.set_template(template) self.behaviours.append(behaviour) if self.is_alive(): behaviour.start()
Adds and starts a behaviour to the agent. If template is not None it is used to match new messages and deliver them to the behaviour. Args: behaviour (spade.behaviour.CyclicBehaviour): the behaviour to be started template (spade.template.Template, optional): the template to match messages with (Default value = None)
# Competence progress over the trailing progress_win_size indices of the stored data.
def progress_all(self): return self.progress_idxs(range(np.shape(self.get_data_x())[0] - self.progress_win_size, np.shape(self.get_data_x())[0]))
Competence progress of the overall tree.
# Looks up the constraint-context parameter for each uniqueid this parameter
# participates in, returning the list of constraining expressions.
def in_constraints(self): expressions = [] for uniqueid in self._in_constraints: expressions.append(self._bundle.get_parameter(context='constraint', uniqueid=uniqueid)) return expressions
returns a list of the expressions in which this parameter constrains another
# Attempts to delete every backing SageMaker model, collecting failures and raising one
# summary exception naming all models that could not be deleted.
def delete_model(self): request_failed = False failed_models = [] for model_name in self._model_names: try: self.sagemaker_session.delete_model(model_name) except Exception: request_failed = True failed_models.append(model_name) if request_failed: raise Exception('One or more models cannot be deleted, please retry. \n' 'Failed models: {}'.format(', '.join(failed_models)))
Deletes the Amazon SageMaker models backing this predictor.
# Applies partial inheritance of 'exclude', then customs-property and unresolved-property
# inheritance across every contained timeperiod.
def apply_inheritance(self): self.apply_partial_inheritance('exclude') for i in self: self.get_customs_properties_by_inheritance(i) for timeperiod in self: self.get_unresolved_properties_by_inheritance(timeperiod)
The only interesting property to inherit is exclude :return: None
def create_dirs(path):
    """Create all directories mentioned in the given path.

    Useful before writing a new file at ``path``: a final component that
    contains a dot is treated as a file name and skipped.

    :param path: Path of a file or directory
    """
    # Idiom fix: use the `in` operator instead of calling __contains__ directly.
    if '.' in os.path.basename(path):
        path = os.path.dirname(path)
    # exist_ok avoids the check-then-create race of the original, and the
    # truthiness guard avoids makedirs('') for a bare filename like 'f.txt'.
    if path:
        os.makedirs(path, exist_ok=True)
Creates all directories mentioned in the given path. Useful to write a new file with the specified path. It carefully skips the file-name in the given path. :param path: Path of a file or directory
# Syncs one object from the REST API into the local DB: builds a lazy fetch of the
# latest version and hands it to the shared _update machinery.
def update(self, res, pk, depth=1, since=None): fetch = lambda: self._fetcher.fetch_latest(res, pk, 1, since=since) self._update(res, fetch, depth)
Try to sync an object to the local database, in case of failure where a referenced object is not found, attempt to fetch said object from the REST api
def set_reprompt_text(self, text):
    """Set response reprompt output speech as plain text type.

    Args:
        text: str. Response speech used when type is 'PlainText'.
            Cannot exceed 8,000 characters.
    """
    speech = self.response.reprompt.outputSpeech
    speech.type = 'PlainText'
    speech.text = text
Set response reprompt output speech as plain text type. Args: text: str. Response speech used when type is 'PlainText'. Cannot exceed 8,000 characters.
def merge_lists(*args):
    """Merge an arbitrary number of lists into a single deduped list.

    Deduplication keys on each contact's ``value``; a later occurrence of
    the same value replaces the earlier one.

    Args:
        *args: Two or more lists

    Returns:
        A deduped merged list of all the provided lists as a single list
    """
    merged = {}
    for contact_list in args:
        # Skip None / empty lists, mirroring filter(None, args).
        if not contact_list:
            continue
        for contact in contact_list:
            merged[contact.value] = contact
    return list(merged.values())
Merge an arbitrary number of lists into a single list and dedupe it Args: *args: Two or more lists Returns: A deduped merged list of all the provided lists as a single list
# Inserts a (job, what) row into the timestamps table under the instance lock.
def add_time_stamp(self, db_id, name): with self.lock: self.cur.execute( 'insert into "timestamps" ("job", "what")' 'values (?, ?);', (db_id, name))
Add a timestamp to the database.
# Marks both client and server sides terminated, closes the underlying stream if any,
# then closes the WebSocket connection itself.
def _abort(self) -> None: self.client_terminated = True self.server_terminated = True if self.stream is not None: self.stream.close() self.close()
Instantly aborts the WebSocket connection by closing the socket
# POST handler: a resource exactly matching the JSON body yields 204 (after validation),
# otherwise a new resource is validated, persisted and returned with 201; validation
# failures raise BadRequestException (400).
def post(self): resource = self.__model__.query.filter_by(**request.json).first() if resource: error_message = is_valid_method(self.__model__, resource) if error_message: raise BadRequestException(error_message) return self._no_content_response() resource = self.__model__(**request.json) error_message = is_valid_method(self.__model__, resource) if error_message: raise BadRequestException(error_message) db.session().add(resource) db.session().commit() return self._created_response(resource)
Return the JSON representation of a new resource created through an HTTP POST call. :returns: ``HTTP 201`` if a resource is properly created :returns: ``HTTP 204`` if the resource already exists :returns: ``HTTP 400`` if the request is malformed or missing data
# Normalized per-feature importance scores from the gbtree booster's get_score;
# non-tree booster types raise AttributeError.
def feature_importances_(self): if getattr(self, 'booster', None) is not None and self.booster != 'gbtree': raise AttributeError('Feature importance is not defined for Booster type {}' .format(self.booster)) b = self.get_booster() score = b.get_score(importance_type=self.importance_type) all_features = [score.get(f, 0.) for f in b.feature_names] all_features = np.array(all_features, dtype=np.float32) return all_features / all_features.sum()
Feature importances property .. note:: Feature importance is defined only for tree boosters Feature importance is only defined when the decision tree model is chosen as base learner (`booster=gbtree`). It is not defined for other base learner types, such as linear learners (`booster=gblinear`). Returns ------- feature_importances_ : array of shape ``[n_features]``
def is_in_intervall(value, min_value, max_value, name='variable'):
    """Raise an exception if value is not in an interval.

    Parameters
    ----------
    value : orderable
    min_value : orderable
    max_value : orderable
    name : str
        Name of the variable to print in exception.
    """
    # Guard-clause form: return early on the happy path.
    if min_value <= value <= max_value:
        return
    raise ValueError('{}={} is not in [{}, {}]'
                     .format(name, value, min_value, max_value))
Raise an exception if value is not in an interval. Parameters ---------- value : orderable min_value : orderable max_value : orderable name : str Name of the variable to print in exception.
def save_dash(self, dashboard_id):
    """Save a dashboard's metadata.

    Loads the JSON payload from the ``data`` form field and applies it to
    the dashboard identified by ``dashboard_id``.

    :raises: whatever ``check_ownership`` raises if the current user does
        not own the dashboard.
    """
    session = db.session()
    dash = (session
            .query(models.Dashboard)
            .filter_by(id=dashboard_id).first())
    # Only the owner may save; raises instead of returning False.
    check_ownership(dash, raise_if_false=True)
    data = json.loads(request.form.get('data'))
    self._set_dash_metadata(dash, data)
    session.merge(dash)
    session.commit()
    session.close()
    return json_success(json.dumps({'status': 'SUCCESS'}))
Save a dashboard's metadata
def _id_or_key(list_item): if isinstance(list_item, dict): if 'id' in list_item: return list_item['id'] if 'key' in list_item: return list_item['key'] return list_item
Return the value at key 'id' or 'key'.
def device_not_active(self, addr):
    """Handle an inactive device: mark its ALDB record handled, then notify subscribers."""
    self.aldb_device_handled(addr)
    # Fan the address out to every registered "device not active" callback.
    for notify in self._cb_device_not_active:
        notify(addr)
Handle inactive devices.
def trainable_params(m:nn.Module)->ParamList:
    "Return list of trainable params in `m`."
    # Keep only the parameters that will receive gradients.
    return filter(lambda p: p.requires_grad, m.parameters())
Return list of trainable params in `m`.
def namedtuple_row_strategy(column_names):
    """Namedtuple row strategy, rows returned as named tuples.

    Column names that are not valid Python identifiers are replaced
    with ``col<number>_``.
    """
    import collections

    # Sanitize names so every one is a legal namedtuple field identifier.
    sanitized = [
        name if is_valid_identifier(name) else 'col%s_' % idx
        for idx, name in enumerate(column_names)
    ]
    row_class = collections.namedtuple('Row', sanitized)

    def row_factory(row):
        return row_class(*row)

    return row_factory
Namedtuple row strategy, rows returned as named tuples Column names that are not valid Python identifiers will be replaced with col<number>_
def _render_content(self): if self.content_format == "rst" and docutils_publish is not None: doc_parts = docutils_publish( source=self.raw_content, writer_name="html4css1" ) self.rendered_content = doc_parts['fragment'] elif self.content_format == "rs" and docutils_publish is None: raise RuntimeError("Install docutils to pubilsh reStructuredText") elif self.content_format == "md" and markdown is not None: self.rendered_content = markdown(self.raw_content) elif self.content_format == "md" and markdown is None: raise RuntimeError("Install Markdown to pubilsh markdown") else: self.rendered_content = self.raw_content
Renders the content according to the ``content_format``.
def _validate_args(self, args):
    """Validate the user-supplied transfer arguments.

    Checks that a bucket is set, every subscriber is an
    ``AsperaBaseSubscriber``, the transfer config (if any) is an
    ``AsperaConfig`` whose ``multi_session`` does not exceed the configured
    ascp limit, and every file pair has both a key and a fileobj.

    :raises AssertionError: for a missing bucket or wrong argument types
    :raises ValueError: for too many sessions or an invalid file pair
    """
    assert(args.bucket)
    if args.subscribers:
        for _subscriber in args.subscribers:
            assert(isinstance(_subscriber, AsperaBaseSubscriber))
    if (args.transfer_config):
        assert(isinstance(args.transfer_config, AsperaConfig))
        # Hard cap: more sessions than ascp supports would fail downstream.
        if args.transfer_config.multi_session > self._config.ascp_max_concurrent:
            raise ValueError("Max sessions is %d" % self._config.ascp_max_concurrent)
    for _pair in args.file_pair_list:
        if not _pair.key or not _pair.fileobj:
            raise ValueError("Invalid file pair")
validate the user arguments
def by_sql(cls, sql, engine_or_session):
    """Query this model with a raw SQL statement or textual SQL.

    A session created here on the fly is closed before returning
    (only on the success path, matching ``ensure_session`` semantics).
    """
    session, created_here = ensure_session(engine_or_session)
    rows = session.query(cls).from_statement(sql).all()
    if created_here:
        session.close()
    return rows
Query with sql statement or texture sql.
def jtag_enable(self):
    """Enable JTAG output on the controller.

    JTAG operations executed before this call return useless data or fail.

    :raises JTAGAlreadyEnabledError: if JTAG was already enabled (the
        enabled flag is still recorded first).
    :raises JTAGEnableFailedError: for any other non-zero status code.
    """
    status, _ = self.bulkCommand(_BMSG_ENABLE_JTAG)
    if status == 0:
        self._jtagon = True
        return
    if status == 3:
        # Controller says JTAG is already on; remember that, then complain.
        self._jtagon = True
        raise JTAGAlreadyEnabledError()
    raise JTAGEnableFailedError("Error enabling JTAG. Error code: %s." %status)
Enables JTAG output on the controller. JTAG operations executed before this function is called will return useless data or fail. Usage: >>> from proteusisc import getAttachedControllers, bitarray >>> c = getAttachedControllers()[0] >>> c.jtag_enable() >>> c.write_tms_bits(bitarray("001011111"), return_tdo=True) >>> c.jtag_disable()
def refresh(self, client_name, refresh_token, **params):
    """Exchange a refresh token for a new access token.

    :param client_name: name of one of the configured clients
    :param refresh_token: the refresh token to exchange
    :param params: extra parameters forwarded to the token endpoint
    :returns: whatever the client's ``get_access_token`` returns
    """
    oauth_client = self.client(client_name, logger=self.app.logger)
    return oauth_client.get_access_token(
        refresh_token, grant_type='refresh_token', **params)
Exchange a refresh token for a new access token. :param client_name: A name of one of the configured clients :param refresh_token: The refresh token to exchange :param params: Extra parameters forwarded to the token endpoint :returns: a coroutine resolving to the new access token
def sub(a, b):
    """Subtract two values, treating ``None`` as a missing operand.

    Both ``None`` -> ``None``; only ``a`` ``None`` -> ``-b``;
    only ``b`` ``None`` -> ``a``; otherwise ``a - b``.
    """
    if a is None and b is None:
        return None
    if a is None:
        return -1 * b
    if b is None:
        return a
    return a - b
Subtract two values, ignoring None
def make_heartbeat(port, path, peer_uid, node_uid, app_id):
    """Prepare the heart beat UDP packet (little endian).

    Layout: format version (1 byte), packet type (1 byte), Herald HTTP port
    (2 bytes), followed by four length-prefixed (2-byte) UTF-8 strings:
    servlet path, peer UID, node UID and application ID.

    :param port: The port to access the Herald HTTP server
    :param path: The path to the Herald HTTP servlet
    :param peer_uid: The UID of the peer
    :param node_uid: The UID of the node
    :param app_id: Application ID
    :return: The heart beat packet content (byte array)
    """
    chunks = [struct.pack("<BBH", PACKET_FORMAT_VERSION,
                          PACKET_TYPE_HEARTBEAT, port)]
    for value in (path, peer_uid, node_uid, app_id):
        encoded = to_bytes(value)
        chunks.append(struct.pack("<H", len(encoded)))
        chunks.append(encoded)
    return b"".join(chunks)
Prepares the heart beat UDP packet Format : Little endian * Kind of beat (1 byte) * Herald HTTP server port (2 bytes) * Herald HTTP servlet path length (2 bytes) * Herald HTTP servlet path (variable, UTF-8) * Peer UID length (2 bytes) * Peer UID (variable, UTF-8) * Node UID length (2 bytes) * Node UID (variable, UTF-8) * Application ID length (2 bytes) * Application ID (variable, UTF-8) :param port: The port to access the Herald HTTP server :param path: The path to the Herald HTTP servlet :param peer_uid: The UID of the peer :param node_uid: The UID of the node :param app_id: Application ID :return: The heart beat packet content (byte array)
def validate_language_key(obj, key):
    """Validate all nested "language" keys in ``obj[key]``.

    Only enforced on the ``localmongodb`` backend; dict and list values
    under ``key`` are walked, other types are ignored.

    Args:
        obj (dict): dictionary whose "language" key is to be validated.

    Returns:
        None: validation successful

    Raises:
        ValidationError: in case a language value is not valid.
    """
    backend = bigchaindb.config['database']['backend']
    if backend == 'localmongodb':
        data = obj.get(key, {})
        if isinstance(data, dict):
            validate_all_values_for_key_in_obj(data, 'language', validate_language)
        elif isinstance(data, list):
            validate_all_values_for_key_in_list(data, 'language', validate_language)
Validate all nested "language" key in `obj`. Args: obj (dict): dictionary whose "language" key is to be validated. Returns: None: validation successful Raises: ValidationError: will raise exception in case language is not valid.
def _build_keys(self, slug, date=None, granularity='all'):
    """Build the redis keys used to store metrics.

    * ``slug`` -- slug used for a metric, e.g. "user-signups"
    * ``date`` -- optional ``datetime.datetime`` for the metric's time
      period; defaults to the current UTC date and time.
    * ``granularity`` -- "all" (default) or a single granularity such as
      "yearly", "monthly", "daily", ...

    Returns a list of key strings.
    """
    slug = slugify(slug)
    if date is None:
        date = datetime.utcnow()
    patterns = self._build_key_patterns(slug, date)
    if granularity == "all":
        # Every granularity's key pattern.
        return list(patterns.values())
    return [patterns[granularity]]
Builds redis keys used to store metrics. * ``slug`` -- a slug used for a metric, e.g. "user-signups" * ``date`` -- (optional) A ``datetime.datetime`` object used to generate the time period for the metric. If omitted, the current date and time (in UTC) will be used. * ``granularity`` -- Must be one of: "all" (default), "yearly", "monthly", "weekly", "daily", "hourly", "minutes", or "seconds". Returns a list of strings.
def delay(self, dl=0):
    """Sleep for ``dl`` seconds.

    ``None`` means "use the instance default ``self.dl``"; negative values
    are ignored with a warning written to stderr.
    """
    if dl is None:
        time.sleep(self.dl)
        return
    if dl < 0:
        sys.stderr.write(
            "delay cannot less than zero, this takes no effects.\n")
        return
    time.sleep(dl)
Delay for ``dl`` seconds.
def rebase(self):
    """Change the backing-file entry of the exported disk.

    In standalone mode the image is merged with its base (empty backing
    file); otherwise the backing file is rewritten to point at the
    exported parent image. Please refer to 'qemu-img rebase' manual for
    more info.

    :raises utils.LagoUserException: if a layered export has more than one
        layer.
    """
    if self.standalone:
        rebase_msg = 'Merging layered image with base'
    else:
        rebase_msg = 'Rebase'
    with LogTask(rebase_msg):
        # A single qemu-info entry means there is no backing chain at all.
        if len(self.src_qemu_info) == 1:
            return
        if self.standalone:
            utils.qemu_rebase(target=self.dst, backing_file="")
        else:
            if len(self.src_qemu_info) > 2:
                raise utils.LagoUserException(
                    'Layered export is currently supported for one '
                    'layer only. You can try to use Standalone export.'
                )
            parent = self.src_qemu_info[0]['backing-filename']
            parent = os.path.basename(parent)
            # NOTE(review): presumably strips a format prefix such as
            # 'qcow2:name' from the backing filename — confirm.
            try:
                parent = parent.split(':', 1)[1]
            except IndexError:
                pass
            parent = './{}'.format(parent)
            # safe=False: no data is copied; the parent is assumed valid.
            utils.qemu_rebase(
                target=self.dst, backing_file=parent, safe=False
            )
Change the backing-file entry of the exported disk. Please refer to 'qemu-img rebase' manual for more info.
def has_chess960_castling_rights(self) -> bool:
    """Checks if there are castling rights that are only possible in Chess960.

    Temporarily switches the board into Chess960 mode so that
    ``clean_castling_rights()`` keeps non-standard rights, then restores
    the previous mode before inspecting the result.
    """
    chess960 = self.chess960
    self.chess960 = True
    castling_rights = self.clean_castling_rights()
    self.chess960 = chess960

    # Any castling right outside the four corner squares is Chess960-only.
    if castling_rights & ~BB_CORNERS:
        return True
    # A back-rank right with the king not on its standard square (e1/e8)
    # is also only possible in Chess960.
    if castling_rights & BB_RANK_1 and not self.occupied_co[WHITE] & self.kings & BB_E1:
        return True
    if castling_rights & BB_RANK_8 and not self.occupied_co[BLACK] & self.kings & BB_E8:
        return True

    return False
Checks if there are castling rights that are only possible in Chess960.
def _api_wrapper(fn):
    """API function decorator that performs rate limiting and error checking.

    The wrapped call lazily starts the rate-limit timer thread on first
    use, converts date params to epoch strings, drops ``None`` params,
    acquires the rate-limit semaphore, then validates the JSON response.

    :raises PoloniexCommandException: if the response carries an 'error' key
    :raises Exception: if the body cannot be decoded as JSON
    """
    def _convert(value):
        # Dates are serialised as Unix-epoch strings.
        # NOTE(review): '%s' strftime is platform-specific — confirm.
        if isinstance(value, _datetime.date):
            return value.strftime('%s')
        return value

    @_six.wraps(fn)
    def _fn(self, command, **params):
        # Start the shared timer thread exactly once (guarded by a lock;
        # .ident is None until a thread has been started).
        with self.startup_lock:
            if self.timer.ident is None:
                self.timer.setDaemon(True)
                self.timer.start()
        # Drop unset parameters and normalise date values.
        params = dict((key, _convert(value)) for key, value in _six.iteritems(params)
                      if value is not None)
        # Rate limiting: block until a slot is available.
        self.semaphore.acquire()
        resp = fn(self, command, **params)
        try:
            respdata = resp.json(object_hook=_AutoCastDict)
        except:
            # Surface an HTTP error first; otherwise report the decode failure.
            resp.raise_for_status()
            raise Exception('No JSON object could be decoded')
        if 'error' in respdata:
            raise PoloniexCommandException(respdata['error'])
        resp.raise_for_status()
        return respdata
    return _fn
API function decorator that performs rate limiting and error checking.
def line(h1: Histogram1D, **kwargs) -> dict:
    """Line plot of 1D histogram values.

    Points are placed horizontally at the bin centers.

    Parameters
    ----------
    h1 : physt.histogram1d.Histogram1D
        The histogram whose values are plotted.
    """
    stroke_width = kwargs.pop("lw", DEFAULT_STROKE_WIDTH)
    marks = [{
        "type": "line",
        "encode": {
            "enter": {
                "x": {"scale": "xscale", "field": "x"},
                "y": {"scale": "yscale", "field": "y"},
                "stroke": {"scale": "series", "field": "c"},
                "strokeWidth": {"value": stroke_width},
            }
        },
        "from": {"data": "series"},
    }]
    return _scatter_or_line(h1, mark_template=marks, kwargs=kwargs)
Line plot of 1D histogram values. Points are horizontally placed in bin centers. Parameters ---------- h1 : physt.histogram1d.Histogram1D The one-dimensional histogram whose bin values are plotted.
def get_paths(path_tokens):
    """Given a list of parser path tokens, return a list of path objects.

    The result is in *reverse* order of ``path_tokens`` (matching the
    historic recursive implementation, which popped from the end).

    FIX: the old version consumed the caller's list via ``pop()``,
    emptying it as a side effect; this version leaves the input intact.
    """
    return [PathToken(token.alias, token.path)
            for token in reversed(path_tokens)]
Given a list of parser path tokens, return a list of path objects for them.
def wait_for_keys_modified(self, *keys, modifiers_to_check=_mod_keys, timeout=0):
    """The same as wait_for_keys, but returns a frozenset containing the
    pressed key plus the held modifier keys.

    :param modifiers_to_check: iterable of modifier flags for which the
        function checks whether they are pressed
    :param timeout: forwarded to ``wait_for_keys``
    """
    # NOTE(review): the modifier bitmask is sampled *before*
    # wait_for_keys returns — confirm that ordering is intended.
    set_mods = pygame.key.get_mods()
    return frozenset.union(
        frozenset([self.wait_for_keys(*keys, timeout=timeout)]),
        EventListener._contained_modifiers(set_mods, modifiers_to_check))
The same as wait_for_keys, but returns a frozen_set which contains the pressed key, and the modifier keys. :param modifiers_to_check: iterable of modifiers for which the function will check whether they are pressed :type modifiers: Iterable[int]
def rslv(self, interface: str, name: str=None) -> Tuple[str, int, Optional[str]]:
    """Return the IP address, port and optionally host IP for one of this
    Node's interfaces.

    The interface table is keyed by ``"<name>-<interface>"``; ``name``
    defaults to this node's own name. The host entry is optional and
    resolves to ``None`` when absent.
    """
    if name is None:
        name = self.name
    entry = self.interfaces['{}-{}'.format(name, interface)]
    return entry['ip'], entry['port'], entry.get('host')
Return the IP address, port and optionally host IP for one of this Nodes interfaces.
def date_time_this_month(
        self, before_now=True, after_now=False, tzinfo=None):
    """Gets a DateTime object for the current month.

    :param before_now: include days in current month before today
    :param after_now: include days in current month after today
    :param tzinfo: timezone, instance of datetime.tzinfo subclass
    :example DateTime('2012-04-04 11:02:02')
    :return DateTime
    """
    now = datetime.now(tzinfo)
    this_month_start = now.replace(
        day=1, hour=0, minute=0, second=0, microsecond=0)
    next_month_start = this_month_start + \
        relativedelta.relativedelta(months=1)
    # Pick the sampling window according to the before/after flags;
    # with both flags False, "now" itself is returned.
    if before_now and after_now:
        return self.date_time_between_dates(
            this_month_start, next_month_start, tzinfo)
    if after_now:
        return self.date_time_between_dates(now, next_month_start, tzinfo)
    if before_now:
        return self.date_time_between_dates(this_month_start, now, tzinfo)
    return now
Gets a DateTime object for the current month. :param before_now: include days in current month before today :param after_now: include days in current month after today :param tzinfo: timezone, instance of datetime.tzinfo subclass :example DateTime('2012-04-04 11:02:02') :return DateTime
def _setBitOn(x, bitNum):
    """Set bit 'bitNum' to True.

    Args:
        * x (int): The value before (must be >= 0).
        * bitNum (int): The zero-based bit number to set to True.

    Returns:
        The value after setting the bit, as an integer.
        Example: x = 4 (0b100), bitNum = 0 -> 5 (0b101).
    """
    _checkInt(x, minvalue=0, description='input value')
    _checkInt(bitNum, minvalue=0, description='bitnumber')
    mask = 1 << bitNum
    return x | mask
Set bit 'bitNum' to True. Args: * x (int): The value before. * bitNum (int): The bit number that should be set to True. Returns: The value after setting the bit. This is an integer. For example: For x = 4 (dec) = 0100 (bin), setting bit number 0 results in 0101 (bin) = 5 (dec).
def _get_query_uri(self): if 'VCAP_SERVICES' in os.environ: services = json.loads(os.getenv('VCAP_SERVICES')) predix_timeseries = services['predix-timeseries'][0]['credentials'] return predix_timeseries['query']['uri'].partition('/v1')[0] else: return predix.config.get_env_value(self, 'query_uri')
Returns the URI endpoint for performing queries of a Predix Time Series instance from environment inspection.
def list_create(self, title):
    """Create a new list with the given `title`.

    Returns the `list dict`_ of the created list.
    """
    # NOTE: __generate_params inspects locals(), so the parameter name
    # 'title' is part of the request payload — do not rename it.
    params = self.__generate_params(locals())
    return self.__api_request('POST', '/api/v1/lists', params)
Create a new list with the given `title`. Returns the `list dict`_ of the created list.
def iter_cofactors(self, vs=None):
    r"""Iterate through the cofactors of a function over N variables.

    The *vs* argument is a sequence of :math:`N` Boolean variables.

    The *cofactor* of :math:`f(x_1, x_2, \dots, x_i, \dots, x_n)`
    with respect to variable :math:`x_i` is:
    :math:`f_{x_i} = f(x_1, x_2, \dots, 1, \dots, x_n)`

    The *cofactor* with respect to variable :math:`x_i'` is:
    :math:`f_{x_i'} = f(x_1, x_2, \dots, 0, \dots, x_n)`
    """
    vs = self._expect_vars(vs)
    # One restriction per point in the Boolean space over vs.
    for point in iter_points(vs):
        yield self.restrict(point)
r"""Iterate through the cofactors of a function over N variables. The *vs* argument is a sequence of :math:`N` Boolean variables. The *cofactor* of :math:`f(x_1, x_2, \dots, x_i, \dots, x_n)` with respect to variable :math:`x_i` is: :math:`f_{x_i} = f(x_1, x_2, \dots, 1, \dots, x_n)` The *cofactor* of :math:`f(x_1, x_2, \dots, x_i, \dots, x_n)` with respect to variable :math:`x_i'` is: :math:`f_{x_i'} = f(x_1, x_2, \dots, 0, \dots, x_n)`
def open(cls, filename):
    """Provide a handle-like object for streaming the named remote blob."""
    remote_info = cls.parse_remote(filename)
    service = cls.connect(filename)
    return BlobHandle(
        blob_service=service,
        container=remote_info.container,
        blob=remote_info.blob,
        chunk_size=cls._BLOB_CHUNK_DATA_SIZE,
    )
Provide a handle-like object for streaming.