Columns: Unnamed: 0 (int64, values 0–389k), code (string, lengths 26–79.6k), docstring (string, lengths 1–46.9k)
372,300
def _exclude_on_fail(self, df):
    idx = df if isinstance(df, pd.MultiIndex) else _meta_idx(df)
    # The column name and log message are reconstructions; the original
    # string literals were stripped during extraction.
    self.meta.loc[idx, 'exclude'] = True
    logger().info('{} scenario{} failed validation'.format(
        len(idx), '' if len(idx) == 1 else 's'))
Assign a selection of scenarios as `exclude: True` in meta
372,301
def register_function(self, patterns, instances=None, **reg_kwargs):
    def wrapper(function):
        self.register(patterns, function, instances=instances, **reg_kwargs)
        return function
    return wrapper
Decorator for register.
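A minimal usage sketch; the `registry` object and the pattern are hypothetical:

    # Hypothetical: register handle_sensor() for events matching 'sensor.*'.
    @registry.register_function(['sensor.*'])
    def handle_sensor(event):
        print(event)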
372,302
def add_link(self, link):
    if isinstance(link, link_module.Link):
        self.links.append(link)
    else:
        raise TypeError("Type Error: received {}, but requires Link.".format(
            type(link).__name__))
Add a Link. :type link: :class: `~opencensus.trace.link.Link` :param link: A Link object.
372,303
def _set_evpn_instance(self, v, load=False):
    # Auto-generated pyangbind setter. The YANG key names, extension
    # dictionaries and namespace literals were stripped during extraction;
    # the values below follow the standard pyangbind pattern and should be
    # treated as reconstructions.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=YANGListType(
                "instance_name", evpn_instance.evpn_instance,
                yang_name="evpn-instance", rest_name="evpn-instance",
                parent=self, is_container='list', user_ordered=False,
                path_helper=self._path_helper, yang_keys='instance-name'),
            is_container='list', yang_name="evpn-instance",
            rest_name="evpn-instance", parent=self,
            path_helper=self._path_helper, extmethods=self._extmethods,
            register_paths=True, yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'evpn_instance must be of a type compatible with list',
            'defined-type': "list",
            'generated-type': 'YANGDynClass(...)',  # literal stripped
        })
    self.__evpn_instance = t
    if hasattr(self, '_set'):
        self._set()
Setter method for evpn_instance, mapped from YANG variable /routing_system/evpn_config/evpn/evpn_instance (list) If this variable is read-only (config: false) in the source YANG file, then _set_evpn_instance is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_evpn_instance() directly. YANG Description: EVPN instance config
372,304
def _db_urls(opts: Namespace) -> Tuple[str, str]:
    return (opts.crcdb.replace("//", "//{crcuser}:{crcpassword}@".format(**opts.__dict__)),
            opts.ontodb.replace("//", "//{ontouser}:{ontopassword}@".format(**opts.__dict__)))
Return the crc and ontology db urls :param opts: options :return: Tuple w/ crc and ontology url
372,305
def report_data(self, entity_data):
    response = None
    try:
        response = self.client.post(self.__data_url(),
                                    data=self.to_json(entity_data),
                                    headers={"Content-Type": "application/json"},
                                    timeout=0.8)
        # Compare the status code by value, not identity.
        if response.status_code == 200:
            self.last_seen = datetime.now()
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("report_data: host agent connection error")
    finally:
        return response
Used to report entity data (metrics & snapshot) to the host agent.
372,306
def recv_file_from_host(src_file, dst_filename, filesize, dst_mode='wb'):
    import sys
    import ubinascii
    if HAS_BUFFER:
        try:
            import pyb
            usb = pyb.USB_VCP()
        except:
            try:
                import machine
                usb = machine.USB_VCP()
            except:
                usb = None
        if usb and usb.isconnected():
            usb.setinterrupt(-1)
    try:
        with open(dst_filename, dst_mode) as dst_file:
            bytes_remaining = filesize
            if not HAS_BUFFER:
                bytes_remaining *= 2  # hex-encoded transfer doubles the size
            buf_size = BUFFER_SIZE
            write_buf = bytearray(buf_size)
            read_buf = bytearray(buf_size)
            while bytes_remaining > 0:
                # The control character sent here was stripped in
                # extraction; an ACK byte is assumed.
                sys.stdout.write('\x06')
                read_size = min(bytes_remaining, buf_size)
                buf_remaining = read_size
                buf_index = 0
                while buf_remaining > 0:
                    if HAS_BUFFER:
                        bytes_read = sys.stdin.buffer.readinto(read_buf, read_size)
                    else:
                        bytes_read = sys.stdin.readinto(read_buf, read_size)
                    if bytes_read > 0:
                        write_buf[buf_index:bytes_read] = read_buf[0:bytes_read]
                        buf_index += bytes_read
                        buf_remaining -= bytes_read
                if HAS_BUFFER:
                    dst_file.write(write_buf[0:read_size])
                else:
                    dst_file.write(ubinascii.unhexlify(write_buf[0:read_size]))
                bytes_remaining -= read_size
        return True
    except:
        return False
Function which runs on the pyboard. Matches up with send_file_to_remote.
372,307
def post_attachment(self, bugid, attachment):
    # The required keys, default content type and endpoint path are
    # reconstructed from the Bugzilla REST API linked below; the original
    # literals were stripped.
    assert type(attachment) is DotDict
    assert 'file_name' in attachment
    assert 'summary' in attachment
    assert 'data' in attachment
    if 'content_type' not in attachment:
        attachment.content_type = 'text/plain'
    attachment.ids = bugid
    attachment.data = base64.standard_b64encode(
        bytearray(attachment.data, 'utf-8')).decode()
    return self._post('bug/{bugid}/attachment'.format(bugid=bugid),
                      json.dumps(attachment))
http://bugzilla.readthedocs.org/en/latest/api/core/v1/attachment.html#create-attachment
372,308
def get_value(self, node):
    if not isinstance(node, ast.Dict):
        raise TypeError("must be a dictionary")
    evaluator = SafeEvaluator()
    try:
        value = evaluator.run(node)
    except Exception as ex:
        raise ex
    try:
        value = json.loads(json.dumps(value))
        if not isinstance(value, dict):
            raise TypeError
    except (TypeError, ValueError):
        raise TypeError("must be serializable")
    return value
Convert value from an AST node.
372,309
def accumulate(a_generator, cooperator=None):
    if cooperator:
        own_cooperate = cooperator.cooperate
    else:
        own_cooperate = cooperate
    spigot = ValueBucket()
    items = stream_tap((spigot,), a_generator)
    d = own_cooperate(items).whenDone()
    d.addCallback(accumulation_handler, spigot)
    return d
Start a Deferred whose callBack arg is a deque of the accumulation of the values yielded from a_generator. :param a_generator: An iterator which yields some not None values. :return: A Deferred to which the next callback will be called with the yielded contents of the generator function.
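A hedged usage sketch, assuming a running Twisted reactor; the generator and callback are illustrative:

    def numbers():
        for i in range(5):
            yield i

    d = accumulate(numbers())
    # `collected` is the deque of accumulated values.
    d.addCallback(lambda collected: print(list(collected)))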
372,310
def time2internaldate(date_time):
    if isinstance(date_time, (int, float)):
        dt = datetime.fromtimestamp(date_time, timezone.utc).astimezone()
    elif isinstance(date_time, tuple):
        try:
            gmtoff = date_time.tm_gmtoff
        except AttributeError:
            if time.daylight:
                dst = date_time[8]
                if dst == -1:
                    dst = time.localtime(time.mktime(date_time))[8]
                gmtoff = -(time.timezone, time.altzone)[dst]
            else:
                gmtoff = -time.timezone
        delta = timedelta(seconds=gmtoff)
        dt = datetime(*date_time[:6], tzinfo=timezone(delta))
    elif isinstance(date_time, datetime):
        if date_time.tzinfo is None:
            raise ValueError("date_time must be aware")
        dt = date_time
    elif isinstance(date_time, str) and (date_time[0], date_time[-1]) == ('"', '"'):
        return date_time        # Assume in correct format
    else:
        raise ValueError("date_time not of a known type")
    fmt = '"%d-{}-%Y %H:%M:%S %z"'.format(Months[dt.month])
    return dt.strftime(fmt)
Convert date_time to IMAP4 INTERNALDATE representation. Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'. The date_time argument can be a number (int or float) representing seconds since epoch (as returned by time.time()), a 9-tuple representing local time, an instance of time.struct_time (as returned by time.localtime()), an aware datetime instance or a double-quoted string. In the last case, it is assumed to already be in the correct format.
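For instance, converting the current epoch time (the printed value depends on the local timezone):

    import time
    print(time2internaldate(time.time()))
    # e.g. '"05-Jun-2024 13:45:12 +0000"'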
372,311
def search(**criteria):
    results = copy(class_list)
    for (category, value) in criteria.items():
        results &= index[category][value]
    return results
Search registered *component* classes matching the given criteria. :param criteria: search criteria of the form: ``a='1', b='x'`` :return: parts registered with the given criteria :rtype: :class:`set` Will return an empty :class:`set` if nothing is found. :: from cqparts.search import search import cqparts_motors # example of a 3rd party lib # Get all DC motor classes dc_motors = search(type='motor', current_class='dc') # For more complex queries: air_cooled = search(cooling='air') non_aircooled_dcmotors = dc_motors - air_cooled # will be all DC motors that aren't air-cooled
372,312
def dist_baystat(src, tar, min_ss_len=None, left_ext=None, right_ext=None): return Baystat().dist(src, tar, min_ss_len, left_ext, right_ext)
Return the Baystat distance. This is a wrapper for :py:meth:`Baystat.dist`. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison min_ss_len : int Minimum substring length to be considered left_ext : int Left-side extension length right_ext : int Right-side extension length Returns ------- float The Baystat distance Examples -------- >>> round(dist_baystat('cat', 'hat'), 12) 0.333333333333 >>> dist_baystat('Niall', 'Neil') 0.6 >>> round(dist_baystat('Colin', 'Cuilen'), 12) 0.833333333333 >>> dist_baystat('ATCG', 'TAGC') 1.0
372,313
def _getEventFromUid(self, request, uid):
    event = getEventFromUid(request, uid)
    home = request.site.root_page
    if event.get_ancestors().filter(id=home.id).exists():
        return event
Try and find an event with the given UID in this site.
372,314
def urlopen(self, method, url, redirect=True, **kw):
    # Keyword names reconstructed from urllib3's PoolManager.urlopen; the
    # original literals were stripped.
    u = parse_url(url)
    conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
    kw['assert_same_host'] = False
    kw['redirect'] = False
    if 'headers' not in kw:
        kw['headers'] = self.headers.copy()
    if self.proxy is not None and u.scheme == "http":
        response = conn.urlopen(method, url, **kw)
    else:
        response = conn.urlopen(method, u.request_uri, **kw)
    redirect_location = redirect and response.get_redirect_location()
    if not redirect_location:
        return response
    redirect_location = urljoin(url, redirect_location)
    if response.status == 303:
        method = 'GET'
    retries = kw.get('retries')
    if not isinstance(retries, Retry):
        retries = Retry.from_int(retries, redirect=redirect)
    if (retries.remove_headers_on_redirect
            and not conn.is_same_host(redirect_location)):
        for header in retries.remove_headers_on_redirect:
            kw['headers'].pop(header, None)
    try:
        retries = retries.increment(method, url, response=response, _pool=conn)
    except MaxRetryError:
        if retries.raise_on_redirect:
            raise
        return response
    kw['retries'] = retries
    kw['redirect'] = redirect
    log.info("Redirecting %s -> %s", url, redirect_location)
    return self.urlopen(method, redirect_location, **kw)
Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. The given ``url`` parameter must be absolute, such that an appropriate :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
372,315
def iterable(obj):
    if isinstance(obj, string_types):
        return False
    # Attribute name reconstructed; the original literal was stripped.
    if hasattr(obj, '__iter__'):
        return True
    try:
        len(obj)
    except TypeError:
        return False
    return True
Returns ``True`` if *obj* can be iterated over and is *not* a string.
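A quick illustration of the intended behaviour:

    assert iterable([1, 2, 3])
    assert iterable({'a': 1})
    assert not iterable("a string")   # strings are explicitly excluded
    assert not iterable(42)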
372,316
def release_lock(dax, key, lock_mode=LockMode.wait):
    lock_fxn = _lock_fxn("unlock", lock_mode, False)
    return dax.get_scalar(
        dax.callproc(lock_fxn,
                     key if isinstance(key, (list, tuple)) else [key])[0])
Manually release a pg advisory lock. :dax: a DataAccess instance :key: either a big int or a 2-tuple of integers :lock_mode: a member of the LockMode enum
372,317
def is_import_interface(instrument_interface):
    if IInstrumentImportInterface.providedBy(instrument_interface):
        return True
    # Attribute name reconstructed; the original literal was stripped.
    if hasattr(instrument_interface, '__name__'):
        obj_name = instrument_interface.__name__.replace(__name__, "")
        if obj_name[1:] in __all__ and hasattr(instrument_interface, "Import"):
            return True
    return False
Returns whether the instrument interface passed in is for results import
372,318
def input(self):
    delimiters = dict(
        zoom=self.init_zoom_levels,
        bounds=self.init_bounds,
        process_bounds=self.bounds,
        effective_bounds=self.effective_bounds
    )
    raw_inputs = {
        get_hash(v): v
        for zoom in self.init_zoom_levels
        if "input" in self._params_at_zoom[zoom]
        for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
        if v is not None
    }
    initialized_inputs = {}
    for k, v in raw_inputs.items():
        if isinstance(v, str):
            logger.debug("load input reader for simple input %s", v)
            try:
                reader = load_input_reader(
                    dict(
                        path=absolute_path(path=v, base_dir=self.config_dir),
                        pyramid=self.process_pyramid,
                        pixelbuffer=self.process_pyramid.pixelbuffer,
                        delimiters=delimiters
                    ),
                    readonly=self.mode == "readonly")
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError("error when loading input %s: %s" % (v, e))
            logger.debug("input reader for simple input %s is %s", v, reader)
        elif isinstance(v, dict):
            logger.debug("load input reader for abstract input %s", v)
            try:
                reader = load_input_reader(
                    dict(
                        abstract=deepcopy(v),
                        pyramid=self.process_pyramid,
                        pixelbuffer=self.process_pyramid.pixelbuffer,
                        delimiters=delimiters,
                        conf_dir=self.config_dir
                    ),
                    readonly=self.mode == "readonly")
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError("error when loading input %s: %s" % (v, e))
            logger.debug("input reader for abstract input %s is %s", v, reader)
        else:
            raise MapcheteConfigError("invalid input type %s" % type(v))
        reader.bbox(out_crs=self.process_pyramid.crs)
        initialized_inputs[k] = reader
    return initialized_inputs
Input items used for process stored in a dictionary. Keys are the hashes of the input parameters, values the respective InputData classes.
372,319
def __validate(data, classes, labels):
    "Validator of inputs."
    # Error messages reconstructed; the original literals were stripped.
    if not isinstance(data, dict):
        raise TypeError('data must be a dict')
    if not isinstance(labels, dict):
        raise TypeError('labels must be a dict')
    if classes is not None:
        if not isinstance(classes, dict):
            raise TypeError('classes must be a dict')
        if not len(data) == len(labels) == len(classes):
            raise ValueError('data, labels and classes must have the same length')
        if not set(list(data)) == set(list(labels)) == set(list(classes)):
            raise ValueError('data, labels and classes must have matching keys')
    num_features_in_elements = np.unique([sample.size for sample in data.values()])
    if len(num_features_in_elements) > 1:
        raise ValueError('all samples must have the same number of features')
    return True
Validator of inputs.
372,320
def contains_plural_field(model, fields):
    source_model = model
    for orm_path in fields:
        model = source_model
        # Separator literals reconstructed from Django ORM conventions.
        bits = orm_path.lstrip('-').split('__')
        for bit in bits[:-1]:
            field = model._meta.get_field(bit)
            if field.many_to_many or field.one_to_many:
                return True
            model = get_model_at_related_field(model, bit)
    return False
Returns a boolean indicating if ``fields`` contains a relationship to multiple items.
372,321
def full_data(self):
    data = [
        self.chat.title,
        self._username(),
        self._type(),
        self._id()
    ]
    return " ".join(filter(None, data))
Returns all the info available for the chat in the following format: title [username] (type) <id> If any data is not available, it is not added.
372,322
def load(self):
    fd = None
    try:
        obj = parse_dot_file(
            self.dot_file.absolute_path
        )
    finally:
        if fd is not None:
            fd.close()
    return obj
Load the file.
372,323
def get_indices(self, labels):
    self.check_labels(labels)
    return np.searchsorted(self.labels, labels)
Find the indices of the input ``labels``. Parameters ---------- labels : int, array-like (1D, int) The label numbers(s) to find. Returns ------- indices : int `~numpy.ndarray` An integer array of indices with the same shape as ``labels``. If ``labels`` is a scalar, then the returned index will also be a scalar. Raises ------ ValueError If any input ``labels`` are invalid.
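To illustrate, for a hypothetical `segm` object whose ``labels`` attribute is ``[1, 3, 7]``:

    idx = segm.get_indices(3)        # -> 1
    idx = segm.get_indices([1, 7])   # -> array([0, 2])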
372,324
def get_connection_details(session, vcenter_resource_model, resource_context):
    user = vcenter_resource_model.user
    vcenter_url = resource_context.address
    password = session.DecryptPassword(vcenter_resource_model.password).Value
    return VCenterConnectionDetails(vcenter_url, user, password)
Methods retrieves the connection details from the vcenter resource model attributes. :param CloudShellAPISession session: :param VMwarevCenterResourceModel vcenter_resource_model: Instance of VMwarevCenterResourceModel :param ResourceContextDetails resource_context: the context of the command
372,325
def calc_trees(self, indices=None, task_interface=None,
               jobhandler=default_jobhandler, batchsize=1,
               show_progress=True, **kwargs):
    if indices is None:
        indices = list(range(len(self)))
    if task_interface is None:
        task_interface = tasks.RaxmlTaskInterface()
    records = [self[i] for i in indices]
    args, to_delete = task_interface.scrape_args(records, **kwargs)
    # Progress message reconstructed; the original literal was stripped.
    msg = 'Calculating {} trees'.format(task_interface.name) if show_progress else ''
    map_result = jobhandler(task_interface.get_task(), args, msg, batchsize)
    with fileIO.TempFileList(to_delete):
        for rec, result in zip(records, map_result):
            rec.parameters.construct_from_dict(result)
Infer phylogenetic trees for the loaded Alignments :param indices: Only run inference on the alignments at these given indices :param task_interface: Inference tool specified via TaskInterface (default RaxmlTaskInterface) :param jobhandler: Launch jobs via this JobHandler (default SequentialJobHandler; also available are ThreadpoolJobHandler and ProcesspoolJobHandler for running inference in parallel) :param batchsize: Batch size for Thread- or ProcesspoolJobHandlers) :param kwargs: Remaining arguments to pass to the TaskInterface :return: None
372,326
def add_tag(self, name, value): self.tags.append(Tag(name, value))
:param name: Name of the tag :type name: string :param value: Value of the tag :type value: string
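A minimal usage sketch; `item` is a hypothetical object exposing this method:

    item.add_tag("env", "production")
    item.add_tag("team", "data")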
372,327
def compute_balance_median_ts(self, ts):
    balance = [self.compute_balance_median(ts, t)
               for t in np.arange(0, len(ts) - 1)]
    return balance
Compute the balance at each time 't' of the time series.
372,328
def get_name_as_short_text(cls, name_field: cryptography.x509.Name) -> str:
    common_names = cls.get_common_names(name_field)
    if common_names:
        return common_names[0]
    else:
        return cls.get_name_as_text(name_field)
Convert a name field returned by the cryptography module to a string suitable for displaying it to the user.
372,329
def split(self):
    Path2D = type(self)
    split = []
    paths = self.paths
    discrete = self.discrete
    polygons_closed = self.polygons_closed
    enclosure_directed = self.enclosure_directed
    for root_index, root in enumerate(self.root):
        # NOTE: the loop body that constructed each sub-Path2D from the
        # root's connected paths was lost in extraction; only the cache-id
        # call below survived.
        split[-1]._cache.id_set()
    return np.array(split)
Split a Path2D into multiple Path2D objects where each one has exactly one root curve. Parameters -------------- self : trimesh.path.Path2D Input geometry Returns ------------- split : list of trimesh.path.Path2D Original geometry as separate paths
372,330
def name(self, src=None): return " & ".join(_get_type_name(tt, src) for tt in self._types)
Return string representing the name of this type.
372,331
def desc_from_uri(uri):
    # Query-string keys ('sn', 'sid') and the fallback desc value are
    # reconstructions from the SoCo sources; the original literals were
    # stripped.
    if ":" in uri:
        _, uri = uri.split(":", 1)
    query_string = parse_qs(urlparse(uri, 'http').query)
    if query_string.get('sn'):
        account_serial_number = query_string['sn'][0]
        try:
            account = Account.get_accounts()[account_serial_number]
            desc = "SA_RINCON{}_{}".format(
                account.service_type, account.username)
            return desc
        except KeyError:
            pass
    if query_string.get('sid'):
        service_id = query_string['sid'][0]
        for service in MusicService._get_music_services_data().values():
            if service_id == service["ServiceID"]:
                service_type = service["ServiceType"]
                account = Account.get_accounts_for_service(service_type)
                if not account:
                    break
                account = account[0]
                desc = "SA_RINCON{}_{}".format(
                    account.service_type, account.username)
                return desc
    desc = 'RINCON_AssociatedZPUDN'
    return desc
Create the content of DIDL desc element from a uri. Args: uri (str): A uri, eg: ``'x-sonos-http:track%3a3402413.mp3?sid=2&amp;flags=32&amp;sn=4'`` Returns: str: The content of a desc element for that uri, eg ``'SA_RINCON519_email@example.com'``
372,332
def get_facets_ranges(self):
    # Dict keys ('facet_counts', 'facet_ranges', 'counts') are
    # reconstructed from the Solr response layout; the original literals
    # were stripped.
    if not hasattr(self, 'facet_ranges'):
        self.facet_ranges = {}
        data = self.data
        if 'facet_counts' in data.keys() and type(data['facet_counts']) == dict:
            if 'facet_ranges' in data['facet_counts'].keys() and \
                    type(data['facet_counts']['facet_ranges']) == dict:
                for facetfield in data['facet_counts']['facet_ranges']:
                    if type(data['facet_counts']['facet_ranges'][facetfield]['counts']) == list:
                        l = data['facet_counts']['facet_ranges'][facetfield]['counts']
                        self.facet_ranges[facetfield] = OrderedDict(zip(l[::2], l[1::2]))
                return self.facet_ranges
            else:
                raise SolrResponseError("No Facet Ranges in the Response")
    else:
        return self.facet_ranges
Returns query facet ranges :: >>> res = solr.query('SolrClient_unittest',{ 'q':'*:*', 'facet':True, 'facet.range':'price', 'facet.range.start':0, 'facet.range.end':100, 'facet.range.gap':10 }) >>> res.get_facets_ranges() {'price': {'80': 9, '10': 5, '50': 3, '20': 7, '90': 3, '70': 4, '60': 7, '0': 3, '40': 5, '30': 4}}
372,333
def get_vads_trans_id(vads_site_id, vads_trans_date):
    vads_trans_id = ""
    for i in range(0, 6):
        vads_trans_id += str(random.randint(0, 9))
    return vads_trans_id
Returns a default value for vads_trans_id field. vads_trans_id field is mandatory. It is composed of 6 numeric characters that identify the transaction. There is a unicity constraint between vads_site_id and vads_trans_date (the first 8 characters representing the transaction date). We consider the probability of two identical generated vads_trans_id values occurring on the same day to be negligible.
372,334
def modLocationPort(self, location):
    components = urlparse.urlparse(location)
    reverse_proxy_port = self.father.getHost().port
    reverse_proxy_host = self.father.getHost().host
    _components = components._asdict()
    # Key name and format string reconstructed; originals were stripped.
    _components['netloc'] = '%s:%s' % (
        reverse_proxy_host, reverse_proxy_port
    )
    return urlparse.urlunparse(_components.values())
Ensures that the location port is a the given port value Used in `handleHeader`
372,335
def mktns(self, root):
    tns = root.get("targetNamespace")
    prefix = root.findPrefix(tns)
    if prefix is None:
        log.debug("warning: tns (%s), not mapped to prefix", tns)
        prefix = "tns"
    return (prefix, tns)
Get/create the target namespace.
372,336
def addItem(self, itemParameters, filePath=None, overwrite=False,
            folder=None, dataURL=None, url=None, text=None,
            relationshipType=None, originItemId=None, destinationItemId=None,
            serviceProxyParams=None, metadata=None, multipart=False):
    # Parameter keys below ('overwrite', 'dataUrl', 'url', 'text',
    # 'relationshipType', 'originItemId', 'destinationItemId',
    # 'serviceProxyParams', 'file', 'thumbnail', 'metadata', 'async') are
    # reconstructions based on the ArcGIS REST addItem API; the original
    # literals were stripped during extraction.
    params = {"f": "json"}
    res = ""
    if itemParameters is not None:
        for k, v in itemParameters.value.items():
            if isinstance(v, bool):
                params[k] = json.dumps(v)
            else:
                params[k] = v
        if itemParameters.overwrite is not None:
            params['overwrite'] = json.dumps(overwrite)
        if itemParameters.overwrite != overwrite:
            params['overwrite'] = json.dumps(overwrite)
    if dataURL is not None:
        params['dataUrl'] = dataURL
    if url is not None:
        params['url'] = url
    if text is not None:
        params['text'] = text
    if relationshipType is not None:
        params['relationshipType'] = relationshipType
    if originItemId is not None:
        params['originItemId'] = originItemId
    if destinationItemId is not None:
        params['destinationItemId'] = destinationItemId
    if serviceProxyParams is not None:
        params['serviceProxyParams'] = serviceProxyParams
    url = "%s/addItem" % self.location
    files = {}
    if multipart:
        res = self._addItemMultiPart(itemParameters=itemParameters,
                                     filePath=filePath)
    else:
        if filePath is not None and os.path.isfile(filePath):
            files['file'] = filePath
            params["filename"] = os.path.basename(filePath)
        elif filePath is not None and multipart:
            params["filename"] = os.path.basename(filePath)
        elif filePath is not None and not os.path.isfile(filePath):
            print("{0} not found".format(filePath))
        if 'thumbnail' in params:
            v = params['thumbnail']
            del params['thumbnail']
            files['thumbnail'] = v
        if metadata is not None and os.path.isfile(metadata):
            files['metadata'] = metadata
        if len(files) < 1:
            res = self._post(url, param_dict=params,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
        else:
            params['async'] = False
            res = self._post(url=url, param_dict=params, files=files,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
    if isinstance(res, dict) and "id" not in res:
        raise Exception("Cannot add the item: %s" % res)
    elif isinstance(res, (UserItem, Item)) and res.id is None:
        raise Exception("Cannot add the item: %s" % str(res))
    elif isinstance(res, (UserItem, Item)):
        return res
    else:
        itemId = res['id']
        return UserItem(url="%s/items/%s" % (self.location, itemId),
                        securityHandler=self._securityHandler,
                        proxy_url=self._proxy_url,
                        proxy_port=self._proxy_port)
Adds an item to ArcGIS Online or Portal. The Add Item operation (POST only) is used to upload an item file, submit text content, or submit the item URL to the specified user folder depending on documented items and item types. This operation is available only to the specified user. Inputs: itemParameters - required except for when multipart = True or SD file. This contains all the information regarding type, tags, etc... filePath - if updating the item's content overwrite - if the item exists, it overwrites it folder - id of the folder to place the item dataURL - The URL where the item can be downloaded. The resource will be downloaded and stored as a file type. Similar to uploading a file to be added, but instead of transferring the contents of the file, the URL of the data file is referenced and creates a file item. url - The URL of the item to be submitted. The URL can be a URL to a service, a web mapping application, or any other content available at that URL. text - The text content for the item to be submitted. relationshipType - The type of relationship between the two items. See Relationship types for a complete listing of types. originItemId - The item ID of the origin item of the relationship destinationItemId - item ID of the destination item of the relationship. serviceProxyParams - JSON object that provides rate limiting and referrer checks when accessing secured services. metadata - XML meta data file. multipart - If true, the file is uploaded in multiple parts. Used for files over 100 MBs in size.
372,337
def add(self, client_id, email_address, name, access_level, password):
    body = {
        "EmailAddress": email_address,
        "Name": name,
        "AccessLevel": access_level,
        "Password": password}
    response = self._post("/clients/%s/people.json" % client_id,
                          json.dumps(body))
    return json_to_py(response)
Adds a person to a client. Password is optional and if not supplied, an invitation will be emailed to the person
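A hedged usage sketch; the client object and values are illustrative, and per the note above passing None as the password triggers an emailed invitation:

    person = api.add("client_id_123", "jane@example.com", "Jane Doe", 23, None)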
372,338
def launchApp(self, **kwargs):
    app = self.installedApp
    vers = self.getVersion()
    return app.start(version=vers, full_screen=self.fullscreen,
                     verbose=self.debug, **kwargs)
Launch Starcraft2 process in the background using this configuration. WARNING: if the same IP address and port are specified between multiple SC2 process instances, all subsequent processes after the first will fail to initialize and crash.
372,339
def update_intervals(self):
    # The attribute name 'name' is taken from the docstring below; the
    # original literal was stripped.
    return {(repo.id, getattr(repo, 'name', None)): repo.update_interval
            for repo in self.remotes}
Returns a dictionary mapping remote IDs to their intervals, designed to be used for variable update intervals in salt.master.FileserverUpdate. A remote's ID is defined here as a tuple of the GitPython/Pygit2 object's "id" and "name" attributes, with None being assumed as the "name" value if the attribute is not present.
372,340
def autodecode(b):
    import warnings
    import chardet
    try:
        return b.decode()
    except UnicodeError:
        result = chardet.detect(b)
        # Dict keys follow the chardet API; the original call tried to
        # decode the result dict rather than the byte string, fixed below.
        if result['confidence'] < 0.95:
            warnings.warn('autodetected encoding with low confidence: %s'
                          % result['confidence'])
        return b.decode(result['encoding'])
Try to decode ``bytes`` to text - try default encoding first, otherwise try to autodetect Args: b (bytes): byte string Returns: str: decoded text string
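A small usage sketch:

    raw = 'héllo'.encode('latin-1')   # not valid UTF-8
    text = autodecode(raw)            # falls back to chardet detection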
372,341
def detect_encoding(sample, encoding=None):
    from cchardet import detect
    if encoding is not None:
        return normalize_encoding(sample, encoding)
    result = detect(sample)
    # Dict keys and default literals reconstructed from the cchardet API;
    # originals were stripped.
    confidence = result['confidence'] or 0
    encoding = result['encoding'] or ''
    encoding = normalize_encoding(sample, encoding)
    if confidence < config.ENCODING_CONFIDENCE:
        encoding = config.DEFAULT_ENCODING
    if encoding == 'ascii':
        encoding = config.DEFAULT_ENCODING
    return encoding
Detect encoding of a byte string sample.
372,342
def create_category(cls, category, **kwargs):
    # Keyword names reconstructed from the swagger-codegen client pattern.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._create_category_with_http_info(category, **kwargs)
    else:
        (data) = cls._create_category_with_http_info(category, **kwargs)
        return data
Create Category Create a new Category This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_category(category, async=True) >>> result = thread.get() :param async bool :param Category category: Attributes of category to create (required) :return: Category If the method is called asynchronously, returns the request thread.
372,343
def add_filtered_folder(self, path, regex, depth=None,
                        source_type=DefaultSourceType):
    self.add_source(FilteredFolderSource(path, regex, depth, **source_type))
    return self
Add a folder source to scan recursively, with a regex filter on directories. :param regex: regex string to filter folders by. :param depth: if provided will be depth limit. 0 = first level only. :param source_type: what to return; files only, folders only, or both.
372,344
def _reset(self, **kwargs):
    # The key names, the second branch and the definition of
    # `filtered_kwargs` were stripped in extraction; the reconstruction
    # below is a best guess.
    if 'uuid' in kwargs:
        self.uuid = kwargs['uuid']
    elif 'storage' in kwargs:
        self.uuid = kwargs['storage']
    filtered_kwargs = {k: v for k, v in kwargs.items() if v is not None}
    super(Storage, self)._reset(**filtered_kwargs)
Reset after repopulating from API.
372,345
def __evaluate_result(self, result, condition):
    # A guard around the string-condition checks and the quoted-string
    # test appear to have been lost in extraction; they are restored
    # below as a best guess.
    if isinstance(result, bool):
        return result
    if isinstance(result, basestring):
        self.log.debug("Condition %s recognized as string.", condition)
        if condition.startswith('"') and condition.endswith('"'):
            return result == condition[1:-1]
        return self.__evaluation_error(
            result, condition, "Result not string")
    if not isinstance(result, (float, int)):
        return self.__evaluation_error(
            result, condition, "Result not number on comparision")
    if isinstance(condition, basestring):
        if condition.startswith("="):
            number = self.__try_parse_number(condition[1:])
            if isinstance(number, bool):
                return self.__evaluation_error(
                    result, condition, "Number not parsable (=)")
            return result == number
        if condition.startswith(">="):
            number = self.__try_parse_number(condition[2:])
            if isinstance(number, bool):
                return self.__evaluation_error(
                    result, condition, "Number not parsable (>=)")
            return result >= number
        if condition.startswith("<="):
            number = self.__try_parse_number(condition[2:])
            if isinstance(number, bool):
                return self.__evaluation_error(
                    result, condition, "Number not parsable (<=)")
            return result <= number
        if condition.startswith(">"):
            number = self.__try_parse_number(condition[1:])
            if isinstance(number, bool):
                return self.__evaluation_error(
                    result, condition, "Number not parsable (>)")
            return result > number
        if condition.startswith("<"):
            number = self.__try_parse_number(condition[1:])
            if isinstance(number, bool):
                return self.__evaluation_error(
                    result, condition, "Number not parsable (<)")
            return result < number
        number = self.__try_parse_number(condition)
        if isinstance(number, bool):
            return self.__evaluation_error(
                result, condition, "Number not parsable")
        return result == number
    if isinstance(condition, (float, int)) and isinstance(result, (float, int)):
        return condition == result
    return self.__evaluation_error(result, condition, "Unknown")
Evaluates a result of a heuristic with the condition given in the config. :param mixed result: The result of the heuristic :param mixed condition: The condition string to evaluate on the result :return bool: Whether the heuristic result matches the condition
372,346
def seek(self, pos=0):
    if pos - self.pos >= 0:
        blocks, remainder = divmod(pos - self.pos, self.bufsize)
        for i in range(blocks):
            self.read(self.bufsize)
        self.read(remainder)
    else:
        raise StreamError("seeking backwards is not allowed")
    return self.pos
Set the stream's file pointer to pos. Negative seeking is forbidden.
372,347
def makeGlyphsBoundingBoxes(self):
    def getControlPointBounds(glyph):
        pen.init()
        glyph.draw(pen)
        return pen.bounds

    def toInt(value, else_callback):
        rounded = otRound(value)
        if tolerance >= 0.5 or abs(rounded - value) <= tolerance:
            return rounded
        else:
            return int(else_callback(value))

    tolerance = self.roundTolerance
    glyphBoxes = {}
    pen = ControlBoundsPen(self.allGlyphs)
    for glyphName, glyph in self.allGlyphs.items():
        bounds = None
        if glyph or glyph.components:
            bounds = getControlPointBounds(glyph)
        if bounds:
            rounded = []
            for value in bounds[:2]:
                rounded.append(toInt(value, math.floor))
            for value in bounds[2:]:
                rounded.append(toInt(value, math.ceil))
            bounds = BoundingBox(*rounded)
        glyphBoxes[glyphName] = bounds
    return glyphBoxes
Make bounding boxes for all the glyphs, and return a dictionary of BoundingBox(xMin, xMax, yMin, yMax) namedtuples keyed by glyph names. The bounding box of empty glyphs (without contours or components) is set to None. Check that the float values are within the range of the specified self.roundTolerance, and if so use the rounded value; else take the floor or ceiling to ensure that the bounding box encloses the original values.
372,348
def rename_genome(genome_in, genome_out=None):
    if genome_out is None:
        genome_out = "{}_renamed.fa".format(genome_in.split(".")[0])
    with open(genome_out, "w") as output_handle:
        for record in SeqIO.parse(genome_in, "fasta"):
            new_record_id = record.id.replace(" ", "_")
            new_record_id = new_record_id.replace("-", "_")
            new_record_id = new_record_id.replace("\t", "_")
            new_record_id = re.sub("[^_A-Za-z0-9]+", "", new_record_id)
            header = ">{}\n".format(new_record_id)
            output_handle.write(header)
            output_handle.write("{}\n".format(str(record.seq)))
Rename genome and slugify headers Rename genomes according to a simple naming scheme; this is mainly done to avoid special character weirdness. Parameters ---------- genome_in : file, str or pathlib.Path The input genome to be renamed and slugify. genome_out : file, str or pathlib.Path The output genome to be written into. Defaults is <base>_renamed.fa, where <base> is genome_in without its extension.
372,349
def add_issue_comment(self, issue_id_or_key, content, extra_request_params=None):
    # A mutable dict default was replaced with None to avoid shared state
    # between calls.
    request_params = extra_request_params if extra_request_params is not None else {}
    request_params["content"] = content
    return self.do("POST", "issues/{issue_id_or_key}/comments",
                   url_params={"issue_id_or_key": issue_id_or_key},
                   request_params=request_params,
                   )
client = BacklogClient("your_space_name", "your_api_key") client.add_issue_comment("YOUR_PROJECT-999", u"some comment text")
372,350
def register_iq_request_coro(self, type_, payload_cls, coro):
    warnings.warn(
        "register_iq_request_coro is a deprecated alias to "
        "register_iq_request_handler and will be removed in aioxmpp 1.0",
        DeprecationWarning,
        stacklevel=2)
    return self.register_iq_request_handler(type_, payload_cls, coro)
Alias of :meth:`register_iq_request_handler`. .. deprecated:: 0.10 This alias will be removed in version 1.0.
372,351
def get_axis_bin_index(self, value, axis):
    axis = self.get_axis_number(axis)
    bin_edges = self.bin_edges[axis]
    if value == bin_edges[-1]:
        return len(bin_edges) - 2
    # The `side` literal was stripped; 'right' makes the lower edge of
    # each bin inclusive, matching the docstring.
    result = np.searchsorted(bin_edges, [value], side='right')[0] - 1
    if not 0 <= result <= len(bin_edges) - 1:
        raise CoordinateOutOfRangeException(
            "Value %s is not in range (%s-%s) of axis %s" % (
                value, bin_edges[0], bin_edges[-1], axis))
    return result
Returns index along axis of bin in histogram which contains value Inclusive on both endpoints
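A short illustration, assuming a histogram `h` with bin edges [0, 1, 2, 3] on axis 0:

    # h.get_axis_bin_index(0.5, 0)  ->  0
    # h.get_axis_bin_index(3, 0)    ->  2   (right endpoint is inclusive)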
372,352
def get_cfg_value(config, section, option):
    try:
        value = config[section][option]
    except KeyError:
        if (section, option) in MULTI_OPTIONS:
            return []
        else:
            # The stripped default is assumed to be an empty string.
            return ''
    if (section, option) in MULTI_OPTIONS:
        value = split_multiline(value)
    if (section, option) in ENVIRON_OPTIONS:
        value = eval_environ(value)
    return value
Get configuration value.
372,353
def find_includes(basedirs, source, log=None):
    all_basedirs = [os.path.dirname(source)]
    all_basedirs.extend(basedirs)
    includes = set()
    with open(source, 'r') as thrift:
        for line in thrift.readlines():
            match = INCLUDE_PARSER.match(line)
            if match:
                capture = match.group(1)
                added = False
                for basedir in all_basedirs:
                    include = os.path.join(basedir, capture)
                    if os.path.exists(include):
                        if log:
                            # Debug message reconstructed; the original
                            # literal was stripped.
                            log.debug('{} includes {}'.format(source, include))
                        includes.add(include)
                        added = True
                if not added:
                    raise ValueError("{} included in {} not found in bases {}"
                                     .format(capture, source, all_basedirs))
    return includes
Finds all thrift files included by the given thrift source. :basedirs: A set of thrift source file base directories to look for includes in. :source: The thrift source file to scan for includes. :log: An optional logger
372,354
def packvalue(value, *properties):
    def func(namedstruct):
        v = namedstruct._target
        for p in properties[:-1]:
            v = getattr(v, p)
        setattr(v, properties[-1], value)
    # Return the initializer so callers can pass it to nstruct(init=...),
    # as the docstring promises.
    return func
Store a specified value to specified property path. Often used in nstruct "init" parameter. :param value: a fixed value :param properties: specified field name, same as sizefromlen. :returns: a function which takes a NamedStruct as parameter, and store the value to property path.
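A usage sketch; the struct and field names are hypothetical:

    # Hypothetical: on init, write 4 into namedstruct._target.header.length.
    init = packvalue(4, 'header', 'length')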
372,355
def _structure_recipients_data(cls, recipients):
    try:
        from django.contrib.auth import get_user_model
        USER_MODEL = get_user_model()
    except ImportError:
        from django.contrib.auth.models import User as USER_MODEL
    if not is_iterable(recipients):
        recipients = (recipients,)
    objects = []
    for r in recipients:
        user = None
        if isinstance(r, USER_MODEL):
            user = r
        address = cls.get_address(r)
        objects.append(Recipient(cls.get_alias(), user, address))
    return objects
Converts recipients data into a list of Recipient objects. :param list recipients: list of objects :return: list of Recipient :rtype: list
372,356
def diff_one(model1, model2, **kwargs):
    changes = []
    fields1 = model1._meta.fields
    fields2 = model2._meta.fields
    names1 = set(fields1) - set(fields2)
    if names1:
        fields = [fields1[name] for name in names1]
        changes.append(create_fields(model1, *fields, **kwargs))
    names2 = set(fields2) - set(fields1)
    if names2:
        changes.append(drop_fields(model1, *names2))
    fields_ = []
    nulls_ = []
    indexes_ = []
    for name in set(fields1) - names1 - names2:
        field1, field2 = fields1[name], fields2[name]
        diff = compare_fields(field1, field2)
        # Dict keys reconstructed; the original literals were stripped.
        null = diff.pop('null', None)
        index = diff.pop('index', None)
        if diff:
            fields_.append(field1)
        if null is not None:
            nulls_.append((name, null))
        if index is not None:
            indexes_.append((name, index[0], index[1]))
    if fields_:
        changes.append(change_fields(model1, *fields_, **kwargs))
    for name, null in nulls_:
        changes.append(change_not_null(model1, name, null))
    for name, index, unique in indexes_:
        if index is True or unique is True:
            if fields2[name].unique or fields2[name].index:
                changes.append(drop_index(model1, name))
            changes.append(add_index(model1, name, unique))
        else:
            changes.append(drop_index(model1, name))
    return changes
Find difference between given peewee models.
372,357
def scatter_table(self, x, y, c, s, mark='*'):
    # The default mark literal was stripped; '*' is assumed here.
    options = self._parse_plot_options(mark)
    s = [sqrt(si) for si in s]
    plot_series = self._create_plot_tables_object(x, y, c, s, options)
    self.plot_table_list.append(plot_series)
Add a data series to the plot. :param x: array containing x-values. :param y: array containing y-values. :param c: array containing values for the color of the mark. :param s: array containing values for the size of the mark. :param mark: the symbol used to mark the data point. May be None, or any plot mark accepted by TikZ (e.g. ``*, x, +, o, square, triangle``). The dimensions of x, y, c and s should be equal. The c values will be mapped to a colormap.
372,358
def _on_update_rt_filter(self, peer, new_rts, old_rts):
    for table in self._table_manager._global_tables.values():
        if table.route_family == RF_RTC_UC:
            continue
        # Spawn-name and log-message literals are reconstructions; the
        # originals were stripped.
        self._spawn('rt_filter_chg_%s' % peer,
                    self._rt_mgr.on_rt_filter_chg_sync_peer,
                    peer, new_rts, old_rts, table)
        LOG.debug('RT filter change handler spawned for table %s',
                  table.route_family)
Handles update of peer RT filter. Parameters: - `peer`: (Peer) whose RT filter has changed. - `new_rts`: (set) of new RTs that peer is interested in. - `old_rts`: (set) of RTs that peers is no longer interested in.
372,359
def cache_makedirs(self, subdir=None):
    if subdir is not None:
        dirname = self.cache_path
        if subdir:
            dirname = os.path.join(dirname, subdir)
    else:
        dirname = os.path.dirname(self.cache_path)
    os.makedirs(dirname, exist_ok=True)
Make necessary directories to hold cache value
372,360
def get_human_size(size, use_giga=True):
    # Format strings are reconstructions; the original literals were
    # stripped.
    size_kb = '{0:,}'.format(size)
    if size < SIZE_K:
        return ('{0}B'.format(size), size_kb)
    if size < SIZE_M:
        return ('{0:.1f}kB'.format(size / SIZE_K), size_kb)
    if size < SIZE_G or not use_giga:
        return ('{0:.1f}MB'.format(size / SIZE_M), size_kb)
    if size < SIZE_T:
        return ('{0:.1f}GB'.format(size / SIZE_G), size_kb)
    return ('{0:.1f}TB'.format(size / SIZE_T), size_kb)
Convert a file size in bytes to a human-readable string. size - integer, the file size in bytes. use_giga - if False, the largest unit used is MegaBytes rather than GigaBytes; this is useful when displaying download progress, because the value then changes dynamically as the download proceeds.
372,361
def get_pipe(self):
    lines = []
    for line in sys.stdin:
        try:
            lines.append(self.line_to_object(line.strip()))
        except ValueError:
            pass
        except KeyError:
            pass
    return lines
Returns a list that maps the input of the pipe to an elasticsearch object. Will call id_to_object if it cannot serialize the data from json.
372,362
def convertSequenceMachineSequence(generatedSequences):
    sequenceList = []
    currentSequence = []
    for s in generatedSequences:
        if s is None:
            sequenceList.append(currentSequence)
            currentSequence = []
        else:
            currentSequence.append(s)
    return sequenceList
Convert a sequence from the SequenceMachine into a list of sequences, such that each sequence is a list of set of SDRs.
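For example, a None-separated stream is grouped into two sequences:

    seqs = convertSequenceMachineSequence([{1}, {2}, None, {3}, None])
    # -> [[{1}, {2}], [{3}]]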
372,363
def plot_polar(
        log,
        title,
        dataDictionary,
        pathToOutputPlotsFolder="~/Desktop",
        dataRange=False,
        ylabel=False,
        radius=False,
        circumference=True,
        circleTicksRange=(0, 360, 60),
        circleTicksLabels=".",
        prependNum=False):
    import sys
    import matplotlib.pyplot as plt
    import numpy as np
    # The color dictionaries and several keyword literals (marker, legend
    # location, image-link format) were stripped in extraction; the
    # values below are placeholders.
    colors = [
        {'green': '#859900'}, {'blue': '#268bd2'}, {'red': '#dc322f'},
        {'gray': '#93a1a1'}, {'orange': '#cb4b16'}, {'violet': '#6c71c4'},
        {'cyan': '#2aa198'}, {'magenta': '#d33682'}, {'yellow': '#b58900'}
    ]
    fig = plt.figure(
        num=None,
        figsize=(8, 8),
        dpi=None,
        facecolor=None,
        edgecolor=None,
        frameon=True)
    ax = fig.add_axes(
        [0.1, 0.1, 0.8, 0.8],
        polar=True,
        frameon=circumference)
    ax.set_ylim(0, radius)
    if circleTicksRange:
        circleTicks = np.arange(circleTicksRange[0], circleTicksRange[1],
                                circleTicksRange[2])
        tickLabels = []
        for tick in circleTicks:
            tickLabels.append(".")
        plt.xticks(2 * np.pi * circleTicks / 360., tickLabels)
    count = 0
    for k, v in dataDictionary.iteritems():
        if count <= len(colors):
            colorDict = colors[count]
            count += 1
        else:
            count = 0
            colorDict = colors[count]
        thetaList = []
        twoPi = 2. * np.pi
        for i in range(len(v)):
            thetaList.append(twoPi * np.random.rand())
        thetaArray = np.array(thetaList)
        x = thetaArray
        y = v
        plt.scatter(
            x, y, label=k, s=50, c=colorDict.values(), marker='o',
            cmap=None, norm=None, vmin=None, vmax=None, alpha=0.5,
            linewidths=None, edgecolor='w', verts=None, hold=True)
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(loc='upper right', bbox_to_anchor=(0.7, -0.1), prop={'size': 8})
    plt.grid(True)
    plt.title(title)
    if prependNum:
        title = "%02d_%s" % (prependNum, title)
    thisTitle = title.replace(" ", "_")
    thisTitle = thisTitle.replace("-", "_")
    fileName = pathToOutputPlotsFolder + thisTitle + ".png"
    imageLink = "![%s](%s)" % (thisTitle, fileName)
    plt.savefig(fileName)
    plt.clf()
    return imageLink
*Plot a dictionary of numpy lightcurves polynomials* **Key Arguments:** - ``log`` -- logger - ``title`` -- title for the plot - ``dataDictionary`` -- dictionary of data to plot { label01 : dataArray01, label02 : dataArray02 } - ``pathToOutputPlotsFolder`` -- path the the output folder to save plot to - ``dataRange`` -- the range for the data [min, max] - ``ylabel`` -- ylabel - ``radius`` -- the max radius of the plot - ``circumference`` -- draw the circumference of the plot? - ``circleTicksRange`` - ``circleTicksLabels`` - ``prependNum`` -- prepend this number to the output filename **Return:** - None
372,364
def getVersion(init_file):
    # Literals ('BUILDBOT_VERSION', 'VERSION', the git arguments) are
    # reconstructed from the docstring and buildbot conventions.
    try:
        return os.environ['BUILDBOT_VERSION']
    except KeyError:
        pass
    try:
        cwd = os.path.dirname(os.path.abspath(init_file))
        fn = os.path.join(cwd, 'VERSION')
        with open(fn) as f:
            return f.read().strip()
    except IOError:
        pass
    version = getVersionFromArchiveId()
    if version is not None:
        return version
    try:
        p = Popen(['git', 'describe', '--tags', '--always'],
                  stdout=PIPE, stderr=STDOUT, cwd=cwd)
        out = p.communicate()[0]
        if (not p.returncode) and out:
            v = gitDescribeToPep440(str(out))
            if v:
                return v
    except OSError:
        pass
    try:
        return mTimeVersion(init_file)
    except Exception:
        return "latest"
Return BUILDBOT_VERSION environment variable, content of VERSION file, git tag or 'latest'
372,365
def tokenize(self, value):
    class STATE:
        NORMAL = 0
        GROUP_PUNCTUATION = 1
        PROCESS_HTML_TAG = 2
        PROCESS_HTML_ENTITY = 3
        GROUP_LINEBREAKS = 4

    state_names = {
        STATE.NORMAL: "normal",
        STATE.GROUP_PUNCTUATION: "punctuation",
        STATE.PROCESS_HTML_TAG: "html",
        STATE.PROCESS_HTML_ENTITY: "html_entity",
        STATE.GROUP_LINEBREAKS: "break"
    }
    state = [STATE.NORMAL]
    token = [""]
    tokens = []
    index = -1

    def clearToken():
        token[0] = ""
        state[0] = STATE.NORMAL

    def emitToken():
        if len(token[0]) > 0:
            char_start, char_end = index, index + len(token[0])
            if self.create_structured_tokens:
                # Structured-token keys reconstructed; originals stripped.
                new_token = {'value': token[0],
                             'type': state_names[state[0]],
                             'start': char_start,
                             'end': char_end}
                tokens.append(new_token)
            else:
                tokens.append(token[0])
        clearToken()

    def fixBrokenHtmlEntity():
        if not self.recognizePunctuation:
            token[0] += c

    # NOTE: the main character loop header was lost in extraction; only
    # fragments of its body survive, arranged below as a best-effort
    # reconstruction of the tag/entity handling.
    for index, c in enumerate(value):
        if state[0] == STATE.PROCESS_HTML_TAG:
            if c == CrfTokenizer.END_HTML_TAG_CHAR:
                if self.skipHtmlTags:
                    clearToken()
                else:
                    emitToken()
                continue
            token[0] = token[0] + c
        elif state[0] == STATE.PROCESS_HTML_ENTITY:
            if c == CrfTokenizer.END_HTML_ENTITY_CHAR:
                if len(token[0]) == 1:
                    if not self.recognizePunctuation:
                        token[0] = token[0] + c
                        state[0] = STATE.NORMAL
                    elif self.groupPunctuation:
                        token[0] = token[0] + c
                        state[0] = STATE.GROUP_PUNCTUATION
                    else:
                        emitToken()
                        token[0] = token[0] + c
                        emitToken()
                continue
            token[0] = token[0] + c
        else:
            if state[0] != STATE.NORMAL:
                emitToken()
            token[0] = token[0] + c
    if state[0] == STATE.PROCESS_HTML_ENTITY:
        fixBrokenHtmlEntity()
    emitToken()
    if self.tokenPrefix is not None and len(self.tokenPrefix) > 0:
        tokens = map(lambda x: self.tokenPrefix + x, tokens)
    return tokens
Take a string and break it into tokens. Return the tokens as a list of strings.
372,366
def Open(pathfileext=None, shot=None, t=None, Dt=None, Mesh=None, Deg=None,
         Deriv=None, Sep=True, Pos=True, OutPath=None, ReplacePath=None,
         Ves=None, out='full', Verb=False, Print=True):
    # The default for `out` and the '.npz' literals are reconstructed
    # from the docstring; the originals were stripped.
    assert None in [pathfileext, shot] and \
        not (pathfileext is None and shot is None), \
        "Arg pathfileext or shot must be None, but not both !"
    if pathfileext is None:
        File = FindSolFile(shot=shot, t=t, Dt=Dt, Mesh=Mesh, Deg=Deg,
                           Deriv=Deriv, Sep=Sep, Pos=Pos, OutPath=OutPath)
        if File is None:
            return File
        pathfileext = os.path.join(OutPath, File)
    C = any([ss in pathfileext for ss in ['.npz']])
    assert C, "Arg pathfileext must contain '.npz' !"
    if '.npz' in pathfileext:
        obj = _open_np(pathfileext, Ves=Ves, ReplacePath=ReplacePath,
                       out=out, Verb=Verb, Print=Print)
    if Print:
        print("Loaded : " + pathfileext)
    return obj
Open a ToFu object saved file This generic open function identifies the required loading routine by detecting how the object was saved from the file name extension. Also, it uses :meth:`~tofu.pathfile.FindSolFile()` to identify the relevant file in case key criteria such as shot, Deg... are provided instead of the file name itself. Finally, once all the relevant data is loaded from the file, a ToFu object is re-created, if necessary by implicitly loading all other objects it may depend on (i.e.: vessel, apertures...) If pathfileext is not provided (None), then the following keyword arguments are fed to :meth:`~tofu.pathfile.FindSolFile()`: shot, t, Dt, Mesh, Deg, Deriv, Sep, Pos Parameters ---------- pathfileext : None / str If provided, the name of the file to load OutPath : None / str If provided, the absolute path where the file is to be found ReplacePath : str If provided, ? (to finish) Ves : None / If provided, the :class:`tofu.geom.Ves` object that shall be used to reconstruct the object (if not provided, the appropriate vessel will be loaded). out : str Flag indicating whether the object should be loaded completely ('full'), in a light dismissing the heaviest attributes ('light') or whether only the Id or a list of Id should be returned ('Id'), valid only for '.npz' Verb : bool Flag indicating whether to pring intermediate comments on the loading procedure Returns ------- obj ToFu object The loaded and re-created ToFu object
372,367
def set_edges(self, name: str, a: np.ndarray, b: np.ndarray, w: np.ndarray,
              *, axis: int) -> None:
    # Deprecation message reconstructed from the docstring below.
    deprecated("'set_edges' is deprecated. Use 'row_graphs' or "
               "'col_graphs' instead")
    try:
        g = scipy.sparse.coo_matrix((w, (a, b)),
                                    (self.shape[axis], self.shape[axis]))
    except Exception:
        raise ValueError("Input arrays could not be converted to a sparse matrix")
    if axis == 0:
        self.row_graphs[name] = g
    elif axis == 1:
        self.col_graphs[name] = g
    else:
        raise ValueError("axis must be 0 (rows) or 1 (columns)")
**DEPRECATED** - Use `ds.row_graphs[name] = g` or `ds.col_graphs[name] = g` instead
372,368
def _GetIdValue(self, registry_key):
    # The value name 'Id' is taken from the docstring.
    id_value = registry_key.GetValueByName('Id')
    if id_value:
        yield registry_key, id_value
    for sub_key in registry_key.GetSubkeys():
        for value_key, id_value in self._GetIdValue(sub_key):
            yield value_key, id_value
Retrieves the Id value from Task Cache Tree key. Args: registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Yields: tuple: containing: dfwinreg.WinRegistryKey: Windows Registry key. dfwinreg.WinRegistryValue: Windows Registry value.
372,369
def executor(self, max_workers=1):
    cls = self.__class__
    if cls._executor is None:
        cls._executor = ThreadPoolExecutor(max_workers)
    return cls._executor
single global executor
372,370
def error(self, s):
    # The format string had one placeholder for two arguments; fixed to
    # include the source id.
    print("%s ERROR: %s" % (self.src_id, s), file=sys.stderr)
Prints out an error message to stderr. :param s: The error string to print :return: None
372,371
def _remove_dummies(self, to_remove=None, inplace=False):
    # Works INPLACE when `inplace=True`. The column keys ('b', 'a', 'd',
    # 'bond', 'angle', 'dihedral') and metadata keys are reconstructions
    # based on chemcoord conventions; the original literals were stripped.
    zmat = self if inplace else self.copy()
    if to_remove is None:
        to_remove = zmat._has_removable_dummies()
    if not to_remove:
        if inplace:
            return None
        else:
            return zmat
    has_dummies = zmat._metadata['has_dummies']
    c_table = zmat.loc[to_remove, ['b', 'a', 'd']]
    c_table['b'] = [has_dummies[k]['b'] for k in to_remove]
    zmat.unsafe_loc[to_remove, 'b'] = c_table['b'].astype('i8')
    zmat_values = zmat.get_cartesian()._calculate_zmat_values(c_table)
    zmat.unsafe_loc[to_remove, ['bond', 'angle', 'dihedral']] = zmat_values
    zmat._frame.drop([has_dummies[k]['dummy_d'] for k in to_remove],
                     inplace=True)
    warnings.warn('Dummy atoms were removed for: {}'.format(to_remove),
                  UserWarning)
    for k in to_remove:
        zmat._metadata['has_dummies'].pop(k)
    if not inplace:
        return zmat
Works INPLACE
372,372
def copy(self):
    result = Vector3(self.size, self.deriv)
    result.x.v = self.x.v
    result.y.v = self.y.v
    result.z.v = self.z.v
    if self.deriv > 0:
        result.x.d[:] = self.x.d
        result.y.d[:] = self.y.d
        result.z.d[:] = self.z.d
    if self.deriv > 1:
        result.x.dd[:] = self.x.dd
        result.y.dd[:] = self.y.dd
        result.z.dd[:] = self.z.dd
    return result
Return a deep copy
372,373
def check_multi_dimensional_coords(self, ds):
    ret_val = []
    for coord in self._find_aux_coord_vars(ds):
        variable = ds.variables[coord]
        if variable.ndim < 2:
            continue
        # Section key and message are reconstructions; the original
        # literals were stripped.
        not_matching = TestCtx(BaseCheck.MEDIUM, self.section_titles['5'])
        not_matching.assert_true(
            coord not in variable.dimensions,
            '{} shares the same name as one of its dimensions'.format(coord))
        ret_val.append(not_matching.to_result())
    return ret_val
Checks that no multidimensional coordinate shares a name with its dimensions. Chapter 5 paragraph 4 We recommend that the name of a [multidimensional coordinate] should not match the name of any of its dimensions. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
372,374
def run(self, coords=None, debug=False):
    self.grid.search(coords=coords)
    return self.grid
Run the likelihood grid search
372,375
def peek(self):
    try:
        v = next(self._iter)
        self._iter = itertools.chain((v,), self._iter)
        return v
    except StopIteration:
        return PeekableIterator.Nothing
Returns PeekableIterator.Nothing when the iterator is exhausted.
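A quick illustration of the non-destructive peek; the constructor shown is hypothetical:

    it = PeekableIterator(iter([1, 2]))   # hypothetical constructor
    first = it.peek()   # -> 1; the value is chained back, not consumed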
372,376
def getUsrCfgFilesForPyPkg(pkgName):
    # The assignment of `theFile` was lost in extraction; it is assumed
    # here to be the package's bundled .cfg file located via the import.
    thePkg = __import__(str(pkgName))
    theFile = os.path.join(os.path.dirname(thePkg.__file__), pkgName + '.cfg')
    tname = getEmbeddedKeyVal(theFile, TASK_NAME_KEY)
    flist = getCfgFilesInDirForTask(getAppDir(), tname)
    return flist
See if the user has one of their own local .cfg files for this task, such as might be created automatically during the save of a read-only package, and return their names.
372,377
def get_value_in_base_currency(self) -> Decimal:
    amt_orig = self.get_value()
    sec_cur = self.get_currency()
    cur_svc = CurrenciesAggregate(self.book)
    base_cur = cur_svc.get_default_currency()
    if sec_cur == base_cur:
        return amt_orig
    single_svc = cur_svc.get_currency_aggregate(sec_cur)
    rate = single_svc.get_latest_rate(base_cur)
    result = amt_orig * rate.value
    return result
Calculates the value of security holdings in base currency
372,378
def dcc_connect(self, address, port, dcctype="chat"):
    warnings.warn("Use self.dcc(type).connect()", DeprecationWarning)
    return self.dcc(dcctype).connect(address, port)
Connect to a DCC peer. Arguments: address -- IP address of the peer. port -- Port to connect to. Returns a DCCConnection instance.
372,379
def _any_would_run(func, filenames, *args):
    if os.environ.get("_POLYSQUARE_GENERIC_FILE_LINTER_NO_STAMPING", None):
        return True
    for filename in filenames:
        stamp_args, stamp_kwargs = _run_lint_on_file_stamped_args(filename,
                                                                  *args,
                                                                  **{})
        dependency = jobstamp.out_of_date(func, *stamp_args, **stamp_kwargs)
        if dependency:
            return True
    return False
True if a linter function would be called on any of filenames.
372,380
def quadratic_jacobian_polynomial(nodes):
    jac_parts = _helpers.matrix_product(nodes, _QUADRATIC_JACOBIAN_HELPER)
    jac_at_nodes = np.empty((1, 6), order="F")
    jac_at_nodes[0, 0] = two_by_two_det(jac_parts[:, :2])
    jac_at_nodes[0, 1] = two_by_two_det(jac_parts[:, 2:4])
    jac_at_nodes[0, 2] = two_by_two_det(jac_parts[:, 4:6])
    jac_at_nodes[0, 3] = two_by_two_det(jac_parts[:, 6:8])
    jac_at_nodes[0, 4] = two_by_two_det(jac_parts[:, 8:10])
    jac_at_nodes[0, 5] = two_by_two_det(jac_parts[:, 10:])
    bernstein = _helpers.matrix_product(jac_at_nodes, _QUADRATIC_TO_BERNSTEIN)
    return bernstein
r"""Compute the Jacobian determinant of a quadratic surface. .. note:: This is used **only** by :meth:`Surface._compute_valid` (which is in turn used to compute / cache the :attr:`Surface.is_valid` property). Converts :math:`\det(J(s, t))` to a polynomial on the reference triangle and represents it as a surface object. .. note:: This assumes that ``nodes`` is ``2 x 6`` but doesn't verify this. (However, the right multiplication by ``_QUADRATIC_JACOBIAN_HELPER`` would fail if ``nodes`` wasn't ``R x 6`` and then the ensuing determinants would fail if there weren't 2 rows.) Args: nodes (numpy.ndarray): A 2 x 6 array of nodes in a surface. Returns: numpy.ndarray: 1 x 6 array, coefficients in Bernstein basis.
372,381
def emit(self, signal, message=''):
    # Signal-name literals are reconstructions; originals were stripped.
    if signal == 'info':
        self.log_backend.info(message)
    elif signal == 'warning':
        self.log_backend.warn(message)
    elif signal == 'error':
        self.log_backend.error(message)
    return self.emit_to_frontend(signal, message)
Emit a signal to the frontend. :param str signal: name of the signal :param message: message to send :returns: return value from frontend emit function :rtype: tornado.concurrent.Future
372,382
def _pywrap_tensorflow():
    try:
        from tensorboard.compat import notf
    except ImportError:
        try:
            from tensorflow.python import pywrap_tensorflow
            return pywrap_tensorflow
        except ImportError:
            pass
    from tensorboard.compat.tensorflow_stub import pywrap_tensorflow
    return pywrap_tensorflow
Provide pywrap_tensorflow access in TensorBoard. pywrap_tensorflow cannot be accessed from tf.python.pywrap_tensorflow and needs to be imported using `from tensorflow.python import pywrap_tensorflow`. Therefore, we provide a separate accessor function for it here. NOTE: pywrap_tensorflow is not part of TensorFlow API and this dependency will go away soon. Returns: pywrap_tensorflow import, if available. Raises: ImportError: if we couldn't import pywrap_tensorflow.
372,383
def getchallenge(self):
    "Return server challenge"
    self.sock.send(CHALLENGE_PACKET)
    for packet in self.read_iterator(self.CHALLENGE_TIMEOUT):
        if packet.startswith(CHALLENGE_RESPONSE_HEADER):
            return parse_challenge_response(packet)
Return server challenge
372,384
def _parse_dict(element, definition):
    sub_dict = {}
    for name, subdef in viewitems(definition):
        (name, required) = _parse_name(name)
        sub_dict[name] = xml_to_json(element, subdef, required)
    return sub_dict
Parse xml element by a definition given in dict format. :param element: ElementTree element :param definition: definition schema :type definition: dict :return: parsed xml :rtype: dict
372,385
def error_response(self, request, error, **kwargs):
    ctx = {}
    ctx.update(error)
    # The error codes and the fallback redirect were stripped; the values
    # below are a best guess.
    if error['error'] in ['access_denied', 'unauthorized_client']:
        ctx.update(next='/')
        return self.render_to_response(ctx, **kwargs)
    ctx.update(next=self.get_redirect_url(request))
    return self.render_to_response(ctx, **kwargs)
Return an error to be displayed to the resource owner if anything goes awry. Errors can include invalid clients, authorization denials and other edge cases such as a wrong ``redirect_uri`` in the authorization request. :param request: :attr:`django.http.HttpRequest` :param error: ``dict`` The different types of errors are outlined in :rfc:`4.2.2.1`
372,386
def generate_ha_relation_data(service, extra_settings=None):
    # Relation-data keys and resource literals are reconstructions based
    # on charmhelpers HA conventions; the originals were stripped.
    _haproxy_res = 'res_{}_haproxy'.format(service)
    _relation_data = {
        'resources': {
            _haproxy_res: 'lsb:haproxy',
        },
        'resource_params': {
            _haproxy_res: 'op monitor interval="5s"'
        },
        'init_services': {
            _haproxy_res: 'haproxy'
        },
        'clones': {
            'cl_{}_haproxy'.format(service): _haproxy_res
        },
    }
    if extra_settings:
        for k, v in extra_settings.items():
            if _relation_data.get(k):
                _relation_data[k].update(v)
            else:
                _relation_data[k] = v
    if config('dns-ha'):
        update_hacluster_dns_ha(service, _relation_data)
    else:
        update_hacluster_vip(service, _relation_data)
    return {
        'json_{}'.format(k): json.dumps(v, **JSON_ENCODE_OPTIONS)
        for k, v in _relation_data.items() if v
    }
Generate relation data for ha relation Based on configuration options and unit interfaces, generate a json encoded dict of relation data items for the hacluster relation, providing configuration for DNS HA or VIP's + haproxy clone sets. Example of supplying additional settings:: COLO_CONSOLEAUTH = 'inf: res_nova_consoleauth grp_nova_vips' AGENT_CONSOLEAUTH = 'ocf:openstack:nova-consoleauth' AGENT_CA_PARAMS = 'op monitor interval="5s"' ha_console_settings = { 'colocations': {'vip_consoleauth': COLO_CONSOLEAUTH}, 'init_services': {'res_nova_consoleauth': 'nova-consoleauth'}, 'resources': {'res_nova_consoleauth': AGENT_CONSOLEAUTH}, 'resource_params': {'res_nova_consoleauth': AGENT_CA_PARAMS}) generate_ha_relation_data('nova', extra_settings=ha_console_settings) @param service: Name of the service being configured @param extra_settings: Dict of additional resource data @returns dict: json encoded data for use with relation_set
372,387
def Reverse(self, copy=False):
    numPoints = self.GetN()
    if copy:
        revGraph = self.Clone()
    else:
        revGraph = self
    X = self.GetX()
    EXlow = self.GetEXlow()
    EXhigh = self.GetEXhigh()
    Y = self.GetY()
    EYlow = self.GetEYlow()
    EYhigh = self.GetEYhigh()
    for i in range(numPoints):
        index = numPoints - 1 - i
        revGraph.SetPoint(i, X[index], Y[index])
        revGraph.SetPointError(
            i, EXlow[index], EXhigh[index],
            EYlow[index], EYhigh[index])
    return revGraph
Reverse the order of the points
372,388
def fit(self, X=None, u=None, s=None):
    X = X.copy()
    # mode literal was lost in extraction; 'parallel' is assumed
    if self.mode == 'parallel':
        Xall = X.copy()
        X = np.reshape(Xall.copy(), (-1, Xall.shape[-1]))
    if u is None:
        # random initial sorting; ikeep (indices of the kept samples) is
        # assumed to be defined by an SVD preprocessing step elided here
        init_sort = np.random.permutation(len(ikeep))[:, np.newaxis]
        for j in range(1, self.n_components):
            init_sort = np.concatenate(
                (init_sort,
                 np.random.permutation(len(ikeep))[:, np.newaxis]), axis=-1)
    else:
        init_sort = self.init
    if self.n_components == 1 and init_sort.ndim == 1:
        init_sort = init_sort[:, np.newaxis]
    isort1, iclustup = self._map(u.copy(), self.n_components, self.n_X,
                                 init_sort, ikeep, s)
    self.isort = isort1
    self.embedding = iclustup
    return self
Fit X into an embedded space.

Inputs
------
X : array, shape (n_samples, n_features)
u, s, v : svd decomposition of X (optional)

Assigns
-------
embedding : array-like, shape (n_samples, n_components)
    Stores the embedding vectors.
u, sv, v : singular value decomposition of data S, potentially with smoothing
isort1 : sorting along first dimension of matrix
isort2 : sorting along second dimension of matrix (if n_Y > 0)
cmap : correlation of each item with all locations in the embedding map (before upsampling)
A : PC coefficients of each Fourier mode
372,389
async def declareWorkerType(self, *args, **kwargs):
    return await self._makeApiCall(self.funcinfo["declareWorkerType"], *args, **kwargs)
Update a worker-type Declare a workerType, supplying some details about it. `declareWorkerType` allows updating one or more properties of a worker-type as long as the required scopes are possessed. For example, a request to update the `gecko-b-1-w2008` worker-type within the `aws-provisioner-v1` provisioner with a body `{description: 'This worker type is great'}` would require you to have the scope `queue:declare-worker-type:aws-provisioner-v1/gecko-b-1-w2008#description`. This method takes input: ``v1/update-workertype-request.json#`` This method gives output: ``v1/workertype-response.json#`` This method is ``experimental``
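A hedged usage sketch: the client object and exact argument order are assumptions (the entry only shows the delegating wrapper); the payload mirrors the docstring's example:

async def update_worker_type(provisioner):
    # workerType identifier and partial-update payload, per the docstring
    return await provisioner.declareWorkerType(
        'gecko-b-1-w2008',
        {'description': 'This worker type is great'},
    )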
372,390
def wait(self, timeout=None):
    if not self._is_owned():
        raise RuntimeError("cannot wait on un-acquired lock")
    waiter = _allocate_lock()
    waiter.acquire()
    self.__waiters.append(waiter)
    saved_state = self._release_save()
    try:
        if timeout is None:
            waiter.acquire()
            if __debug__:
                self._note("%s.wait(): got it", self)
        else:
            # poll with exponentially growing delay, capped at 50 ms
            endtime = _time() + timeout
            delay = 0.0005
            while True:
                gotit = waiter.acquire(0)
                if gotit:
                    break
                remaining = endtime - _time()
                if remaining <= 0:
                    break
                delay = min(delay * 2, remaining, .05)
                _sleep(delay)
            if not gotit:
                if __debug__:
                    self._note("%s.wait(%s): timed out", self, timeout)
                try:
                    self.__waiters.remove(waiter)
                except ValueError:
                    pass
            else:
                if __debug__:
                    self._note("%s.wait(%s): got it", self, timeout)
    finally:
        self._acquire_restore(saved_state)
Wait until notified or until a timeout occurs. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. This method releases the underlying lock, and then blocks until it is awakened by a notify() or notifyAll() call for the same condition variable in another thread, or until the optional timeout occurs. Once awakened or timed out, it re-acquires the lock and returns. When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). When the underlying lock is an RLock, it is not released using its release() method, since this may not actually unlock the lock when it was acquired multiple times recursively. Instead, an internal interface of the RLock class is used, which really unlocks it even when it has been recursively acquired several times. Another internal interface is then used to restore the recursion level when the lock is reacquired.
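A minimal producer/consumer illustration of wait()/notify() with the standard library; note the predicate is re-checked in a loop, since wait() can return before a notification:

import threading

cond = threading.Condition()
items = []

def consumer():
    with cond:
        while not items:           # re-check: wakeups may be spurious
            cond.wait(timeout=5)   # releases the lock while blocked
        print('consumed', items.pop())

def producer():
    with cond:
        items.append('work')
        cond.notify()              # wake one waiting thread

t = threading.Thread(target=consumer)
t.start()
producer()
t.join()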
372,391
def get(self, path):
    data = self.app.test_client().get("/%s" % path).data
    return data
Get the content of a file, identified by its path relative to the folder configured in PyGreen. If the file extension is one of the extensions that should be processed through Mako, it will be processed.
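The "processed through Mako" step amounts to template rendering; a standalone illustration with the mako package:

from mako.template import Template

print(Template('<h1>${title}</h1>').render(title='PyGreen'))
# <h1>PyGreen</h1>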
372,392
def cli(ctx, name, dname, license_key, ips_version, force, enable, ssl, spdy, gzip, cache, install, dev):
    assert isinstance(ctx, Context)
    login_session = ctx.get_login()
    log = logging.getLogger()
    ctx.cache = cache

    def get_license():
        licenses = Licenses(login_session).get()
        # config section/option names were lost in extraction; assumed here
        user_license = license_key or ctx.config.get('User', 'LicenseKey')
        if user_license:
            licenses = {license.license_key: license for license in licenses}
            if user_license in licenses:
                return licenses[user_license]
        # prompt/template strings were lost in extraction; assumed here
        opt = choice([
            (key, '{u} ({k})'.format(u=license.community_url, k=license.license_key))
            for key, license in enumerate(licenses)
        ], 1, 'Which license would you like to use?')
        license = licenses[opt]
        if click.confirm('Save this license as your default?', True):
            ctx.log.debug('Saving license key {k}'.format(k=license.license_key))
            ctx.config.set('User', 'LicenseKey', license.license_key)
            with open(ctx.config_path, 'w') as configfile:
                ctx.config.write(configfile)
        return license

    lmeta = get_license()
    p = Echo('Fetching IPS version information...')  # status text assumed
    ips = IpsManager(ctx, lmeta)
    p.done()
    if ips_version:
        if ips_version == 'dev':  # literal lost in extraction; assumed
            v = ips.dev_version
            if not v:
                click.secho('No IPS development release is available',
                            err=True, fg='red', bold=True)  # message/color assumed
                raise Exception('No development release available')
            p = Echo('Fetching IPS development version {vs}...'.format(vs=v.version.vstring))
        else:
            ips_version = Version(ips_version)
            v = ips.versions[ips_version.vtuple]
            p = Echo('Fetching IPS version {iv}...'.format(iv=ips_version.vstring))
    else:
        v = ips.latest
        p = Echo('Fetching latest IPS version ({vs})...'.format(vs=v.version.vstring))
    filename = ips.get(v, cache)
    p.done()
Downloads and installs a new instance of the latest Invision Power Suite release.
372,393
def make_unicode(string):
    # version literal was lost in extraction; '3' is assumed, i.e. the
    # branch only applies under Python 2
    if sys.version < '3' and isinstance(string, str):
        return unicode(string.decode())
    return string
Python 2 and 3 compatibility function that converts a string to Unicode. In case of Unicode input, the string is returned unchanged. :param string: input string :return: Unicode string
372,394
def _RetryRequest(self, timeout=None, **request_args):
    while True:
        try:
            now = time.time()
            if not timeout:
                timeout = config.CONFIG["Client.http_timeout"]
            result = requests.request(**request_args)
            result.raise_for_status()
            if not result.ok:
                raise requests.RequestException(response=result)
            return time.time() - now, result
        except IOError as e:
            self.consecutive_connection_errors += 1
            if self.active_base_url is not None:
                response = getattr(e, "response", None)
                if getattr(response, "status_code", None) == 406:
                    raise
                if self.consecutive_connection_errors >= self.retry_error_limit:
                    logging.info(
                        "Too many connection errors to %s, retrying another URL",
                        self.active_base_url)
                    self.active_base_url = None
                    raise e
                logging.debug(
                    "Unable to connect to frontend. Backing off %s seconds.",
                    self.error_poll_min)
                self.Wait(self.error_poll_min)
            else:
                raise e
Retry the request a few times before we determine it failed. Sometimes the frontend becomes loaded and issues a 500 error to throttle the clients. We wait Client.error_poll_min seconds between each attempt to back off the frontend. Note that this does not affect any timing algorithm in the client itself which is controlled by the Timer() class. Args: timeout: Timeout for retry. **request_args: Args to the requests.request call. Returns: a tuple of duration, urllib.request.urlopen response.
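A simplified, self-contained version of the retry-with-backoff pattern described above (fixed back-off; the attempt count and timings are illustrative, not GRR's configuration):

import time
import requests

def fetch_with_retry(url, attempts=5, poll_min=5):
    for i in range(attempts):
        try:
            resp = requests.get(url, timeout=10)
            resp.raise_for_status()
            return resp
        except IOError:            # requests exceptions subclass IOError
            if i == attempts - 1:
                raise
            time.sleep(poll_min)   # back off before the next attempt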
372,395
def get_blog_context(context):
    # key/field literals were lost in extraction; the wagtail-blog values
    # are assumed below
    context['authors'] = get_user_model().objects.filter(
        owned_pages__live=True,
        owned_pages__content_type__model='blogpage',
    ).annotate(Count('owned_pages')).order_by('-owned_pages__count')
    context['all_categories'] = BlogCategory.objects.all()
    context['root_categories'] = BlogCategory.objects.filter(
        parent=None,
    ).prefetch_related(
        'children',
    ).annotate(
        blog_count=Count('blogpage'),
    )
    return context
Get context data useful on all blog related pages
372,396
def standardise_quotes(self, val):
    if self._in_quotes(val, self.altquote):
        middle = self.remove_quotes(val)
        val = self.add_quotes(middle)
    return self.escape_quotes(val)
Change the quotes used to wrap a value to the pprint default, e.g. "val" to 'val' or 'val' to "val".
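A free-standing sketch of the same rewrapping idea (escaping omitted; the quote characters are illustrative defaults):

def standardise(val, default="'", alt='"'):
    # Rewrap an alt-quoted value in the default quote character.
    if len(val) >= 2 and val[0] == val[-1] == alt:
        val = default + val[1:-1] + default
    return val

print(standardise('"val"'))   # 'val'
print(standardise("'val'"))   # 'val' (already default, unchanged)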
372,397
def normalized_per_object(image, labels):
    nobjects = labels.max()
    objects = np.arange(nobjects + 1)
    lmin, lmax = scind.extrema(image, labels, objects)[:2]
    divisor = np.ones((nobjects + 1,))
    divisor[lmax > lmin] = (lmax - lmin)[lmax > lmin]
    return (image - lmin[labels]) / divisor[labels]
Normalize the intensities of each object to the [0, 1] range.
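A toy call (assuming normalized_per_object from the entry above is in scope): each labeled object is rescaled to [0, 1] independently, with label 0 treated as background:

import numpy as np

image = np.array([5.0, 0.0, 2.0, 4.0, 10.0, 20.0, 30.0])
labels = np.array([0, 1, 1, 1, 2, 2, 2])
print(normalized_per_object(image, labels))
# [0.  0.  0.5 1.  0.  0.5 1. ]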
372,398
def qn_df(df, axis='row', keep_orig=False):
    # axis literals were lost in extraction; 'row'/'col' are assumed
    df_qn = {}
    for mat_type in df:
        inst_df = df[mat_type]
        # transpose so that column normalization reuses the row code path
        if axis == 'col':
            inst_df = inst_df.transpose()
        missing_values = inst_df.isnull().values.any()
        if missing_values:
            # temporarily fill NaNs with zero; restored after normalization
            missing_mask = pd.isnull(inst_df)
            inst_df = inst_df.fillna(value=0)
        common_dist = calc_common_dist(inst_df)
        inst_df = swap_in_common_dist(inst_df, common_dist)
        if missing_values:
            inst_df = inst_df.mask(missing_mask, other=np.nan)
        if axis == 'col':
            inst_df = inst_df.transpose()
        df_qn[mat_type] = inst_df
    return df_qn
Do quantile normalization of a dataframe dictionary; does not write to net.
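Since calc_common_dist and swap_in_common_dist are helpers internal to the module, here is a self-contained sketch of plain quantile normalization (ties handled arbitrarily for brevity):

import numpy as np
import pandas as pd

def quantile_normalize(df):
    # The common distribution is the mean of each rank across columns.
    common = np.sort(df.to_numpy(), axis=0).mean(axis=1)
    out = df.copy()
    for col in df.columns:
        ranks = df[col].to_numpy().argsort().argsort()  # 0-based ranks
        out[col] = common[ranks]
    return out

df = pd.DataFrame({'a': [5.0, 2.0, 3.0], 'b': [4.0, 1.0, 6.0]})
print(quantile_normalize(df))
#      a    b
# 0  5.5  3.5
# 1  1.5  1.5
# 2  3.5  5.5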
372,399
def form_invalid(self, post_form, attachment_formset, poll_option_formset, **kwargs):
    # prefix/key literals were lost in extraction; 'poll_' and
    # 'poll_question' are assumed
    poll_errors = [k for k in post_form.errors.keys() if k.startswith('poll_')]
    if (
        poll_errors or
        (
            poll_option_formset and not poll_option_formset.is_valid() and
            len(post_form.cleaned_data['poll_question'])
        )
    ):
        messages.error(self.request, self.poll_option_formset_general_error_message)
    return super().form_invalid(
        post_form, attachment_formset,
        poll_option_formset=poll_option_formset, **kwargs)
Processes invalid forms.