code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _apply_decorator_to_methods(cls, decorator): for method in cls.methods: method_name = method.lower() decorated_method_func = decorator(getattr(cls, method_name)) setattr(cls, method_name, decorated_method_func)
This helper can apply a given decorator to all methods on the current Resource. NOTE: In contrast to ``Resource.method_decorators``, which has a similar use-case, this method applies decorators directly and override methods in-place, while the decorators listed in ``Resource.method_decorators`` are applied on every request which is quite a waste of resources.
def result_anything_found(result): keys = ['version', 'themes', 'plugins', 'interesting urls'] anything_found = False for k in keys: if k not in result: continue else: if not result[k]['is_empty']: anything_found = True return anything_found
Interim solution for the fact that sometimes determine_scanning_method can legitimately return a valid scanning method, but it results that the site does not belong to a particular CMS. @param result: the result as passed to Output.result() @return: whether anything was found.
def create_order_keyword_list(keywords): result = [] for keyword in keywords: result.append((keyword, '')) result.append(('-%s' % keyword, '')) return result
Takes a given keyword list and returns a ready-to-go list of possible ordering values. Example: ['foo'] returns [('foo', ''), ('-foo', '')]
def import_object_ns(name_space, import_str, *args, **kwargs): import_value = "%s.%s" % (name_space, import_str) try: return import_class(import_value)(*args, **kwargs) except ImportError: return import_class(import_str)(*args, **kwargs)
Tries to import object from default namespace. Imports a class and return an instance of it, first by trying to find the class in a default namespace, then failing back to a full path if not found in the default namespace.
def add_workflow_definitions(sbi_config: dict): registered_workflows = [] for i in range(len(sbi_config['processing_blocks'])): workflow_config = sbi_config['processing_blocks'][i]['workflow'] workflow_name = '{}:{}'.format(workflow_config['id'], workflow_config['version']) if workflow_name in registered_workflows: continue workflow_definition = dict( id=workflow_config['id'], version=workflow_config['version'], stages=[] ) key = "workflow_definitions:{}:{}".format(workflow_config['id'], workflow_config['version']) DB.save_dict(key, workflow_definition, hierarchical=False) registered_workflows.append(workflow_name)
Add any missing SBI workflow definitions as placeholders. This is a utility function used in testing and adds mock / test workflow definitions to the database for workflows defined in the specified SBI config. Args: sbi_config (dict): SBI configuration dictionary.
def map_cluster(events, cluster): cluster = np.ascontiguousarray(cluster) events = np.ascontiguousarray(events) mapped_cluster = np.zeros((events.shape[0], ), dtype=dtype_from_descr(data_struct.ClusterInfoTable)) mapped_cluster = np.ascontiguousarray(mapped_cluster) analysis_functions.map_cluster(events, cluster, mapped_cluster) return mapped_cluster
Maps the cluster hits on events. Not existing hits in events have all values set to 0
def treeAggregate(self, zeroValue, seqOp, combOp, depth=2): if depth < 1: raise ValueError("Depth cannot be smaller than 1 but got %d." % depth) if self.getNumPartitions() == 0: return zeroValue def aggregatePartition(iterator): acc = zeroValue for obj in iterator: acc = seqOp(acc, obj) yield acc partiallyAggregated = self.mapPartitions(aggregatePartition) numPartitions = partiallyAggregated.getNumPartitions() scale = max(int(ceil(pow(numPartitions, 1.0 / depth))), 2) while numPartitions > scale + numPartitions / scale: numPartitions /= scale curNumPartitions = int(numPartitions) def mapPartition(i, iterator): for obj in iterator: yield (i % curNumPartitions, obj) partiallyAggregated = partiallyAggregated \ .mapPartitionsWithIndex(mapPartition) \ .reduceByKey(combOp, curNumPartitions) \ .values() return partiallyAggregated.reduce(combOp)
Aggregates the elements of this RDD in a multi-level tree pattern. :param depth: suggested depth of the tree (default: 2) >>> add = lambda x, y: x + y >>> rdd = sc.parallelize([-5, -4, -3, -2, -1, 1, 2, 3, 4], 10) >>> rdd.treeAggregate(0, add, add) -5 >>> rdd.treeAggregate(0, add, add, 1) -5 >>> rdd.treeAggregate(0, add, add, 2) -5 >>> rdd.treeAggregate(0, add, add, 5) -5 >>> rdd.treeAggregate(0, add, add, 10) -5
def logout(self, refresh_token): return self._realm.client.post(self.get_url('end_session_endpoint'), data={ 'refresh_token': refresh_token, 'client_id': self._client_id, 'client_secret': self._client_secret })
The logout endpoint logs out the authenticated user. :param str refresh_token:
def _insert_breathe_configs(c, *, project_name, doxygen_xml_dirname): if doxygen_xml_dirname is not None: c['breathe_projects'] = {project_name: doxygen_xml_dirname} c['breathe_default_project'] = project_name return c
Add breathe extension configurations to the state.
def runSearchReadGroupSets(self, request): return self.runSearchRequest( request, protocol.SearchReadGroupSetsRequest, protocol.SearchReadGroupSetsResponse, self.readGroupSetsGenerator)
Runs the specified SearchReadGroupSetsRequest.
def ip_in_ip_mask(ip, mask_ip, mask): ip = ip2hex(ip) if ip is None: raise Exception("bad ip format") if (mask_ip & mask) == (ip & mask): return True return False
Checks whether an ip is contained in an ip subnet where the subnet is stated as an ip in the dotted format, and a hex mask
def push_resource_cache(resourceid, info): if not resourceid: raise ResourceInitError("Resource id missing") if not DutInformationList._cache.get(resourceid): DutInformationList._cache[resourceid] = dict() DutInformationList._cache[resourceid] = merge(DutInformationList._cache[resourceid], info)
Cache resource specific information :param resourceid: Resource id as string :param info: Dict to push :return: Nothing
def add_host(host): p = new_prefix() p.prefix = str(host['ipaddr']) p.type = "host" p.description = host['description'] p.node = host['fqdn'] p.avps = {} if 'additional' in host: p.comment = host['additional'] if len(host['location']) > 0: p.avps['location'] = host['location'] if len(host['mac']) > 0: p.avps['mac'] = host['mac'] if len(host['phone']) > 0: p.avps['phone'] = host['phone'] if len(host['user']) > 0: p.avps['user'] = host['user'] return p
Put your host information in the prefix object.
def clean_session_table(): sessions = SessionActivity.query_by_expired().all() for session in sessions: delete_session(sid_s=session.sid_s) db.session.commit()
Automatically clean session table. To enable a periodically clean of the session table, you should configure the task as a celery periodic task. .. code-block:: python from datetime import timedelta CELERYBEAT_SCHEDULE = { 'session_cleaner': { 'task': 'invenio_accounts.tasks.clean_session_table', 'schedule': timedelta(days=1), }, } See `Invenio-Celery <https://invenio-celery.readthedocs.io/>`_ documentation for further details.
def __read_docker_compose_file(file_path): if not os.path.isfile(file_path): return __standardize_result(False, 'Path {} is not present'.format(file_path), None, None) try: with salt.utils.files.fopen(file_path, 'r') as fl: file_name = os.path.basename(file_path) result = {file_name: ''} for line in fl: result[file_name] += salt.utils.stringutils.to_unicode(line) except EnvironmentError: return __standardize_result(False, 'Could not read {0}'.format(file_path), None, None) return __standardize_result(True, 'Reading content of {0}'.format(file_path), result, None)
Read the compose file if it exists in the directory :param file_path: :return:
def get_file_hash( fd, hashfunc, fd_len=None ): h = hashfunc() fd.seek(0, os.SEEK_SET) count = 0 while True: buf = fd.read(65536) if len(buf) == 0: break if fd_len is not None: if count + len(buf) > fd_len: buf = buf[:fd_len - count] h.update(buf) count += len(buf) hashed = h.hexdigest() return hashed
Get the hex-encoded hash of the fd's data
def get_contents_sig(self): try: return self.contentsig except AttributeError: pass executor = self.get_executor() result = self.contentsig = SCons.Util.MD5signature(executor.get_contents()) return result
A helper method for get_cachedir_bsig. It computes and returns the signature for this node's contents.
def _sge_info(queue): qhost_out = subprocess.check_output(["qhost", "-q", "-xml"]).decode() qstat_queue = ["-q", queue] if queue and "," not in queue else [] qstat_out = subprocess.check_output(["qstat", "-f", "-xml"] + qstat_queue).decode() slot_info = _sge_get_slots(qstat_out) mem_info = _sge_get_mem(qhost_out, queue) machine_keys = slot_info.keys() mem_per_slot = [mem_info[x]["mem_total"] / float(slot_info[x]["slots_total"]) for x in machine_keys] min_ratio_index = mem_per_slot.index(median_left(mem_per_slot)) mem_info[machine_keys[min_ratio_index]]["mem_total"] return [{"cores": slot_info[machine_keys[min_ratio_index]]["slots_total"], "memory": mem_info[machine_keys[min_ratio_index]]["mem_total"], "name": "sge_machine"}]
Returns machine information for an sge job scheduler.
def from_dict(cls, metadata): hyperparameters = metadata.get('hyperparameters') tunable = metadata.get('tunable_hyperparameters') pipeline = cls( metadata['primitives'], metadata.get('init_params'), metadata.get('input_names'), metadata.get('output_names'), ) if hyperparameters: pipeline.set_hyperparameters(hyperparameters) if tunable is not None: pipeline._tunable_hyperparameters = tunable return pipeline
Create a new MLPipeline from a dict specification. The dict structure is the same as the one created by the `to_dict` method. Args: metadata (dict): Dictionary containing the pipeline specification. Returns: MLPipeline: A new MLPipeline instance with the details found in the given specification dictionary.
def send_quick_chat_from_agent(self, team_only, quick_chat): rlbot_status = send_quick_chat_flat(self.game_interface, self.index, self.team, team_only, quick_chat) if rlbot_status == RLBotCoreStatus.QuickChatRateExceeded: self.logger.debug('quick chat disabled') else: send_quick_chat(self.quick_chat_queue_holder, self.index, self.team, team_only, quick_chat)
Passes the agents quick chats to the game, and also to other python bots. This does perform limiting. You are limited to 5 quick chats in a 2 second period starting from the first chat. This means you can spread your chats out to be even within that 2 second period. You could spam them in the first little bit but then will be throttled.
def get_user_stats(self, name): req = self.conn.get(BASE_URL + "/user/" + name) if req.status_code != 200 or not name: return None return self.conn.make_api_call("getUserInfo", {"name": name})
Return data about the given user. Returns None if user does not exist.
def apns_send_bulk_message( registration_ids, alert, application_id=None, certfile=None, **kwargs ): results = _apns_send( registration_ids, alert, batch=True, application_id=application_id, certfile=certfile, **kwargs ) inactive_tokens = [token for token, result in results.items() if result == "Unregistered"] models.APNSDevice.objects.filter(registration_id__in=inactive_tokens).update(active=False) return results
Sends an APNS notification to one or more registration_ids. The registration_ids argument needs to be a list. Note that if set alert should always be a string. If it is not set, it won"t be included in the notification. You will need to pass None to this for silent notifications.
def convert_transpose(net, node, module, builder): input_name, output_name = _get_input_output_name(net, node) name = node['name'] param = _get_attrs(node) axes = literal_eval(param['axes']) builder.add_permute(name, axes, input_name, output_name)
Convert a transpose layer from mxnet to coreml. Parameters ---------- network: net A mxnet network object. layer: node Node to convert. module: module An module for MXNet builder: NeuralNetworkBuilder A neural network builder object.
def rectangle_centroid(rectangle): bbox = rectangle['coordinates'][0] xmin = bbox[0][0] ymin = bbox[0][1] xmax = bbox[2][0] ymax = bbox[2][1] xwidth = xmax - xmin ywidth = ymax - ymin return {'type': 'Point', 'coordinates': [xmin + xwidth / 2, ymin + ywidth / 2]}
get the centroid of the rectangle Keyword arguments: rectangle -- polygon geojson object return centroid
def configure_logger(self): logger_name = 'brome_runner' self.logger = logging.getLogger(logger_name) format_ = BROME_CONFIG['logger_runner']['format'] if BROME_CONFIG['logger_runner']['streamlogger']: sh = logging.StreamHandler() stream_formatter = logging.Formatter(format_) sh.setFormatter(stream_formatter) self.logger.addHandler(sh) if BROME_CONFIG['logger_runner']['filelogger'] and \ self.runner_dir: self.log_file_path = os.path.join( self.runner_dir, '%s.log' % logger_name ) self.relative_log_file_path = os.path.join( self.relative_runner_dir, '%s.log' % logger_name ) fh = logging.FileHandler( self.log_file_path ) file_formatter = logging.Formatter(format_) fh.setFormatter(file_formatter) self.logger.addHandler(fh) self.logger.setLevel( getattr( logging, BROME_CONFIG['logger_runner']['level'] ) )
Configure the test batch runner logger
def has_pending(self): if self.workqueue: return True for assigned_unit in self.assigned_work.values(): if self._pending_of(assigned_unit) > 0: return True return False
Return True if there are pending test items. This indicates that collection has finished and nodes are still processing test items, so this can be thought of as "the scheduler is active".
def command(self, details): log = self._params.get('log', self._discard) if '_config_running' not in dir(self._parent) or 'commands' not in self._parent._config_running: log.error("Event parent '%s' has no 'commands' config section", self._name) return commands = self._parent._config_running['commands'] if self._handler_arg not in commands: if self._handler_arg == 'stop': self._parent.stop() else: log.error("Event parent '%s' has no '%s' command configured", self._name, self._handler_arg) return pid = _exec_process(commands[self._handler_arg], self._parent._context, log=log) log.info("Forked pid %d for %s(%s)", pid, self._name, self._handler_arg) self._parent._legion.proc_add(event_target(self._parent, 'command_exit', key=pid, arg=self._handler_arg, log=log))
Handles executing a command-based event. This starts the command as specified in the 'commands' section of the task config. A separate event is registered to handle the command exit. This simply logs the exit status.
def message_info(exchange, routing_key, properties): output = [] if properties.message_id: output.append(properties.message_id) if properties.correlation_id: output.append('[correlation_id="{}"]'.format( properties.correlation_id)) if exchange: output.append('published to "{}"'.format(exchange)) if routing_key: output.append('using "{}"'.format(routing_key)) return ' '.join(output)
Return info about a message using the same conditional constructs :param str exchange: The exchange the message was published to :param str routing_key: The routing key used :param properties: The AMQP message properties :type properties: pika.spec.Basic.Properties :rtype: str
def create_chat(self, blogname, **kwargs): kwargs.update({"type": "chat"}) return self._send_post(blogname, kwargs)
Create a chat post on a blog :param blogname: a string, the url of the blog you want to post to. :param state: a string, The state of the post. :param tags: a list of tags that you want applied to the post :param tweet: a string, the customized tweet that you want :param date: a string, the GMT date and time of the post :param format: a string, sets the format type of the post. html or markdown :param slug: a string, a short text summary to the end of the post url :param title: a string, the title of the conversation :param conversation: a string, the conversation you are posting :returns: a dict created from the JSON response
def save_map(self, map_path, map_data): return self._client.send(save_map=sc_pb.RequestSaveMap( map_path=map_path, map_data=map_data))
Save a map into temp dir so create game can access it in multiplayer.
def _tokens_to_subtoken_ids(self, tokens): ret = [] for token in tokens: ret.extend(self._token_to_subtoken_ids(token)) return ret
Converts a list of tokens to a list of subtoken ids. Args: tokens: a list of strings. Returns: a list of integers in the range [0, vocab_size)
def yaml_encode(data): yrepr = yaml.representer.SafeRepresenter() ynode = yrepr.represent_data(data) if not isinstance(ynode, yaml.ScalarNode): raise TypeError( "yaml_encode() only works with YAML scalar data;" " failed for {0}".format(type(data)) ) tag = ynode.tag.rsplit(':', 1)[-1] ret = ynode.value if tag == "str": ret = yaml_dquote(ynode.value) return ret
A simple YAML encode that can take a single-element datatype and return a string representation.
def get_register(self, cpu_id, name): if not isinstance(cpu_id, baseinteger): raise TypeError("cpu_id can only be an instance of type baseinteger") if not isinstance(name, basestring): raise TypeError("name can only be an instance of type basestring") value = self._call("getRegister", in_p=[cpu_id, name]) return value
Gets one register. in cpu_id of type int The identifier of the Virtual CPU. in name of type str The register name, case is ignored. return value of type str The register value. This is usually a hex value (always 0x prefixed) but other format may be used for floating point registers (TBD).
def console_get_height_rect( con: tcod.console.Console, x: int, y: int, w: int, h: int, fmt: str ) -> int: return int( lib.TCOD_console_get_height_rect_fmt( _console(con), x, y, w, h, _fmt(fmt) ) )
Return the height of this text once word-wrapped into this rectangle. Returns: int: The number of lines of text once word-wrapped. .. deprecated:: 8.5 Use :any:`Console.get_height_rect` instead.
def _chunkForSend(self, data): LIMIT = self.CHUNK_LIMIT for i in range(0, len(data), LIMIT): yield data[i:i + LIMIT]
limit the chunks that we send over PB to 128k, since it has a hardwired string-size limit of 640k.
def get_password(hsm, args): expected_len = 32 name = 'HSM password' if hsm.version.have_key_store_decrypt(): expected_len = 64 name = 'master key' if args.stdin: password = sys.stdin.readline() while password and password[-1] == '\n': password = password[:-1] else: if args.debug: password = raw_input('Enter %s (press enter to skip) (will be echoed) : ' % (name)) else: password = getpass.getpass('Enter %s (press enter to skip) : ' % (name)) if len(password) <= expected_len: password = password.decode('hex') if not password: return None return password else: sys.stderr.write("ERROR: Invalid HSM password (expected max %i chars, got %i)\n" % \ (expected_len, len(password))) return 1
Get password of correct length for this YubiHSM version.
def filter_lines_from_comments(lines): for line_nb, raw_line in enumerate(lines): clean_line = remove_comments_from_line(raw_line) if clean_line == '': continue yield line_nb, clean_line, raw_line
Filter the lines from comments and non code lines.
def get_word_app (): if not has_word(): return None pythoncom.CoInitialize() import win32com.client app = win32com.client.gencache.EnsureDispatch("Word.Application") app.Visible = False return app
Return open Word.Application handle, or None if Word is not available on this system.
def list_queries(self, **kwargs): kwargs = self._verify_sort_options(kwargs) kwargs = self._verify_filters(kwargs, Query, True) api = self._get_api(device_directory.DefaultApi) return PaginatedResponse(api.device_query_list, lwrap_type=Query, **kwargs)
List queries in device query service. :param int limit: The number of devices to retrieve. :param str order: The ordering direction, ascending (asc) or descending (desc) :param str after: Get devices after/starting at given `device_id` :param filters: Dictionary of filters to apply. :returns: a list of :py:class:`Query` objects. :rtype: PaginatedResponse
def copy(self, dst_bucket, dst_key, metadata=None, reduced_redundancy=False, preserve_acl=False, encrypt_key=False): dst_bucket = self.bucket.connection.lookup(dst_bucket) if reduced_redundancy: storage_class = 'REDUCED_REDUNDANCY' else: storage_class = self.storage_class return dst_bucket.copy_key(dst_key, self.bucket.name, self.name, metadata, storage_class=storage_class, preserve_acl=preserve_acl, encrypt_key=encrypt_key)
Copy this Key to another bucket. :type dst_bucket: string :param dst_bucket: The name of the destination bucket :type dst_key: string :param dst_key: The name of the destination key :type metadata: dict :param metadata: Metadata to be associated with new key. If metadata is supplied, it will replace the metadata of the source key being copied. If no metadata is supplied, the source key's metadata will be copied to the new key. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will force the storage class of the new Key to be REDUCED_REDUNDANCY regardless of the storage class of the key being copied. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type preserve_acl: bool :param preserve_acl: If True, the ACL from the source key will be copied to the destination key. If False, the destination key will have the default ACL. Note that preserving the ACL in the new key object will require two additional API calls to S3, one to retrieve the current ACL and one to set that ACL on the new object. If you don't care about the ACL, a value of False will be significantly more efficient. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :rtype: :class:`boto.s3.key.Key` or subclass :returns: An instance of the newly created key object
def streaming_recognize( self, config, requests, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, ): return super(SpeechHelpers, self).streaming_recognize( self._streaming_request_iterable(config, requests), retry=retry, timeout=timeout, )
Perform bi-directional speech recognition. This method allows you to receive results while sending audio; it is only available via. gRPC (not REST). .. warning:: This method is EXPERIMENTAL. Its interface might change in the future. Example: >>> from google.cloud.speech_v1 import enums >>> from google.cloud.speech_v1 import SpeechClient >>> from google.cloud.speech_v1 import types >>> client = SpeechClient() >>> config = types.StreamingRecognitionConfig( ... config=types.RecognitionConfig( ... encoding=enums.RecognitionConfig.AudioEncoding.FLAC, ... ), ... ) >>> request = types.StreamingRecognizeRequest(audio_content=b'...') >>> requests = [request] >>> for element in client.streaming_recognize(config, requests): ... # process element ... pass Args: config (:class:`~.types.StreamingRecognitionConfig`): The configuration to use for the stream. requests (Iterable[:class:`~.types.StreamingRecognizeRequest`]): The input objects. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Returns: Iterable[:class:`~.types.StreamingRecognizeResponse`] Raises: :exc:`google.gax.errors.GaxError` if the RPC is aborted. :exc:`ValueError` if the parameters are invalid.
def respond_to_SIGHUP(signal_number, frame, logger=None): global restart restart = True if logger: logger.info('detected SIGHUP') raise KeyboardInterrupt
raise the KeyboardInterrupt which will cause the app to effectively shutdown, closing all it resources. Then, because it sets 'restart' to True, the app will reread all the configuration information, rebuild all of its structures and resources and start running again
def parse_table_column_properties(doc, cell, prop): "Parse table column properties." if not cell: return grid = prop.find(_name('{{{w}}}gridSpan')) if grid is not None: cell.grid_span = int(grid.attrib[_name('{{{w}}}val')]) vmerge = prop.find(_name('{{{w}}}vMerge')) if vmerge is not None: if _name('{{{w}}}val') in vmerge.attrib: cell.vmerge = vmerge.attrib[_name('{{{w}}}val')] else: cell.vmerge = ""
Parse table column properties.
def store_many_vectors(self, hash_name, bucket_keys, vs, data): if data is None: data = itertools.repeat(data) for v, k, d in zip(vs, bucket_keys, data): self.store_vector(hash_name, k, v, d)
Store a batch of vectors. Stores vector and JSON-serializable data in bucket with specified key.
def set_connection(connection=defaults.sqlalchemy_connection_string_default): cfp = defaults.config_file_path config = RawConfigParser() if not os.path.exists(cfp): with open(cfp, 'w') as config_file: config['database'] = {'sqlalchemy_connection_string': connection} config.write(config_file) log.info('create configuration file %s', cfp) else: config.read(cfp) config.set('database', 'sqlalchemy_connection_string', connection) with open(cfp, 'w') as configfile: config.write(configfile)
Set the connection string for SQLAlchemy :param str connection: SQLAlchemy connection string
async def connection_exists(ssid: str) -> Optional[str]: nmcli_conns = await connections() for wifi in [c['name'] for c in nmcli_conns if c['type'] == 'wireless']: res, _ = await _call(['-t', '-f', '802-11-wireless.ssid', '-m', 'tabular', 'connection', 'show', wifi]) if res == ssid: return wifi return None
If there is already a connection for this ssid, return the name of the connection; if there is not, return None.
def _GetConnectionArgs(host=None, port=None, user=None, password=None, database=None, client_key_path=None, client_cert_path=None, ca_cert_path=None): connection_args = dict( autocommit=False, use_unicode=True, charset=CHARACTER_SET) if host is not None: connection_args["host"] = host if port is not None: connection_args["port"] = port if user is not None: connection_args["user"] = user if password is not None: connection_args["passwd"] = password if database is not None: connection_args["db"] = database if client_key_path is not None: connection_args["ssl"] = { "key": client_key_path, "cert": client_cert_path, "ca": ca_cert_path, } return connection_args
Builds connection arguments for MySQLdb.Connect function.
def union(self, other): if isinstance(other, tuple): other = list(other) return type(self)(super().__add__(other))
Returns a FrozenList with other concatenated to the end of self. Parameters ---------- other : array-like The array-like whose elements we are concatenating. Returns ------- diff : FrozenList The collection difference between self and other.
def copy_and_sum_families(family_source, family_target): for every in family_source: if every not in family_target: family_target[every] = family_source[every] else: family_target[every] += family_source[every]
methods iterates thru source family and copies its entries to target family in case key already exists in both families - then the values are added
def safestr(str_): str_ = str_ or "" return "".join(x for x in str_ if x.isalnum())
get back an alphanumeric only version of source
def get_temp_url(self, obj, seconds, method="GET", key=None, cached=True): return self.manager.get_temp_url(self, obj, seconds, method=method, key=key, cached=cached)
Given a storage object in this container, returns a URL that can be used to access that object. The URL will expire after `seconds` seconds. The only methods supported are GET and PUT. Anything else will raise an `InvalidTemporaryURLMethod` exception. If you have your Temporary URL key, you can pass it in directly and potentially save an API call to retrieve it. If you don't pass in the key, and don't wish to use any cached value, pass `cached=False`.
def get_theming_attribute(self, mode, name, part=None): colours = int(self._config.get('colourmode')) return self._theme.get_attribute(colours, mode, name, part)
looks up theming attribute :param mode: ui-mode (e.g. `search`,`thread`...) :type mode: str :param name: identifier of the atttribute :type name: str :rtype: urwid.AttrSpec
def do_list_organizations(self, line): org_list = self.dcnm_client.list_organizations() if not org_list: print('No organization found.') return org_table = PrettyTable(['Organization Name']) for org in org_list: org_table.add_row([org['organizationName']]) print(org_table)
Get list of organization on DCNM.
def clone(cls, objective, model=None, **kwargs): return cls(cls._substitute_variables(objective, model=model), name=objective.name, direction=objective.direction, sloppy=True, **kwargs)
Make a copy of an objective. The objective being copied can be of the same type or belong to a different solver interface. Example ---------- >>> new_objective = Objective.clone(old_objective)
def get_available_translations(): locale_path = get_module_data_path("spyder", relpath="locale", attr_name='LOCALEPATH') listdir = os.listdir(locale_path) langs = [d for d in listdir if osp.isdir(osp.join(locale_path, d))] langs = [DEFAULT_LANGUAGE] + langs langs = list( set(langs) - set(DISABLED_LANGUAGES) ) for lang in langs: if lang not in LANGUAGE_CODES: error = ('Update LANGUAGE_CODES (inside config/base.py) if a new ' 'translation has been added to Spyder') print(error) return ['en'] return langs
List available translations for spyder based on the folders found in the locale folder. This function checks if LANGUAGE_CODES contain the same information that is found in the 'locale' folder to ensure that when a new language is added, LANGUAGE_CODES is updated.
def html_to_tags(code): code = ('<div>' + code + '</div>').encode('utf8') el = ET.fromstring(code) return [tag_from_element(c) for c in el]
Convert HTML code to tags. ``code`` is a string containing HTML code. The return value is a list of corresponding instances of ``TagBase``.
def run_recipe_timed(task, recipe, rinput): _logger.info('running recipe') now1 = datetime.datetime.now() task.state = 1 task.time_start = now1 result = recipe(rinput) _logger.info('result: %r', result) task.result = result now2 = datetime.datetime.now() task.state = 2 task.time_end = now2 return task
Run the recipe and count the time it takes.
def parse_group(cls, group, lines, dist=None): if not MODULE(group): raise ValueError("Invalid group name", group) this = {} for line in yield_lines(lines): ep = cls.parse(line, dist) if ep.name in this: raise ValueError("Duplicate entry point", group, ep.name) this[ep.name] = ep return this
Parse an entry point group
def set_menu(self, menu): self.menu = menu wx_menu = menu.wx_menu() self.frame.SetMenuBar(wx_menu) self.frame.Bind(wx.EVT_MENU, self.on_menu)
add a menu from the parent
def infer(self, sensationList, reset=True, objectName=None): self._unsetLearningMode() statistics = collections.defaultdict(list) if objectName is not None: if objectName not in self.objectRepresentationsL2: raise ValueError("The provided objectName was not given during" " learning") for sensations in sensationList: for col in xrange(self.numColumns): location, coarseFeature, fineFeature = sensations[col] self.locationInputs[col].addDataToQueue(list(location), 0, 0) self.coarseSensors[col].addDataToQueue(list(coarseFeature), 0, 0) self.sensors[col].addDataToQueue(list(fineFeature), 0, 0) self.network.run(1) self._updateInferenceStats(statistics, objectName) if reset: self._sendReset() statistics["numSteps"] = len(sensationList) statistics["object"] = objectName if objectName is not None else "Unknown" self.statistics.append(statistics)
Infer on a given set of sensations for a single object. The provided sensationList is a list of sensations, and each sensation is a mapping from cortical column to a tuple of three SDR's respectively corresponding to the locationInput, the coarseSensorInput, and the sensorInput. For example, the input can look as follows, if we are inferring a simple object with two sensations (with very few active bits for simplicity): sensationList = [ { # location, coarse feature, fine feature for CC0, sensation 1 0: ( [1, 5, 10], [9, 32, 75], [6, 12, 52] ), # location, coarse feature, fine feature for CC1, sensation 1 1: ( [6, 2, 15], [11, 42, 92], [7, 11, 50] ), }, { # location, coarse feature, fine feature for CC0, sensation 2 0: ( [2, 9, 10], [10, 35, 78], [6, 12, 52] ), # location, coarse feature, fine feature for CC1, sensation 2 1: ( [1, 4, 12], [10, 32, 52], [6, 10, 52] ), }, ] If the object is known by the caller, an object name can be specified as an optional argument, and must match the objects given while learning. This is used later when evaluating inference statistics. Parameters: ---------------------------- @param objects (dict) Objects to learn, in the canonical format specified above @param reset (bool) If set to True (which is the default value), the network will be reset after learning. @param objectName (str) Name of the objects (must match the names given during learning).
def update_devices(self, devices): for qspacket in devices: try: qsid = qspacket[QS_ID] except KeyError: _LOGGER.debug("Device without ID: %s", qspacket) continue if qsid not in self: self[qsid] = QSDev(data=qspacket) dev = self[qsid] dev.data = qspacket newqs = _legacy_status(qspacket[QS_VALUE]) if dev.is_dimmer: newqs = min(round(math.pow(newqs, self.dim_adj)), 100) newin = round(newqs * _MAX / 100) if abs(dev.value - newin) > 1: _LOGGER.debug("%s qs=%s --> %s", qsid, newqs, newin) dev.value = newin self._cb_value_changed(self, qsid, newin)
Update values from response of URL_DEVICES, callback if changed.
def pin_assets(self, file_or_dir_path: Path) -> List[Dict[str, str]]: if file_or_dir_path.is_dir(): asset_data = [dummy_ipfs_pin(path) for path in file_or_dir_path.glob("*")] elif file_or_dir_path.is_file(): asset_data = [dummy_ipfs_pin(file_or_dir_path)] else: raise FileNotFoundError( f"{file_or_dir_path} is not a valid file or directory path." ) return asset_data
Return a dict containing the IPFS hash, file name, and size of a file.
def delete_telnet_template(auth, url, template_name= None, template_id= None): try: if template_id is None: telnet_templates = get_telnet_template(auth, url) if template_name is None: template_name = telnet_template['name'] template_id = None for template in telnet_templates: if template['name'] == template_name: template_id = template['id'] f_url = url + "/imcrs/plat/res/telnet/%s/delete" % template_id response = requests.delete(f_url, auth=auth, headers=HEADERS) return response.status_code except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " delete_telnet_template: An Error has occured"
Takes template_name as input to issue RESTUL call to HP IMC which will delete the specific telnet template from the IMC system :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :param template_name: str value of template name :param template_id: str value template template_id value :return: int HTTP response code :rtype int
def solve(self): Xi = self.cbpdn.solve() self.timer = self.cbpdn.timer self.itstat = self.cbpdn.itstat return Xi
Call the solve method of the inner cbpdn object and return the result.
def get_templates(self, id_or_uri, start=0, count=-1, filter='', query='', sort=''): uri = self._client.build_uri(id_or_uri) + "/templates" return self._client.get(self._client.build_query_uri(start=start, count=count, filter=filter, query=query, sort=sort, uri=uri))
Gets a list of volume templates. Returns a list of storage templates belonging to the storage system. Returns: list: Storage Template List.
def setProperty(self, name, value): styleDict = self._styleDict if value in ('', None): try: del styleDict[name] except KeyError: pass else: styleDict[name] = str(value)
setProperty - Set a style property to a value. NOTE: To remove a style, use a value of empty string, or None @param name <str> - The style name. NOTE: The dash names are expected here, whereas dot-access expects the camel case names. Example: name="font-weight" versus the dot-access style.fontWeight @param value <str> - The style value, or empty string to remove property
def ttvar(name, index=None): bvar = boolfunc.var(name, index) try: var = _VARS[bvar.uniqid] except KeyError: var = _VARS[bvar.uniqid] = TTVariable(bvar) return var
Return a TruthTable variable. Parameters ---------- name : str The variable's identifier string. index : int or tuple[int], optional One or more integer suffixes for variables that are part of a multi-dimensional bit-vector, eg x[1], x[1][2][3]
def libvlc_audio_output_set(p_mi, psz_name): f = _Cfunctions.get('libvlc_audio_output_set', None) or \ _Cfunction('libvlc_audio_output_set', ((1,), (1,),), None, ctypes.c_int, MediaPlayer, ctypes.c_char_p) return f(p_mi, psz_name)
Selects an audio output module. @note: Any change will take be effect only after playback is stopped and restarted. Audio output cannot be changed while playing. @param p_mi: media player. @param psz_name: name of audio output, use psz_name of See L{AudioOutput}. @return: 0 if function succeded, -1 on error.
def save_config(self, cmd="write", confirm=False, confirm_response=""): return super(IpInfusionOcNOSBase, self).save_config( cmd=cmd, confirm=confirm, confirm_response=confirm_response )
Saves Config Using write command
def samples_by_indices(self, indices): if not self._random_access: raise TypeError('samples_by_indices method not supported as one ' 'or more of the underlying data sources does ' 'not support random access') batch = self.source.samples_by_indices(indices) return self.fn(*batch)
Gather a batch of samples by indices, applying any index mapping defined by the underlying data sources. Parameters ---------- indices: 1D-array of ints or slice An index array or a slice that selects the samples to retrieve Returns ------- nested list of arrays A mini-batch
def client_auth(self): if not self._client_auth: self._client_auth = E.Element('merchantAuthentication') E.SubElement(self._client_auth, 'name').text = self.config.login_id E.SubElement(self._client_auth, 'transactionKey').text = self.config.transaction_key return self._client_auth
Generate an XML element with client auth data populated.
def delete(self, remove_tombstone=True): response = self.repo.api.http_request('DELETE', self.uri) if response.status_code == 204: self._empty_resource_attributes() if remove_tombstone: self.repo.api.http_request('DELETE', '%s/fcr:tombstone' % self.uri) return True
Method to delete resources. Args: remove_tombstone (bool): If True, will remove tombstone at uri/fcr:tombstone when removing resource. Returns: (bool)
def add_get(self, path: str, handler: _WebHandler, *, name: Optional[str]=None, allow_head: bool=True, **kwargs: Any) -> AbstractRoute: resource = self.add_resource(path, name=name) if allow_head: resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs)
Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint
def _get_dependency_specification(dep_spec: typing.List[tuple]) -> str: return ",".join(dep_range[0] + dep_range[1] for dep_range in dep_spec)
Get string representation of dependency specification as provided by PythonDependencyParser.
def add_log_type(self, logType, name=None, level=0, stdoutFlag=None, fileFlag=None, color=None, highlight=None, attributes=None): assert logType not in self.__logTypeStdoutFlags.keys(), "logType '%s' already defined" %logType assert isinstance(logType, basestring), "logType must be a string" logType = str(logType) self.__set_log_type(logType=logType, name=name, level=level, stdoutFlag=stdoutFlag, fileFlag=fileFlag, color=color, highlight=highlight, attributes=attributes)
Add a new logtype. :Parameters: #. logType (string): The logtype. #. name (None, string): The logtype name. If None, name will be set to logtype. #. level (number): The level of logging. #. stdoutFlag (None, boolean): Force standard output logging flag. If None, flag will be set according to minimum and maximum levels. #. fileFlag (None, boolean): Force file logging flag. If None, flag will be set according to minimum and maximum levels. #. color (None, string): The logging text color. The defined colors are:\n black , red , green , orange , blue , magenta , cyan , grey , dark grey , light red , light green , yellow , light blue , pink , light cyan #. highlight (None, string): The logging text highlight color. The defined highlights are:\n black , red , green , orange , blue , magenta , cyan , grey #. attributes (None, string): The logging text attribute. The defined attributes are:\n bold , underline , blink , invisible , strike through **N.B** *logging color, highlight and attributes are not allowed on all types of streams.*
def get(self, sid): return TaskQueueContext(self._version, workspace_sid=self._solution['workspace_sid'], sid=sid, )
Constructs a TaskQueueContext :param sid: The sid :returns: twilio.rest.taskrouter.v1.workspace.task_queue.TaskQueueContext :rtype: twilio.rest.taskrouter.v1.workspace.task_queue.TaskQueueContext
def _repr_html_(self, **kwargs): html = self.render(**kwargs) html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8') if self.height is None: iframe = ( '<div style="width:{width};">' '<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">' '<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;' 'border:none !important;" ' 'allowfullscreen webkitallowfullscreen mozallowfullscreen>' '</iframe>' '</div></div>').format iframe = iframe(html=html, width=self.width, ratio=self.ratio) else: iframe = ('<iframe src="{html}" width="{width}" height="{height}"' 'style="border:none !important;" ' '"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">' '</iframe>').format iframe = iframe(html=html, width=self.width, height=self.height) return iframe
Displays the Figure in a Jupyter notebook.
def active(self) -> bool: states = self._client.get_state(self._state_url)['states'] for state in states: state = state['State'] if int(state['Id']) == self._state_id: return state['IsActive'] == "1" return False
Indicate if this RunState is currently active.
def _load_connection_error(hostname, error): ret = {'code': None, 'content': 'Error: Unable to connect to the bigip device: {host}\n{error}'.format(host=hostname, error=error)} return ret
Format and Return a connection error
def stop(self, timeout=None): assert self.scan_id is not None, 'No scan_id has been set' if timeout is None: url = '/scans/%s/stop' % self.scan_id self.conn.send_request(url, method='GET') return self.stop() for _ in xrange(timeout): time.sleep(1) is_running = self.get_status()['is_running'] if not is_running: return msg = 'Failed to stop the scan in %s seconds' raise ScanStopTimeoutException(msg % timeout)
Send the GET request required to stop the scan If timeout is not specified we just send the request and return. When it is the method will wait for (at most) :timeout: seconds until the scan changes it's status/stops. If the timeout is reached then an exception is raised. :param timeout: The timeout in seconds :return: None, an exception is raised if the timeout is exceeded
def add_insert(self, document): validate_is_document_type("document", document) if not (isinstance(document, RawBSONDocument) or '_id' in document): document['_id'] = ObjectId() self.ops.append((_INSERT, document))
Add an insert document to the list of ops.
def _ExtractGoogleSearchQuery(self, url): if 'search' not in url or 'q=' not in url: return None line = self._GetBetweenQEqualsAndAmpersand(url) if not line: return None return line.replace('+', ' ')
Extracts a search query from a Google URL. Google Drive: https://drive.google.com/drive/search?q=query Google Search: https://www.google.com/search?q=query Google Sites: https://sites.google.com/site/.*/system/app/pages/ search?q=query Args: url (str): URL. Returns: str: search query or None if no query was found.
def comment(self, body): self.github_request.comment(issue=self, body=body) if self.state == 'closed': self.open_issue() return self
Adds a comment to the issue. :params body: body, content of the comment :returns: issue object :rtype: :class:`exreporter.stores.github.GithubIssue`
def setUserAgent(self, uA=None): logger = logging.getLogger("osrframework.utils") if not uA: if self.userAgents: logger = logging.debug("Selecting a new random User Agent.") uA = random.choice(self.userAgents) else: logger = logging.debug("No user agent was inserted.") return False self.br.addheaders = [ ('User-agent', uA), ] return True
This method will be called whenever a new query will be executed. :param uA: Any User Agent that was needed to be inserted. This parameter is optional. :return: Returns True if a User Agent was inserted and False if no User Agent could be inserted.
def check_table(table=None, family='ipv4'): ret = {'comment': '', 'result': False} if not table: ret['comment'] = 'Table needs to be specified' return ret nft_family = _NFTABLES_FAMILIES[family] cmd = '{0} list tables {1}' . format(_nftables_cmd(), nft_family) out = __salt__['cmd.run'](cmd, python_shell=False).find('table {0} {1}'.format(nft_family, table)) if out == -1: ret['comment'] = 'Table {0} in family {1} does not exist'.\ format(table, family) else: ret['comment'] = 'Table {0} in family {1} exists'.\ format(table, family) ret['result'] = True return ret
Check for the existence of a table CLI Example:: salt '*' nftables.check_table nat
def conll_ner2json(input_data, **kwargs): delimit_docs = "-DOCSTART- -X- O O" output_docs = [] for doc in input_data.strip().split(delimit_docs): doc = doc.strip() if not doc: continue output_doc = [] for sent in doc.split("\n\n"): sent = sent.strip() if not sent: continue lines = [line.strip() for line in sent.split("\n") if line.strip()] words, tags, chunks, iob_ents = zip(*[line.split() for line in lines]) biluo_ents = iob_to_biluo(iob_ents) output_doc.append( { "tokens": [ {"orth": w, "tag": tag, "ner": ent} for (w, tag, ent) in zip(words, tags, biluo_ents) ] } ) output_docs.append( {"id": len(output_docs), "paragraphs": [{"sentences": output_doc}]} ) output_doc = [] return output_docs
Convert files in the CoNLL-2003 NER format into JSON format for use with train cli.
def ddb_path(self): try: return self._ddb_path except AttributeError: path = self.outdir.has_abiext("DDB") if path: self._ddb_path = path return path
Absolute path of the DDB file. Empty string if file is not present.
def get_preferred(self, addr_1, addr_2): if addr_1 > addr_2: addr_1, addr_2 = addr_2, addr_1 return self._cache.get((addr_1, addr_2))
Return the preferred address.
def register_wcs(name, wrapper_class, coord_types): global custom_wcs custom_wcs[name] = Bunch.Bunch(name=name, wrapper_class=wrapper_class, coord_types=coord_types)
Register a custom WCS wrapper. Parameters ---------- name : str The name of the custom WCS wrapper wrapper_class : subclass of `~ginga.util.wcsmod.BaseWCS` The class implementing the WCS wrapper coord_types : list of str List of names of coordinate types supported by the WCS
def _build_client(self, name=None): url_path = self._url_path + [name] if name else self._url_path return Client(host=self.host, version=self._version, request_headers=self.request_headers, url_path=url_path, append_slash=self.append_slash, timeout=self.timeout)
Make a new Client object :param name: Name of the url segment :type name: string :return: A Client object
def next_session_label(self, session_label): idx = self.schedule.index.get_loc(session_label) try: return self.schedule.index[idx + 1] except IndexError: if idx == len(self.schedule.index) - 1: raise ValueError("There is no next session as this is the end" " of the exchange calendar.") else: raise
Given a session label, returns the label of the next session. Parameters ---------- session_label: pd.Timestamp A session whose next session is desired. Returns ------- pd.Timestamp The next session label (midnight UTC). Notes ----- Raises ValueError if the given session is the last session in this calendar.
def add_hypermedia(self, obj): if hasattr(self, 'pk'): obj['_links'] = { 'self': { 'href': '{}{}/'.format(self.get_resource_uri(), obj[self.pk]) } }
Adds HATEOAS links to the resource. Adds href link to self. Override in subclasses to include additional functionality
def _date2int(date): if isinstance(date, str): if date.endswith(' 00:00:00') or date.endswith('T00:00:00'): date = date[0:-9] tmp = datetime.datetime.strptime(date, '%Y-%m-%d') return tmp.toordinal() if isinstance(date, datetime.date): return date.toordinal() if isinstance(date, int): return date raise ValueError('Unexpected type {0!s}'.format(date.__class__))
Returns an integer representation of a date. :param str|datetime.date date: The date. :rtype: int
def _match_setters(self, query): q = query.decode('utf-8') for name, parser, response, error_response in self._setters: try: value = parser(q) logger.debug('Found response in setter of %s' % name) except ValueError: continue try: self._properties[name].set_value(value) return response except ValueError: if isinstance(error_response, bytes): return error_response return self.error_response('command_error') return None
Tries to match in setters :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
def get_to_persist(persisters): def specs(): for p in persisters: if isinstance(p, dict): yield p["name"], p.get("enabled", True) else: yield p, True components = sorted(dr.DELEGATES, key=dr.get_name) names = dict((c, dr.get_name(c)) for c in components) results = set() for p, e in specs(): for c in components: if names[c].startswith(p): if e: results.add(c) elif c in results: results.remove(c) return results
Given a specification of what to persist, generates the corresponding set of components.
def op_right(op): def method(self, other): return op(value_left(self, other), value_right(self, other)) return method
Returns a type instance method for the given operator, applied when the instance appears on the right side of the expression.
def register_event_listener(coro): if not asyncio.iscoroutinefunction(coro): raise TypeError("Function is not a coroutine.") if coro not in _event_listeners: _event_listeners.append(coro)
Registers a coroutine to receive lavalink event information. This coroutine will accept three arguments: :py:class:`Player`, :py:class:`LavalinkEvents`, and possibly an extra. The value of the extra depends on the value of the second argument. If the second argument is :py:attr:`LavalinkEvents.TRACK_END`, the extra will be a :py:class:`TrackEndReason`. If the second argument is :py:attr:`LavalinkEvents.TRACK_EXCEPTION`, the extra will be an error string. If the second argument is :py:attr:`LavalinkEvents.TRACK_STUCK`, the extra will be the threshold milliseconds that the track has been stuck for. If the second argument is :py:attr:`LavalinkEvents.TRACK_START`, the extra will be a :py:class:`Track` object. If the second argument is any other value, the third argument will not exist. Parameters ---------- coro A coroutine function that accepts the arguments listed above. Raises ------ TypeError If ``coro`` is not a coroutine.
def create_supercut(composition, outputfile, padding): print("[+] Creating clips.") demo_supercut(composition, padding) for (clip, nextclip) in zip(composition, composition[1:]): if ((nextclip['file'] == clip['file']) and (nextclip['start'] < clip['end'])): nextclip['start'] += padding all_filenames = set([c['file'] for c in composition]) videofileclips = dict([(f, VideoFileClip(f)) for f in all_filenames]) cut_clips = [videofileclips[c['file']].subclip(c['start'], c['end']) for c in composition] print("[+] Concatenating clips.") final_clip = concatenate(cut_clips) print("[+] Writing ouput file.") final_clip.to_videofile(outputfile, codec="libx264", temp_audiofile='temp-audio.m4a', remove_temp=True, audio_codec='aac')
Concatenate video clips together and output finished video file to the output directory.
def noisy_operation(self, operation: 'cirq.Operation') -> 'cirq.OP_TREE': if not hasattr(self.noisy_moments, '_not_overridden'): return self.noisy_moments([ops.Moment([operation])], operation.qubits) if not hasattr(self.noisy_moment, '_not_overridden'): return self.noisy_moment(ops.Moment([operation]), operation.qubits) assert False, 'Should be unreachable.'
Adds noise to an individual operation. Args: operation: The operation to make noisy. Returns: An OP_TREE corresponding to the noisy operations implementing the noisy version of the given operation.
def update_domain_queues(self): for key in self.domain_config: final_key = "{name}:{domain}:queue".format( name=self.spider.name, domain=key) if final_key in self.queue_dict: self.queue_dict[final_key][0].window = float(self.domain_config[key]['window']) self.logger.debug("Updated queue {q} with new config" .format(q=final_key)) if 'scale' in self.domain_config[key]: hits = int(self.domain_config[key]['hits'] * self.fit_scale( self.domain_config[key]['scale'])) self.queue_dict[final_key][0].limit = float(hits) else: self.queue_dict[final_key][0].limit = float(self.domain_config[key]['hits'])
Check to update existing queues already in memory new queues are created elsewhere