code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def process_request(self, request_object): entity = request_object.entity_cls.get(request_object.identifier) entity.delete() return ResponseSuccessWithNoContent()
Process the Delete Resource Request
def point(self, x, y): shapeType = POINT pointShape = Shape(shapeType) pointShape.points.append([x, y]) self.shape(pointShape)
Creates a POINT shape.
async def install_update(filename, loop): log.info("Installing update server into system environment") log.debug('File {} exists? {}'.format(filename, os.path.exists(filename))) out, err, returncode = await _install(sys.executable, filename, loop) if returncode == 0: msg = out else: msg = err res = {'message': msg, 'filename': filename} return res, returncode
Install the update into the system environment.
def worker_logfinish(self, node, nodeid, location): self.config.hook.pytest_runtest_logfinish(nodeid=nodeid, location=location)
Emitted when a node calls the pytest_runtest_logfinish hook.
def parse_na_line(txt: str, units: Units) -> typing.Dict[str, str]: retwx = {} wxdata = txt.split(' ') wxdata, _, retwx['wind_shear'] = core.sanitize_report_list(wxdata) wxdata, retwx['type'], retwx['start_time'], retwx['end_time'] = core.get_type_and_times(wxdata) wxdata, retwx['wind_direction'], retwx['wind_speed'], \ retwx['wind_gust'], _ = core.get_wind(wxdata, units) wxdata, retwx['visibility'] = core.get_visibility(wxdata, units) wxdata, retwx['clouds'] = core.get_clouds(wxdata) retwx['other'], retwx['altimeter'], retwx['icing'], retwx['turbulance'] \ = core.get_taf_alt_ice_turb(wxdata) return retwx
Parser for the North American TAF forecast variant
def cli_guilds(world, tibiadata, json): world = " ".join(world) guilds = _fetch_and_parse(ListedGuild.get_world_list_url, ListedGuild.list_from_content, ListedGuild.get_world_list_url_tibiadata, ListedGuild.list_from_tibiadata, tibiadata, world) if json and guilds: import json as _json print(_json.dumps(guilds, default=dict, indent=2)) return print(get_guilds_string(guilds))
Displays the list of guilds for a specific world
def save(self, commit=True): is_new_user = self.instance.pk is None user = super(UserForm, self).save(commit) if is_new_user: user.is_active = True new_pass = self.cleaned_data['new_password'] if new_pass: user.set_password(new_pass) if commit: user.save() return user
Overloaded so we can save any new password that is included.
def enable(config, master, tags, accounts, debug, message, region): accounts_config, master_info, executor = guardian_init( config, debug, master, accounts, tags) regions = expand_regions(region) for r in regions: log.info("Processing Region:%s", r) enable_region(master_info, accounts_config, executor, message, r)
enable guard duty on a set of accounts
def value(self): originalPrice = self.lineItem.totalPrice if self.flatRate == 0: return originalPrice * self.percent return self.flatRate
Returns the positive value to subtract from the total.
def visit_BinOp(self, node): args = [self.visit(arg) for arg in (node.left, node.right)] return list({frozenset.union(*x) for x in itertools.product(*args)})
Return type depend from both operand of the binary operation.
def encode(self, pdu): if _debug: BSLCI._debug("encode %r", pdu) PCI.update(pdu, self) pdu.put( self.bslciType ) pdu.put( self.bslciFunction ) if (self.bslciLength != len(self.pduData) + 4): raise EncodingError("invalid BSLCI length") pdu.put_short( self.bslciLength )
encode the contents of the BSLCI into the PDU.
def plot(config, image, file): image = np.squeeze(image) print(file, image.shape) imsave(file, image)
Plot a single CIFAR image.
def floating_point_to_datetime(day, fp_time): result = datetime(year=day.year, month=day.month, day=day.day) result += timedelta(minutes=math.ceil(60 * fp_time)) return result
Convert a floating point time to a datetime.
def table(self):
    """Build and cache a table from query results.

    The table is materialized from ``_iter_rows()`` on first access and
    memoized in ``self._table`` for subsequent calls.
    """
    cached = self._table
    if cached is None:
        cached = list(self._iter_rows())
        self._table = cached
    return cached
Build and cache a table from query results
def _validate_image_rank(self, img_array): if img_array.ndim == 1 or img_array.ndim > 3: msg = "{0}D imagery is not allowed.".format(img_array.ndim) raise IOError(msg)
Images must be either 2D or 3D.
def title_translations2marc(self, key, value): return { 'a': value.get('title'), 'b': value.get('subtitle'), '9': value.get('source'), }
Populate the ``242`` MARC field.
def to_struct(self, value): if self.str_format: return value.strftime(self.str_format) return value.strftime(self.default_format)
Cast `date` object to string.
def symbols_count(self): if not self._ptr: raise BfdException("BFD not initialized") return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.SYMCOUNT)
Return the symcount attribute of the BFD file being processed.
def close(self): if self._is_open: self._is_open = False hidapi.hid_close(self._device)
Closes the hid device
def close(self): if hasattr(self, "_browser"): self.loop.run_until_complete(self._browser.close()) super().close()
If a browser was created close it first.
def validate_vertex_directives(directives): for directive_name in six.iterkeys(directives): if directive_name in PROPERTY_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found property-only directive {} set on vertex.'.format(directive_name))
Validate the directives that appear at a vertex field.
def calc_route_info(self, real_time=True, stop_at_bounds=False, time_delta=0): route = self.get_route(1, time_delta) results = route['results'] route_time, route_distance = self._add_up_route(results, real_time=real_time, stop_at_bounds=stop_at_bounds) self.log.info('Time %.2f minutes, distance %.2f km.', route_time, route_distance) return route_time, route_distance
Calculate best route info.
def _loadMore(self, start=0, trys=0, validation=True):
    """Retrieve the next page of results via the client's listObjects call.

    Retries up to 3 times on BadStatusLine (after closing the stale
    connection) and on ServiceFailure (retrying with validation disabled).

    Args:
        start: zero-based offset of the first object to fetch.
        trys: current retry count (internal, incremented on recursion).
        validation: whether parsing requires schema validation.
    """
    self._log.debug("Loading page starting from %d" % start)
    self._czero = start
    self._pageoffs = 0
    try:
        pyxb.RequireValidWhenParsing(validation)
        self._object_list = self._client.listObjects(
            start=start,
            count=self._pagesize,
            fromDate=self._fromDate,
            nodeId=self._nodeId,
        )
    except http.client.BadStatusLine as e:
        # NOTE(review): the log message promises a 5 second wait, but no
        # sleep occurs before retrying — confirm intended behavior.
        self._log.warning("Server responded with Bad Status Line. Retrying in 5sec")
        self._client.connection.close()
        if trys > 3:
            raise e
        trys += 1
        self._loadMore(start, trys)
    except d1_common.types.exceptions.ServiceFailure as e:
        # On service failure, retry the same page with validation disabled.
        self._log.error(e)
        if trys > 3:
            raise e
        trys += 1
        self._loadMore(start, trys, validation=False)
Retrieves the next page of results.
def show_all_prices(call=None, kwargs=None): if call == 'action': raise SaltCloudSystemExit( 'The show_all_prices function must be called with -f or --function.' ) if kwargs is None: kwargs = {} conn = get_conn(service='SoftLayer_Product_Package') if 'code' not in kwargs: return conn.getCategories(id=50) ret = {} for category in conn.getCategories(id=50): if category['categoryCode'] != kwargs['code']: continue for group in category['groups']: for price in group['prices']: ret[price['id']] = price['item'].copy() del ret[price['id']]['id'] return ret
Return a dict of all prices on the cloud provider.
def from_range(cls, data, name=None, dtype=None, **kwargs): if not isinstance(data, range): raise TypeError( '{0}(...) must be called with object coercible to a ' 'range, {1} was passed'.format(cls.__name__, repr(data))) start, stop, step = data.start, data.stop, data.step return RangeIndex(start, stop, step, dtype=dtype, name=name, **kwargs)
Create RangeIndex from a range object.
def check(self, url_data): content = url_data.get_content() self._check_by_re(url_data, content) self._check_inline_links(url_data, content)
Extracts urls from the file.
def init(): try: config = get_config() print(config.dir) alembic_command.init(config, config.dir, 'project') except CommandError as e: click.echo(red(str(e)))
Initialize new migrations directory
def notify_peer_message(self, message, sender_id): payload = message.SerializeToString() self._notify( "consensus_notifier_notify_peer_message", payload, len(payload), sender_id, len(sender_id))
A new message was received from a peer
def lookup_url(self, url): if type(url) is not str: url = url.encode('utf8') if not url.strip(): raise ValueError("Empty input string.") url_hashes = URL(url).hashes try: list_names = self._lookup_hashes(url_hashes) self.storage.commit() except Exception: self.storage.rollback() raise if list_names: return list_names return None
Look up specified URL in Safe Browsing threat lists.
def source_debianize_name(name): "make name acceptable as a Debian source package name" name = name.replace('_','-') name = name.replace('.','-') name = name.lower() return name
make name acceptable as a Debian source package name
def _asdict(self): return OrderedDict((f.name, getattr(self, f.name)) for f in self._struct)
Return an OrderedDict of the fields.
def _move_cursor_to_column(self, column): last_col = len(self._cursor.block().text()) self._cursor.movePosition(self._cursor.EndOfBlock) to_insert = '' for i in range(column - last_col): to_insert += ' ' if to_insert: self._cursor.insertText(to_insert) self._cursor.movePosition(self._cursor.StartOfBlock) self._cursor.movePosition(self._cursor.Right, self._cursor.MoveAnchor, column) self._last_cursor_pos = self._cursor.position()
Moves the cursor to the specified column, if possible.
def hash(self): if self._hash is None: tohash = [self.path.name] tohash.append(hashfile(self.path, blocksize=65536, count=20)) self._hash = hashobj(tohash) return self._hash
Hash value based on file name and content
def distance(r1: 'Region', r2: 'Region'):
    """Calculate the Euclidean distance between the (x, y) coordinates
    of the two regions."""
    dx = r2.x - r1.x
    dy = r2.y - r1.y
    return math.sqrt(dx ** 2 + dy ** 2)
Calculate distance between the x and y of the two regions.
def create_bundle(self, bundleId, data=None): headers={'content-type':'application/json'} url = self.__get_base_bundle_url() + "/" + bundleId if data is None: data = {} data['sourceLanguage'] = 'en' data['targetLanguages'] = [] data['notes']=[] data['metadata']={} data['partner']='' data['segmentSeparatorPattern']='' data['noTranslationPattern']='' json_data = json.dumps(data) response = self.__perform_rest_call(requestURL=url, restType='PUT', body=json_data, headers=headers) return response
Creates a bundle using Globalization Pipeline service
def check_schema(self): if self.valid_schema: return config = self.config metadata = self.metadata() if 'current_version' not in metadata: raise GaugedSchemaError('Gauged schema not found, ' 'try a gauged.sync()') if metadata['current_version'] != Gauged.VERSION: msg = 'The schema is version %s while this Gauged is version %s. ' msg += 'Try upgrading Gauged and/or running gauged_migrate.py' msg = msg % (metadata['current_version'], Gauged.VERSION) raise GaugedVersionMismatchError(msg) expected_block_size = '%s/%s' % (config.block_size, config.resolution) block_size = '%s/%s' % (metadata['block_size'], metadata['resolution']) if block_size != expected_block_size: msg = 'Expected %s and got %s' % (expected_block_size, block_size) warn(msg, GaugedBlockSizeMismatch) self.valid_schema = True
Check the schema exists and matches configuration
def _get_urls(self, version, cluster_stats): pshard_stats_url = "/_stats" health_url = "/_cluster/health" if version >= [0, 90, 10]: pending_tasks_url = "/_cluster/pending_tasks" stats_url = "/_nodes/stats" if cluster_stats else "/_nodes/_local/stats" if version < [5, 0, 0]: stats_url += "?all=true" else: pending_tasks_url = None stats_url = "/_cluster/nodes/stats?all=true" if cluster_stats else "/_cluster/nodes/_local/stats?all=true" return health_url, stats_url, pshard_stats_url, pending_tasks_url
Compute the URLs we need to hit depending on the running ES version
def L_fc(fdata): fd = np.copy(fdata) dphi_fc(fdata) divsin_fc(fdata) dtheta_fc(fd) return (1j * fdata, -1j * fd)
Apply L in the Fourier domain.
def _select(self, current_venvs, requirements=None, interpreter='', uuid='', options=None): if uuid: logger.debug("Searching a venv by uuid: %s", uuid) venv = self._match_by_uuid(current_venvs, uuid) else: logger.debug("Searching a venv for: reqs=%s interpreter=%s options=%s", requirements, interpreter, options) venv = self._match_by_requirements(current_venvs, requirements, interpreter, options) if venv is None: logger.debug("No matching venv found :(") return logger.debug("Found a matching venv! %s", venv) return venv['metadata']
Select which venv satisfy the received requirements.
def edit_command(self, payload):
    """Edit the command of a specific queue entry.

    Only entries whose status is 'queued' or 'stashed' may be edited.

    Args:
        payload: dict with 'key' (queue entry id) and 'command'
            (the replacement command string).

    Returns:
        dict with a 'message' and a 'status' ('success' or 'error').
    """
    key = payload['key']
    command = payload['command']
    # Bug fix: indexing self.queue[key] raised KeyError for unknown keys
    # instead of returning the "No entry with this key" answer.
    if key in self.queue and self.queue[key]:
        if self.queue[key]['status'] in ['queued', 'stashed']:
            self.queue[key]['command'] = command
            # Bug fix: a successful update previously reported 'error'.
            answer = {'message': 'Command updated', 'status': 'success'}
        else:
            answer = {'message': "Entry is not 'queued' or 'stashed'",
                      'status': 'error'}
    else:
        answer = {'message': 'No entry with this key', 'status': 'error'}
    return answer
Edit the command of a specific entry.
def caller(self, fun, **kwargs): self.client_cache['caller'].cmd(fun, *kwargs['arg'], **kwargs['kwarg'])
Wrap LocalCaller to execute remote exec functions locally on the Minion
def fw_policy_delete(self, data, fw_name=None): LOG.debug("FW Policy Debug") self._fw_policy_delete(fw_name, data)
Top level policy delete routine.
def filter_dependencies(self): dependencies = self.event['check'].get('dependencies', None) if dependencies is None or not isinstance(dependencies, list): return for dependency in self.event['check']['dependencies']: if not str(dependency): continue dependency_split = tuple(dependency.split('/')) if len(dependency_split) == 2: client, check = dependency_split else: client = self.event['client']['name'] check = dependency_split[0] if self.event_exists(client, check): self.bail('check dependency event exists')
Determine whether a check has dependencies.
def send_custom_hsm(self, whatsapp_id, template_name, language, variables): data = { "to": whatsapp_id, "type": "hsm", "hsm": { "namespace": self.hsm_namespace, "element_name": template_name, "language": {"policy": "deterministic", "code": language}, "localizable_params": [{"default": variable} for variable in variables], }, } if self.ttl is not None: data["ttl"] = self.ttl response = self.session.post( urllib_parse.urljoin(self.api_url, "/v1/messages"), json=data ) return self.return_response(response)
Sends an HSM with more customizable fields than the send_hsm function
def dump(self, msg):
    """Dump the provided message to the current batch file and record an
    index entry for it.

    A new batch file is started when appending msg would exceed the
    maximum batch file size (and the current batch is non-empty).
    """
    msg_size = len(msg)
    if self._batch_size + msg_size > self._max_batch_file_size \
            and self._batch_size > 0:
        self._startNewBatch()
    global getTime
    # Index entry fields: timestamp, batch index (zero-padded), offset
    # within the batch (current batch size), and message size.
    index_file_entry = '{:},{:09d},{:},{:}\n'.format(
        getTime(), self._batch_index, self._batch_size, msg_size)
    # The index file is opened in binary mode on Python 3, so the entry
    # must be encoded there; Python 2 writes the str directly.
    if sys.version_info >= (3,):
        self._index_file.write(index_file_entry.encode('utf-8'))
    else:
        self._index_file.write(index_file_entry)
    self._batch_file.write(msg)
    self._batch_size += msg_size
    self._message_count += 1
Dumps the provided message to this dump.
def import_task_modules(): top_level_modules = settings.INSTALLED_APPS module_names = [] for module in top_level_modules: mod = import_module(module) for loader, module_name, is_pkg in pkgutil.walk_packages(mod.__path__): if not module_name.startswith("__"): submod_name = "{0}.{1}".format(module,module_name) module_names.append(submod_name) modules = map(import_module, module_names) return modules
Import all installed apps and add modules to registry
def get(id_, hwid, type_, unit, precision, as_json): if id_ and (hwid or type_): raise click.BadOptionUsage( "If --id is given --hwid and --type are not allowed." ) if id_: try: sensor = W1ThermSensor.get_available_sensors()[id_ - 1] except IndexError: raise click.BadOptionUsage( "No sensor with id {0} available. " "Use the ls command to show all available sensors.".format(id_) ) else: sensor = W1ThermSensor(type_, hwid) if precision: sensor.set_precision(precision, persist=False) temperature = sensor.get_temperature(unit) if as_json: data = { "hwid": sensor.id, "type": sensor.type_name, "temperature": temperature, "unit": unit, } click.echo(json.dumps(data, indent=4, sort_keys=True)) else: click.echo( "Sensor {0} measured temperature: {1} {2}".format( click.style(sensor.id, bold=True), click.style(str(temperature), bold=True), click.style(unit, bold=True), ) )
Get temperature of a specific sensor
def list_resource_groups(call=None): if call == 'action': raise SaltCloudSystemExit( 'The list_hosted_services function must be called with ' '-f or --function' ) resconn = get_conn(client_type='resource') ret = {} try: groups = resconn.resource_groups.list() for group_obj in groups: group = group_obj.as_dict() ret[group['name']] = group except CloudError as exc: __utils__['azurearm.log_cloud_error']('resource', exc.message) ret = {'Error': exc.message} return ret
List resource groups associated with the subscription
def check_crc(f, inf, desc): exp = inf._md_expect if exp is None: return ucrc = f._md_context.digest() if ucrc != exp: print('crc error - %s - exp=%r got=%r' % (desc, exp, ucrc))
Compare result crc to expected value.
def validate_rows_length(data, length, message=None, exception=MatrixError):
    """Validate that all rows have the same length.

    Args:
        data: iterable of rows, each supporting len().
        length: the expected length for every row.
        message: optional error message; a default is used when None.
        exception: exception class to raise on mismatch
            (defaults to MatrixError).

    Raises:
        The given exception class when any row's length differs
        from *length*.
    """
    if message is None:
        message = 'All rows must have the same length (same number of columns)'
    for row in data:
        if len(row) != length:
            raise exception(message)
Validate that all rows have the same length.
def download_from_files(files, output_path, width): files_to_download = get_files_from_arguments(files, width) download_files_if_not_in_manifest(files_to_download, output_path)
Download files from a given file list.
def _check_grain_minions(self, expr, delimiter, greedy): return self._check_cache_minions(expr, delimiter, greedy, 'grains')
Return the minions found by looking via grains
def user_path(self, team, user): return os.path.join(self.team_path(team), user)
Returns the path to directory with the user's package repositories.
def log(package): team, owner, pkg = parse_package(package) session = _get_session(team) response = session.get( "{url}/api/log/{owner}/{pkg}/".format( url=get_registry_url(team), owner=owner, pkg=pkg ) ) table = [("Hash", "Pushed", "Author", "Tags", "Versions")] for entry in reversed(response.json()['logs']): ugly = datetime.fromtimestamp(entry['created']) nice = ugly.strftime("%Y-%m-%d %H:%M:%S") table.append((entry['hash'], nice, entry['author'], str(entry.get('tags', [])), str(entry.get('versions', [])))) _print_table(table)
List all of the changes to a package on the server.
def _find_interfaces_mac(ip):
    '''
    Helper to get the device, interface, and hardware (MAC) address that
    carry the given IP address, using data mined via ``net.interfaces``
    and ``net.ipaddrs``.

    Returns a ``(device, interface, mac)`` tuple, or ``('', '', '')``
    when the IP address is not found on any device.
    '''
    all_interfaces = _get_mine('net.interfaces')
    all_ipaddrs = _get_mine('net.ipaddrs')
    for device, device_ipaddrs in six.iteritems(all_ipaddrs):
        if not device_ipaddrs.get('result', False):
            continue
        for interface, interface_ipaddrs in six.iteritems(device_ipaddrs.get('out', {})):
            # Bug fix: dict.keys() returns a view object on Python 3 which
            # has no .extend(); build a real list so the IPv6 addresses can
            # be appended to the IPv4 ones.
            ip_addresses = list(interface_ipaddrs.get('ipv4', {}).keys())
            ip_addresses.extend(interface_ipaddrs.get('ipv6', {}).keys())
            for ipaddr in ip_addresses:
                if ip != ipaddr:
                    continue
                interface_mac = all_interfaces.get(device, {}).get('out', {}).get(interface, {}).get('mac_address', '')
                return device, interface, interface_mac
    return ('', '', '')
Helper to get the interfaces hardware address using the IP Address.
def _get_date(day=None, month=None, year=None): now = datetime.date.today() if day is None: return now try: return datetime.date( day=int(day), month=int(month or now.month), year=int(year or now.year), ) except ValueError as error: print("error: {0}".format(error), file=sys.stderr)
Returns a datetime object with optional params or today.
def unpickle(pickle_file): pickle = None with open(pickle_file, "rb") as pickle_f: pickle = dill.load(pickle_f) if not pickle: LOG.error("Could not load python object from file") return pickle
Unpickle a python object from the given path.
def perform(self): db_versions = self.table.versions() version = self.version if (version.is_processed(db_versions) and not self.config.force_version == self.version.number): self.log( u'version {} is already installed'.format(version.number) ) return self.start() try: self._perform_version(version) except Exception: if sys.version_info < (3, 4): msg = traceback.format_exc().decode('utf8', errors='ignore') else: msg = traceback.format_exc() error = u'\n'.join(self.logs + [u'\n', msg]) self.table.record_log(version.number, error) raise self.finish()
Perform the version upgrade on the database.
def rfft2d_freqs(h, w): fy = np.fft.fftfreq(h)[:, None] if w % 2 == 1: fx = np.fft.fftfreq(w)[: w // 2 + 2] else: fx = np.fft.fftfreq(w)[: w // 2 + 1] return np.sqrt(fx * fx + fy * fy)
Computes 2D spectrum frequencies.
def __execute_queries(self): def func(item): or_check = False for queries in self._queries: and_check = True for query in queries: and_check &= self._matcher._match( item.get(query.get('key'), None), query.get('operator'), query.get('value') ) or_check |= and_check return or_check self._json_data = list(filter(lambda item: func(item), self._json_data))
Execute all condition and filter result data
def _check_configuration(self, *attrs): for attr in attrs: if getattr(self, attr, None) is None: raise ConfigurationError("{} not configured".format(attr))
Check that each named attr has been configured
def _get_assessment_taken(self, assessment_taken_id): if assessment_taken_id not in self._assessments_taken: mgr = self._get_provider_manager('ASSESSMENT') lookup_session = mgr.get_assessment_taken_lookup_session(proxy=self._proxy) lookup_session.use_federated_bank_view() self._assessments_taken[assessment_taken_id] = ( lookup_session.get_assessment_taken(assessment_taken_id)) return self._assessments_taken[assessment_taken_id]
Helper method for getting an AssessmentTaken objects given an Id.
def _from_dict(cls, _dict): args = {} if 'logs' in _dict: args['logs'] = [Log._from_dict(x) for x in (_dict.get('logs'))] else: raise ValueError( 'Required property \'logs\' not present in LogCollection JSON') if 'pagination' in _dict: args['pagination'] = LogPagination._from_dict( _dict.get('pagination')) else: raise ValueError( 'Required property \'pagination\' not present in LogCollection JSON' ) return cls(**args)
Initialize a LogCollection object from a json dictionary.
def ordered_dump(data, stream=None, Dumper=yaml.SafeDumper, **kwds): class OrderedDumper(Dumper): pass def dict_representer(dumper, data): return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) OrderedDumper.add_representer(OrderedDict, dict_representer) return yaml.dump(data, stream, OrderedDumper, **kwds)
Dump a yaml configuration as an OrderedDict.
def list_nodes_full(**kwargs): nodes = _query('server/list') ret = {} for node in nodes: name = nodes[node]['label'] ret[name] = nodes[node].copy() ret[name]['id'] = node ret[name]['image'] = nodes[node]['os'] ret[name]['size'] = nodes[node]['VPSPLANID'] ret[name]['state'] = nodes[node]['status'] ret[name]['private_ips'] = nodes[node]['internal_ip'] ret[name]['public_ips'] = nodes[node]['main_ip'] return ret
Return all data on nodes
def _assemble_flowtable(self, values): values = map(lambda x: [] if x is None else x, values) src = values[0] + values[1] dst = values[2] + values[3] thistable = dict() for s in src: thistable[s] = dst return thistable
generate a flowtable from a tuple of descriptors.
def _shutdown_proc(p, timeout): freq = 10 for _ in range(1 + timeout * freq): ret = p.poll() if ret is not None: logging.info("Shutdown gracefully.") return ret time.sleep(1 / freq) logging.warning("Killing the process.") p.kill() return p.wait()
Wait for a proc to shut down, then terminate or kill it after `timeout`.
def hidden_from(self, a, b):
    """Return True if ``a`` is hidden and lies in a different box
    than ``b``."""
    if a not in self.hidden_indices:
        return False
    return not self.in_same_box(a, b)
Return True if ``a`` is hidden in a different box than ``b``.
def _nbOperations(n): if n < 2: return 0 else: n0 = (n + 2) // 3 n02 = n0 + n // 3 return 3 * (n02) + n0 + _nbOperations(n02)
Exact number of atomic operations in _radixPass.
def overview(self, tag=None, fromdate=None, todate=None): return self.call("GET", "/stats/outbound", tag=tag, fromdate=fromdate, todate=todate)
Gets a brief overview of statistics for all of your outbound email.
def copy(self): return Sequence(self.name, self.sequenceData, self.start, self.end, self.strand, self.remaining, self.meta_data, self.mutableString)
Copy constructor for Sequence objects.
def FetchCompletedRequests(self, session_id, timestamp=None): if timestamp is None: timestamp = (0, self.frozen_timestamp or rdfvalue.RDFDatetime.Now()) for request, status in self.data_store.ReadCompletedRequests( session_id, timestamp=timestamp, limit=self.request_limit): yield request, status
Fetch all the requests with a status message queued for them.
def log(self): if self.tot < 3: return msgs = [] for name, t in self.times: if t / self.tot > 0.3 and t > 1: msgs.append(name + ": " + humanize_time_delta(t)) logger.info( "Callbacks took {:.3f} sec in total. {}".format( self.tot, '; '.join(msgs)))
log the time of some heavy callbacks
def _check_decorator(fct): def wrapper(self, *args, **kw): if self.is_enable(): ret = fct(self, *args, **kw) else: ret = self.stats return ret return wrapper
Check if the plugin is enabled.
def check_response(res): try: res.raise_for_status() except Exception as exc: try: err_info = res.json() err_msg = err_info['message'] except ValueError: err_msg = res.text[:40] except KeyError: err_msg = res.text[:40] + ("(No 'message' in err_info dict: %s" % list(err_info.keys())) exc.args += (err_msg,) raise exc
Check HTTP response and raise exception if response is not OK.
def disconnect(self, *args, **kwargs): quit_message = "%s %s" % (settings.GNOTTY_VERSION_STRING, settings.GNOTTY_PROJECT_URL) self.client.connection.quit(quit_message) super(IRCNamespace, self).disconnect(*args, **kwargs)
WebSocket was disconnected - leave the IRC channel.
def person_same_name_map(json_content, role_from): "to merge multiple editors into one record, filter by role values and group by name" matched_editors = [(i, person) for i, person in enumerate(json_content) if person.get('role') in role_from] same_name_map = {} for i, editor in matched_editors: if not editor.get("name"): continue name = editor.get("name").get("index") if name not in same_name_map: same_name_map[name] = [] same_name_map[name].append(i) return same_name_map
to merge multiple editors into one record, filter by role values and group by name
def delete(self): if self.oracle: self.time += self.oracle.time_accum() self.oracle.delete() self.oracle = None
Explicit destructor of the internal SAT oracle.
def removeByIndex(self, index):
    """Remove a user from the invitation list by position.

    Args:
        index: zero-based position within the invitation list.

    Out-of-range indices are silently ignored.
    """
    # Bug fixes: the original called list.remove(index), which deletes by
    # *value* rather than position, and its bounds check
    # (index < len - 1) wrongly excluded the last element.
    if 0 <= index < len(self._invites):
        self._invites.pop(index)
removes a user from the invitation list by position
def __setLock(self, command): if command in (TURN_ON, TURN_OFF): self._operation = command elif command in INV_SOURCES: self._operation = SOURCE else: self._operation = ALL self._isLocked = True self._timer = time.time()
Set lock on requests.
def setup_logger(log_level, log_file=None): level = getattr(logging, log_level.upper(), None) if not level: color_print("Invalid log level: %s" % log_level, "RED") sys.exit(1) if level >= logging.INFO: sys.tracebacklimit = 0 formatter = ColoredFormatter( u"%(log_color)s%(bg_white)s%(levelname)-8s%(reset)s %(message)s", datefmt=None, reset=True, log_colors=log_colors_config ) if log_file: handler = logging.FileHandler(log_file, encoding="utf-8") else: handler = logging.StreamHandler() handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(level)
setup root logger with ColoredFormatter.
def _get_catalogue_bin_limits(catalogue, dmag): mag_bins = np.arange( float(np.floor(np.min(catalogue.data['magnitude']))) - dmag, float(np.ceil(np.max(catalogue.data['magnitude']))) + dmag, dmag) counter = np.histogram(catalogue.data['magnitude'], mag_bins)[0] idx = np.where(counter > 0)[0] mag_bins = mag_bins[idx[0]:(idx[-1] + 2)] return mag_bins
Returns the magnitude bins corresponding to the catalogue
def create_big_url(name): global BIG_URLS filemeta = get_url_filemeta(name) if not filemeta: return None filename = filemeta['filename'] remote_size = filemeta['remote_size'] url = filemeta['url'] name = filename.split('.') name = (name[0] if name[0] not in ('', '.') else name[1]).replace(' ', '-') name = name.lower().strip() BIG_URLS[name] = (url, int(remote_size or -1), filename) return name
If name looks like a url, with an http, add an entry for it in BIG_URLS
def terminate_all(self): logger.info('Job {0} terminating all currently running tasks'.format(self.name)) for task in self.tasks.itervalues(): if task.started_at and not task.completed_at: task.terminate()
Terminate all currently running tasks.
def toFloat(value): if isinstance(value, str): return strFloat(value) elif isinstance(value, list): return slistFloat(value) else: return value
Converts string or signed list to float.
def isInteractive(): if sys.stdout.isatty() and os.name != 'nt': try: import threading except ImportError: return False else: return True else: return False
A basic check of whether the program is running in interactive mode
def nonmatches(self): report = [] for result in self.aligned_results: report.append(self._analyze_single(self.aligned_reference, result)) return report
Report mismatches, indels, and coverage.
def integer_ceil(a, b): quanta, mod = divmod(a, b) if mod: quanta += 1 return quanta
Return the ceil integer of a div b.
def check_response(response):
    """Check the picflash.org API response; raise if the server reported
    an error.

    Raises:
        ServerError: non-2xx HTTP status, or a body that is not JSON.
        EmptyResponse: the server returned an empty JSON document.
        UnsupportedFormat: the uploaded file type is not supported.
        UnkownError: any other failure reported by the server.
    """
    # Bug fix: the original used `> 300`, which accepted status code 300
    # (a redirect) as success; only the 2xx range indicates success.
    if response.status_code < 200 or response.status_code >= 300:
        raise ServerError('API requests returned with error: %s' % response.status_code)
    try:
        response_text = loads(response.text)
    except ValueError:
        raise ServerError('The API did not returned a JSON string.')
    if not response_text:
        raise EmptyResponse()
    if 'failure' in response_text:
        # 'Falscher Dateityp' is German for "wrong file type".
        if response_text['failure'] == 'Falscher Dateityp':
            raise UnsupportedFormat('Please look at picflash.org '
                                    'witch formats are supported')
        else:
            raise UnkownError(response_text['failure'])
Checks the response; if the server returned an error, raises an exception.
def off(self, event): 'Remove an event handler' try: self._once_events.remove(event) except KeyError: pass self._callback_by_event.pop(event, None)
Remove an event handler
def field_date_to_json(self, day): if isinstance(day, six.string_types): day = parse_date(day) return [day.year, day.month, day.day] if day else None
Convert a date to a date triple.
def create_filter(self): return Filter( self.networkapi_url, self.user, self.password, self.user_ldap)
Get an instance of filter services facade.
def write_skills_data(self, data=None): data = data or self.skills_data if skills_data_hash(data) != self.skills_data_hash: write_skills_data(data) self.skills_data_hash = skills_data_hash(data)
Write skills data hash if it has been modified.
def sec_as_hex(self, is_compressed=None): sec = self.sec(is_compressed=is_compressed) return self._network.sec_text_for_blob(sec)
Return the SEC representation of this key as hex text.
def builder_types(cls) -> List[Type[ParameterBuilder]]: return [ entry_point.load() for entry_point in iter_entry_points(ENTRY_POINT) ]
Define the available builder types.
def render(data, saltenv='base', sls='', argline='', **kwargs): translate_newlines = kwargs.get('translate_newlines', False) return _decrypt_object(data, translate_newlines=translate_newlines)
Decrypt the data to be rendered that was encrypted using AWS KMS envelope encryption.
def human_readable_delta(start, end): start_date = datetime.datetime.fromtimestamp(start) end_date = datetime.datetime.fromtimestamp(end) delta = end_date - start_date result = [] if delta.days > 0: result.append('%d days' % (delta.days,)) if delta.seconds > 0: hours = int(delta.seconds / 3600) if hours > 0: result.append('%d hours' % (hours,)) minutes = int((delta.seconds - hours * 3600) / 60) if minutes: result.append('%d minutes' % (minutes,)) seconds = delta.seconds % 60 if seconds > 0: result.append('%d seconds' % (seconds,)) if result: return ', '.join(result) return 'super fast'
Return a string of human readable time delta.
def maybe_download_and_extract(self): if not os.path.exists(self.cifar10_dir): if not os.path.exists(self.data_dir): os.makedirs(self.data_dir) def _progress(count, block_size, total_size): status_msg = '\r>> Downloading {} {:>3}% ' sys.stdout.write(status_msg.format(self.cifar10_tarball, float(count * block_size) / total_size * 100.0)) sys.stdout.flush() file_path, _ = urlretrieve(CIFAR10_URL, self.cifar10_tarball, _progress) stat_info = os.stat(file_path) print('\nSuccessfully downloaded', file_path, stat_info.st_size, 'bytes.\n') tarfile.open(file_path, 'r:gz').extractall(self.data_dir)
Download and extract the tarball from Alex Krizhevsky's website.
def xpointerNewRange(self, startindex, end, endindex): if end is None: end__o = None else: end__o = end._o ret = libxml2mod.xmlXPtrNewRange(self._o, startindex, end__o, endindex) if ret is None:raise treeError('xmlXPtrNewRange() failed') return xpathObjectRet(ret)
Create a new xmlXPathObjectPtr of type range
def binlm2arpa(input_file, output_file, verbosity=2):
    """Convert a binary format language model, as generated by idngram2lm,
    into an ARPA format language model.

    Args:
        input_file: path of the binary language model.
        output_file: path for the ARPA output.
        verbosity: tool verbosity level; falsy values omit the flag.

    Returns:
        The tool's stdout as a stripped string.

    Raises:
        ConversionError: when the tool exits with a non-zero status.
    """
    # Bug fix: the argument list used a period ('-arpa'. output_file)
    # instead of a comma, which is a syntax error.
    cmd = ['binlm2arpa', '-binary', input_file, '-arpa', output_file]
    if verbosity:
        cmd.extend(['-verbosity', verbosity])
    cmd = [str(x) for x in cmd]
    with tempfile.SpooledTemporaryFile() as output_f:
        with output_to_debuglogger() as err_f:
            exitcode = subprocess.call(cmd, stdout=output_f, stderr=err_f)
        output = output_f.read()
    logger = logging.getLogger(__name__)
    logger.debug("Command '%s' returned with exit code '%d'." % (' '.join(cmd), exitcode))
    if exitcode != 0:
        raise ConversionError("'%s' returned with non-zero exit status '%s'" % (cmd[0], exitcode))
    if sys.version_info >= (3,) and type(output) is bytes:
        output = output.decode('utf-8')
    return output.strip()
Converts a binary format language model, as generated by idngram2lm, into an an ARPA format language model.