code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def transformArray(data, keysToSplit=[]): transformed = [ ] for item in data: transformed.append(transform(item, keysToSplit)) return transformed
Transform a SPARQL json array based on the rules of transform
def translate_state(self, s): if not isinstance(s, basestring): return s s = s.capitalize().replace("_", " ") return t(_(s))
Translate the given state string
def connect_client_to_kernel(self, client, is_cython=False, is_pylab=False, is_sympy=False): connection_file = client.connection_file stderr_handle = None if self.test_no_stderr else client.stderr_handle km, kc = self.create_kernel_manager_and_kernel_client( connection_file, stderr_handle, is_cython=is_cython, is_pylab=is_pylab, is_sympy=is_sympy) if is_string(km) and kc is None: client.shellwidget.kernel_manager = None client.show_kernel_error(km) return if not self.testing: kc.started_channels.connect( lambda c=client: self.process_started(c)) kc.stopped_channels.connect( lambda c=client: self.process_finished(c)) kc.start_channels(shell=True, iopub=True) shellwidget = client.shellwidget shellwidget.kernel_manager = km shellwidget.kernel_client = kc
Connect a client to its kernel
def update_org_owner(cls, module): try: cls.module_registry[module]["OrgOwnerModel"]._meta.get_field( "organization_user" ) except FieldDoesNotExist: cls.module_registry[module]["OrgOwnerModel"].add_to_class( "organization_user", models.OneToOneField( cls.module_registry[module]["OrgUserModel"], on_delete=models.CASCADE, ), ) try: cls.module_registry[module]["OrgOwnerModel"]._meta.get_field("organization") except FieldDoesNotExist: cls.module_registry[module]["OrgOwnerModel"].add_to_class( "organization", models.OneToOneField( cls.module_registry[module]["OrgModel"], related_name="owner", on_delete=models.CASCADE, ), )
Creates the links to the organization and organization user for the owner.
def _wire_events(self): self._device.on_open += self._on_open self._device.on_close += self._on_close self._device.on_read += self._on_read self._device.on_write += self._on_write self._zonetracker.on_fault += self._on_zone_fault self._zonetracker.on_restore += self._on_zone_restore
Wires up the internal device events.
def tracker_class(clsname): stats = server.stats if not stats: bottle.redirect('/tracker') stats.annotate() return dict(stats=stats, clsname=clsname)
Get class instance details.
def _normalize_stack(graphobjs): for operands, operator in graphobjs: operator = str(operator) if re.match(r'Q*q+$', operator): for char in operator: yield ([], char) else: yield (operands, operator)
Convert runs of qQ's in the stack into single graphobjs
def convert_to_duckling_language_id(cls, lang): if lang is not None and cls.is_supported(lang): return lang elif lang is not None and cls.is_supported(lang + "$core"): return lang + "$core" else: raise ValueError("Unsupported language '{}'. Supported languages: {}".format( lang, ", ".join(cls.SUPPORTED_LANGUAGES)))
Ensure a language identifier has the correct duckling format and is supported.
def join_images(img_files, out_file): images = [PIL.Image.open(f) for f in img_files] joined = PIL.Image.new( 'RGB', (sum(i.size[0] for i in images), max(i.size[1] for i in images)) ) left = 0 for img in images: joined.paste(im=img, box=(left, 0)) left = left + img.size[0] joined.save(out_file)
Join the list of images into the out file
def image_by_id(self, id): if not id: return None return next((image for image in self.images() if image['Id'] == id), None)
Return image with given Id
def cached(function): cache_variable = '_cached_' + function.__name__ @wraps(function) def function_wrapper(obj, *args, **kwargs): try: cache = getattr(obj, cache_variable) except AttributeError: cache = {} setattr(obj, cache_variable, cache) args_kwargs = args + tuple(kwargs.values()) try: return cache[args_kwargs] except KeyError: cache_value = function(obj, *args, **kwargs) cache[args_kwargs] = cache_value return cache_value return function_wrapper
Method decorator caching a method's returned values.
def symmetry_cycles(self): result = set([]) for symmetry in self.symmetries: result.add(symmetry.cycles) return result
The cycle representations of the graph symmetries
def confirm_user(query): user = _query_to_user(query) if click.confirm(f'Are you sure you want to confirm {user!r}?'): if security_service.confirm_user(user): click.echo(f'Successfully confirmed {user!r} at ' f'{user.confirmed_at.strftime("%Y-%m-%d %H:%M:%S%z")}') user_manager.save(user, commit=True) else: click.echo(f'{user!r} has already been confirmed.') else: click.echo('Cancelled.')
Confirm a user account.
def follow_model(self, model): if model: self.models_by_name[model.__name__.lower()] = model signals.post_save.connect(create_or_update, sender=model) signals.post_delete.connect(self.remove_orphans, sender=model)
Follow a particular model class, updating associated Activity objects automatically.
def __set_premature_stop_codon_status(self, hgvs_string): if re.search('.+\*(\d+)?$', hgvs_string): self.is_premature_stop_codon = True self.is_non_silent = True if hgvs_string.endswith('*'): self.is_nonsense_mutation = True else: self.is_nonsense_mutation = False else: self.is_premature_stop_codon = False self.is_nonsense_mutation = False
Set whether there is a premature stop codon.
def median(self): mu = self.mean() ret_val = math.exp(mu) if math.isnan(ret_val): ret_val = float("inf") return ret_val
Computes the median of a log-normal distribution built with the stats data.
def _normalize_path(self, path): if type(path) is str: path = path.encode() path = path.split(b'\0')[0] if path[0:1] != self.pathsep: path = self.cwd + self.pathsep + path keys = path.split(self.pathsep) i = 0 while i < len(keys): if keys[i] == b'': keys.pop(i) elif keys[i] == b'.': keys.pop(i) elif keys[i] == b'..': keys.pop(i) if i != 0: keys.pop(i-1) i -= 1 else: i += 1 return keys
Takes a path and returns a simple absolute path as a list of directories from the root
def _createIndexRti(self, index, nodeName): return PandasIndexRti(index=index, nodeName=nodeName, fileName=self.fileName, iconColor=self._iconColor)
Auxiliary method that creates a PandasIndexRti.
def cookies(self): return (self.get_query() .select(PageView.ip, PageView.headers['Cookie']) .where(PageView.headers['Cookie'].is_null(False)) .tuples())
Retrieve the cookies header from all the users who visited.
def removeReadGroupSet(self): self._openRepo() dataset = self._repo.getDatasetByName(self._args.datasetName) readGroupSet = dataset.getReadGroupSetByName( self._args.readGroupSetName) def func(): self._updateRepo(self._repo.removeReadGroupSet, readGroupSet) self._confirmDelete("ReadGroupSet", readGroupSet.getLocalId(), func)
Removes a readGroupSet from the repo.
def ensureFulltextIndex(self, fields, minLength = None) : data = { "type" : "fulltext", "fields" : fields, } if minLength is not None : data["minLength"] = minLength ind = Index(self, creationData = data) self.indexes["fulltext"][ind.infos["id"]] = ind return ind
Creates a fulltext index if it does not already exist, and returns it
def generate_folder_names(name, project): out_data_dir = prms.Paths.outdatadir project_dir = os.path.join(out_data_dir, project) batch_dir = os.path.join(project_dir, name) raw_dir = os.path.join(batch_dir, "raw_data") return out_data_dir, project_dir, batch_dir, raw_dir
Creates sensible folder names.
def _groupby_and_aggregate(self, how, grouper=None, *args, **kwargs): if grouper is None: self._set_binner() grouper = self.grouper obj = self._selected_obj grouped = groupby(obj, by=None, grouper=grouper, axis=self.axis) try: if isinstance(obj, ABCDataFrame) and callable(how): result = grouped._aggregate_item_by_item(how, *args, **kwargs) else: result = grouped.aggregate(how, *args, **kwargs) except Exception: result = grouped.apply(how, *args, **kwargs) result = self._apply_loffset(result) return self._wrap_result(result)
Re-evaluate the obj with a groupby aggregation.
def save_sensors(self): if not self.need_save: return fname = os.path.realpath(self.persistence_file) exists = os.path.isfile(fname) dirname = os.path.dirname(fname) if (not os.access(dirname, os.W_OK) or exists and not os.access(fname, os.W_OK)): _LOGGER.error('Permission denied when writing to %s', fname) return split_fname = os.path.splitext(fname) tmp_fname = '{}.tmp{}'.format(split_fname[0], split_fname[1]) _LOGGER.debug('Saving sensors to persistence file %s', fname) self._perform_file_action(tmp_fname, 'save') if exists: os.rename(fname, self.persistence_bak) os.rename(tmp_fname, fname) if exists: os.remove(self.persistence_bak) self.need_save = False
Save sensors to file.
def post(self, id): model = self.model.objects.only('id').get_or_404(id=id) follow, created = Follow.objects.get_or_create( follower=current_user.id, following=model, until=None) count = Follow.objects.followers(model).count() if not current_app.config['TESTING']: tracking.send_signal(on_new_follow, request, current_user) return {'followers': count}, 201 if created else 200
Follow an object given its ID
def change_user_password(self, ID, data): log.info('Change user %s password' % ID) self.put('users/%s/change_password.json' % ID, data)
Change password of a User.
def _delete_fields(self): for key in self._json_dict.keys(): if not key.startswith("_"): delattr(self, key) self._json_dict = {} self.id = None self.name = None
Delete this object's attributes, including name and id
def starting_expression(source_code, offset): word_finder = worder.Worder(source_code, True) expression, starting, starting_offset = \ word_finder.get_splitted_primary_before(offset) if expression: return expression + '.' + starting return starting
Return the expression to complete
def current_values(self): current_dict = { 'date': self.current_session_date, 'score': self.current_sleep_score, 'stage': self.current_sleep_stage, 'breakdown': self.current_sleep_breakdown, 'tnt': self.current_tnt, 'bed_temp': self.current_bed_temp, 'room_temp': self.current_room_temp, 'resp_rate': self.current_resp_rate, 'heart_rate': self.current_heart_rate, 'processing': self.current_session_processing, } return current_dict
Return a dict of all the 'current' parameters.
def getObjectTypeBit(self, t): if isinstance(t, string_types): t = t.upper() try: return self.objectType[t] except KeyError: raise CommandExecutionError(( 'Invalid object type "{0}". It should be one of the following: {1}' ).format(t, ', '.join(self.objectType))) else: return t
returns the bit value of the string object type
def hide_auth(msg): for pattern, repl in RE_HIDE_AUTH: msg = pattern.sub(repl, msg) return msg
Remove sensitive information from msg.
def press_check(df): new_df = df.copy() press = new_df.copy().index.values ref = press[0] inversions = np.diff(np.r_[press, press[-1]]) < 0 mask = np.zeros_like(inversions) for k, p in enumerate(inversions): if p: ref = press[k] cut = press[k + 1 :] < ref mask[k + 1 :][cut] = True new_df[mask] = np.NaN return new_df
Remove pressure reversals from the index.
def fcs(bits): fcs = FCS() for bit in bits: yield bit fcs.update_bit(bit) digest = bitarray(endian="little") digest.frombytes(fcs.digest()) for bit in digest: yield bit
Append running bitwise FCS CRC checksum to end of generator
def _WritePathInfo(self, client_id, path_info): if client_id not in self.metadatas: raise db.UnknownClientError(client_id) path_record = self._GetPathRecord(client_id, path_info) path_record.AddPathInfo(path_info) parent_path_info = path_info.GetParent() if parent_path_info is not None: parent_path_record = self._GetPathRecord(client_id, parent_path_info) parent_path_record.AddChild(path_info)
Writes a single path info record for given client.
def split_none(self): "Don't split the data and create an empty validation set." val = self[[]] val.ignore_empty = True return self._split(self.path, self, val)
Don't split the data and create an empty validation set.
def proper_repr(value: Any) -> str: if isinstance(value, sympy.Basic): result = sympy.srepr(value) fixed_tokens = [ 'Symbol', 'pi', 'Mul', 'Add', 'Mod', 'Integer', 'Float', 'Rational' ] for token in fixed_tokens: result = result.replace(token, 'sympy.' + token) return result if isinstance(value, np.ndarray): return 'np.array({!r})'.format(value.tolist()) return repr(value)
Overrides sympy and numpy returning repr strings that don't parse.
def _vpc_peering_conn_id_for_name(name, conn): log.debug('Retrieving VPC peering connection id') ids = _get_peering_connection_ids(name, conn) if not ids: ids = [None] elif len(ids) > 1: raise SaltInvocationError('Found multiple VPC peering connections ' 'with the same name!! ' 'Please make sure you have only ' 'one VPC peering connection named {0} ' 'or invoke this function with a VPC ' 'peering connection ID'.format(name)) return ids[0]
Get the ID associated with this name
def symlink(source, link_name): if os.path.islink(link_name) and os.readlink(link_name) == source: return os_symlink = getattr(os, "symlink", None) if callable(os_symlink): os_symlink(source, link_name) else: import ctypes csl = ctypes.windll.kernel32.CreateSymbolicLinkW csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32) csl.restype = ctypes.c_ubyte flags = 1 if os.path.isdir(source) else 0 if csl(link_name, source, flags) == 0: raise ctypes.WinError()
Method to allow creating symlinks on Windows
def _get_jenks_config(): config_file = (get_configuration_file() or os.path.expanduser(os.path.join("~", CONFIG_FILE_NAME))) if not os.path.exists(config_file): open(config_file, 'w').close() with open(config_file, 'r') as fh: return JenksData( yaml.load(fh.read()), write_method=generate_write_yaml_to_file(config_file) )
retrieve the jenks configuration object
def _list_paths(self, bucket, prefix): s3 = self.s3 kwargs = {"Bucket": bucket, "Prefix": prefix} if self.list_objects: list_objects_api = "list_objects" else: list_objects_api = "list_objects_v2" paginator = s3.get_paginator(list_objects_api) for page in paginator.paginate(**kwargs): contents = page.get("Contents", None) if not contents: continue for item in contents: yield item["Key"]
Read config for list object api, paginate through list objects.
def read_writer_config(config_files, loader=UnsafeLoader): conf = {} LOG.debug('Reading %s', str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: writer_info = conf['writer'] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) writer_info['config_files'] = config_files return writer_info
Read the writer `config_files` and return the info extracted.
def GetAnnotatedMethods(cls): result = {} for i_cls in reversed(inspect.getmro(cls)): for name in compatibility.ListAttrs(i_cls): cls_method = getattr(i_cls, name) if not callable(cls_method): continue if not hasattr(cls_method, "__http_methods__"): continue result[name] = RouterMethodMetadata( name=name, doc=cls_method.__doc__, args_type=getattr(cls_method, "__args_type__", None), result_type=getattr(cls_method, "__result_type__", None), category=getattr(cls_method, "__category__", None), http_methods=getattr(cls_method, "__http_methods__", set()), no_audit_log_required=getattr(cls_method, "__no_audit_log_required__", False)) return result
Returns a dictionary of annotated router methods.
def prepare_request(self, request): try: request_id = local.request_id except AttributeError: request_id = NO_REQUEST_ID if self.request_id_header and request_id != NO_REQUEST_ID: request.headers[self.request_id_header] = request_id return super(Session, self).prepare_request(request)
Include the request ID, if available, in the outgoing request
def store_snow_tweets_from_disk_to_mongodb(snow_tweets_folder): client = pymongo.MongoClient("localhost", 27017) db = client["snow_tweet_storage"] collection = db["tweets"] for tweet in extract_all_snow_tweets_from_disk_generator(snow_tweets_folder): collection.insert(tweet)
Store all SNOW tweets in a mongodb collection.
def to_bytes(s, encoding=None, errors='strict'): encoding = encoding or 'utf-8' if is_unicode(s): return s.encode(encoding, errors) elif is_strlike(s): return s else: if six.PY2: return str(s) else: return str(s).encode(encoding, errors)
Convert string to bytes.
def _simple_cmd(cmd, app, url='http://localhost:8080/manager', timeout=180): try: opts = { 'path': app, 'version': ls(url)[app]['version'] } return '\n'.join(_wget(cmd, opts, url, timeout=timeout)['msg']) except Exception: return 'FAIL - No context exists for path {0}'.format(app)
Simple command wrapper to commands that need only a path option
def tag(name, tag_name): with LOCK: metric(name) TAGS.setdefault(tag_name, set()).add(name)
Tag the named metric with the given tag.
def toggle_eventtype(self): check = self.check_all_eventtype.isChecked() for btn in self.idx_eventtype_list: btn.setChecked(check)
Check or uncheck all event types in event type scroll.
def reset_position(self): self.pos = 0 self.col = 0 self.row = 1 self.eos = 0
Reset all current positions.
def _evaluate_cut(transition, cut, unpartitioned_account, direction=Direction.BIDIRECTIONAL): cut_transition = transition.apply_cut(cut) partitioned_account = account(cut_transition, direction) log.debug("Finished evaluating %s.", cut) alpha = account_distance(unpartitioned_account, partitioned_account) return AcSystemIrreducibilityAnalysis( alpha=round(alpha, config.PRECISION), direction=direction, account=unpartitioned_account, partitioned_account=partitioned_account, transition=transition, cut=cut)
Find the |AcSystemIrreducibilityAnalysis| for a given cut.
def clean(self): super().clean() if self.voter is None and self.anonymous_key is None: raise ValidationError(_('A user id or an anonymous key must be used.')) if self.voter and self.anonymous_key: raise ValidationError(_('A user id or an anonymous key must be used, but not both.'))
Validates the considered instance.
def dashes(phone): if isinstance(phone, str): if phone.startswith("+1"): return "1-" + "-".join((phone[2:5], phone[5:8], phone[8:])) elif len(phone) == 10: return "-".join((phone[:3], phone[3:6], phone[6:])) else: return phone else: return phone
Returns the phone number formatted with dashes.
def remove_env(environment): if not environment: print("You need to supply an environment name") return parser = read_config() if not parser.remove_section(environment): print("Unknown environment type '%s'" % environment) return write_config(parser) print("Removed environment '%s'" % environment)
Remove an environment from the configuration.
def return_value(self): if self._raises: if inspect.isclass(self._raises): raise self._raises() else: raise self._raises else: if isinstance(self._returns, tuple): return tuple([x.value if isinstance(x, Variable) else x for x in self._returns]) return self._returns.value if isinstance(self._returns, Variable) \ else self._returns
Returns the value for this expectation or raises the proper exception.
def direct_reply(self, text): channel_id = self._client.open_dm_channel(self._get_user_id()) self._client.rtm_send_message(channel_id, text)
Send a reply via direct message using RTM API
def parse_value(self, value_string: str): self.value = Decimal(value_string) return self.value
Parses the amount string.
def jscanner(url): response = requester(url, main_url, delay, cook, headers, timeout, host, proxies, user_agents, failed, processed) matches = rendpoint.findall(response) for match in matches: match = match[0] + match[1] if not re.search(r'[}{><"\']', match) and not match == '/': verb('JS endpoint', match) endpoints.add(match)
Extract endpoints from JavaScript code.
def make_mesh( coor, ngroups, conns, mesh_in ): mat_ids = [] for ii, conn in enumerate( conns ): mat_id = nm.empty( (conn.shape[0],), dtype = nm.int32 ) mat_id.fill( mesh_in.mat_ids[ii][0] ) mat_ids.append( mat_id ) mesh_out = Mesh.from_data( 'merged mesh', coor, ngroups, conns, mat_ids, mesh_in.descs ) return mesh_out
Create a mesh reusing mat_ids and descs of mesh_in.
def leaveoneout(self): traintestfile = self.fileprefix + '.train' options = "-F " + self.format + " " + self.timbloptions + " -t leave_one_out" if sys.version < '3': self.api = timblapi.TimblAPI(b(options), b"") else: self.api = timblapi.TimblAPI(options, "") if self.debug: print("Enabling debug for timblapi",file=stderr) self.api.enableDebug() print("Calling Timbl API : " + options,file=stderr) if sys.version < '3': self.api.learn(b(traintestfile)) self.api.test(b(traintestfile), b(self.fileprefix + '.out'),b'') else: self.api.learn(u(traintestfile)) self.api.test(u(traintestfile), u(self.fileprefix + '.out'),'') return self.api.getAccuracy()
Train & Test using leave one out
def next_frame_sampling(): hparams = next_frame_basic_deterministic() hparams.scheduled_sampling_mode = "prob_inverse_exp" hparams.scheduled_sampling_max_prob = 1.0 hparams.scheduled_sampling_decay_steps = 10000 return hparams
Basic conv model with scheduled sampling.
def send(self, data): self._send_data(int_to_hex(len(data))) self._send_data(data)
Send a formatted message to the ADB server
def compute_expansion_alignment(satz_a, satz_b, satz_c, satz_d): zeta_a = satz_a zeta_b = satz_b phi_a = compute_phi(zeta_a) phi_b = compute_phi(zeta_b) theta_a = compute_theta(zeta_a, phi_a) theta_b = compute_theta(zeta_b, phi_b) phi = (phi_a + phi_b) / 2 zeta = compute_zeta(phi) theta = compute_theta(zeta, phi) c_expansion = 4 * (((theta_a + theta_b) / 2 - theta) / (theta_a - theta_b)) sin_beta_2 = scan_width / (2 * H) d = ((R + H) / R * np.cos(phi) - np.cos(zeta)) * sin_beta_2 e = np.cos(zeta) - np.sqrt(np.cos(zeta) ** 2 - d ** 2) c_alignment = 4 * e * np.sin(zeta) / (theta_a - theta_b) return c_expansion, c_alignment
All angles in radians.
def __apf_cmd(cmd): apf_cmd = '{0} {1}'.format(salt.utils.path.which('apf'), cmd) out = __salt__['cmd.run_all'](apf_cmd) if out['retcode'] != 0: if not out['stderr']: msg = out['stdout'] else: msg = out['stderr'] raise CommandExecutionError( 'apf failed: {0}'.format(msg) ) return out['stdout']
Return the apf location
def sorted_source_files(self): assert self.final, 'Call build() before using the graph.' out = [] for node in nx.topological_sort(self.graph): if isinstance(node, NodeSet): out.append(node.nodes) else: out.append([node]) return list(reversed(out))
Returns a list of targets in topologically sorted order.
def save_current_figure_as(self): if self.current_thumbnail is not None: self.save_figure_as(self.current_thumbnail.canvas.fig, self.current_thumbnail.canvas.fmt)
Save the currently selected figure.
def process_slice(self, b_rot90=None): if b_rot90: self._Mnp_2Dslice = np.rot90(self._Mnp_2Dslice) if self.func == 'invertIntensities': self.invert_slice_intensities()
Processes a single slice.
def wrapped_target(target, q_stdout, q_stderr, q_error, robust, name, *args, **kwargs): import sys sys.stdout = IOQueue(q_stdout) sys.stderr = IOQueue(q_stderr) try: target(*args, **kwargs) except: if not robust: s = 'Error in tab\n' + traceback.format_exc() logger = daiquiri.getLogger(name) logger.error(s) else: raise if not robust: q_error.put(name) raise
Wraps a target with queues replacing stdout and stderr
def _async_callable(func): if isinstance(func, types.CoroutineType): return func @functools.wraps(func) async def _async_def_wrapper(*args, **kwargs): return func(*args, **kwargs) return _async_def_wrapper
Ensure the callable is an async def.
def do_mute(self, sender, body, args): if sender.get('MUTED'): self.send_message('you are already muted', sender) else: self.broadcast('%s has muted this chatroom' % (sender['NICK'],)) sender['QUEUED_MESSAGES'] = [] sender['MUTED'] = True
Temporarily mutes chatroom for a user
def run_checks(collector): artifact = collector.configuration["dashmat"].artifact chosen = artifact if chosen in (None, "", NotSpecified): chosen = None dashmat = collector.configuration["dashmat"] modules = collector.configuration["__active_modules__"] config_root = collector.configuration["config_root"] module_options = collector.configuration["modules"] datastore = JsonDataStore(os.path.join(config_root, "data.json")) if dashmat.redis_host: datastore = RedisDataStore(redis.Redis(dashmat.redis_host)) scheduler = Scheduler(datastore) for name, module in modules.items(): if chosen is None or name == chosen: server = module.make_server(module_options[name].server_options) scheduler.register(module, server, name) scheduler.twitch(force=True)
Just run the checks for our modules
def cut_for_search(self, sentence, HMM=True): words = self.cut(sentence, HMM=HMM) for w in words: if len(w) > 2: for i in xrange(len(w) - 1): gram2 = w[i:i + 2] if self.FREQ.get(gram2): yield gram2 if len(w) > 3: for i in xrange(len(w) - 2): gram3 = w[i:i + 3] if self.FREQ.get(gram3): yield gram3 yield w
Finer segmentation for search engines.
def _wait_until_connectable(self, timeout=30): count = 0 while not utils.is_connectable(self.profile.port): if self.process.poll() is not None: raise WebDriverException( "The browser appears to have exited " "before we could connect. If you specified a log_file in " "the FirefoxBinary constructor, check it for details.") if count >= timeout: self.kill() raise WebDriverException( "Can't load the profile. Possible firefox version mismatch. " "You must use GeckoDriver instead for Firefox 48+. Profile " "Dir: %s If you specified a log_file in the " "FirefoxBinary constructor, check it for details." % (self.profile.path)) count += 1 time.sleep(1) return True
Blocks until the extension is connectable in the firefox.
async def get_real_ext_ip(self): while self._ip_hosts: try: timeout = aiohttp.ClientTimeout(total=self._timeout) async with aiohttp.ClientSession( timeout=timeout, loop=self._loop ) as session, session.get(self._pop_random_ip_host()) as resp: ip = await resp.text() except asyncio.TimeoutError: pass else: ip = ip.strip() if self.host_is_ip(ip): log.debug('Real external IP: %s', ip) break else: raise RuntimeError('Could not get the external IP') return ip
Return real external IP address.
def time_logger(name): start_time = time.time() yield end_time = time.time() total_time = end_time - start_time logging.info("%s; time: %ss", name, total_time)
This logs the time usage of a code block
def _round(ctx, number, num_digits): number = conversions.to_decimal(number, ctx) num_digits = conversions.to_integer(num_digits, ctx) return decimal_round(number, num_digits, ROUND_HALF_UP)
Rounds a number to a specified number of digits
def adduser(app, username, password): with app.app_context(): create_user(username=username, password=password) click.echo('user created!')
Add new user with admin access
def setup(self, **kwargs): for key in kwargs: if key not in self.config.keys(): raise error.GrabMisuseError('Unknown option: %s' % key) if 'url' in kwargs: if self.config.get('url'): kwargs['url'] = self.make_url_absolute(kwargs['url']) self.config.update(kwargs)
Setting up Grab instance configuration.
def shutdown(self): if not self._shutdown: self._data_queue.put((None, None)) self._fetcher.join() for _ in range(self._num_workers): self._key_queue.put((None, None)) for w in self._workers: if w.is_alive(): w.terminate() self._shutdown = True
Shutdown internal workers by pushing terminate signals.
def delete(self): try: self.revert() except errors.ChangelistError: pass self._connection.run(['change', '-d', str(self._change)])
Reverts all files in this changelist then deletes the changelist from perforce
def bind(cls): document["show_author_picker"].bind("click", lambda x: cls.show()) cls.storno_btn_el.bind("click", lambda x: cls.hide()) cls.pick_btn_el.bind("click", cls.on_pick_button_pressed)
Bind the callbacks to the buttons.
def PrintTags(self, file): print >>file, '/* Tag definition for %s */' % self._name print >>file, 'enum %s_ {' % self._name.lower() for entry in self._entries: print >>file, ' %s=%d,' % (self.EntryTagName(entry), entry.Tag()) print >>file, ' %s_MAX_TAGS' % (self._name.upper()) print >>file, '};\n'
Prints the tag definitions for a structure.
def form_node(cls): assert issubclass(cls, FormNode) res = attrs(init=False, slots=True)(cls) res._args = [] res._required_args = 0 res._rest_arg = None state = _FormArgMode.REQUIRED for field in fields(res): if 'arg_mode' in field.metadata: if state is _FormArgMode.REST: raise RuntimeError('rest argument must be last') if field.metadata['arg_mode'] is _FormArgMode.REQUIRED: if state is _FormArgMode.OPTIONAL: raise RuntimeError('required arg after optional arg') res._args.append(field) res._required_args += 1 elif field.metadata['arg_mode'] is _FormArgMode.OPTIONAL: state = _FormArgMode.OPTIONAL res._args.append(field) elif field.metadata['arg_mode'] is _FormArgMode.REST: state = _FormArgMode.REST res._rest_arg = field else: assert 0 return res
A class decorator to finalize fully derived FormNode subclasses.
def compare_version(self, version_string, op): from pkg_resources import parse_version from monty.operator import operator_from_str op = operator_from_str(op) return op(parse_version(self.version), parse_version(version_string))
Compare Abinit version to `version_string` with operator `op`
def clean(self): if self.cleaners: yield from asyncio.wait([x() for x in self.cleaners], loop=self.loop)
Run all of the cleaners added by the user.
def _load_vocab_file(vocab_file, reserved_tokens=None): if reserved_tokens is None: reserved_tokens = RESERVED_TOKENS subtoken_list = [] with tf.gfile.Open(vocab_file, mode="r") as f: for line in f: subtoken = _native_to_unicode(line.strip()) subtoken = subtoken[1:-1] if subtoken in reserved_tokens: continue subtoken_list.append(_native_to_unicode(subtoken)) return reserved_tokens + subtoken_list
Load vocabulary while ensuring reserved tokens are at the top.
def _do_scale(image, size): shape = tf.cast(tf.shape(image), tf.float32) w_greater = tf.greater(shape[0], shape[1]) shape = tf.cond(w_greater, lambda: tf.cast([shape[0] / shape[1] * size, size], tf.int32), lambda: tf.cast([size, shape[1] / shape[0] * size], tf.int32)) return tf.image.resize_bicubic([image], shape)[0]
Rescale the image by scaling the smaller spatial dimension to `size`.
def __AddWorkers(self, num_to_add): assert(self.__IsWebUiReady()) zone_to_ips = self.__GetZoneToWorkerIpsTable() zone_old_new = [] for zone, ips in zone_to_ips.iteritems(): num_nodes_in_zone = len(ips) num_nodes_to_add = 0 zone_old_new.append((zone, num_nodes_in_zone, num_nodes_to_add)) print 'num_to_add %s' % num_to_add for _ in range(num_to_add): zone_old_new.sort(key= lambda z : z[1]+z[2]) zt = zone_old_new[0] zone_old_new[0] = (zt[0], zt[1], zt[2]+1) zone_plan = [(zt[2], zt[0]) for zt in zone_old_new] print 'resize plan' if self.config.workers_on_spot_instances: new_worker_instances = self.__LaunchSpotWorkerInstances(zone_plan) else: new_worker_instances = self.__LaunchOnDemandWorkerInstances(zone_plan) self.__WaitForInstancesReachable(new_worker_instances) self.__ConfigureWorkers(new_worker_instances) return
Adds workers evenly across all enabled zones.
def _handshake(self, conn, addr): msg = conn.recv(len(MAGIC_REQ)) log.debug('Received message %s from %s', msg, addr) if msg != MAGIC_REQ: log.warning('%s is not a valid REQ message from %s', msg, addr) return log.debug('Sending the private key') conn.send(self.__key) log.debug('Waiting for the client to confirm') msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return log.debug('Sending the signature key') conn.send(self.__sgn) log.debug('Waiting for the client to confirm') msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return log.info('%s is now authenticated', addr) self.keep_alive(conn)
Ensures that the client receives the AES key.
def update(d, e): res = copy.copy(d) res.update(e) return res
Return a copy of dict `d` updated with dict `e`.
def create(self, verbose=None):
    """Returns a response after attempting to create the mailing list.

    Raises EmailNotEnabledError when email is disabled in settings.
    When `verbose` is truthy, writes a progress line to stdout.
    """
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    payload = {
        "address": self.address,
        "name": self.name,
        "description": self.display_name,
    }
    response = requests.post(self.api_url, auth=("api", self.api_key), data=payload)
    if verbose:
        sys.stdout.write(
            f"Creating mailing list {self.address}. "
            f"Got response={response.status_code}.\n"
        )
    return response
Returns a response after attempting to create the list.
def delete(self):
    """Delete the document from the database.

    Raises DeletionError when the document was never saved (no URL) or
    when the server rejects the request; on success resets the document
    against its collection and marks it modified.
    """
    if self.URL is None:
        raise DeletionError("Can't delete a document that was not saved")
    response = self.connection.session.delete(self.URL)
    payload = response.json()
    succeeded = response.status_code in (200, 202) and 'error' not in payload
    if not succeeded:
        raise DeletionError(payload['errorMessage'], payload)
    self.reset(self.collection)
    self.modified = True
deletes the document from the database
def cnn_learner(data:DataBunch, base_arch:Callable, cut:Union[int,Callable]=None, pretrained:bool=True,
                lin_ftrs:Optional[Collection[int]]=None, ps:Floats=0.5, custom_head:Optional[nn.Module]=None,
                split_on:Optional[SplitFuncOrIdxList]=None, bn_final:bool=False, init=nn.init.kaiming_normal_,
                concat_pool:bool=True, **kwargs:Any)->Learner:
    "Build a convnet-style `Learner` from `data` and `base_arch`."
    arch_meta = cnn_config(base_arch)
    net = create_cnn_model(base_arch, data.c, cut, pretrained, lin_ftrs, ps=ps,
                           custom_head=custom_head, split_on=split_on,
                           bn_final=bn_final, concat_pool=concat_pool)
    learner = Learner(data, net, **kwargs)
    # Use the caller's split, falling back to the architecture's default.
    learner.split(split_on or arch_meta['split'])
    if pretrained:
        learner.freeze()
    if init:
        # Only the head (net[1]) is re-initialized; the body keeps
        # its (possibly pretrained) weights.
        apply_init(net[1], init)
    return learner
Build convnet style learner.
def stupid_hack(most=10, wait=None):
    """Sleep for `wait` seconds when given, otherwise for a random
    whole number of seconds in [1, most - 1].

    NOTE(review): randrange excludes its upper bound, so the default
    maximum sleep is 9 seconds, not 10.
    """
    delay = wait if wait is not None else random.randrange(1, most)
    time.sleep(delay)
Sleep for `wait` seconds when given; otherwise sleep for a random 1 to `most`-1 seconds (default at most 9).
def question_default_loader(self, pk):
    """Load a Question by primary key from the database.

    Returns the Question with its related PKs populated, or None when no
    row matches.
    """
    try:
        question = Question.objects.get(pk=pk)
    except Question.DoesNotExist:
        return None
    self.question_default_add_related_pks(question)
    return question
Load a Question from the database.
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
    """Reset the SQL sequences after new objects are imported.

    Runs only on a real import that created at least one new row; without
    this, databases whose sequences lag behind explicit-PK inserts would
    hand out duplicate IDs.
    """
    if dry_run:
        return
    created_any = any(row.import_type == RowResult.IMPORT_TYPE_NEW
                      for row in result.rows)
    if not created_any:
        return
    connection = connections[DEFAULT_DB_ALIAS]
    sequence_sql = connection.ops.sequence_reset_sql(no_style(), [self._meta.model])
    if not sequence_sql:
        return
    cursor = connection.cursor()
    try:
        for statement in sequence_sql:
            cursor.execute(statement)
    finally:
        cursor.close()
Reset the SQL sequences after new objects are imported
def isotime(s):
    """Convert timestamps in ISO 8601 format to and from Unix time.

    - int in: returns a UTC "%Y-%m-%dT%H:%M:%S" string.
    - float in: same, with the fractional seconds appended after a ".".
    - str in: parses (optionally "Z"-suffixed, optionally fractional)
      and returns seconds since the epoch as a float.
    Raises TypeError for any other input type.
    """
    if type(s) == type(1):
        return time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(s))
    elif type(s) == type(1.0):
        msec = s - int(s)
        date = time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(s))
        # repr(msec) looks like "0.125"; strip the leading "0." -- two
        # characters.  BUG FIX: the original sliced [3:], which also
        # dropped the first fractional digit ("0.125" -> "25").
        return date + "." + repr(msec)[2:]
    elif type(s) == type("") or type(s) == type(u""):
        if s[-1] == "Z":
            s = s[:-1]
        if "." in s:
            (date, msec) = s.split(".")
        else:
            date = s
            msec = "0"
        return calendar.timegm(time.strptime(date, "%Y-%m-%dT%H:%M:%S")) + float("0." + msec)
    else:
        raise TypeError
Convert timestamps in ISO 8601 format to and from Unix time.
def executable_exists(executable):
    """Test if an executable is available on the system.

    Returns True when a file named `executable` exists in any directory
    listed on $PATH.  Only existence is checked (not the executable
    bit), matching the original behavior.

    Fixes: crashed with AttributeError when PATH was unset (os.getenv
    returns None), and split on a hard-coded ":" instead of the
    platform's os.pathsep.
    """
    search_path = os.getenv("PATH") or ""
    return any(
        os.path.exists(os.path.join(directory, executable))
        for directory in search_path.split(os.pathsep)
    )
Test if an executable is available on the system.
def git_sha_metadata(content, git_content):
    """Add git SHA metadata to `content`.

    No-op unless the GIT_SHA_METADATA setting is truthy and the content
    file is committed; otherwise stores the newest and oldest commit
    hashes (stringified) under 'gitsha_newest' / 'gitsha_oldest'.
    """
    if not content.settings['GIT_SHA_METADATA']:
        return
    if not git_content.is_committed():
        return
    newest = git_content.get_newest_commit()
    oldest = git_content.get_oldest_commit()
    content.metadata['gitsha_newest'] = str(newest)
    content.metadata['gitsha_oldest'] = str(oldest)
Add the newest and oldest git commit SHAs to the content's metadata.
def convert_kv(key, val, attr_type, attr=None, cdata=False):
    """Converts a number or string into an XML element.

    Args:
        key: element name (sanitized via make_valid_xml_name).
        val: element text content.
        attr_type: when truthy, add a type="..." attribute for `val`.
        attr: optional dict of extra XML attributes (default: empty).
        cdata: when True, wrap the value in CDATA instead of escaping it.

    Returns the serialized element as a string.

    BUG FIX: the default was a mutable `attr={}`.  Because `attr` is
    mutated below (attr['type'] = ...) the shared default dict could
    accumulate state across calls; use a None sentinel instead.
    """
    if attr is None:
        attr = {}
    LOG.info('Inside convert_kv(): key="%s", val="%s", type(val) is: "%s"' % (
        unicode_me(key), unicode_me(val), type(val).__name__)
    )

    key, attr = make_valid_xml_name(key, attr)

    if attr_type:
        attr['type'] = get_xml_type(val)
    attrstring = make_attrstring(attr)
    return '<%s%s>%s</%s>' % (
        key, attrstring,
        wrap_cdata(val) if cdata == True else escape_xml(val),
        key
    )
Converts a number or string into an XML element
def _inspectArguments(self, args): if args: self.exec_path = PathStr(args[0]) else: self.exec_path = None session_name = None args = args[1:] openSession = False for arg in args: if arg in ('-h', '--help'): self._showHelp() elif arg in ('-d', '--debug'): print('RUNNGING IN DEBUG-MODE') self.opts['debugMode'] = True elif arg in ('-l', '--log'): print('CREATE LOG') self.opts['createLog'] = True elif arg in ('-s', '--server'): self.opts['server'] = True elif arg in ('-o', '--open'): openSession = True elif openSession: session_name = arg else: print("Argument '%s' not known." % arg) return self._showHelp() return session_name
Inspect the command-line arguments, apply them to appBase, and return the session name to open (if any).