code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def make_tutor(user):
    """Make the given user a tutor.

    Marks the user as staff (not superuser), removes them from the owner
    group and adds them to the tutor group, saving each object as it goes.
    """
    tutor_group, owner_group = _get_user_groups()

    user.is_staff = True
    user.is_superuser = False
    user.save()

    # Move the user from the owner group to the tutor group.
    for group, membership_op in (
        (owner_group, owner_group.user_set.remove),
        (tutor_group, tutor_group.user_set.add),
    ):
        membership_op(user)
        group.save()
Makes the given user a tutor.
def single_user_mode(self, command=None, options=None): cmd = [self._pgcommand('postgres'), '--single', '-D', self._data_dir] for opt, val in sorted((options or {}).items()): cmd.extend(['-c', '{0}={1}'.format(opt, val)]) cmd.append(self._database) return self.cancellable_sub...
Run a given command in single-user mode. If the command is empty, just start and stop.
def HTMLHelpWorkshop(self):
    """Return Microsoft HTML Help Workshop path(s); empty for VC < 11.0."""
    if self.vc_ver >= 11.0:
        workshop = os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')
        return [workshop]
    return []
Microsoft HTML Help Workshop
def add(self, split_info):
    """Add the split info, rejecting duplicate split names.

    Raises:
        ValueError: if a split with the same name is already present.
    """
    name = split_info.name
    if name in self:
        raise ValueError("Split {} already present".format(name))
    # Bypass our own __setitem__ to store the entry directly.
    super(SplitDict, self).__setitem__(name, split_info)
Add the split info.
def refresh(self):
    """Flush the canvas content to the underlying screen."""
    canvas, dx, dy = self._buffer, self._dx, self._dy
    self._screen.block_transfer(canvas, dx, dy)
Flush the canvas content to the underlying screen.
def unblock_all(self):
    """Unblock this group itself, then every emitter it contains."""
    self.unblock()
    for emitter in self._emitters.values():
        emitter.unblock()
Unblock all emitters in this group.
def update_wrapper(self, process_list):
    """Wrapper for the children update.

    Refreshes the count first, then either performs a full update or
    returns the previously computed result.
    """
    self.set_count(len(process_list))
    if not self.should_update():
        return self.result()
    return self.update(process_list)
Wrapper for the children update
def _setup_preferred_paths(self, preferred_conversion_paths): for path in preferred_conversion_paths: for pair in pair_looper(path): if pair not in self.converters: log.warning('Invalid conversion path %s, unknown step %s' % (repr(p...
Add given valid preferred conversion paths
def load_dynamic_class(fqn, subclass):
    """Dynamically load the class named by ``fqn`` and verify that it is a
    strict subclass of ``subclass``.

    Non-string arguments are returned unchanged.

    Raises:
        TypeError: if the loaded class is ``subclass`` itself or not a
            subclass of it.
    """
    if not isinstance(fqn, basestring):
        return fqn
    loaded = load_class_from_name(fqn)
    if loaded == subclass or not issubclass(loaded, subclass):
        raise TypeError("%s is not a valid %s" % (fqn, subclass.__name__))
    return loaded
Dynamically load fqn class and verify it's a subclass of subclass
def sevenths(key):
    """Return all the sevenths chords in key in a list.

    Results are memoized in ``_sevenths_cache``, keyed by ``key``.
    """
    # dict.has_key() is Python 2 only (removed in Python 3); `in` works on both.
    if key in _sevenths_cache:
        return _sevenths_cache[key]
    # A list comprehension yields a list on both Python 2 and 3; map() would
    # cache a one-shot iterator on Python 3, breaking repeated lookups.
    res = [seventh(note, key) for note in keys.get_notes(key)]
    _sevenths_cache[key] = res
    return res
Return all the sevenths chords in key in a list.
def stats(self, metrics, **kwargs):
    """Pull a list of metrics for the current object instance.

    Delegates to the class-level ``all_stats`` using this instance's
    account and id.
    """
    cls = self.__class__
    return cls.all_stats(self.account, [self.id], metrics, **kwargs)
Pulls a list of metrics for the current object instance.
def cleanup(config_dir): stdout_path = os.path.join(config_dir, 'pueue.stdout') stderr_path = os.path.join(config_dir, 'pueue.stderr') if os._exists(stdout_path): os.remove(stdout_path) if os._exists(stderr_path): os.remove(stderr_path) socketPath = os.path.join(config_dir, 'pueue.so...
Remove temporary stderr and stdout files as well as the daemon socket.
def purge_cache(self, object_type):
    """Purge the named cache of all values.

    If no cache exists for ``object_type``, nothing is done.
    """
    if object_type not in self.mapping:
        return
    cache = self.mapping[object_type]
    log.debug("Purging [{}] cache of {} values.".format(object_type, len(cache)))
    cache.purge()
Purge the named cache of all values. If no cache exists for object_type, nothing is done
def executions(self) -> "List[Execution]":
    """List of all executions from this session, one per recorded fill."""
    # Comprehension instead of list(generator): same result, no redundant
    # generator wrapper (ruff C400). Annotation stringified so it is a lazy
    # forward reference (backward-compatible per PEP 484).
    return [fill.execution for fill in self.wrapper.fills.values()]
List of all executions from this session.
def _generate_ipaddressfield(self, **kwargs):
    """Generate a random IPv4 address string (IPv4 only for now).

    Fields carrying an explicit default are delegated to the generic
    default-value generator instead.
    """
    field = kwargs['field']
    if field.default != NOT_PROVIDED:
        return self._generate_field_with_default(**kwargs)
    # Four random octets in [0, 255], dot-joined.
    return '.'.join(str(random.randint(0, 255)) for _ in range(4))
Currently only IPv4 fields.
def template(cls, address=None, target=None, commandtuple=None, userdata=None, cmd2=-1, flags=None): msgraw = bytearray([0x02, cls._code]) msgraw.extend(bytes(cls._receivedSize)) msg = ExtendedReceive.from_raw_message(msgraw) if commandtuple: cmd1 = commandtu...
Create message template for callbacks.
def rightStatus(self, sheet): 'Compose right side of status bar.' if sheet.currentThreads: gerund = (' '+sheet.progresses[0].gerund) if sheet.progresses else '' status = '%9d %2d%%%s' % (len(sheet), sheet.progressPct, gerund) else: status = '%9d %s' % (len(sh...
Compose right side of status bar.
def queue_manager_stats(self, queue_manager, tags): for mname, pymqi_value in iteritems(metrics.queue_manager_metrics()): try: m = queue_manager.inquire(pymqi_value) mname = '{}.queue_manager.{}'.format(self.METRIC_PREFIX, mname) self.gauge(mname, m, t...
Get stats from the queue manager
def remote_pdb_handler(signum, frame): try: from remote_pdb import RemotePdb rdb = RemotePdb(host="127.0.0.1", port=0) rdb.set_trace(frame=frame) except ImportError: log.warning( "remote_pdb unavailable. Please install remote_pdb to " "allow remote debugg...
Handler to drop us into a remote debugger upon receiving SIGUSR1
def equal_set(self, a, b):
    """See if a and b have the same elements.

    Returns 0/1 for the cheap length/identity checks, otherwise the
    result of testing mutual subset containment.
    """
    if len(a) == len(b):
        if a == b:
            return 1
        return self.subset(a, b) and self.subset(b, a)
    return 0
See if a and b have the same elements
def show_env(environment): if not environment: print("You need to supply an environment name") return parser = read_config() try: commands = parser.get(environment, "cmd").split("\n") except KeyError: print("Unknown environment type '%s'" % environment) return ...
Show the commands for a given environment.
def upgrade_api(request, client, version):
    """Upgrade the nova API client to the specified version if possible.

    The client is rebuilt only when ``version`` falls inside the server's
    supported [min, max] range; otherwise it is returned unchanged.
    """
    min_ver, max_ver = api_versions._get_server_version_range(client)
    requested = api_versions.APIVersion(version)
    if min_ver <= requested <= max_ver:
        client = _nova.novaclient(request, version)
    return client
Upgrade the nova API to the specified version if possible.
def limit(self, limit):
    """Limit number of records.

    The limit is applied only when it is an int; a clone of this query is
    returned in either case.
    """
    duplicate = self._clone()
    if isinstance(limit, int):
        duplicate._limit = limit
    return duplicate
Limit number of records
def select_date(self, rows: List[Row], column: DateColumn) -> Date:
    """Select function takes a row as a list and a column name and returns
    the date in that column.

    Returns the first Date found, or Date(-1, -1, -1) when none is present.
    """
    for row in rows:
        candidate = row.values[column.name]
        if isinstance(candidate, Date):
            return candidate
    return Date(-1, -1, -1)
Select function takes a row as a list and a column name and returns the date in that column.
def interactive(self): while True: line = sys.stdin.readline().strip() if line == 'quit': sys.exit() elif line == 'validate': self.check_syntax() self.check_imports() self.check_install_json() sel...
Run in interactive mode.
def guess_unmatched(self, token): if token.lower() in ['apt', 'apartment']: return False if token.strip() == '-': return True if len(token) <= 2: return False if self.street_suffix is None and self.street is None and self.street_prefix is None and self...
When we find something that doesn't match, we can make an educated guess and log it as such.
def close(self):
    """Close the serial port and forget the pyboard handle."""
    pyb = self.pyb
    if pyb and pyb.serial:
        pyb.serial.close()
        self.pyb = None
Closes the serial port.
def save_model(self, request, obj, form, change):
    """Update all metrics sharing ``obj``'s name with the changed form data."""
    like_metrics = self.model.objects.filter(name=obj.name)
    updates = {key: form.cleaned_data[key] for key in form.changed_data}
    like_metrics.update(**updates)
Updates all metrics with the same name
def namedb_open( path ): con = sqlite3.connect( path, isolation_level=None, timeout=2**30 ) db_query_execute(con, 'pragma mmap_size=536870912', ()) con.row_factory = namedb_row_factory version = namedb_get_version(con) if not semver_equal(version, VERSION): raise Exception('Database has vers...
Open a connection to our database
def load_module(name, filename): if sys.version_info < (3, 5): import imp import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore", RuntimeWarning) return imp.load_source(name, filename) else: from importlib.machinery import SourceFil...
Load a module into name given its filename
def _call_api_single_related_resource(self, resource, full_resource_url, method_name, **kwargs): url = full_resource_url params = { 'headers': self.get_http_headers( resource.Meta.name, method_name, **kwargs), 'url': url ...
For HypermediaResource - make an API call to a known URL
def xmlrpc_save2file(self, filename): savefile = open(filename,'wb') try: pickle.dump({'scheduled':self.scheduled_tasks, 'reschedule':self.reschedule},savefile) except pickle.PicklingError: return -1 savefile.close() return 1
Save results and own state into file.
def deep_unicode(s, encodings=None): if encodings is None: encodings = ['utf-8', 'latin-1'] if isinstance(s, (list, tuple)): return [deep_unicode(i) for i in s] if isinstance(s, dict): return dict([ (deep_unicode(key), deep_unicode(s[key])) for key in s ...
Recursively decode s (including nested lists, tuples and dicts) using the given encodings.
def _terminate_instance(self): try: self.resource.resource_groups.delete(self.running_instance_id) except Exception as error: raise AzureCloudException( 'Unable to terminate resource group: {0}.'.format(error) )
Terminate the resource group and instance.
def _check_1d_array_object(parameter, name_param): if isinstance(parameter, (np.ndarray, list, tuple, np.matrix)): parameter = np.array(parameter) if parameter.ndim != 1: raise TypeError("{} should be a 1d array type object".format(name_param)) else: raise TypeError("{} shoul...
Checks whether given parameter is a 1d array like object, and returns a numpy array object
def _is_multiframe_4d(dicom_input): if not common.is_multiframe_dicom(dicom_input): return False header = dicom_input[0] number_of_stack_slices = common.get_ss_value(header[Tag(0x2001, 0x105f)][0][Tag(0x2001, 0x102d)]) number_of_stacks = int(int(header.NumberOfFrames) / number_of_stack_slices) ...
Use this function to detect if a dicom series is a philips multiframe 4D dataset
def _process_regex(cls, regex, rflags, state):
    """Preprocess the regular expression component of a token definition.

    A ``Future`` is resolved first; the pattern is then compiled with the
    given flags and its bound ``match`` method returned.
    """
    pattern = regex.get() if isinstance(regex, Future) else regex
    return re.compile(pattern, rflags).match
Preprocess the regular expression component of a token definition.
def getMaskArray(self, signature):
    """Return the StaticMask array registered for ``signature``.

    None is returned when no mask is registered under that signature.
    """
    if signature not in self.masklist:
        return None
    return self.masklist[signature]
Returns the appropriate StaticMask array for the image.
def __json_strnum_to_bignum(json_object): for key in ('id', 'week', 'in_reply_to_id', 'in_reply_to_account_id', 'logins', 'registrations', 'statuses'): if (key in json_object and isinstance(json_object[key], six.text_type)): try: json_object[key] = int(json_object...
Converts json string numerals to native python bignums.
def SignFiles(self, filenames):
    """Signs multiple files at once.

    Joins ``filenames`` into one space-separated command line, runs the
    configured signing command over it, then (if a verification command
    line is configured) runs that over the same file list.
    """
    # NOTE(review): the command is assembled by string interpolation, so
    # filenames containing spaces or shell metacharacters will be split or
    # misinterpreted. If these names can come from untrusted input, prefer
    # subprocess.check_call([...]) with a list argument -- confirm how
    # _signing_cmdline is defined before changing.
    file_list = " ".join(filenames)
    subprocess.check_call("%s %s" % (self._signing_cmdline, file_list))
    if self._verification_cmdline:
        subprocess.check_call("%s %s" % (self._verification_cmdline, file_list))
Signs multiple files at once.
def _prependstore(self, store): if not store.bitlength: return store = offsetcopy(store, (self.offset - store.bitlength) % 8) assert (store.offset + store.bitlength) % 8 == self.offset % 8 bit_offset = self.offset % 8 if bit_offset: store.setbyte(-1, (stor...
Join another store on to the start of this one.
def components(self) -> List['DAGCircuit']:
    """Split this DAGCircuit into its independent (weakly connected) components."""
    subgraphs = nx.weakly_connected_component_subgraphs(self.graph)
    return [DAGCircuit(subgraph) for subgraph in subgraphs]
Split DAGCircuit into independent components
def _delete_handler(self, handler_class): to_remove = self._get_handler(handler_class) if not to_remove: logging.warning('Error we should have an element to remove') else: self.handlers.remove(to_remove) self.logger.removeHandler(to_remove)
Delete a specific handler from our logger.
def region_path(cls, project, region):
    """Return a fully-qualified region resource name string."""
    template = "projects/{project}/regions/{region}"
    return google.api_core.path_template.expand(
        template, project=project, region=region
    )
Return a fully-qualified region string.
def emulate_rel(self, key_code, value, timeval):
    """Emulate a relative change of the mouse cursor as an event object."""
    return self.create_event_object("Relative", key_code, value, timeval)
Emulate the relative changes of the mouse cursor.
def shared_options(rq):
    "Default class options to pass to the CLI commands."
    options = {
        'url': rq.redis_url,
        'config': None,
    }
    # The remaining options mirror attributes of the same name on rq.
    for attr in ('worker_class', 'job_class', 'queue_class', 'connection_class'):
        options[attr] = getattr(rq, attr)
    return options
Default class options to pass to the CLI commands.
def save(self): check_bind(self) creating = self.id is None if creating and not self.__class__._has_schema_method("create"): raise MethodNotSupported("{} do not support creating.".format(self.__class__.__name__)) if not creating and not self.__class__._has_schema_method("upda...
Either create or persist changes on this object back to the One Codex server.
def stop(self) -> None: end_time = datetime.datetime.now() elapsed_time = end_time - self.start_time elapsed_seconds = elapsed_time.seconds hours, remainder = divmod(int(elapsed_seconds), 3600) minutes, seconds = divmod(remainder, 60) as_str = "%sh %sm %ss %dms" % ( ...
Stop the timer and emit a nice log
def find_best_server(filename):
    """Find the best running server to open a notebook with.

    Picks the server whose notebook_dir is the longest prefix of
    ``filename``; returns None when no running server matches.
    """
    candidates = [
        info for info in notebookapp.list_running_servers()
        if filename.startswith(info['notebook_dir'])
    ]
    if not candidates:
        return None
    return max(candidates, key=lambda info: len(info['notebook_dir']))
Find the best server to open a notebook with.
def refresh(self):
    """Re-trace modules modified since the time they were traced."""
    module_names = list(self._fnames.keys())
    self.trace(module_names, _refresh=True)
Re-traces modules modified since the time they were traced.
def hello_rivescript():
    """Receive an inbound SMS and send a reply from RiveScript."""
    from_number = request.values.get("From", "unknown")
    message = request.values.get("Body")

    reply = "(Internal error)"
    if message:
        reply = bot.reply(from_number, message)

    response = twilio.twiml.Response()
    response.message(reply)
    return str(response)
Receive an inbound SMS and send a reply from RiveScript.
def transform(self, attrs):
    """Perform all actions on a given attribute dict.

    The three passes run in a fixed order: collect, add missing
    implementations, then fill the attributes.
    """
    self.collect(attrs)
    self.add_missing_implementations()
    self.fill_attrs(attrs)
Perform all actions on a given attribute dict.
def generate_static_site(self, output_root=None, extra_context=None): self.app.config['BUILD_PATH'] = output_root self.call_hook("generate", self, output_root, extra_context) if output_root is not None: self.app.config['FREEZER_DESTINATION'] = os.path.realpath(output_root) se...
Bake out static site
def find_module(self, fullname, path=None): if not fullname.startswith(self._module_name + '.'): return None submodule = fullname[len(self._module_name) + 1:] parts = submodule.split('.') if self._teams: team = parts.pop(0) else: team = None ...
Looks up the table based on the module path.
def _type_description(self): iexec = self._element.target if iexec is not None: result = "method() | " + iexec.summary else: result = "Type Method: points to executable in module." return result
Gets the completion description for a TypeExecutable.
def _proportions(self): return self._slice.proportions( axis=self._axis, include_mr_cat=self._include_mr_cat )
ndarray representing slice proportions along correct axis.
def mark(self): def pos(text, index): return ParseError.loc_info(text, index) @Parser def mark_parser(text, index): res = self(text, index) if res.status: return Value.success(res.index, (pos(text, index), res.value, pos(text, res.index))) ...
Mark the line and column information of the result of this parser.
def _interpret_oserror(exc, cwd, cmd): if len(cmd) == 0: raise dbt.exceptions.CommandError(cwd, cmd) if os.name == 'nt': _handle_windows_error(exc, cwd, cmd) else: _handle_posix_error(exc, cwd, cmd) raise dbt.exceptions.InternalException( 'Unhandled exception in _interpre...
Interpret an OSError exc and raise the appropriate dbt exception.
def res(arg):
    """Convert the argument into an IRI ref.

    ``arg`` may be a plain value or a callable taking the context; in the
    latter case it is resolved against ``ctx`` before conversion.
    """
    def _res(ctx):
        _arg = arg(ctx) if callable(arg) else arg
        # Bug fix: convert the *resolved* value. The original computed
        # `_arg` and then passed the unresolved `arg` to I(), so callable
        # arguments were never actually resolved.
        return I(_arg)
    return _res
Convert the argument into an IRI ref
def getDayStart(self, dateTime):
    """Ensure local time and return midnight at the start of that day."""
    localized = ensure_localtime(dateTime)
    return localized.replace(hour=0, minute=0, second=0, microsecond=0)
Ensure local time and get the beginning of the day
def _validate_other_keys(optional, types, missing, validated, data, to_validate): errors = [] for key in to_validate: value = data[key] if key in optional: errors.extend( _validate_optional_key( key, missing, value, validat...
Validate the rest of the keys present in the data.
def prepend_schema(self, name):
    """Prepend the schema name to ``name`` when a schema is configured."""
    if self.schema:
        return '.'.join([self.schema, name])
    return name
Prepend schema name to 'name' when a schema is specified
def removetmp():
    """Remove the temporary files created by gettemp.

    Files we lack permission to delete are silently left in place.
    """
    for tmp_path in _tmp_paths:
        if not os.path.exists(tmp_path):
            continue
        try:
            os.remove(tmp_path)
        except PermissionError:
            pass
Remove the temporary files created by gettemp
def create(cls, selection, config, **kwargs): if selection['target'] is not None: return cls.create_from_source(selection['target'], config, **kwargs) else: target_skydir = wcs_utils.get_target_skydir(selection) return cls.cre...
Create an ROIModel instance.
def parse(self, rec): final_studies = [] for study in rec.studies: source_data = self._parse_study(study.metadata["Study File Name"], ["Source Name", "Sample Name", "Comment[ENA_SAMPLE]"]) if source_data: study.nodes = s...
Retrieve row data from files associated with the ISATabRecord.
def translate_v3(vec, amount):
    """Return a new Vec3 that is ``vec`` translated by ``amount`` on every axis."""
    x, y, z = vec.x + amount, vec.y + amount, vec.z + amount
    return Vec3(x, y, z)
Return a new Vec3 that is translated version of vec.
def close(self): import sys, os for store in self.stores: if hasattr(store, 'save'): store.save(reimport=False) path, filename = os.path.split(store._filename) modname = filename[:-3] if modname in sys.modules: del sys.modul...
Commit changes and close the database.
def update(self, value): "Updates the progress bar to a new value." if value <= 0.1: value = 0 assert 0 <= value <= self.maxval self.currval = value if not self._need_update() or self.finished: return if not self.start_time: self.start_...
Updates the progress bar to a new value.
def raises(self) -> T.List[DocstringRaises]:
    """Return the exceptions indicated in the docstring."""
    raise_keywords = {"raises", "raise", "except", "exception"}
    return [
        DocstringRaises.from_meta(meta)
        for meta in self.meta
        if meta.args[0] in raise_keywords
    ]
Return exceptions indicated in docstring.
def add_f95_to_env(env): try: F95Suffixes = env['F95FILESUFFIXES'] except KeyError: F95Suffixes = ['.f95'] try: F95PPSuffixes = env['F95PPFILESUFFIXES'] except KeyError: F95PPSuffixes = [] DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes, sup...
Add Builders and construction variables for f95 to an Environment.
def _IncrementNestLevel():
    """Increment the per-thread nest level of imports.

    On the first (outermost) import in a thread, the set of seen names is
    reset as well.
    """
    local = _import_local
    if getattr(local, 'nest_level', None) is None:
        local.nest_level = 0
    if local.nest_level == 0:
        local.names = set()
    local.nest_level += 1
Increments the per thread nest level of imports.
async def popHiveKey(self, path):
    """Remove and return the value of a key in the cell default hive.

    Requires the ('hive:pop', *path) permission.
    """
    self.user.allowed(('hive:pop',) + path)
    return await self.cell.hive.pop(path)
Remove and return the value of a key in the cell default hive
def _load(self, scale=0.001): with open_workbook(self.path) as wb_: for sheet in wb_.sheets(): if sheet.name in ['Plot of AllBands', ]: continue ch_name = OLI_BAND_NAMES.get(sheet.name.strip()) if ch_name != self.bandname: ...
Load the Landsat OLI relative spectral responses
def to_serializable_dict(self, attrs_to_serialize=None, rels_to_expand=None, rels_to_serialize=None, key_modifications=None): return self.todict( attrs_to_serialize=attrs_to_serialize, rels_to_expand=r...
An alias for `todict`
def my_version():
    """Return the version, checking both packaged and development locations."""
    if os.path.exists(resource_filename(__name__, 'version')):
        return resource_string(__name__, 'version')
    # Development checkout: read ../version relative to this file. The `with`
    # closes the handle (the original open(...).read() leaked it).
    version_path = os.path.join(os.path.dirname(__file__), "..", "version")
    with open(version_path) as version_file:
        return version_file.read()
Return the version, checking both packaged and development locations
def _checkJobGraphAcylicDFS(self, stack, visited, extraEdges): if self not in visited: visited.add(self) stack.append(self) for successor in self._children + self._followOns + extraEdges[self]: successor._checkJobGraphAcylicDFS(stack, visited, extraEdges) ...
DFS traversal to detect cycles in augmented job graph.
def safe_chmod(path, mode):
    """Set the permissions mode on path, but only if it differs from the
    current mode."""
    current = stat.S_IMODE(os.stat(path).st_mode)
    if current != mode:
        os.chmod(path, mode)
Set the permissions mode on path, but only if it differs from the current mode.
def io_check(*args, func=None):
    """Check that every argument is a file-like object (io.IOBase).

    When ``func`` is not given, the calling function's name is taken from
    the stack for the error message.

    Raises:
        IOObjError: naming the offending type and the calling function.
    """
    func = func or inspect.stack()[2][3]
    for candidate in args:
        if not isinstance(candidate, io.IOBase):
            name = type(candidate).__name__
            raise IOObjError(
                f'Function {func} expected file-like object, {name} got instead.')
Check if arguments are file-like object.
def filter_oauth_params(params):
    """Removes all non oauth parameters from a dict or a list of params."""
    def is_oauth(pair):
        return pair[0].startswith("oauth_")

    pairs = list(params.items()) if isinstance(params, dict) else params
    return [pair for pair in pairs if is_oauth(pair)]
Removes all non oauth parameters from a dict or a list of params.
def _fire_task(self, exclude=None):
    """Build and send the watcher emails over one shared connection."""
    connection = mail.get_connection(fail_silently=True)
    connection.open()
    recipients = self._users_watching(exclude=exclude)
    for message in self._mails(recipients):
        connection.send_messages([message])
Build and send the emails as a celery task.
def _gauss_funct(p, fjac=None, x=None, y=None, err=None, weights=None): if p[2] != 0.0: Z = (x - p[1]) / p[2] model = p[0] * np.e ** (-Z ** 2 / 2.0) else: model = np.zeros(np.size(x)) status = 0 if weights is not None: if err is not None: prin...
Defines the gaussian function to be used as the model.
def samples(self): names = self.series.dimensions for n, offset in enumerate(self.series.offsets): dt = datetime.timedelta(microseconds=offset * 1000) d = {"ts": self.ts + dt} for name in names: d[name] = getattr(self.series, name)[n] yield...
Yield samples as dictionaries, keyed by dimensions.
def verify_verify(self, id, token):
    """Verify the token of a specific verification."""
    response = self.request('verify/' + str(id), params={'token': token})
    return Verify().load(response)
Verify the token of a specific verification.
def _stringify_column(self, column_index): table_column = TableTranspose(self.table)[column_index] prior_cell = None for row_index in range(self.start[0], self.end[0]): cell, changed = self._check_interpret_cell(table_column[row_index], prior_cell, row_index, column_index) ...
Same as _stringify_row but for columns.
def generate_hash(self, length=30):
    """Generate a random alphanumeric string of the given length.

    Uses ``random.SystemRandom`` (OS entropy source).
    """
    import random
    import string

    chars = string.ascii_letters + string.digits
    pick = random.SystemRandom().choice
    # `token`, not `hash`: the original shadowed the builtin. `_` marks the
    # unused loop variable.
    token = ''.join(pick(chars) for _ in range(length))
    return token
Generate random string of given length
def conrad(self): repo = self.def_repos_dict["conrad"] log = self.log_path + "conrad/" lib = self.lib_path + "conrad_repo/" repo_name = log[:-1].split("/")[-1] lib_file = "PACKAGES.TXT" md5_file = "CHECKSUMS.md5" log_file = "ChangeLog.txt" if not os.path.e...
Create the slackers local library.
def init_tables(self): for model in self.models: _log.debug('Checking for table {0}'.format(model)) assert not model.__table__.exists(self.session.bind) _log.debug('Creating {0}'.format(self.models)) self.migration_model.metadata.create_all( self.session.bind,...
Create all tables relative to this package
def checkpat(self, pattern):
    """Check for errors in a user-defined regex pattern.

    Raises:
        BadUserDefinedPatternError: when the pattern does not compile.
    """
    if pattern is None:
        return
    try:
        re.match(pattern, "")
    except re.error:
        print3("\nBad user-defined singular pattern:\n\t%s\n" % pattern)
        raise BadUserDefinedPatternError
check for errors in a regex pattern
def flush(self, parser):
    """Flush all current commands to the GLIR interpreter."""
    if self._verbose:
        # A string verbose setting selects what to show; True shows everything.
        show = self._verbose if isinstance(self._verbose, str) else None
        self.show(show)
    queued = self.clear()
    parser.parse(self._filter(queued, parser))
Flush all current commands to the GLIR interpreter.
def connect_to_apple_tv(details, loop, protocol=None, session=None): service = _get_service_used_to_connect(details, protocol) if session is None: session = ClientSession(loop=loop) airplay = _setup_airplay(loop, session, details) if service.protocol == PROTOCOL_DMAP: return DmapAppleTV(...
Connect and logins to an Apple TV.
def updateMesh(self, polydata):
    """Overwrite the actor's polygonal mesh with ``polydata``; returns self."""
    self.poly = polydata
    mapper = self.mapper
    mapper.SetInputData(polydata)
    mapper.Modified()
    return self
Overwrite the polygonal mesh of the actor with a new one.
def view_rect(self) -> QRectF:
    """Return the boundaries of the view in scene coordinates."""
    viewport = self.viewport()
    top_left = self.mapToScene(0, 0)
    bottom_right = self.mapToScene(viewport.width() - 1, viewport.height() - 1)
    return QRectF(top_left, bottom_right)
Return the boundaries of the view in scene coordinates
def _is_under_root(self, full_path): if (path.abspath(full_path) + path.sep)\ .startswith(path.abspath(self.root) + path.sep): return True else: return False
Guard against arbitrary file retrieval.
def render(self):
    """Render the widget to its template, or '' when the field is unreadable."""
    # prepare_data() runs unconditionally, matching the original call order.
    data = self.prepare_data()
    if not self.field.readable:
        return ''
    return self.env.template.render(self.template, **data)
Renders widget to template
def from_response(cls, response): hash_key = None range_key = None if 'KeySchema' in response: attrs = dict(((d['AttributeName'], DynamoKey(d['AttributeName'], d['AttributeType'])) for d in response['AttributeDefinitions'])) ...
Create a Table from returned Dynamo data
def estimate_map(interface, state, label, inp): out = interface.output(0) centers = {} for row in inp: row = row.strip().split(state["delimiter"]) if len(row) > 1: x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]] cluster = mi...
Find the cluster `i` that is closest to the datapoint `e`.
def setup_client_rpc(self):
    """Set up the RPC client for the DFA agent."""
    self.clnt = rpc.DfaRpcClient(
        self._url,
        constants.DFA_SERVER_QUEUE,
        exchange=constants.DFA_EXCHANGE,
    )
Setup RPC client for dfa agent.
async def dispatch(request): if session: message = '' data = await request.json() try: log.info("Dispatching {}".format(data)) _id = data.get('token') if not _id: message = '"token" field required for calibration requests' r...
Routes commands to subhandlers based on the command field in the body.
def _GetDebuggeeDescription(self):
    """Format the debuggee description from the configured debuggee labels."""
    parts = [
        self._debuggee_labels[label]
        for label in _DESCRIPTION_LABELS
        if label in self._debuggee_labels
    ]
    return '-'.join(parts)
Formats debuggee description based on debuggee labels.
def drop(self, force=False): if not self.exists: logger.info("Schema named `{database}` does not exist. Doing nothing.".format(database=self.database)) elif (not config['safemode'] or force or user_choice("Proceed to delete entire schema `%s`?" % self.database, de...
Drop the associated schema if it exists