code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def reset(self):
    """Ask the external analyzer process to reset itself.

    Writes the ``T`` command to the subprocess stdin; a failed write is
    reported rather than raised.
    """
    self.report("sending reset")
    try:
        stdin = self.p.stdin
        stdin.write(b"T\n")
        stdin.flush()
    except IOError:
        self.report("could not send reset command")
Tell the external analyzer to reset itself
def _get_files_to_lint(self, external_directories):
    """Collect the sorted, de-duplicated list of Python files to lint.

    Gathers ``*.py`` files from *external_directories*, distribution
    packages and py_modules, plus ``setup.py``, then drops paths that
    match any exclusion pattern.
    """
    candidates = []
    for directory in external_directories:
        candidates.extend(_all_files_matching_ext(directory, "py"))
    for package in (self.distribution.packages or []):
        candidates.extend(_all_files_matching_ext(package, "py"))
    for module_name in (self.distribution.py_modules or []):
        candidates.append(os.path.realpath(module_name + ".py"))
    candidates.append(os.path.join(os.getcwd(), "setup.py"))
    unique = {os.path.realpath(path) for path in candidates}
    exclusions = ["*.egg/*", "*.eggs/*"] + self.exclusions
    return sorted(path for path in unique
                  if not _is_excluded(path, exclusions))
Get files to lint.
def sample_string(self, individual=-1):
    """Return the VCF entry as it appears in the vcf file: the base
    entry plus per-sample info columns, tab separated.
    """
    info = self.get_sample_info(individual=individual)
    columns = zip(*info.values())
    sample_fields = [':'.join(str(field) for field in column)
                     for column in columns]
    return '\t'.join([str(self), '\t'.join(sample_fields)])
Returns the VCF entry as it appears in the vcf file
def list_vrf(self):
    """List VRFs and return the result as a JSON string.

    On a NIPAP error, returns a JSON error object instead of raising.
    """
    try:
        vrfs = VRF.list()
    except NipapError as e:  # fixed: 'except X, e' is Python-2-only syntax
        return json.dumps({'error': 1, 'message': e.args,
                           'type': type(e).__name__})
    return json.dumps(vrfs, cls=NipapJSONEncoder)
List VRFs and return JSON encoded result.
def unique_everseen(iterable, filterfalse_=itertools.filterfalse):
    """Yield the unique elements of *iterable*, preserving first-seen order."""
    seen = set()
    remember = seen.add
    for item in filterfalse_(seen.__contains__, iterable):
        remember(item)
        yield item
Unique elements, preserving order.
def change_dir():
    """chdir to $HADOOPY_CHDIR when that environment variable is set.

    Both the attempt and any failure are logged to stderr; nothing is
    raised.
    """
    try:
        target = os.environ['HADOOPY_CHDIR']
    except KeyError:
        return
    sys.stderr.write('HADOOPY: Trying to chdir to [%s]\n' % target)
    try:
        os.chdir(target)
    except OSError:
        sys.stderr.write('HADOOPY: Failed to chdir to [%s]\n' % target)
Change the local directory if the HADOOPY_CHDIR environmental variable is provided
def move_where_clause_to_column(self, column='condition', key=None):
    """Move the accumulated WHERE clause into a selected column.

    Clears ``self.conditions``/``self.params`` and adds a column whose
    expression is the AND-joined conditions (constant ``1`` when empty).
    """
    if not self.conditions:
        expr, params = '1', []
    else:
        expr = " AND ".join(self.conditions)
        params = self.params
        self.conditions = []
        self.params = []
    self.add_column('({0}) AS {1}'.format(expr, column), key or column, params)
Move whole WHERE clause to a column named `column`.
def nest(thing):
    """Flatten *thing* with TensorFlow's nest util when available,
    otherwise wrap it in a single-element list.
    """
    tf_nest_util = util.get_module('tensorflow.python.util')
    if not tf_nest_util:
        return [thing]
    return tf_nest_util.nest.flatten(thing)
Use tensorflows nest function if available, otherwise just wrap object in an array
def type(self) -> str:
    """Return ``'single'`` or ``'multi'`` based on the pipette model name.

    Raises RuntimeError when the model name contains neither word.
    """
    model = self.name
    for kind in ('single', 'multi'):
        if kind in model:
            return kind
    raise RuntimeError("Bad pipette model name: {}".format(model))
One of `'single'` or `'multi'`.
def variables(template):
    """Return the set of variable names used in a URI template."""
    names = set()
    for varlist in TEMPLATE.findall(template):
        if varlist[0] in OPERATOR:
            varlist = varlist[1:]
        for spec in varlist.split(','):
            name = spec.split(':')[0]
            if name.endswith('*'):
                name = name[:-1]
            names.add(name)
    return names
Returns the set of keywords in a uri template
def close(self):
    """Invoke all registered callbacks, then mark the SOAP message closed.

    Idempotent: a second call is a no-op.
    """
    if self.closed:
        return
    for func, arglist in self.callbacks:
        func(*arglist)  # fixed: apply() was removed in Python 3
    self.closed = True
Invoke all the callbacks, and close off the SOAP message.
def do_stacktrace(self) -> None:
    """Print a stack trace of the event-loop thread to ``self._sout``."""
    loop_frame = sys._current_frames()[self._event_loop_thread_id]
    traceback.print_stack(loop_frame, file=self._sout)
Print a stack trace from the event loop thread
def validate(self):
    """Run base validation, then require diagonal cells to be DSM/MDM
    and off-diagonal cells to be DMM/MDM instances.

    Raises the matrix error listing every offending cell position.
    """
    super().validate()
    dsm_msg = ('Matrix at [%s:%s] is not an instance of '
               'DesignStructureMatrix or MultipleDomainMatrix.')
    ddm_msg = ('Matrix at [%s:%s] is not an instance of '
               'DomainMappingMatrix or MultipleDomainMatrix.')
    problems = []
    for i, row in enumerate(self.data):
        for j, cell in enumerate(row):
            if i == j:
                if not isinstance(cell, (DesignStructureMatrix,
                                         MultipleDomainMatrix)):
                    problems.append(dsm_msg % (i, j))
            elif not isinstance(cell, (DomainMappingMatrix,
                                       MultipleDomainMatrix)):
                problems.append(ddm_msg % (i, j))
    if problems:
        raise self.error('\n'.join(problems))
Base validation + each cell is instance of DSM or MDM.
def _getEventsByWeek(self, request, year, month):
    """Return this site's events for the given month, grouped by week."""
    site_home = request.site.root_page
    return getAllEventsByWeek(request, year, month, home=site_home)
Return the events in this site for the given month grouped by week.
def update_channels(self):
    """Scan RC/servo parameters to learn which channels provide which
    inputs, caching the channel numbers on self (-1 / defaults when unset).

    NOTE(review): RC_OPTION values 63 and 65 both assign
    ``override_channel`` — looks like a copy/paste slip, kept as-is
    pending confirmation against the parameter docs.
    """
    self.interlock_channel = -1
    self.override_channel = -1
    self.zero_I_channel = -1
    self.no_vtol_channel = -1
    self.rsc_out_channel = 9
    self.fwd_thr_channel = 10
    for ch in range(1, 16):
        option = self.get_mav_param("RC%u_OPTION" % ch, 0)
        if option == 32:
            self.interlock_channel = ch
        elif option == 63:
            self.override_channel = ch
        elif option == 64:
            self.zero_I_channel = ch
        elif option == 65:
            self.override_channel = ch
        elif option == 66:
            self.no_vtol_channel = ch
        function = self.get_mav_param("SERVO%u_FUNCTION" % ch, 0)
        if function == 32:
            self.rsc_out_channel = ch
        if function == 70:
            self.fwd_thr_channel = ch
update which channels provide input
def read_plain_int96(file_obj, count):
    """Read *count* 96-bit plain-encoded ints (8-byte q + 4-byte i each,
    little endian) from *file_obj*.
    """
    raw = struct.unpack(b"<" + b"qi" * count, file_obj.read(12 * count))
    highs = raw[0::2]
    lows = raw[1::2]
    return [(high << 32) | low for high, low in zip(highs, lows)]
Read `count` 96-bit ints using the plain encoding.
def __generate_location(self):
    """Re-spawn the cloud just past the right edge of the screen with a
    fresh random speed and vertical position.
    """
    screen_width = world.get_backbuffer_size().X
    self.movement_speed = random.randrange(10, 25)
    spawn_x = screen_width + self.image.get_width()
    self.coords = R.Vector2(spawn_x, random.randrange(0, 100))
Reset the location of the cloud once it has left the viewable area of the screen.
def add_new_data_port(self):
    """Add a new OUTPUT data port with default values and select it.

    ValueError from port creation or selection is silently ignored.
    """
    try:
        new_port_ids = gui_helper_state_machine.add_data_port_to_selected_states(
            'OUTPUT', int, [self.model])
        if new_port_ids:
            self.select_entry(new_port_ids[self.model.state])
    except ValueError:
        pass
Add a new port with default values and select it
def _show_mpl_backend_errors(self): if not self.external_kernel: self.shellwidget.silent_execute( "get_ipython().kernel._show_mpl_backend_errors()") self.shellwidget.sig_prompt_ready.disconnect( self._show_mpl_backend_errors)
Show possible errors when setting the selected Matplotlib backend.
def mask_image_data(data):
    """Mask image data using the alpha band, when one is present.

    For 2- or 4-band integer images the last band is treated as alpha:
    pixels at the dtype minimum become NaN and the alpha band itself is
    dropped via band selection.  Raises ValueError for non-integer data.
    """
    if data.bands.size not in (2, 4):
        return data
    if not np.issubdtype(data.dtype, np.integer):
        raise ValueError("Only integer datatypes can be used as a mask.")
    alpha_mask = data.data[-1, :, :] == np.iinfo(data.dtype).min
    data = data.astype(np.float64)
    masked = da.stack([da.where(alpha_mask, np.nan, data.data[band, :, :])
                       for band in range(data.shape[0])])
    data.data = masked
    return data.sel(bands=BANDS[data.bands.size - 1])
Mask image data if alpha channel is present.
def start_ray_processes(self):
    """Start all of the Ray processes on this node."""
    redirect_msg = ("Process STDOUT and STDERR is being redirected to {}."
                    .format(self._logs_dir))
    logger.info(redirect_msg)
    self.start_plasma_store()
    self.start_raylet()
    if PY3:
        self.start_reporter()
    if self._ray_params.include_log_monitor:
        self.start_log_monitor()
Start all of the processes on the node.
def check_complete(self):
    """Runs completion flow for this task if it's finished.

    If either the remote or the local side is still running, re-arms
    the check timer and returns without completing.
    """
    logger.debug('Running check_complete for task {0}'.format(self.name))
    # Still running somewhere: poll again later instead of completing.
    if self.remote_not_complete() or self.local_not_complete():
        self._start_check_timer()
        return
    return_code = self.completed_task()
    # Annotate stderr with any signals this framework sent the process.
    if self.terminate_sent:
        self.stderr += '\nDAGOBAH SENT SIGTERM TO THIS PROCESS\n'
    if self.kill_sent:
        self.stderr += '\nDAGOBAH SENT SIGKILL TO THIS PROCESS\n'
    # A remote-side failure forces a nonzero return code.
    if self.remote_failure:
        return_code = -1
        self.stderr += '\nAn error occurred with the remote machine.\n'
    # Drop the file handles before reporting completion.
    self.stdout_file = None
    self.stderr_file = None
    self._task_complete(success=True if return_code == 0 else False,
                        return_code=return_code,
                        stdout=self.stdout,
                        stderr=self.stderr,
                        start_time=self.started_at,
                        complete_time=datetime.utcnow())
Runs completion flow for this task if it's finished.
def random_init_map(interface, state, label, inp):
    """Assign each input datapoint randomly to one of the ``k`` clusters
    and emit one (possibly merged) center per cluster.

    Missing values (per ``state['missing_vals']``) are read as 0.
    """
    import random
    out = interface.output(0)
    centers = {}
    for row in inp:
        row = row.strip().split(state["delimiter"])
        if len(row) <= 1:
            continue
        x = [(0 if row[i] in state["missing_vals"] else float(row[i]))
             for i in state["X_indices"]]
        cluster = random.randint(0, state['k'] - 1)
        vertex = state['create'](x, 1.0)
        if cluster in centers:
            centers[cluster] = state["update"](centers[cluster], vertex)
        else:
            centers[cluster] = vertex
    # fixed: dict.iteritems() is Python 2 only; items() works everywhere.
    for cluster, values in centers.items():
        out.add(cluster, values)
Assign each input datapoint randomly to one of the `k` clusters.
def _get_method(self, request):
    """Route documentation requests to ``get_documentation``; defer every
    other request to the parent class's method resolution.
    """
    if self._is_doc_request(request):
        return self.get_documentation
    return super(DocumentedResource, self)._get_method(request)
Override to check if this is a documentation request.
def mpub(self, topic, *messages):
    """Publish *messages* to *topic* on a random connection and return
    the awaited response.
    """
    with self.random_connection() as conn:
        conn.mpub(topic, *messages)
        return self.wait_response()
Publish messages to a topic
def __process_gprest_response(self, r=None, restType='GET'):
    """Log and unpack a REST response, returning its parsed JSON body.

    Returns None when no response was given, or when a 200/201 body
    cannot be parsed; non-success codes are logged and returned parsed.
    """
    if r is None:
        logging.info('No response for REST ' + restType + ' request')
        return None
    status = r.status_code
    logging.info('HTTP status code: %s', status)
    if status in (requests.codes.ok, requests.codes.created):
        body = r.json()
        if not body:
            logging.warning('Unable to parse JSON body.')
            logging.warning(r.text)
            return None
        logging.info('REST response status: %s'
                     % body.get(self.__RESPONSE_STATUS_KEY))
        logging.info('REST response message: %s'
                     % body.get(self.__RESPONSE_MESSAGE_KEY))
        return body
    logging.warning('Invalid HTTP status code.')
    logging.warning(r.text)
    return r.json()
Returns the processed response for rest calls
def export_data(filename_or_fobj, data, mode="w"):
    """Write *data* to the given filename/file object and return the
    underlying file object, or return *data* unchanged when no target
    is given.
    """
    if filename_or_fobj is None:
        return data
    get_filename_and_fobj(filename_or_fobj, mode=mode)  # kept for side effects
    target = Source.from_file(filename_or_fobj, mode=mode, plugin_name=None)
    target.fobj.write(data)
    target.fobj.flush()
    return target.fobj
Return the object ready to be exported or only data if filename_or_fobj is not passed.
def run(self):
    """Main thread for processing messages.

    Pulls messages off the in-queue until a ``None`` sentinel arrives.
    Each message is handled individually; failures are reported back as
    GENERIC_ERROR status replies.  Any exit from the loop is treated as
    fatal: the process kills itself with SIGKILL.
    """
    self.OnStartup()
    try:
        while True:
            message = self._in_queue.get()
            # None is the sentinel value that terminates the loop.
            if message is None:
                break
            try:
                self.HandleMessage(message)
            except Exception as e:
                logging.warning("%s", e)
                # Report the failure to the server as a status message.
                self.SendReply(
                    rdf_flows.GrrStatus(
                        status=rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR,
                        error_message=utils.SmartUnicode(e)),
                    request_id=message.request_id,
                    response_id=1,
                    session_id=message.session_id,
                    task_id=message.task_id,
                    message_type=rdf_flows.GrrMessage.Type.STATUS)
                if flags.FLAGS.pdb_post_mortem:
                    pdb.post_mortem()
    except Exception as e:
        logging.error("Exception outside of the processing loop: %r", e)
    finally:
        # Leaving the loop for any reason means the client is broken;
        # terminate hard so a supervisor can restart it.
        logging.fatal("The client has broken out of its processing loop.")
        os.kill(os.getpid(), signal.SIGKILL)
Main thread for processing messages.
def print_menuconfig(kconf):
    """Print every menu entry of the configuration, framed by a banner."""
    banner = "\n======== {} ========\n".format(kconf.mainmenu_text)
    print(banner)
    print_menuconfig_nodes(kconf.top_node.list, 0)
    print("")
Prints all menu entries for the configuration.
def extractFromHTML(html, blur=5):
    """Extracts plain text from HTML content.

    NOTE(review): this is Python 2 code — ``unicode`` does not exist on
    Python 3 and the stdlib ``formatter`` module was removed in 3.10.
    """
    # Coerce byte strings to unicode, ignoring undecodable bytes.
    try:
        html = unicode(html, errors='ignore')
    except TypeError:
        pass
    assert isinstance(html, unicode)
    _file = StringIO()
    f = formatter.AbstractFormatter(formatter.DumbWriter(_file))
    p = TextExtractor()
    p.pathBlur = blur
    p.feed(html)
    p.close()
    text = p.get_plaintext()
    # Drop lone punctuation tokens, collapse whitespace, and remove
    # runs of dashes/dots left over from page layout.
    text = re.sub("\s[\(\),;\.\?\!](?=\s)", " ", text).strip()
    text = re.sub("[\n\s]+", " ", text).strip()
    text = re.sub("\-{2,}", "", text).strip()
    text = re.sub("\.{2,}", "", text).strip()
    return text
Extracts text from HTML content.
def resolve(self, key):
    """Resolve *key* to an object instance via its registration.

    Raises KeyError when the key was never registered.
    """
    entry = self._registrations.get(key)
    if entry is None:
        raise KeyError("Unknown key: '{0}'".format(key))
    return entry.resolve(self, key)
Resolves the requested key to an object instance, raising a KeyError if the key is missing
def ajax_count_plus(self, slug):
    """Increment the view count for *slug* and write the status back to
    the client as JSON.
    """
    bumped = MWiki.view_count_plus(slug)
    payload = {'status': 1 if bumped else 0}
    # json.dump streams through self.write() (handler acts as the fp).
    return json.dump(payload, self)
Increment the view count by one via AJAX and return the status as JSON.
def enable_napp(cls, mgr):
    """Enable one NApp through its NAppManager object, logging progress.

    Missing files and permission problems are logged, not raised.
    """
    try:
        if mgr.is_enabled():
            return
        LOG.info(' Enabling...')
        mgr.enable()
        LOG.info(' Enabled.')
    except (FileNotFoundError, PermissionError) as exception:
        LOG.error(' %s', exception)
Install one NApp using NAppManager object.
def _metaconfigure(self, argv=None):
    """Initialize the metaconfig for provisioning self.

    Builds the metaconfig parser, attaches it as a parent of this
    parser, loads the metaconfig's option values from *argv*, and lets
    it provision this instance.  No-op when no metaconfig class exists
    or when self is itself the metaconfig class.
    """
    metaconfig = self._get_metaconfig_class()
    if not metaconfig:
        return
    # Avoid recursing when self is the metaconfig class itself.
    if self.__class__ is metaconfig:
        return
    # The metaparser must not clash with or duplicate the main
    # parser's help handling.
    override = {
        'conflict_handler': 'resolve',
        'add_help': False,
        'prog': self._parser_kwargs.get('prog'),
    }
    self._metaconf = metaconfig(**override)
    metaparser = self._metaconf.build_parser(
        options=self._metaconf._options,
        permissive=False,
        **override)
    # Register the metaparser as a parent so its options are accepted.
    self._parser_kwargs.setdefault('parents', [])
    self._parser_kwargs['parents'].append(metaparser)
    self._metaconf._values = self._metaconf.load_options(
        argv=argv)
    self._metaconf.provision(self)
Initialize metaconfig for provisioning self.
def filenames(self) -> Tuple[str, ...]:
    """Sorted tuple of the names of all handled |NetCDFFile| objects."""
    names = set()
    for folder in self.folders.values():
        names.update(folder.keys())
    return tuple(sorted(names))
A |tuple| of names of all handled |NetCDFFile| objects.
def AddClientLabels(self, client_id, owner, labels):
    """Attach *labels* (owned by *owner*) to the given client.

    Raises db.UnknownClientError for unknown client ids.
    """
    if client_id not in self.metadatas:
        raise db.UnknownClientError(client_id)
    owner_labels = self.labels.setdefault(client_id, {}).setdefault(owner, set())
    for label in labels:
        owner_labels.add(utils.SmartUnicode(label))
Attaches a user label to a client.
def full(self, external=False):
    """Return the full image URL (respecting ``max_size``), or None when
    no file is stored.
    """
    if not self.filename:
        return None
    return self.fs.url(self.filename, external=external)
Get the full image URL with respect to ``max_size``.
def header(*msg, level='h1', separator=" ", print_out=print):
    """Print a text-mode header block.

    level 'h0' prints a box, 'h1' an underlined title, 'h2' a
    tab-indented title, anything else a double-tab-indented one.
    """
    text = separator.join(str(part) for part in msg)
    if level == 'h0':
        box_len = 80
        edge = '+' + '-' * (box_len + 2)
        print_out(edge)
        print_out("| %s" % text)
        print_out(edge)
    elif level == 'h1':
        print_out("")
        print_out(text)
        print_out('-' * 60)
    elif level == 'h2':
        print_out('\t%s' % text)
        print_out('\t' + ('-' * 40))
    else:
        print_out('\t\t%s' % text)
        print_out('\t\t' + ('-' * 20))
Print header block in text mode
def _get_log_format(self, request):
    """Return the operation-log format for *request*, or None when the
    request should not be logged (anonymous user, untracked HTTP
    method, or an ignored URL).
    """
    user = getattr(request, 'user', None)
    if not user or not request.user.is_authenticated:
        return None
    if request.method.upper() not in self.target_methods:
        return None
    request_url = urlparse.unquote(request.path)
    if any(rule.search(request_url) for rule in self._ignored_urls):
        return None
    return self.format
Return operation log format.
def which_api_version(self, api_call):
    """Return the QualysGuard API version for *api_call*: 1, 2, 'am',
    'was', or False when unrecognized.
    """
    if api_call.endswith('.php'):
        return 1
    if api_call.startswith('api/2.0/'):
        return 2
    if '/am/' in api_call:
        return 'am'
    if '/was/' in api_call:
        return 'was'
    return False
Return QualysGuard API version for api_call specified.
def send_html_mail(subject, message, message_html, from_email, recipient_list,
                   priority=None, fail_silently=False, auth_user=None,
                   auth_password=None, headers=None):
    """Queue an HTML e-mail (plain-text body plus HTML alternative).

    Returns 1, mirroring django.core.mail's send_mail convention.
    """
    from django.utils.encoding import force_text
    from django.core.mail import EmailMultiAlternatives
    from mailer.models import make_message
    # fixed: mutable default argument ({}) is shared between calls.
    headers = headers or {}
    priority = get_priority(priority)
    subject = force_text(subject)
    message = force_text(message)
    msg = make_message(subject=subject, body=message,
                       from_email=from_email, to=recipient_list,
                       priority=priority)
    email = msg.email
    email = EmailMultiAlternatives(email.subject, email.body,
                                   email.from_email, email.to,
                                   headers=headers)
    email.attach_alternative(message_html, "text/html")
    msg.email = email
    msg.save()
    return 1
Function to queue HTML e-mails
def create_directories(self, create_project_dir=True):
    """Create the initial project directories; call once for new projects."""
    project_dir = self.target if create_project_dir else None
    return task.create_directories(self.datadir, self.sitedir, project_dir)
Call once for new projects to create the initial project directories.
def reward_battery(self):
    """Apply the battery-depletion reward (and terminal flag) when the
    battery mode is configured and its condition holds.
    """
    if 'battery' not in self.mode:
        return
    mode = self.mode['battery']
    # fixed: the original tested "mode and mode" — the duplicate was redundant.
    if mode and self.__test_cond(mode):
        self.logger.debug('Battery out')
        self.player.stats['reward'] += mode['reward']
        self.player.game_over = self.player.game_over or mode['terminal']
Add a battery level reward
def _bddnode(root, lo, hi):
    """Return the unique (interned) BDD node for (root, lo, hi).

    A node whose branches are the same object collapses to that branch.
    """
    if lo is hi:
        return lo
    key = (root, lo, hi)
    try:
        return _NODES[key]
    except KeyError:
        node = _NODES[key] = BDDNode(*key)
        return node
Return a unique BDD node.
def equals(self, rest_object):
    """Compare with another NURESTObject.

    Dirty objects never compare equal.  Comparison uses ``id`` when both
    sides have one, then ``local_id``, and otherwise returns False.
    Raises TypeError when the other object is not a NURESTObject.
    """
    if self._is_dirty:
        return False
    if rest_object is None:
        return False
    if not isinstance(rest_object, NURESTObject):
        raise TypeError('The object is not a NURESTObject %s' % rest_object)
    # Different REST entity types can never be equal.
    if self.rest_name != rest_object.rest_name:
        return False
    if self.id and rest_object.id:
        return self.id == rest_object.id
    if self.local_id and rest_object.local_id:
        return self.local_id == rest_object.local_id
    return False
Compare with another object
def add_to(self, parent, name=None, index=None):
    """Attach this element to *parent* and return self for chaining."""
    parent.add_child(self, name=name, index=index)
    return self
Add element to a parent.
def DeleteSubjects(self, subjects, sync=False):
    """Delete each subject in *subjects*, forwarding the sync flag."""
    for item in subjects:
        self.DeleteSubject(item, sync=sync)
Delete multiple subjects at once.
def evaluate(grid):
    """Return the game value for the player to move, assuming perfect play.

    -1 for a lost position, 0 for a draw (no successors), otherwise the
    negamax over all successor positions.
    """
    if is_won(grid):
        return -1
    moves = successors(grid)
    if not moves:
        return 0
    return -min(evaluate(move) for move in moves)
Return the value for the player to move, assuming perfect play.
def _inverse_i(self, y, i): lb = self._lb[self._index(i)] ub = self._ub[self._index(i)] al = self._al[self._index(i)] au = self._au[self._index(i)] if 1 < 3: if not lb <= y <= ub: raise ValueError('argument of inverse must be within the given bounds') if y < lb + al: return (lb - al) + 2 * (al * (y - lb))**0.5 elif y < ub - au: return y else: return (ub + au) - 2 * (au * (ub - y))**0.5
return inverse of y in component i
def __parse_fc_data(fc_data):
    """Parse the daily forecast entries from the json section into a
    list of normalized forecast dicts.
    """
    fc = []
    for day in fc_data:
        fcdata = {
            # Map the textual weather description onto a condition dict.
            CONDITION: __cond_from_desc(
                __get_str(day, __WEATHERDESCRIPTION)
            ),
            TEMPERATURE: __get_float(day, __MAXTEMPERATURE),
            MIN_TEMP: __get_float(day, __MINTEMPERATURE),
            MAX_TEMP: __get_float(day, __MAXTEMPERATURE),
            SUN_CHANCE: __get_int(day, __SUNCHANCE),
            RAIN_CHANCE: __get_int(day, __RAINCHANCE),
            RAIN: __get_float(day, __MMRAINMAX),
            MIN_RAIN: __get_float(day, __MMRAINMIN),
            MAX_RAIN: __get_float(day, __MMRAINMAX),
            # Snow is not reported separately in this feed.
            SNOW: 0,
            WINDFORCE: __get_int(day, __WIND),
            WINDDIRECTION: __get_str(day, __WINDDIRECTION),
            DATETIME: __to_localdatetime(__get_str(day, __DAY)),
        }
        # Attach the icon URL to the condition after construction.
        fcdata[CONDITION][IMAGE] = day[__ICONURL]
        fc.append(fcdata)
    return fc
Parse the forecast data from the json section.
def stream_skypipe_output(endpoint, name=None):
    """Generator yielding skypipe data read from *endpoint* until EOF.

    Subscribes on entry and always unsubscribes and closes the socket
    on exit.
    """
    name = name or ''
    socket = ctx.socket(zmq.DEALER)
    socket.connect(endpoint)
    try:
        socket.send_multipart(sp_msg(SP_CMD_LISTEN, name))
        while True:
            msg = socket.recv_multipart()
            try:
                data = parse_skypipe_data_stream(msg, name)
                if data:
                    yield data
            except EOFError:
                # fixed (PEP 479): raising StopIteration inside a
                # generator becomes RuntimeError on Python 3.7+.
                return
    finally:
        socket.send_multipart(sp_msg(SP_CMD_UNLISTEN, name))
        socket.close()
Generator for reading skypipe data
def client(self, name=None):
    """Initialize a backend's client with given name or default.

    Returns a NullClient when no backend name is configured.  This is
    an old-style (``yield from``) asyncio coroutine.
    """
    name = name or self.default
    if not name:
        return NullClient(self, None, None)
    params = self.backends_hash[name]
    # Pick the client class for the URL scheme, defaulting to raw TCP.
    ccls = self.backends_schemas.get(params.scheme, TCPClient)
    return (yield from ccls(self, params.hostname, params.port).connect())
Initialize a backend's client with given name or default.
def _check_for_boolean_pair_reduction(self, kwargs): if 'reduction_forcing_pairs' in self._meta_data: for key1, key2 in self._meta_data['reduction_forcing_pairs']: kwargs = self._reduce_boolean_pair(kwargs, key1, key2) return kwargs
Check if boolean pairs should be reduced in this resource.
def _findPoint(self, name, force_read=True): for point in self.points: if point.properties.name == name: if force_read: point.value return point raise ValueError("{} doesn't exist in controller".format(name))
Used by getter and setter functions
def stack_sparse_frame(frame):
    """Stack a sparse frame into a one-column 'foo' DataFrame indexed by
    a (row, column) MultiIndex.

    Only makes sense when fill_value is NaN; raises TypeError otherwise.
    """
    lengths = [s.sp_index.npoints for _, s in frame.items()]
    # Total number of observed (non-fill) values across all columns.
    nobs = sum(lengths)
    # Column codes: each column index repeated by its observed count.
    minor_codes = np.repeat(np.arange(len(frame.columns)), lengths)
    inds_to_concat = []
    vals_to_concat = []
    for _, series in frame.items():
        if not np.isnan(series.fill_value):
            raise TypeError('This routine assumes NaN fill value')
        int_index = series.sp_index.to_int_index()
        inds_to_concat.append(int_index.indices)
        vals_to_concat.append(series.sp_values)
    major_codes = np.concatenate(inds_to_concat)
    stacked_values = np.concatenate(vals_to_concat)
    index = MultiIndex(levels=[frame.index, frame.columns],
                       codes=[major_codes, minor_codes],
                       verify_integrity=False)
    lp = DataFrame(stacked_values.reshape((nobs, 1)), index=index,
                   columns=['foo'])
    return lp.sort_index(level=0)
Only makes sense when fill_value is NaN
def find_water_flow(self, world, water_path):
    """Record, for each cell, the index of the neighbor direction its
    water flows toward; cells without a path are left untouched.
    """
    for x in range(world.width - 1):
        for y in range(world.height - 1):
            path = self.find_quick_path([x, y], world)
            if not path:
                continue
            tx, ty = path
            flow_dir = [tx - x, ty - y]
            for key, direction in enumerate(DIR_NEIGHBORS_CENTER):
                if direction == flow_dir:
                    water_path[y, x] = key
Find the flow direction for each cell in heightmap
def stat_smt_query(func: Callable):
    """Decorator measuring call count and cumulative solver time for an
    SMT query-check function (only while statistics are enabled).
    """
    from functools import wraps
    stat_store = SolverStatistics()

    @wraps(func)  # added: preserve the wrapped function's name/docstring
    def function_wrapper(*args, **kwargs):
        if not stat_store.enabled:
            return func(*args, **kwargs)
        stat_store.query_count += 1
        begin = time()
        result = func(*args, **kwargs)
        end = time()
        stat_store.solver_time += end - begin
        return result
    return function_wrapper
Measures statistics for annotated smt query check function
def _handle_error(self, response):
    """Raise an auth_error describing the failing HTTP response.

    400 means an invalid auth token; other 4xx are client-side errors,
    5xx server-side, anything else generic.
    """
    status_code = response.status_code
    if 400 <= status_code < 500:
        if status_code == 400:
            message = "The query could not be completed. Invalid auth token."
        else:
            message = ("The query could not be completed. There was a "
                       "client-side error with your request.")
    elif 500 <= status_code < 600:
        message = ("The query could not be completed. There was a "
                   "server-side error with your request.")
    else:
        message = "The query could not be completed."
    raise auth_error(message)
Handles all responses which return an error status code
def load_site_config(name):
    """Load and return the named site configuration as a dict."""
    config_file = os.path.join(CONFIG_PATH, CONFIG_SITES_PATH,
                               name + CONFIG_EXT)
    return _load_config_json(config_file)
Load and return site configuration as a dict.
def protocols(self):
    """Return (and lazily cache) the available load balancing protocols."""
    if self._protocols is None:
        uri = "/loadbalancers/protocols"
        _, body = self.method_get(uri)
        self._protocols = [entry["name"] for entry in body["protocols"]]
    return self._protocols
Returns a list of available load balancing protocols.
def ind_zero_freq(self):
    """Index of the first point whose frequency is >= 0.

    Raises ValueError when all frequencies are negative.
    """
    position = np.searchsorted(self.frequencies, 0)
    if position >= len(self.frequencies):
        raise ValueError("No positive frequencies found")
    return position
Index of the first point for which the freqencies are equal or greater than zero.
def getIndexes(cls):
    """Return the list of database indexes belonging to *cls*, including
    indexes on its raba-list columns.
    """
    con = RabaConnection(cls._raba_namespace)
    idxs = []
    for idx in con.getIndexes(rabaOnly=True):
        if idx[2] == cls.__name__:
            idxs.append(idx)
            continue
        for k in cls.columns:
            if (RabaFields.isRabaListField(getattr(cls, k))
                    and idx[2] == con.makeRabaListTableName(cls.__name__, k)):
                idxs.append(idx)
    return idxs
returns a list of the indexes of a class
def list_styles(style_name):
    """Print every distinct style entry of *style_name* together with
    the token types that use it.
    """
    style = get_style_by_name(style_name)
    keys = list(style)[0][1]
    Styles = namedtuple("Style", keys)
    grouped = {}
    for ttype, ndef in style:
        entry = Styles(**ndef)
        grouped.setdefault(entry, []).append(ttype)
    for ndef, ttypes in grouped.items():
        print(ndef)
        for ttype in sorted(ttypes):
            print("\t%s" % str(ttype).split("Token.", 1)[1])
Just list all different styles entries
def load(self, data, many=None, partial=None):
    """Deserialize *data*, merging any resumption-token kwargs into the
    top-level result data.
    """
    result = super(ResumptionTokenSchema, self).load(data, many=many,
                                                     partial=partial)
    token_kwargs = result.data.get('resumptionToken', {}).get('kwargs', {})
    result.data.update(token_kwargs)
    return result
Deserialize a data structure to an object.
def validate(self, value):
    """Accept *value* unless it is empty while blanks are disallowed."""
    if value == '' and not self.blank:
        self.error_message = 'Can not be empty. Please provide a value.'
        return False
    self._choice = value
    return True
The most basic validation
def _from_dict(cls, _dict):
    """Initialize a MessageContext object from a json dictionary."""
    mapping = (('global', 'global_', MessageContextGlobal),
               ('skills', 'skills', MessageContextSkills))
    args = {dest: klass._from_dict(_dict.get(src))
            for src, dest, klass in mapping if src in _dict}
    return cls(**args)
Initialize a MessageContext object from a json dictionary.
def setImportDataInterface(self, values):
    """Store the subset of *values* that are valid import data
    interfaces, warning when any were dropped.
    """
    valid = self.getImportDataInterfacesList()
    accepted = [value for value in values if value in valid]
    if len(accepted) < len(values):
        logger.warn("Some Interfaces weren't added...")
    self.Schema().getField('ImportDataInterface').set(self, accepted)
Set the list of import data interfaces, keeping only values that are valid interfaces.
def handle_var(value, context):
    """Resolve a template-tag argument: pre-parsed expression, quoted
    literal, template variable, or the raw value when resolution fails.
    """
    if isinstance(value, (FilterExpression, Variable)):
        return value.resolve(context)
    quoted = QUOTED_STRING.search(value)
    if quoted:
        return quoted.group("noquotes")
    try:
        return Variable(value).resolve(context)
    except VariableDoesNotExist:
        return value
Handle template tag variable
def depth(self):
    """Context-manager generator: raise the indentation one level for
    the duration of the ``with`` body (no-op when indentation is off).
    """
    if self.indentation is None:
        yield
        return
    saved_previous = self.previous_indent
    self.previous_indent = self.indent
    self.indent += self.indentation
    yield
    self.indent = self.previous_indent
    self.previous_indent = saved_previous
Increase the level of indentation by one.
def pprint_label(self):
    """The pretty-printed label string for the Dimension, including its
    formatted unit when one is set.
    """
    if self.unit is None:
        unit = ''
    else:
        unit = type(self.unit)(self.unit_format).format(unit=self.unit)
    return bytes_to_unicode(self.label) + bytes_to_unicode(unit)
The pretty-printed label string for the Dimension
def list(self, request):
    """Search this model's doctypes by (partial) name and return the
    matches sorted by name.
    """
    query = get_query_params(request).get("search", "")
    base = self.model.get_base_class()
    doctypes = indexable_registry.families[base]
    matches = []
    for doctype, klass in doctypes.items():
        name = klass._meta.verbose_name.title()
        if query.lower() in name.lower():
            matches.append({"name": name, "doctype": doctype})
    matches.sort(key=lambda item: item["name"])
    return Response(dict(results=matches))
Search the doctypes for this model.
def unstash(self):
    """Pop the stash previously made by EPAB, if one exists."""
    if not self.stashed:
        LOGGER.error('no stash')
        return
    LOGGER.info('popping stash')
    self.repo.git.stash('pop')
    self.stashed = False
Pops the last stash if EPAB made a stash before
def green(cls):
    """Make the console text foreground color green."""
    attributes = cls._get_text_attributes()
    attributes = (attributes & ~win32.FOREGROUND_MASK) | win32.FOREGROUND_GREEN
    cls._set_text_attributes(attributes)
Make the text foreground color green.
def process_view(self, request, view_func, view_args, view_kwargs):
    """Run *view_func* under the request's profiler (when present),
    hiding the profiling query parameters from the view and restoring
    request.GET afterwards.
    """
    profiler = getattr(request, 'profiler', None)
    if not profiler:
        return None
    original_get = request.GET
    request.GET = original_get.copy()
    for param in ('profile', 'show_queries', 'show_stats'):
        request.GET.pop(param, None)
    try:
        return profiler.runcall(view_func, request, *view_args, **view_kwargs)
    finally:
        request.GET = original_get
Run the profiler on _view_func_.
def cli(env, volume_id, sortby, columns):
    """List file storage snapshots for *volume_id* in a sorted table."""
    file_manager = SoftLayer.FileStorageManager(env.client)
    snapshots = file_manager.get_file_volume_snapshot_list(
        volume_id, mask=columns.mask())
    table = formatting.Table(columns.columns)
    table.sortby = sortby
    for snapshot in snapshots:
        row = [value or formatting.blank() for value in columns.row(snapshot)]
        table.add_row(row)
    env.fout(table)
List file storage snapshots.
def gpg_command(args, env=None):
    """Build the GPG command line: the binary path followed by *args*."""
    if env is None:
        env = os.environ
    binary = get_gnupg_binary(neopg_binary=env.get('NEOPG_BINARY'))
    return [binary] + args
Prepare common GPG command line arguments.
def touch_if_touching(self):
    """Touch the parent when it is being touched, and touch self when
    the parent touches this relation.
    """
    if self._touching_parent():
        self.get_parent().touch()
    parent_touches_us = self.get_parent().touches(self._relation_name)
    if parent_touches_us:
        self.touch()
Touch if the parent model is being touched.
def _make_policies(self):
    """Materialize the 'scalingPolicies' dicts as AutoScalePolicy objects."""
    self.policies = [AutoScalePolicy(self.manager, policy_dict, self)
                     for policy_dict in self.scalingPolicies]
Convert the 'scalingPolicies' dictionary into AutoScalePolicy objects.
def get(self, name: str, default: Any = None) -> Any:
    """Return the first stored value for *name*, or *default*."""
    values = super().get(name, [default])
    return values[0]
Return the first value, either the default or actual
def worker(workers):
    """Start a Superset Celery worker for async SQL query execution.

    Deprecated in favor of the plain 'celery worker' command.
    """
    logging.info(
        "The 'superset worker' command is deprecated. Please use the 'celery "
        "worker' command instead.")
    concurrency = workers or config.get('SUPERSET_CELERY_WORKERS')
    if concurrency:
        celery_app.conf.update(CELERYD_CONCURRENCY=concurrency)
    worker = celery_app.Worker(optimization='fair')
    worker.start()
Starts a Superset worker for async SQL query execution.
def _call(self, x, out=None): if out is None: out = self.range.zero() else: out.set_zero() out[self.index] = x return out
Extend ``x`` from the subspace.
def filter(cls, filters, iterable):
    """Chain the given filter(s) over *iterable* and return the lazily
    filtered iterable.
    """
    if isinstance(filters, Filter):
        filters = [filters]
    for flt in filters:
        iterable = flt.generator(iterable)
    return iterable
Returns the elements in `iterable` that pass given `filters`
def collect_gaps(blast, use_subject=False):
    """Yield the gap sizes between adjacent HSPs after sorting by query
    (or subject) start coordinate.
    """
    if use_subject:
        start = lambda hsp: hsp.sstart
        stop = lambda hsp: hsp.sstop
    else:
        start = lambda hsp: hsp.qstart
        stop = lambda hsp: hsp.qstop
    blast.sort(key=start)
    for left, right in zip(blast, blast[1:]):
        if stop(left) < start(right):
            yield start(right) - stop(left)
Collect the gaps between adjacent HSPs in the BLAST file.
def _manhattan_distance(vec_a, vec_b): if len(vec_a) != len(vec_b): raise ValueError('len(vec_a) must equal len(vec_b)') return sum(map(lambda a, b: abs(a - b), vec_a, vec_b))
Return manhattan distance between two lists of numbers.
def post_build_time_coverage(self):
    """Collect all years covered by the bundle's metadata, identity and
    partitions.

    NOTE(review): the computed set is neither returned nor stored —
    confirm whether that is intended.
    """
    from ambry.util.datestimes import expand_to_years
    years = set()
    if self.metadata.about.time:
        years.update(expand_to_years(self.metadata.about.time))
    if self.identity.btime:
        years.update(expand_to_years(self.identity.btime))
    for partition in self.partitions:
        years |= set(partition.time_coverage)
Collect all of the time coverage for the bundle.
def update_license(license_id, **kwargs):
    """Replace fields of the License with *license_id* using the given
    keyword values, returning the formatted JSON response on success.
    """
    updated_license = pnc_api.licenses.get_specific(id=license_id).content
    # changed: native dict.items() replaces the six-style iteritems helper.
    for key, value in kwargs.items():
        if value:
            setattr(updated_license, key, value)
    response = utils.checked_api_call(
        pnc_api.licenses, 'update', id=int(license_id), body=updated_license)
    if response:
        return utils.format_json(response.content)
Replace the License with given ID with a new License
def mkdir(self, foldername):
    """Create subfolder *foldername* and return the new JFSFolder."""
    target_url = posixpath.join(self.path, foldername)
    result = self.jfs.post(target_url, {'mkDir': 'true'})
    self.sync()
    return result
Create a new subfolder and return the new JFSFolder
def skipgram_fasttext_batch(centers, contexts, num_tokens, subword_lookup,
                            dtype, index_dtype):
    """Create a skip-gram training batch whose centers are expressed as
    a sparse CSR matrix over subword units.
    """
    context_ids = mx.nd.array(contexts[2], dtype=index_dtype)
    data, row, col = subword_lookup(centers)
    center_ids = mx.nd.array(centers, dtype=index_dtype)
    centers_csr = mx.nd.sparse.csr_matrix(
        (data, (row, col)), dtype=dtype,
        shape=(len(center_ids), num_tokens))
    return centers_csr, context_ids, center_ids
Create a batch for SG training objective with subwords.
def nested_dict_to_list(path, dic, exclusion=None):
    """Flatten a nested dict into [path, key, value] triples, skipping
    any key that contains one of the *exclusion* substrings
    (default: ``'__self'``).
    """
    result = []
    exclusion = ['__self'] if exclusion is None else exclusion
    for key, value in dic.items():
        if any(marker in key for marker in exclusion):
            continue
        if isinstance(value, dict):
            # fixed: propagate the caller's exclusion list into the
            # recursion (it was previously reset to the default).
            result.extend(nested_dict_to_list(path + key + "/", value,
                                              exclusion=exclusion))
        else:
            if path.endswith("/"):
                path = path[:-1]
            result.append([path, key, value])
    return result
Transform nested dict to list
def done(self, result):
    """Save the dialog geometry before closing so it can be restored
    later, then delegate to QDialog.done.
    """
    self._geometry = self.geometry()
    QtWidgets.QDialog.done(self, result)
save the geometry before dialog is close to restore it later
def file_list(self, load):
    """Return a sorted list of files from the dominant environment.

    Aggregates the file lists of all configured fileserver backends
    (or only the requested 'fsbackend'), optionally filtered by the
    'prefix' key in *load*.
    """
    # Legacy 'env' key is dropped; only 'saltenv' is honored below.
    if 'env' in load:
        load.pop('env')
    ret = set()
    if 'saltenv' not in load:
        return []
    # Normalize saltenv to a text type.
    if not isinstance(load['saltenv'], six.string_types):
        load['saltenv'] = six.text_type(load['saltenv'])
    for fsb in self.backends(load.pop('fsbackend', None)):
        fstr = '{0}.file_list'.format(fsb)
        if fstr in self.servers:
            ret.update(self.servers[fstr](load))
    # Optional path-prefix filter.
    prefix = load.get('prefix', '').strip('/')
    if prefix != '':
        ret = [f for f in ret if f.startswith(prefix)]
    return sorted(ret)
Return a list of files from the dominant environment
def insert_blob(filename, hosts=None, table=None):
    """Upload *filename* into the given blob table and return its URL."""
    conn = connect(hosts)
    container = conn.get_blob_container(table)
    with open(filename, 'rb') as blob_file:
        digest = container.put(blob_file)
    return '{server}/_blobs/{table}/{digest}'.format(
        server=conn.client.active_servers[0], table=table, digest=digest)
Upload a file into a blob table
def prt_tsv(prt, data_nts, **kws):
    """Print a tab-separated table: headers first, then the data rows.

    Returns the value of the data-printing helper.
    """
    prt_tsv_hdr(prt, data_nts, **kws)
    return prt_tsv_dat(prt, data_nts, **kws)
Print tab-separated table headers and data
def batch_norm(inputs, training, data_format):
    """Performs a batch normalization using a standard set of parameters
    and logs the op via the resnet log helper.
    """
    # The channel axis depends on NCHW vs NHWC layout.
    outputs = tf.layers.batch_normalization(
        inputs=inputs, axis=1 if data_format == 'channels_first' else 3,
        momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True,
        scale=True, training=training, fused=True)
    resnet_log_helper.log_batch_norm(
        input_tensor=inputs, output_tensor=outputs, momentum=_BATCH_NORM_DECAY,
        epsilon=_BATCH_NORM_EPSILON, center=True, scale=True,
        training=training)
    return outputs
Performs a batch normalization using a standard set of parameters.
def _validate_wavelengths(self, wave):
    """Return validated wavelengths for sampling, falling back to
    ``self.waveset`` when *wave* is None.

    Raises SynphotError when no wavelengths are available at all.
    """
    if wave is not None:
        processed = self._process_wave_param(wave)
        utils.validate_wavelengths(processed)
        return processed * self._internal_wave_unit
    if self.waveset is None:
        raise exceptions.SynphotError(
            'self.waveset is undefined; '
            'Provide wavelengths for sampling.')
    return self.waveset
Validate wavelengths for sampling.
def isopen(self) -> bool:
    """True while a backing file is open (i.e. it exists and has a
    truthy id).
    """
    backing = self._file
    return bool(backing.id) if backing is not None else False
State of backing file.
def resolve(self, key, keylist):
    """Hook for resolving ambiguous keys; this default implementation
    refuses and raises AmbiguousKeyError listing every candidate.
    """
    candidates = str(sorted(keylist))
    raise AmbiguousKeyError("Ambiguous key " + repr(key)
                            + ", could be any of " + candidates)
Hook to resolve ambiguities in selected keys
def split_string(x: str, n: int) -> List[str]:
    """Split *x* into consecutive chunks of length *n* (the final chunk
    may be shorter).
    """
    chunks = []
    for start in range(0, len(x), n):
        chunks.append(x[start:start + n])
    return chunks
Split string into chunks of length n
def handleNotification(self, req):
    """Handle a JSON-RPC notification by invoking the named service
    method.  Handler failures are deliberately ignored (notifications
    receive no response).
    """
    name = req["method"]
    params = req["params"]
    try:
        handler = getMethodByName(self.service, name)
        handler(*params)
    except Exception:  # fixed: bare 'except:' also trapped SystemExit/KeyboardInterrupt
        pass
handles a notification request by calling the appropriete method the service exposes
def toListString(value):
    """Convert *value* to a list of strings, when possible; otherwise
    raise TypeError.
    """
    if TypeConverters._can_convert_to_list(value):
        value = TypeConverters.toList(value)
    if all(TypeConverters._can_convert_to_string(item) for item in value):
        return [TypeConverters.toString(item) for item in value]
    raise TypeError("Could not convert %s to list of strings" % value)
Convert a value to list of strings, if possible.