code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def request(self, method, url, params=None, **aio_kwargs):
    """Make a signed OAuth request to the provider.

    :param method: HTTP method name (e.g. 'GET').
    :param url: target URL or path (resolved via ``self._get_url``).
    :param params: request parameters; they are folded into the OAuth
        parameters and included in the signature.
    :param aio_kwargs: extra keyword arguments forwarded to ``self._request``.
    :raises ValueError: if the resolved URL already carries a query string.
    """
    oparams = {
        'oauth_consumer_key': self.consumer_key,
        # Per-request nonce: SHA1 hex digest of a random value.
        'oauth_nonce': sha1(str(RANDOM()).encode('ascii')).hexdigest(),
        'oauth_signature_method': self.signature.name,
        'oauth_timestamp': str(int(time.time())),
        'oauth_version': self.version,
    }
    oparams.update(params or {})
    if self.oauth_token:
        oparams['oauth_token'] = self.oauth_token
    url = self._get_url(url)
    # Parameters must arrive via `params` -- presumably so every parameter
    # takes part in the signature; inline query strings are rejected.
    if urlsplit(url).query:
        raise ValueError(
            'Request parameters should be in the "params" parameter, '
            'not inlined in the URL')
    oparams['oauth_signature'] = self.signature.sign(
        self.consumer_secret, method, url,
        oauth_token_secret=self.oauth_token_secret, **oparams)
    self.logger.debug("%s %s", url, oparams)
    return self._request(method, url, params=oparams, **aio_kwargs)
Make a request to provider.
def class_from_string(name):
    """Return a Python class (or any module attribute) from its dotted name.

    :param name: fully qualified name, e.g. ``'package.module.ClassName'``.
    :raises ImportError: if the module cannot be imported.
    :raises AttributeError: if the module has no such attribute.
    """
    import importlib
    module_name, class_name = name.rsplit('.', 1)
    # importlib.import_module returns the module object directly and
    # handles dotted submodules, unlike the bare __import__ /
    # sys.modules dance the original used.
    module = importlib.import_module(module_name)
    return getattr(module, class_name)
Get a python class object from its name
def _collect_headers(self):
    """Collect headers from the model's attribute info columns.

    Iterates the sorted column properties, skipping excluded ones.
    Relationship properties are resolved via ``_collect_relationship``;
    plain columns may be merged with a many-to-one foreign key.  Helpers
    may return a single header or a list of them.

    :returns: flat list of header descriptions.
    """
    res = []
    for prop in self.get_sorted_columns():
        main_infos = self._get_prop_infos(prop)
        if self._is_excluded(prop, main_infos):
            continue
        if isinstance(prop, RelationshipProperty):
            main_infos = self._collect_relationship(main_infos, prop, res)
            if not main_infos:
                # Relationship metadata incomplete: warn and skip it.
                print("Maybe there's missing some informations \ about a relationship")
                continue
        else:
            main_infos = self._merge_many_to_one_field_from_fkey(
                main_infos, prop, res
            )
            if not main_infos:
                continue
        # Helpers may hand back one header or a collection of headers.
        if isinstance(main_infos, (list, tuple)):
            res.extend(main_infos)
        else:
            res.append(main_infos)
    return res
Collect headers from the models attribute info col
def start_stream(self, stream_type, **stream_parameters):
    """Start a new stream of the given type with the given parameters.

    Reports an error through ``self.handle_error`` when no type is given.
    """
    if not stream_type:
        self.handle_error("You need to set a stream type")
    else:
        self.subscribe(stream_type, **stream_parameters)
Starts new stream for given type with given parameters
def save_state(self): set_setting('lastSourceDir', self.source_directory.text()) set_setting('lastOutputDir', self.output_directory.text()) set_setting( 'useDefaultOutputDir', self.scenario_directory_radio.isChecked())
Save current state of GUI to configuration file.
def xmlns(source):
    """Return a map of prefix to namespace URI for the given XML source.

    Parsing stops at the first closed element, so only prefixes declared
    before the document body completes are collected.  A seekable
    file-like *source* is rewound to the start afterwards.
    """
    prefix_map = {}
    for event, payload in iterparse(source, ("end", "start-ns", "end-ns")):
        if event == "end":
            # First fully parsed element: all root-level prefixes seen.
            break
        if event == "start-ns":
            prefix, uri = payload
            prefix_map[prefix] = uri
    if hasattr(source, "seek"):
        source.seek(0)
    return prefix_map
Returns a map of prefix to namespace for the given XML file.
def new(cls, package): partname = package.next_partname("/word/header%d.xml") content_type = CT.WML_HEADER element = parse_xml(cls._default_header_xml()) return cls(partname, content_type, element, package)
Return newly created header part.
def _send_invitation(self, enrollment, event): self.log('Sending enrollment status mail to user') self._send_mail(self.config.invitation_subject, self.config.invitation_mail, enrollment, event)
Send an invitation mail to an open enrollment.
def do_step(self, args): self._do_print_from_last_cmd = True self._interp.step_into() return True
Step INTO the next statement
def ping_entry(self, entry): entry_url = '%s%s' % (self.ressources.site_url, entry.get_absolute_url()) categories = '|'.join([c.title for c in entry.categories.all()]) try: reply = self.server.weblogUpdates.extendedPing( self.ressources.current_site.name, self.ressources.blog_url, entry_url, self.ressources.blog_feed, categories) except Exception: try: reply = self.server.weblogUpdates.ping( self.ressources.current_site.name, self.ressources.blog_url, entry_url, categories) except Exception: reply = {'message': '%s is an invalid directory.' % self.server_name, 'flerror': True} return reply
Ping an entry to a directory.
def clear_to_reset(self, config_vars): super(TileManagerState, self).clear_to_reset(config_vars) self.registered_tiles = self.registered_tiles[:1] self.safe_mode = False self.debug_mode = False
Clear to the state immediately after a reset.
def check_image_file_header(filename):
    """Validate that *filename* holds MNIST image data (magic 2051, 28x28).

    :param filename: path readable via ``tf.gfile.Open``.
    :raises ValueError: on a wrong magic number or image dimensions.
    """
    with tf.gfile.Open(filename, 'rb') as f:
        magic = read32(f)
        read32(f)  # skipped 32-bit field (presumably the image count) -- not validated
        rows = read32(f)
        cols = read32(f)
        if magic != 2051:
            raise ValueError('Invalid magic number %d in MNIST file %s' % (magic, f.name))
        if rows != 28 or cols != 28:
            raise ValueError(
                'Invalid MNIST file %s: Expected 28x28 images, found %dx%d' %
                (f.name, rows, cols))
Validate that filename corresponds to images for the MNIST dataset.
def _add_intent_interactive(self, intent_num=0):
    """Interactively prompt for and add a new intent to the intent schema.

    Asks for the intent name, the number of slots, and each slot's name
    and type (either an index into the builtin AMAZON slot types or a
    free-form custom type string).
    """
    print("Name of intent number : ", intent_num)
    slot_type_mappings = load_builtin_slots()
    intent_name = read_from_user(str)
    print("How many slots?")
    num_slots = read_from_user(int)
    slot_list = []
    for i in range(num_slots):
        print("Slot name no.", i + 1)
        slot_name = read_from_user(str).strip()
        print("Slot type? Enter a number for AMAZON supported types below,"
              "else enter a string for a Custom Slot")
        print(json.dumps(slot_type_mappings, indent=True))
        slot_type_str = read_from_user(str)
        try:
            slot_type = slot_type_mappings[int(slot_type_str)]['name']
        except (ValueError, KeyError, IndexError, TypeError):
            # Input is not a valid index into the builtin mappings:
            # treat the raw string as a custom slot type name.  The
            # original bare `except:` also swallowed KeyboardInterrupt
            # and SystemExit, which an interactive tool must not do.
            slot_type = slot_type_str
        slot_list += [self.build_slot(slot_name, slot_type)]
    self.add_intent(intent_name, slot_list)
Interactively add a new intent to the intent schema object
def _iter_backtrack(ex, rand=False):
    """Iterate through all satisfying points using a backtracking algorithm.

    :param ex: expression node; `One` yields the empty point, `Zero`
        yields nothing, anything else is branched on one variable.
    :param rand: when True, pick the branch variable and the 0/1 order
        at random instead of deterministically.
    :yields: dicts mapping variables to 0/1 that satisfy `ex`.
    """
    if ex is One:
        yield dict()
    elif ex is not Zero:
        # Branch variable: random when requested, else the top variable.
        # (The original nested a redundant `if rand else` ternary inside
        # the `if rand:` branch; both arms reduce to this expression.)
        v = random.choice(ex.inputs) if rand else ex.top
        points = [{v: 0}, {v: 1}]
        if rand:
            random.shuffle(points)
        for point in points:
            for soln in _iter_backtrack(ex.restrict(point), rand):
                soln.update(point)
                yield soln
Iterate through all satisfying points using backtrack algorithm.
def process_object(obj): "Hook to process the object currently being displayed." invalid_options = OptsMagic.process_element(obj) if invalid_options: return invalid_options OutputMagic.info(obj)
Hook to process the object currently being displayed.
def __get_file(self, file): file_object = None if file['name'] in request.files: file_object = request.files[file['name']] clean_filename = secure_filename(file_object.filename) if clean_filename == '': return file_object if file_object and self.__allowed_extension( clean_filename, file['extensions']): return file_object elif file['name'] not in request.files and file['required']: return file_object return file_object
Get request file and do a security check
def _adjust_width(self): if self.bar_width > self.max_iter: self.bar_width = int(self.max_iter)
Shrinks bar if number of iterations is less than the bar width
def pong_received(self, payload=None): if self._timer is not None: self._timer.cancel() self._failures = 0 asyncio.async(self.send_ping(payload=payload))
Called when a pong is received: the timeout timer is cancelled, the failure counter is reset, and the next ping is scheduled.
def evaluate_binop_math(self, operation, left, right, **kwargs):
    """Evaluate the given mathematical binary operation on the operands.

    :param operation: key into ``self.binops_math``.
    :param left, right: scalar or list operands; None yields None.
    :returns: a scalar when the result vector has one element, the vector
        otherwise, or None on empty operands or calculation failure.
    :raises ValueError: for an unknown operation.
    """
    if operation not in self.binops_math:
        raise ValueError("Invalid math binary operation '{}'".format(operation))
    if left is None or right is None:
        return None
    # Promote scalars to single-element lists so the vector code path
    # handles both forms uniformly.
    if not isinstance(left, (list, ListIP)):
        left = [left]
    if not isinstance(right, (list, ListIP)):
        right = [right]
    if not left or not right:
        return None
    try:
        vect = self._calculate_vector(operation, left, right)
        if len(vect) > 1:
            return vect
        return vect[0]
    except Exception:
        # Calculation errors deliberately yield a null result instead of
        # propagating; the original bare `except:` also caught
        # KeyboardInterrupt/SystemExit, which this narrows away.
        return None
Evaluate given mathematical binary operation with given operands.
def validate_headers(self): super().validate() self.validate_header(self.channeldir, self.channelinfo, CHANNEL_INFO_HEADER) self.validate_header(self.channeldir, self.contentinfo, CONTENT_INFO_HEADER) if self.has_exercises(): self.validate_header(self.channeldir, self.exercisesinfo, EXERCISE_INFO_HEADER) self.validate_header(self.channeldir, self.questionsinfo, EXERCISE_QUESTIONS_INFO_HEADER)
Check if CSV metadata files have the right format.
def find_tasks(self, overrides): tasks = self.default_tasks() configuration = self.collector.configuration for image in list(configuration["images"].keys()): path = configuration.path(["images", image, "tasks"], joined="images.{0}.tasks".format(image)) nxt = configuration.get(path, {}) tasks.update(nxt) if overrides: tasks.update(overrides) self.tasks = tasks return tasks
Find the custom tasks and record the associated image with each task
def copy(self, klass=_x): chain = super().copy() new_chain = klass(chain._args[0]) new_chain._tokens = [[ chain.compose, [], {}, ]] return new_chain
A new chain beginning with the current chain tokens and argument.
def delete(self): resp = self.r_session.delete(self.database_url) resp.raise_for_status()
Deletes the current database from the remote instance.
def setup(self, *args, **kwargs): if self.comp is None: self.comp = Compiler(*args, **kwargs) else: self.comp.setup(*args, **kwargs)
Set parameters for the compiler.
def url(self): return reverse('archives:thread_view', args=[self.mailinglist.name, self.thread.subject_token])
Shortcut to get thread url
def verify(opts):
    """Verify that one or more resources were downloaded successfully.

    :param opts: parsed options (resources, output_dir, all,
        resource_names, quiet).
    :returns: 0 when everything is present and valid, 1 otherwise.
    """
    resources = _load(opts.resources, opts.output_dir)
    if opts.all:
        opts.resource_names = ALL
    invalid = _invalid(resources, opts.resource_names)
    if invalid:
        if not opts.quiet:
            print("Invalid or missing resources: {}".format(', '.join(invalid)))
        return 1
    if not opts.quiet:
        print("All resources successfully downloaded")
    return 0
Verify that one or more resources were downloaded successfully.
def scan(self):
    """Probe every 7-bit I2C address; return those that acknowledge a read.

    An OSError from the read means no device answered at that address.
    """
    present = []
    for address in range(0, 0x80):
        try:
            self._i2c_bus.read_byte(address)
        except OSError:
            continue
        present.append(address)
    return present
Try to read a byte from each address; an OSError means no device is present there.
def estimate_band_connection(prev_eigvecs, eigvecs, prev_band_order):
    """Order phonon eigenvectors by overlap with the previous step (from phonopy).

    For each previous band, greedily pick the not-yet-claimed current band
    with the largest |overlap|, then re-map through *prev_band_order*.
    """
    overlap = np.abs(np.dot(prev_eigvecs.conjugate().T, eigvecs))
    nbands = len(overlap)
    connection_order = []
    for row in overlap:
        best_val = 0
        # Scan high-to-low so ties resolve to the lowest index, matching
        # the original iteration order.
        for idx in reversed(range(nbands)):
            if idx in connection_order:
                continue
            if row[idx] > best_val:
                best_val = row[idx]
                best_idx = idx
        connection_order.append(best_idx)
    return [connection_order[b] for b in prev_band_order]
A function to order the phonon eigenvectors taken from phonopy
def download_seq_file(self, outdir, force_rerun=False): uniprot_fasta_file = download_uniprot_file(uniprot_id=self.id, filetype='fasta', outdir=outdir, force_rerun=force_rerun) self.sequence_path = uniprot_fasta_file
Download and load the UniProt FASTA file
def _text_image(page):
    """Return a Markdown image tag for the page, or None when it has no image.

    Alt text comes from the page's 'label', falling back to 'title'.
    """
    source = _image(page)
    if not source:
        return None
    alt = page.data.get('label') or page.data.get('title')
    return "![%s](%s)" % (alt, source)
Return a Markdown image tag for the page, or None if it has no image.
def append_vobject(self, ical, filename=None): if not filename: filename = self._filename elif filename not in self._reminders: return with self._lock: outdat = self.to_reminders(ical) open(filename, 'a').write(outdat) return Remind._get_uid(outdat)
Append a Remind command generated from the iCalendar to the file
def log_config(verbose=1): if verbose == 0: level = logging.WARNING fmt = "%(module)s: %(message)s" elif verbose == 1: level = logging.INFO fmt = "%(module)s [@%(lineno)s]: %(message)s" else: level = logging.DEBUG fmt = "%(module)s [%(lineno)s]: %(levelname)s: %(message)s" logging.basicConfig(format=fmt, level=level)
Set up logging the way I like it.
def pending():
    """Show upgrades that are ready to be applied.

    Logs the list of pending upgrades; on a RuntimeError from the
    upgrader, logs each message and re-raises.
    """
    upgrader = InvenioUpgrader()
    logger = upgrader.get_logger()
    try:
        upgrades = upgrader.get_upgrades()
        if not upgrades:
            logger.info("All upgrades have been applied.")
            return
        logger.info("Following upgrade(s) are ready to be applied:")
        for u in upgrades:
            logger.info(" * {0} {1}".format(u.name, u.info))
    except RuntimeError as e:
        for msg in e.args:
            # str() replaces the Python 2-only `unicode` builtin, which
            # raises NameError on Python 3.
            logger.error(str(msg))
        raise
Command for showing upgrades ready to be applied.
def _calcDepths(self): depths = self.layerBoundaries.mean(axis=1)[1:] depth_y = [] for y in self.y: if y in ['p23', 'b23', 'nb23']: depth_y = np.r_[depth_y, depths[0]] elif y in ['p4', 'ss4(L23)', 'ss4(L4)', 'b4', 'nb4']: depth_y = np.r_[depth_y, depths[1]] elif y in ['p5(L23)', 'p5(L56)', 'b5', 'nb5']: depth_y = np.r_[depth_y, depths[2]] elif y in ['p6(L4)', 'p6(L56)', 'b6', 'nb6']: depth_y = np.r_[depth_y, depths[3]] else: raise Exception, 'this aint right' return depth_y
return the cortical depth of each subpopulation
def serialize_number(x, fmt=SER_BINARY, outlen=None):
    """Serialize `x` to a string of length `outlen` in format `fmt`.

    :param x: integer to serialize; 0 serializes to the empty string
        before padding.
    :param fmt: SER_BINARY (big-endian base-256 bytes) or SER_COMPACT
        (base-len(COMPACT_DIGITS) using the COMPACT_DIGITS alphabet).
    :param outlen: if given, left-pad the result to exactly this length;
        asserts the unpadded value fits.
    """
    ret = b''
    if fmt == SER_BINARY:
        # Emit big-endian base-256 digits, most significant byte first.
        while x:
            x, r = divmod(x, 256)
            ret = six.int2byte(int(r)) + ret
        if outlen is not None:
            assert len(ret) <= outlen
            ret = ret.rjust(outlen, b'\0')
        return ret
    assert fmt == SER_COMPACT
    while x:
        x, r = divmod(x, len(COMPACT_DIGITS))
        # Slice (not index) so a bytes object comes back on Python 3 too.
        ret = COMPACT_DIGITS[r:r + 1] + ret
    if outlen is not None:
        assert len(ret) <= outlen
        ret = ret.rjust(outlen, COMPACT_DIGITS[0:1])
    return ret
Serializes `x' to a string of length `outlen' in format `fmt'
def contrast(self, color, step):
    """Return a lighter variant of *color* when it is dark, otherwise a darker one.

    Works in HLS space: lightness is shifted by *step* away from the
    color's current side of the light/dark divide.
    """
    hue, lightness, saturation = colorsys.rgb_to_hls(*self.rgb(color))
    delta = -step if self.is_light(color) else step
    return colorsys.hls_to_rgb(hue, lightness + delta, saturation)
if color is dark, will return a lighter one, otherwise darker
def download(self): url = 'http://forms.irs.gov/app/pod/dataDownload/fullData' r = requests.get(url, stream=True) with open(self.zip_path, 'wb') as f: for chunk in r.iter_content(chunk_size=30720): logger.debug('Downloading...') f.write(chunk) f.flush()
Download the archive from the IRS website.
def extract_feed( inpath: str, outpath: str, view: View, config: nx.DiGraph = None ) -> str: config = default_config() if config is None else config config = remove_node_attributes(config, "converters") feed = load_feed(inpath, view, config) return write_feed_dangerously(feed, outpath)
Extract a subset of a GTFS zip into a new file
def sign_message(body: ByteString, secret: Text) -> Text:
    """Compute a message's HMAC-SHA1 signature as ``'sha1=<hexdigest>'``."""
    digest = hmac.new(secret.encode(), body, sha1).hexdigest()
    return 'sha1={}'.format(digest)
Compute a message's signature.
def register_cmdfinalization_hook(self, func: Callable[[plugin.CommandFinalizationData], plugin.CommandFinalizationData]) -> None: self._validate_cmdfinalization_callable(func) self._cmdfinalization_hooks.append(func)
Register a hook to be called after a command is completed, whether it completes successfully or not.
def getFields(cls):
    """Return the set of available fields.

    "raba_id" and "json" are excluded so callers can loop over the result
    safely; both MUST be present in ``cls.columns`` (KeyError otherwise,
    as in the original).
    """
    fields = set(cls.columns)
    fields.remove('json')
    fields.remove('raba_id')
    return fields
Returns a set of the available fields. In order to be able to securely loop over the fields, "raba_id" and "json" are not included in the set.
def use_comparative_sequence_rule_enabler_rule_view(self): self._object_views['sequence_rule_enabler_rule'] = COMPARATIVE for session in self._get_provider_sessions(): try: session.use_comparative_sequence_rule_enabler_rule_view() except AttributeError: pass
Pass through to provider SequenceRuleEnablerRuleLookupSession.use_comparative_sequence_rule_enabler_rule_view
def _handle_call(self, actual_call, stubbed_call): self._actual_calls.append(actual_call) use_call = stubbed_call or actual_call return use_call.return_value
Extends Stub call handling behavior to be callable by default.
def view_contents(token, dstore): try: desc = dstore['oqparam'].description except KeyError: desc = '' data = sorted((dstore.getsize(key), key) for key in dstore) rows = [(key, humansize(nbytes)) for nbytes, key in data] total = '\n%s : %s' % ( dstore.filename, humansize(os.path.getsize(dstore.filename))) return rst_table(rows, header=(desc, '')) + total
Returns the size of the contents of the datastore and its total size
def value(self): dtype = lib.get_data_type(self._data) dvalue = lib.get_data_value(self._data) if dvalue == ffi.NULL: return None return self.python_value(dtype, dvalue)
Return the DATA_OBJECT stored value.
def write_config(cfg):
    """Try to write a freelan config file to the default directory.

    :param cfg: either a FreelanCFG object (lines built via ``.build()``)
        or a pre-built list/tuple of config lines; anything else is
        rejected with a message.

    An existing config file is first backed up with a timestamped
    ``.ORG-`` suffix so its content is never overwritten in place.
    """
    cfg_path = '/usr/local/etc/freelan'
    cfg_file = 'freelan_TEST.cfg'
    cfg_lines = []
    if not isinstance(cfg, FreelanCFG):
        if not isinstance(cfg, (list, tuple)):
            print("Freelan write input can not be processed.")
            return
        cfg_lines = cfg
    else:
        cfg_lines = cfg.build()
    if not os.path.isdir(cfg_path):
        print("Can not find default freelan config directory.")
        return
    cfg_file_path = os.path.join(cfg_path,cfg_file)
    if os.path.isfile( cfg_file_path ):
        # Preserve the old file: move it aside under a timestamped name.
        print("freelan config file already exists - moving to not replace content.")
        ts = time.time()
        backup_file = cfg_file_path+'.ORG-'+datetime.datetime.fromtimestamp(ts).strftime('%y-%m-%d-%H-%M-%S')
        shutil.move(cfg_file_path, backup_file)
    cfg_lines = [cfg_line+'\n' for cfg_line in cfg_lines]
    with open(cfg_file_path, 'w') as cfg_f:
        cfg_f.writelines(cfg_lines)
try writing config file to a default directory
def _other_pipeline_samples(summary_file, cur_samples): cur_descriptions = set([s[0]["description"] for s in cur_samples]) out = [] if utils.file_exists(summary_file): with open(summary_file) as in_handle: for s in yaml.safe_load(in_handle).get("samples", []): if s["description"] not in cur_descriptions: out.append(s) return out
Retrieve samples produced previously by another pipeline in the summary output.
def _get_from(self, required_for=None): line = self.next_line() if line is None: return None elif line.startswith(b'from '): return line[len(b'from '):] elif required_for: self.abort(errors.MissingSection, required_for, 'from') else: self.push_line(line) return None
Parse a from section.
def write_csv_header(mol, csv_writer):
    """Write the CSV header row: id, status, then one column per property."""
    header = ['id', 'status']
    header.extend(mol.properties.keys())
    csv_writer.writerow(header)
Write the csv header
def delete_vpnservice(self, vpnservice): vpnservice_id = self._find_vpnservice_id(vpnservice) ret = self.network_conn.delete_vpnservice(vpnservice_id) return ret if ret else True
Deletes the specified VPN service
def django_js(context, jquery=True, i18n=True, csrf=True, init=True): return { 'js': { 'minified': not settings.DEBUG, 'jquery': _boolean(jquery), 'i18n': _boolean(i18n), 'csrf': _boolean(csrf), 'init': _boolean(init), } }
Include Django.js javascript library in the page
def daily_returns(ts, **kwargs): relative = kwargs.get('relative', 0) return returns(ts, delta=BDay(), relative=relative)
re-compute ts on a daily basis
def run_shell(args: dict) -> int: if args.get('project_directory'): return run_batch(args) shell = CauldronShell() if in_project_directory(): shell.cmdqueue.append('open "{}"'.format(os.path.realpath(os.curdir))) shell.cmdloop() return 0
Run the shell sub command
def split_sentences(s, pad=0):
    """Split text into one sentence per line for formatting.

    Sentences after the first are indented by ``pad + 1`` spaces; every
    line ends with exactly one period.
    """
    lines = []
    for idx, part in enumerate(s.split('. ')):
        indent = ' ' * (pad + 1) if idx > 0 else ''
        body = part[:-1] if part.endswith('.') else part
        lines.append('%s %s.' % (indent, body.strip()))
    return "\n".join(lines)
Split sentences for formatting.
def setSizeMetadata(self, size): assert((self.needMetadataUpdate(CoverImageMetadata.SIZE)) or (self.size == size)) self.size = size self.check_metadata &= ~CoverImageMetadata.SIZE
Set size image metadata to what has been reliably identified.
def _interpolation_escape(self, namespace): for key, val in namespace.items(): if '%' in val: namespace[key] = self.INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val)
Re-escape interpolation strings.
def conflicted(path_to_file):
    """Return True when the file contains a conflict marker line.

    A marker line starts with eight repeats of one of the marker
    characters ('>', '=', '"', '<').
    """
    # `with` guarantees the handle is closed; the original left the file
    # open for the garbage collector to clean up.
    markers = tuple(ch * 8 for ch in '>="<')
    with open(path_to_file, 'r') as handle:
        for line in handle:
            if line.startswith(markers):
                return True
    return False
Whether there are any conflict markers in that file
def serialize(self): if self.response is not None: return {'messaging_type': 'RESPONSE'} if self.update is not None: return {'messaging_type': 'UPDATE'} if self.tag is not None: return { 'messaging_type': 'MESSAGE_TAG', 'tag': self.tag.value, } if self.subscription is not None: return {'messaging_type': 'NON_PROMOTIONAL_SUBSCRIPTION'}
Generates the messaging-type-related part of the message dictionary.
def __arg_comps(self): fun = self.argv[0] if self.argv else '' parsed = salt.utils.args.parse_input( self.argv[1:], condition=False, no_parse=self.opts.get('no_parse', [])) args = parsed[0] kws = parsed[1] return fun, args, kws
Return the function name and the arg list
def pks(self): if self._primary_keys is None: self._primary_keys = list( self.queryset.values_list('pk', flat=True)) return self._primary_keys
Lazy-load the primary keys.
def _get_go2nt(goids, go2nt_all): go_nt_list = [] goids_seen = set() for goid_usr in goids: ntgo = go2nt_all[goid_usr] goid_main = ntgo.id if goid_main not in goids_seen: goids_seen.add(goid_main) go_nt_list.append((goid_main, ntgo)) return go_nt_list
Get user go2nt using main GO IDs, not alt IDs.
def finish(): pretend = context.get('pretend', False) if not pretend and (git.staged() or git.unstaged()): log.err( "You have uncommitted changes in your repo!\n" "You need to stash them before you merge the hotfix branch" ) sys.exit(1) develop = conf.get('git.devel_branch', 'develop') master = conf.get('git.master_branch', 'master') branch = git.current_branch(refresh=True) common.assert_branch_type('hotfix') common.git_checkout(master) common.git_pull(master) common.git_merge(master, branch.name) common.git_checkout(develop) common.git_pull(develop) common.git_merge(develop, branch.name) common.git_branch_delete(branch.name) common.git_prune() common.git_checkout(master)
Merge the current hotfix branch into master and develop.
def build(self, name): "build an object for the specified typename as defined in the schema" if isinstance(name, basestring): type = self.resolver.find(name) if type is None: raise TypeNotFound(name) else: type = name cls = type.name if type.mixed(): data = Factory.property(cls) else: data = Factory.object(cls) resolved = type.resolve() md = data.__metadata__ md.sxtype = resolved md.ordering = self.ordering(resolved) history = [] self.add_attributes(data, resolved) for child, ancestry in type.children(): if self.skip_child(child, ancestry): continue self.process(data, child, history[:]) return data
build an object for the specified typename as defined in the schema
def cli(env, prop): try: if prop == 'network': env.fout(get_network()) return meta_prop = META_MAPPING.get(prop) or prop env.fout(SoftLayer.MetadataManager().get(meta_prop)) except SoftLayer.TransportError: raise exceptions.CLIAbort( 'Cannot connect to the backend service address. Make sure ' 'this command is being ran from a device on the backend ' 'network.')
Find details about this machine.
def tags(norm):
    """Divide a normalized dot-separated tag string into hierarchical layers.

    >>> tags('a.b.c')
    ['a', 'a.b', 'a.b.c']
    """
    segments = norm.split('.')
    layers = []
    for depth in range(1, len(segments) + 1):
        layers.append('.'.join(segments[:depth]))
    return layers
Divide a normalized tag string into hierarchical layers.
def perform_create(self, serializer): if serializer.instance is None: serializer.save(user=self.request.user)
determine user when node is added
def batch(batch_size, items):
    """Batch items into groups of batch_size.

    A ``batch_size`` of None returns everything in a single group; the
    final group may be shorter than ``batch_size``.
    """
    items = list(items)
    if batch_size is None:
        return [items]
    MISSING = object()
    padded_items = items + [MISSING] * (batch_size - 1)
    groups = zip(*[padded_items[i::batch_size] for i in range(batch_size)])
    # Match the pad sentinel by identity (`is not`), not equality: items
    # with a permissive __eq__ could otherwise be dropped by mistake.
    return [[item for item in group if item is not MISSING] for group in groups]
Batch items into groups of batch_size
def auto_detect_serial_unix(preferred_list=['*']): import glob glist = glob.glob('/dev/ttyS*') + glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyACM*') + glob.glob('/dev/serial/by-id/*') ret = [] others = [] for d in glist: matches = False for preferred in preferred_list: if fnmatch.fnmatch(d, preferred): matches = True if matches: ret.append(SerialPort(d)) else: others.append(SerialPort(d)) if len(ret) > 0: return ret ret.extend(others) return ret
try to auto-detect serial ports on unix
def volume_create(self, name, size=100, snapshot=None, voltype=None, availability_zone=None): if self.volume_conn is None: raise SaltCloudSystemExit('No cinder endpoint available') nt_ks = self.volume_conn response = nt_ks.volumes.create( size=size, display_name=name, volume_type=voltype, snapshot_id=snapshot, availability_zone=availability_zone ) return self._volume_get(response.id)
Create a block device
def make_lcc(attrs_dict, globe): attr_mapping = [('central_longitude', 'longitude_of_central_meridian'), ('standard_parallels', 'standard_parallel')] kwargs = CFProjection.build_projection_kwargs(attrs_dict, attr_mapping) if 'standard_parallels' in kwargs: try: len(kwargs['standard_parallels']) except TypeError: kwargs['standard_parallels'] = [kwargs['standard_parallels']] return ccrs.LambertConformal(globe=globe, **kwargs)
Handle Lambert conformal conic projection.
def _init_file_logger(logger, level, log_path, log_size, log_count): if level not in [logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]: level = logging.DEBUG for h in logger.handlers: if isinstance(h, logging.handlers.RotatingFileHandler): if h.level == level: return fh = logging.handlers.RotatingFileHandler( log_path, maxBytes=log_size, backupCount=log_count) fh.setLevel(level) fh.setFormatter(_formatter) logger.addHandler(fh)
one logger only have one level RotatingFileHandler
def senses(self, bestonly=False):
    """Return a list of all predicted senses, deduplicated in order.

    :param bestonly: when True, only the first sense of each word is
        considered.
    """
    l = []
    for word_id, senses, distance in self:
        for sense, confidence in senses:
            if not sense in l:
                l.append(sense)
            # NOTE(review): `break` placement reconstructed from a
            # flattened source -- assumed to stop after the first sense
            # per word; confirm against the original upstream file.
            if bestonly:
                break
    return l
Returns a list of all predicted senses
def update_finished(self): if self.is_updateable: javabridge.call(self.jobject, "updateFinished", "()V") else: logger.critical(classes.get_classname(self.jobject) + " is not updateable!")
Signals the clusterer that updating with new data has finished.
def alchemyencoder(obj):
    """JSON encoder function for SQLAlchemy special classes.

    Dates become ISO-format strings and Decimals become floats; any other
    type yields None, mirroring the original implicit return.
    """
    if isinstance(obj, decimal.Decimal):
        return float(obj)
    if isinstance(obj, datetime.date):
        return obj.isoformat()
    return None
JSON encoder function for SQLAlchemy special classes.
def parse_rss_file(filename: str) -> RSSChannel: root = parse_xml(filename).getroot() return _parse_rss(root)
Parse an RSS feed from a local XML file.
def show(self, show=True): if show and not self.visible: self._show() if not show and self.visible: self._hide()
Show or hide the tray icon.
def reg(self): if self.type.is_displ or self.type.is_phrase: size = core.get_native_size() return base.get_register_name(self.reg_id, size) if self.type.is_reg: return base.get_register_name(self.reg_id, self.size) else: raise exceptions.SarkOperandWithoutReg("Operand does not have a register.")
Name of the register used in the operand.
def Description(self): descr = " ".join((self.getId(), self.aq_parent.Title())) return safe_unicode(descr).encode('utf-8')
Returns searchable data as Description
def request_transfer(subject, recipient, comment): TransferPermission(subject).test() if recipient == (subject.organization or subject.owner): raise ValueError( 'Recipient should be different than the current owner') transfer = Transfer.objects.create( owner=subject.organization or subject.owner, recipient=recipient, subject=subject, comment=comment ) return transfer
Initiate a transfer request
def count_processors():
    """Return how many cores the current job/computer has.

    Honors SLURM environment variables when present: SLURM_NTASKS first,
    then SLURM_JOB_CPUS_PER_NODE (either a plain integer or the
    'n(xN)' form meaning n CPUs on each of N nodes), falling back to
    the machine's CPU count.
    """
    if 'SLURM_NTASKS' in os.environ:
        return int(os.environ['SLURM_NTASKS'])
    if 'SLURM_JOB_CPUS_PER_NODE' in os.environ:
        text = os.environ['SLURM_JOB_CPUS_PER_NODE']
        if text.isdigit():
            return int(text)
        # The original regex used [1-9]+, which failed to match any CPU
        # count containing a 0 (e.g. '10(x2)') and crashed with an
        # IndexError; \d+ accepts all decimal counts.
        n, N = re.findall(r"(\d+)\(x(\d+)\)", text)[0]
        return int(n) * int(N)
    return multiprocessing.cpu_count()
How many cores does the current computer have ?
def find_child_element(elm, child_local_name): for n in range(len(elm)): child_elm = elm[n] tag = etree.QName(child_elm) if tag.localname == child_local_name: return child_elm return None
Find an XML child element by local tag name.
def cancel(task_id, secret_key=None, url=None):
    """Cancel the scheduled task with `task_id`.

    Falls back to the default secret key and service URL when none are
    supplied, then posts an authenticated request to ``<url>/cancel``.
    """
    secret_key = secret_key or default_key()
    url = url or default_url()
    endpoint = '%s/cancel' % url
    payload = {'id': task_id}
    return _send_with_auth(payload, secret_key, endpoint)
Cancel scheduled task with `task_id`
def contributors(self):
    """Return the contributor list, fetching and caching it on first access.

    Note: a falsy cached value (None or an empty list) triggers a
    re-fetch, matching the original truthiness check.
    """
    cached = self._contributors
    if not cached:
        cached = self.get_contributors()
        self._contributors = cached
    return cached
Property to retrieve or access the list of contributors.
def cli(env, account_id): manager = SoftLayer.CDNManager(env.client) account = manager.get_account(account_id) table = formatting.KeyValueTable(['name', 'value']) table.align['name'] = 'r' table.align['value'] = 'l' table.add_row(['id', account['id']]) table.add_row(['account_name', account['cdnAccountName']]) table.add_row(['type', account['cdnSolutionName']]) table.add_row(['status', account['status']['name']]) table.add_row(['created', account['createDate']]) table.add_row(['notes', account.get('cdnAccountNote', formatting.blank())]) env.fout(table)
Detail a CDN Account.
def generate_gaussian_profile(seeing_fwhm): FWHM_G = 2 * math.sqrt(2 * math.log(2)) sigma = seeing_fwhm / FWHM_G amplitude = 1.0 / (2 * math.pi * sigma * sigma) seeing_model = Gaussian2D(amplitude=amplitude, x_mean=0.0, y_mean=0.0, x_stddev=sigma, y_stddev=sigma) return seeing_model
Generate a normalized Gaussian profile from its FWHM
def dlogprior(self, param): assert param in self.freeparams, "Invalid param: {0}".format(param) return self._dlogprior[param]
Value of derivative of prior depends on value of `prior`.
def _get_var_res(self, graph, var, other_var): with tf.Session(graph=graph) as sess: sess.run(other_var["init"]) var_res = self._get_var_res_sess(sess, var) return var_res
Get the weights from our graph
def team(page): soup = BeautifulSoup(page) try: return soup.find('title').text.split(' | ')[0].split(' - ')[1] except: return None
Return the team name
def _get_populate_from(self, model_instance): if hasattr(self.populate_from, '__call__'): tz = self.populate_from(model_instance) else: from_attr = getattr(model_instance, self.populate_from) tz = callable(from_attr) and from_attr() or from_attr try: tz = pytz.timezone(str(tz)) except pytz.UnknownTimeZoneError: raise self.timezone = tz return tz
Retrieves the timezone or None from the `populate_from` attribute.
def _set_config(xpath, element): query = {'type': 'config', 'action': 'set', 'xpath': xpath, 'element': element} response = __proxy__['panos.call'](query) return _validate_response(response)
Sends a set request to the device.
def compute(self): if "Signature" in self.params: raise RuntimeError("Existing signature in parameters") if self.signature_version is not None: version = self.signature_version else: version = self.params["SignatureVersion"] if str(version) == "1": bytes = self.old_signing_text() hash_type = "sha1" elif str(version) == "2": bytes = self.signing_text() if self.signature_method is not None: signature_method = self.signature_method else: signature_method = self.params["SignatureMethod"] hash_type = signature_method[len("Hmac"):].lower() else: raise RuntimeError("Unsupported SignatureVersion: '%s'" % version) return self.creds.sign(bytes, hash_type)
Compute and return the signature according to the given data.
def _process(self, segments): mlh, mlw = self.max_line_height, self.max_line_width s = segments.astype(numpy.uint32) order = mlw * (s[:, 1] // mlh) + s[:, 0] sort_order = numpy.argsort(order) return segments[sort_order]
sort segments in read order - left to right, up to down
def _on_closed(self): LOGGER.error('Redis connection closed') self.connected = False self._on_close() self._stream = None
Invoked when the connection is closed
def _validate_freq(self): from pandas.tseries.frequencies import to_offset try: return to_offset(self.window) except (TypeError, ValueError): raise ValueError("passed window {0} is not " "compatible with a datetimelike " "index".format(self.window))
Validate & return window frequency.
def network_traffic_ports(instance): for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'network-traffic' and ('src_port' not in obj or 'dst_port' not in obj)): yield JSONError("The Network Traffic object '%s' should contain " "both the 'src_port' and 'dst_port' properties." % key, instance['id'], 'network-traffic-ports')
Ensure network-traffic objects contain both src_port and dst_port.
def _SignedBinaryIDFromURN(binary_urn ): if binary_urn.RelativeName(GetAFF4PythonHackRoot()): return rdf_objects.SignedBinaryID( binary_type=rdf_objects.SignedBinaryID.BinaryType.PYTHON_HACK, path=binary_urn.RelativeName(GetAFF4PythonHackRoot())) elif binary_urn.RelativeName(GetAFF4ExecutablesRoot()): return rdf_objects.SignedBinaryID( binary_type=rdf_objects.SignedBinaryID.BinaryType.EXECUTABLE, path=binary_urn.RelativeName(GetAFF4ExecutablesRoot())) else: raise ValueError("Unable to determine type of signed binary: %s." % binary_urn)
Converts an AFF4 URN for a signed binary to a SignedBinaryID.
def transformer_mlperf_tpu(): hparams = transformer_base_v3() hparams.mlperf_mode = True hparams.symbol_modality_num_shards = 1 hparams.max_length = 256 hparams.batch_size = 2048 hparams.hidden_size = 1024 hparams.filter_size = 4096 hparams.num_heads = 16 hparams.attention_dropout_broadcast_dims = "0,1" hparams.relu_dropout_broadcast_dims = "1" hparams.layer_prepostprocess_dropout_broadcast_dims = "1" return hparams
HParams for Transformer model on TPU for MLPerf on TPU 2x2.
def show_event_handlers(self, stream=sys.stdout, verbose=0): lines = ["List of event handlers installed:"] for handler in self.event_handlers: if verbose: lines.extend(handler.__class__.cls2str().split("\n")) else: lines.extend(str(handler).split("\n")) stream.write("\n".join(lines)) stream.write("\n")
Print to `stream` the event handlers installed for this flow.
def unread(thread, user): return bool(thread.userthread_set.filter(user=user, unread=True))
Check whether there are any unread messages for a particular thread for a user.
def handle(self, **options): template = os.path.dirname(os.path.abspath(__file__)) + "/app_template" name = options.pop("name") call_command("startapp", name, template=template, **options)
Call "startapp" to generate app with custom user model.