code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _cache_translation(translation, timeout=cache.default_timeout):
    """Store a new translation in the cache.

    Skipped entirely when PARLER_ENABLE_CACHING is off.

    Raises:
        ValueError: if the translation's master object is unsaved, since
            the cache key requires the master id.
    """
    if not appsettings.PARLER_ENABLE_CACHING:
        return
    if translation.master_id is None:
        raise ValueError("Can't cache unsaved translation")
    # Snapshot every translated field plus the row id into a plain dict.
    fields = translation.get_translated_fields()
    values = {'id': translation.id}
    for name in fields:
        values[name] = getattr(translation, name)
    key = get_translation_cache_key(translation.__class__, translation.master_id, translation.language_code)
    cache.set(key, values, timeout=timeout)
Store a new translation in the cache.
def __edit_line(line, code, code_obj):
    """Edit a line with one code object built in the ctor.

    Evaluates ``code_obj`` (the compiled form of ``code``); the result
    replaces ``line``.  List/tuple results are joined with spaces.

    Raises:
        TypeError: re-raised when evaluation fails with a TypeError.
        RuntimeError: when evaluation yields ``None``.

    NOTE(review): eval() runs arbitrary code -- ``code`` must come from a
    trusted source.  ``unicode`` implies this is Python 2 code.
    """
    try:
        # locals() exposes `line`, `code`, `code_obj` to the snippet.
        result = eval(code_obj, globals(), locals())
    except TypeError as ex:
        log.error("failed to execute %s: %s", code, ex)
        raise
    if result is None:
        log.error("cannot process line '%s' with %s", line, code)
        raise RuntimeError('failed to process line')
    elif isinstance(result, list) or isinstance(result, tuple):
        line = unicode(' '.join([unicode(res_element) for res_element in result]))
    else:
        line = unicode(result)
    return line
Edit a line with one code object built in the ctor.
def worklog(accountable):
    """List all worklogs for a given issue key.

    Prints a table of author/comment/time-spent rows, or a red notice
    when the issue has no worklogs.
    """
    worklog = accountable.issue_worklog()
    headers = ['author_name', 'comment', 'time_spent']
    if worklog:
        # sorted(w.items()) keeps column order stable and aligned with
        # the alphabetically sorted `headers`.
        rows = [[v for k, v in sorted(w.items()) if k in headers] for w in worklog]
        rows.insert(0, headers)
        print_table(SingleTable(rows))
    else:
        click.secho(
            'No worklogs found for {}'.format(accountable.issue_key), fg='red'
        )
List all worklogs for a given issue key.
def send_username_changed_email(self, user):
    """Send the 'username has changed' notification email.

    No-op unless both email sending and this specific notification are
    enabled in the user manager settings.
    """
    if not self.user_manager.USER_ENABLE_EMAIL: return
    if not self.user_manager.USER_SEND_USERNAME_CHANGED_EMAIL: return
    # The recipient is the user's primary email address object.
    user_or_user_email_object = self.user_manager.db_manager.get_primary_user_email_object(user)
    email = user_or_user_email_object.email
    self._render_and_send_email(
        email,
        user,
        self.user_manager.USER_USERNAME_CHANGED_EMAIL_TEMPLATE,
    )
Send the 'username has changed' notification email.
def _calculate_sv_coverage_cnvkit(data, work_dir):
    """Calculate coverage in a CNVkit-ready format using mosdepth.

    Returns (target .cnn path, antitarget .cnn path).  Coverage is only
    (re)computed when outputs are missing and an aligned BAM exists.
    """
    from bcbio.variation import coverage
    from bcbio.structural import annotate
    out_target_file = os.path.join(work_dir, "%s-target-coverage.cnn" % dd.get_sample_name(data))
    out_anti_file = os.path.join(work_dir, "%s-antitarget-coverage.cnn" % dd.get_sample_name(data))
    if ((not utils.file_exists(out_target_file) or not utils.file_exists(out_anti_file))
            and (dd.get_align_bam(data) or dd.get_work_bam(data))):
        target_cov = coverage.run_mosdepth(data, "target", tz.get_in(["regions", "bins", "target"], data))
        anti_cov = coverage.run_mosdepth(data, "antitarget", tz.get_in(["regions", "bins", "antitarget"], data))
        # Target regions get gene annotations; antitargets do not.
        target_cov_genes = annotate.add_genes(target_cov.regions, data, max_distance=0)
        out_target_file = _add_log2_depth(target_cov_genes, out_target_file, data)
        out_anti_file = _add_log2_depth(anti_cov.regions, out_anti_file, data)
    return out_target_file, out_anti_file
Calculate coverage in a CNVkit-ready format using mosdepth.
def UnregisterMessageHandler(self, timeout=None):
    """Unregisters any registered message handler.

    Signals the handler thread to stop and joins it, waiting at most
    ``timeout`` seconds.

    Raises:
        RuntimeError: if the handler thread is still alive after the join.
    """
    if self.handler_thread:
        self.handler_stop = True
        self.handler_thread.join(timeout)
        # is_alive() replaces isAlive(), which was removed in Python 3.9.
        if self.handler_thread.is_alive():
            raise RuntimeError("Message handler thread did not join in time.")
        self.handler_thread = None
Unregisters any registered message handler.
def socket_closed(self, sock):
    """Return True if we know socket has been closed, False otherwise.

    Polls the socket for readability with a zero timeout; readability
    here is treated as the peer having closed.  EINTR/EAGAIN are
    retried; most other failures are reported as closed.
    """
    while True:
        try:
            if self._poller:
                # Shared poller: register, poll once, always unregister.
                with self._lock:
                    self._poller.register(sock, _EVENT_MASK)
                    try:
                        rd = self._poller.poll(0)
                    finally:
                        self._poller.unregister(sock)
            else:
                rd, _, _ = select.select([sock], [], [], 0)
        except (RuntimeError, KeyError):
            # Poller implementation errors are programming bugs: propagate.
            raise
        except ValueError:
            # Negative/already-closed file descriptor.
            return True
        except (_SELECT_ERROR, IOError) as exc:
            if _errno_from_exception(exc) in (errno.EINTR, errno.EAGAIN):
                continue
            return True
        except Exception:
            return True
        return len(rd) > 0
Return True if we know socket has been closed, False otherwise.
def log_message(self, kind, alert):
    """Route an alert to the logging module at the right severity.

    'ERROR' events are logged as errors; every other kind is logged as
    a warning.
    """
    emit = logging.error if kind == 'ERROR' else logging.warning
    emit(alert)
    return
Parses different event types and passes them to logging
def state_pop(self):
    """Pop the state of all generators.

    Restores this Composite's own saved state first, then delegates to
    every child generator.
    """
    super(Composite,self).state_pop()
    for gen in self.generators:
        gen.state_pop()
Pop the state of all generators
def flush(self):
    """Force the queue of Primitives to compile, execute on the
    Controller, and fulfill promises with the data returned.

    No-op on an empty queue.  Optionally prints compile/execute timing
    statistics.
    """
    self.stages = []
    self.stagenames = []
    if not self.queue:
        return
    if self.print_statistics:
        print("LEN OF QUENE", len(self))
    t = time()
    if self._chain._collect_compiler_artifacts:
        # Debug compile keeps per-stage artifacts for inspection.
        self._compile(debug=True, stages=self.stages, stagenames=self.stagenames)
    else:
        self._compile()
    if self.debug:
        print("ABOUT TO EXEC", self.queue)
    if self.print_statistics:
        print("COMPILE TIME", time()-t)
        print("TOTAL BITS OF ALL PRIMS", sum(
            (p.count for p in self.queue if hasattr(p, 'count'))))
    t = time()
    self._chain._controller._execute_primitives(self.queue)
    if self.print_statistics:
        print("EXECUTE TIME", time()-t)
    self.queue = []
    # Keep the chain's state machine in sync with the simulated FSM.
    self._chain._sm.state = self._fsm.state
Force the queue of Primitives to compile, execute on the Controller, and fulfill promises with the data returned.
def validate(self, model_name, object):
    """Validate an object against its swagger model.

    Raises:
        ValidationError: if the spec has no definition for ``model_name``.
    """
    if model_name not in self.swagger_dict['definitions']:
        raise ValidationError("Swagger spec has no definition for model %s" % model_name)
    model_def = self.swagger_dict['definitions'][model_name]
    log.debug("Validating %s" % model_name)
    return validate_schema_object(self.spec, model_def, object)
Validate an object against its swagger model
def get(self, client_method, get_params, is_json=True):
    """Make a GET request against the Apollo API.

    Returns parsed-and-scrubbed JSON when ``is_json`` is true, raw text
    otherwise.  Any non-200 status raises a generic Exception.
    """
    url = self._wa.apollo_url + self.CLIENT_BASE + client_method
    headers = {}
    response = requests.get(url, headers=headers, verify=self.__verify,
                            params=get_params, **self._request_args)
    if response.status_code == 200:
        if is_json:
            data = response.json()
            return self._scrub_data(data)
        else:
            return response.text
    raise Exception("Unexpected response from apollo %s: %s" %
                    (response.status_code, response.text))
Make a GET request
def reset_state(self):
    """Return the system to its initial state.

    ``y`` becomes a fresh copy of ``y0``, both derivative buffers are
    zeroed, and the canonical system is reset as well.
    """
    self.y = self.y0.copy()
    self.dy, self.ddy = np.zeros(self.dmps), np.zeros(self.dmps)
    self.cs.reset_state()
Reset the system state
def _trace_full (frame, event, arg):
    """Trace every executed line.

    Dispatches "line" events to _trace_line and everything else to
    _trace, then re-installs itself as the local trace function by
    returning itself (sys.settrace protocol).
    """
    if event == "line":
        _trace_line(frame, event, arg)
    else:
        _trace(frame, event, arg)
    return _trace_full
Trace every executed line.
async def get(self, cid, coinid):
    """Receives all content reviews.

    Collects confirmed reviews from the blockchain bridge for ``coinid``
    plus unconfirmed ones from local storage, and writes the combined
    JSON list to the response.  Error dicts from either source abort
    the request.
    """
    if settings.SIGNATURE_VERIFICATION:
        super().verify()
    if coinid in settings.bridges.keys():
        self.account.blockchain.setendpoint(settings.bridges[coinid])
    reviews = await self.account.blockchain.getreviews(cid=cid)
    if isinstance(reviews, dict):
        if "error" in reviews:
            self.set_status(500)
            self.write(reviews)
            raise tornado.web.Finish
    # Blockchain-sourced reviews are confirmed by definition.
    for review in reviews:
        review["confirmed"] = 1
    storage_reviews = await self.account.getreviews(coinid=coinid, cid=cid)
    # BUG FIX: the original re-validated `reviews` here; the storage
    # response is the one that needs checking.
    if isinstance(storage_reviews, dict):
        if "error" in storage_reviews.keys():
            self.set_status(storage_reviews["error"])
            self.write(storage_reviews)
            raise tornado.web.Finish
    self.write(json.dumps(reviews + storage_reviews))
Receives all contents reviews
def parse_ok(l1, l3):
    """Parse HTML when the SIREN lookup succeeded.

    ``l1`` and ``l3`` are element-like objects exposing ``.text``.

    NOTE(review): the slicing assumes text shaped like
    "label : <annee> ... \xab<siren>\xbb <categorie> ..." -- confirm
    against the actual page markup.
    """
    return {
        'annee': l1.text.split(' : ')[1].split()[2],
        'siren valide': ''.join(
            l1.text.split(' : ')[1].split(u'\xab')[0].split()[-4:-1]),
        'categorie': ' '.join(
            l1.text.split(' : ')[1].split(u'\xab')[1].split()[:-1]),
        'raison sociale': l3.text.split(' : ')[1][:-1],
        'statut': 'ok'}
Parse the HTML result page when the SIREN number is valid.
def select_unit(action, action_space, select_unit_act, select_unit_id):
    """Select a specific unit from the multi-unit selection panel."""
    del action_space  # unused; kept for a uniform action-function signature
    panel = action.action_ui.multi_panel
    panel.type = select_unit_act
    panel.unit_index = select_unit_id
Select a specific unit from the multi-unit selection.
def _batchify(self, batch_data, batch_label, start=0):
    """Helper function for batchifying data.

    Fills batch_data/batch_label in place from index ``start`` and
    returns the number of slots filled.  StopIteration from the sample
    iterator is swallowed unless nothing was read at all.
    """
    i = start
    batch_size = self.batch_size
    try:
        while i < batch_size:
            label, s = self.next_sample()
            data = self.imdecode(s)
            try:
                self.check_valid_image(data)
            except RuntimeError as e:
                # Corrupt image: skip the sample rather than abort the batch.
                logging.debug('Invalid image, skipping: %s', str(e))
                continue
            data = self.augmentation_transform(data)
            assert i < batch_size, 'Batch size must be multiples of augmenter output length'
            batch_data[i] = self.postprocess_data(data)
            batch_label[i] = label
            i += 1
    except StopIteration:
        if not i:
            raise StopIteration
    return i
Helper function for batchifying data
def add_source(self, tier):
    """Wire ``tier`` into this tier.

    We register ourselves as one of its destinations, and it becomes
    one of our sources.
    """
    source = tier
    source.add_dest(self)
    self.sources.append(source)
Schedule this tier to be called when another tier emits.
def create_device_from_category(self, plm, addr, cat, subcat, product_key=0x00):
    """Create a new device from the cat, subcat and product_key data.

    Values from a previously saved device take precedence over the
    arguments, and explicit user overrides take precedence over both.
    """
    saved_device = self._saved_devices.get(Address(addr).id, {})
    cat = saved_device.get('cat', cat)
    subcat = saved_device.get('subcat', subcat)
    product_key = saved_device.get('product_key', product_key)
    device_override = self._overrides.get(Address(addr).id, {})
    cat = device_override.get('cat', cat)
    subcat = device_override.get('subcat', subcat)
    # Overrides may use the legacy 'firmware' key as well as 'product_key';
    # 'product_key' wins when both are present.
    product_key = device_override.get('firmware', product_key)
    product_key = device_override.get('product_key', product_key)
    return insteonplm.devices.create(plm, addr, cat, subcat, product_key)
Create a new device from the cat, subcat and product_key data.
def removeGaps(self) :
    """Remove all gaps between regions.

    Walks the children left to right and shifts each one (via
    aux_moveTree) so it starts where the previous child ends.
    """
    for i in range(1, len(self.children)) :
        if self.children[i].x1 > self.children[i-1].x2:
            # Negative delta moves the subtree left, closing the gap.
            aux_moveTree(self.children[i-1].x2-self.children[i].x1, self.children[i])
Remove all gaps between regions
def BROKER_URL(self):
    """Build the Celery BROKER_URL from redis or rabbitmq settings.

    An explicit BROKER_URL setting wins; otherwise the URL is assembled
    for the configured broker type, falling back to DEFAULT_BROKER_URL
    (also used when redis is selected but unavailable).
    """
    broker_url = get('BROKER_URL', None)
    if broker_url:
        log.info("Using BROKER_URL setting: {}".format(broker_url))
        return broker_url
    redis_available = self._redis_available()
    broker_type = self.BROKER_TYPE
    if broker_type == 'redis' and not redis_available:
        # Misconfiguration: warn, then fall through to the default URL.
        log.warn("Choosed broker type is redis, but redis not available. \ Check redis package, and REDIS_HOST, REDIS_PORT settings")
    if broker_type == 'redis' and redis_available:
        return 'redis://{host}:{port}/{db}'.format(
            host=self.REDIS_HOST, port=self.REDIS_PORT,
            db=self.CELERY_REDIS_BROKER_DB)
    elif broker_type == 'rabbitmq':
        return 'amqp://{user}:{passwd}@{host}:{port}/{vhost}'.format(
            user=self.RABBITMQ_USER, passwd=self.RABBITMQ_PASSWD,
            host=self.RABBITMQ_HOST, port=self.RABBITMQ_PORT,
            vhost=self.RABBITMQ_VHOST)
    else:
        return DEFAULT_BROKER_URL
Sets BROKER_URL depending on redis or rabbitmq settings
def _expand_autotag(atag, container):
    """Expands the contents of the specified auto tag within its parent
    container.

    For each name in the comma/space separated 'names' attribute, every
    child of the <auto> tag is duplicated with "$" in attribute values
    replaced by the name.  A name prefixed with '^' inserts the copies
    at the front of the container (in order) instead of appending.
    """
    if atag.tag != "auto":
        return
    if "names" in atag.attrib:
        i = -1
        for name in re.split("[\s,]+", atag.attrib["names"]):
            if name[0] == '^':
                name = name[1::]
                insert = True
                i += 1
            else:
                insert = False
            for child in atag:
                dupe = child.copy()
                for attr, value in dupe.items():
                    # "$" is the placeholder for the expanded name.
                    dupe.attrib[attr] = value.replace("$", name)
                if insert:
                    container.insert(i, dupe)
                else:
                    container.append(dupe)
    else:
        from fortpy.msg import warn
        warn("'names' is a required attribute of the <auto> tag.")
Expands the contents of the specified auto tag within its parent container.
def interrupt(self):
    """Interrupt the database currently being processed, if any.

    Drivers whose connection object lacks ``interrupt`` support are
    quietly ignored; the bookkeeping fields are cleared afterwards.
    """
    db, thread_id = self._database, self._databaseThreadId
    if db and thread_id:
        try:
            db.interrupt(thread_id)
        except AttributeError:
            pass
        self._database = None
        self._databaseThreadId = 0
Interrupts the current database from processing.
def _normal_map_callback(self, msg): try: self._cur_normal_map = self._bridge.imgmsg_to_cv2(msg) except: self._cur_normal_map = None
Callback for handling normal maps.
def draw_screen(self, surf):
    """Draw the screen area.

    Uses the server-rendered RGB map when present in the observation,
    otherwise draws the base map and units, then layers the selection,
    build target, overlay, commands and panel on top.
    """
    if (self._render_rgb and self._obs.observation.HasField("render_data")
            and self._obs.observation.render_data.HasField("map")):
        self.draw_rendered_map(surf)
    else:
        self.draw_base_map(surf)
        self.draw_units(surf)
    self.draw_selection(surf)
    self.draw_build_target(surf)
    self.draw_overlay(surf)
    self.draw_commands(surf)
    self.draw_panel(surf)
Draw the screen area.
def Remove(self, *descriptor_names):
    """Returns a copy of this set without elements with given names."""
    new_descriptor_map = self.descriptor_map.copy()
    for name in descriptor_names:
        new_descriptor_map.pop(name, None)
    # Filter the ordered `descriptors` list (rather than rebuilding from
    # the map) so the original ordering is preserved.
    new_descriptors = [
        desc for desc in self.descriptors
        if desc in itervalues(new_descriptor_map)
    ]
    return TypeDescriptorSet(*new_descriptors)
Returns a copy of this set without elements with given names.
def publish(self):
    """Publish the current release to PyPI.

    No-op unless a publish command is configured.
    """
    if self.config.publish:
        logger.info('Publish')
        self.execute(self.config.publish)
Publish the current release to PyPI
def ddtodms(self, dd):
    """Convert decimal degrees to a (degrees, minutes, seconds) tuple.

    For negative input the minus sign is carried on the most
    significant non-zero component only.
    """
    is_negative = dd < 0
    total_seconds = abs(dd) * 3600
    minutes, seconds = divmod(total_seconds, 60)
    degrees, minutes = divmod(minutes, 60)
    if is_negative:
        # Attach the sign to the leading non-zero field.
        if degrees > 0:
            degrees = -degrees
        elif minutes > 0:
            minutes = -minutes
        else:
            seconds = -seconds
    return (degrees, minutes, seconds)
Convert a decimal-degrees value to a (degrees, minutes, seconds) tuple.
def addRemoteCandidate(self, candidate):
    """Add a remote candidate.

    ``None`` signals end-of-candidates.  Ignored once the
    end-of-candidates marker has already been received.
    """
    if not self._connection._remote_candidates_end:
        if candidate is None:
            self._connection.add_remote_candidate(None)
        else:
            self._connection.add_remote_candidate(candidate_to_aioice(candidate))
Add a remote candidate.
def download(self, sub_url):
    """Download and unzip a subtitle archive to a temp location.

    Scrapes the download link from the subtitle page, fetches the zip
    and extracts the first .srt/.ass member into /tmp, returning its
    archive-relative filename.

    NOTE(review): cStringIO implies this is Python 2 code.
    """
    response = requests.get(sub_url, headers=self.headers).text
    soup = BS(response, 'lxml')
    downlink = self.base_url+soup.select('.download a')[0]['href']
    data = requests.get(downlink, headers=self.headers)
    z = zipfile.ZipFile(cStringIO.StringIO(data.content))
    srt_files = [f.filename for f in z.filelist
                 if f.filename.rsplit('.')[-1].lower() in ['srt', 'ass']]
    z.extract(srt_files[0], '/tmp/')
    return srt_files[0]
download and unzip subtitle archive to a temp location
def start(self):
    """Start all the processes.

    Boots the configured actions first, then the message fetcher loop.
    """
    Global.LOGGER.info("starting the flow manager")
    self._start_actions()
    self._start_message_fetcher()
    Global.LOGGER.debug("flow manager started")
Start all the processes
def _is_match(self, response, answer):
    """Check whether ``response`` matches ``answer``.

    Dispatches to the record type that knows how to compare this kind
    of item; generic right-feedback items match on genus type alone.
    Returns False for unrecognized item kinds.
    """
    if self._only_generic_right_feedback():
        return str(answer.genus_type) == str(RIGHT_ANSWER_GENUS)
    elif self._is_multiple_choice():
        return MultiChoiceItemRecord._is_match(self, response, answer)
    elif self._is_image_sequence() or self._is_mw_sentence():
        return OrderedChoiceItemRecord._is_match(self, response, answer)
    elif self._is_numeric_response():
        return CalculationInteractionItemRecord._is_match(self, response, answer)
    elif self._is_fitb():
        return MagicRandomizedInlineChoiceItemRecord._is_match(self, response, answer)
    return False
Dispatch matching to the record type appropriate for this item (multiple choice, ordered choice, numeric, or fill-in-the-blank).
def SetRaw(self, name, value):
    """Set the raw string without verification or escaping.

    Raises:
        ConstModificationError: if ``name`` is a declared constant.

    NOTE(review): a missing writeback only logs a warning -- the write
    still lands in writeback_data.  Confirm that is intentional.
    """
    if self.writeback is None:
        logging.warning("Attempting to modify a read only config object.")
    if name in self.constants:
        raise ConstModificationError(
            "Attempting to modify constant value %s" % name)
    self.writeback_data[name] = value
    self.FlushCache()
Set the raw string without verification or escaping.
def _altair_line_num_(self, xfield, yfield, opts, style, encode):
    """Get a line + text number chart.

    Returns None (after reporting through self.err) when the underlying
    chart construction fails.
    """
    try:
        c = self._altair_chart_num_("line", xfield, yfield, opts, style, encode)
    except Exception as e:
        self.err(e, "Can not draw a line num chart")
        return
    return c
Get a line + text number chart
def _set_choices(self, value):
    """Set the field's choices and keep the widget's choices in sync."""
    super(LazyChoicesMixin, self)._set_choices(value)
    self.widget.choices = value
Also update the widget's choices.
def addCircleAnnot(self, rect):
    """Add a 'Circle' annotation covering ``rect``.

    Returns the new annotation, registered in this page's annot refs so
    it is invalidated together with the page; returns the falsy value
    unchanged when creation fails.
    """
    CheckParent(self)
    val = _fitz.Page_addCircleAnnot(self, rect)
    if not val:
        return
    val.thisown = True
    # Weak proxy back to the page avoids a reference cycle.
    val.parent = weakref.proxy(self)
    self._annot_refs[id(val)] = val
    return val
Add a 'Circle' annotation.
def _contiguous_slices(self): k = j = None for i in self._sorted(): if k is None: k = j = i if i - j > 1: yield slice(k, j + 1, 1) k = i j = i if k is not None: yield slice(k, j + 1, 1)
Internal iterator over contiguous slices in RangeSet.
async def set_playstate(self, state, pos=0):
    """Send media commands to server.

    ``pos`` (seconds) is only used for 'seek', converted to the
    server's 100-nanosecond ticks.
    """
    url = '{}/Sessions/{}/Playing/{}'.format(
        self.server.construct_url(API_URL), self.session_id, state)
    params = {'api_key': self.server.api_key}
    if state == 'seek':
        # 1 second == 10,000,000 ticks.
        params['SeekPositionTicks'] = int(pos * 10000000)
        params['static'] = 'true'
    _LOGGER.debug('Playstate URL: %s', url)
    post = await self.server.api_post(url, params)
    if post is None:
        _LOGGER.debug('Error sending command.')
    else:
        _LOGGER.debug('Post response: %s', post)
Send media commands to server.
def insert_source_info(result):
    """Annotate ``result`` in place with a "Source: ..." comment.

    Does nothing when a comment already exists or when any of source,
    job_name and run is missing.
    """
    if result.get("comment"):
        return
    parts = [result.get("source"), result.get("job_name"), result.get("run")]
    if all(parts):
        result["comment"] = "Source: {}".format("/".join(parts))
Adds info about source of test result if available.
def save(self, async=False, callback=None, encrypted=True):
    """Update the user and perform the callback method.

    Re-encrypts a pending password change, resets the session's login
    credentials and PUTs the serialized user.

    NOTE(review): `async` became a reserved keyword in Python 3.7 --
    this code only runs on older interpreters, and renaming the
    parameter would break keyword callers.
    """
    if self._new_password and encrypted:
        self.password = Sha1.encrypt(self._new_password)
    controller = NURESTSession.get_current_session().login_controller
    controller.password = self._new_password
    # Force re-authentication with the new password on the next request.
    controller.api_key = None
    data = json.dumps(self.to_dict())
    request = NURESTRequest(method=HTTP_METHOD_PUT, url=self.get_resource_url(), data=data)
    if async:
        return self.send_request(request=request, async=async, local_callback=self._did_save, remote_callback=callback)
    else:
        connection = self.send_request(request=request)
        return self._did_save(connection)
Update the user and perform the callback method.
def acquisition_function_withGradients(self, x):
    """Cost-weighted acquisition value and gradient at ``x``.

    Divides the raw acquisition by the evaluation cost (quotient rule
    for the gradient) and masks out points violating the domain
    constraints.  Values are negated for use with minimizers.
    """
    f_acqu,df_acqu = self._compute_acq_withGradients(x)
    cost_x, cost_grad_x = self.cost_withGradients(x)
    f_acq_cost = f_acqu/cost_x
    # d(f/c) = (f'c - f c') / c^2
    df_acq_cost = (df_acqu*cost_x - f_acqu*cost_grad_x)/(cost_x**2)
    return -f_acq_cost*self.space.indicator_constraints(x), -df_acq_cost*self.space.indicator_constraints(x)
Takes an acquisition and it gradient and weights it so the domain and cost are taken into account.
def change_access_key(self):
    """Change the access key of your account via the Sauce REST API."""
    endpoint = '/rest/v1/users/{}/accesskey/change'.format(
        self.client.sauce_username)
    return self.client.request('POST', endpoint)
Change access key of your account.
def to_str(cls, values, callback=None):
    """Convert many records's values to str.

    With a callable ``callback``, dict inputs are converted whole and
    passed through it once; any other iterable is converted element by
    element.  Without a callback the raw conversion is returned.
    """
    if callback and callable(callback):
        if isinstance(values, dict):
            return callback(_es.to_str(values))
        return [callback(_es.to_str(i)) for i in values]
    return _es.to_str(values)
Convert many records's values to str
def on_train_begin(self, **kwargs:Any)->None:
    "Prepare the master model."
    # Split the layer groups into (fp16 model params, fp32 master copies).
    self.model_params, self.master_params = get_master(self.learn.layer_groups, self.flat_master)
    new_opt = self.learn.opt.new_with_params(self.master_params)
    if self.opt is not None:
        # Carry the current hyper-parameters over before restoring state.
        self.opt.lr,self.opt.wd = self.learn.opt.lr,self.learn.opt.wd
        new_opt.load_state_dict(self.opt)
    self.learn.opt.opt = new_opt.opt
    self.noskip = 0
Prepare the master model.
def should_see_link_text(self, link_text, link_url):
    """Assert a link with the provided text points to the provided URL.

    Only displayed links are considered.
    """
    elements = ElementSelector(
        world.browser,
        str('//a[@href="%s"][./text()="%s"]' % (link_url, link_text)),
        filter_displayed=True,
    )
    if not elements:
        raise AssertionError("Expected link not found.")
Assert a link with the provided text points to the provided URL.
def action(context, request, action=None, resource=None, uid=None):
    """Various HTTP POST actions.

    The action defaults to the X-HTTP-Method-Override header (CREATE
    when absent) and is dispatched to the matching ``<action>_items``
    API function.
    """
    if action is None:
        action = request.get_header("HTTP_X_HTTP_METHOD_OVERRIDE", "CREATE").lower()
    func_name = "{}_items".format(action)
    action_func = getattr(api, func_name, None)
    if action_func is None:
        api.fail(500, "API has no member named '{}'".format(func_name))
    portal_type = api.resource_to_portal_type(resource)
    items = action_func(portal_type=portal_type, uid=uid)
    return {
        "count": len(items),
        "items": items,
        "url": api.url_for("senaite.jsonapi.v1.action", action=action),
    }
Various HTTP POST actions
def readOne(stream, validate=False, transform=True, ignoreUnreadable=False, allowQP=False):
    """Return the first component from stream.

    Thin wrapper around readComponents; raises StopIteration when the
    stream yields no components.
    """
    return next(readComponents(stream, validate, transform, ignoreUnreadable, allowQP))
Return the first component from stream.
def items(self):
    """Return (column name, value) pairs for every declared column.

    Columns without a matching attribute yield None as the value.
    """
    pairs = []
    for column in self.__table__._columns:
        pairs.append((column.name, getattr(self, column.name, None)))
    return pairs
return list of pair of name and value of all declared columns.
def ListDirectory(self, pathspec, depth=0):
    """A recursive generator of files.

    Yields stat entries depth-first up to ``max_depth``, optionally
    refusing to cross filesystem boundaries.  At depth 0 an unreadable
    directory sets an error status; deeper failures are just logged.
    """
    if depth >= self.request.max_depth:
        return
    try:
        fd = vfs.VFSOpen(pathspec, progress_callback=self.Progress)
        files = fd.ListFiles(ext_attrs=self.request.collect_ext_attrs)
    except (IOError, OSError) as e:
        if depth == 0:
            # Surface the error for the top-level directory only.
            self.SetStatus(rdf_flows.GrrStatus.ReturnedStatus.IOERROR, e)
        else:
            logging.info("Find failed to ListDirectory for %s. Err: %s", pathspec, e)
        return
    # Remember the starting device so filesystem crossings can be detected.
    if not self.request.cross_devs and self.filesystem_id is None:
        dir_stat = fd.Stat()
        self.filesystem_id = dir_stat.st_dev
    for file_stat in files:
        if stat.S_ISDIR(int(file_stat.st_mode)):
            is_same_fs = self.filesystem_id == file_stat.st_dev
            if is_same_fs or self.request.cross_devs:
                for child_stat in self.ListDirectory(file_stat.pathspec, depth + 1):
                    yield child_stat
        yield file_stat
A recursive generator of files.
def logger_add(self, loggerclass):
    """Register ``loggerclass`` under its LoggerName and create an empty
    config section for it."""
    key = loggerclass.LoggerName
    self.loggers[key] = loggerclass
    self[key] = {}
Add a new logger type to the known loggers.
def cyclic(self):
    "Returns True if the options cycle, otherwise False"
    # True when at least one keyword value is a Cycle instance.
    return any(isinstance(val, Cycle) for val in self.kwargs.values())
Returns True if the options cycle, otherwise False
def update_bounds_boxes(self):
    """Updates bounds boxes with bounds of current specimen and fit.

    Falls back to the first available specimen when the current one is
    unknown, and mirrors the update to the interpretation editor when
    it is open.
    """
    if self.s not in list(self.Data.keys()):
        self.select_specimen(list(self.Data.keys())[0])
    self.T_list = self.Data[self.s]['zijdblock_steps']
    if self.current_fit:
        self.tmin_box.SetItems(self.T_list)
        self.tmax_box.SetItems(self.T_list)
        # Only restore the selections when both bounds are step strings.
        if type(self.current_fit.tmin) == str and type(self.current_fit.tmax) == str:
            self.tmin_box.SetStringSelection(self.current_fit.tmin)
            self.tmax_box.SetStringSelection(self.current_fit.tmax)
    if self.ie_open:
        self.ie.update_bounds_boxes(self.T_list)
updates bounds boxes with bounds of current specimen and fit
def _handle_fetch_response(self, request, send_time, response):
    """The callback for fetch completion.

    Pairs each returned partition with the offset that was requested
    for it, wraps the raw data in CompletedFetch records and updates
    fetch metrics.
    """
    fetch_offsets = {}
    for topic, partitions in request.topics:
        for partition_data in partitions:
            partition, offset = partition_data[:2]
            fetch_offsets[TopicPartition(topic, partition)] = offset
    partitions = set([TopicPartition(topic, partition_data[0])
                      for topic, partitions in response.topics
                      for partition_data in partitions])
    metric_aggregator = FetchResponseMetricAggregator(self._sensors, partitions)
    # Shuffle so no topic/partition consistently starves the others.
    random.shuffle(response.topics)
    for topic, partitions in response.topics:
        random.shuffle(partitions)
        for partition_data in partitions:
            tp = TopicPartition(topic, partition_data[0])
            completed_fetch = CompletedFetch(
                tp, fetch_offsets[tp],
                response.API_VERSION,
                partition_data[1:],
                metric_aggregator
            )
            self._completed_fetches.append(completed_fetch)
    if response.API_VERSION >= 1:
        self._sensors.fetch_throttle_time_sensor.record(response.throttle_time_ms)
    self._sensors.fetch_latency.record((time.time() - send_time) * 1000)
The callback for fetch completion
def gan_loss_from_func(loss_gen, loss_crit, weights_gen:Tuple[float,float]=None):
    "Define loss functions for a GAN from `loss_gen` and `loss_crit`."
    def _loss_G(fake_pred, output, target, weights_gen=weights_gen):
        # Generator: fool the critic (targets of ones) + supervised term.
        ones = fake_pred.new_ones(fake_pred.shape[0])
        weights_gen = ifnone(weights_gen, (1.,1.))
        return weights_gen[0] * loss_crit(fake_pred, ones) + weights_gen[1] * loss_gen(output, target)
    def _loss_C(real_pred, fake_pred):
        # Critic: average of real-vs-ones and fake-vs-zeros losses.
        ones = real_pred.new_ones (real_pred.shape[0])
        zeros = fake_pred.new_zeros(fake_pred.shape[0])
        return (loss_crit(real_pred, ones) + loss_crit(fake_pred, zeros)) / 2
    return _loss_G, _loss_C
Define loss functions for a GAN from `loss_gen` and `loss_crit`.
def _fetch_access_token(self, url, data):
    """The real fetch access token.

    POSTs the credentials, raises WeChatClientException on transport or
    API errors, caches the component access token with its TTL and
    returns the full response payload.
    """
    logger.info('Fetching component access token')
    res = self._http.post(
        url=url,
        data=data
    )
    try:
        res.raise_for_status()
    except requests.RequestException as reqe:
        raise WeChatClientException(
            errcode=None,
            errmsg=None,
            client=self,
            request=reqe.request,
            response=reqe.response
        )
    result = res.json()
    if 'errcode' in result and result['errcode'] != 0:
        raise WeChatClientException(
            result['errcode'],
            result['errmsg'],
            client=self,
            request=res.request,
            response=res
        )
    # WeChat tokens default to a 2 hour lifetime when not specified.
    expires_in = 7200
    if 'expires_in' in result:
        expires_in = result['expires_in']
    self.session.set(
        'component_access_token',
        result['component_access_token'],
        expires_in
    )
    self.expires_at = int(time.time()) + expires_in
    return result
The real fetch access token
def dump_pickle(name, obj):
    """Pickle ``obj`` to file ``name`` (quick helper, similar to np.save).

    Keeps pickle protocol 2 for backwards compatibility with the
    original behaviour.  The stray ``pass;`` and statement semicolons
    from the original have been removed.
    """
    with open(name, "wb") as f:
        pickle.dump(obj, f, 2)
quick pickle dump similar to np.save
def _configure_port_binding(self, is_provider_vlan, duplicate_type, is_native,
                            switch_ip, vlan_id, intf_type, nexus_port, vni):
    """Conditionally calls vlan and port Nexus drivers.

    Nothing to do for an exact duplicate port binding; for a duplicate
    vlan only the trunk step can still be required.
    """
    if duplicate_type == const.DUPLICATE_PORT:
        return
    auto_create, auto_trunk = self._gather_config_parms(
        is_provider_vlan, vlan_id)
    if duplicate_type == const.DUPLICATE_VLAN:
        # The vlan already exists on the switch; never recreate it.
        auto_create = False
    if auto_create and auto_trunk:
        LOG.debug("Nexus: create vlan %s and add to interface", vlan_id)
        self.driver.create_and_trunk_vlan(
            switch_ip, vlan_id, intf_type, nexus_port, vni, is_native)
    elif auto_create:
        LOG.debug("Nexus: create vlan %s", vlan_id)
        self.driver.create_vlan(switch_ip, vlan_id, vni)
    elif auto_trunk:
        LOG.debug("Nexus: trunk vlan %s", vlan_id)
        self.driver.send_enable_vlan_on_trunk_int(
            switch_ip, vlan_id, intf_type, nexus_port, is_native)
Conditionally calls vlan and port Nexus drivers.
def arch_size(self):
    """Return the architecture size in bits.

    Raises:
        BfdException: when the BFD pointer is uninitialized or the size
            cannot be determined.
    """
    if not self._ptr:
        raise BfdException("BFD not initialized")
    try:
        return _bfd.get_arch_size(self._ptr)
    except Exception:
        # Py3-compatible except clause (was `except Exception, err`, a
        # SyntaxError on Python 3); also fixes the "architeure" typo.
        raise BfdException("Unable to determine architecture size.")
Return the architecture size in bits.
def stop_listening(cls, event, func):
    """Remove a callback for a signal against the class."""
    signal(event).disconnect(func, sender=cls)
Remove a callback for a signal against the class
def cleanup_nodes(doc):
    """Remove text nodes containing only whitespace.

    Iterates over a snapshot of the child list: removing from the live
    ``childNodes`` NodeList while iterating it skips the node following
    each removal, which left some whitespace nodes behind.
    """
    for node in list(doc.documentElement.childNodes):
        if node.nodeType == Node.TEXT_NODE and node.nodeValue.isspace():
            doc.documentElement.removeChild(node)
    return doc
Remove text nodes containing only whitespace
def save(filename, html):
    """Creates a baked HTML file on the local system.

    Uses a context manager so the file handle is closed even when the
    write fails partway; errors are reported through the CLI parser.
    """
    try:
        with open(filename, "w") as out_file:
            out_file.write(html)
    except IOError as ex:
        parser.error("Could not write baked HTML to local file {name}. ({ex})".format(name=filename, ex=ex))
Creates a baked HTML file on the local system
def normalize_request(request):
    """Given a request, normalize it to the internal Request class.

    Tries each registered normalizer in turn; a normalizer signals
    "not my type" by raising TypeError.

    Raises:
        ValueError: when no normalizer accepts the request.
    """
    if isinstance(request, Request):
        return request
    for normalizer in REQUEST_NORMALIZERS:
        try:
            return normalizer(request)
        except TypeError:
            continue
    raise ValueError("Unable to normalize the provided request")
Given a request, normalize it to the internal Request class.
def commajoin_as_strings(iterable):
    """Join the given iterable with ','.

    Each element is coerced to text first; the separator goes through
    the translation wrapper ``_``.
    """
    return _(u',').join((six.text_type(i) for i in iterable))
Join the given iterable with ','
def associate_hosting_device_with_config_agent(
        self, client, config_agent_id, body):
    """Associates a hosting_device with a config agent."""
    return client.post((ConfigAgentHandlingHostingDevice.resource_path +
                        CFG_AGENT_HOSTING_DEVICES) % config_agent_id, body=body)
Associates a hosting_device with a config agent.
def _call(self, x, out=None):
    """Calculate the spatial Laplacian of ``x``.

    Implemented as the sum over axes of forward minus backward
    differences, each scaled by 1/dx^2, accumulated into ``out``.
    """
    if out is None:
        out = self.range.zero()
    else:
        out.set_zero()
    x_arr = x.asarray()
    out_arr = out.asarray()
    # Scratch buffer reused for every axis/difference.
    tmp = np.empty(out.shape, out.dtype, order=out.space.default_order)
    ndim = self.domain.ndim
    dx = self.domain.cell_sides
    with writable_array(out) as out_arr:
        for axis in range(ndim):
            finite_diff(x_arr, axis=axis, dx=dx[axis] ** 2, method='forward',
                        pad_mode=self.pad_mode,
                        pad_const=self.pad_const,
                        out=tmp)
            out_arr += tmp
            finite_diff(x_arr, axis=axis, dx=dx[axis] ** 2, method='backward',
                        pad_mode=self.pad_mode,
                        pad_const=self.pad_const,
                        out=tmp)
            out_arr -= tmp
    return out
Calculate the spatial Laplacian of ``x``.
def prettyval(self, val):
    """Return ``val`` in a readable format.

    Word-sized values that look numeric are shown as hex; mostly
    printable data is shown as a quoted string, anything else as a
    hexdump.
    """
    # Word-sized, high byte 0x00/0xff (zero/sign extended): numeric.
    if len(val) == self.wordsize and val[-1:] in (b'\x00', b'\xff'):
        return "%x" % struct.unpack("<" + self.fmt, val)
    # Word-sized containing control characters: also numeric.
    if len(val) == self.wordsize and re.search(b'[\x00-\x08\x0b\x0c\x0e-\x1f]', val, re.DOTALL):
        return "%x" % struct.unpack("<" + self.fmt, val)
    if len(val) < 2 or not re.match(b'^[\x09\x0a\x0d\x20-\xff]+.$', val, re.DOTALL):
        return hexdump(val)
    val = val.replace(b"\n", b"\\n")
    return "'%s'" % val.decode('utf-8', 'ignore')
returns the value in a readable format.
def _scale_x_values_timestamps(self, values, max_width): first_timestamp = float(values[0][0]) last_timestamp = float(values[-1][0]) step_size = (last_timestamp - first_timestamp) / max_width values_by_column = [[] for i in range(max_width)] for timestamp, value in values: if value is None: continue timestamp = float(timestamp) column = (timestamp - first_timestamp) // step_size column = int(min(column, max_width - 1)) values_by_column[column].append(value) adjusted_values = [statistics.mean(values) if values else 0 for values in values_by_column] return adjusted_values
Scale X values to new width based on timestamps
def icon(self):
    """Get QIcon from wrapper, building it lazily on first access."""
    if self._icon is None:
        self._icon = QIcon(self.pm())
    return self._icon
Get QIcon from wrapper
def ipython(image):
    """Give the user an ipython shell with a Singularity client loaded
    for ``image``."""
    from spython.main import get_client
    from spython.main.parse import (
        DockerRecipe,
        SingularityRecipe
    )
    client = get_client()
    client.load(image)
    # Expose the recipe parsers on the client for interactive use.
    client.DockerRecipe = DockerRecipe
    client.SingularityRecipe = SingularityRecipe
    from IPython import embed
    embed()
give the user an ipython shell
def resource_path(opts):
    """Print the full path to a named resource.

    Returns 1 (exit code) for an unknown resource name.
    """
    resources = _load(opts.resources, opts.output_dir)
    if opts.resource_name not in resources:
        sys.stderr.write('Invalid resource name: {}\n'.format(opts.resource_name))
        return 1
    print(resources[opts.resource_name].destination)
Return the full path to a named resource.
def select_subreddit(self):
    """Open the page for the currently highlighted subreddit and remember it."""
    subreddit_name = self.get_selected_item()['name']
    self.selected_page = self.open_subreddit_page(subreddit_name)
Store the selected subreddit and return to the subreddit page
def scroll(self, delta_x, delta_y):
    """Shift the framebuffer contents by (delta_x, delta_y) pixels.

    The iteration direction depends on the sign of each delta so that
    pixels are never read after they have been overwritten.
    """
    if delta_x < 0:
        shift_x = 0
        xend = self.width + delta_x
        dt_x = 1
    else:
        shift_x = self.width - 1
        xend = delta_x - 1
        dt_x = -1
    if delta_y < 0:
        y = 0
        yend = self.height + delta_y
        dt_y = 1
    else:
        y = self.height - 1
        yend = delta_y - 1
        dt_y = -1
    while y != yend:
        x = shift_x
        while x != xend:
            # Copy each pixel from its pre-scroll location.
            self.format.set_pixel(
                self, x, y, self.format.get_pixel(self, x - delta_x, y - delta_y))
            x += dt_x
        y += dt_y
shifts framebuf in x and y direction
def tickmark(self, x, y, size=10, orientation=90):
    """Draw a line of ``size`` pixels at plot point (x, y).

    ``orientation`` is in degrees from the positive x axis; the minus
    sign on y2 accounts for canvas y growing downward.
    """
    (x1, y1) = self.p2c([x, y])
    x2 = x1 + size * math.cos(math.radians(orientation))
    y2 = y1 - size * math.sin(math.radians(orientation))
    self.create_line(x1, y1, x2, y2)
Draw a line of size and orientation at x,y
def device_selected(self, index):
    """Handler for selecting a device from the list in the UI.

    Enables the connect button once a device is chosen.
    """
    device = self.devicelist_model.itemFromIndex(index)
    # Debug output left in by the original author.
    print(device.device.addr)
    self.btnConnect.setEnabled(True)
Handler for selecting a device from the list in the UI
def visit_arg(self, node, parent):
    """Visit an arg node by returning a fresh AssName instance."""
    return self.visit_assignname(node, parent, node.arg)
visit an arg node by returning a fresh AssName instance
def _discretize_check(self, table, att, col):
    """Replaces the value with an appropriate interval symbol, if available.

    With intervals configured for (table, att), ``col`` maps to one of
    "'=<v'", "'(a;b]'" or "'>v'".  Otherwise the raw value is quoted
    and any square brackets are sanitized to 'I'.
    """
    label = "'%s'" % col
    if table in self.discr_intervals and att in self.discr_intervals[table]:
        intervals = self.discr_intervals[table][att]
        n_intervals = len(intervals)
        prev_value = None
        for i, value in enumerate(intervals):
            if i > 0:
                prev_value = intervals[i - 1]
            if not prev_value and col <= value:
                # Below (or at) the first boundary.
                label = "'=<%.2f'" % value
                break
            elif prev_value and col <= value:
                label = "'(%.2f;%.2f]'" % (prev_value, value)
                break
            elif col > value and i == n_intervals - 1:
                # Above the last boundary.
                label = "'>%.2f'" % value
                break
    else:
        # No intervals known: sanitize brackets in the raw value.
        # NOTE(review): the else-attachment (outer if vs. loop) was
        # ambiguous in the flattened source -- confirm upstream.
        label = label.replace('[', 'I')
        label = label.replace(']', 'I')
    return label
Replaces the value with an appropriate interval symbol, if available.
def _lint():
    """Run flake8 over the project's Python files and return its exit code."""
    project_python_files = [filename for filename in get_project_files()
                            if filename.endswith(b'.py')]
    retcode = subprocess.call(
        ['flake8', '--max-complexity=10'] + project_python_files)
    if retcode == 0:
        print_success_message('No style errors')
    return retcode
Run lint and return an exit code.
def render_django_response(self, **kwargs):
    """Render the graph, and return a Django response with SVG content type."""
    from django.http import HttpResponse
    return HttpResponse(
        self.render(**kwargs), content_type='image/svg+xml'
    )
Render the graph, and return a Django response
def create_checkbox(self, name, margin=10):
    """Create a checkbox labelled ``name`` with ``margin`` pixels of
    right margin."""
    chk_btn = Gtk.CheckButton(name)
    chk_btn.set_margin_right(margin)
    return chk_btn
Function creates a checkbox with his name
def _check_error_response(response, query):
    """Check for default error messages and throw the matching exception.

    HTTP/pool problems raise HTTPTimeoutError, geosearch problems raise
    MediaWikiGeoCoordError, anything else a generic MediaWikiException.
    """
    if "error" in response:
        http_error = ["HTTP request timed out.", "Pool queue is full"]
        geo_error = [
            "Page coordinates unknown.",
            "One of the parameters gscoord, gspage, gsbbox is required",
            "Invalid coordinate provided",
        ]
        err = response["error"]["info"]
        if err in http_error:
            raise HTTPTimeoutError(query)
        elif err in geo_error:
            raise MediaWikiGeoCoordError(err)
        else:
            raise MediaWikiException(err)
check for default error messages and throw correct exception
def generate(env):
    """Add Builders and construction variables for C compilers to an Environment.

    Registers every C source suffix with the static/shared object builders
    and seeds *env* with default CC command lines, flags and gcc-style
    preprocessor/include flag spellings.
    """
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    # Hook every recognized C suffix up to both object builders.
    for suffix in CSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.CAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
    add_common_cc_variables(env)
    # Autodetect a compiler unless the caller already chose one.
    if 'CC' not in env:
        env['CC'] = env.Detect(compilers) or compilers[0]
    env['CFLAGS'] = SCons.Util.CLVar('')
    env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
    # Shared-object builds default to the same compiler and flags.
    env['SHCC'] = '$CC'
    env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
    env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
    # Preprocessor define / include flag spellings (gcc-style).
    env['CPPDEFPREFIX'] = '-D'
    env['CPPDEFSUFFIX'] = ''
    env['INCPREFIX'] = '-I'
    env['INCSUFFIX'] = ''
    env['SHOBJSUFFIX'] = '.os'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
    env['CFILESUFFIX'] = '.c'
Add Builders and construction variables for C compilers to an Environment.
def _nodedev_event_lifecycle_cb(conn, dev, event, detail, opaque):
    """Forward a libvirt node-device lifecycle event onto the Salt event bus."""
    event_name = _get_libvirt_enum_string('VIR_NODE_DEVICE_EVENT_', event)
    data = {
        'nodedev': {'name': dev.name()},
        'event': event_name,
        # NOTE(review): `detail` is not translated for node devices and is
        # reported as a fixed string -- confirm whether that is intentional.
        'detail': 'unknown'
    }
    _salt_send_event(opaque, conn, data)
Node device lifecycle events handler
def which(x):
    """Locate *x* on the ``PATH``, like the Unix ``which`` command.

    Returns the absolute path of the first directory entry whose join with
    *x* exists, or ``None`` when nothing matches.
    """
    # Fix: os.environ.get('PATH') may be None (PATH unset), which would
    # crash on .split(); default to an empty string instead.
    for directory in os.environ.get('PATH', '').split(os.pathsep):
        candidate = os.path.join(directory, x)
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    return None
Same as the ``which`` command on Unix-like systems.
def save(self, fname: str):
    """Pickle this training state to the file *fname* (binary mode)."""
    with open(fname, "wb") as handle:
        pickle.dump(self, handle)
Saves this training state to fname.
def update_project(config, task_presenter, results, long_description, tutorial, watch):
    """Update project templates and information, optionally watching for changes."""
    runner = _update_project_watch if watch else _update_project
    res = runner(config, task_presenter, results, long_description, tutorial)
    click.echo(res)
Update project templates and information.
def load(self, key=None):
    """Read a pickled object representation from the open file.

    *key* names the stored object holding the pickle stream; the default
    ``'_pickle'`` presumably matches what the companion pickler writes --
    confirm against the Pickler counterpart.  Returns the unpickled object.
    """
    if key is None:
        key = '_pickle'
    obj = None
    if _compat_hooks:
        # The first hook sets up compatibility state; its return value is
        # handed to the second hook on the way out (see finally below).
        save = _compat_hooks[0]()
    try:
        # Each call consumes the next numbered cycle 'key;n' of the stored
        # object (ROOT-style key cycles).
        self.__n += 1
        s = self.__file.Get(key + ';{0:d}'.format(self.__n))
        self.__io.setvalue(s.GetName())
        if sys.version_info[0] < 3:
            # Python 2: pickle.Unpickler is an old-style class, so super()
            # cannot be used here.
            obj = pickle.Unpickler.load(self)
        else:
            obj = super(Unpickler, self).load()
        self.__io.reopen()
    finally:
        if _compat_hooks:
            # Tear down compatibility state even when unpickling fails.
            save = _compat_hooks[1](save)
    return obj
Read a pickled object representation from the open file.
def add_query_kwargs(kwargs, visitor, constraints, index):
    """Populate *kwargs* with KeyConditionExpression / FilterExpression data.

    The key condition for *index* is always added; a filter expression and
    the index name are added only when applicable (``"TABLE"`` means no
    secondary index is used).
    """
    key_const, residual = constraints.remove_index(index)
    kwargs["key_condition_expr"] = key_const.build(visitor)
    if residual:
        kwargs["filter"] = residual.build(visitor)
    if index.name != "TABLE":
        kwargs["index"] = index.name
Construct KeyConditionExpression and FilterExpression
def delete(self, *args):
    """Remove the currently selected item from my store.

    The key comes from the name widget (text, falling back to hint text).
    Returns the next key after the deleted one in sort order, or ``'+'``
    when none follows; absent keys are a silent no-op.
    """
    key = self.name_wid.text or self.name_wid.hint_text
    if not hasattr(self.store, key):
        return
    delattr(self.store, key)
    later = [kee for kee in dir(self.store) if kee > key]
    if later:
        return min(later)
    return '+'
Remove the currently selected item from my store
def check_cache(self, template):
    """Cache *template* exactly once; repeated calls are no-ops."""
    if template in self.cached:
        return
    self.cache_file(template)
    self.cached.append(template)
Cache a file only once
def _create_request_record(self, identifier, rtype, name, content, ttl, priority): record = collections.OrderedDict() if identifier is not None: record['id'] = identifier record['type'] = rtype if name is not None: record['name'] = self._relative_name(name) if content is not None: record['content'] = content if ttl is not None: record['ttl'] = ttl if priority is not None: record['prio'] = priority return record
Creates record for Subreg API calls
def GenerateCSRFToken(user_id, time):
    """Generates a CSRF token based on a secret key, id and time.

    When *time* is falsy the current time in microseconds since the epoch
    is used.  The token is the urlsafe base64 of
    ``HMAC-SHA256(user_id | delim | time) | delim | time`` without padding.
    """
    precondition.AssertType(user_id, Text)
    precondition.AssertOptionalType(time, int)

    time = time or rdfvalue.RDFDatetime.Now().AsMicrosecondsSinceEpoch()

    secret = config.CONFIG.Get("AdminUI.csrf_secret_key", None)
    if not secret:
        # Fall back to the legacy key name for older configurations.
        secret = config.CONFIG["AdminUI.django_secret_key"]

    mac = hmac.new(secret.encode("ascii"), digestmod=hashlib.sha256)
    for part in (user_id.encode("ascii"), CSRF_DELIMITER,
                 str(time).encode("ascii")):
        mac.update(part)

    raw = b"%s%s%d" % (mac.digest(), CSRF_DELIMITER, time)
    return base64.urlsafe_b64encode(raw).rstrip(b"=")
Generates a CSRF token based on a secret key, id and time.
def _on_move(self, event):
    """Move the selection cursor to follow the mouse and update the angle."""
    width = self.winfo_width()
    # Clamp the x coordinate into the widget's horizontal extent.
    x = max(0, min(event.x, width))
    self.coords('cursor', x, 0, x, self.winfo_height())
    self._variable.set(round2((360. * x) / width))
Make selection cursor follow the cursor.
def set(self, folder: str, subscribed: bool) -> None:
    """Mark *folder* as subscribed (``True``) or unsubscribed (``False``)."""
    action = self.add if subscribed else self.remove
    action(folder)
Set the subscribed status of a folder.
def by_sql(cls, sql, engine_or_session):
    """Query this model with a raw SQL statement (string or ``text()``).

    When an engine is passed, ``ensure_session`` creates a temporary
    session that is closed before returning.
    """
    ses, auto_close = ensure_session(engine_or_session)
    try:
        return ses.query(cls).from_statement(sql).all()
    finally:
        # Fix: close sessions we created ourselves even when the query
        # raises -- previously an exception leaked the session.
        if auto_close:
            ses.close()
Query with sql statement or texture sql.
def check_status_mapping(self):
    """Validate ``self.status_mapping`` against the database and normalize it.

    Every mapped value (a slug string or a primary key) is resolved to a
    ``Status`` row; the mapping value is replaced by the row's pk.  Raises
    ``ImproperlyConfigured`` when a referenced status does not exist.
    """
    self.verbose('checking status mapping...')
    if not self.status_mapping:
        self.message('no status mapping found')
        return
    # NOTE: Python 2 idioms (iteritems, basestring) -- this module targets py2.
    for old_val, new_val in self.status_mapping.iteritems():
        try:
            # Strings are looked up by slug, anything else by primary key.
            if isinstance(new_val, basestring):
                lookup = {'slug': new_val}
            else:
                lookup = {'pk': new_val}
            status = Status.objects.get(**lookup)
            # Normalize to primary keys; assigning to an existing key while
            # iterating a dict is safe (no resize occurs).
            self.status_mapping[old_val] = status.id
        except Status.DoesNotExist:
            raise ImproperlyConfigured('Error! Status with slug %s not found in the database' % new_val)
    self.verbose('status map correct')
Ensure the status mapping does not contain status values that are absent from the database.
def reverse(array):
    """Return a new numpy array holding *array*'s elements in reverse order."""
    return _n.array(list(array)[::-1])
Return a reversed copy of the input as a numpy array.
def partition(self):
    """For partition urltypes, return the partition specified by ``self.url``;
    any other urltype yields ``None``."""
    if self.urltype == 'partition':
        return self._bundle.library.partition(self.url)
    return None
For partition urltypes, return the partition specified by the ref
def notify_block_new(self, block):
    """Announce a newly received block that passed initial consensus validation."""
    serialized = block.SerializeToString()
    self._notify(
        "consensus_notifier_notify_block_new",
        serialized,
        len(serialized))
A new block was received and passed initial consensus validation
def options(self, parser, env=os.environ): "Add options to nosetests." parser.add_option("--%s-record" % self.name, action="store", metavar="FILE", dest="record_filename", help="Record actions to this file.") parser.add_option("--%s-playback" % self.name, action="store", metavar="FILE", dest="playback_filename", help="Playback actions from this file.")
Add options to nosetests.