code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def unused(self, _dict):
    """Remove entries whose value is None from *_dict*, in place.

    Args:
        _dict: mapping to prune; it is mutated directly.

    Returns:
        The same (mutated) mapping, for convenience.
    """
    # Collect the doomed keys first: deleting while iterating .items()
    # raises RuntimeError ("dictionary changed size during iteration")
    # on Python 3.
    for key in [k for k, v in _dict.items() if v is None]:
        del _dict[key]
    return _dict
Remove empty parameters from the dict
def delete_all_but_self(self): prefix = self.settings.alias name = self.settings.index if prefix == name: Log.note("{{index_name}} will not be deleted", index_name= prefix) for a in self.cluster.get_aliases(): if re.match(re.escape(prefix) + "\\d{8}_\\d{6}", a.in...
DELETE ALL INDEXES WITH GIVEN PREFIX, EXCEPT name
def _slice(self, view): if self._data is not None: return self._data[view] return self._proxy.get_view(self.id, view)
Send view to remote server and do slicing there.
def validate_link(link_data):
    """Check that the model named in ``link_data`` exists and that an
    instance with the given primary key can be found.

    Args:
        link_data: mapping with ``'model'`` ("app_label.ModelName") and
            ``'pk'`` keys.

    Raises:
        ValidationError: when the referenced object does not exist.
    """
    from django.apps import apps

    # Resolve the model class outside the try block: in the original,
    # a failure inside get_model() left `Model` unassigned, so the
    # `except Model.DoesNotExist` clause itself raised UnboundLocalError.
    Model = apps.get_model(*link_data['model'].split('.'))
    try:
        Model.objects.get(pk=link_data['pk'])
    except Model.DoesNotExist:
        raise ValidationError(_("Unable to link onto '{0}'.").format(Model.__name__))
Check if the given model exists, otherwise raise a Validation error
def parse_pseudo_open(self, sel, name, has_selector, iselector, index): flags = FLG_PSEUDO | FLG_OPEN if name == ':not': flags |= FLG_NOT if name == ':has': flags |= FLG_RELATIVE sel.selectors.append(self.parse_selectors(iselector, index, flags)) has_selec...
Parse pseudo with opening bracket.
def check_hex_chain(chain):
    """Verify a Merkle chain whose hashes are hex encoded.

    Each entry's hash is hex-decoded, the chain is checked via
    check_chain(), and the reproduced Merkle root is returned hex
    encoded."""
    decoded = [(codecs.decode(entry[0], 'hex_codec'), entry[1]) for entry in chain]
    root = check_chain(decoded)
    return codecs.encode(root, 'hex_codec')
Verify a merkle chain, with hashes hex encoded, to see if the Merkle root can be reproduced.
def dispatch_request(self, *args, **kwargs): self.args = args self.kwargs = kwargs self.meth = request.method.lower() self.resource = current_app.blueprints.get(request.blueprint, None) if not any([self.meth in self.methods, self.meth.upper() in self.methods]): return...
Dispatch the incoming HTTP request to the appropriate handler.
def prepare_notes(self, *notes, **keyword_notes): __partial = keyword_notes.pop('__partial', False) args = tuple(self.get(note) for note in notes) kwargs = {} for arg in keyword_notes: note = keyword_notes[arg] if isinstance(note, tuple) and len(note) == 2 and not...
Get injection values for all given notes.
def create(ctx, data):
    """Create a new SWAG item for every account record in *data*.

    *data* is a readable file-like object containing a JSON list of
    account records; ``ctx.dry_run`` is forwarded to each create call.
    """
    swag = create_swag_from_ctx(ctx)
    accounts = json.loads(data.read())
    for account in accounts:
        swag.create(account, dry_run=ctx.dry_run)
Create a new SWAG item.
def setImageMode(self): if self._version_server == 3.889: self.setPixelFormat( bpp = 16, depth = 16, bigendian = 0, truecolor = 1, redmax = 31, greenmax = 63, bluemax = 31, redshift = 11, greenshift = 5, blueshift = 0 ) ...
Extracts color ordering and 24 vs. 32 bpp info out of the pixel format information
def play_env_problem_randomly(env_problem, num_steps): env_problem.reset() for _ in range(num_steps): actions = np.stack([env_problem.action_space.sample() for _ in range( env_problem.batch_size)]) _, _, dones, _ = env_problem.step(actions) env_problem.reset(indices...
Plays the env problem by randomly sampling actions for `num_steps`.
def encode(x, x_space, hparams, name): with tf.variable_scope(name): (encoder_input, encoder_self_attention_bias, ed) = transformer.transformer_prepare_encoder(x, x_space, hparams) encoder_input = tf.nn.dropout(encoder_input, 1.0 - hparams.dropout) return transformer.transformer_encoder( enco...
Transformer preparations and encoder.
def create_query_constraint(): op = oneOf("= < > >= <= != <>", caseless=True).setName("operator") basic_constraint = (var + op + var_val).setResultsName("operator") between = ( var + Suppress(upkey("between")) + value + Suppress(and_) + value ).setResultsName("between") is_in = (var + Suppre...
Create a constraint for a query WHERE clause
def dd2dm(dd):
    """Convert decimal degrees to degrees and decimal minutes."""
    d,m,s = dd2dms(dd)
    # NOTE(review): to fold seconds into decimal minutes this should
    # presumably be s/60 (s/3600 converts seconds to degrees, not
    # minutes) -- TODO confirm against dd2dms()'s return convention.
    m = m + float(s)/3600
    # NOTE(review): s is returned unchanged even though its value was
    # just folded into m, and the docstring promises only (degrees,
    # decimal minutes); `return d, m` looks intended, but dropping the
    # third element would break callers that unpack three values.
    return d,m,s
Convert decimal to degrees, decimal minutes
def _url(self, url, file_upload=False): host = self.api_url if file_upload: host = self.uploads_api_url protocol = 'https' if self.https else 'http' if url.endswith('/'): url = url[:-1] return '{0}://{1}/{2}'.format( protocol, host,...
Creates the request URL.
def print_new(ctx, name, migration_type):
    """Print the filename that a newly generated migration would get."""
    filename = ctx.obj.repository.generate_migration_name(name, migration_type)
    click.echo(filename)
Prints filename of a new migration
def clean():
    """Delete all queues, jobs and persons created by this script."""
    # Same deletion order as before: queues, then jobs, then persons.
    for model in (MyQueue, MyJob, Person):
        for obj in model.collection().instances():
            obj.delete()
Clean data created by this script
def openFile(self, openDQ=False): if self._im.closed: if not self._dq.closed: self._dq.release() assert(self._dq.closed) fi = FileExtMaskInfo(clobber=False, doNotOpenDQ=not openDQ, im_fmode=self...
Open file and set up filehandle for image file
def check_input_files(headerDir, sourceDir, containers=['vector', 'list', 'set', 'map'], seqType='both', verbose=False): result1 = False if seqType == "both" or seqType == "variadic": if verbose: print "Check if input files for pre-processing Boost.MPL variadic containe...
Checks if source- and header-files, used as input when pre-processing MPL-containers, need fixing.
def pre_sql_setup(self): super(MultilingualSQLCompiler, self).pre_sql_setup() if not self.query.include_translation_data: return opts = self.query.model._meta qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name if hasattr(opts, 'translation_...
Adds the JOINS and SELECTS for fetching multilingual data.
def getreference(self, validate=True): if self.offset is None: return None if self.ref: ref = self.doc[self.ref] else: ref = self.finddefaultreference() if not ref: raise UnresolvableTextContent("Default reference for phonetic content not found!") ...
Return and validate the Phonetic Content's reference. Raises UnresolvableTextContent when invalid
def _register(self, session, url): dist = self._poetry.file.parent / "dist" file = dist / "{}-{}.tar.gz".format( self._package.name, normalize_version(self._package.version.text) ) if not file.exists(): raise RuntimeError('"{0}" does not exist.'.format(file.name))...
Register a package to a repository.
def restore_default_settings():
    """Restore the global settings object to its default values."""
    global __DEFAULTS
    # Copy each known default back onto the live settings object.
    for attr in ('CACHE_DIR', 'SET_SEED', 'SEED'):
        setattr(__DEFAULTS, attr, getattr(defaults, attr))
    logging.info('Settings reverted to their default values.')
Restore settings to default values.
def askopenfile(mode="r", **options): "Ask for a filename to open, and returned the opened file" filename = askopenfilename(**options) if filename: return open(filename, mode) return None
Ask for a filename to open, and return the opened file
def dict_from_hdf5(dict_like, h5group):
    """Load a dictionary-like object from an HDF5 group: every attribute
    of *h5group* is copied into *dict_like* (mutated in place)."""
    for attr_name, attr_value in h5group.attrs.items():
        dict_like[attr_name] = attr_value
Load a dictionary-like object from an h5 file group
def _pprint(dic):
    """Print each key/value pair of *dic* with one indentation level."""
    for entry in dic.items():
        print("    {0}: {1}".format(*entry))
Prints a dictionary with one indentation level
def node2freqt(docgraph, node_id, child_str='', include_pos=False, escape_func=FREQT_ESCAPE_FUNC): node_attrs = docgraph.node[node_id] if istoken(docgraph, node_id): token_str = escape_func(node_attrs[docgraph.ns+':token']) if include_pos: pos_str = escape_func(node_at...
convert a docgraph node into a FREQT string.
def after_start_check(self): try: conn = HTTPConnection(self.host, self.port) conn.request('HEAD', self.url.path) status = str(conn.getresponse().status) if status == self.status or self.status_re.match(status): conn.close() return ...
Check if defined URL returns expected status to a HEAD request.
def map_equal_contributions(contributors): equal_contribution_map = {} equal_contribution_keys = [] for contributor in contributors: if contributor.get("references") and "equal-contrib" in contributor.get("references"): for key in contributor["references"]["equal-contrib"]: ...
assign numeric values to each unique equal-contrib id
def build_thumb_path(self, image): image_file = image.file image_name_w_ext = split(image.name)[-1] image_name, ext = splitext(image_name_w_ext) if not self.in_memory(image_file): image_name = image_name.split('/')[-1] upload_to = image.field.upload_to if not ...
Build the absolute path of the to-be-saved thumbnail.
def _find_already_built_wheel(metadata_directory): if not metadata_directory: return None metadata_parent = os.path.dirname(metadata_directory) if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)): return None whl_files = glob(os.path.join(metadata_parent, '*.whl')) if n...
Check for a wheel already built during the get_wheel_metadata hook.
def userCreate(self, request, tag): userCreator = liveform.LiveForm( self.createUser, [liveform.Parameter( "localpart", liveform.TEXT_INPUT, unicode, "localpart"), liveform.Parameter( ...
Render a form for creating new users.
def _process_wave_param(self, pval):
    """Process a single model parameter that represents a wavelength,
    converting it to the internal wavelength unit with spectral
    equivalencies enabled."""
    equivalencies = u.spectral()
    return self._process_generic_param(
        pval, self._internal_wave_unit, equivalencies=equivalencies)
Process individual model parameter representing wavelength.
def _get_focused_item(self): focused_model = self._selection.focus if not focused_model: return None return self.canvas.get_view_for_model(focused_model)
Returns the currently focused item
def auth(self):
    """Return credentials for the current Bitbucket user: the OAuth
    object when configured, else a (username, password) tuple."""
    return self.oauth or (self.username, self.password)
Return credentials for current Bitbucket user.
def prepend_field(self, field_name, list_value):
    """Return a copy of this object with *list_value* prepended to the
    list field named *field_name*."""
    operation = self._single_list_field_operation
    return operation(field_name, list_value, prepend=True)
Return a copy of this object with `list_value` prepended to the field named `field_name`.
def _setupHttp(self): if self._http == None: http = httplib2.Http() self._http = self._credentials.authorize(http)
Setup an HTTP session authorized by OAuth2.
def getLeastUsedCell(self, c): segmentsPerCell = numpy.zeros(self.cellsPerColumn, dtype='uint32') for i in range(self.cellsPerColumn): segmentsPerCell[i] = self.getNumSegmentsInCell(c,i) cellMinUsage = numpy.where(segmentsPerCell==segmentsPerCell.min())[0] self._random.getUInt32(len(cellMinUsage))...
For the least used cell in a column
def render_app_label(context, app, fallback=""):
    """Render the application label.

    *app* may be a mapping carrying an ``app_label`` key or a bare
    label. Returns *fallback* when the mapping lacks the key, and *app*
    itself when it is not subscriptable.
    """
    try:
        return app['app_label']
    except KeyError:
        return fallback
    except TypeError:
        return app
Render the application label.
def _AcquireLock(self, urn, blocking=None, blocking_lock_timeout=None, lease_time=None, blocking_sleep_interval=None): if urn is None: raise ValueError("URN cannot be None") urn = rdfvalue.RDFURN(urn) try: ...
This actually acquires the lock for a given URN.
def _extract_obo_relation(cls, rawterm):
    """Extract the relationships defined in *rawterm*.

    Only ``subClassOf`` is recognised; it is popped from *rawterm* and
    mapped to an ``is_a`` Relationship whose targets are the extracted
    ids."""
    relations = {}
    if 'subClassOf' in rawterm:
        targets = [cls._get_id_from_url(url)
                   for url in rawterm.pop('subClassOf')]
        relations[Relationship('is_a')] = targets
    return relations
Extract the relationships defined in the rawterm.
def strict_err_or_warn(self, *args, **kwargs):
    """In strict mode raise a CoconutStyleError; otherwise log the same
    condition as a CoconutSyntaxWarning."""
    if not self.strict:
        logger.warn_err(self.make_err(CoconutSyntaxWarning, *args, **kwargs))
    else:
        raise self.make_err(CoconutStyleError, *args, **kwargs)
Raises an error if in strict mode, otherwise raises a warning.
def _get(self, url, params=None): self._call(self.GET, url, params, None)
Wrapper method for GET calls.
def load_table(self, table): region = table.database if table.database else self.default_region resource_name, collection_name = table.table.split('_', 1) boto_region_name = region.replace('_', '-') resource = self.boto3_session.resource(resource_name, region_name=boto_region_name) ...
Load resources as specified by given table into our db.
def rpy2(): if LazyImport.rpy2_module is None: try: rpy2 = __import__('rpy2.robjects') except ImportError: raise ImportError('The rpy2 module is required') LazyImport.rpy2_module = rpy2 try: rpy2.forecast = rpy2.robj...
Lazily import the rpy2 module
def blend_vars(secrets, opt):
    """Blend secret and static variables into one template mapping.

    Static vars are loaded from *opt*, merged with *secrets*, falsy
    values are dropped, and a copy of the result is exposed under the
    ``aomi_items`` key."""
    merged = merge_dicts(load_vars(opt), secrets)
    template_obj = {k: v for k, v in iteritems(merged) if v}
    template_obj['aomi_items'] = template_obj.copy()
    return template_obj
Blends secret and static variables together
def _check_sample_config(items, in_file, config): logger.info("Checking sample YAML configuration: %s" % in_file) _check_quality_format(items) _check_for_duplicates(items, "lane") _check_for_duplicates(items, "description") _check_for_degenerate_interesting_groups(items) _check_for_batch_clashes...
Identify common problems in input sample configuration files.
def safeprint(*args, **kwargs):
    """Print *args* after re-coding each one so that characters the
    current stdout encoding cannot represent are dropped."""
    encoding = getattr(sys.stdout, 'encoding', None) or 'ascii'
    safe_args = [s.encode('utf-8').decode(encoding, 'ignore') for s in args]
    return print(*safe_args, **kwargs)
Convert and print characters using the proper encoding
def blur(self):
    """Remove focus from this sprite if its scene currently focuses it."""
    current = self.get_scene()
    if not current:
        return
    if current._focus_sprite == self:
        current._focus_sprite = None
removes focus from the current element if it has it
def bytes2num(s):
    """Interpret *s* (MSB-first bytes) as an unsigned integer."""
    result = 0
    for byte in bytearray(s):
        # Shift the accumulator left one byte and fold in the next one.
        result = (result << 8) | byte
    return result
Convert MSB-first bytes to an unsigned integer.
def result(self):
    """Return the epoch result: all frozen metrics plus the current
    epoch index under ``'epoch_idx'``."""
    final = {'epoch_idx': self.global_epoch_idx}
    final.update(self.frozen_results)
    return final
Return the epoch result
def construct_asset_path(self, asset_path, css_path, output_filename, variant=None): public_path = self.absolute_path(asset_path, os.path.dirname(css_path).replace('\\', '/')) if self.embeddable(public_path, variant): return "__EMBED__%s" % public_path if not posixpath.isabs(asset_pa...
Return a rewritten asset URL for a stylesheet
def create_bucket(self): bucket_exists = self._bucket_exists() if self.s3props.get('shared_bucket_target'): if bucket_exists: LOG.info('App uses shared bucket - %s ', self.bucket) else: LOG.error("Shared bucket %s does not exist", self.bucket) ...
Create or update bucket based on app name.
def _maybe_nest_bare_single(items_by_key, parallel): if (parallel == "multi-parallel" and (sum([1 for x in items_by_key.values() if not _is_nested_item(x)]) >= sum([1 for x in items_by_key.values() if _is_nested_item(x)]))): out = {} for k, v in items_by_key.items(): ...
Nest single inputs to avoid confusing single items and lists like files.
def run(): server_address = (args.listen_addr, args.listen_port) httpd = YHSM_VALServer(server_address, YHSM_VALRequestHandler) my_log_message(args, syslog.LOG_INFO, "Serving requests to 'http://%s:%s%s' (YubiHSM: '%s')" \ % (args.listen_addr, args.listen_port, args.serve_url, args.de...
Start the BaseHTTPServer and serve requests forever.
def _augment(graph, capacity, flow, source, target): n = len(graph) A = [0] * n augm_path = [None] * n Q = deque() Q.append(source) augm_path[source] = source A[source] = float('inf') while Q: u = Q.popleft() for v in graph[u]: cuv = capacity[u][v] ...
find a shortest augmenting path
def _fill_col_borders(self): first = True last = True if self.col_indices[0] == self.hcol_indices[0]: first = False if self.col_indices[-1] == self.hcol_indices[-1]: last = False for num, data in enumerate(self.tie_data): self.tie_data[num] = s...
Add the first and last column to the data by extrapolation.
def process_upload(self, set_content_type=True): metadata = self.get_upload_key_metadata() if set_content_type: content_type = self.get_upload_content_type() metadata.update({b'Content-Type': b'{0}'.format(content_type)}) upload_key = self.get_upload_key() process...
Process the uploaded file.
def list_rocs_files(url=ROCS_URL): soup = BeautifulSoup(get(url)) if not url.endswith('/'): url += '/' files = [] for elem in soup.findAll('a'): if elem['href'].startswith('?'): continue if elem.string.lower() == 'parent directory': continue files....
Gets the contents of the given url.
def on_number(self, ctx, value): value = int(value) if value.isdigit() else float(value) top = self._stack[-1] if top is JSONCompositeType.OBJECT: self.fire(JSONStreamer.VALUE_EVENT, value) elif top is JSONCompositeType.ARRAY: self.fire(JSONStreamer.ELEMENT_EVENT,...
Since this is defined both integer and double callbacks are useless
def _parse_args(): token_file = os.path.expanduser('~/.nikeplus_access_token') parser = argparse.ArgumentParser(description='Export NikePlus data to CSV') parser.add_argument('-t', '--token', required=False, default=None, help=('Access token for API, can also store in file %s' ...
Parse sys.argv arguments
def initialise_logging(level: str, target: str, short_format: bool): try: log_level = getattr(logging, level) except AttributeError: raise SystemExit( "invalid log level %r, expected any of 'DEBUG', 'INFO', 'WARNING', 'ERROR' or 'CRITICAL'" % level ) handler = create_hand...
Initialise basic logging facilities
def run(self): if (self.repo in self.meta.default_repositories and self.repo in self.meta.repositories): try: self.check = self.all_repos[self.repo]() except OSError: usage(self.repo) raise SystemExit() elif self.rep...
Run and check if new in ChangeLog.txt
def _get_fieldnames(item): if hasattr(item, "_fldsdefprt"): return item.get_prtflds_all() if hasattr(item, "_fields"): return item._fields
Return fieldnames of either a namedtuple or GOEnrichmentRecord.
def add_interface_router(self, router, body=None):
    """Add an internal network interface to the specified router."""
    url = (self.router_path % router) + "/add_router_interface"
    return self.put(url, body=body)
Adds an internal network interface to the specified router.
def hsetnx(self, key, field, value):
    """Set the value of hash *field*, only if the field does not yet
    exist."""
    command = b'HSETNX'
    return self.execute(command, key, field, value)
Set the value of a hash field, only if the field does not exist.
def hmsToDeg(h, m, s):
    """Convert an RA given as hours, minutes, seconds to degrees."""
    # Same left-to-right accumulation order as the original expression.
    total = h * degPerHMSHour
    total = total + m * degPerHMSMin
    total = total + s * degPerHMSSec
    return total
Convert RA hours, minutes, seconds into an angle in degrees.
def GetSource(self, row, col, table=None):
    """Return the source string of the cell at (row, col, table).

    Defaults to the grid's current table; missing cells map to u"".
    """
    if table is None:
        table = self.grid.current_table
    value = self.code_array((row, col, table))
    return u"" if value is None else value
Return the source string of a cell
def search_datasets( self, license=None, format=None, query=None, featured=None, owner=None, organization=None, badge=None, reuses=None, page_size=20, x_fields=None, ): payload = {"badge": badge, "size": page_size, "X-Fi...
Search datasets within uData portal.
def dispatch(self, operation, request, **path_args): request.current_operation = operation try: for middleware in self.middleware.pre_request: response = middleware(request, path_args) if isinstance(response, HttpResponse): return response ...
Dispatch incoming request and capture top level exceptions.
def do_disconnect(self, arg):
    """Disconnect from the arm, reporting an error when it is already
    disconnected."""
    if self.arm.is_connected():
        self.arm.disconnect()
        print(self.style.success('Success: ', 'Disconnected.'))
    else:
        print(self.style.error('Error: ', 'Arm is already disconnected.'))
Disconnect from the arm.
def __distinguished_name(self, type, fname=None, lname=None, username=None): if username is None: uid = "uid={}".format(self.username) else: uid = "uid={}".format(username) dn_list = [ uid, "ou={}".format(self.__organiz...
Assemble the DN of the user.
def reward_goal(self): if not 'goal' in self.mode: return mode = self.mode['goal'] if mode and mode['reward'] and self.__test_cond(mode): if mode['reward'] > 0: self.logger.info("Escaped!!") self.player.stats['reward'] += mode['reward'] ...
Add an end goal reward
def _parse_file_spec(self, spec): if '*' in spec['file']: expanded_paths = _expand_paths(spec['file']) if not expanded_paths: return [] expanded_specs = [] for p in expanded_paths: _spec = copy.copy(spec) _spec['file...
Separate wildcard specs into more specs
def _add_or_remove_flag(self, flag, add): meth = self.add_flag if add else self.remove_flag meth(flag)
Add the given `flag` if `add` is True, remove it otherwise.
def from_key(cls, container, key): if key is None: raise errors.NoObjectException return cls(container, name=key.name, size=key.size, content_type=key.content_type, content_encoding=key.content_encoding, ...
Create from key object.
def file_signature(filename):
    """Return the SHA-1 hex digest of *filename*'s contents, or None
    when it is not an existing regular file.

    The previous os.path.exists() check was redundant -- isfile()
    already implies existence -- and the file was read whole into
    memory; it is now hashed in fixed-size chunks.
    """
    if not os.path.isfile(filename):
        return None
    sig = hashlib.sha1()
    with open(filename, "rb") as f:
        # Hash in 64 KiB chunks so large files need not fit in memory.
        for block in iter(lambda: f.read(65536), b""):
            sig.update(block)
    return sig.hexdigest()
Return a signature for a file.
def make_tensor_value_info( name, elem_type, shape, doc_string="", shape_denotation=None, ): value_info_proto = ValueInfoProto() value_info_proto.name = name if doc_string: value_info_proto.doc_string = doc_string tensor_type_proto = value_info_proto.type....
Makes a ValueInfoProto based on the data type and shape.
def change_approver_email_address(self, order_id, approver_email):
    """Change the approver email address for an ordered SSL
    certificate; return the order id from the response."""
    request_body = E.changeApproverEmailAddressSslCertRequest(
        E.id(order_id),
        E.approverEmail(approver_email),
    )
    response = self.request(request_body)
    return int(response.data.id)
Change the approver email address for an ordered SSL certificate.
def readBatchTupleQuotes(self, symbols, start, end): if end is None: end=sys.maxint ret={} session=self.getReadSession()() try: symbolChunks=splitListEqually(symbols, 100) for chunk in symbolChunks: rows=session.query(Quote.symb...
read batch quotes as tuple to save memory
def instruction_PAGE(self, opcode):
    """Execute an op from page 2 or 3: fetch the second opcode byte and
    dispatch the combined 16-bit opcode at the page-prefix address."""
    op_address, second_byte = self.read_pc_byte()
    combined_opcode = (opcode << 8) + second_byte
    self.call_instruction_func(op_address - 1, combined_opcode)
call op from page 2 or 3
def _nanmean_ddof_object(ddof, value, axis=None, **kwargs): from .duck_array_ops import (count, fillna, _dask_or_eager_func, where_method) valid_count = count(value, axis=axis) value = fillna(value, 0) dtype = kwargs.pop('dtype', None) if dtype is None and value.dtyp...
In house nanmean. ddof argument will be used in _nanvar method
def update_hpolist(self, case_obj): hpo_list = self.case_genelist(case_obj) hpo_results = hpo_genes(case_obj.phenotype_ids(), *self.phenomizer_auth) if hpo_results is None: pass else: gene_ids = [result['gene_id'] for result in hpo_...
Update the HPO gene list for a case based on current terms.
def content_type(self) -> Optional[ContentTypeHeader]:
    """The ``Content-Type`` header, or None when absent or empty."""
    try:
        first = self[b'content-type'][0]
    except (KeyError, IndexError):
        return None
    return cast(ContentTypeHeader, first)
The ``Content-Type`` header.
def cmap2pixmap(cmap, steps=50): import numpy as np inds = np.linspace(0, 1, steps) n = len(cmap.clst) - 1 tups = [cmap.clst[int(x * n)] for x in inds] rgbas = [QColor(int(r * 255), int(g * 255), int(b * 255), 255).rgba() for r, g, b in tups] im = QImage(steps, 1, QImage.Form...
Convert a Ginga colormap into a QPixmap
def fixup_parents(self, node, parent): start, finish = 0, self.last_finish needs_range = not hasattr(node, 'start') if not hasattr(node, 'parent'): node.parent = parent for n in node: if needs_range and hasattr(n, 'start'): if n.start < start: star...
Make sure each node has a parent
def unread(self, include_deleted=False):
    """Return only unread items in the current queryset; when soft
    delete is enabled and *include_deleted* is false, soft-deleted
    items are excluded as well."""
    criteria = {'unread': True}
    if is_soft_delete() and not include_deleted:
        criteria['deleted'] = False
    return self.filter(**criteria)
Return only unread items in the current queryset
def run_forever(self, **kwargs):
    """Run the motor until another command is sent.

    Keyword arguments are applied as attributes before the
    COMMAND_RUN_FOREVER command is issued.
    """
    for name, value in kwargs.items():
        setattr(self, name, value)
    self.command = self.COMMAND_RUN_FOREVER
Run the motor until another command is sent.
def seconds(num):
    """Pause for *num* seconds by waiting until an absolute deadline
    computed from the current time."""
    deadline = pytime.time() + num
    until(deadline)
Pause for this many seconds
def shift(self, next_state, token_type, value, lineno, column):
    """Shift a token: append a new leaf node to the current node and
    advance the top-of-stack entry to *next_state*."""
    dfa, _old_state, node = self.stack[-1]
    node.children.append(Node(token_type, value, None, lineno, column))
    self.stack[-1] = (dfa, next_state, node)
Shift a non-terminal and prepare for the next state.
def queued_spans(self):
    """Drain the queue non-blockingly and return every span it held."""
    drained = []
    while True:
        try:
            drained.append(self.queue.get(False))
        except queue.Empty:
            return drained
Get all of the spans in the queue
def enter_state(self, request, application): authorised_persons = self.get_email_persons(application) link, is_secret = self.get_request_email_link(application) emails.send_request_email( self.authorised_text, self.authorised_role, authorised_persons, ...
This is becoming the new current state.
def network_details(): ipv4_addresses = [ info[4][0] for info in socket.getaddrinfo( socket.gethostname(), None, socket.AF_INET ) ] ipv4_addresses.extend( info[4][0] for info in socket.getaddrinfo("localhost", None, sock...
Returns details about the network links
def _indent_decor(lbl): def closure_indent(func): if util_arg.TRACE: @ignores_exc_tb(outer_wrapper=False) def wrp_indent(*args, **kwargs): with util_print.Indenter(lbl): print(' ...trace[in]') ret = func(*args, **kwargs) ...
does the actual work of indent_func
def choose(s, possibilities, threshold=.6): if not possibilities: return None if s in possibilities: return s if s == '': return None startswith = [x for x in possibilities if x.lower().startswith(s.lower())] if len(startswith) == 1: return startswith[0] contained = [x for x in possibilities if ...
Returns the closest match to string s if exceeds threshold, else returns None
def loop(self, max_seconds=None): loop_started = datetime.datetime.now() self._is_running = True while self._is_running: self.process_error_queue(self.q_error) if max_seconds is not None: if (datetime.datetime.now() - loop_started).total_seconds() > max_s...
Main loop for the process. This will run continuously until maxiter
def _load(self, scale=1.0): LOG.debug("File: %s", str(self.requested_band_filename)) ncf = Dataset(self.requested_band_filename, 'r') wvl = ncf.variables['wavelength'][:] * scale resp = ncf.variables['response'][:] self.rsr = {'wavelength': wvl, 'response': resp}
Load the SLSTR relative spectral responses
def log(message, type):
    """Write *message* (plus a newline) to stdout for notices and to
    stderr for everything else."""
    if type == 'notice':
        stream = sys.stdout
    else:
        stream = sys.stderr
    stream.write(message + "\n")
Log notices to stdout and errors to stderr
def del_bridge_port(name, port): log('Deleting port {} from bridge {}'.format(port, name)) subprocess.check_call(["ovs-vsctl", "--", "--if-exists", "del-port", name, port]) subprocess.check_call(["ip", "link", "set", port, "down"]) subprocess.check_call(["ip", "link", "set", p...
Delete a port from the named openvswitch bridge
def copy_plan(modeladmin, request, queryset): for plan in queryset: plan_copy = deepcopy(plan) plan_copy.id = None plan_copy.available = False plan_copy.default = False plan_copy.created = None plan_copy.save(force_insert=True) for pricing in plan.planpricing_...
Admin command for duplicating plans preserving quotas and pricings.