code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def remote_exception(exc, tb):
if type(exc) in exceptions:
typ = exceptions[type(exc)]
return typ(exc, tb)
else:
try:
typ = type(exc.__class__.__name__,
(RemoteException, type(exc)),
{'exception_type': type(exc)})
exce... | Metaclass that wraps exception type in RemoteException |
def _repo_url_to_path(self, repo):
    """Convert a `repo` url to a file path for local storage."""
    # Strip the scheme, then flatten the remaining URL into a single
    # filesystem-safe path component.
    for scheme in ('http://', 'https://'):
        repo = repo.replace(scheme, '')
    flattened = repo.replace('/', '_')
    return os.sep.join([self._data_directory, flattened])
def refresh(self, fields=None, **kwargs):
cond = self.c[self._primary_field]==self._key
query = self.filter(cond, **kwargs)
if not fields:
fields = list(self.table.c)
v = query.values_one(*fields)
if not v:
raise NotFound('Instance <{0}:{1}> can not be fou... | Re get the instance of current id |
def update_fw_local_result_str(self, os_result=None, dcnm_result=None,
dev_result=None):
fw_dict = self.get_fw_dict()
if os_result is not None:
fw_dict['os_status'] = os_result
if dcnm_result is not None:
fw_dict['dcnm_status'] = dcnm_re... | Update the FW result in the dict. |
def _parity_interaction(q0: ops.Qid,
q1: ops.Qid,
rads: float,
atol: float,
gate: Optional[ops.Gate] = None):
if abs(rads) < atol:
return
h = rads * -2 / np.pi
if gate is not None:
g = cast(ops.Ga... | Yields a ZZ interaction framed by the given operation. |
def ghz_state(qubits: Union[int, Qubits]) -> State:
    """Return a GHZ state on N qubits."""
    N, qubits = qubits_count_tuple(qubits)
    # Equal superposition of |00...0> and |11...1>.
    amplitude = 1 / sqrt(2)
    ket = np.zeros(shape=[2] * N)
    ket[(0,) * N] = amplitude
    ket[(1,) * N] = amplitude
    return State(ket, qubits)
def complete_shells(line, text, predicate=lambda i: True):
    """Return the shell names to include in the completion."""
    matches = []
    for inst in dispatchers.all_instances():
        name = inst.display_name
        # Offer a shell only if it matches the typed prefix, passes the
        # caller's filter, and is not already present on the line.
        if (name.startswith(text) and predicate(inst)
                and ' ' + name + ' ' not in line):
            matches.append(name + ' ')
    return matches
def getErrorComponent(result, tag):
    """get total error contribution for component with specific tag"""
    # Quadrature sum over the error components whose variable carries `tag`.
    # NOTE(review): each error is doubled before squaring ((error*2)**2);
    # confirm the factor of 2 is intentional and not a typo for error**2.
    total = 0.0
    for var, error in result.error_components().items():
        if var.tag == tag:
            total += (error * 2) ** 2
    return math.sqrt(total)
def cat_hist(val, shade, ax, **kwargs_shade):
bins = get_bins(val)
binned_d, _ = np.histogram(val, bins=bins, normed=True)
bin_edges = np.linspace(np.min(val), np.max(val), len(bins))
centers = 0.5 * (bin_edges + np.roll(bin_edges, 1))[:-1]
heights = np.diff(bin_edges)
lefts = -0.5 * binned_d
... | Auxiliary function to plot discrete-violinplots. |
async def stop(self):
    """Irreversibly stop the receiver."""
    # Tear down only if the receiver was actually started.
    if self.__started:
        # Detach from the transport so no further RTP reaches us.
        self.__transport._unregister_rtp_receiver(self)
        self.__stop_decoder()
        # Cancel the RTCP task and wait until it signals it has exited.
        self.__rtcp_task.cancel()
        await self.__rtcp_exited.wait()
def parents(self):
    """Returns a list of all the current category's parents.

    Ordered from the topmost ancestor down to the immediate parent.
    """
    if self.parent is None:
        return []
    chain = []
    node = self
    while node.parent is not None:
        chain.append(node.parent)
        node = node.parent
    # Walk built the list child-first; callers expect root-first.
    chain.reverse()
    return chain
def CA(self):
    """Vertices C and A, list."""
    try:
        # Fast path: already computed and cached.
        return self._CA
    except AttributeError:
        # First access: build the pair and memoize it on the instance.
        self._CA = [self.C, self.A]
        return self._CA
def evaluate_result(self, m):
fixed_fields = list(m.groups())
for n in self._fixed_fields:
if n in self._type_conversions:
fixed_fields[n] = self._type_conversions[n](fixed_fields[n], m)
fixed_fields = tuple(fixed_fields[n] for n in self._fixed_fields)
groupdi... | Generate a Result instance for the given regex match object |
def get(self, *args, **kwargs):
assert not args
assert list(kwargs.keys()) == ['pk']
pk = kwargs['pk']
model_name = self.model.__name__
object_spec = (model_name, pk, None)
instances = self.cache.get_instances((object_spec,))
try:
model_data = instance... | Return the single item from the filtered queryset. |
def inspect_node(self, index):
    """Inspect the graph node at the given index."""
    nodes = self.graph.nodes
    if index >= len(nodes):
        # Out-of-range index is surfaced as an RPC error code.
        raise RPCErrorCode(6)
    return create_binary_descriptor(str(nodes[index]))
def sort_func(self, key):
    """Sorting logic for `Quantity` objects."""
    # Force VALUE to sort first ('aaa') and SOURCE to sort last ('zzz');
    # every other key keeps its natural ordering.
    overrides = {
        self._KEYS.VALUE: 'aaa',
        self._KEYS.SOURCE: 'zzz',
    }
    return overrides.get(key, key)
def reset (self):
self.number = 0
self.errors = 0
self.errors_printed = 0
self.warnings = 0
self.warnings_printed = 0
self.internal_errors = 0
self.link_types = ContentTypes.copy()
self.max_url_length = 0
self.min_url_length = 0
self.avg_ur... | Reset all log statistics to default values. |
def list_classification_predictors(self):
    """List available classification predictors."""
    names = []
    # Instantiate each registered predictor and keep only those whose
    # type is "classification".
    for key in self._predictors.keys():
        predictor = self.create(key)
        if predictor.ptype == "classification":
            names.append(predictor.name)
    return names
def queue_jobs(main_task_path, params_list, queue=None, batch_size=1000):
if len(params_list) == 0:
return []
if queue is None:
task_def = context.get_current_config().get("tasks", {}).get(main_task_path) or {}
queue = task_def.get("queue", "default")
from .queue import Queue
que... | Queue multiple jobs on a regular queue |
def video(video_type, video_mime, doc=None):
@on_valid(video_mime)
def video_handler(data, **kwargs):
if hasattr(data, 'read'):
return data
elif hasattr(data, 'save'):
output = stream()
data.save(output, format=video_type.upper())
output.seek(0)
... | Dynamically creates a video type handler for the specified video type |
def add_hs_service(self, info, address):
if self.protocol and self.protocol != PROTOCOL_DMAP:
return
name = info.properties[b'Name'].decode('utf-8')
hsgid = info.properties[b'hG'].decode('utf-8')
self._handle_service(
address, name, conf.DmapService(hsgid, port=in... | Add a new device to discovered list. |
def bind(self, form):
    """Bind to filters form."""
    # Instantiate the unbound field, then register the bound version
    # under this filter's name on the form.
    unbound = self.field(default=self.default, **self.field_kwargs)
    bound = unbound.bind(form, self.name, prefix=form._prefix)
    form._fields[self.name] = bound
def normalization_factor(self):
    """Sum of number of atoms minus the number of H and O in composition."""
    # Count the atoms that are neither hydrogen nor oxygen.
    heavy = self.num_atoms
    heavy -= self.composition.get('H', 0)
    heavy -= self.composition.get('O', 0)
    return 1.0 / heavy
def _get_request_param(self, request):
params = {}
try:
params = request.POST.copy()
if not params:
params = json.loads(request.body)
except Exception:
pass
for key in params:
if key in self.mask_fields:
para... | Change POST data to JSON string and mask data. |
def create_mailbox(self, actor, loop):
    """Create the mailbox for ``actor``."""
    address = actor.monitor.address
    client = MailboxClient(address, actor, loop)
    # Run the hand-shake on the event loop's own thread.
    loop.call_soon_threadsafe(self.hand_shake, actor)
    return client
def _set_roi_mask(self, roi_mask):
if isinstance(roi_mask,
np.ndarray):
self._verify_shape_compatibility(roi_mask, 'ROI set')
self.roi_mask = roi_mask
self.roi_list = np.unique(roi_mask.flatten())
np.setdiff1d(self.roi_list, cfg.background_va... | Sets a new ROI mask. |
def create_rflink_connection(port=None, host=None, baud=57600, protocol=RflinkProtocol,
packet_callback=None, event_callback=None,
disconnect_callback=None, ignore=None, loop=None):
protocol = partial(
protocol,
loop=loop if loop else asyncio... | Create Rflink manager class, returns transport coroutine. |
def imprints(self, key, value):
    """Populate the ``imprints`` key."""
    place = value.get('a')
    publisher = value.get('b')
    # Subfield 'c' carries the date; normalize aggressively since the
    # source records are free-form.
    date = normalize_date_aggressively(value.get('c'))
    return {
        'place': place,
        'publisher': publisher,
        'date': date,
    }
def unregister(self):
machine_id = generate_machine_id()
try:
logger.debug("Unregistering %s", machine_id)
url = self.api_url + "/v1/systems/" + machine_id
net_logger.info("DELETE %s", url)
self.session.delete(url)
logger.info(
... | Unregister this system from the insights service |
def _currentLineExtraSelections(self):
if self._currentLineColor is None:
return []
def makeSelection(cursor):
selection = QTextEdit.ExtraSelection()
selection.format.setBackground(self._currentLineColor)
selection.format.setProperty(QTextFormat.FullWidthS... | QTextEdit.ExtraSelection, which highlightes current line |
def txn(self, overwrite=False, lock=True):
if lock:
self._lock.acquire()
try:
new_state, existing_generation = self.state_and_generation
new_state = copy.deepcopy(new_state)
yield new_state
if overwrite:
existing_generation = No... | Context manager for a state modification transaction. |
def clear_and_configure(config=None, bind_in_runtime=True):
    """Clear an existing injector and create another one with a callable config."""
    # Hold the injector lock across both steps so clear+configure is
    # atomic with respect to other threads.
    with _INJECTOR_LOCK:
        clear()
        return configure(config, bind_in_runtime=bind_in_runtime)
def rollback(using=None):
    """This function does the rollback itself and resets the dirty flag."""
    if using is None:
        # No alias given: roll back every configured connection.
        for alias in tldap.backend.connections:
            tldap.backend.connections[alias].rollback()
    else:
        tldap.backend.connections[using].rollback()
def service(self):
    """Instantiate service class with django http_request."""
    cls = getattr(self, 'service_class')
    return cls(self.http_request)
def save_admin_log(build, **kwargs):
message = kwargs.pop('message', None)
release = kwargs.pop('release', None)
run = kwargs.pop('run', None)
if not len(kwargs) == 1:
raise TypeError('Must specify a LOG_TYPE argument')
log_enum = kwargs.keys()[0]
log_type = getattr(models.AdminLog, log_... | Saves an action to the admin log. |
def import_sqlite(db_file, older_than=None, **kwargs):
conn = _open_sqlite(db_file)
cur = conn.cursor()
select = "SELECT * FROM testcases WHERE exported != 'yes'"
if older_than:
cur.execute(" ".join((select, "AND sqltime < ?")), (older_than,))
else:
cur.execute(select)
columns = ... | Reads the content of the database file and returns imported data. |
def add_override(self, addr, key, value):
    """Register an attribute override for a device."""
    address = Address(str(addr)).id
    _LOGGER.debug('New override for %s %s is %s', address, key, value)
    # Merge into any overrides already recorded for this address.
    overrides = self._overrides.setdefault(address, {})
    overrides[key] = value
def _get_user_allowed_fields(self):
model_name = self.Meta.model.__name__.lower()
app_label = self.Meta.model._meta.app_label
full_model_name = '%s.%s' % (app_label, model_name)
permissions = self.cached_allowed_fields.get(full_model_name)
if not permissions:
permissi... | Retrieve all allowed field names ofr authenticated user. |
def ext_pillar(minion_id,
pillar,
key=None,
only=()):
url = __opts__['cobbler.url']
user = __opts__['cobbler.user']
password = __opts__['cobbler.password']
log.info("Querying cobbler at %r for information for %r", url, minion_id)
try:
server = sal... | Read pillar data from Cobbler via its API. |
def remove_style(self):
    """Remove all XSL run rStyle elements."""
    # Locate every rStyle element whose w:val matches this style name,
    # then detach each from its parent node.
    query = './/w:rStyle[@w:val="%s"]' % self.style
    for node in self.root.xpath(query, namespaces=self.namespaces):
        node.getparent().remove(node)
def as_dict(self):
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__}
d["structure"] = self.structure
d["energy"] = self.energy
d["miller_index"] = self.miller_index
d["label"] = self.label
d["coverage"] = self.coverage
d["a... | Returns dict which contains Slab Entry data. |
def getHighestVersion(name, region=None, table="credential-store",
**kwargs):
session = get_session(**kwargs)
dynamodb = session.resource('dynamodb', region_name=region)
secrets = dynamodb.Table(table)
response = secrets.query(Limit=1,
ScanIndexForward=... | Return the highest version of `name` in the table |
def convert_row(self, row, schema, fallbacks):
for index, field in enumerate(schema.fields):
value = row[index]
if index in fallbacks:
value = _uncast_value(value, field=field)
else:
value = field.cast_value(value)
row[index] = valu... | Convert row to BigQuery |
def _create_algo_bin(self, ro_rw_zi):
sect_ro, sect_rw, sect_zi = ro_rw_zi
algo_size = sect_ro.length + sect_rw.length + sect_zi.length
algo_data = bytearray(algo_size)
for section in (sect_ro, sect_rw):
start = section.start
size = section.length
data... | Create a binary blob of the flash algo which can execute from ram |
def with_options(self, component):
    """Apply options component options to this configuration."""
    required = component.get_required_config()
    name = _get_component_name(component)
    base = self._get_base_config()
    return BoundConfig(base, name, required)
def _generate_random_word(self, length):
    """Generates a random word of ``length`` lowercase ASCII letters."""
    letters = [random.choice(string.ascii_lowercase) for _ in range(length)]
    return ''.join(letters)
def encrypt(self, orig_pkt, assoclen=None):
hdr = copy.deepcopy(orig_pkt)
del hdr[MACsec].payload
del hdr[MACsec].type
pktlen = len(orig_pkt)
if self.send_sci:
hdrlen = NOSCI_LEN + SCI_LEN
else:
hdrlen = NOSCI_LEN
if assoclen is None or not... | encrypt a MACsec frame for this Secure Association |
def _store_lasterr(self):
    """Stores the information about the last unhandled exception."""
    from sys import exc_info
    from traceback import format_exception
    # Capture the exception currently being handled and keep its full
    # formatted traceback as a single string.
    exc_type, exc_value, exc_tb = exc_info()
    lines = format_exception(exc_type, exc_value, exc_tb)
    self.lasterr = '\n'.join(lines)
def dict_jsonify(param):
    """Convert the parameter into a dictionary before calling jsonify, if it's not already one."""
    payload = param if isinstance(param, dict) else dict(param)
    return jsonify(payload)
def print_summary(self, strm):
nerr = 0
nerr += LintHelper._print_summary_map(strm, self.cpp_header_map, 'cpp-header')
nerr += LintHelper._print_summary_map(strm, self.cpp_src_map, 'cpp-soruce')
nerr += LintHelper._print_summary_map(strm, self.python_map, 'python')
if nerr == 0:
... | Print summary of lint. |
def template_runner(client, parser, args):
if args.builtin_list:
aomi.template.builtin_list()
elif args.builtin_info:
aomi.template.builtin_info(args.builtin_info)
elif args.template and args.destination and args.vault_paths:
aomi.render.template(client, args.template,
... | Executes template related operations |
def _raw_pack(key_handle, flags, data):
    """Common code for packing payload to YHSM_HMAC_SHA1_GENERATE command."""
    # Header: little-endian uint32 key handle, uint8 flags, uint8 length,
    # followed by the raw payload bytes.
    header = struct.pack('<IBB', key_handle, flags, len(data))
    return header + data
def query_gfy(self, gfyname):
self.check_token()
r = requests.get(QUERY_ENDPOINT + gfyname, headers=self.headers)
response = r.json()
if r.status_code != 200 and not ERROR_KEY in response:
raise GfycatClientError('Bad response from Gfycat',
... | Query a gfy name for URLs and more information. |
def catalog_split_yaml(self, **kwargs):
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
localpath = NameFactory.catalog_split_yaml_format.format(**kwargs_copy)
if kwargs.get('fullpath', False):
return self.fullp... | return the name of a catalog split yaml file |
def map(self, func, *columns):
if not columns:
return map(func, self.rows)
else:
values = (self.values(column) for column in columns)
result = [map(func, v) for v in values]
if len(columns) == 1:
return result[0]
else:
... | Map a function to rows, or to given columns |
def reset_generation(self):
    """Reset the generation and memberId because we have fallen out of the group."""
    # Mutate membership state only while holding the coordinator lock.
    with self._lock:
        self._generation = Generation.NO_GENERATION
        self.rejoin_needed = True
        self.state = MemberState.UNJOINED
def extract_haml(fileobj, keywords, comment_tags, options):
import haml
from mako import lexer, parsetree
from mako.ext.babelplugin import extract_nodes
encoding = options.get('input_encoding', options.get('encoding', None))
template_node = lexer.Lexer(haml.preprocessor(fileobj.read()), input_encod... | babel translation token extract function for haml files |
def run(configobj=None):
    """TEAL interface for the `acscte` function."""
    # Forward the relevant TEAL configuration fields as keyword arguments.
    keys = ('exec_path', 'time_stamps', 'verbose', 'quiet', 'single_core')
    options = {k: configobj[k] for k in keys}
    acscte(configobj['input'], **options)
def merge(cls, source_blocks):
if len(source_blocks) == 1:
return source_blocks[0]
source_blocks.sort(key=operator.attrgetter('start_line_number'))
main_block = source_blocks[0]
boot_lines = main_block.boot_lines
source_lines = [source_line for source_block in source_... | Merge multiple SourceBlocks together |
def _overlapping_channels(self, wavelengths):
sizes = self.meta["channel_sizes"]
min_a, max_a = wavelengths.min(), wavelengths.max()
matched_channel_names = []
for i, (name, size) in enumerate(zip(self.channel_names, sizes)):
si = sum(sizes[:i])
min_b, max_b = sel... | Return the channels that match the given wavelength array. |
def _load_environment_vars(self):
    """Loads configuration from environment variables."""
    env_vars = self._get_environ_vars()
    # Normalize under the ":env:" section before merging into config.
    normalized = self._normalized_keys(":env:", env_vars)
    self._config[kinds.ENV_VAR].update(normalized)
def htmlDocDump(self, f):
    """Dump an HTML document to an open FILE."""
    # Delegate to the libxml2 C binding; the return value is the
    # binding's status/result (presumably bytes written — see libxml2 docs).
    return libxml2mod.htmlDocDump(f, self._o)
def load_from_args(args):
if not args.reads:
return None
if args.read_source_name:
read_source_names = util.expand(
args.read_source_name,
'read_source_name',
'read source',
len(args.reads))
else:
read_source_names = util.drop_prefix(ar... | Given parsed commandline arguments, returns a list of ReadSource objects |
def add_filehandler(level, fmt, filename, mode, backup_count, limit, when):
kwargs = {}
if filename is None:
filename = getattr(sys.modules['__main__'], '__file__', 'log.py')
filename = os.path.basename(filename.replace('.py', '.log'))
filename = os.path.join('/tmp', filename)
if not... | Add a file handler to the global logger. |
def _win32_strerror(err):
buf = ctypes.c_char_p()
FormatMessage(
FORMAT_MESSAGE_FROM_SYSTEM
| FORMAT_MESSAGE_ALLOCATE_BUFFER
| FORMAT_MESSAGE_IGNORE_INSERTS,
None,
err,
0,
buf,
0,
None,
)
try:
return buf.value
finally:
... | expand a win32 error code into a human readable message |
def find_overlapping_slots(all_slots):
overlaps = set([])
for slot in all_slots:
start = slot.get_start_time()
end = slot.end_time
for other_slot in all_slots:
if other_slot.pk == slot.pk:
continue
if other_slot.get_day() != slot.get_day():
... | Find any slots that overlap |
def pickle_save(thing, fname=None):
    """save something to a pickle file

    If ``fname`` is None, saves to ``~/<unix-time>.pkl``. Prints the
    path written. Raises TypeError/ValueError on a bad target path.
    """
    if fname is None:
        fname = os.path.expanduser("~") + "/%d.pkl" % time.time()
    # Validate eagerly with real exceptions — the original used assert,
    # which is silently stripped under ``python -O``.
    if not isinstance(fname, str):
        raise TypeError("fname must be a string")
    if not os.path.isdir(os.path.dirname(fname)):
        raise ValueError("directory of %r does not exist" % fname)
    # Context manager guarantees the handle is closed; the original
    # leaked an open file object.
    with open(fname, "wb") as fh:
        pickle.dump(thing, fh, pickle.HIGHEST_PROTOCOL)
    print("saved", fname)
def end_output (self, **kwargs):
    """Write edges and end of checking info as gml comment."""
    self.write_edges()
    self.end_graph()
    # The outro section is optional; emit it only when configured.
    if self.has_part("outro"):
        self.write_outro()
    self.close_fileoutput()
def get(self, name, default=_MISSING):
name = self._convert_name(name)
if name not in self._fields:
if default is _MISSING:
default = self._default_value(name)
return default
if name in _UNICODEFIELDS:
value = self._fields[name]
ret... | Get a metadata field. |
def repr(self, *args, **kwargs):
    """Returns the unique string representation of the number."""
    if self.is_numpy:
        # Arrays are summarized instead of fully rendered.
        detail = "{} numpy array, {} uncertainties".format(
            self.shape, len(self.uncertainties))
    else:
        detail = "'" + self.str(*args, **kwargs) + "'"
    return "<{} at {}, {}>".format(
        self.__class__.__name__, hex(id(self)), detail)
def run():
_parser_options()
set_verbose(args["verbose"])
if _check_global_settings():
_load_db()
else:
exit(-1)
_setup_server()
if args["rollback"]:
_server_rollback()
okay("The server rollback appears to have been successful.")
exit(0)
_server_enable... | Main script entry to handle the arguments given to the script. |
def list():
    """List EC2 name and public and private ip address"""
    # NOTE: intentionally shadows the builtin ``list`` — kept for
    # backward compatibility with existing task names.
    for node in env.nodes:
        # print() function: the original used the Python 2 print
        # statement, which is a syntax error on Python 3.
        print("%s (%s, %s)" % (node.tags["Name"], node.ip_address,
                               node.private_ip_address))
def resize_lazy(image, width=None, height=None, crop=False, force=False,
namespace="resized", storage=default_storage,
as_url=False):
width, height, crop = _normalize_params(image, width, height, crop)
name = _get_resized_name(image, width, height, crop, namespace)
try:
... | Returns the name of the resized file. Returns the url if as_url is True |
def read_name(self, start: int, line: int, col: int, prev: Token) -> Token:
body = self.source.body
body_length = len(body)
position = start + 1
while position < body_length:
char = body[position]
if not (
char == "_"
or "0" <= char... | Read an alphanumeric + underscore name from the source. |
def _etree_py26_write(f, tree):
f.write("<?xml version='1.0' encoding='utf-8'?>\n".encode('utf-8'))
if etree.VERSION[:3] == '1.2':
def fixtag(tag, namespaces):
if tag == XML_NS + 'lang':
return 'xml:lang', ""
if '}' in tag:
j = tag.index('}') + 1
... | Compatibility workaround for ElementTree shipped with py2.6 |
def broadcast_tx(self, tx):
    """broadcast a transaction to the network"""
    url = self.base_url("txs/push")
    payload = json.dumps({"tx": tx.as_hex()})
    # NOTE(review): urlopen's ``data`` must be bytes on Python 3 —
    # confirm this code targets py2 or that encoding happens elsewhere.
    raw = urlopen(url, data=payload).read().decode("utf8")
    return json.loads(raw)
def __init_object(self):
    """Create a new object for the pool."""
    # Without a factory function there is no way to grow the pool.
    if self.init_function is None:
        raise TypeError("The Pool must have a non None function to fill the pool.")
    self.__enqueue(self.init_function())
def print_poem(self):
    """Print all the verses."""
    last = len(self.verses) - 1
    for index, verse in enumerate(self.verses):
        for line in verse:
            print(line)
        # Blank separator between verses, but not after the final one.
        if index != last:
            print('')
def save(self):
client = self._new_api_client()
params = {'id': self.id} if hasattr(self, 'id') else {}
action = 'patch' if hasattr(self, 'id') else 'post'
saved_model = client.make_request(self, action, url_params=params, post_data=self._to_json)
self.__init__(**saved_model._to... | Save an instance of a Union object |
def _parent_tile(tiles):
    """Find the common parent tile for a sequence of tiles."""
    parent = None
    # Fold the sequence pairwise through common_parent; the first tile
    # seeds the accumulator.
    for tile in tiles:
        parent = tile if parent is None else common_parent(parent, tile)
    return parent
def show(self, annotations=True):
    """Plot the current Path2D object using matplotlib."""
    # Closed paths can be discretized; open ones are drawn entity-by-entity.
    plot = self.plot_discrete if self.is_closed else self.plot_entities
    plot(show=True, annotations=annotations)
def normalize_residuals(self, points):
    """Normalize residuals by the level of the variable."""
    pairs = zip(self.evaluate_residual(points), self.evaluate_solution(points))
    return [residual / solution for residual, solution in pairs]
def parse_extras(extras_str):
    """Turn a string of extras into a parsed extras list"""
    from pkg_resources import Requirement
    # Wrap in a fake requirement so pkg_resources does the parsing work.
    fake_req = "fakepkg{0}".format(extras_to_string(extras_str))
    parsed = Requirement.parse(fake_req).extras
    lowered = [extra.lower() for extra in parsed]
    return sorted(dedup(lowered))
def check_password_readable(self, section, fields):
if not fields:
return
if len(self.read_ok) != 1:
return
fn = self.read_ok[0]
if fileutil.is_accessable_by_others(fn):
log.warn(LOG_CHECK, "The configuration file %s contains password information (in s... | Check if there is a readable configuration file and print a warning. |
def stops(self):
    """Return stops served by this route."""
    served = set()
    # Union the stops of every stop_time of every trip on this route.
    for trip in self.trips():
        for stop_time in trip.stop_times():
            served.update(stop_time.stops())
    return served
def metric_details(request, pk, format=None):
metric = get_object_or_404(Metric, pk=pk)
if request.method == 'GET':
try:
results = metric.select(q=request.query_params.get('q', metric.query))
except InfluxDBClientError as e:
return Response({'detail': e.content}, status=e... | Get or write metric values |
def generate_non_rabs(self):
logging.info('Building non-Rab DB')
run_cmd([self.pathfinder['cd-hit'], '-i', self.path['non_rab_db'], '-o', self.output['non_rab_db'],
'-d', '100', '-c', str(config['param']['non_rab_db_identity_threshold']), '-g', '1', '-T', self.cpu])
os.remove(se... | Shrink the non-Rab DB size by reducing sequence redundancy. |
def remove_dependency(id=None, name=None, dependency_id=None, dependency_name=None):
    """Remove a BuildConfiguration from the dependency list of another BuildConfiguration"""
    data = remove_dependency_raw(id, name, dependency_id, dependency_name)
    if not data:
        # Falsy result: propagate as an implicit None, like the original.
        return None
    return utils.format_json_list(data)
def connect(self, host=None, port=None):
    """Connects to given host address and port."""
    # Fall back to the instance defaults when arguments are omitted.
    target_host = host if host is not None else self.host
    target_port = port if port is not None else self.port
    self.socket.connect(target_host, target_port)
def inflate_dtype(arr, names):
    """Create structured dtype from a 2d ndarray with unstructured dtype."""
    arr = np.asanyarray(arr)
    if has_structured_dt(arr):
        # Already structured: nothing to build.
        return arr.dtype
    scalar_dt = arr.dtype
    # One field per requested name, all sharing the scalar dtype.
    return np.dtype([(name, scalar_dt) for name in names])
def add_action_view(self, name, url, actions, **kwargs):
view = ActionsView(name, url=url, self_var=self, **kwargs)
if isinstance(actions, dict):
for group, actions in actions.iteritems():
view.actions.extend(load_actions(actions, group=group or None))
else:
... | Creates an ActionsView instance and registers it. |
def _recursive_upsert(context, params, data):
children = params.get("children", {})
nested_calls = []
for child_params in children:
key = child_params.get("key")
child_data_list = ensure_list(data.pop(key))
if isinstance(child_data_list, dict):
child_data_list = [child_da... | Insert or update nested dicts recursively into db tables |
def inspect(self):
policy = self.policy
config_id = self.config_id
if self.config_id.config_type == ItemType.VOLUME:
if self.container_map.use_attached_parent_name:
container_name = policy.aname(config_id.map_name, config_id.instance_name, config_id.config_name)
... | Fetches information about the container from the client. |
def abort_io(self, iocb, err):
    """Forward the abort downstream."""
    if _debug: IOChainMixIn._debug("abort_io %r %r", iocb, err)
    # Sanity check: only requests on our own chain may be aborted here.
    if iocb is not self.ioChain:
        raise RuntimeError("broken chain")
    self.abort(err)
def itemgetter_handle(tokens):
internal_assert(len(tokens) == 2, "invalid implicit itemgetter args", tokens)
op, args = tokens
if op == "[":
return "_coconut.operator.itemgetter(" + args + ")"
elif op == "$[":
return "_coconut.functools.partial(_coconut_igetitem, index=" + args + ")"
... | Process implicit itemgetter partials. |
def _run_strip_accents(self, text):
    """Strips accents from a piece of text."""
    # Decompose characters (NFD), then drop combining marks (category Mn).
    decomposed = unicodedata.normalize("NFD", text)
    kept = [ch for ch in decomposed if unicodedata.category(ch) != "Mn"]
    return "".join(kept)
def away(self, message=None):
    """mark ourself as away"""
    # No message means a bare AWAY command; otherwise append the
    # message as the trailing parameter.
    line = 'AWAY' if not message else 'AWAY :' + message
    self.send_line(line)
def _repr_png_(self):
    """This is used by ipython to plot inline."""
    # Flush pending events so the canvas is current before capture.
    app.process_events()
    QApplication.processEvents()
    # Grab the framebuffer and encode it as PNG bytes for IPython.
    img = read_pixels()
    return bytes(_make_png(img))
def check_file_version(notebook, source_path, outputs_path):
if not insert_or_test_version_number():
return
_, ext = os.path.splitext(source_path)
if ext.endswith('.ipynb'):
return
version = notebook.metadata.get('jupytext', {}).get('text_representation', {}).get('format_version')
fo... | Raise if file version in source file would override outputs |
def runserver(port=8080, debug_toolbar="yes", werkzeug="no", dummy_cache="no", short_cache="no", template_warnings="no", log_level="DEBUG",
insecure="no"):
if not port or (not isinstance(port, int) and not port.isdigit()):
abort("You must specify a port.")
yes_or_no = ("debug_toolbar", "we... | Clear compiled python files and start the Django dev server. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.