code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def rlmb_tiny_recurrent():
    """Tiny setting with a recurrent next-frame model."""
    hparams = rlmb_ppo_tiny()
    hparams.epochs = 1
    # Swap in the recurrent next-frame model and its matching param set.
    hparams.generative_model = "next_frame_basic_recurrent"
    hparams.generative_model_params = "next_frame_basic_recurrent"
    return hparams
def legal_date(year, month, day):
    """Check if this is a legal date in the Gregorian calendar.

    Raises ValueError when ``day`` is out of range for ``month``;
    returns True otherwise.
    """
    if month == 2:
        last_day = 29 if isleap(year) else 28
    elif month in HAVE_30_DAYS:
        last_day = 30
    else:
        last_day = 31
    if not 0 < day <= last_day:
        raise ValueError("Month {} doesn't have a day {}".format(month, day))
    return True
def _check_team_login(team):
contents = _load_auth()
for auth in itervalues(contents):
existing_team = auth.get('team')
if team and team != existing_team:
raise CommandException(
"Can't log in as team %r; log out first." % team
)
elif not team and ... | Disallow simultaneous public cloud and team logins. |
def disconnect(self):
    """Ends a client authentication session, performs a logout and a clean up."""
    # Only log out when a session actually exists.
    if self.r_session:
        self.session_logout()
    self.r_session = None
    self.clear()
def _init_study_items_max(self):
    """User can limit the number of genes printed in a GO term.

    Returns the integer limit, or None for "no limit".
    """
    items = self.study_items
    # NOTE: True is an instance of int, so it must be ruled out before
    # the isinstance(..., int) check.
    if items is None or items is True:
        return None
    if isinstance(items, int):
        return items
    return None
def add_header(self, key, value):
    """Add a response header.

    Both key and value must already be str; stored last-write-wins.
    """
    assert isinstance(key, str), 'header key must be of type str'
    assert isinstance(value, str), 'header value must be of type str'
    self.headers[key] = value
def create_build_paths(context: Context):
    """Creates directories needed for build outputs.

    Falsy (unset) paths are skipped; existing directories are left alone.
    """
    candidates = (context.app.asset_build_path,
                  context.app.screenshots_build_path,
                  context.app.collected_assets_path)
    for path in candidates:
        if path:
            os.makedirs(path, exist_ok=True)
def class_string(self, klass):
    """Return a string representative of the class.

    Strings pass through untouched; classes become "module.Name".
    """
    if isinstance(klass, string_types):
        return klass
    return '.'.join((klass.__module__, klass.__name__))
def shell(config, type_):
from warehouse.db import Session
if type_ is None:
type_ = autodetect()
runner = {"bpython": bpython, "ipython": ipython, "plain": plain}[type_]
session = Session(bind=config.registry["sqlalchemy.engine"])
try:
runner(config=config, db=session)
except Im... | Open up a Python shell with Warehouse preconfigured in it. |
def write_path(path, s, owner=None, group=None, mode=None,
utimes=None, sync=False):
path = os.path.abspath(path)
fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
prefix='.ansible_mitogen_transfer-',
dir=os.path.dirname(path))
... | Writes bytes `s` to a filesystem `path`. |
def datetime_from_json(js, manager):
    """Deserialize a Python datetime object from json.

    ``manager`` is unused here; presumably kept for a common deserializer
    signature -- TODO confirm against callers.
    """
    if js is None:
        return None
    # JS-style months are 0-based (hence +1) and datetime wants
    # microseconds, so milliseconds are scaled by 1000.
    return dt.datetime(js['year'], js['month'] + 1, js['date'],
                       js['hours'], js['minutes'], js['seconds'],
                       js['milliseconds'] * 1000)
def cli(ctx=None, verbose=0):
    """Thoth solver command line interface."""
    if ctx:
        ctx.auto_envvar_prefix = "THOTH_SOLVER"
    if not verbose:
        return
    _LOG.setLevel(logging.DEBUG)
    _LOG.debug("Debug mode is on")
def validate_plugin(self, plugin_class, experimental=False):
    """Verifies that the plugin_class should execute under this policy."""
    allowed = [IndependentPlugin] + self.valid_subclasses
    if experimental:
        allowed.append(ExperimentalPlugin)
    return any(issubclass(plugin_class, base) for base in allowed)
def validate(self) :
if not self.mustValidate :
return True
res = {}
for field in self.validators.keys() :
try :
if isinstance(self.validators[field], dict) and field not in self.store :
self.store[field] = DocumentStore(self.collection... | Validate the whole document |
def generic_adjust(colors, light):
if light:
for color in colors:
color = util.saturate_color(color, 0.60)
color = util.darken_color(color, 0.5)
colors[0] = util.lighten_color(colors[0], 0.95)
colors[7] = util.darken_color(colors[0], 0.75)
colors[8] = util.dar... | Generic color adjustment for themers. |
def write_to(self, f):
f.write(self.version + "\r\n")
for name, value in self.items():
name = name.title()
name = name.replace("Warc-", "WARC-").replace("-Ip-", "-IP-").replace("-Id", "-ID").replace("-Uri", "-URI")
f.write(name)
f.write(": ")
f... | Writes this header to a file, in the format specified by WARC. |
def find(self, other):
iset = self._iset
l = binsearch_left_start(iset, other[0] - self._maxlen, 0, len(iset))
r = binsearch_right_end(iset, other[1], 0, len(iset))
iopts = iset[l:r]
iiter = (s for s in iopts if s[0] <= other[1] and s[1] >= other[0])
for o in iiter: yield... | Return an interable of elements that overlap other in the tree. |
def query_module_funcs(self, module):
    """Query the functions in the specified module."""
    query = self.session.query(Export).filter_by(module=module)
    return query.all()
def conn_ali(cred, crid):
driver = get_driver(Provider.ALIYUN_ECS)
try:
ali_obj = driver(cred['ali_access_key_id'],
cred['ali_access_key_secret'],
region=cred['ali_region'])
except SSLError as e:
abort_err("\r SSL Error with AliCloud: {}".for... | Establish connection to AliCloud service. |
def new(self, operation='no operation', **kw):
self_vars = vars(self).copy()
del self_vars['operation']
del self_vars['children']
del self_vars['counts']
del self_vars['_flush']
new = self.__class__(operation)
vars(new).update(self_vars)
vars(new).update(k... | Return a copy of the monitor usable for a different operation. |
def process_request(self, request):
    """Store memory data to log later."""
    if not self._is_enabled():
        return
    # Tag this request with a fresh GUID, then snapshot memory state.
    self._cache.set(self.guid_key, six.text_type(uuid4()))
    prefix = self._log_prefix(u"Before", request)
    self._cache.set(self.memory_data_key, self._memory_data(prefix))
def _one_q_sic_prep(index, qubit):
if index == 0:
return Program()
theta = 2 * np.arccos(1 / np.sqrt(3))
zx_plane_rotation = Program([
RX(-pi / 2, qubit),
RZ(theta - pi, qubit),
RX(-pi / 2, qubit),
])
if index == 1:
return zx_plane_rotation
elif index == 2... | Prepare the index-th SIC basis state. |
def _arg2opt(arg):
    """Turn a pass argument into the correct option.

    Toggles take precedence over flags; returns None when no mapping matches.
    """
    for mapping in (option_toggles, option_flags):
        for option, mapped_arg in mapping.items():
            if mapped_arg == arg:
                return option
    return None
def jsondump(model, fp):
    """Dump Versa model into JSON form.

    Writes a JSON array with one link per line to the file-like ``fp``.
    """
    fp.write('[')
    fp.write(',\n'.join(json.dumps(link) for link in model))
    fp.write(']')
def mixed_use_of_local_and_run(self):
cxn = Connection("localhost")
result = cxn.local("echo foo", hide=True)
assert result.stdout == "foo\n"
assert not cxn.is_connected
result = cxn.run("echo foo", hide=True)
assert cxn.is_connected
assert result.stdout == "foo\n... | Run command truly locally, and over SSH via localhost |
def open_url(self, url):
try:
c = pycurl.Curl()
c.setopt(pycurl.FAILONERROR, True)
c.setopt(pycurl.URL, "%s/api/v0/%s" % (self.url, url))
c.setopt(pycurl.HTTPHEADER, ["User-Agent: %s" % self.userAgent,
"apiToken: %s" % self... | Open's URL with apiToken in the headers |
def pop(self, key, *args, **kwargs):
    """Remove and return the value associated with case-insensitive ``key``.

    Fix: forward ``*args``/``**kwargs`` to the superclass so
    ``pop(key, default)`` returns the default instead of raising
    KeyError -- the previous code accepted them but dropped them.
    """
    return super(CaseInsensitiveDict, self).pop(
        CaseInsensitiveStr(key), *args, **kwargs)
def __create_phantom_js_driver(self):
try:
return webdriver.PhantomJS(executable_path=self._config_reader.get(self.PHANTOMEJS_EXEC_PATH),
service_args=['--ignore-ssl-errors=true'])
except KeyError:
return webdriver.PhantomJS(service_args=['-... | Creates an instance of PhantomJS driver. |
def from_code(cls, code: int) -> 'ColorCode':
    """Return a ColorCode from a terminal code."""
    instance = cls()
    instance._init_code(code)
    return instance
def to_dict(self):
    """Return this message as a dict suitable for json encoding.

    The options are deep-copied so callers can't mutate internal state;
    a datetime ``eta`` is converted to a POSIX timestamp.
    """
    import copy
    options = copy.deepcopy(self._options)
    eta = options.get('task_args', {}).get('eta')
    if eta:
        options['task_args']['eta'] = time.mktime(eta.timetuple())
    return options
def _query_systemstate(self):
def status_filter_func(event):
if event.command_class == 3 and event.command == 0:
return True
return False
try:
response = self._send_command(0, 6, [])
maxconn, = unpack("<B", response.payload)
except ... | Query the maximum number of connections supported by this adapter |
def visualize(x, y, xlabel=None, ylabel=None, title=None, ylim=None):
total_seconds = (x[-1] - x[0]).total_seconds()
if total_seconds <= 86400 * 1 * 3:
return plot_one_day(x, y, xlabel, ylabel, title, ylim)
elif total_seconds <= 86400 * 7 * 2:
return plot_one_week(x, y, xlabel, ylabel, title... | A universal function plot arbitrary time series data. |
def _gen_packet_setcolor(self, sequence, hue, sat, bri, kel, fade):
hue = min(max(hue, HUE_MIN), HUE_MAX)
sat = min(max(sat, SATURATION_MIN), SATURATION_MAX)
bri = min(max(bri, BRIGHTNESS_MIN), BRIGHTNESS_MAX)
kel = min(max(kel, TEMP_MIN), TEMP_MAX)
reserved1 = pack("<B", 0)
... | Generate "setcolor" packet payload. |
def trace():
    """Enables and disables request tracing.

    Property factory: returns a dict of accessors suitable for
    ``property(**trace())``.
    """
    def fget(self):
        return self._options.get('trace', None)

    def fset(self, value):
        self._options['trace'] = value

    return {'fget': fget, 'fset': fset}
def _get_schema(self):
d={}
layout_kwargs=dict((_,'') for _ in get_layout_kwargs())
for _ in ('data','layout','theme','panels'):
d[_]={}
for __ in eval('__QUANT_FIGURE_{0}'.format(_.upper())):
layout_kwargs.pop(__,None)
d[_][__]=None
d['layout'].update(annotations=dict(values=[],
params... | Returns a dictionary with the schema for a QuantFigure |
def _format_contracts(what: str, obj: Any) -> List[str]:
if what in ['function', 'method', 'attribute']:
if what == 'attribute':
if not isinstance(obj, property):
return []
return _format_property_contracts(prop=obj)
if what in ['function', 'method']:
... | Format the contracts as reST. |
def check_no_alert(self):
    """Assert there is no alert.

    Constructing/reading the alert raises NoAlertPresentException when
    none is shown -- which is the success case here.
    """
    try:
        raise AssertionError("Should not see an alert. Alert '%s' shown." %
                             Alert(world.browser).text)
    except NoAlertPresentException:
        pass
def run_func(func, *args, **kwargs):
    """Helper function for running examples.

    Executes ``func`` remotely through ray and prints the result tagged
    with the calling function's name.
    """
    ray.init()
    remote_func = ray.remote(func)
    # Fix: **kwargs were accepted but silently dropped before.
    result = ray.get(remote_func.remote(*args, **kwargs))
    caller = inspect.stack()[1][3]
    print("%s: %s" % (caller, str(result)))
    return result
def reset(self):
    """Resets the connection pool back to a clean state.

    Re-records the owning pid (fork detection) and drops all
    connection bookkeeping.
    """
    self.pid = os.getpid()
    self._check_lock = threading.Lock()
    self._created_connections = 0
    self._created_connections_per_node = {}
    self._available_connections = {}
    self._in_use_connections = {}
    self.initialized = False
def add_log_callback(callback):
    """Adds a log callback.

    Returns the callback unchanged so this can be used as a decorator.
    Raises ValueError for non-callables.
    """
    global _log_callbacks
    if not callable(callback):
        raise ValueError("Callback must be callable")
    _log_callbacks.add(callback)
    return callback
def DEFINE_constant_string(self, name, default, help):
    """A helper for defining constant strings.

    A falsy default is normalized to the empty string.
    """
    option = type_info.String(name=name, default=default or "",
                              description=help)
    self.AddOption(option, constant=True)
def tree(c):
    """Display documentation contents with the 'tree' program."""
    ignore = ".git|*.pyc|*.swp|dist|*.egg-info|_static|_build|_templates"
    command = 'tree -Ca -I "{0}" {1}'.format(ignore, c.sphinx.source)
    c.run(command)
def delete(self, p_timestamp=None, p_write=True):
timestamp = p_timestamp or self.timestamp
index = self._get_index()
try:
del self.backup_dict[timestamp]
index.remove(index[[change[0] for change in index].index(timestamp)])
self._save_index(index)
... | Removes backup from the backup file. |
def replace(self, year=None, month=None, day=None):
    """Return a new date with new values for the specified fields.

    Unspecified fields keep the current value; the combination is
    validated before the new date is built.
    """
    year = self._year if year is None else year
    month = self._month if month is None else month
    day = self._day if day is None else day
    _check_date_fields(year, month, day)
    return date(year, month, day)
def _maybe_registered(self, failure, new_reg):
failure.trap(ServerError)
response = failure.value.response
if response.code == http.CONFLICT:
reg = new_reg.update(
resource=messages.UpdateRegistration.resource_type)
uri = self._maybe_location(response)
... | If the registration already exists, we should just load it. |
def obtain(self):
self.check_destination()
url = self.url
cmd = ['clone', '--progress']
if self.git_shallow:
cmd.extend(['--depth', '1'])
if self.tls_verify:
cmd.extend(['-c', 'http.sslVerify=false'])
cmd.extend([url, self.path])
self.info(... | Retrieve the repository, clone if doesn't exist. |
def _filter_attrs(attrs, ignored_attrs):
return dict((k, v) for k, v in attrs.items() if k not in ignored_attrs) | Return attrs that are not in ignored_attrs |
def paragraph(self, paragraph, prefix=""):
content = []
for text in paragraph.content:
content.append(self.text(text))
content = u"".join(content).encode("utf-8")
for line in content.split("\n"):
self.target.write(" " * self.indent)
self.target.write(... | process a pyth paragraph into the target |
def named_series(self, ordering=None):
series = self.series()
if ordering:
series = list(series)
todo = dict(((n, idx) for idx, n in enumerate(self.names())))
for name in ordering:
if name in todo:
idx = todo.pop(name)
... | Generator of tuples with name and serie data. |
def add_current_text_if_valid(self):
    """Add current text to combo box history if valid.

    is_valid may return None for "unknown", which is treated as valid.
    Returns True when added; otherwise restores the previous selection.
    """
    valid = self.is_valid(self.currentText())
    if not (valid or valid is None):
        self.set_current_text(self.selected_text)
        return None
    self.add_current_text()
    return True
def getSkeletalBoneData(self, action, eTransformSpace, eMotionRange, unTransformArrayCount):
fn = self.function_table.getSkeletalBoneData
pTransformArray = VRBoneTransform_t()
result = fn(action, eTransformSpace, eMotionRange, byref(pTransformArray), unTransformArrayCount)
return result,... | Reads the state of the skeletal bone data associated with this action and copies it into the given buffer. |
def _calc_sampleset(w1, w2, step, minimal):
if minimal:
arr = [w1 - step, w1, w2, w2 + step]
else:
arr = np.arange(w1 - step, w2 + step + step, step)
return arr | Calculate sampleset for each model. |
def select(self, html, stype, expression):
etree = html5lib.parse(html,
treebuilder='lxml',
namespaceHTMLElements=False)
if stype == 'css':
selector = lxml.cssselect.CSSSelector(expression)
frag = list(selector(etree))... | returns WHATWG spec HTML fragment from selector expression. |
def pauli_sum(*elements: Pauli) -> Pauli:
    """Return the sum of elements of the Pauli algebra.

    Terms from all elements are merged in sorted order, like terms are
    combined, and (near-)zero coefficients are dropped.
    """
    key = itemgetter(0)
    merged = heapq.merge(*elements, key=key)
    summed = ((term, sum(coeff for _, coeff in grp))
              for term, grp in groupby(merged, key=key))
    terms = tuple((term, coeff) for term, coeff in summed
                  if not isclose(coeff, 0.0))
    return Pauli(terms)
def QueueDeleteTasks(self, queue, tasks):
    """Removes the given tasks from the queue.

    Each task is either an object with a ``task_id`` or something
    coercible to an int id.
    """
    predicates = [
        DataStore.QueueTaskIdToColumn(
            getattr(task, "task_id", None) or int(task))
        for task in tasks
    ]
    self.DeleteAttributes(queue, predicates)
def find(self, tagtype, **kwargs):
    """Get the first tag with a type in this token.

    Falls back to ``default`` if supplied; otherwise raises LookupError.
    """
    for tag in self.__tags:
        if tag.tagtype == tagtype:
            return tag
    if 'default' in kwargs:
        return kwargs['default']
    raise LookupError("Token {} is not tagged with the speficied tagtype ({})".format(self, tagtype))
def print_entitlements(opts, data, page_info=None, show_list_info=True):
if utils.maybe_print_as_json(opts, data, page_info):
return
headers = ["Name", "Token", "Created / Updated", "Identifier"]
rows = []
for entitlement in sorted(data, key=itemgetter("name")):
rows.append(
... | Print entitlements as a table or output in another format. |
def push(item, remote_addr, trg_queue, protocol=u'jsonrpc'):
if protocol == u'jsonrpc':
try:
server = Server(remote_addr, encoding=_c.FSQ_CHARSET)
return server.enqueue(item.id, trg_queue, item.item.read())
except Exception, e:
raise FSQPushError(e)
raise Valu... | Enqueue an FSQWorkItem at a remote queue |
def delete(self, filepath):
    """Stop and delete the specified filewatcher."""
    Filewatcher.remove_directory_to_watch(filepath)
    response = {'msg': 'Watcher deleted for {}'.format(filepath)}
    self.write(response)
def do_tagg(self, arglist: List[str]):
    """version of creating an html tag using arglist instead of argparser"""
    if len(arglist) < 2:
        self.perror("tagg requires at least 2 arguments")
        return
    tag, *content = arglist
    self.poutput('<{0}>{1}</{0}>'.format(tag, ' '.join(content)))
def load_default_model_sentencizer(lang):
loading_start = time.time()
lang_class = get_lang_class(lang)
nlp = lang_class()
nlp.add_pipe(nlp.create_pipe('sentencizer'))
loading_end = time.time()
loading_time = loading_end - loading_start
return nlp, loading_time, lang + "_default_" + 'sentenc... | Load a generic spaCy model and add the sentencizer for sentence tokenization |
def close(self):
    """Close service client and its plugins.

    Plugin 'close' hooks run first; the session close is scheduled on
    the event loop rather than awaited.
    """
    self._execute_plugin_hooks_sync(hook='close')
    if self.session.closed:
        return
    ensure_future(self.session.close(), loop=self.loop)
def resolve(self, var, context):
    """Resolves a variable out of context if it's not in quotes.

    None passes through; matching surrounding quotes are stripped and
    the literal returned; anything else is resolved as a template var.
    """
    if var is None:
        return var
    quoted = var[0] in ('"', "'") and var[-1] == var[0]
    if quoted:
        return var[1:-1]
    return template.Variable(var).resolve(context)
def select(self, *itms):
    """Joins the items to be selected and inserts the current table name.

    With no items, selects ``*``. Returns self for chaining.
    """
    columns = ', '.join(itms) if itms else '*'
    self.terms.append("select %s from %s" % (columns, self.table))
    return self
def retry_func_accept_retry_state(retry_func):
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', r... | Wrap "retry" function to accept "retry_state" parameter. |
def remove_redundant_items(items):
    """Filter out redundant union items.

    An item is dropped when some *other* item makes it redundant.
    """
    return [
        item for item in items
        if not any(item is not other and is_redundant_union_item(item, other)
                   for other in items)
    ]
def embedded_preview(src_path):
try:
assert(exists(src_path) and isdir(src_path))
preview_list = glob(join(src_path, '[Q|q]uicklook', '[P|p]review.*'))
assert(preview_list)
preview_path = preview_list[0]
with NamedTemporaryFile(prefix='pyglass', suffix=extension(preview_path), delete=False) as tem... | Returns path to temporary copy of embedded QuickLook preview, if it exists |
def _create_slice(self, key):
if isinstance(key, slice):
step = key.step
if step is None:
step = 1
if step != 1:
raise ArgumentError("You cannot slice with a step that is not equal to 1", step=key.step)
start_address = key.start
... | Create a slice in a memory segment corresponding to a key. |
def subset_bam_by_region(in_file, region, config, out_file_base=None):
if out_file_base is not None:
base, ext = os.path.splitext(out_file_base)
else:
base, ext = os.path.splitext(in_file)
out_file = "%s-subset%s%s" % (base, region, ext)
if not file_exists(out_file):
with pysam.S... | Subset BAM files based on specified chromosome region. |
def _pnorm_diagweight(x, p, w):
order = 'F' if all(a.flags.f_contiguous for a in (x.data, w)) else 'C'
xp = np.abs(x.data.ravel(order))
if p == float('inf'):
xp *= w.ravel(order)
return np.max(xp)
else:
xp = np.power(xp, p, out=xp)
xp *= w.ravel(order)
return np.s... | Diagonally weighted p-norm implementation. |
def _analyse_overview_field(content):
if "(" in content:
return content.split("(")[0], content.split("(")[0]
elif "/" in content:
return content.split("/")[0], content.split("/")[1]
return content, "" | Split the field in drbd-overview |
def getfnc_qual_ev(self):
    """Keep annotation if it passes the potentially modified selection.

    Looks up the selection function keyed by the ND/NOT description and
    the include/exclude evidence-code configuration.
    """
    nd_key = self.nd_not2desc[(self._keep_nd, self._keep_not)]
    ev_key = self.incexc2num[(self.include_evcodes is not None,
                              self.exclude_evcodes is not None)]
    return self.param2fnc[(nd_key, ev_key)]
def convert_path(cls, file):
    """Check to see if an extended path is given and convert appropriately.

    A string passes through; a list of strings is joined with '/';
    anything else prints an error and returns -1.
    """
    if isinstance(file, str):
        return file
    if isinstance(file, list) and all(isinstance(part, str) for part in file):
        return "/".join(file)
    print("Incorrect path specified")
    return -1
def count_replica(self, partition):
    """Return count of replicas of given partition.

    Only replicas hosted on this object's brokers are counted.
    """
    return sum(broker in self.brokers for broker in partition.replicas)
def _params(sig):
params = []
for p in sig.parameters:
param = sig.parameters[p]
optional = param.default != inspect.Signature.empty
default = UIBuilder._safe_default(param.default) if param.default != inspect.Signature.empty else ''
annotation = param.ann... | Read params, values and annotations from the signature |
def _messageFromSender(self, sender, messageID):
return self.store.findUnique(
_QueuedMessage,
AND(_QueuedMessage.senderUsername == sender.localpart,
_QueuedMessage.senderDomain == sender.domain,
_QueuedMessage.messageID == messageID),
default=... | Locate a previously queued message by a given sender and messageID. |
def _handle_actiondefinefunction(self, _):
obj = _make_object("ActionDefineFunction")
obj.FunctionName = self._get_struct_string()
obj.NumParams = unpack_ui16(self._src)
for i in range(1, obj.NumParams + 1):
setattr(obj, "param" + str(i), self._get_struct_string())
ob... | Handle the ActionDefineFunction action. |
def parse_description(s):
s = "".join(s.split()[1:]).replace("/", ";")
a = parse_qs(s)
return a | Returns a dictionary based on the FASTA header, assuming JCVI data |
def parse_cg(self):
line = self.readline()
while self._cg_header_re.match(line):
line = self.readline()
entry_lines = []
while not self._cg_footer_re.match(line):
if line.isspace():
self.parse_cg_entry(entry_lines)
entry_lines = []
... | Parse the call graph. |
def parse_from_parent(self, parent, state):
    """Parse the element from the given parent element.

    Delegates to the wrapped processor, then runs after-parse hooks on
    the parsed value.
    """
    value = self._processor.parse_from_parent(parent, state)
    return _hooks_apply_after_parse(self._hooks, state, value)
def download_directory(self, bucket, key, directory, transfer_config=None, subscribers=None):
    """download a directory using Aspera

    Verifies write access to ``directory`` before queuing the transfer.
    """
    check_io_access(directory, os.W_OK)
    pairs = [FilePair(key, directory)]
    return self._queue_task(bucket, pairs, transfer_config, subscribers,
                            enumAsperaDirection.RECEIVE)
def remove_user(self, user):
    """Deletes a user from an organization.

    Removes the membership row, then emits the user_removed signal.
    """
    membership = self._org_user_model.objects.get(user=user,
                                                  organization=self)
    membership.delete()
    user_removed.send(sender=self, user=user)
def _writeReplacementFiles(self, session, directory, name):
if self.replaceParamFile:
self.replaceParamFile.write(session=session, directory=directory,
name=name)
if self.replaceValFile:
self.replaceValFile.write(session=session, directory=... | Write the replacement files |
def import_dashboards(self):
    """Overrides the dashboards using json instances from the file."""
    upload = request.files.get('file')
    if request.method == 'POST' and upload:
        dashboard_import_export.import_dashboards(db.session, upload.stream)
        return redirect('/dashboard/list/')
    # GET (or missing file): show the upload form.
    return self.render_template('superset/import_dashboards.html')
def _get_removed(self):
removed, packages = [], []
if "--tag" in self.extra:
for pkg in find_package("", self.meta.pkg_path):
for tag in self.binary:
if pkg.endswith(tag):
removed.append(split_package(pkg)[0])
... | Manage removed packages by extra options |
def get(key, section='main'):
    """Get a single option ``key`` from ``section`` of the nago settings."""
    return nago.settings.get_option(option_name=key, section_name=section)
def path(self):
names = []
obj = self
while obj:
names.insert(0, obj.name)
obj = obj.parent_dir
sep = self.filesystem._path_separator(self.name)
if names[0] == sep:
names.pop(0)
dir_path = sep.join(names)
is_drive = name... | Return the full path of the current object. |
def _clone(self, *args, **kwargs):
    """Ensure attributes are copied to subsequent queries."""
    search_state = ("_search_terms", "_search_fields", "_search_ordered")
    kwargs.update((attr, getattr(self, attr)) for attr in search_state)
    return super(SearchableQuerySet, self)._clone(*args, **kwargs)
def db_value(self, value):
    """Convert the Arrow instance to a datetime for saving in the db.

    Strings are first parsed into Arrow, then any Arrow value is
    unwrapped to a plain datetime before delegating to the parent field.
    """
    coerced = value
    if isinstance(coerced, string_types):
        coerced = arrow.get(coerced)
    if isinstance(coerced, arrow.Arrow):
        coerced = coerced.datetime
    return super(ArrowDateTimeField, self).db_value(coerced)
def total(self, xbin1=1, xbin2=-2):
    """Return the total yield and its associated statistical uncertainty.

    Integrates the underlying histogram between ``xbin1`` and ``xbin2``
    with ``error=True``; the defaults presumably span the regular bins
    while excluding under/overflow -- TODO confirm the bin-indexing
    convention of ``hist.integral``.
    """
    return self.hist.integral(xbin1=xbin1, xbin2=xbin2, error=True)
def parse_band_set_name(self, message):
    """Process incoming message indicating service name change."""
    name = message.get("name")
    if name:
        self._service_name = name
    else:
        self.log.warning(
            "Received broken record on set_name band\nMessage: %s", str(message)
        )
def store(bank, key, data, cachedir):
base = os.path.join(cachedir, os.path.normpath(bank))
try:
os.makedirs(base)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise SaltCacheError(
'The cache directory, {0}, could not be created: {1}'.format(
... | Store information in a file. |
def parse_timestamp(timestamp):
    """Parse ISO8601 timestamps given by github API.

    The parsed datetime is normalized to UTC.
    """
    parsed = dateutil.parser.parse(timestamp)
    return parsed.astimezone(dateutil.tz.tzutc())
def _call(self, x, out=None):
if out is None:
out = self.range.zero()
for i, j, op in zip(self.ops.row, self.ops.col, self.ops.data):
out[i] += op(x[j])
else:
has_evaluated_row = np.zeros(len(self.range), dtype=bool)
for i, j, op in zip(sel... | Call the operators on the parts of ``x``. |
def add_device(self, resource_name, device):
    """Bind device to resource name.

    Raises ValueError if the device is already bound elsewhere.
    """
    if device.resource_name is not None:
        msg = 'The device %r is already assigned to %s'
        raise ValueError(msg % (device, device.resource_name))
    device.resource_name = resource_name
    self._internal[resource_name] = device
def many(cls, filter=None, **kwargs):
from mongoframes.queries import Condition, Group, to_refs
kwargs['projection'], references, subs = \
cls._flatten_projection(
kwargs.get('projection', cls._default_projection)
)
if isinstance(filter, (C... | Return a list of documents matching the filter |
def status(context):
    """See which files have changed, checked in, and uploaded."""
    repo = context.obj
    repo.find_repo_type()
    repo.call([repo.vc_name, 'status'])
def promote(self, name):
    """Promote to a PartitionName by combining with a bundle Name.

    This object's entries win over the bundle name's on key conflicts.
    """
    combined = dict(name.dict)
    combined.update(self.dict)
    return PartitionName(**combined)
def _run(*args, **kwargs):
verbose = kwargs.pop("verbose", False)
if verbose:
click.secho(" ".join([repr(i) for i in args]), bg='blue', fg='white')
executable = args[0]
if not os.path.isfile(executable):
raise RuntimeError("First argument %r is not a existing file!" % executable)
if ... | Run current executable via subprocess and given args |
def _zip(self) -> ObjectValue:
    """Zip the receiver into an object and return it.

    Builds an object from a copy of the siblings and adds this member
    under its own name.
    """
    obj = ObjectValue(self.siblings.copy(), self.timestamp)
    obj[self.name] = self.value
    return obj
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.