| code (string, lengths 51–2.34k) | docstring (string, lengths 11–171) |
|---|---|
def _return_rows(self, table, cols, values, return_type):
if return_type is dict:
cols = self.get_columns(table) if cols is '*' else cols
if len(values) > 0 and isinstance(values[0], (set, list, tuple)):
return [dict(zip(cols, row)) for row in values]
else:
... | Return fetched rows in the desired type. |
def upload_package(context):
if not context.dry_run and build_distributions(context):
upload_args = 'twine upload '
upload_args += ' '.join(Path('dist').files())
if context.pypi:
upload_args += ' -r %s' % context.pypi
upload_result = shell.dry_run(upload_args, context.dry... | Uploads your project packages to pypi with twine. |
def _show_annotation_box(self, event):
ax = event.artist.axes
if self.display != 'multiple':
annotation = self.annotations[ax]
elif event.mouseevent in self.annotations:
annotation = self.annotations[event.mouseevent]
else:
annotation = self.annotate(a... | Update an existing box or create an annotation box for an event. |
def appdata_roaming_dir():
    """Return the roaming AppData directory for the installed ArcGIS Desktop."""
    install_info = arcpy.GetInstallInfo('desktop')
    roaming = arcpy.GetSystemEnvironment("APPDATA")
    product = install_info['ProductName'] + major_version()
    return os.path.join(roaming, 'ESRI', product)
def has(cls):
deps = {}
for i in dir(cls):
if i.startswith('__') and i.endswith('__'):
continue
val = getattr(cls, i, None)
if isinstance(val, Dependency):
deps[i] = val
if val.name is None:
val.name = i
cls.__injections__ = deps
... | Class decorator that declares dependencies |
def post_commit_status(self):
if self.violations:
plural = '' if self.introduced_issues_count == 1 else 's'
description = 'Pull Request introduced {} linting violation{}'.format(
self.introduced_issues_count, plural)
self._post_status('failure', description)
... | Posts results to a commit status in GitHub if this build is for a pull request. |
def _get_logger_file_handles(self):
handles = []
for handler in self.logger.handlers:
for attr in ['sock', 'socket', 'stream']:
try:
handle = getattr(handler, attr)
if handle:
handles.append(handle)
... | Find the file handles used by our logger's handlers. |
def getPolicyValue(self):
    """Return the policy and value vectors read from the database.

    Reads the ``action`` column of table ``policy`` and the ``value``
    column of table ``V``, each as a flat list, in table row order.
    """
    self._cur.execute("SELECT action FROM policy")
    policy = [row[0] for row in self._cur.fetchall()]
    self._cur.execute("SELECT value FROM V")
    value = [row[0] for row in self._cur.fetchall()]
    return policy, value
def _repr_attributes(self):
if self._repr_attributes_override is None:
attrs = getfullargspec(self.__init__).args[1:]
mapping = self._repr_attributes_mapping
if mapping:
attrs = [mapping[a] if a in mapping else a for a in attrs]
return attrs
... | Return attributes that should be part of the repr string. |
def show_plane(orig, n, scale=1.0, **kwargs):
b1 = orthogonal_vector(n)
b1 /= la.norm(b1)
b2 = np.cross(b1, n)
b2 /= la.norm(b2)
verts = [orig + scale*(-b1 - b2),
orig + scale*(b1 - b2),
orig + scale*(b1 + b2),
orig + scale*(-b1 + b2)]
faces = [(0, 1, 2), (... | Show the plane with the given origin and normal. scale give its size |
def all_options(self):
    """Return the set of all options used in all export entries.

    ``self.data`` maps export entries to {host: option-list}; this
    flattens every option list from every host into one set.
    """
    option_lists = (
        options
        for hosts in self.data.values()
        for options in hosts.values()
    )
    return set(chain.from_iterable(option_lists))
def _import_all_troposphere_modules(self):
dirname = os.path.join(os.path.dirname(__file__))
module_names = [
pkg_name
for importer, pkg_name, is_pkg in
pkgutil.walk_packages([dirname], prefix="troposphere.")
if not is_pkg and pkg_name not in self.EXCLUDE_... | Imports all troposphere modules and returns them |
def qn(self, namespace):
    """Expand a ``prefix:tag`` name to Clark notation using the known nsmap."""
    nsmap = {
        'text': 'urn:oasis:names:tc:opendocument:xmlns:text:1.0',
    }
    parts = namespace.split(':')
    prefix, tag = parts[0], parts[1]
    return '{{{}}}{}'.format(nsmap[prefix], tag)
def isinstance_(x, A_tuple):
    """Native isinstance with the test for typing.Union overridden.

    Unions are checked member-by-member; parameterized generics fall
    back to their ``__origin__``; everything else uses plain isinstance.
    """
    if is_union(A_tuple):
        return any(isinstance_(x, member) for member in A_tuple.__args__)
    origin = getattr(A_tuple, '__origin__', None)
    if origin is not None:
        return isinstance(x, origin)
    return isinstance(x, A_tuple)
def flightmode_colour(self, flightmode):
    """Return (and cache) the colour used to render a flight mode background."""
    cmap = self.flightmode_colourmap
    if flightmode not in cmap:
        # First time we see this mode: allocate the next colour and remember it.
        cmap[flightmode] = self.next_flightmode_colour()
    return cmap[flightmode]
def write(path, data, binary=False):
    """Write *data* to the file located at *path*.

    Parameters
    ----------
    path : str
        Destination file path (truncated/created).
    data : str or bytes
        Content to write; must be bytes when *binary* is true.
    binary : bool
        Open the file in binary mode when true.
    """
    mode = "wb" if binary else "w"
    # The with-statement closes the file on exit; the original's explicit
    # f.close() inside the with-block was redundant and has been removed.
    with open(path, mode) as f:
        f.write(data)
def reader(stream, fieldnames=None):
    """Yield Item objects parsed from a stream of TSV lines.

    When *fieldnames* is not given, the first line of the stream is
    consumed and used as the field names.
    """
    if not fieldnames:
        fieldnames = load_line(stream.readline())
    for raw in stream:
        record = Item()
        record.__dict__ = dict(zip(fieldnames, load_line(raw)))
        yield record
def safe_better_repr(
self, obj, context=None, html=True, level=0, full=False
):
context = context and dict(context) or {}
recursion = id(obj) in context
if not recursion:
context[id(obj)] = obj
try:
rv = self.better_repr(obj, context, html... | Repr with inspect links on objects |
def add_accounts_to_institute(accounts_query, institute):
    """Add every non-deleted account from the query to the institute."""
    active_accounts = accounts_query.filter(date_deleted__isnull=True)
    for account in active_accounts:
        add_account_to_institute(account, institute)
def _truncate_seq_pair(self, tokens_a, tokens_b, max_length):
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
token... | Truncates a sequence pair in place to the maximum length. |
def work_wait(self):
    """Wait for new jobs to arrive.

    When notify queues are configured, blocks on a Redis BLPOP across all
    of them; the last positional argument is BLPOP's timeout in seconds,
    clamped to at least 1. With no notify queues, simply sleeps for the
    configured max latency.
    """
    if len(self.queues_with_notify) > 0:
        connections.redis.blpop(*(self.queues_with_notify + [max(1, int(self.config["max_latency"]))]))
    else:
        gevent.sleep(self.config["max_latency"])
def handle_page_crumb(func):
@wraps(func)
def wrapper(path, model, page, root_name):
path = PAGE_REGEXP.sub('', path)
breadcrumbs = func(path, model, root_name)
if page:
if page.number > 1:
breadcrumbs[-1].url = path
page_crumb = Crumb(_('Page ... | Decorator for handling the current page in the breadcrumbs. |
def add_filter(self, filter_):
    """Add an image filter for post-processing.

    Extends the cache basename with the filter's basename (so cached
    output names reflect the filter chain) before appending the filter.
    NOTE(review): the PIL availability check uses ``assert``, which is
    stripped under ``python -O`` — consider raising instead.
    """
    assert has_pil, _("Cannot add filters without python PIL")
    self.cache.basename += filter_.basename
    self._filters.append(filter_)
def rowlenselect(table, n, complement=False):
    """Select rows of length `n` (or, with *complement*, of other lengths)."""
    def has_length_n(row):
        return len(row) == n
    return select(table, has_length_n, complement=complement)
def from_soup(self,author,soup):
email = soup.find('span',class_='icon icon-mail').findParent('a').get('href').split(':')[-1] if soup.find('span',class_='icon icon-mail') else ''
facebook = soup.find('span',class_='icon icon-facebook').findParent('a').get('href') if soup.find('span',class_='icon icon-facebook') el... | Factory Pattern. Fetches contact data from given soup and builds the object |
def _pad_add_1d(av, size, stlen):
assert len(size) == 1
padx = _get_pad_left_right(av.shape[0], size[0])
mask = np.zeros(av.shape, dtype=bool)
mask[stlen:-stlen] = True
border = av[~mask]
if av.dtype.name.count("complex"):
padval = np.average(np.abs(border)) * \
np.exp(1j*np.... | 2D component of `pad_add` |
def from_jd(jd):
    """Calculate the Islamic (year, month, day) date from Julian day *jd*.

    NOTE(review): the constants 10646/10631/29.5 appear to implement the
    tabular Islamic calendar arithmetic; EPOCH and to_jd come from
    elsewhere in the module — confirm against the calendar reference
    before changing anything here.
    """
    jd = trunc(jd) + 0.5
    year = trunc(((30 * (jd - EPOCH)) + 10646) / 10631)
    month = min(12, ceil((jd - (29 + to_jd(year, 1, 1))) / 29.5) + 1)
    day = int(jd - to_jd(year, month, 1)) + 1
    return (year, month, day)
def register_scm_provider(scm_name: str):
def register_decorator(scm_class: SourceControl):
if scm_name in ScmManager.providers:
raise KeyError('{} already registered!'.format(scm_name))
ScmManager.providers[scm_name] = scm_class
SourceControl.register(scm_class)
logger.d... | Return a decorator for registering a SCM provider named `scm_name`. |
def _get_stat(self):
def dev_filter(x):
x = x.strip().split(" ")[0][:-1]
if x in self.interfaces_blacklist:
return False
if self.all_interfaces:
return True
if x in self.interfaces:
return True
return Fal... | Get statistics from devfile in list of lists of words |
def format_help(self):
if not self._cell_args:
return super(CommandParser, self).format_help()
else:
epilog = self.epilog
self.epilog = None
orig_help = super(CommandParser, self).format_help()
cell_args_help = '\nCell args:\n\n'
for cell_arg, v in six.iteritems(self._cell_ar... | Override help doc to add cell args. |
def approve_task(self, task_id):
    """Approve the task with the given id and return the response body.

    (The upstream table listed a mismatched description; this method
    POSTs ``{"approved": true}`` to the task endpoint.)
    """
    endpoint = 'tasks/%s' % task_id
    payload = json.dumps({"approved": True})
    resp, body = self.post(endpoint, payload)
    self.expected_success(200, resp.status)
    return rest_client.ResponseBody(resp, json.loads(body))
def _splpy_all_ports_ready(callable_):
if hasattr(type(callable_), 'all_ports_ready'):
try:
return callable_.all_ports_ready()
except:
ei = sys.exc_info()
if streamsx._streams._runtime._call_exit(callable_, ei):
return None
raise e1[1]
... | Call all_ports_ready for a primitive operator. |
def format_sass_stack(self):
if not self.rule_stack:
return ""
ret = ["on ", self.format_file_and_line(self.rule_stack[0]), "\n"]
last_file = self.rule_stack[0].source_file
for rule in self.rule_stack[1:]:
if rule.source_file is not last_file:
ret.... | Return a "traceback" of Sass imports. |
def make_cache_key(request):
    """Build a hashable cache key from a request's url, headers, path and query."""
    parts = request._p
    return (
        request.url,
        frozenset(parts['header'].items()),
        frozenset(parts['path'].items()),
        frozenset(parts['query']),
    )
async def verify_credentials(self):
_, public_key = self.srp.initialize()
msg = messages.crypto_pairing({
tlv8.TLV_SEQ_NO: b'\x01',
tlv8.TLV_PUBLIC_KEY: public_key})
resp = await self.protocol.send_and_receive(
msg, generate_identifier=False)
resp = _g... | Verify credentials with device. |
def read_file(filepath: str) -> str:
    """Read a file (with ``~`` expansion) and return its contents as a string."""
    expanded = os.path.expanduser(filepath)
    with open(expanded) as fh:
        return fh.read()
def issue_add(lancet, assign, add_to_sprint, summary):
summary = " ".join(summary)
issue = create_issue(
lancet,
summary,
add_to_active_sprint=add_to_sprint,
)
if assign:
if assign == "me":
username = lancet.tracker.whoami()
else:
username ... | Create a new issue on the issue tracker. |
def moving_average(self, window, method=SIMPLE):
if len(self.points) < window:
raise ArithmeticError('Not enough points for moving average')
numpy = LazyImport.numpy()
if method == TimeSeries.SIMPLE:
weights = numpy.ones(window) / float(window)
ma_x = self.timesta... | Calculate a moving average using the specified method and window |
def _send_loop(self):
while self._enable_send:
hub.sleep(self._xmit_period)
if self._remote_discr == 0 and not self._active_role:
continue
if self._remote_min_rx_interval == 0:
continue
if self._remote_demand_mode and \
... | A loop to proceed periodic BFD packet transmission. |
def chdir(path: str) -> Iterator[None]:
    """Change into *path*, yield, and always restore the previous workdir."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # Restore even if the body raised.
        os.chdir(previous)
def flush_pending(function):
s = boto3.Session()
client = s.client('lambda')
results = client.invoke(
FunctionName=function,
Payload=json.dumps({'detail-type': 'Scheduled Event'})
)
content = results.pop('Payload').read()
pprint.pprint(results)
pprint.pprint(json.loads(conten... | Attempt to acquire any pending locks. |
def save(self, filename):
filename = pathlib.Path(filename)
out = []
keys = sorted(list(self.keys()))
for key in keys:
out.append("[{}]".format(key))
section = self[key]
ikeys = list(section.keys())
ikeys.sort()
for ikey in ikey... | Save the configuration to a file |
def _check_config():
config.CONFIG_DIR.mkdir(parents=True, exist_ok=True)
verfile = config.CONFIG_DIR / '.version'
uptodate = verfile.is_file() and verfile.read_text() == __version__
if not uptodate:
verfile.write_text(__version__)
if not (uptodate and config.CONFIG_FILE.is_file()):
... | Create config files as necessary. |
def load_name(absolute_name: str):
path = absolute_name.split('.')
try:
__import__(absolute_name)
except ImportError:
try:
obj = sys.modules[path[0]]
except KeyError:
raise ModuleNotFoundError('No module named %r' % path[0])
... | Load an object based on an absolute, dotted name |
def _last_index(x, default_dim):
if x.get_shape().ndims is not None:
return len(x.get_shape()) - 1
else:
return default_dim | Returns the last dimension's index or default_dim if x has no shape. |
def implied_feature(implicit_value):
    """Return the implicit feature associated with the given implicit value."""
    assert isinstance(implicit_value, basestring)
    # The feature is identified by the first dash-separated component.
    feature_name = implicit_value.split('-')[0]
    if feature_name not in __implicit_features:
        raise InvalidValue("'%s' is not a value of an implicit feature" % implicit_value)
    return __implicit_features[feature_name]
def validate(self, value, redis):
value = self.value_or_default(value)
self.validate_required(value)
if value is None:
return None
if type(value) == str:
try:
value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
except ValueE... | Validates data obtained from a request in ISO 8061 and returns it in Datetime data type |
def _pys2code(self, line):
    """Update code in the pys code_array.

    Splits one pys line into row, col, tab and code — maxsplit=3 keeps
    any further separators inside the code part — and stores the code,
    decoded as utf-8, under the grid key. Python 2 only: relies on the
    ``unicode`` builtin.
    """
    row, col, tab, code = self._split_tidy(line, maxsplit=3)
    key = self._get_key(row, col, tab)
    self.code_array.dict_grid[key] = unicode(code, encoding='utf-8')
def getInstIdFromIndices(self, *indices):
try:
return self._idxToIdCache[indices]
except TypeError:
cacheable = False
except KeyError:
cacheable = True
idx = 0
instId = ()
parentIndices = []
for impliedFlag, modName, symName in ... | Return column instance identification from indices |
def loggers(self):
ret = []
if self.logger_name:
if isinstance(self.logger_name, logging.Logger):
ret.append((self.logger_name.name, self.logger_name))
else:
ret.append((self.logger_name, logging.getLogger(self.logger_name)))
else:
... | Return all the loggers that should be activated |
def connected():
    """List all connected minions on a salt-master.

    Prefers the connection cache when 'con_cache' is enabled in the
    master config; otherwise asks CkMinions for the connected ids.
    """
    opts = salt.config.master_config(__opts__['conf_file'])
    if opts.get('con_cache'):
        return CacheCli(opts).get_cached()
    return list(salt.utils.minions.CkMinions(opts).connected_ids())
def apptags(self):
logger.debug("populate tags map ...")
apps = self._apps.keys()
unknown = set(apps)
unknown.difference_update(self._config_apps.keys())
if unknown:
raise ValueError("unknown apps: %r" % list(unknown))
apps = [v for v in self._config_apps.valu... | Map from log app-name to an application. |
def exists(cls, id):
    """Check whether the model with *id* exists in Redis.

    True when either the instance key exists or the id is a member of
    the model's 'all' set. The second lookup is only made when the
    first one misses.
    """
    member_id = str(id)
    found = redisco.get_client().exists(cls._key[member_id])
    if not found:
        found = redisco.get_client().sismember(cls._key['all'], member_id)
    return bool(found)
def modified(self):
    """Whether the map has staged local modifications.

    True when anything has been removed, or when any current value or
    staged update reports itself as modified.
    """
    if self._removes:
        return True
    if any(self._value[key].modified for key in self._value):
        return True
    return any(self._updates[key].modified for key in self._updates)
def anorm(x, axis=None, keepdims=False):
    """Compute L2 norms along the specified axes."""
    sum_of_squares = (x * x).sum(axis=axis, keepdims=keepdims)
    return np.sqrt(sum_of_squares)
def _post_response(self, params):
return self._session.post(
self._api_url, data=params, timeout=self._timeout
).json(encoding="utf8") | wrap a post call to the requests package |
def template_from_filename(filename):
    """Return the appropriate template name for *filename*'s extension."""
    extension = filename.split(os.path.extsep)[-1]
    if extension in TEMPLATES_MAP:
        return TEMPLATES_MAP[extension]
    raise ValueError("No template for file extension {}".format(extension))
def _address_from_row(self, row):
addr = None
if "rpc_address" in row:
addr = row.get("rpc_address")
if "native_transport_address" in row:
addr = row.get("native_transport_address")
if not addr or addr in ["0.0.0.0", "::"]:
addr = row.get("peer")
... | Parse the broadcast rpc address from a row and return it untranslated. |
def read(fname, URL, URLImage):
readme = open(path.join(path.dirname(__file__), fname)).read()
if hasattr(readme, 'decode'):
readme = readme.decode('utf8')
readme = re.sub(r'`<([^>]*)>`__',
r'`\1 <' + URL + r"/blob/master/\1>`__",
readme)
readme = re.sub(r... | Read the content of a file. |
def _maybe_from_pandas(data, feature_names, feature_types):
try:
import pandas as pd
except ImportError:
return data, feature_names, feature_types
if not isinstance(data, pd.DataFrame):
return data, feature_names, feature_types
dtypes = data.dtypes
if not all(dtype.name in ('... | Extract internal data from pd.DataFrame |
def writeGenerator(self, gen):
    """Iterate over a generator object and encode everything it yields.

    Fixes: the original looked up ``gen.next`` directly, which only
    exists on Python 2 generators — the built-in ``next()`` works on
    both Python 2 (``.next``) and Python 3 (``.__next__``). The
    try-block now wraps only the ``next()`` call, so a StopIteration
    raised inside ``writeElement`` is no longer silently swallowed.
    """
    while True:
        try:
            element = next(gen)
        except StopIteration:
            break
        self.writeElement(element)
def _replace_services_in_kwargs(self, kwargs):
_check_type('kwargs', kwargs, dict)
new_kwargs = {}
for (name, value) in iteritems(kwargs):
if isinstance(value, list):
new_kwargs[name] = self._replace_services_in_args(value)
elif isinstance(value, dict):
... | Replace service references in keyed arguments dictionary |
def save(self, filename=None, debug=False):
if not filename: filename = self.name
with open(filename, "w") as f:
f.write(self.toJson(self.attrs))
if self.debug or debug:
print("saved configuration %s"%(self.name))
for k,v in sorted(iteritems(self.attrs)):
... | save a data file such that all processes know the game that is running |
def return_file_objects(connection, container, prefix='database'):
options = []
meta_data = objectstore.get_full_container_list(
connection, container, prefix='database')
env = ENV.upper()
for o_info in meta_data:
expected_file = f'database.{ENV}'
if o_info['name'].startswith(exp... | Given connecton and container find database dumps |
def register_serializers(self, serializers):
for new_serializer in serializers:
if not isinstance(new_serializer, serializer.Base):
msg = "registered serializer %s.%s does not inherit from prestans.serializer.Serializer" % (
new_serializer.__module__,
... | Adds extra serializers; generally registered during the handler lifecycle |
def _fit_and_score_ensemble(self, X, y, cv, **fit_params):
fit_params_steps = self._split_fit_params(fit_params)
folds = list(cv.split(X, y))
base_estimators, kernel_cache = self._get_base_estimators(X)
out = Parallel(
n_jobs=self.n_jobs, verbose=self.verbose
)(
... | Create a cross-validated model by training a model for each fold with the same model parameters |
def node_exclusion_predicate_builder(nodes: Iterable[BaseEntity]) -> NodePredicate:
    """Build a node predicate that returns False for the given nodes."""
    excluded = set(nodes)

    @node_predicate
    def node_exclusion_predicate(node: BaseEntity) -> bool:
        # Membership test against the snapshot taken at build time.
        return node not in excluded

    return node_exclusion_predicate
def set(self, name, option, value):
    """Set a Sentinel monitoring parameter for the given master *name*."""
    future = self.execute(b"SET", name, option, value)
    return wait_ok(future)
def _attribute_is_magic(node, attrs, parents):
if node.attrname not in attrs:
return False
if not node.last_child():
return False
try:
for cls in node.last_child().inferred():
if isinstance(cls, Super):
cls = cls._self_class
if node_is_subclass... | Checks that node is an attribute used inside one of allowed parents |
def pretty_print(self, indent=0):
s = tab = ' '*indent
s += '%s: ' %self.tag
if isinstance(self.value, basestring):
s += self.value
else:
s += '\n'
for e in self.value:
s += e.pretty_print(indent+4)
s += '\n'
return s | Print the document without tags using indentation |
def add_arguments(cls, parser, sys_arg_list=None):
    """Register configfile-mode arguments; return the handled arg names."""
    parser.add_argument(
        '-f', '--file', dest='file', required=True,
        help="config file for routing groups "
             "(only in configfile mode)")
    return ["file"]
def check_read_permission(self, user_id, do_raise=True):
    """Delegate the read-permission check to the owning resource."""
    resource = self.get_resource()
    return resource.check_read_permission(user_id, do_raise=do_raise)
def save_to_file(destination_filename, append=False):
def decorator_fn(f):
@wraps(f)
def wrapper_fn(*args, **kwargs):
res = f(*args, **kwargs)
makedirs(os.path.dirname(destination_filename))
mode = "a" if append else "w"
with open(destination_filename,... | Save the output value to file. |
def _add_view_menu(self):
mainMenu = self.app.mainMenu()
viewMenu = AppKit.NSMenu.alloc().init()
viewMenu.setTitle_(localization["cocoa.menu.view"])
viewMenuItem = AppKit.NSMenuItem.alloc().init()
viewMenuItem.setSubmenu_(viewMenu)
mainMenu.addItem_(viewMenuItem)
... | Create a default View menu that shows 'Enter Full Screen'. |
def find_count_label(d):
    """Find the member of COUNT_NAMES present in *d*.

    Exact matches are preferred; a second pass retries each candidate
    lower-cased (the original, non-lowered name is still returned).
    Returns None when nothing matches.
    """
    for candidate in COUNT_NAMES:
        if candidate in d:
            return candidate
    for candidate in COUNT_NAMES:
        if str(candidate).lower() in d:
            return candidate
def show(self):
    """Plot the graph layout of the scene.

    matplotlib is imported lazily so it is only required when plotting.
    NOTE(review): ``self.transforms`` is presumably a networkx graph —
    nx.draw renders it with node labels, then blocks on plt.show().
    """
    import matplotlib.pyplot as plt
    nx.draw(self.transforms, with_labels=True)
    plt.show()
def _detect(self):
results = []
for c in self.contracts:
functions = self.detect_suicidal(c)
for func in functions:
txt = "{}.{} ({}) allows anyone to destruct the contract\n"
info = txt.format(func.contract.name,
... | Detect the suicidal functions |
def _xray_register_type_fix(wrapped, instance, args, kwargs):
our_args = list(copy.copy(args))
if len(our_args) == 2 and isinstance(our_args[1], (XRayTracedConn, XRayTracedCursor)):
our_args[1] = our_args[1].__wrapped__
return wrapped(*our_args, **kwargs) | Send the actual connection or curser to register type. |
def load_image(fname):
    """Read an image from file without leaking a file handle.

    The docstring's intent ("PIL doesnt close nicely") was defeated in
    the original: it opened *fname* with ``with open(...)`` but then
    called ``Image.open(fname)``, creating a second handle that PIL's
    lazy loader keeps open. Here the image is opened from our own handle
    and fully loaded before the with-block closes it.
    """
    with open(fname, "rb") as f:
        img = Image.open(f)
        img.load()  # force the lazy pixel read while the handle is open
    return img
def _cal_color(self, value, color_index):
range_min_p = self._domain[color_index]
range_p = self._domain[color_index + 1] - range_min_p
try:
factor = (value - range_min_p) / range_p
except ZeroDivisionError:
factor = 0
min_color = self.colors[color_index]
... | Blend between two colors based on input value. |
def removeBiosample(self, biosample):
    """Remove the specified biosample from this repository."""
    delete_query = models.Biosample.delete().where(
        models.Biosample.id == biosample.getId())
    delete_query.execute()
def merge_dicts(*dict_list):
    """Merge every dict in *dict_list* into one dict (later keys win).

    Non-dict items are silently ignored; any failure during the merge
    yields an empty dict, matching the original best-effort contract.
    """
    dicts = [item for item in dict_list if isinstance(item, dict)]
    try:
        merged = {}
        for d in dicts:
            merged.update(d)
        return merged
    except Exception:
        return {}
def read_all(self):
    """Return the 2 byte Header ROM and all 120 byte static memory."""
    log.debug("read all static memory")
    command = "\x00\x00\x00" + self.uid
    return self.transceive(command)
def download_and_bootstrap(src, name, prereq=None):
if prereq:
prereq_cmd = '{0} -c "{1}"'.format(PY_EXE, prereq)
rv = os.system(prereq_cmd)
if rv == 0:
return
ulp = urllib2.urlopen(src)
fp = open(name, "wb")
fp.write(ulp.read())
fp.close()
cmdline = "{0} {1}"... | Download and install something if 'prerequisite' fails |
def cross(self, vector):
    """Return a new Vector that is the cross product of self and *vector*."""
    cx = self.y * vector.z - self.z * vector.y
    cy = self.z * vector.x - self.x * vector.z
    cz = self.x * vector.y - self.y * vector.x
    return Vector(cx, cy, cz)
def polylinear_gradient(colors, n):
n_out = int(float(n) / (len(colors)-1))
gradient = linear_gradient(colors[0], colors[1], n_out)
if len(colors) == len(gradient):
return gradient
for col in range(1, len(colors) - 1):
next_colors = linear_gradient(colors[col], colors[col+1], n_out+1)
... | Interpolates the color gradients between a list of hex colors. |
def cleanup_tmpdir(dirname):
    """Remove the given temporary directory if it exists (no-op on None)."""
    if dirname is None:
        return
    if os.path.exists(dirname):
        shutil.rmtree(dirname)
def dumpLines(self):
    """For debugging: dump every line and its fragments to the logger."""
    for index, line in enumerate(self.lines):
        logger.debug("Line %d:", index)
        logger.debug(line.dumpFragments())
def eccentricity(self, directed=None, weighted=None):
    """Maximum distance from each vertex to any other vertex."""
    distances = self.shortest_path(directed=directed, weighted=weighted)
    # Column-wise max: the farthest any vertex is from each target vertex.
    return distances.max(axis=0)
def local_attention1d_spatial_decoder(x, kv_dim, heads_dim,
feedforward_dim, hparams):
batch_dim, length_dim, model_dim = x.shape.dims
blocks_w_dim = mtf.Dimension("blocksw", hparams.block_length)
num_w_blocks_dim = mtf.Dimension("num_wblocks",
... | Image Transformer decoder with local1D spatial layers. |
def flux_v2(v_vars: List[fl.Var], i: int):
    """Make a Fluxion giving the squared speed of body *i*."""
    base = 3 * i
    vx, vy, vz = v_vars[base], v_vars[base + 1], v_vars[base + 2]
    return fl.square(vx) + fl.square(vy) + fl.square(vz)
def listen_for_dweets_from(thing_name, timeout=900, key=None, session=None):
url = BASE_URL + '/listen/for/dweets/from/{0}'.format(thing_name)
session = session or requests.Session()
if key is not None:
params = {'key': key}
else:
params = None
start = datetime.datetime.utcnow()
... | Create a real-time subscription to dweets |
def flatten(x):
    """Flatten an arbitrary-depth nested iterable into a flat list.

    Strings are treated as atoms, never iterated character-by-character.
    Fixes: the original referenced ``basestring`` unconditionally, which
    raises NameError on Python 3; this version falls back to
    ``(str, bytes)`` there while keeping Python 2 behaviour intact.
    """
    try:
        string_types = basestring  # Python 2
    except NameError:
        string_types = (str, bytes)  # Python 3
    result = []
    for el in x:
        if hasattr(el, "__iter__") and not isinstance(el, string_types):
            result.extend(flatten(el))
        else:
            result.append(el)
    return result
def clear_pictures(self):
    """Delete all picture blocks from the file's metadata."""
    self.metadata_blocks = [
        block for block in self.metadata_blocks if block.code != Picture.code
    ]
def backup_path(self) -> str:
    """The path to the backed up optimizer folder."""
    return "{}/{}/{}{}/optimizer_backup".format(
        conf.instance.output_path,
        self.phase_path,
        self.phase_name,
        self.phase_tag,
    )
def _guess(self, filename):
encoding = None
file_size = os.path.getsize(filename)
if not self._is_very_large(file_size):
with open(filename, "rb") as f:
if file_size == 0:
encoding = 'ascii'
else:
encoding = self... | Guess the encoding and decode the content of the file. |
def contents(self, path):
    """Return the utf-8 content of *path* at the current ref_head, or None.

    Any failure (non-zero git exit code, command error, bad utf-8) is
    treated as "not available" and yields None, as before.
    """
    spec = self.ref_head + ':' + path
    try:
        out, code, err = self.command_exec(['cat-file', '-p', spec])
        if not code:
            return out.decode('utf-8')
    except Exception:
        pass
    return None
def get(self, resource_manager, identities):
m = self.resolve(resource_manager.resource_type)
params = {}
client_filter = False
if m.filter_name:
if m.filter_type == 'list':
params[m.filter_name] = identities
elif m.filter_type == 'scalar':
... | Get resources by identities |
def run_mapper(self, stdin=sys.stdin, stdout=sys.stdout):
self.init_hadoop()
self.init_mapper()
outputs = self._map_input((line[:-1] for line in stdin))
if self.reducer == NotImplemented:
self.writer(outputs, stdout)
else:
self.internal_writer(outputs, std... | Run the mapper on the hadoop node. |
def embeddedFileDel(self, name):
    """Delete the embedded file *name* from the document.

    Raises ValueError when the document is closed or encrypted;
    otherwise delegates to the _fitz (MuPDF) binding.
    """
    if self.isClosed or self.isEncrypted:
        raise ValueError("operation illegal for closed / encrypted doc")
    return _fitz.Document_embeddedFileDel(self, name)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.