| code (string, length 51–2.34k) | docstring (string, length 11–171) |
|---|---|
def local_variable_action(self, text, loc, var):
exshared.setpos(loc, text)
if DEBUG > 0:
print("LOCAL_VAR:",var, var.name, var.type)
if DEBUG == 2: self.symtab.display()
if DEBUG > 2: return
index = self.symtab.insert_local_var(var.name, var.type, self.... | Code executed after recognising a local variable |
def handle_existing_user(self, provider, user, access, info):
    """Log the already-registered user in and redirect them onward."""
    login(self.request, user)
    target = self.get_login_redirect(provider, user, access)
    return redirect(target)
def _make_nested_list(self, gen):
res = []
for ele in gen:
if ele is None:
res.append(None)
elif not is_string_like(ele) and is_generator_like(ele):
res.append(self._make_nested_list(ele))
else:
res.append(ele)
r... | Makes nested list from generator for creating numpy.array |
def stats(args):
logger.info("Reading sequeces")
data = parse_ma_file(args.ma)
logger.info("Get sequences from sam")
is_align = _read_sam(args.sam)
is_json, is_db = _read_json(args.json)
res = _summarise_sam(data, is_align, is_json, is_db)
_write_suma(res, os.path.join(args.out, "stats_align... | Create stats from the analysis |
def on_failure(self, exc, task_id, args, kwargs, einfo):
key = self._get_cache_key(args, kwargs)
_, penalty = cache.get(key, (0, 0))
if penalty < self.MAX_PENALTY:
penalty += 1
logger.debug('The task %s is penalized and will be executed on %d run.' % (self.name, penalty))
... | Increases penalty for the task and resets the counter. |
def com_google_fonts_check_ligature_carets(ttFont, ligature_glyphs):
if ligature_glyphs == -1:
yield FAIL, Message("malformed", "Failed to lookup ligatures."
" This font file seems to be malformed."
" For more info, read:"
" https://github.co... | Are there caret positions declared for every ligature? |
def create_experiment_summary():
temperature_list = struct_pb2.ListValue()
temperature_list.extend(TEMPERATURE_LIST)
materials = struct_pb2.ListValue()
materials.extend(HEAT_COEFFICIENTS.keys())
return summary.experiment_pb(
hparam_infos=[
api_pb2.HParamInfo(name='initial_temperature',
... | Returns a summary proto buffer holding this experiment. |
def should_exclude(self, filename) -> bool:
    """Return True when *filename* matches any configured skip glob."""
    return any(
        self.filename_matches_glob(filename, pattern)
        for pattern in self.skip_globs
    )
def add_crosshair_to_image(fname, opFilename):
    """Draw a white corner-to-corner X on the image at *fname* and save it
    to *opFilename*.
    """
    image = Image.open(fname)
    width, height = image.size
    pen = ImageDraw.Draw(image)
    # Main diagonal: top-left to bottom-right.
    pen.line((0, 0, width, height), fill=(255, 255, 255))
    # Anti-diagonal: bottom-left to top-right.
    pen.line((0, height, width, 0), fill=(255, 255, 255))
    del pen
    image.save(opFilename)
def _wrapped(self):
assignments = tuple(
a for a in functools.WRAPPER_ASSIGNMENTS if a != '__name__' and a != '__module__')
@functools.wraps(self.func, assigned=assignments)
def wrapper(*args):
return self(*args)
wrapper.__name__ = self._name
wrapper.__mod... | Wrap this udf with a function and attach docstring from func |
def pause(message='Press any key to continue . . . '):
    """Print *message* (unless it is None), then block until a key is pressed."""
    if message is not None:
        print(message, end='')
        sys.stdout.flush()
    getch()
    print()
def valid_status(*valid):
def decorator(func):
@functools.wraps(func)
def _valid_status(self, *args, **kwargs):
if self.status not in valid:
raise protocol.ProtocolError(
"`%s` called while in state: %s, valid: (%s)" % (
func.__name__, self.status, ",".join(map(str, v... | Decorator to assert that we're in a valid state. |
def generate_cdef():
include_libc_path = path.join(HERE, 'fake_libc_include')
include_vulkan_path = path.join(HERE, 'vulkan_include')
out_file = path.join(HERE, path.pardir, 'vulkan', 'vulkan.cdef.h')
header = path.join(include_vulkan_path, 'vulkan.h')
command = ['cpp',
'-std=c99',
... | Generate the cdef output file |
def _line_shift(x:Tensor, mask:bool=False):
"Shift the line i of `x` by p-i elements to the left, is `mask` puts 0s on the diagonal."
bs,nh,n,p = x.size()
x_pad = torch.cat([x.new_zeros(bs,nh,n,1), x], dim=3)
x_shift = x_pad.view(bs,nh,p + 1,n)[:,:,1:].view_as(x)
if mask: x_shift.mul_(torch.tril(x.n... | Shift the line i of `x` by p-i elements to the left, is `mask` puts 0s on the diagonal. |
async def login(self, email: str, password: str) -> bool:
login_resp = await self._request(
'post',
API_URL_USER,
json={
'version': '1.0',
'method': 'Signin',
'param': {
'Email': email,
'P... | Login to the profile. |
def send_message() -> None:
    """Serialize the queued messages and broadcast them to all WS clients."""
    if not _msg_queue:
        return
    payload = json.dumps(_msg_queue)
    _msg_queue.clear()
    for connection in module.connections:
        connection.write_message(payload)
def validate(self):
if not isinstance(self.fields, dict):
raise TypeError(u'Expected dict fields, got: {} {}'.format(
type(self.fields).__name__, self.fields))
for key, value in six.iteritems(self.fields):
validate_safe_string(key)
if not isinstance(va... | Ensure that the ConstructResult block is valid. |
def setup_failures(self, gremlins):
    """Install each failure ("gremlin") described in *gremlins*, then push
    the resulting rules to the environment.

    *gremlins* must be a dict whose 'gremlins' key lists kwargs dicts.
    """
    assert isinstance(gremlins, dict) and 'gremlins' in gremlins
    for spec in gremlins['gremlins']:
        self.setup_failure(**spec)
    self.push_rules()
def _save_percolator(self):
index = Content.search_objects.mapping.index
query_filter = self.get_content().to_dict()
q = {}
if "query" in query_filter:
q = {"query": query_filter.get("query", {})}
else:
return
es.index(
index=index,
... | saves the query field as an elasticsearch percolator |
def _get_django_queryset(self):
prefetches = []
for field, fprefetch in self.prefetches.items():
has_query = hasattr(fprefetch, 'query')
qs = fprefetch.query.queryset if has_query else None
prefetches.append(
Prefetch(field, queryset=qs)
)
... | Return Django QuerySet with prefetches properly configured. |
def create_copy_without_data(G):
    """Return a copy of the graph G with all the data removed.

    The result has the same nodes and edges as ``G``, but no node, edge,
    or graph attribute data is carried over.
    """
    H = nx.Graph()
    # Bug fix: the original iterated over the freshly created (and therefore
    # empty) graph ``H`` instead of ``G``, so it always returned an empty
    # graph regardless of the input.
    H.add_nodes_from(G.nodes())
    H.add_edges_from(G.edges())
    return H
def func_dump(func):
    """Serialize a user-defined function into ``(code, defaults, closure)``.

    The code object is marshalled and decoded via raw_unicode_escape so it
    can be carried as text; closure cell contents become a plain tuple.
    """
    code = marshal.dumps(func.__code__).decode('raw_unicode_escape')
    defaults = func.__defaults__
    closure = None
    if func.__closure__:
        closure = tuple(cell.cell_contents for cell in func.__closure__)
    return code, defaults, closure
def url(self, name):
    """Ask the blobstore API for a URL that serves the file directly."""
    gs_key = blobstore.create_gs_key('/gs' + name)
    return images.get_serving_url(gs_key)
def load_bytes(buf, num, pos):
    """Return ``(chunk, new_pos)``: *num* bytes of *buf* starting at *pos*.

    Raises BadRarFile when the buffer does not hold enough bytes.
    """
    end = pos + num
    if len(buf) < end:
        raise BadRarFile('cannot load bytes')
    return buf[pos:end], end
def collect_lockfile_dependencies(lockfile_data):
    """Convert the lockfile name->version mapping into the dependencies schema."""
    return {
        name: {
            'source': 'example-package-manager',
            'installed': {'name': version},
        }
        for name, version in lockfile_data.items()
    }
def notblocked(page):
    """Return True when *page* is not matched by any PAGES_TO_BLOCK pattern
    (i.e. it should appear in the sitemap).
    """
    for pattern in PAGES_TO_BLOCK:
        # Patterns are implicitly prefixed with a wildcard.
        if pattern[0] != '*':
            pattern = '*' + pattern
        regex = re.compile(pattern.replace('*', '[^$]*'))
        if regex.match(page):
            return False
    return True
def _show_one(audio_file):
"given an audio file, print out the artist, title and some audio attributes of the song"
print 'File: ', audio_file
pytrack = track.track_from_filename(audio_file)
print 'Artist: ', pytrack.artist if hasattr(pytrack, 'artist') else 'Unknown'
print 'Title: ... | given an audio file, print out the artist, title and some audio attributes of the song |
def map_to_linear(self, with_stocks: bool=False):
    """Flatten the account-class tree into a linear list of display rows."""
    rows = []
    for account_class in self.model.classes:
        rows.extend(self.__get_ac_tree(account_class, with_stocks))
    return rows
def ecp_pot_str(pot):
am = pot['angular_momentum']
amchar = lut.amint_to_char(am)
rexponents = pot['r_exponents']
gexponents = pot['gaussian_exponents']
coefficients = pot['coefficients']
point_places = [0, 10, 33]
s = 'Potential: {} potential\n'.format(amchar)
s += 'Type: {}\n'.format(p... | Return a string representing the data for an ECP potential |
def copy_settings(self, settings_module):
    """Copy the source file of *settings_module* to model_dir/settings.py."""
    source = inspect.getsourcefile(settings_module)
    destination = os.path.join(self.model_dir, 'settings.py')
    shutil.copyfile(source, destination)
def send_error_explain(self, code, message=None, headers=None, content_type=None):
"do not use directly"
if headers is None:
headers = {}
if code in self.responses:
if message is None:
message = self.responses[code][0]
explain = self.responses[... | do not use directly |
async def expose(self):
    """Make this application publicly available over the network."""
    facade = client.ApplicationFacade.from_connection(self.connection)
    log.debug('Exposing %s', self.name)
    return await facade.Expose(self.name)
def write_to_directory(self, dataset_info_dir):
if self.features:
self.features.save_metadata(dataset_info_dir)
if self.redistribution_info.license:
with tf.io.gfile.GFile(self._license_filename(dataset_info_dir),
"w") as f:
f.write(self.redistribution_info.licen... | Write `DatasetInfo` as JSON to `dataset_info_dir`. |
def available(self):
if not self.adb_server_ip:
return bool(self._adb)
try:
adb_devices = self._adb_client.devices()
try:
if any([self.host in dev.get_serial_no() for dev in adb_devices]):
if not self._available:
... | Check whether the ADB connection is intact. |
def _initialize_initial_state_fluents(self):
state_fluents = self.rddl.domain.state_fluents
initializer = self.rddl.instance.init_state
self.initial_state_fluents = self._initialize_pvariables(
state_fluents,
self.rddl.domain.state_fluent_ordering,
initializer... | Returns the initial state-fluents instantiated. |
def lerp(self, a, t):
    """Linearly interpolate from self toward *a* by fraction *t*."""
    delta = a.minus(self)
    return self.plus(delta.times(t))
def steady_connection(self):
    """Get a steady, non-persistent PyGreSQL connection."""
    connection = SteadyPgConnection(
        self._maxusage, self._setsession, self._closeable,
        *self._args, **self._kwargs)
    return connection
def _trigger_rpc(self, device_info):
    """Trigger an RPC in a device-specific way.

    Only software-interrupt (SWI) triggering is currently supported.
    """
    trigger = device_info.rpc_trigger
    if not isinstance(trigger, devices.RPCTriggerViaSWI):
        raise HardwareError("Unknown RPC trigger method", method=trigger)
    # Set the trigger bit in the device's SWI register.
    self._jlink.memory_write32(trigger.register, [1 << trigger.bit])
def toString(value):
if isinstance(value, basestring):
return value
elif type(value) in [np.string_, np.str_]:
return str(value)
elif type(value) == np.unicode_:
return unicode(value)
else:
raise TypeError("Could not convert %s to string ty... | Convert a value to a string, if possible. |
def run_apidoc(_):
current_directory = os.path.abspath(os.path.dirname(__file__))
output_path = os.path.join(current_directory, 'source')
cmd_path = 'sphinx-apidoc'
if hasattr(sys, 'real_prefix'):
cmd_path = os.path.abspath(os.path.join(sys.prefix, 'bin', 'sphinx-apidoc'))
main([cmd_path, '-... | Heler function for run apidoc as part of the build. |
def group_callback(self, iocb):
if _debug: IOGroup._debug("group_callback %r", iocb)
for iocb in self.ioMembers:
if not iocb.ioComplete.isSet():
if _debug: IOGroup._debug(" - waiting for child: %r", iocb)
break
else:
if _debug: IOGroup._... | Callback when a child iocb completes. |
def nested_srcmdl_xml(self, **kwargs):
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
localpath = NameFactory.nested_srcmdl_xml_format.format(**kwargs_copy)
if kwargs.get('fullpath', False):
return self.fullpat... | return the file name for source model xml files of nested sources |
def _apply_replace_backrefs(m, repl=None, flags=0):
if m is None:
raise ValueError("Match is None!")
else:
if isinstance(repl, ReplaceTemplate):
return repl.expand(m)
elif isinstance(repl, (str, bytes)):
return _bregex_parse._ReplaceParser().parse(m.re, repl, bool... | Expand with either the `ReplaceTemplate` or compile on the fly, or return None. |
def main(arguments):
global verbose
global veryVerbose
global iteration_num
global single_score
global pr_flag
global match_triple_dict
iteration_num = arguments.r + 1
if arguments.ms:
single_score = False
if arguments.v:
verbose = True
if arguments.vv:
ve... | Main function of smatch score calculation |
def _starts_with_vowel(self, letter_group: str) -> bool:
    """Check if a string starts with a vowel; empty strings do not."""
    if not letter_group:
        return False
    return self._contains_vowels(letter_group[0])
def _hide(self):
    """Hide the tray icon and drop its click-handler connections."""
    self._icon.set_visible(False)
    for handler in (self._conn_left, self._conn_right):
        self._icon.disconnect(handler)
    self._conn_left = self._conn_right = None
def api_delete(service, file_id, owner_token):
    """Delete a file already uploaded to Send; True when the server says OK."""
    endpoint = service + 'api/delete/%s' % file_id
    payload = {'owner_token': owner_token, 'delete_token': owner_token}
    response = requests.post(endpoint, json=payload)
    response.raise_for_status()
    return response.text == 'OK'
async def set(self, key, value):
if not check_dht_value_type(value):
raise TypeError(
"Value must be of type int, float, bool, str, or bytes"
)
log.info("setting '%s' = '%s' on network", key, value)
dkey = digest(key)
return await self.set_digest(d... | Set the given string key to the given value in the network. |
def remove_readonly(func, path, _):
    """Clear the read-only bit on *path* and retry the removal via *func*.

    Signature matches the (func, path, excinfo) error-handler convention of
    shutil.rmtree — presumably used as such; confirm against the caller.
    """
    os.chmod(path, stat.S_IWRITE)
    func(path)
def flush_records(self):
    """Send every buffered record over the socket, then empty the buffer."""
    payload = b"".join(record.raw_stateful() for record in self.buffer_out)
    self.socket.send(payload)
    self.buffer_out = []
def decode_mail_header(value, default_charset='us-ascii'):
try:
headers = decode_header(value)
except email.errors.HeaderParseError:
return str_decode(str_encode(value, default_charset, 'replace'), default_charset)
else:
for index, (text, charset) in enumerate(headers):
l... | Decode a header value into a unicode string. |
def sheets(self):
    """Return a Dict mapping worksheet basename to its parsed XML."""
    result = Dict()
    worksheet_sources = (
        src for src in self.zipfile.namelist() if 'xl/worksheets/' in src)
    for src in worksheet_sources:
        sheet_name = os.path.splitext(os.path.basename(src))[0]
        result[sheet_name] = self.xml(src)
    return result
def update_continuously(records, update_interval=600):
    """Endlessly refresh *records* every *update_interval* seconds.

    Per-record API/request errors are deliberately swallowed so a single
    failing record does not stop the loop.
    """
    while True:
        for record in records:
            try:
                record.update()
            except (ApiError, RequestException):
                # Best effort: skip this record until the next round.
                continue
        time.sleep(update_interval)
def write_json_or_yaml(file_path, content):
    """Serialize *content* as JSON or YAML (chosen by extension) and write it."""
    with codecs.open(file_path, "w", "utf-8") as handle:
        handle.write(get_json_or_yaml(file_path, content))
def _slice_generator(self, index):
    """Yield the blobs selected by the slice object *index*."""
    start, stop, step = index.indices(len(self))
    for position in range(start, stop, step):
        yield self.get_blob(position)
def _commit_run_log(self):
    """Persist the current run log through the backend."""
    logger.debug('Committing run log for job {0}'.format(self.name))
    self.backend.commit_log(self.run_log)
def collect(self):
    """Yield metrics from every collector in the registry.

    The registry mapping is snapshotted under the lock so the actual
    collection can run without holding it.
    """
    with self._lock:
        collectors = copy.copy(self._collector_to_names)
    for collector in collectors:
        yield from collector.collect()
def skip(self, steps=1):
    """Skip ahead by *steps* tokens (default 1) by consuming them.

    Uses the builtin ``range`` instead of ``six.moves.range``: on Python 3
    they are the same object, so the six indirection added nothing.
    """
    for _ in range(steps):
        self.next_token()
def com_google_fonts_check_valid_glyphnames(ttFont):
if ttFont.sfntVersion == b'\x00\x01\x00\x00' and ttFont.get(
"post") and ttFont["post"].formatType == 3.0:
yield SKIP, ("TrueType fonts with a format 3.0 post table contain no"
" glyph names.")
else:
import re
bad_names = []
... | Glyph names are all valid? |
def rollback(self):
if self.contents['rollback'] is None:
raise ValueError("No operation to roll back on refpkg")
future_msg = self.contents['log'][0]
rolledback_log = self.contents['log'][1:]
rollforward = copy.deepcopy(self.contents)
rollforward.pop('rollback')
... | Revert the previous modification to the refpkg. |
def decode_b64(data):
    """Base64-decode a text string and return the result as text.

    Keeps the bytes/str juggling in one place so callers never see bytes.
    """
    raw = base64.b64decode(data.encode('utf-8'))
    return raw.decode('utf-8')
def assert_numbers_almost_equal(self, actual_val, expected_val, allowed_delta=0.0001,
failure_message='Expected numbers to be within {} of each other: "{}" and "{}"'):
assertion = lambda: abs(expected_val - actual_val) <= allowed_delta
self.webdriver_assert(assertion,... | Asserts that two numbers are within an allowed delta of each other |
def run(self):
    """Wire up the window-close callback, then enter the Tk main loop."""
    window = self.toplevel
    window.protocol("WM_DELETE_WINDOW", self.__on_window_close)
    window.mainloop()
def __insert_frond_LF(d_w, d_u, dfs_data):
    """Insert the frond (d_w, d_u) into the left-side frond group."""
    dfs_data['LF'].append((d_w, d_u))
    # One more frond now lives on the left side of the frond graph.
    dfs_data['FG']['l'] += 1
    dfs_data['last_inserted_side'] = 'LF'
def _copy_settings_file(source, destination, name):
if os.path.exists(destination):
try:
ch = six.moves.input(
'File %s already exists, overwrite? y/[n]):' % destination)
if ch not in ('Y', 'y'):
return
except KeyboardInterrupt:
ret... | Copy a file from the repo to the user's home directory. |
def _merge_summary(in_files, out_file, data):
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
with open(tx_out_file, "w") as out_handle:
for i, in_file in enumerate(in_files):
with open(in_file) as in_handle:
... | Create one big summary file for disambiguation from multiple splits. |
def create(self, name, *args, **kwargs):
resource_name = self._resource_name(name)
log.info(
"Creating {} '{}'...".format(self._model_name, resource_name))
resource = self.collection.create(*args, name=resource_name, **kwargs)
self._ids.add(resource.id)
return resourc... | Create an instance of this resource type. |
def _get_total_read_size(self):
    """Return how many bytes of event data to process at once.

    A configured read_size multiplies EVENT_SIZE; otherwise read one event.
    """
    multiplier = self.read_size if self.read_size else 1
    return EVENT_SIZE * multiplier
def _repr(self, obj, level):
try:
obj_repr = type(obj).__repr__
except Exception:
obj_repr = None
def has_obj_repr(t):
r = t.__repr__
try:
return obj_repr == r
except Exception:
return obj_repr is r
... | Returns an iterable of the parts in the final repr string. |
def _isdst(dt):
    """Check whether *dt* falls in DST for the local timezone.

    Plain dates are promoted to midnight datetimes. The year is normalized
    to the current year before consulting the C library — presumably to
    sidestep historical tz-table gaps; confirm against callers.
    """
    # Exact type check on purpose: datetime is a subclass of date and must
    # NOT be combined again.
    if type(dt) == datetime.date:
        dt = datetime.datetime.combine(dt, datetime.datetime.min.time())
    normalized = dt.replace(year=datetime.datetime.now().year)
    return time.localtime(normalized.timestamp()).tm_isdst == 1
def _structure(msg, fp=None, level=0, include_default=False):
if fp is None:
fp = sys.stdout
tab = ' ' * (level * 4)
print(tab + msg.get_content_type(), end='', file=fp)
if include_default:
print(' [%s]' % msg.get_default_type(), file=fp)
else:
print(file=fp)
if msg.is_mu... | A handy debugging aid |
def _init_records(self, record_types):
for record_type in record_types:
if str(record_type) not in self._my_map['recordTypeIds']:
record_initialized = self._init_record(str(record_type))
if record_initialized:
self._my_map['recordTypeIds'].append(s... | Initalize all records for this form. |
def dom_id(self):
    """Return the dict of CLBs keyed by DOM ID, populating it on demand."""
    key = 'DOMID'
    if key not in self._by:
        # Lazily fill the cache the first time this key is requested.
        self._populate(by=key)
    return self._by[key]
def exit(self):
    """Call this to exit cleanly.

    Any pin left in SERVO mode is reset to OUTPUT, then the serial port
    (if one was opened) is closed.
    """
    for pin in getattr(self, 'digital', ()):
        if pin.mode == SERVO:
            pin.mode = OUTPUT
    if hasattr(self, 'sp'):
        self.sp.close()
def deploy_file(self,
file_name,
calc_md5=True,
calc_sha1=True,
parameters={}):
if calc_md5:
md5 = md5sum(file_name)
if calc_sha1:
sha1 = sha1sum(file_name)
target = self
if self.is_di... | Upload the given file to this path |
def display_animation(anim, **kwargs):
    """Render *anim* as an IPython HTML object for inline display."""
    from IPython.display import HTML
    html_source = anim_to_html(anim, **kwargs)
    return HTML(html_source)
def render_tooltip(self, tooltip, obj):
if self.tooltip_attr:
val = getattr(obj, self.tooltip_attr)
elif self.tooltip_value:
val = self.tooltip_value
else:
return False
setter = getattr(tooltip, TOOLTIP_SETTERS.get(self.tooltip_type))
if self.t... | Render the tooltip for this column for an object |
def handle_response(self, response, **kwargs):
num_401s = kwargs.pop('num_401s', 0)
if not self.cbt_binding_tried and self.send_cbt:
cbt_application_data = _get_channel_bindings_application_data(response)
if cbt_application_data:
try:
self.cbt_... | Takes the given response and tries kerberos-auth, as needed. |
def cio_close(cio):
    """Wrap the openjpeg library function ``opj_cio_close``."""
    # Re-declaring the argtype on every call is cheap and keeps the wrapper
    # self-contained.
    OPENJPEG.opj_cio_close.argtypes = [ctypes.POINTER(CioType)]
    OPENJPEG.opj_cio_close(cio)
def getDarkCurrentAverages(exposuretimes, imgs):
x, imgs_p = sortForSameExpTime(exposuretimes, imgs)
s0, s1 = imgs[0].shape
imgs = np.empty(shape=(len(x), s0, s1),
dtype=imgs[0].dtype)
for i, ip in zip(imgs, imgs_p):
if len(ip) == 1:
i[:] = ip[0]
e... | return exposure times, image averages for each exposure time |
def download(url, filename, overwrite = False):
from requests import get
from os.path import exists
debug('Downloading ' + unicode(url) + '...')
data = get(url)
if data.status_code == 200:
if not exists(filename) or overwrite:
f = open(filename, 'wb')
f.write(data.content)
f.close()
return True
retur... | Downloads a file via HTTP. |
def show(*args, **kw):
    """Print the value of IRAF or OS environment variables.

    With no positional arguments every variable is listed; keyword
    arguments are rejected.
    """
    if kw:
        raise TypeError('unexpected keyword argument: %r' % list(kw))
    if not args:
        listVars(prefix=" ", equals="=")
        return
    for name in args:
        print(envget(name))
def after_processing(eng, objects):
    """Update the workflow status once processing has finished."""
    parent = super(InvenioProcessingFactory, InvenioProcessingFactory)
    parent.after_processing(eng, objects)
    final_status = (WorkflowStatus.COMPLETED if eng.has_completed
                    else WorkflowStatus.HALTED)
    eng.save(final_status)
    db.session.commit()
def toStr(self):
    """Return the CSV as one string: legend line followed by one line per row."""
    rendered = [self.strLegend] + [line.toStr() for line in self.lines]
    return self.lineSeparator.join(rendered)
def __create_session(username=None, password=None):
config = Config()
if not username or not password:
username = config.username
password = config.password
payload = {
"username": username,
"password": password,
}
session_resp = re... | grabs the configuration, and makes the call to Authentise to create the session |
def redis_version(self):
    """Return the server's redis version as a tuple of ints (cached)."""
    if not hasattr(self, '_redis_version'):
        version_text = self.connection.info().get('redis_version')
        parts = version_text.split('.')[:3]
        self._redis_version = tuple(int(piece) for piece in parts)
    return self._redis_version
def _search_keys(text, keyserver, user=None):
    """Search for keys matching *text*, optionally on a specific keyserver."""
    gpg = _create_gpg(user)
    if keyserver:
        return gpg.search_keys(text, keyserver)
    return gpg.search_keys(text)
def reload(self, d=None):
    """Reload this model in place from *d*, or from the database by id."""
    if d:
        self.clear()
        self.update(d)
    elif self.id:
        # Fetch a fresh copy of our own record and replace our contents.
        fresh = self.by_id(self._id)
        self.clear()
        self.update(fresh)
    # With no source dict and no id there is nothing to reload from.
def BinaryBool(pred):
class Predicate(Bool):
def __init__(self, value, ignore_case=False):
self.value = caseless(value) if ignore_case else value
self.ignore_case = ignore_case
def __call__(self, data):
if not isinstance(data, list):
data = [data]
... | Lifts predicates that take an argument into the DSL. |
def in6_getscope(addr):
if in6_isgladdr(addr) or in6_isuladdr(addr):
scope = IPV6_ADDR_GLOBAL
elif in6_islladdr(addr):
scope = IPV6_ADDR_LINKLOCAL
elif in6_issladdr(addr):
scope = IPV6_ADDR_SITELOCAL
elif in6_ismaddr(addr):
if in6_ismgladdr(addr):
scope = IPV6... | Returns the scope of the address. |
def _draw_swap_cv(self, board_image, swap):
tile_h, tile_w = self._TILE_SHAPE[0:2]
(row_1, col_1), (row_2, col_2) = swap
t = tile_h * min(row_1, row_2)
b = tile_h * (1 + max(row_1, row_2))
l = tile_w * min(col_1, col_2)
r = tile_w * (1 + max(col_1, col_2))
top_lef... | Add a white tile border to indicate the swap. |
def dct2(input, K=13):
    """Convert a log-power spectrum to MFCC using the normalized DCT-II.

    *input* is a (nframes, N) array; the result is its projection onto the
    DCT basis from ``dctmat`` (presumably (nframes, K) — confirm there).
    """
    nframes, N = input.shape
    freqstep = numpy.pi / N
    basis = dctmat(N, K, freqstep, False)
    return numpy.dot(input, basis) * (2.0 / N)
async def _get_subscriptions(self) -> Tuple[Set[Text], Text]:
url, params = self._get_subscriptions_endpoint()
get = self.session.get(url, params=params)
async with get as r:
await self._handle_fb_response(r)
data = await r.json()
for scope in data['data']:
... | List the subscriptions currently active |
def clear_data(self):
    """Clear the audio data, freeing memory."""
    self.log(u"Clear audio_data")
    self.__samples_capacity = self.__samples_length = 0
    self.__samples = None
def to_json(self, depth=-1, **kwargs):
    """Return a JSON representation of the object (ordered, to *depth*).

    Extra keyword arguments are forwarded to ``json.dumps``.
    """
    as_dict = self.to_dict(depth=depth, ordered=True)
    return json.dumps(as_dict, **kwargs)
def send(self):
try:
for mx in self.mxrecords:
logging.info('Connecting to {} {}...'.format(mx, self.port))
server = smtplib.SMTP(mx, self.port)
server.set_debuglevel(logging.root.level < logging.WARN)
server.sendmail(
... | Attempts the delivery through recipient's domain MX records. |
def absent(name=None, canonical=None, **api_opts):
ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}
obj = __salt__['infoblox.get_cname'](name=name, canonical=canonical, **api_opts)
if not obj:
ret['result'] = True
ret['comment'] = 'infoblox already removed'
return ... | Ensure the CNAME with the given name or canonical name is removed |
def update_req(req):
if not req.name:
return req, None
info = get_package_info(req.name)
if info['info'].get('_pypi_hidden'):
print('{} is hidden on PyPI and will not be updated.'.format(req))
return req, None
if _is_pinned(req) and _is_version_range(req):
print('{} is pi... | Updates a given req object with the latest version. |
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
... | query pre-approvals by date range |
def generate_report(book_url):
shares_no = None
avg_price = None
stock_template = templates.load_jinja_template("stock_template.html")
stock_rows = ""
with piecash.open_book(book_url, readonly=True, open_if_lock=True) as book:
all_stocks = portfoliovalue.get_all_stocks(book)
for stoc... | Generates an HTML report content. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.