function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def get_common_ancestor(self, lnode, rnode, stop=None):
    """Return the nearest shared ancestor of `lnode` and `rnode`.

    Climbing stops at `stop` (default: the tree root); returns None when
    either node lacks a `parent` attribute before a match is found.
    """
    stop = stop or self.root
    if lnode is rnode:
        return lnode
    if stop in (lnode, rnode):
        return stop
    if not (hasattr(lnode, 'parent') and hasattr(rnode, 'parent')):
        return None
    # Climb the deeper side first so both sides reach equal depth.
    if lnode.level > rnode.level:
        return self.get_common_ancestor(lnode.parent, rnode, stop)
    if rnode.level > lnode.level:
        return self.get_common_ancestor(lnode, rnode.parent, stop)
    return self.get_common_ancestor(lnode.parent, rnode.parent, stop)
252,
22,
252,
11,
1390080851
] |
def on_fork(self, parent, lnode, rnode, items):
    """True when exactly one of `lnode`/`rnode` descends from `items`."""
    left = self.descendant_of(lnode, items, parent)
    right = self.descendant_of(rnode, items, parent)
    return left ^ right
252,
22,
252,
11,
1390080851
] |
def add_binding(self, node, value, report_redef=True):
    """Called when a binding is altered.

    - `node` is the statement responsible for the change
    - `value` is the optional new value, a Binding instance, associated
      with the binding; if None, the binding is deleted if it exists.
    - if `report_redef` is True (default), rebinding while unused will be
      reported.
    """
    redefinedWhileUnused = False
    if not isinstance(self.scope, ClassScope):
        # Look through every enclosing scope (innermost first) for an
        # unused import that this binding would shadow.
        for scope in self.scope_stack[::-1]:
            existing = scope.get(value.name)
            if (isinstance(existing, Importation)
                    and not existing.used
                    and (not isinstance(value, Importation) or
                         value.fullName == existing.fullName)
                    and report_redef
                    and not self.different_forks(node, existing.source)):
                redefinedWhileUnused = True
                self.report(messages.RedefinedWhileUnused,
                            node, value.name, existing.source)
    existing = self.scope.get(value.name)
    if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp):
        # Binding created inside a list comprehension that shadows a name
        # from outside the comprehension.
        if (existing and report_redef
                and not self.has_parent(existing.source, (ast.For, ast.ListComp))
                and not self.different_forks(node, existing.source)):
            self.report(messages.RedefinedInListComp,
                        node, value.name, existing.source)
    if (isinstance(existing, Definition)
            and not existing.used
            and not self.different_forks(node, existing.source)):
        # Redefining an unused def/class on the same fork: report, and do
        # NOT overwrite the existing binding (original behavior).
        self.report(messages.RedefinedWhileUnused,
                    node, value.name, existing.source)
    else:
        self.scope[value.name] = value
252,
22,
252,
11,
1390080851
] |
def iter_visible_scopes(self):
    """Yield the scopes visible for name resolution, innermost first.

    Only function and module scopes outside the current one take part;
    for a generator scope the directly enclosing scope is also visible.
    """
    all_but_current = itertools.islice(self.scope_stack, len(self.scope_stack) - 1)
    visible = [candidate for candidate in all_but_current
               if isinstance(candidate, (FunctionScope, ModuleScope))]
    if (isinstance(self.scope, GeneratorScope)
            and visible[-1] != self.scope_stack[-2]):
        visible.append(self.scope_stack[-2])
    visible.append(self.scope_stack[-1])
    return iter(reversed(visible))
252,
22,
252,
11,
1390080851
] |
def handle_node_store(self, node):
    """Create the appropriate Binding for a store to `node` and record it."""
    name = node_name(node)
    if not name:
        return
    # if the name hasn't already been defined in the current scope
    if isinstance(self.scope, FunctionScope) and name not in self.scope:
        # for each function or module scope above us
        for scope in self.scope_stack[:-1]:
            if not isinstance(scope, (FunctionScope, ModuleScope)):
                continue
            # if the name was defined in that scope, and the name has
            # been accessed already in the current scope, and hasn't
            # been declared global
            used = name in scope and scope[name].used
            if used and used[0] is self.scope and name not in self.scope.globals:
                # then it's probably a mistake
                self.report(messages.UndefinedLocal,
                            scope[name].used[1], name, scope[name].source)
                break
    parent = getattr(node, 'parent', None)
    # Choose a binding kind from the syntactic context of the store:
    # loop/comprehension/unpacking targets get a plain Binding, a module
    # level `__all__` assignment gets an ExportBinding, everything else
    # is an ordinary Assignment.
    if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)):
        binding = Binding(name, node)
    elif (parent is not None and name == '__all__' and
          isinstance(self.scope, ModuleScope)):
        binding = ExportBinding(name, parent.value)
    else:
        binding = Assignment(name, node)
    if name in self.scope:
        # carry forward any recorded use of the previous binding
        binding.used = self.scope[name].used
    self.add_binding(node, binding)
252,
22,
252,
11,
1390080851
] |
def handle_children(self, tree):
    """Dispatch every direct AST child of `tree` through handleNode."""
    for child in ast.iter_child_nodes(tree):
        self.handleNode(child, tree)
252,
22,
252,
11,
1390080851
] |
def docstring(self, node):
    """Return (docstring_text, approx_first_lineno) for `node`.

    `node` may be the Expr statement wrapping the string constant; returns
    (None, None) when `node` is not a string literal.
    NOTE(review): relies on `ast.Str`, which was removed in Python 3.12.
    """
    if isinstance(node, ast.Expr):
        node = node.value
    if not isinstance(node, ast.Str):
        return (None, None)
    # Computed incorrectly if the docstring has backslash
    # (a line continuation makes the newline count disagree with lineno).
    doctest_lineno = node.lineno - node.s.count('\n') - 1
    return (node.s, doctest_lineno)
252,
22,
252,
11,
1390080851
] |
def handle_doctests(self, node):
    """Extract doctest examples from `node`'s docstring and check them as code."""
    try:
        docstring, node_lineno = self.docstring(node.body[0])
        if not docstring:
            return
        examples = self._get_doctest_examples(docstring)
    except (ValueError, IndexError):
        # e.g. line 6 of the docstring for <string> has inconsistent
        # leading whitespace: ...
        return
    node_offset = self.offset or (0, 0)
    self.push_scope()
    for example in examples:
        try:
            tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST)
        except SyntaxError:
            e = sys.exc_info()[1]
            # Translate the example-relative error position to the file.
            position = (node_lineno + example.lineno + e.lineno,
                        example.indent + 4 + (e.offset or 0))
            self.report(messages.DoctestSyntaxError, node, position)
        else:
            # Shift reported positions into the docstring while the
            # example's tree is being checked, then restore.
            self.offset = (node_offset[0] + node_lineno + example.lineno,
                           node_offset[1] + example.indent + 4)
            self.handle_children(tree)
            self.offset = node_offset
    self.pop_scope()
252,
22,
252,
11,
1390080851
] |
def is_generator(self, node):
    """Checks whether a function is a generator by looking for a yield
    statement or expression.

    Nested statements with bodies are searched recursively; nested
    function definitions are skipped (their yields belong to them).
    """
    if not isinstance(node.body, list):
        # lambdas can not be generators
        return False
    for stmt in node.body:
        if (isinstance(stmt, (ast.Assign, ast.Expr))
                and isinstance(stmt.value, ast.Yield)):
            return True
        if (hasattr(stmt, 'body') and not isinstance(stmt, ast.FunctionDef)
                and self.is_generator(stmt)):
            return True
    return False
252,
22,
252,
11,
1390080851
] |
def GLOBAL(self, node):
    """Keep track of globals declarations."""
    if not isinstance(self.scope, FunctionScope):
        # `global` at module/class level has no effect on tracking
        return
    self.scope.globals.update(node.names)
252,
22,
252,
11,
1390080851
] |
def LISTCOMP(self, node):
    """Visit a list comprehension: generators first so their targets are
    bound before the element expression is checked."""
    for generator in node.generators:
        self.handleNode(generator, node)
    self.handleNode(node.elt, node)
252,
22,
252,
11,
1390080851
] |
def DICTCOMP(self, node):
    """Visit a dict comprehension inside its own generator scope."""
    self.push_scope(GeneratorScope)
    for generator in node.generators:
        self.handleNode(generator, node)
    for part in (node.key, node.value):
        self.handleNode(part, node)
    self.pop_scope()
252,
22,
252,
11,
1390080851
] |
def collectLoopVars(n):
    # Closure helper: append every Name bound in the loop target `n` to the
    # enclosing scope's `vars` list (note: `vars` here is a free variable
    # from the enclosing function and shadows the builtin).
    if isinstance(n, ast.Name):
        vars.append(n.id)
    elif isinstance(n, ast.expr_context):
        # expression contexts (Load/Store/...) carry no names
        return
    else:
        # tuple/list/starred targets: recurse into children
        for c in ast.iter_child_nodes(n):
            collectLoopVars(c)
252,
22,
252,
11,
1390080851
] |
def NAME(self, node):
    """Handle occurrence of Name (which can be a load/store/delete
    access.)"""
    # Locate the name in locals / function / globals scopes.
    if isinstance(node.ctx, (ast.Load, ast.AugLoad)):
        self.handle_node_load(node)
        if (node.id == 'locals' and isinstance(self.scope, FunctionScope)
                and isinstance(node.parent, ast.Call)):
            # we are doing locals() call in current scope;
            # disables some unused-variable reporting for this scope
            self.scope.uses_locals = True
    elif isinstance(node.ctx, (ast.Store, ast.AugStore)):
        self.handle_node_store(node)
    elif isinstance(node.ctx, ast.Del):
        self.handle_node_delete(node)
    else:
        # must be a Param context -- this only happens for names in function
        # arguments, but these aren't dispatched through here
        raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
252,
22,
252,
11,
1390080851
] |
def FUNCTIONDEF(self, node):
    """Check a function definition: its decorators, the binding it creates,
    its signature/body (shared with LAMBDA), and optionally its doctests."""
    for decorator in node.decorator_list:
        self.handleNode(decorator, node)
    self.add_binding(node, FunctionDefinition(node.name, node))
    self.LAMBDA(node)
    if self.settings.get('run_doctests', False):
        self.defer_function(lambda: self.handle_doctests(node))
252,
22,
252,
11,
1390080851
] |
def addArgs(arglist):
    # Closure helper: flatten the (possibly nested) argument list into the
    # enclosing `args` list, reporting duplicates via the enclosing `self`.
    for arg in arglist:
        if isinstance(arg, ast.Tuple):
            # Py2 allowed nested tuple parameters: def f(a, (b, c)): ...
            addArgs(arg.elts)
        else:
            if arg.id in args:
                self.report(messages.DuplicateArgument,
                            node, arg.id)
            args.append(arg.id)
252,
22,
252,
11,
1390080851
] |
def runFunction():
    # Deferred closure: check the function body in its own scope once the
    # module level has been fully processed (free vars: self, node, args, PY2).
    self.push_scope()
    for name in args:
        # arguments never count as redefinitions
        self.add_binding(node, Argument(name, node), report_redef=False)
    if isinstance(node.body, list):
        # case for FunctionDefs
        for stmt in node.body:
            self.handleNode(stmt, node)
    else:
        # case for Lambdas
        self.handleNode(node.body, node)

    def checkUnusedAssignments():
        """Check to see if any assignments have not been used."""
        for name, binding in self.scope.unusedAssignments():
            self.report(messages.UnusedVariable, binding.source, name)

    self.defer_assignment(checkUnusedAssignments)
    if PY2:
        def checkReturnWithArgumentInsideGenerator():
            """Check to see if there are any return statements with
            arguments but the function is a generator."""
            if self.is_generator(node):
                stmt = self.find_return_with_argument(node)
                if stmt is not None:
                    self.report(messages.ReturnWithArgsInsideGenerator, stmt)
        self.defer_assignment(checkReturnWithArgumentInsideGenerator)
    self.pop_scope()
252,
22,
252,
11,
1390080851
] |
def CLASSDEF(self, node):
    """Check names used in a class definition, including its decorators,
    base classes, and the body of its definition.
    Additionally, add its name to the current scope.
    """
    for deco in node.decorator_list:
        self.handleNode(deco, node)
    for baseNode in node.bases:
        self.handleNode(baseNode, node)
    if not PY2:
        # Py3 only: metaclass/keyword arguments in the class header
        for keywordNode in node.keywords:
            self.handleNode(keywordNode, node)
    self.push_scope(ClassScope)
    if self.settings.get('run_doctests', False):
        self.defer_function(lambda: self.handle_doctests(node))
    for stmt in node.body:
        self.handleNode(stmt, node)
    self.pop_scope()
    # Bind the class name in the enclosing scope after the body is checked.
    self.add_binding(node, ClassDefinition(node.name, node))
252,
22,
252,
11,
1390080851
] |
def AUGASSIGN(self, node):
    """Augmented assignment (`x += v`): the target is read first, then the
    value is visited, then the target is stored again."""
    self.handle_node_load(node.target)
    self.handleNode(node.value, node)
    self.handleNode(node.target, node)
252,
22,
252,
11,
1390080851
] |
def IMPORTFROM(self, node):
    """Handle `from X import ...`, tracking __future__ placement and
    star imports."""
    if node.module == '__future__':
        if not self.futures_allowed:
            self.report(messages.LateFutureImport,
                        node, [n.name for n in node.names])
    else:
        # any non-future import closes the window where __future__ is legal
        self.futures_allowed = False
    for alias in node.names:
        if alias.name == '*':
            self.scope.importStarred = True
            self.report(messages.ImportStarUsed, node, node.module)
            continue
        name = alias.asname or alias.name
        importation = Importation(name, node)
        if node.module == '__future__':
            # __future__ imports are implicitly "used"
            importation.used = (self.scope, node)
        self.add_binding(node, importation)
252,
22,
252,
11,
1390080851
] |
def setUp(self):
    # Test fixture: load a reference waveform and precompute the WORLD
    # analysis products shared by the test methods.
    signal, samplingrate, _ = waveread("test/cmu_arctic/arctic_a0001.wav")
    self.world = World(samplingrate)
    self.alpha = estimate_alpha(samplingrate)
    self.samplingrate = samplingrate
    self.signal = signal
    self.f0, self.spec_mat, _ = self.world.analyze(signal)
    # NOTE(review): ExcitePulse is built with a hard-coded 16000 Hz rather
    # than `samplingrate` -- presumably intentional for this fixture; confirm.
    self.ep = ExcitePulse(16000, 80, False)
    self.order = 24
16,
5,
16,
1,
1439190266
] |
def split_to_field_and_filter_type(filter_name):
    """Split `filter_name` into (field, operator) on the operator separator.

    Returns (filter_name, None) when the trailing token is not a known
    operator.
    """
    parts = filter_name.split(OPERATOR_SEPARATOR)
    candidate = parts[-1] if parts else None
    if candidate in ALL_OPERATORS:
        return OPERATOR_SEPARATOR.join(parts[:-1]), candidate
    return filter_name, None
14,
6,
14,
14,
1417874861
] |
def _get_keystone_manager_class(endpoint, token, api_version):
    """Return KeystoneManager class for the given API version

    @param endpoint: the keystone endpoint to point client at
    @param token: the keystone admin_token
    @param api_version: version of the keystone api the client should use
    @returns keystonemanager class used for interrogating keystone
    @raises ValueError: when api_version is neither 2 nor 3
    """
    managers = {2: KeystoneManager2, 3: KeystoneManager3}
    if api_version in managers:
        return managers[api_version](endpoint, token)
    raise ValueError('No manager found for api version {}'.format(api_version))
3,
2,
3,
1,
1500949023
] |
def get_keystone_manager(endpoint, token, api_version=None):
    """Return a keystonemanager for the correct API version

    If api_version has not been set then create a manager based on the endpoint
    Use this manager to query the catalogue and determine which api version
    should actually be being used. Return the correct client based on that.
    Function is wrapped in a retry_on_exception to catch the case where the
    keystone service is still initialising and not responding to requests yet.
    XXX I think the keystone client should be able to do version
    detection automatically so the code below could be greatly
    simplified

    @param endpoint: the keystone endpoint to point client at
    @param token: the keystone admin_token
    @param api_version: version of the keystone api the client should use
    @returns keystonemanager class used for interrogating keystone
    """
    if api_version:
        return _get_keystone_manager_class(endpoint, token, api_version)
    else:
        # Guess the version from the endpoint URL to get a usable client.
        if 'v2.0' in endpoint.split('/'):
            manager = _get_keystone_manager_class(endpoint, token, 2)
        else:
            manager = _get_keystone_manager_class(endpoint, token, 3)
        # Strip the version suffix off the endpoint to get the base URL.
        if endpoint.endswith('/'):
            base_ep = endpoint.rsplit('/', 2)[0]
        else:
            base_ep = endpoint.rsplit('/', 1)[0]
        # Find the identity service, then read the advertised admin URL's
        # version segment (the last entry found wins).
        svc_id = None
        for svc in manager.api.services.list():
            if svc.type == 'identity':
                svc_id = svc.id
        version = None
        for ep in manager.api.endpoints.list():
            if ep.service_id == svc_id and hasattr(ep, 'adminurl'):
                version = ep.adminurl.split('/')[-1]
        # Rebuild a manager against the advertised version, or fall back to
        # the initial guess when nothing matched.
        if version and version == 'v2.0':
            new_ep = base_ep + "/" + 'v2.0'
            return _get_keystone_manager_class(new_ep, token, 2)
        elif version and version == 'v3':
            new_ep = base_ep + "/" + 'v3'
            return _get_keystone_manager_class(new_ep, token, 3)
        else:
            return manager
3,
2,
3,
1,
1500949023
] |
def resolve_domain_id(self, name):
    """Domains do not exist in the v2 API; always resolves to None."""
    return None
3,
2,
3,
1,
1500949023
] |
def resolve_service_id(self, name, service_type=None):
    """Find the service_id of a given service

    When `service_type` is supplied, the service must match both the
    (case-insensitive) name and the type.
    """
    wanted = name.lower()
    for svc in (s._info for s in self.api.services.list()):
        if svc['name'].lower() != wanted:
            continue
        if not service_type or service_type == svc['type']:
            return svc['id']
3,
2,
3,
1,
1500949023
] |
def __init__(self, endpoint, token):
    # Keystone v2 manager: thin wrapper around the python-keystoneclient v2
    # client, authenticated with the admin token.
    self.api_version = 2
    self.api = client.Client(endpoint=endpoint, token=token)
3,
2,
3,
1,
1500949023
] |
def create_endpoints(self, region, service_id, publicurl, adminurl,
                     internalurl):
    """Register public/admin/internal URLs for a service (one v2 call)."""
    self.api.endpoints.create(
        region=region,
        service_id=service_id,
        publicurl=publicurl,
        adminurl=adminurl,
        internalurl=internalurl,
    )
3,
2,
3,
1,
1500949023
] |
def resolve_tenant_id(self, name, domain=None):
    """Find the tenant_id of a given tenant

    `domain` is accepted for signature parity with the v3 manager but is
    unused in the v2 API.
    """
    wanted = name.lower()
    for tenant in self.api.tenants.list():
        info = tenant._info
        if info['name'].lower() == wanted:
            return info['id']
3,
2,
3,
1,
1500949023
] |
def delete_tenant(self, tenant_id):
    """Delete the tenant identified by `tenant_id` via the v2 tenants API."""
    tenants_api = self.api.tenants
    tenants_api.delete(tenant_id)
3,
2,
3,
1,
1500949023
] |
def update_password(self, user, password):
    """Set a new password for `user` via the v2 users API."""
    users_api = self.api.users
    users_api.update_password(user=user, password=password)
3,
2,
3,
1,
1500949023
] |
def add_user_role(self, user, role, tenant, domain):
    """Grant `role` to `user` on `tenant`.

    `domain` is accepted for signature parity with the v3 manager but is
    meaningless in the v2 API and therefore ignored.
    """
    self.api.roles.add_user_role(user=user, role=role, tenant=tenant)
3,
2,
3,
1,
1500949023
] |
def __init__(self, endpoint, token):
    # Keystone v3 manager: build a token-authenticated session and hand it
    # to the python-keystoneclient v3 client.
    self.api_version = 3
    keystone_auth_v3 = token_endpoint.Token(endpoint=endpoint, token=token)
    keystone_session_v3 = session.Session(auth=keystone_auth_v3)
    self.api = keystoneclient_v3.Client(session=keystone_session_v3)
3,
2,
3,
1,
1500949023
] |
def resolve_domain_id(self, name):
    """Find the domain_id of a given domain"""
    wanted = name.lower()
    for dom in self.api.domains.list():
        info = dom._info
        if info['name'].lower() == wanted:
            return info['id']
3,
2,
3,
1,
1500949023
] |
def create_endpoints(self, region, service_id, publicurl, adminurl,
                     internalurl):
    """Register one endpoint per interface (public/admin/internal), as the
    v3 endpoints API requires."""
    for interface, url in (('public', publicurl),
                           ('admin', adminurl),
                           ('internal', internalurl)):
        self.api.endpoints.create(service_id, url, interface=interface,
                                  region=region)
3,
2,
3,
1,
1500949023
] |
def create_domain(self, domain_name, description):
    """Create a keystone domain named `domain_name`."""
    domains_api = self.api.domains
    domains_api.create(domain_name, description=description)
3,
2,
3,
1,
1500949023
] |
def delete_tenant(self, tenant_id):
    """Delete a tenant; the v3 API models tenants as projects."""
    projects_api = self.api.projects
    projects_api.delete(tenant_id)
3,
2,
3,
1,
1500949023
] |
def update_password(self, user, password):
    """Set a new password for `user` via the v3 users API."""
    users_api = self.api.users
    users_api.update(user, password=password)
3,
2,
3,
1,
1500949023
] |
def add_user_role(self, user, role, tenant, domain):
    """Grant `role` to `user`.

    The v3 API grants against a domain or a project; each non-empty target
    gets its own grant call (normally callers pass one, not both).
    """
    if domain:
        self.api.roles.grant(role, user=user, domain=domain)
    if tenant:
        self.api.roles.grant(role, user=user, project=tenant)
3,
2,
3,
1,
1500949023
] |
def set_test_params(self):
    """Run a single node against a fresh (clean) chain."""
    self.num_nodes = 1
    self.setup_clean_chain = True
10,
7,
10,
2,
1457071573
] |
def isDividerLine(line):
    """True for a newline-terminated run of at least 80 characters that is
    all slashes (the exact width in the packed files is inconsistent,
    hence >= rather than an exact length)."""
    if len(line) < 80 or not line.endswith("\n"):
        return False
    return all(ch == '/' for ch in line[:-1])
418,
93,
418,
657,
1483407157
] |
def readFileLines(filename):
    """Return all lines of `filename` (newlines kept).

    Uses a context manager so the file handle is closed even if
    readlines() raises (the original leaked the descriptor on error).
    """
    with open(filename, 'r') as f:
        return f.readlines()
418,
93,
418,
657,
1483407157
] |
def unpackFile(lines):
    """Split a packed multi-file listing into {filename: content_length}.

    A file begins after a 5-line header: divider, spacer, "// name //",
    spacer, divider.

    Fixes two defects in the original: the computed `sizes` dict was never
    returned, and the final file's contents were sliced with the stale loop
    index `i` (len(lines) - 1), silently dropping its last line.
    """
    sizes = {}
    currentFileStart = None
    currentFileName = None
    for i in range(len(lines)):
        if (i + 4 < len(lines) and isDividerLine(lines[i])
                and isSpacerLine(lines[i + 1]) and isSpacerLine(lines[i + 3])
                and isDividerLine(lines[i + 4])):
            if currentFileName:
                fileContents = '\n'.join(lines[currentFileStart:i])
                sizes[currentFileName] = len(fileContents)
            currentFileStart = i + 5
            # header line looks like "// path/to/file //"
            currentFileName = lines[i + 2].strip()[2:-2].strip()
    if currentFileName:
        fileContents = '\n'.join(lines[currentFileStart:len(lines)])
        sizes[currentFileName] = len(fileContents)
    return sizes
418,
93,
418,
657,
1483407157
] |
def ancestorPaths(filename):
    """List '.' followed by every proper ancestor directory of `filename`.

    'a/b/c' -> ['.', 'a', 'a/b']
    """
    parts = filename.split('/')
    result = ['.']
    for depth in range(1, len(parts)):
        result.append('/'.join(parts[:depth]))
    return result
418,
93,
418,
657,
1483407157
] |
def packagesFileToDependencyRoots(packagesFileName):
    """Load a packages JSON file and return its "dependencies" mapping.

    Fixes two defects: `dependencies` was an undefined bare name (NameError
    at runtime) instead of the string key, and the file handle leaked if
    JSON parsing raised.
    """
    with open(packagesFileName, 'r') as f:
        packagesJson = json.loads(f.read())
    return packagesJson['dependencies']
418,
93,
418,
657,
1483407157
] |
def create(kernel):
    """Build the Building template for the small Tatooine evil-nomad POI.

    Fixes the missing return: the constructed object was built and then
    dropped, so callers always received None (every sibling template
    factory in this codebase ends with `return result`).
    """
    result = Building()
    result.template = "object/building/poi/shared_tatooine_evil_nomad_small2.iff"
    result.attribute_template_id = -1
    result.stfName("poi_n","base_poi_building")
    return result
62,
37,
62,
37,
1297996365
] |
def get_info(name):
    """Look up `name` in module globals, falling back to `name + "_info"`,
    then to an empty dict."""
    module_globals = globals()
    fallback = module_globals.get(name + "_info", {})
    return module_globals.get(name, fallback)
1086,
234,
1086,
13,
1476901359
] |
def definition(name=None):
    'Returns a valid definition.'
    # Deterministic for a given `name` (used as the RNG seed). Each of the
    # 256 byte values appears exactly once in the even positions and exactly
    # once in the odd positions of the 512-char result.
    # Fixed for Python 3: `range` objects no longer support item deletion,
    # so explicit lists are used; the RNG call order is preserved.
    random.seed(name)
    pieces = []
    list_one, list_two = list(range(256)), list(range(256))
    for index in range(256):
        index_one = random.randrange(256 - index)
        index_two = random.randrange(256 - index)
        pieces.append(chr(list_one.pop(index_one)) + chr(list_two.pop(index_two)))
    return ''.join(pieces)
1884,
686,
1884,
41,
1500923597
] |
def key(definition, select):
    'Returns a valid key.'
    # Derive a 256-char substitution table from a pairing `definition`;
    # `select` (truthy/falsy) chooses the mapping direction.
    # Fixed for Python 3: builds the table in a real list instead of
    # assigning into (and joining) a `range` object.
    offset = int(bool(select))
    table = [None] * 256
    for index in range(256):
        table[ord(definition[index * 2 + offset])] = definition[index * 2 + (1 - offset)]
    return ''.join(table)
1884,
686,
1884,
41,
1500923597
] |
def main():
    """Create a V8 snapshot with mksnapshot and boot Electron against it.

    Returns a process-style exit code (0 on success).
    """
    args = parse_args()
    source_root = os.path.abspath(args.source_root)
    initial_app_path = os.path.join(source_root, args.build_dir)
    # Work on a disposable copy so the real build output is never mutated.
    app_path = create_app_copy(initial_app_path)
    returncode = 0
    try:
        with scoped_cwd(app_path):
            if args.snapshot_files_dir is None:
                # Regenerate the snapshot from the recorded mksnapshot args.
                with open(os.path.join(app_path, 'mksnapshot_args')) as f:
                    mkargs = f.read().splitlines()
                subprocess.check_call(mkargs + [ SNAPSHOT_SOURCE ], cwd=app_path)
                print('ok mksnapshot successfully created snapshot_blob.bin.')
                context_snapshot = 'v8_context_snapshot.bin'
                if platform.system() == 'Darwin':
                    # macOS context snapshots are per-architecture.
                    if os.environ.get('TARGET_ARCH') == 'arm64':
                        context_snapshot = 'v8_context_snapshot.arm64.bin'
                    else:
                        context_snapshot = 'v8_context_snapshot.x86_64.bin'
                context_snapshot_path = os.path.join(app_path, context_snapshot)
                gen_binary = get_binary_path('v8_context_snapshot_generator', \
                                             app_path)
                genargs = [ gen_binary, \
                            '--output_file={0}'.format(context_snapshot_path) ]
                subprocess.check_call(genargs)
                print('ok v8_context_snapshot_generator successfully created ' \
                      + context_snapshot)
                if args.create_snapshot_only:
                    return 0
            else:
                # Reuse pre-generated snapshot .bin files.
                gen_bin_path = os.path.join(args.snapshot_files_dir, '*.bin')
                generated_bin_files = glob.glob(gen_bin_path)
                for bin_file in generated_bin_files:
                    shutil.copy2(bin_file, app_path)
            test_path = os.path.join(SOURCE_ROOT, 'spec', 'fixtures', \
                                     'snapshot-items-available')
            if sys.platform == 'darwin':
                # On macOS the .bin files must live inside the framework bundle.
                bin_files = glob.glob(os.path.join(app_path, '*.bin'))
                app_dir = os.path.join(app_path, '{0}.app'.format(PRODUCT_NAME))
                electron = os.path.join(app_dir, 'Contents', 'MacOS', PRODUCT_NAME)
                bin_out_path = os.path.join(app_dir, 'Contents', 'Frameworks',
                                            '{0} Framework.framework'.format(PROJECT_NAME),
                                            'Resources')
                for bin_file in bin_files:
                    shutil.copy2(bin_file, bin_out_path)
            elif sys.platform == 'win32':
                electron = os.path.join(app_path, '{0}.exe'.format(PROJECT_NAME))
            else:
                electron = os.path.join(app_path, PROJECT_NAME)
            # Launch Electron against a fixture that requires snapshot items.
            subprocess.check_call([electron, test_path])
            print('ok successfully used custom snapshot.')
    except subprocess.CalledProcessError as e:
        print('not ok an error was encountered while testing mksnapshot.')
        print(e)
        returncode = e.returncode
    except KeyboardInterrupt:
        print('Other error')
        returncode = 0
    print('Returning with error code: {0}'.format(returncode))
    return returncode
106249,
14423,
106249,
940,
1365731256
] |
def create_app_copy(initial_app_path):
    """Copy the built app next to itself (suffix "-mksnapshot-test"),
    replacing any stale copy, and return the copy's path."""
    print('Creating copy of app for testing')
    parent_dir = os.path.dirname(initial_app_path)
    copy_name = os.path.basename(initial_app_path) + '-mksnapshot-test'
    app_path = os.path.join(parent_dir, copy_name)
    rm_rf(app_path)  # drop leftovers from a previous run
    shutil.copytree(initial_app_path, app_path, symlinks=True)
    return app_path
106249,
14423,
106249,
940,
1365731256
] |
def parse_args():
    """Parse command-line options for the mksnapshot test runner."""
    parser = argparse.ArgumentParser(description='Test mksnapshot')
    # NOTE: the multi-line help strings use in-literal line continuations,
    # so the continuation indentation is part of the displayed text.
    parser.add_argument('-b', '--build-dir',
                        help='Path to an Electron build folder. \
                        Relative to the --source-root.',
                        default=None,
                        required=True)
    parser.add_argument('--create-snapshot-only',
                        help='Just create snapshot files, but do not run test',
                        action='store_true')
    parser.add_argument('--snapshot-files-dir',
                        help='Directory containing snapshot files to use \
                        for testing',
                        default=None,
                        required=False)
    parser.add_argument('--source-root',
                        default=SOURCE_ROOT,
                        required=False)
    return parser.parse_args()
106249,
14423,
106249,
940,
1365731256
] |
def _test_load_store_instrument(source_lsdsng, lsdinst_path, original_index):
    # Round-trip check: importing `lsdinst_path` into slot 0x2a must yield an
    # instrument equal to the one at `original_index`, and exporting it back
    # must reproduce the original .lsdinst JSON exactly.
    proj = load_lsdsng(source_lsdsng)
    proj.song.instruments.import_from_file(0x2a, lsdinst_path)
    target_instr = proj.song.instruments[0x2a]
    original_instr = proj.song.instruments[original_index]
    assert_equal(original_instr, target_instr)
    with temporary_file() as tmpfile:
        original_instr.export_to_file(tmpfile)
        with open(tmpfile, 'r') as fp:
            saved_inst = json.load(fp)
        with open(lsdinst_path, 'r') as fp:
            original_inst = json.load(fp)
        assert_equal(original_inst, saved_inst)
25,
2,
25,
3,
1407729384
] |
def test_load_store_pulse_instrument():
    """Round-trip a pulse instrument through import/export."""
    song_path = os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng')
    inst_path = os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x03_pulse.lsdinst')
    _test_load_store_instrument(song_path, inst_path, 0x03)
25,
2,
25,
3,
1407729384
] |
def test_load_store_noise_instrument():
    """Round-trip a noise instrument through import/export."""
    song_path = os.path.join(SCRIPT_DIR, 'test_data', 'ANNARKTE.lsdsng')
    inst_path = os.path.join(SCRIPT_DIR, 'test_data', 'ANNARKTE_0x06_noise.lsdinst')
    _test_load_store_instrument(song_path, inst_path, 0x06)
25,
2,
25,
3,
1407729384
] |
def execute(my):
    # Database backup: destination is BACKUP_DIR + DB_DIR; the timestamp
    # string names the dump (YYYYMMDD_HHMM).
    # NOTE(review): `base_dir` and `date` are unused in the visible lines --
    # the remainder of this method appears to be cut off in this view.
    base_dir = "%s%s" % (BACKUP_DIR, DB_DIR)
    import datetime
    now = datetime.datetime.now()
    date = now.strftime("%Y%m%d_%H%M")
473,
170,
473,
29,
1378771601
] |
def execute(my):
    # Project-files backup into BACKUP_DIR + PROJECT_DIR.
    # NOTE(review): only the directory computation is visible; the body
    # appears truncated in this view.
    base_dir = "%s%s" % (BACKUP_DIR, PROJECT_DIR)
473,
170,
473,
29,
1378771601
] |
def execute(my):
    # Asset-files backup into BACKUP_DIR + ASSETS_DIR, sourced from the
    # configured asset directory.
    # NOTE(review): the body appears truncated in this view.
    base_dir = "%s%s" % (BACKUP_DIR, ASSETS_DIR)
    asset_dir = Environment.get_asset_dir()
473,
170,
473,
29,
1378771601
] |
def execute(my, directory, days):
    '''Removes files in directory older than specified days.'''
    print("Pruning backup files older than [%s] days" % days)
    import datetime
    now = datetime.datetime.today()
    max_age = datetime.timedelta(days=days)
    for entry in os.listdir(directory):
        path = "%s/%s" % (directory, entry)
        # age is judged by inode-change time (st_ctime), as before
        changed = datetime.datetime.fromtimestamp(os.stat(path).st_ctime)
        if now - changed > max_age:
            os.unlink(path)
473,
170,
473,
29,
1378771601
] |
def __init__(self, **kwargs):
    """Store every keyword argument as an attribute, coercing `port` to int.

    BUG FIX: the original set `port` to int(v) and then unconditionally
    overwrote it with the raw (string) value on the next line; the int
    conversion now actually takes effect.
    """
    for k, v in kwargs.items():
        if k == 'port':
            setattr(self, k, int(v))
        else:
            setattr(self, k, v)
473,
170,
473,
29,
1378771601
] |
def __init__(self, **kwargs):
    # Apply arbitrary configuration attributes, then point cherrypy at this
    # supervisor's server class and quiet screen logging before engine setup.
    for k, v in kwargs.items():
        setattr(self, k, v)
    cherrypy.server.httpserver = self.httpserver_class
    # This is perhaps the wrong place for this call but this is the only
    # place that i've found so far that I KNOW is early enough to set this.
    cherrypy.config.update({'log.screen': False})
    engine = cherrypy.engine
    if hasattr(engine, 'signal_handler'):
        engine.signal_handler.subscribe()
    if hasattr(engine, 'console_control_handler'):
        engine.console_control_handler.subscribe()
473,
170,
473,
29,
1378771601
] |
def sync_apps(self):
    """Tell the server about any apps which the setup functions mounted.

    This server type discovers its apps implicitly, so there is nothing to
    synchronize here.
    """
    return None
473,
170,
473,
29,
1378771601
] |
def __str__(self):
    # Human-readable description, e.g. "Builtin HTTP Server on 127.0.0.1:8080".
    return 'Builtin HTTP Server on %s:%s' % (self.host, self.port)
473,
170,
473,
29,
1378771601
] |
def __str__(self):
    # Human-readable description, e.g. "Builtin WSGI Server on 127.0.0.1:8080".
    return 'Builtin WSGI Server on %s:%s' % (self.host, self.port)
473,
170,
473,
29,
1378771601
] |
def get_app(self, app=None):
    """Obtain a new (decorated) WSGI app to hook into the origin server."""
    if app is None:
        # default to the globally mounted cherrypy application tree
        app = cherrypy.tree
    if self.validate:
        try:
            from wsgiref import validate
        except ImportError:
            warnings.warn(
                'Error importing wsgiref. The validator will not run.')
        else:
            # wraps the app in the validator
            app = validate.validator(app)
    return app
473,
170,
473,
29,
1378771601
] |
def get_modpygw_supervisor(**options):
    """Build a mod_python supervisor configured for the WSGI gateway."""
    from cherrypy.test import modpy
    supervisor = modpy.ModPythonSupervisor(**options)
    supervisor.template = modpy.conf_modpython_gateway
    supervisor.using_wsgi = True
    return supervisor
473,
170,
473,
29,
1378771601
] |
def get_modfcgid_supervisor(**options):
    # Build a mod_fcgid supervisor (import is local to keep the dependency
    # optional).
    from cherrypy.test import modfcgid
    return modfcgid.ModFCGISupervisor(**options)
473,
170,
473,
29,
1378771601
] |
def get_wsgi_u_supervisor(**options):
    # Force the unicode ('u', 0) WSGI interface before building a local
    # WSGI supervisor.
    cherrypy.server.wsgi_version = ('u', 0)
    return LocalWSGISupervisor(**options)
473,
170,
473,
29,
1378771601
] |
def _setup_server(cls, supervisor, conf):
    """Log environment info and derive the base cherrypy config dict."""
    v = sys.version.split()[0]
    log.info('Python version used to run this test script: %s' % v)
    log.info('CherryPy version: %s' % cherrypy.__version__)
    if supervisor.scheme == 'https':
        ssl = ' (ssl)'
    else:
        ssl = ''
    log.info('HTTP server version: %s%s' % (supervisor.protocol, ssl))
    log.info('PID: %s' % os.getpid())
    cherrypy.server.using_apache = supervisor.using_apache
    cherrypy.server.using_wsgi = supervisor.using_wsgi
    if sys.platform[:4] == 'java':
        # presumably a Jython socket-layer workaround -- confirm
        cherrypy.config.update({'server.nodelay': False})
    if isinstance(conf, text_or_bytes):
        # `conf` may be a path to an ini file; load its [global] section
        parser = cherrypy.lib.reprconf.Parser()
        conf = parser.dict_from_file(conf).get('global', {})
    else:
        conf = conf or {}
    baseconf = conf.copy()
    baseconf.update({'server.socket_host': supervisor.host,
                     'server.socket_port': supervisor.port,
                     'server.protocol_version': supervisor.protocol,
                     'environment': 'test_suite',
                     })
    if supervisor.scheme == 'https':
        # baseconf['server.ssl_module'] = 'builtin'
        baseconf['server.ssl_certificate'] = serverpem
        baseconf['server.ssl_private_key'] = serverpem
    # helper must be imported lazily so the coverage tool
    # can run against module-level statements within cherrypy.
    # Also, we have to do "from cherrypy.test import helper",
    # exactly like each test module does, because a relative import
    # would stick a second instance of webtest in sys.modules,
    # and we wouldn't be able to globally override the port anymore.
    if supervisor.scheme == 'https':
        webtest.WebCase.HTTP_CONN = HTTPSConnection
    return baseconf
473,
170,
473,
29,
1378771601
] |
def setup_class(cls):
    """Start a supervised cherrypy test server for this class."""
    # Creates a server
    conf = {
        'scheme': 'http',
        'protocol': 'HTTP/1.1',
        'port': 54583,
        'host': '127.0.0.1',
        'validate': False,
        'server': 'wsgi',
    }
    supervisor_factory = cls.available_servers.get(
        conf.get('server', 'wsgi'))
    if supervisor_factory is None:
        raise RuntimeError('Unknown server in config: %s' % conf['server'])
    supervisor = supervisor_factory(**conf)

    # Copied from "run_test_suite"
    cherrypy.config.reset()
    baseconf = cls._setup_server(supervisor, conf)
    cherrypy.config.update(baseconf)
    setup_client()

    if hasattr(cls, 'setup_server'):
        # Clear the cherrypy tree and clear the wsgi server so that
        # it can be updated with the new root
        cherrypy.tree = cherrypy._cptree.Tree()
        cherrypy.server.httpserver = None
        cls.setup_server()
        # Add a resource for verifying there are no refleaks
        # to *every* test class.
        cherrypy.tree.mount(gctools.GCRoot(), '/gc')
        cls.do_gc_test = True
        supervisor.start(cls.__module__)
    cls.supervisor = supervisor
473,
170,
473,
29,
1378771601
] |
def teardown_class(cls):
    """Stop the supervisor, but only when this class actually started one
    (i.e. it defined setup_server)."""
    if not hasattr(cls, 'setup_server'):
        return
    cls.supervisor.stop()
473,
170,
473,
29,
1378771601
] |
def test_gc(self):
    # Skip silently when the class didn't opt in to GC testing.
    if not self.do_gc_test:
        return
    self.getPage('/gc/stats')
    try:
        self.assertBody('Statistics:')
    except Exception:
        # Deliberately swallowed: failures occur intermittently (the bare
        # string below is a no-op statement serving as the explanation).
        'Failures occur intermittently. See #1420'
473,
170,
473,
29,
1378771601
] |
def base(self):
    """Return scheme://host[:port] plus script_name (trailing '/' removed).

    The port is omitted when it is the scheme's default (80 / 443).
    """
    is_default_port = ((self.scheme == 'http' and self.PORT == 80) or
                       (self.scheme == 'https' and self.PORT == 443))
    port = '' if is_default_port else ':%s' % self.PORT
    return '%s://%s%s%s' % (self.scheme, self.HOST, port,
                            self.script_name.rstrip('/'))
473,
170,
473,
29,
1378771601
] |
def getPage(self, url, headers=None, method='GET', body=None,
            protocol=None, raise_subcls=None):
    """Open the url. Return status, headers, body.

    `raise_subcls` must be a tuple with the exceptions classes
    or a single exception class that are not going to be considered
    a socket.error regardless that they were are subclass of a
    socket.error and therefore not considered for a connection retry.
    """
    if self.script_name:
        # resolve the url against the mounted application's root
        url = httputil.urljoin(self.script_name, url)
    return webtest.WebCase.getPage(self, url, headers, method, body,
                                   protocol, raise_subcls)
473,
170,
473,
29,
1378771601
] |
def assertErrorPage(self, status, message=None, pattern=''):
    """Compare the response body with a built in error page.

    The function will optionally look for the regexp pattern,
    within the exception embedded in the error page."""
    # This will never contain a traceback
    page = cherrypy._cperror.get_error_page(status, message=message)

    # First, test the response body without checking the traceback.
    # Stick a match-all group (.*) in to grab the traceback.
    def esc(text):
        return re.escape(ntob(text))
    epage = re.escape(page)
    epage = epage.replace(
        esc('<pre id="traceback"></pre>'),
        esc('<pre id="traceback">') + b'(.*)' + esc('</pre>'))
    m = re.match(epage, self.body, re.DOTALL)
    if not m:
        self._handlewebError(
            'Error page does not match; expected:\n' + page)
        return

    # Now test the pattern against the traceback
    if pattern is None:
        # Special-case None to mean that there should be *no* traceback.
        if m and m.group(1):
            self._handlewebError('Error page contains traceback')
    else:
        if (m is None) or (
                not re.search(ntob(re.escape(pattern), self.encoding),
                              m.group(1))):
            msg = 'Error page does not contain %s in traceback'
            self._handlewebError(msg % repr(pattern))
473,
170,
473,
29,
1378771601
] |
def assertEqualDates(self, dt1, dt2, seconds=None):
    """Assert abs(dt1 - dt2) is within `seconds` seconds.

    When `seconds` is None, fall back to self.date_tolerance.
    Raises AssertionError when the difference is `seconds` or more.
    """
    if seconds is None:
        seconds = self.date_tolerance
    # abs() on a timedelta replaces the manual dt1/dt2 ordering branch.
    diff = abs(dt1 - dt2)
    if not diff < datetime.timedelta(seconds=seconds):
        raise AssertionError('%r and %r are not within %r seconds.' %
                             (dt1, dt2, seconds))
473,
170,
473,
29,
1378771601
] |
def setup_client():
    """Set up the WebCase classes to match the server's socket settings."""
    webtest.WebCase.PORT = cherrypy.server.socket_port
    webtest.WebCase.HOST = cherrypy.server.socket_host
    # Serving over SSL means the client must speak https.
    serving_ssl = bool(cherrypy.server.ssl_certificate)
    if serving_ssl:
        CPWebCase.scheme = 'https'
473,
170,
473,
29,
1378771601
] |
def __init__(self, wait=False, daemonize=False, ssl=False,
             socket_host=None, socket_port=None):
    """Record supervisor options, defaulting host/port to cherrypy.server."""
    self.wait = wait
    self.daemonize = daemonize
    self.ssl = ssl
    # Explicit host/port win; otherwise mirror the global server config.
    self.host = socket_host if socket_host else cherrypy.server.socket_host
    self.port = socket_port if socket_port else cherrypy.server.socket_port
473,
170,
473,
29,
1378771601
] |
def start(self, imports=None):
    """Start cherryd in a subprocess.

    `imports` may be a single module name, a list/tuple of names, or None;
    each truthy name is passed to cherryd via -i.
    """
    # Fail fast if the target port is already taken.
    portend.free(self.host, self.port, timeout=1)
    args = [
        '-m',
        'cherrypy',
        '-c', self.config_file,
        '-p', self.pid_file,
    ]
    r"""
    Command for running cherryd server with autoreload enabled
    Using
    ```
    ['-c',
     "__requires__ = 'CherryPy'; \
     import pkg_resources, re, sys; \
     sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]); \
     sys.exit(\
     pkg_resources.load_entry_point(\
     'CherryPy', 'console_scripts', 'cherryd')())"]
    ```
    doesn't work as it's impossible to reconstruct the `-c`'s contents.
    Ref: https://github.com/cherrypy/cherrypy/issues/1545
    """
    # Normalize a bare string/None into an iterable; falsy entries skipped.
    if not isinstance(imports, (list, tuple)):
        imports = [imports]
    for i in imports:
        if i:
            args.append('-i')
            args.append(i)
    if self.daemonize:
        args.append('-d')
    env = os.environ.copy()
    # Make sure we import the cherrypy package in which this module is
    # defined.
    grandparentdir = os.path.abspath(os.path.join(thisdir, '..', '..'))
    if env.get('PYTHONPATH', ''):
        env['PYTHONPATH'] = os.pathsep.join(
            (grandparentdir, env['PYTHONPATH']))
    else:
        env['PYTHONPATH'] = grandparentdir
    self._proc = subprocess.Popen([sys.executable] + args, env=env)
    if self.wait:
        # Blocking mode: run to completion and capture the exit code.
        self.exit_code = self._proc.wait()
    else:
        # Wait until the server actually binds the port.
        portend.occupied(self.host, self.port, timeout=5)
        # Give the engine a wee bit more time to finish STARTING
        if self.daemonize:
            time.sleep(2)
        else:
            time.sleep(1)
473,
170,
473,
29,
1378771601
] |
def join(self):
    """Wait for the process to exit."""
    # Daemonized servers detach from the child, so use the daemon-specific
    # wait instead of Popen.wait().
    if self.daemonize:
        return self._join_daemon()
    self._proc.wait()
473,
170,
473,
29,
1378771601
] |
def dehydrate(self, bundle):
    """Pass the bundle through unchanged (no extra serialization step)."""
    return bundle
9,
2,
9,
44,
1413831364
] |
def parse_pRDF(f):
    """Parse a pRDF file into per-atom-type distribution lists.

    The file contains a "dstep <float>" line, an (ignored) "nstep <float>"
    line, and one line per atom type: "<type> <v1> <v2> ...".
    Values are parsed with locale.atof, matching the file's locale.

    :param f: path to the pRDF file
    :return: (pRDFs, dstep) where pRDFs maps atom type -> list of floats,
        and dstep is the bin width (None if the file has no dstep line)
    """
    pRDFs = {}
    dstep = None
    # `with` guarantees the handle is closed (the original leaked it), and
    # iterating the file avoids materializing all lines at once.
    with open(f) as fh:
        for line in fh:
            words = line.split()
            if not words:
                continue  # tolerate blank lines (original raised IndexError)
            if words[0] == "dstep":
                dstep = locale.atof(words[1])
            elif words[0] == "nstep":
                # nstep is read by the original but never used.
                continue
            else:
                pRDFs[words[0]] = [locale.atof(word) for word in words[1:]]
    return (pRDFs, dstep)
3,
3,
3,
70,
1469049025
] |
def __getitem__(self, k):
    """Look up `k`, lazily materializing callable values.

    A stored callable is invoked with the key, and its result replaces the
    callable so later lookups return the cached value directly.
    """
    value = OrderedDict.__getitem__(self, k)
    if not callable(value):
        return value
    computed = value(k)
    OrderedDict.__setitem__(self, k, computed)
    return computed
574,
83,
574,
2,
1415773777
] |
def __init__(self,
             show_metadata=False,  # type: bool
             episode_metadata=False,  # type: bool
             use_fanart=False,  # type: bool
             use_poster=False,  # type: bool
             use_banner=False,  # type: bool
             episode_thumbnails=False,  # type: bool
             season_posters=False,  # type: bool
             season_banners=False,  # type: bool
             season_all_poster=False,  # type: bool
             season_all_banner=False  # type: bool
             ):
    """Initialise the generic metadata provider.

    The boolean arguments toggle which metadata/artwork artefacts this
    provider generates; the filename attributes are fixed conventions.
    """
    # Provider identity and fixed artefact filenames.
    self.name = 'Generic'  # type: AnyStr
    self._ep_nfo_extension = 'nfo'  # type: AnyStr
    self._show_metadata_filename = 'tvshow.nfo'  # type: AnyStr
    self.fanart_name = 'fanart.jpg'  # type: AnyStr
    self.poster_name = 'poster.jpg'  # type: AnyStr
    self.banner_name = 'banner.jpg'  # type: AnyStr
    self.season_all_poster_name = 'season-all-poster.jpg'  # type: AnyStr
    self.season_all_banner_name = 'season-all-banner.jpg'  # type: AnyStr
    # Feature toggles, taken verbatim from the constructor arguments.
    self.show_metadata = show_metadata
    self.episode_metadata = episode_metadata
    self.fanart = use_fanart
    self.poster = use_poster
    self.banner = use_banner
    self.episode_thumbnails = episode_thumbnails
    self.season_posters = season_posters
    self.season_banners = season_banners
    self.season_all_poster = season_all_poster
    self.season_all_banner = season_all_banner
574,
83,
574,
2,
1415773777
] |
def get_id(self):
    # type: (...) -> AnyStr
    """Return a normalized identifier derived from this provider's name."""
    provider_name = self.name
    return GenericMetadata.makeID(provider_name)
574,
83,
574,
2,
1415773777
] |
def makeID(name):
    # type: (AnyStr) -> AnyStr
    """Normalize `name` into a lowercase identifier.

    '+' becomes the literal 'plus'; every other non-word character
    becomes '_'.
    """
    # str.replace is the idiomatic (and faster) form of re.sub("[+]", ...).
    name_id = name.replace("+", "plus")
    # \W is exactly [^\w]; the original [^\w\d_] is equivalent since
    # \d and _ are both subsets of \w.
    return re.sub(r"\W", "_", name_id).lower()
574,
83,
574,
2,
1415773777
] |
def _has_show_metadata(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> AnyStr
    """Return whether the show's nfo file already exists on disk."""
    nfo_path = self.get_show_file_path(show_obj)
    found = ek.ek(os.path.isfile, nfo_path)
    logger.log(u"Checking if " + nfo_path + " exists: " + str(found), logger.DEBUG)
    return found
574,
83,
574,
2,
1415773777
] |
def _has_fanart(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> AnyStr
    """Return whether a fanart image already exists for the show."""
    fanart_path = self.get_fanart_path(show_obj)
    found = ek.ek(os.path.isfile, fanart_path)
    logger.log(u"Checking if " + fanart_path + " exists: " + str(found), logger.DEBUG)
    return found
574,
83,
574,
2,
1415773777
] |
def _has_banner(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> AnyStr
    """Return whether a banner image already exists for the show."""
    banner_path = self.get_banner_path(show_obj)
    found = ek.ek(os.path.isfile, banner_path)
    logger.log(u"Checking if " + banner_path + " exists: " + str(found), logger.DEBUG)
    return found
574,
83,
574,
2,
1415773777
] |
def _has_season_poster(self, show_obj, season):
    # type: (sickbeard.tv.TVShow,int) -> AnyStr
    """Return whether a poster exists for the given season.

    A None path (no poster location for this season) yields False
    without logging.
    """
    poster_path = self.get_season_poster_path(show_obj, season)
    found = None is not poster_path and ek.ek(os.path.isfile, poster_path)
    if poster_path:
        logger.log(u"Checking if " + poster_path + " exists: " + str(found), logger.DEBUG)
    return found
574,
83,
574,
2,
1415773777
] |
def _has_season_all_poster(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> AnyStr
    """Return whether the season-all poster already exists for the show."""
    poster_path = self.get_season_all_poster_path(show_obj)
    found = ek.ek(os.path.isfile, poster_path)
    logger.log(u"Checking if " + poster_path + " exists: " + str(found),
               logger.DEBUG)
    return found
574,
83,
574,
2,
1415773777
] |
def get_show_year(show_obj, show_info, year_only=True):
    # type: (sickbeard.tv.TVShow, Dict, bool) -> Optional[AnyStr]
    """Return the show's first-aired year (or full date) as a string.

    Tries the tvinfo source's 'firstaired' field first, then falls back to
    the local TVShow object: its startyear, then the airdate of the first
    aired regular episode.  Returns None when nothing usable is found.

    :param show_obj: show the data relates to
    :param show_info: data object returned from the tvinfo source
    :param year_only: when True return just 'YYYY', otherwise an ISO date
    """
    if None is not getattr(show_info, 'firstaired', None):
        try:
            first_aired = datetime.datetime.strptime(show_info['firstaired'], '%Y-%m-%d')
            if first_aired:
                if year_only:
                    return str(first_aired.year)
                return str(first_aired.date())
        except (BaseException, Exception):
            # Unparseable 'firstaired' value; fall through to show_obj.
            pass
    if isinstance(show_obj, sickbeard.tv.TVShow):
        if year_only and show_obj.startyear:
            return '%s' % show_obj.startyear
        # Populate the episode cache if S01E01 isn't loaded yet.
        if not show_obj.sxe_ep_obj.get(1, {}).get(1, None):
            show_obj.get_all_episodes()
        try:
            first_ep_obj = show_obj.first_aired_regular_episode
        except (BaseException, Exception):
            first_ep_obj = None
        # Years <= 1900 are rejected — presumably placeholder airdates; confirm.
        if isinstance(first_ep_obj, sickbeard.tv.TVEpisode) \
                and isinstance(first_ep_obj.airdate, datetime.date) and 1900 < first_ep_obj.airdate.year:
            # Tuple pick: index 0 (year) when year_only, index 1 (full date) otherwise.
            return '%s' % (first_ep_obj.airdate.year, first_ep_obj.airdate)[not year_only]
574,
83,
574,
2,
1415773777
] |
def get_episode_file_path(self, ep_obj):
    # type: (sickbeard.tv.TVEpisode) -> AnyStr
    """Return the episode nfo path: the media file with its extension swapped."""
    media_path = ep_obj.location
    return sg_helpers.replace_extension(media_path, self._ep_nfo_extension)
574,
83,
574,
2,
1415773777
] |
def get_poster_path(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> AnyStr
    """Return the full path of the show's poster image."""
    show_dir = show_obj.location
    return ek.ek(os.path.join, show_dir, self.poster_name)
574,
83,
574,
2,
1415773777
] |
def get_episode_thumb_path(self, ep_obj):
    # type: (sickbeard.tv.TVEpisode) -> Optional[AnyStr]
    """
    Returns the path where the episode thumbnail should be stored.

    ep_obj: a TVEpisode instance for which to create the thumbnail.
    Returns None when the episode's media file does not exist.
    """
    if not ek.ek(os.path.isfile, ep_obj.location):
        return None
    # Drop the media extension if there is one; otherwise keep the name as-is.
    stem, dot, _ext = ep_obj.location.rpartition('.')
    base_name = ep_obj.location if '' == stem else stem
    return base_name + '-thumb.jpg'
574,
83,
574,
2,
1415773777
] |
def get_season_banner_path(self, show_obj, season):
    # type: (sickbeard.tv.TVShow, int) -> AnyStr
    """
    Returns the full path to the file for a given season banner.

    show_obj: a TVShow instance for which to generate the path
    season: a season number to be used for the path. Note that season 0
    means specials.
    """
    # Our specials thumbnail is, well, special
    base_name = 'season-specials' if 0 == season else 'season' + str(season).zfill(2)
    return ek.ek(os.path.join, show_obj.location, base_name + '-banner.jpg')
574,
83,
574,
2,
1415773777
] |
def get_season_all_banner_path(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> AnyStr
    """Return the full path of the show's season-all banner image."""
    show_dir = show_obj.location
    return ek.ek(os.path.join, show_dir, self.season_all_banner_name)
574,
83,
574,
2,
1415773777
] |
def _valid_show(fetched_show_info, show_obj):
    # type: (Dict, sickbeard.tv.TVShow) -> bool
    """
    Test the integrity of fetched show data

    :param fetched_show_info: the object returned from the tvinfo source
    :param show_obj: Show that the fetched data relates to
    :return: True if fetched_show_obj is valid data otherwise False
    """
    # Valid when EITHER the result is a dict-like wrapper whose 'data'
    # payload (list or dict) contains 'seriesname', OR the object exposes a
    # 'seriesname' attribute directly.
    # NOTE(review): isinstance(..., dict) combined with getattr on the same
    # object implies the tvinfo wrapper is a dict subclass with a 'data'
    # attribute — confirm against the tvinfo source API.
    if not (isinstance(fetched_show_info, dict) and
            isinstance(getattr(fetched_show_info, 'data', None), (list, dict)) and
            'seriesname' in getattr(fetched_show_info, 'data', [])) and \
            not hasattr(fetched_show_info, 'seriesname'):
        logger.log(u'Show %s not found on %s ' %
                   (show_obj.name, sickbeard.TVInfoAPI(show_obj.tvid).name), logger.WARNING)
        return False
    return True
574,
83,
574,
2,
1415773777
] |
def create_show_metadata(self, show_obj, force=False):
    # type: (sickbeard.tv.TVShow, bool) -> bool
    """Write the show's metadata file when enabled and missing (or forced).

    Returns True on a successful write, False otherwise.
    """
    # Keep the _has_show_metadata probe first so its existence check (and
    # debug logging) always runs when metadata handling is enabled.
    should_write = (self.show_metadata and show_obj
                    and (not self._has_show_metadata(show_obj) or force))
    if not should_write:
        return False
    logger.debug('Metadata provider %s creating show metadata for %s' % (self.name, show_obj.unique_name))
    try:
        return self.write_show_file(show_obj)
    except BaseTVinfoError as e:
        logger.log('Unable to find useful show metadata for %s on %s: %s' % (
            self.name, sickbeard.TVInfoAPI(show_obj.tvid).name, ex(e)), logger.WARNING)
    return False
574,
83,
574,
2,
1415773777
] |
def update_show_indexer_metadata(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> bool
    """Rewrite the show's nfo so <indexer> and <id> match show_obj.

    Returns True after rewriting the file; returns None (falsy) when show
    metadata is disabled or no nfo exists yet.
    """
    if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
        logger.debug(u'Metadata provider %s updating show indexer metadata file for %s' % (
            self.name, show_obj.unique_name))
        nfo_file_path = self.get_show_file_path(show_obj)
        with ek.ek(io.open, nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
            show_xml = etree.ElementTree(file=xmlFileObj)
        tvid = show_xml.find('indexer')
        prodid = show_xml.find('id')
        root = show_xml.getroot()
        # Update the element in place when present, else create it.
        show_tvid = str(show_obj.tvid)
        if None is not tvid:
            tvid.text = '%s' % show_tvid
        else:
            etree.SubElement(root, 'indexer').text = '%s' % show_tvid
        show_prodid = str(show_obj.prodid)
        if None is not prodid:
            prodid.text = '%s' % show_prodid
        else:
            etree.SubElement(root, 'id').text = '%s' % show_prodid
        # Make it purdy
        sg_helpers.indent_xml(root)
        sg_helpers.write_file(nfo_file_path, show_xml, xmltree=True, utf8=True)
        return True
574,
83,
574,
2,
1415773777
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.