id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1
value | is_duplicated bool 2
classes |
|---|---|---|---|---|---|
def _patch_logger_class():
    """Make sure the process name is recorded on every log record.

    Replaces the globally registered logger class with a subclass whose
    ``makeRecord`` stamps ``record.processName`` from
    ``multiprocessing.current_process()``.  Idempotent: the
    ``_process_aware`` marker prevents double patching.
    """
    try:
        from multiprocessing.process import current_process
    except ImportError:
        # multiprocessing is unavailable on this platform/build.
        current_process = None
    # Serialize against other threads mutating the logging registry.
    logging._acquireLock()
    try:
        OldLoggerClass = logging.getLoggerClass()
        if (not getattr(OldLoggerClass, '_process_aware', False)):
            class ProcessAwareLogger(OldLoggerClass, ):
                _process_aware = True

                def makeRecord(self, *args, **kwds):
                    record = OldLoggerClass.makeRecord(self, *args, **kwds)
                    if current_process:
                        # NOTE(review): reads the private ``_name``
                        # attribute -- presumably for compatibility with
                        # old multiprocessing versions; confirm before
                        # changing.
                        record.processName = current_process()._name
                    else:
                        record.processName = ''
                    return record
            logging.setLoggerClass(ProcessAwareLogger)
    finally:
        logging._releaseLock()
| [
"def",
"_patch_logger_class",
"(",
")",
":",
"try",
":",
"from",
"multiprocessing",
".",
"process",
"import",
"current_process",
"except",
"ImportError",
":",
"current_process",
"=",
"None",
"logging",
".",
"_acquireLock",
"(",
")",
"try",
":",
"OldLoggerClass",
... | make sure process name is recorded when loggers are used . | train | false |
11,123 | @receiver(user_logged_out)
def log_user_lockout(sender, request, user, signal, *args, **kwargs):
    """Signal handler: stamp the logout time on the user's open access log.

    Locates the most recent ``AccessLog`` row for this user that has no
    ``logout_time`` yet and closes it with the current timestamp; does
    nothing when no user is attached to the signal.
    """
    if not user:
        return
    # ``get_username`` exists on custom user models; fall back to the
    # plain attribute for older ones.
    try:
        name = user.get_username()
    except AttributeError:
        name = user.username
    open_logs = AccessLog.objects.filter(
        username=name, logout_time__isnull=True
    ).order_by('-attempt_time')
    if open_logs:
        newest = open_logs[0]
        newest.logout_time = now()
        newest.save()
| [
"@",
"receiver",
"(",
"user_logged_out",
")",
"def",
"log_user_lockout",
"(",
"sender",
",",
"request",
",",
"user",
",",
"signal",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"user",
")",
":",
"return",
"try",
":",
"username",
... | when a user logs out . | train | false |
def project_status_represent(value):
    """Colour-code a numeric project status as a SPAN widget.

    Values >= 80 render green, >= 60 yellow, anything lower red.
    For GeoJSON output the raw value is returned untouched.
    @todo: configurable thresholds.
    """
    if current.auth.permission.format == 'geojson':
        return value
    # Traffic-light colour for the status value.
    if value >= 80:
        background = '00ff00'
    elif value >= 60:
        background = 'ffff00'
    else:
        background = 'ff0000'
    formatted = IS_FLOAT_AMOUNT.represent(value, precision=2)
    return SPAN(formatted,
                _class='project_status',
                _style='background:#%s' % background)
| [
"def",
"project_status_represent",
"(",
"value",
")",
":",
"if",
"(",
"current",
".",
"auth",
".",
"permission",
".",
"format",
"==",
"'geojson'",
")",
":",
"return",
"value",
"if",
"(",
"value",
">=",
"80",
")",
":",
"colour",
"=",
"'00ff00'",
"elif",
... | colour-coding of statuses @todo: configurable thresholds . | train | false |
def get_fignums():
    """Return the numbers of all existing figures, in ascending order."""
    figure_registry = _pylab_helpers.Gcf.figs
    return sorted(figure_registry)
| [
"def",
"get_fignums",
"(",
")",
":",
"return",
"sorted",
"(",
"_pylab_helpers",
".",
"Gcf",
".",
"figs",
")"
] | return a list of existing figure numbers . | train | false |
11,127 | def _mixed_join(iterable, sentinel):
iterator = iter(iterable)
first_item = next(iterator, sentinel)
if isinstance(first_item, bytes):
return (first_item + ''.join(iterator))
return (first_item + u''.join(iterator))
| [
"def",
"_mixed_join",
"(",
"iterable",
",",
"sentinel",
")",
":",
"iterator",
"=",
"iter",
"(",
"iterable",
")",
"first_item",
"=",
"next",
"(",
"iterator",
",",
"sentinel",
")",
"if",
"isinstance",
"(",
"first_item",
",",
"bytes",
")",
":",
"return",
"(... | concatenate any string type in an intelligent way . | train | true |
def get_unix_user(user=None):
    """Resolve *user* to a ``(uid, username)`` tuple via the pwd database.

    :param user: a username string, a numeric uid (int, or a string of
        digits), or None for the current effective uid.
    :returns: tuple of ``(pw_uid, pw_name)``.
    :raises KeyError: if the name/uid is not in the password database.
    :raises TypeError: for any other argument type.
    """
    if isinstance(user, six.string_types):
        try:
            user_info = pwd.getpwnam(user)
        except KeyError:
            # The name lookup failed; the string may actually hold a
            # numeric uid, so retry it as an integer.
            try:
                i = int(user)
            except ValueError:
                raise KeyError(("user name '%s' not found" % user))
            try:
                user_info = pwd.getpwuid(i)
            except KeyError:
                raise KeyError(('user id %d not found' % i))
    elif isinstance(user, int):
        try:
            user_info = pwd.getpwuid(user)
        except KeyError:
            raise KeyError(('user id %d not found' % user))
    elif (user is None):
        # Default to the user this process is running as.
        user_info = pwd.getpwuid(os.geteuid())
    else:
        user_cls_name = reflection.get_class_name(user, fully_qualified=False)
        raise TypeError(('user must be string, int or None; not %s (%r)' % (user_cls_name, user)))
    return (user_info.pw_uid, user_info.pw_name)
| [
"def",
"get_unix_user",
"(",
"user",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"user",
",",
"six",
".",
"string_types",
")",
":",
"try",
":",
"user_info",
"=",
"pwd",
".",
"getpwnam",
"(",
"user",
")",
"except",
"KeyError",
":",
"try",
":",
"i"... | get the uid and user name . | train | false |
def _create_models_for_thread_and_first_message(exploration_id, state_name, original_author_id, subject, text, has_suggestion):
    """Create a feedback thread together with its first message.

    The thread starts in the OPEN status and the first message reuses
    the thread's subject.

    :returns: the id of the newly created thread.
    """
    thread_id = feedback_models.FeedbackThreadModel.generate_new_thread_id(exploration_id)
    thread = feedback_models.FeedbackThreadModel.create(exploration_id, thread_id)
    thread.exploration_id = exploration_id
    thread.state_name = state_name
    thread.original_author_id = original_author_id
    # New threads always start out open.
    thread.status = feedback_models.STATUS_CHOICES_OPEN
    thread.subject = subject
    thread.has_suggestion = has_suggestion
    thread.put()
    create_message(exploration_id, thread_id, original_author_id, feedback_models.STATUS_CHOICES_OPEN, subject, text)
    return thread_id
| [
"def",
"_create_models_for_thread_and_first_message",
"(",
"exploration_id",
",",
"state_name",
",",
"original_author_id",
",",
"subject",
",",
"text",
",",
"has_suggestion",
")",
":",
"thread_id",
"=",
"feedback_models",
".",
"FeedbackThreadModel",
".",
"generate_new_thr... | creates a feedback thread and its first message . | train | false |
def do_get_messages(parser, token):
    """Template tag handler: get messages for an object.

    Expects ``{% get_messages for <object> as <varname> %}`` and returns
    a ``GetMessages`` node.

    :raises template.TemplateSyntaxError: if the arguments are malformed.
    """
    tags = parse_ttag(token, ['for', 'as'])
    if len(tags) != 3:
        # Fixed: use the call form of ``raise`` -- the original
        # statement form (``raise Exc, msg``) is Python-2-only syntax.
        raise template.TemplateSyntaxError(
            '%r tag has invalid arguments' % tags['tag_name'])
    return GetMessages(object_name=tags['for'], varname=tags['as'])
| [
"def",
"do_get_messages",
"(",
"parser",
",",
"token",
")",
":",
"tags",
"=",
"parse_ttag",
"(",
"token",
",",
"[",
"'for'",
",",
"'as'",
"]",
")",
"if",
"(",
"len",
"(",
"tags",
")",
"!=",
"3",
")",
":",
"raise",
"template",
".",
"TemplateSyntaxErro... | get messages for an object . | train | false |
def finalize_backup_info(backup_info_pk, mapper_params):
    """Finalize a BackupInformation entity and create its info file on GS.

    Runs inside a datastore transaction: stamps the completion time,
    writes the backup-info file when the backup lives on Google Storage,
    and writes the entity back.
    """
    def tx():
        backup_info = BackupInformation.get(backup_info_pk)
        if backup_info:
            backup_info.complete_time = datetime.datetime.now()
            if (backup_info.filesystem == files.GS_FILESYSTEM):
                # Only GS-backed backups get a separate info file.
                gs_bucket = mapper_params['gs_bucket_name']
                BackupInfoWriter(gs_bucket).write(backup_info)
            # NOTE(review): force_writes presumably guarantees the write
            # goes through despite capability restrictions -- confirm.
            backup_info.put(force_writes=True)
            logging.info('Backup %s completed', backup_info.name)
        else:
            logging.warn('Backup %s could not be found', backup_info_pk)
    db.run_in_transaction(tx)
| [
"def",
"finalize_backup_info",
"(",
"backup_info_pk",
",",
"mapper_params",
")",
":",
"def",
"tx",
"(",
")",
":",
"backup_info",
"=",
"BackupInformation",
".",
"get",
"(",
"backup_info_pk",
")",
"if",
"backup_info",
":",
"backup_info",
".",
"complete_time",
"=",... | finalize the state of backupinformation and creates info file for gs . | train | false |
def task_and_vm_state_from_status(statuses):
    """Map server status strings back to ``(vm_states, task_states)``.

    Returns two sorted lists: every vm state and every task state whose
    mapped status string appears (case-insensitively) in *statuses*.
    """
    wanted = {status.lower() for status in statuses}
    vm_states = set()
    task_states = set()
    for vm_state, task_map in _STATE_MAP.items():
        for task_state, mapped_status in task_map.items():
            if mapped_status.lower() in wanted:
                vm_states.add(vm_state)
                task_states.add(task_state)
    return (sorted(vm_states), sorted(task_states))
| [
"def",
"task_and_vm_state_from_status",
"(",
"statuses",
")",
":",
"vm_states",
"=",
"set",
"(",
")",
"task_states",
"=",
"set",
"(",
")",
"lower_statuses",
"=",
"[",
"status",
".",
"lower",
"(",
")",
"for",
"status",
"in",
"statuses",
"]",
"for",
"(",
"... | map the servers multiple status strings to list of vm states and list of task states . | train | false |
def config_absent(name):
    """Ensure a configuration property is absent from the running config.

    name
        The property to remove (matched case-insensitively; keys are
        stored lower-case).

    :returns: a standard state dict with ``name``/``changes``/``result``/
        ``comment`` keys.
    """
    name = name.lower()
    ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
    config = _load_config()
    if (name in config):
        ret['result'] = True
        ret['comment'] = 'property {0} deleted'.format(name)
        # None marks a deleted key in the changes report.
        ret['changes'][name] = None
        del config[name]
    else:
        ret['result'] = True
        ret['comment'] = 'property {0} is absent'.format(name)
    # Only persist when not in test mode and something actually changed.
    if ((not __opts__['test']) and (len(ret['changes']) > 0)):
        ret['result'] = _write_config(config)
    return ret
| [
"def",
"config_absent",
"(",
"name",
")",
":",
"name",
"=",
"name",
".",
"lower",
"(",
")",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"None",
",",
"'comment'",
":",
"''",
"}",
"config",
"=",
"_... | ensure a specific configuration line does not exist in the running config name config line to remove examples: . | train | true |
def get_dynamic_segment(context, network_id, physical_network=None, segmentation_id=None):
    """Return a dynamic network segment matching the given filters, if any.

    Looks up the first dynamic segment of *network_id*, optionally
    narrowed by physical network and/or segmentation id.

    :returns: the segment as a dict, or None when nothing matches.
    """
    with context.session.begin(subtransactions=True):
        query = context.session.query(segments_model.NetworkSegment).filter_by(network_id=network_id, is_dynamic=True)
        if physical_network:
            query = query.filter_by(physical_network=physical_network)
        if segmentation_id:
            query = query.filter_by(segmentation_id=segmentation_id)
        record = query.first()
        if record:
            return _make_segment_dict(record)
        else:
            LOG.debug('No dynamic segment found for Network:%(network_id)s, Physical network:%(physnet)s, segmentation_id:%(segmentation_id)s', {'network_id': network_id, 'physnet': physical_network, 'segmentation_id': segmentation_id})
            return None
| [
"def",
"get_dynamic_segment",
"(",
"context",
",",
"network_id",
",",
"physical_network",
"=",
"None",
",",
"segmentation_id",
"=",
"None",
")",
":",
"with",
"context",
".",
"session",
".",
"begin",
"(",
"subtransactions",
"=",
"True",
")",
":",
"query",
"="... | return a dynamic segment for the filters provided if one exists . | train | false |
def secure_hash(filename, hash_func=sha1):
    """Return a secure hash hex digest of a local file.

    The file is read in 64 KiB blocks so arbitrarily large files can be
    hashed with constant memory.

    :param filename: path of the file to hash.
    :param hash_func: hash constructor (defaults to sha1).
    :returns: hex digest string, or None if the path does not exist or
        is a directory.
    :raises AnsibleError: if the file exists but cannot be read.
    """
    if ((not os.path.exists(to_bytes(filename, errors='surrogate_or_strict'))) or os.path.isdir(to_bytes(filename, errors='strict'))):
        return None
    digest = hash_func()
    blocksize = (64 * 1024)
    try:
        infile = open(to_bytes(filename, errors='surrogate_or_strict'), 'rb')
        try:
            block = infile.read(blocksize)
            while block:
                digest.update(block)
                block = infile.read(blocksize)
        finally:
            # Fixed: always release the file descriptor -- the original
            # leaked it when a read raised IOError part-way through.
            infile.close()
    except IOError as e:
        raise AnsibleError(('error while accessing the file %s, error was: %s' % (filename, e)))
    return digest.hexdigest()
| [
"def",
"secure_hash",
"(",
"filename",
",",
"hash_func",
"=",
"sha1",
")",
":",
"if",
"(",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"to_bytes",
"(",
"filename",
",",
"errors",
"=",
"'surrogate_or_strict'",
")",
")",
")",
"or",
"os",
".",
"p... | return a secure hash hex digest of local file . | train | false |
def password(caller, string_input):
    """Menu node: validate the player's password and log them in.

    Disconnects after three failed attempts, and refuses banned account
    names/addresses.  Returns the ``(text, options)`` pair the menu
    system expects.
    """
    menutree = caller.ndb._menutree
    string_input = string_input.strip()
    player = menutree.player
    # Failed tries so far in this menu session (0 on first entry).
    password_attempts = (menutree.password_attempts if hasattr(menutree, 'password_attempts') else 0)
    bans = ServerConfig.objects.conf('server_bans')
    # Banned when either the account name or the connecting address matches.
    banned = (bans and (any(((tup[0] == player.name.lower()) for tup in bans)) or any((tup[2].match(caller.address) for tup in bans if tup[2]))))
    if (not player.check_password(string_input)):
        password_attempts += 1
        if (password_attempts > 2):
            # Third failure: drop the connection.
            caller.sessionhandler.disconnect(caller, '|rToo many failed attempts. Disconnecting.|n')
            text = ''
            options = {}
        else:
            menutree.password_attempts = password_attempts
            text = dedent('\n |rIncorrect password.|n\n Try again or leave empty to go back.\n '.strip('\n'))
            options = ({'key': '', 'exec': (lambda caller: caller.msg('', options={'echo': True})), 'goto': 'start'}, {'key': '_default', 'goto': 'password'})
    elif banned:
        string = dedent('\n |rYou have been banned and cannot continue from here.\n If you feel this ban is in error, please email an admin.|n\n Disconnecting.\n '.strip('\n'))
        caller.sessionhandler.disconnect(caller, string)
        text = ''
        options = {}
    else:
        text = ''
        options = {}
        # NOTE(review): presumably re-enables input echo that was turned
        # off for hidden password entry -- confirm against the client.
        caller.msg('', options={'echo': True})
        caller.sessionhandler.login(caller, player)
    return (text, options)
| [
"def",
"password",
"(",
"caller",
",",
"string_input",
")",
":",
"menutree",
"=",
"caller",
".",
"ndb",
".",
"_menutree",
"string_input",
"=",
"string_input",
".",
"strip",
"(",
")",
"player",
"=",
"menutree",
".",
"player",
"password_attempts",
"=",
"(",
... | resets password for email to password . | train | false |
def test_set_format_basic():
    """Test basics of setting the Time.format attribute.

    Switching the format must convert ``value`` accordingly while the
    underlying jd1/jd2 storage objects stay shared (not copied).
    """
    for (format, value) in (('jd', 2451577.5), ('mjd', 51577.0), ('cxcsec', 65923264.184), ('datetime', datetime.datetime(2000, 2, 3, 0, 0)), ('iso', '2000-02-03 00:00:00.000')):
        t = Time('+02000-02-03', format='fits')
        t0 = t.replicate()
        t.format = format
        assert (t.value == value)
        # Identity check: internal representation must be shared.
        assert (t._time.jd1 is t0._time.jd1)
        assert (t._time.jd2 is t0._time.jd2)
| [
"def",
"test_set_format_basic",
"(",
")",
":",
"for",
"(",
"format",
",",
"value",
")",
"in",
"(",
"(",
"'jd'",
",",
"2451577.5",
")",
",",
"(",
"'mjd'",
",",
"51577.0",
")",
",",
"(",
"'cxcsec'",
",",
"65923264.184",
")",
",",
"(",
"'datetime'",
","... | test basics of setting format attribute . | train | false |
def get_aug_path(file_path):
    """Return the Augeas tree path corresponding to a full file path."""
    return '/files%s' % file_path
| [
"def",
"get_aug_path",
"(",
"file_path",
")",
":",
"return",
"(",
"'/files%s'",
"%",
"file_path",
")"
] | return augeas path for full filepath . | train | false |
def get_private_ip(vm_):
    """Return whether a private IP should be assigned for this VM profile.

    Reads the ``assign_private_ip`` cloud-config value (default False).
    NOTE(review): despite the name, this returns the boolean config
    flag, not an IP address.
    """
    return config.get_cloud_config_value('assign_private_ip', vm_, __opts__, default=False)
| [
"def",
"get_private_ip",
"(",
"vm_",
")",
":",
"return",
"config",
".",
"get_cloud_config_value",
"(",
"'assign_private_ip'",
",",
"vm_",
",",
"__opts__",
",",
"default",
"=",
"False",
")"
] | get the private ip of the current machine . | train | false |
def encode_name(name):
    """Hex-encode *name* into URL-safe characters.

    Text (unicode) input is UTF-8 encoded before hexlification.
    """
    if isinstance(name, unicode):
        return hexlify(name.encode(u'utf-8'))
    return hexlify(name)
| [
"def",
"encode_name",
"(",
"name",
")",
":",
"if",
"isinstance",
"(",
"name",
",",
"unicode",
")",
":",
"name",
"=",
"name",
".",
"encode",
"(",
"u'utf-8'",
")",
"return",
"hexlify",
"(",
"name",
")"
] | encode a name as url safe characters . | train | false |
11,143 | @utils.arg('class_name', metavar='<class>', help='Name of quota class to list the quotas for.')
@utils.service_type('monitor')
def do_quota_class_show(cs, args):
    """List the quotas for the quota class named by ``args.class_name``."""
    _quota_show(cs.quota_classes.get(args.class_name))
| [
"@",
"utils",
".",
"arg",
"(",
"'class_name'",
",",
"metavar",
"=",
"'<class>'",
",",
"help",
"=",
"'Name of quota class to list the quotas for.'",
")",
"@",
"utils",
".",
"service_type",
"(",
"'monitor'",
")",
"def",
"do_quota_class_show",
"(",
"cs",
",",
"args... | list the quotas for a quota class . | train | false |
def build_in_docker(destination_path, distribution, top_level, package_uri):
    """Build a Flocker package for *distribution* in a clean Docker image.

    :param destination_path: directory that will receive the built
        packages (mounted as ``/output`` in the container).
    :param distribution: target distribution name (selects the image).
    :param top_level: FilePath of the repository root (mounted as
        ``/flocker``).
    :param package_uri: where to get the package source; the in-container
        mount point is substituted when it points at the local checkout.
    :returns: a BuildSequence that builds the image and runs the build.
    :raises ValueError: if destination_path exists but is not a directory.
    """
    if (destination_path.exists() and (not destination_path.isdir())):
        # Fixed: replaced the unhelpful 'go away' message with an
        # actionable one.
        raise ValueError(
            'destination_path %r exists but is not a directory' % (destination_path,))
    volumes = {FilePath('/output'): destination_path, FilePath('/flocker'): top_level}
    # Building from the local checkout: use its in-container mount point.
    if (package_uri == top_level.path):
        package_uri = '/flocker'
    tag = ('clusterhq/build-%s' % (distribution,))
    build_targets_directory = top_level.descendant(BUILD_TARGETS_SEGMENTS)
    build_directory = build_targets_directory.child(distribution)
    requirements_directory = top_level.child('requirements')
    requirements_directory.copyTo(build_directory.child('requirements'))
    return BuildSequence(steps=[DockerBuild(tag=tag, build_directory=build_directory), DockerRun(tag=tag, volumes=volumes, command=[package_uri])])
| [
"def",
"build_in_docker",
"(",
"destination_path",
",",
"distribution",
",",
"top_level",
",",
"package_uri",
")",
":",
"if",
"(",
"destination_path",
".",
"exists",
"(",
")",
"and",
"(",
"not",
"destination_path",
".",
"isdir",
"(",
")",
")",
")",
":",
"r... | build a flocker package for a given distribution inside a clean docker container of that distribution . | train | false |
def condition_db_filter(model, field, value):
    """Create a SQLAlchemy matching filter for *field* against *value*.

    A non-string iterable becomes an IN clause; when it contains None
    the IN form is skipped (presumably because IN does not match NULL)
    and an OR of equality comparisons is built instead.  Scalars become
    a plain equality comparison.
    """
    orm_field = getattr(model, field)
    if (isinstance(value, collections.Iterable) and (not isinstance(value, six.string_types))):
        if (None not in value):
            return orm_field.in_(value)
        # NULL-safe fallback: OR together individual comparisons.
        return or_(((orm_field == v) for v in value))
    return (orm_field == value)
| [
"def",
"condition_db_filter",
"(",
"model",
",",
"field",
",",
"value",
")",
":",
"orm_field",
"=",
"getattr",
"(",
"model",
",",
"field",
")",
"if",
"(",
"isinstance",
"(",
"value",
",",
"collections",
".",
"Iterable",
")",
"and",
"(",
"not",
"isinstanc... | create matching filter . | train | false |
def cpu_stats():
    """Return CPU statistics as an ``scpustats`` namedtuple.

    The C extension supplies context switches, interrupts, DPCs and
    syscalls (DPCs suggest the Windows build); soft interrupts do not
    exist here, so they are reported as 0.
    """
    (ctx_switches, interrupts, dpcs, syscalls) = cext.cpu_stats()
    # No soft-interrupt concept on this platform.
    soft_interrupts = 0
    return _common.scpustats(ctx_switches, interrupts, soft_interrupts, syscalls)
| [
"def",
"cpu_stats",
"(",
")",
":",
"(",
"ctx_switches",
",",
"interrupts",
",",
"dpcs",
",",
"syscalls",
")",
"=",
"cext",
".",
"cpu_stats",
"(",
")",
"soft_interrupts",
"=",
"0",
"return",
"_common",
".",
"scpustats",
"(",
"ctx_switches",
",",
"interrupts... | return cpu statistics . | train | false |
def get_g77_abi_wrappers(info):
    """Return source files containing Fortran ABI wrapper routines.

    Which wrappers are needed depends on the detected LAPACK build:
    g77-style ABIs get real wrappers plus an Accelerate- or MKL-specific
    shim on OS X; everything else gets dummy pass-throughs.

    :raises NotImplementedError: for an unrecognized LAPACK on OS X.
    """
    wrapper_sources = []
    path = os.path.abspath(os.path.dirname(__file__))
    if needs_g77_abi_wrapper(info):
        wrapper_sources += [os.path.join(path, 'src', 'wrap_g77_abi_f.f'), os.path.join(path, 'src', 'wrap_g77_abi_c.c')]
        if uses_accelerate(info):
            wrapper_sources += [os.path.join(path, 'src', 'wrap_accelerate_c.c'), os.path.join(path, 'src', 'wrap_accelerate_f.f')]
        elif uses_mkl(info):
            wrapper_sources += [os.path.join(path, 'src', 'wrap_dummy_accelerate.f')]
        else:
            raise NotImplementedError(('Do not know how to handle LAPACK %s on mac os x' % (info,)))
    else:
        # No ABI fixups needed: link dummy pass-through routines.
        wrapper_sources += [os.path.join(path, 'src', 'wrap_dummy_g77_abi.f'), os.path.join(path, 'src', 'wrap_dummy_accelerate.f')]
    return wrapper_sources
| [
"def",
"get_g77_abi_wrappers",
"(",
"info",
")",
":",
"wrapper_sources",
"=",
"[",
"]",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"if",
"needs_g77_abi_wrapper",
"(",
"info",
")",
... | returns file names of source files containing fortran abi wrapper routines . | train | false |
def clipboard_get(self):
    """Get text from the system clipboard.

    Tries the platform-specific backend first and falls back to the
    Tkinter one, dispatching through a CommandChainDispatcher.
    """
    from IPython.lib.clipboard import osx_clipboard_get, tkinter_clipboard_get, win32_clipboard_get
    if sys.platform == 'win32':
        backends = [win32_clipboard_get, tkinter_clipboard_get]
    elif sys.platform == 'darwin':
        backends = [osx_clipboard_get, tkinter_clipboard_get]
    else:
        backends = [tkinter_clipboard_get]
    dispatcher = CommandChainDispatcher()
    for backend in backends:
        dispatcher.add(backend)
    return dispatcher()
| [
"def",
"clipboard_get",
"(",
"self",
")",
":",
"from",
"IPython",
".",
"lib",
".",
"clipboard",
"import",
"osx_clipboard_get",
",",
"tkinter_clipboard_get",
",",
"win32_clipboard_get",
"if",
"(",
"sys",
".",
"platform",
"==",
"'win32'",
")",
":",
"chain",
"=",... | get text from the clipboard . | train | true |
def make_list_slug(name):
    """Return the URL slug for the given list name.

    Lower-cases the name, strips punctuation, spells out ``&`` and
    hyphenates spaces.
    """
    # Map each punctuation character to None (deletion) in one pass.
    deletions = {ord(ch): None for ch in u'!@#$%^*()[]{}/=?+\\|'}
    slug = name.lower().translate(deletions)
    slug = slug.replace(u'&', u'and')
    return slug.replace(u' ', u'-')
| [
"def",
"make_list_slug",
"(",
"name",
")",
":",
"slug",
"=",
"name",
".",
"lower",
"(",
")",
"for",
"char",
"in",
"u'!@#$%^*()[]{}/=?+\\\\|'",
":",
"slug",
"=",
"slug",
".",
"replace",
"(",
"char",
",",
"u''",
")",
"slug",
"=",
"slug",
".",
"replace",
... | return the slug for use in url for given list name . | train | false |
def get_installed_business_segment(shop):
    """Return the installed sample business segment for *shop*,
    read from the shop's configuration."""
    return configuration.get(shop, SAMPLE_BUSINESS_SEGMENT_KEY)
| [
"def",
"get_installed_business_segment",
"(",
"shop",
")",
":",
"return",
"configuration",
".",
"get",
"(",
"shop",
",",
"SAMPLE_BUSINESS_SEGMENT_KEY",
")"
] | returns the installed business segment . | train | false |
def prepareWikify():
    """Prepare for wikification: remove generated files, run the
    wikifier, then remove the zip archive."""
    removeGeneratedFiles()
    wikifier.main()
    removeZip()
| [
"def",
"prepareWikify",
"(",
")",
":",
"removeGeneratedFiles",
"(",
")",
"wikifier",
".",
"main",
"(",
")",
"removeZip",
"(",
")"
] | remove generated files . | train | false |
def from_yaml(yamlstr, **kwargs):
    """Construct an Environment from a YAML specification.

    Keyword arguments override (or extend) values parsed from *yamlstr*
    before the Environment is built.
    """
    # SECURITY: yaml.load without an explicit Loader can construct
    # arbitrary Python objects -- never feed it untrusted input
    # (consider yaml.safe_load).
    data = yaml.load(yamlstr)
    # ``kwargs`` is always a dict, never None -- the old None-check was
    # dead code.
    for (key, value) in kwargs.items():
        data[key] = value
    return Environment(**data)
| [
"def",
"from_yaml",
"(",
"yamlstr",
",",
"**",
"kwargs",
")",
":",
"data",
"=",
"yaml",
".",
"load",
"(",
"yamlstr",
")",
"if",
"(",
"kwargs",
"is",
"not",
"None",
")",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"kwargs",
".",
"items",
"(",... | construct a dictionary of resources from a yaml specification . | train | false |
def get_location_from_uri(uri):
    """Given a URI, build a Location for the matching store backend.

    :raises exception.UnknownScheme: when no registered store handles
        the URI's scheme.
    """
    pieces = urlparse.urlparse(uri)
    if (pieces.scheme not in SCHEME_TO_CLS_MAP.keys()):
        raise exception.UnknownScheme(pieces.scheme)
    scheme_info = SCHEME_TO_CLS_MAP[pieces.scheme]
    return Location(pieces.scheme, uri=uri, store_location_class=scheme_info['location_class'])
| [
"def",
"get_location_from_uri",
"(",
"uri",
")",
":",
"pieces",
"=",
"urlparse",
".",
"urlparse",
"(",
"uri",
")",
"if",
"(",
"pieces",
".",
"scheme",
"not",
"in",
"SCHEME_TO_CLS_MAP",
".",
"keys",
"(",
")",
")",
":",
"raise",
"exception",
".",
"UnknownS... | given a uri . | train | false |
def validate_boolean(option, value):
    """Return *value* if it is a real bool; otherwise raise TypeError.

    *option* only names the offending setting in the error message.
    """
    if not isinstance(value, bool):
        raise TypeError('%s must be True or False' % (option,))
    return value
| [
"def",
"validate_boolean",
"(",
"option",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"bool",
")",
":",
"return",
"value",
"raise",
"TypeError",
"(",
"(",
"'%s must be True or False'",
"%",
"(",
"option",
",",
")",
")",
")"
] | check/normalize boolean settings: true: 1 . | train | false |
def init(mpstate):
    """Module entry point: build the serial module bound to *mpstate*."""
    return SerialModule(mpstate)
| [
"def",
"init",
"(",
"mpstate",
")",
":",
"return",
"SerialModule",
"(",
"mpstate",
")"
] | initialize remotes . | train | false |
def GetPropertyValueTag(value_pb):
    """Return the PropertyValue tag constant for *value_pb*'s set field.

    Checks each possible value field in turn; returns 0 when no
    recognized field is set.
    """
    if value_pb.has_booleanvalue():
        return entity_pb.PropertyValue.kbooleanValue
    elif value_pb.has_doublevalue():
        return entity_pb.PropertyValue.kdoubleValue
    elif value_pb.has_int64value():
        return entity_pb.PropertyValue.kint64Value
    elif value_pb.has_pointvalue():
        return entity_pb.PropertyValue.kPointValueGroup
    elif value_pb.has_referencevalue():
        return entity_pb.PropertyValue.kReferenceValueGroup
    elif value_pb.has_stringvalue():
        return entity_pb.PropertyValue.kstringValue
    elif value_pb.has_uservalue():
        return entity_pb.PropertyValue.kUserValueGroup
    else:
        # No value field set.
        return 0
| [
"def",
"GetPropertyValueTag",
"(",
"value_pb",
")",
":",
"if",
"value_pb",
".",
"has_booleanvalue",
"(",
")",
":",
"return",
"entity_pb",
".",
"PropertyValue",
".",
"kbooleanValue",
"elif",
"value_pb",
".",
"has_doublevalue",
"(",
")",
":",
"return",
"entity_pb"... | returns the tag constant associated with the given entity_pb . | train | false |
def catalog():
    """Return the active translation catalog, computing a default lazily.

    Prefers the thread-local active translation; otherwise builds (and
    caches in the module-level ``_default``) the catalog for
    ``settings.LANGUAGE_CODE``.
    """
    global _default
    t = getattr(_active, u'value', None)
    if (t is not None):
        return t
    if (_default is None):
        # Imported lazily so settings are only touched when needed.
        from django.conf import settings
        _default = translation(settings.LANGUAGE_CODE)
    return _default
| [
"def",
"catalog",
"(",
")",
":",
"global",
"_default",
"t",
"=",
"getattr",
"(",
"_active",
",",
"u'value'",
",",
"None",
")",
"if",
"(",
"t",
"is",
"not",
"None",
")",
":",
"return",
"t",
"if",
"(",
"_default",
"is",
"None",
")",
":",
"from",
"d... | restful crud controller . | train | false |
def format_rfc3339(datetime_instance=None):
    """Format a datetime per RFC 3339 (e.g. ``2000-02-03T04:05:06Z``).

    :param datetime_instance: the (UTC-naive) datetime to format; when
        None, the current UTC time is used.  (The old code crashed with
        AttributeError on its own documented default.)
    """
    if datetime_instance is None:
        import datetime
        datetime_instance = datetime.datetime.utcnow()
    # 'T' separator plus a literal Z marking UTC.
    return (datetime_instance.isoformat('T') + 'Z')
| [
"def",
"format_rfc3339",
"(",
"datetime_instance",
"=",
"None",
")",
":",
"return",
"(",
"datetime_instance",
".",
"isoformat",
"(",
"'T'",
")",
"+",
"'Z'",
")"
] | formats a datetime per rfc 3339 . | train | false |
def check_git_remote_exists(url, version, tags_valid=False):
    """Check that the git remote *url* exists and has branch *version*.

    :param url: git remote URL (or local path).
    :param version: branch name to look for; falsy accepts any remote.
    :param tags_valid: also accept *version* as a tag name.
    :returns: True when the remote (and version, if given) is found.
    """
    cmd = ('git ls-remote %s refs/heads/*' % url).split()
    try:
        output = subprocess.check_output(cmd)
    except (subprocess.CalledProcessError, OSError):
        # Narrowed from a bare ``except:``: a failed or missing git
        # invocation means "not reachable"; anything else propagates.
        return False
    if (not version):
        return True
    if (('refs/heads/%s' % version) in output):
        return True
    if (not tags_valid):
        return False
    # Branch not found; optionally fall back to tags.
    cmd = ('git ls-remote %s refs/tags/*' % url).split()
    try:
        output = subprocess.check_output(cmd)
    except (subprocess.CalledProcessError, OSError):
        return False
    if (('refs/tags/%s' % version) in output):
        return True
    return False
| [
"def",
"check_git_remote_exists",
"(",
"url",
",",
"version",
",",
"tags_valid",
"=",
"False",
")",
":",
"cmd",
"=",
"(",
"'git ls-remote %s refs/heads/*'",
"%",
"url",
")",
".",
"split",
"(",
")",
"try",
":",
"output",
"=",
"subprocess",
".",
"check_output"... | check if the remote exists and has the branch version . | train | false |
def xpath_tokenizer(p):
    """Exercise the XPath tokenizer: flatten each (op, tag) token pair
    to whichever half is non-empty."""
    return [op or tag for op, tag in ElementPath.xpath_tokenizer(p)]
| [
"def",
"xpath_tokenizer",
"(",
"p",
")",
":",
"out",
"=",
"[",
"]",
"for",
"(",
"op",
",",
"tag",
")",
"in",
"ElementPath",
".",
"xpath_tokenizer",
"(",
"p",
")",
":",
"out",
".",
"append",
"(",
"(",
"op",
"or",
"tag",
")",
")",
"return",
"out"
] | test the xpath tokenizer . | train | false |
def get_error_string(error_code):
    """Look up a human-readable description for *error_code*.

    Unknown codes are rendered as their plain string form.
    """
    if error_code not in ERRORS:
        return str(error_code)
    entry = ERRORS[error_code]
    return '%s (%s)' % (entry['description'], entry['name'])
| [
"def",
"get_error_string",
"(",
"error_code",
")",
":",
"if",
"(",
"error_code",
"in",
"ERRORS",
")",
":",
"return",
"(",
"'%s (%s)'",
"%",
"(",
"ERRORS",
"[",
"error_code",
"]",
"[",
"'description'",
"]",
",",
"ERRORS",
"[",
"error_code",
"]",
"[",
"'na... | error code lookup . | train | false |
def ReportEvent(appName, eventID, eventCategory=0, eventType=win32evtlog.EVENTLOG_ERROR_TYPE, strings=None, data=None, sid=None):
    """Report an event to the Windows event log for *appName*.

    Registers the event source, writes a single record, then
    deregisters the source again.
    """
    hAppLog = win32evtlog.RegisterEventSource(None, appName)
    win32evtlog.ReportEvent(hAppLog, eventType, eventCategory, eventID, sid, strings, data)
    win32evtlog.DeregisterEventSource(hAppLog)
| [
"def",
"ReportEvent",
"(",
"appName",
",",
"eventID",
",",
"eventCategory",
"=",
"0",
",",
"eventType",
"=",
"win32evtlog",
".",
"EVENTLOG_ERROR_TYPE",
",",
"strings",
"=",
"None",
",",
"data",
"=",
"None",
",",
"sid",
"=",
"None",
")",
":",
"hAppLog",
"... | report an event for a previously added event source . | train | false |
def p_postfix_expression_3(t):
    # Yacc rule handler for:
    #   postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
    # NOTE(review): PLY normally reads the production from the function's
    # docstring, which appears stripped in this copy; the rule is kept as
    # a plain comment here so parser behaviour is not altered.
    pass
| [
"def",
"p_postfix_expression_3",
"(",
"t",
")",
":",
"pass"
] | postfix_expression : postfix_expression lparen argument_expression_list rparen . | train | false |
def get_maxdays(name):
    """Return the maximum age of the password (in days) for *name*.

    The account policy stores minutes; 0 is returned when no such
    policy is set.
    """
    policies = _get_account_policy(name)
    if 'maxMinutesUntilChangePassword' not in policies:
        return 0
    max_minutes = policies['maxMinutesUntilChangePassword']
    # minutes -> days, same arithmetic as before (/24 then /60).
    return (int(max_minutes) / 24) / 60
| [
"def",
"get_maxdays",
"(",
"name",
")",
":",
"policies",
"=",
"_get_account_policy",
"(",
"name",
")",
"if",
"(",
"'maxMinutesUntilChangePassword'",
"in",
"policies",
")",
":",
"max_minutes",
"=",
"policies",
"[",
"'maxMinutesUntilChangePassword'",
"]",
"return",
... | get the maximum age of the password . | train | true |
def warehouse_type():
    """RESTful CRUD controller for warehouse types."""
    return s3_rest_controller()
| [
"def",
"warehouse_type",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] | restful crud controller . | train | false |
def _get_vdi_chain_size(session, vdi_uuid):
    """Compute the total physical size (in bytes) of a VDI chain.

    Walks the chain starting at *vdi_uuid* and sums each VDI's
    ``physical_utilisation``.
    """
    size_bytes = 0
    for vdi_rec in _walk_vdi_chain(session, vdi_uuid):
        cur_vdi_uuid = vdi_rec['uuid']
        vdi_size_bytes = int(vdi_rec['physical_utilisation'])
        LOG.debug('vdi_uuid=%(cur_vdi_uuid)s vdi_size_bytes=%(vdi_size_bytes)d', {'cur_vdi_uuid': cur_vdi_uuid, 'vdi_size_bytes': vdi_size_bytes})
        size_bytes += vdi_size_bytes
    return size_bytes
| [
"def",
"_get_vdi_chain_size",
"(",
"session",
",",
"vdi_uuid",
")",
":",
"size_bytes",
"=",
"0",
"for",
"vdi_rec",
"in",
"_walk_vdi_chain",
"(",
"session",
",",
"vdi_uuid",
")",
":",
"cur_vdi_uuid",
"=",
"vdi_rec",
"[",
"'uuid'",
"]",
"vdi_size_bytes",
"=",
... | compute the total size of a vdi chain . | train | false |
11,173 | @cache_permission
def can_delete_dictionary(user, project):
    """Check whether *user* may delete dictionaries in *project*."""
    return check_permission(user, project, 'trans.delete_dictionary')
| [
"@",
"cache_permission",
"def",
"can_delete_dictionary",
"(",
"user",
",",
"project",
")",
":",
"return",
"check_permission",
"(",
"user",
",",
"project",
",",
"'trans.delete_dictionary'",
")"
] | checks whether user can delete dictionary for given project . | train | false |
def find_exe(exename):
    """Locate *exename* and return an argv list for invoking it.

    Falls back to ``<sys.prefix>/scripts/<exename>.py`` when nothing is
    found on PATH.  Scripts are wrapped with the current interpreter;
    ``.exe``/``.bat`` files are returned as-is.

    :returns: argv list, or False when nothing usable exists.

    .. deprecated:: emits DeprecationWarning on every call.
    """
    warnings.warn('deprecated', DeprecationWarning)
    exes = which(exename)
    exe = (exes and exes[0])
    if (not exe):
        # Not on PATH: try the interpreter's scripts directory.
        exe = os.path.join(sys.prefix, 'scripts', (exename + '.py'))
    if os.path.exists(exe):
        (path, ext) = os.path.splitext(exe)
        if (ext.lower() in ['.exe', '.bat']):
            # Directly executable (Windows binaries/batch files).
            cmd = [exe]
        else:
            # Run scripts through the current interpreter.
            cmd = [sys.executable, exe]
        return cmd
    else:
        return False
| [
"def",
"find_exe",
"(",
"exename",
")",
":",
"warnings",
".",
"warn",
"(",
"'deprecated'",
",",
"DeprecationWarning",
")",
"exes",
"=",
"which",
"(",
"exename",
")",
"exe",
"=",
"(",
"exes",
"and",
"exes",
"[",
"0",
"]",
")",
"if",
"(",
"not",
"exe",... | look for something named exename or exename + " . | train | false |
def create_handlers_map():
    """Create the URL-to-handler map for the mapreduce library.

    Pipeline handlers (when the pipeline package is importable) are
    mounted first so they take precedence; a catch-all redirect ends
    the list.
    """
    pipeline_handlers_map = []
    if pipeline:
        pipeline_handlers_map = pipeline.create_handlers_map(prefix='.*/pipeline')
    return (pipeline_handlers_map + [('.*/worker_callback', handlers.MapperWorkerCallbackHandler), ('.*/controller_callback', handlers.ControllerCallbackHandler), ('.*/kickoffjob_callback', handlers.KickOffJobHandler), ('.*/finalizejob_callback', handlers.FinalizeJobHandler), ('.*/command/start_job', handlers.StartJobHandler), ('.*/command/cleanup_job', handlers.CleanUpJobHandler), ('.*/command/abort_job', handlers.AbortJobHandler), ('.*/command/list_configs', status.ListConfigsHandler), ('.*/command/list_jobs', status.ListJobsHandler), ('.*/command/get_job_detail', status.GetJobDetailHandler), (STATIC_RE, status.ResourceHandler), ('.*', RedirectHandler)])
| [
"def",
"create_handlers_map",
"(",
")",
":",
"pipeline_handlers_map",
"=",
"[",
"]",
"if",
"pipeline",
":",
"pipeline_handlers_map",
"=",
"pipeline",
".",
"create_handlers_map",
"(",
"prefix",
"=",
"'.*/pipeline'",
")",
"return",
"(",
"pipeline_handlers_map",
"+",
... | create new handlers map . | train | false |
def send_email_ses(sender, subject, message, recipients, image_png):
    """Send a notification email through AWS SES.

    Builds a MIME message via ``generate_email`` (optionally embedding a
    PNG image) and ships it with boto3's ``send_raw_email``.
    """
    # Imported lazily so boto3 is only required when SES is actually used.
    from boto3 import client as boto3_client
    client = boto3_client('ses')
    msg_root = generate_email(sender, subject, message, recipients, image_png)
    response = client.send_raw_email(Source=sender, Destinations=recipients, RawMessage={'Data': msg_root.as_string()})
    logger.debug('Message sent to SES.\nMessageId: {},\nRequestId: {},\nHTTPSStatusCode: {}'.format(response['MessageId'], response['ResponseMetadata']['RequestId'], response['ResponseMetadata']['HTTPStatusCode']))
| [
"def",
"send_email_ses",
"(",
"sender",
",",
"subject",
",",
"message",
",",
"recipients",
",",
"image_png",
")",
":",
"from",
"boto3",
"import",
"client",
"as",
"boto3_client",
"client",
"=",
"boto3_client",
"(",
"'ses'",
")",
"msg_root",
"=",
"generate_email... | sends notification through aws ses . | train | true |
def _embed_ipython_shell(namespace={}, banner=''):
    """Return a callable that starts an embedded IPython shell.

    Handles both modern (``IPython.terminal``) and legacy
    (``IPython.frontend.terminal``) import locations.

    NOTE(review): the mutable default ``namespace={}`` is shared across
    calls -- kept as-is to avoid changing behaviour.
    """
    try:
        from IPython.terminal.embed import InteractiveShellEmbed
        from IPython.terminal.ipapp import load_default_config
    except ImportError:
        # Pre-1.0 IPython package layout.
        from IPython.frontend.terminal.embed import InteractiveShellEmbed
        from IPython.frontend.terminal.ipapp import load_default_config

    @wraps(_embed_ipython_shell)
    def wrapper(namespace=namespace, banner=''):
        config = load_default_config()
        # InteractiveShellEmbed is a singleton; clear any previous one
        # before creating a fresh instance.
        InteractiveShellEmbed.clear_instance()
        shell = InteractiveShellEmbed.instance(banner1=banner, user_ns=namespace, config=config)
        shell()
    return wrapper
| [
"def",
"_embed_ipython_shell",
"(",
"namespace",
"=",
"{",
"}",
",",
"banner",
"=",
"''",
")",
":",
"try",
":",
"from",
"IPython",
".",
"terminal",
".",
"embed",
"import",
"InteractiveShellEmbed",
"from",
"IPython",
".",
"terminal",
".",
"ipapp",
"import",
... | start an ipython shell . | train | false |
def zeros_aligned(shape, dtype, order='C', align=128):
    """Like ``np.zeros`` but with the data buffer aligned to *align* bytes.

    Over-allocates a byte buffer by *align* bytes, then returns a view
    starting at the first aligned offset, cast to *dtype* and reshaped.
    """
    size = np.prod(shape, dtype=np.int64) * np.dtype(dtype).itemsize
    raw = np.zeros(size + align, dtype=np.uint8)
    # Distance from the buffer start to the next aligned address.
    offset = (-raw.ctypes.data) % align
    return raw[offset:offset + size].view(dtype).reshape(shape, order=order)
| [
"def",
"zeros_aligned",
"(",
"shape",
",",
"dtype",
",",
"order",
"=",
"'C'",
",",
"align",
"=",
"128",
")",
":",
"nbytes",
"=",
"(",
"np",
".",
"prod",
"(",
"shape",
",",
"dtype",
"=",
"np",
".",
"int64",
")",
"*",
"np",
".",
"dtype",
"(",
"dt... | like np . | train | false |
def filter_thing2(x):
    """Return the ``_thing2`` member of the given relationship object."""
    return getattr(x, '_thing2')
| [
"def",
"filter_thing2",
"(",
"x",
")",
":",
"return",
"x",
".",
"_thing2"
] | return the thing2 of a given relationship . | train | false |
@pytest.mark.django_db
def test_apiview_get_single(rf):
    """Retrieving one user over the API returns its serialized fields.

    ``rf`` is pytest-django's RequestFactory fixture.  Also checks that the
    ``email`` field is excluded from the payload and that an unknown id
    raises Http404.
    """
    view = UserAPIView.as_view()
    user = UserFactory.create(username='foo')
    request = create_api_request(rf)
    response = view(request, id=user.id)
    assert (response.status_code == 200)
    response_data = json.loads(response.content)
    assert isinstance(response_data, dict)
    assert (response_data['username'] == 'foo')
    # Sensitive fields such as the email address must not be serialized.
    assert ('email' not in response_data)
    # A non-existent id must surface as a regular 404.
    with pytest.raises(Http404):
        view(request, id='777')
| [
"@",
"pytest",
".",
"mark",
".",
"django_db",
"def",
"test_apiview_get_single",
"(",
"rf",
")",
":",
"view",
"=",
"UserAPIView",
".",
"as_view",
"(",
")",
"user",
"=",
"UserFactory",
".",
"create",
"(",
"username",
"=",
"'foo'",
")",
"request",
"=",
"cre... | tests retrieving a single object using the api . | train | false |
def force_to_valid_python_variable_name(old_name):
    """Return a name that is safe to use as a Python identifier.

    Strips surrounding whitespace and prefixes ``opt_`` when the stripped
    name collides with a Python keyword.  Fixes the original logic, which
    compared the *unstripped* name against ``python_keywords`` (so a keyword
    surrounded by whitespace, e.g. ``" class "``, slipped through), and
    replaces the redundant ``.lstrip().rstrip()`` with ``.strip()``.

    :param old_name: candidate name (e.g. a C++ option name).
    :return: the cleaned-up, keyword-safe name.
    """
    new_name = old_name.strip()
    if new_name in python_keywords:
        new_name = u'opt_' + new_name
    return new_name
| [
"def",
"force_to_valid_python_variable_name",
"(",
"old_name",
")",
":",
"new_name",
"=",
"old_name",
"new_name",
"=",
"new_name",
".",
"lstrip",
"(",
")",
".",
"rstrip",
"(",
")",
"if",
"(",
"old_name",
"in",
"python_keywords",
")",
":",
"new_name",
"=",
"(... | valid c++ names are not always valid in python . | train | false |
@pytest.mark.xfail(reason='https://github.com/The-Compiler/qutebrowser/issues/1070', strict=False)
def test_installed_package():
    """Ensure the tests run against the tox-installed qutebrowser package.

    NOTE(review): ``print sys.path`` is Python 2 statement syntax; under
    Python 3 this line is a SyntaxError -- confirm the target interpreter.
    """
    print sys.path
    # The installed package must live under the .tox virtualenv directory.
    assert ('.tox' in qutebrowser.__file__.split(os.sep))
| [
"@",
"pytest",
".",
"mark",
".",
"xfail",
"(",
"reason",
"=",
"'https://github.com/The-Compiler/qutebrowser/issues/1070'",
",",
"strict",
"=",
"False",
")",
"def",
"test_installed_package",
"(",
")",
":",
"print",
"sys",
".",
"path",
"assert",
"(",
"'.tox'",
"in... | make sure the tests are running against the installed package . | train | false |
def generate_filename(instance, old_filename):
    """Build a unique, timestamp-based upload path for a document file.

    The original file's extension is preserved.  ``instance`` is accepted to
    satisfy the Django ``upload_to`` signature but is not used.

    :param instance: model instance (ignored).
    :param old_filename: the uploaded file's original name.
    :return: relative path under ``documents/files/``.
    """
    _, ext = os.path.splitext(old_filename)
    stamped_name = str(time.time()) + ext
    return 'documents/files/' + stamped_name
| [
"def",
"generate_filename",
"(",
"instance",
",",
"old_filename",
")",
":",
"extension",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"old_filename",
")",
"[",
"1",
"]",
"filename",
"=",
"(",
"str",
"(",
"time",
".",
"time",
"(",
")",
")",
"+",
"ext... | generate filename . | train | false |
def errnoToFailure(e, path):
    """Map an errno from OSError/IOError onto the matching FTP failure.

    :param e: errno value from a caught OSError/IOError.
    :param path: path involved in the failed operation.
    :return: a Deferred failing with the corresponding FTP error class, or a
        generic failure for unrecognised errno values.
    """
    # Dispatch table from errno -> FTP error class (these names shadow the
    # builtins in this module: they are the FTP protocol error classes).
    error_by_errno = {
        errno.ENOENT: FileNotFoundError,
        errno.EACCES: PermissionDeniedError,
        errno.EPERM: PermissionDeniedError,
        errno.ENOTDIR: IsNotADirectoryError,
        errno.EEXIST: FileExistsError,
        errno.EISDIR: IsADirectoryError,
    }
    error_class = error_by_errno.get(e)
    if error_class is not None:
        return defer.fail(error_class(path))
    return defer.fail()
| [
"def",
"errnoToFailure",
"(",
"e",
",",
"path",
")",
":",
"if",
"(",
"e",
"==",
"errno",
".",
"ENOENT",
")",
":",
"return",
"defer",
".",
"fail",
"(",
"FileNotFoundError",
"(",
"path",
")",
")",
"elif",
"(",
"(",
"e",
"==",
"errno",
".",
"EACCES",
... | map c{oserror} and c{ioerror} to standard ftp errors . | train | false |
def extract_comic(path_to_comic_file):
    """Un-archive a comic file into a persistent temporary directory.

    '#' characters in extracted file names are replaced with '_'
    (presumably because '#' confuses later path/URL handling -- confirm).

    :param path_to_comic_file: path to the comic archive (cbz/cbr).
    :return: unicode path of the directory holding the extracted files.
    """
    tdir = PersistentTemporaryDirectory(suffix='_comic_extract')
    if (not isinstance(tdir, unicode)):
        # Normalize byte paths to unicode using the filesystem encoding.
        tdir = tdir.decode(filesystem_encoding)
    extract(path_to_comic_file, tdir)
    for x in walk(tdir):
        bn = os.path.basename(x)
        nbn = bn.replace('#', '_')
        if (nbn != bn):
            os.rename(x, os.path.join(os.path.dirname(x), nbn))
    return tdir
| [
"def",
"extract_comic",
"(",
"path_to_comic_file",
")",
":",
"tdir",
"=",
"PersistentTemporaryDirectory",
"(",
"suffix",
"=",
"'_comic_extract'",
")",
"if",
"(",
"not",
"isinstance",
"(",
"tdir",
",",
"unicode",
")",
")",
":",
"tdir",
"=",
"tdir",
".",
"deco... | un-archive the comic file . | train | false |
def get_package_data():
    """Return the extra data files bundled with the jupyterhub package.

    :return: mapping of package name to glob patterns for its data files.
    """
    return {
        'jupyterhub': ['alembic.ini', 'alembic/*', 'alembic/versions/*'],
    }
| [
"def",
"get_package_data",
"(",
")",
":",
"package_data",
"=",
"{",
"}",
"package_data",
"[",
"'jupyterhub'",
"]",
"=",
"[",
"'alembic.ini'",
",",
"'alembic/*'",
",",
"'alembic/versions/*'",
"]",
"return",
"package_data"
] | all of all of the "extra" package data files collected by the package_files and package_path functions in setup . | train | false |
def make_sparse(arr, kind='block', fill_value=None):
    """Convert an ndarray to sparse format.

    Parameters
    ----------
    arr : ndarray (1-dimensional)
    kind : {'block', 'integer'}
    fill_value : scalar, optional
        Value treated as "missing"; defaults to the dtype's natural NA.

    Returns
    -------
    (sparsified_values, index, fill_value)
    """
    arr = _sanitize_values(arr)
    if (arr.ndim > 1):
        raise TypeError('expected dimension <= 1 data')
    if (fill_value is None):
        # Pick the natural NA value for this dtype (e.g. NaN for floats).
        fill_value = na_value_for_dtype(arr.dtype)
    if isnull(fill_value):
        mask = notnull(arr)
    else:
        mask = (arr != fill_value)
    length = len(arr)
    if (length != mask.size):
        # NOTE(review): this branch assumes `mask` is itself sparse and
        # exposes `.sp_index` -- confirm which callers can reach it.
        indices = mask.sp_index.indices
    else:
        indices = np.arange(length, dtype=np.int32)[mask]
    index = _make_index(length, indices, kind)
    sparsified_values = arr[mask]
    return (sparsified_values, index, fill_value)
| [
"def",
"make_sparse",
"(",
"arr",
",",
"kind",
"=",
"'block'",
",",
"fill_value",
"=",
"None",
")",
":",
"arr",
"=",
"_sanitize_values",
"(",
"arr",
")",
"if",
"(",
"arr",
".",
"ndim",
">",
"1",
")",
":",
"raise",
"TypeError",
"(",
"'expected dimension... | convert ndarray to sparse format parameters arr : ndarray kind : {block . | train | false |
def ReplaceChunksInBuffer(chunks, vim_buffer, locations):
    """Apply a list of replacement chunks to a buffer-like object in place.

    Chunks are sorted by start position so the line/character deltas caused
    by earlier replacements can be accumulated and applied to later ones.

    :param chunks: dicts with a 'range' (start/end line_num/column_num) and
        'replacement_text'.
    :param vim_buffer: the buffer being modified.
    :param locations: list that ReplaceChunk appends edit locations to.
    """
    # Process chunks in ascending position order so accumulated deltas
    # remain valid for everything that follows.
    chunks.sort(key=(lambda chunk: (chunk[u'range'][u'start'][u'line_num'], chunk[u'range'][u'start'][u'column_num'])))
    last_line = (-1)
    line_delta = 0
    for chunk in chunks:
        if (chunk[u'range'][u'start'][u'line_num'] != last_line):
            # Moved to a new line: column shifts from earlier chunks no
            # longer apply, so reset the per-line character delta.
            last_line = chunk[u'range'][u'end'][u'line_num']
            char_delta = 0
        (new_line_delta, new_char_delta) = ReplaceChunk(chunk[u'range'][u'start'], chunk[u'range'][u'end'], chunk[u'replacement_text'], line_delta, char_delta, vim_buffer, locations)
        line_delta += new_line_delta
        char_delta += new_char_delta
| [
"def",
"ReplaceChunksInBuffer",
"(",
"chunks",
",",
"vim_buffer",
",",
"locations",
")",
":",
"chunks",
".",
"sort",
"(",
"key",
"=",
"(",
"lambda",
"chunk",
":",
"(",
"chunk",
"[",
"u'range'",
"]",
"[",
"u'start'",
"]",
"[",
"u'line_num'",
"]",
",",
"... | apply changes in |chunks| to the buffer-like object |buffer| . | train | false |
def pixel_shuffle(input, upscale_factor):
    """Rearrange a (N, C*r^2, H, W) tensor into (N, C, H*r, W*r).

    This is the sub-pixel convolution upsampling step: each r*r group of
    channels is distributed over an r*r spatial block.

    :param input: 4-D tensor of shape (batch, channels*r^2, height, width).
    :param upscale_factor: the factor r.
    :return: tensor of shape (batch, channels, height*r, width*r).
    """
    batch, channels, height, width = input.size()
    r = upscale_factor
    out_channels = channels // (r * r)
    expanded = input.contiguous().view(batch, out_channels, r, r, height, width)
    # Interleave the two r-axes with the spatial axes, then flatten.
    shuffled = expanded.permute(0, 1, 4, 2, 5, 3).contiguous()
    return shuffled.view(batch, out_channels, height * r, width * r)
| [
"def",
"pixel_shuffle",
"(",
"input",
",",
"upscale_factor",
")",
":",
"(",
"batch_size",
",",
"channels",
",",
"in_height",
",",
"in_width",
")",
"=",
"input",
".",
"size",
"(",
")",
"channels",
"//=",
"(",
"upscale_factor",
"**",
"2",
")",
"out_height",
... | rearranges elements in a tensor of shape [* . | train | false |
def plugin_info(plugin):
    """Return the meta-data recorded for the named plugin.

    :param plugin: plugin name to look up.
    :raises ValueError: if no meta-data is registered for ``plugin``.
    """
    if plugin in plugin_meta_data:
        return plugin_meta_data[plugin]
    raise ValueError('No information on plugin "%s"' % plugin)
| [
"def",
"plugin_info",
"(",
"plugin",
")",
":",
"try",
":",
"return",
"plugin_meta_data",
"[",
"plugin",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"(",
"'No information on plugin \"%s\"'",
"%",
"plugin",
")",
")"
] | return plugin meta-data . | train | false |
@register(u'backward-kill-word')
def backward_kill_word(event):
    """Kill the word behind point (readline's backward-kill-word binding).

    Delegates to unix_word_rubout with WORD=False, i.e. word boundaries are
    the smaller alphanumeric words rather than whitespace-delimited WORDs.
    """
    unix_word_rubout(event, WORD=False)
| [
"@",
"register",
"(",
"u'backward-kill-word'",
")",
"def",
"backward_kill_word",
"(",
"event",
")",
":",
"unix_word_rubout",
"(",
"event",
",",
"WORD",
"=",
"False",
")"
] | kills the word before point . | train | false |
def set_disable_keyboard_on_lock(enable):
    """Set whether the keyboard is disabled while the Xserve enclosure lock
    is engaged.

    :param enable: value accepted by mac_utils.validate_enabled (bool-like).
    :return: True if the new setting was confirmed, per confirm_updated.
    """
    state = salt.utils.mac_utils.validate_enabled(enable)
    cmd = 'systemsetup -setdisablekeyboardwhenenclosurelockisengaged {0}'.format(state)
    salt.utils.mac_utils.execute_return_success(cmd)
    return salt.utils.mac_utils.confirm_updated(state, get_disable_keyboard_on_lock, normalize_ret=True)
| [
"def",
"set_disable_keyboard_on_lock",
"(",
"enable",
")",
":",
"state",
"=",
"salt",
".",
"utils",
".",
"mac_utils",
".",
"validate_enabled",
"(",
"enable",
")",
"cmd",
"=",
"'systemsetup -setdisablekeyboardwhenenclosurelockisengaged {0}'",
".",
"format",
"(",
"state... | get whether or not the keyboard should be disabled when the x serve enclosure lock is engaged . | train | false |
def concrete_type(ds):
    """Return a concrete Python/NumPy/pandas type for a datashape.

    :param ds: a datashape object or a string parseable into one.
    :return: int/float/bool/complex for core scalar measures, a NumPy scalar
        type for other scalars, DataFrame/Series for 1-d collections
        (records vs plain measures), ndarray for higher dimensions, and
        list as the final fallback.
    """
    if isinstance(ds, (str, unicode)):
        ds = dshape(ds)
    if ((not iscollection(ds)) and isscalar(ds.measure)):
        # Unwrap Option measures (e.g. ?int32) to the underlying type.
        measure = getattr(ds.measure, 'ty', ds.measure)
        if (measure in integral.types):
            return int
        elif (measure in floating.types):
            return float
        elif (measure in boolean.types):
            return bool
        elif (measure in complexes.types):
            return complex
        else:
            return ds.measure.to_numpy_dtype().type
    if (not iscollection(ds)):
        return type(ds)
    if (ndim(ds) == 1):
        # Record rows need columns -> DataFrame; plain measures -> Series.
        return (pd.DataFrame if isrecord(ds.measure) else pd.Series)
    if (ndim(ds) > 1):
        return np.ndarray
    return list
| [
"def",
"concrete_type",
"(",
"ds",
")",
":",
"if",
"isinstance",
"(",
"ds",
",",
"(",
"str",
",",
"unicode",
")",
")",
":",
"ds",
"=",
"dshape",
"(",
"ds",
")",
"if",
"(",
"(",
"not",
"iscollection",
"(",
"ds",
")",
")",
"and",
"isscalar",
"(",
... | a type into which we can safely deposit streaming data . | train | false |
def ConditionalElement(node, tag, condition, attr=None):
    """Add a SubElement under ``node`` only when ``condition`` is truthy.

    :param node: parent element.
    :param tag: tag name for the new child.
    :param condition: the child is created only if this is truthy.
    :param attr: optional attributes -- a string is serialised as
        ``{attr: '1'}``, a dict is used as-is.
    :return: the new element, or None when ``condition`` is falsy.
    """
    if not condition:
        return None
    make_child = partial(SubElement, node, tag)
    if isinstance(attr, str):
        return make_child({attr: '1'})
    if isinstance(attr, dict):
        return make_child(attr)
    return make_child()
| [
"def",
"ConditionalElement",
"(",
"node",
",",
"tag",
",",
"condition",
",",
"attr",
"=",
"None",
")",
":",
"sub",
"=",
"partial",
"(",
"SubElement",
",",
"node",
",",
"tag",
")",
"if",
"bool",
"(",
"condition",
")",
":",
"if",
"isinstance",
"(",
"at... | utility function for adding nodes if certain criteria are fulfilled an optional attribute can be passed in which will always be serialised as 1 . | train | false |
11,203 | def _read_ch(fid, subtype, samp, data_size):
if (subtype in ('24BIT', 'bdf')):
ch_data = np.fromfile(fid, dtype=np.uint8, count=(samp * data_size))
ch_data = ch_data.reshape((-1), 3).astype(np.int32)
ch_data = ((ch_data[:, 0] + (ch_data[:, 1] << 8)) + (ch_data[:, 2] << 16))
ch_data[(ch_data >= (1 << 23))] -= (1 << 24)
else:
ch_data = np.fromfile(fid, dtype='<i2', count=samp)
return ch_data
| [
"def",
"_read_ch",
"(",
"fid",
",",
"subtype",
",",
"samp",
",",
"data_size",
")",
":",
"if",
"(",
"subtype",
"in",
"(",
"'24BIT'",
",",
"'bdf'",
")",
")",
":",
"ch_data",
"=",
"np",
".",
"fromfile",
"(",
"fid",
",",
"dtype",
"=",
"np",
".",
"uin... | read a number of samples for a single channel . | train | false |
def reload_(name):
    """Reload the named service.

    Delegates to ``term`` (presumably terminates the process so the service
    supervisor restarts it -- confirm in this module's ``term``).

    :param name: service name.
    """
    term(name)
| [
"def",
"reload_",
"(",
"name",
")",
":",
"term",
"(",
"name",
")"
] | reload the named service cli example: . | train | false |
def get_saucelabs_username_and_key():
    """Read the Sauce Labs credentials configured in Django settings.

    :return: dict with 'username' and 'access-key' entries (either may be
        None when the corresponding setting is absent).
    """
    sauce = settings.SAUCE
    return {
        'username': sauce.get('USERNAME'),
        'access-key': sauce.get('ACCESS_ID'),
    }
| [
"def",
"get_saucelabs_username_and_key",
"(",
")",
":",
"return",
"{",
"'username'",
":",
"settings",
".",
"SAUCE",
".",
"get",
"(",
"'USERNAME'",
")",
",",
"'access-key'",
":",
"settings",
".",
"SAUCE",
".",
"get",
"(",
"'ACCESS_ID'",
")",
"}"
] | returns the sauce labs username and access id as set by environment variables . | train | false |
def Summary(urlstats):
    """Summarize relevant statistics for a URL's requests.

    Iterates ``urlrequestlist`` with ``reversed`` (presumably the list is
    stored newest-first, so the summary comes out chronological -- confirm).

    :param urlstats: object exposing a ``urlrequestlist`` of request records.
    :return: a RequestSummary with parallel timestamp/total-time/rpc-time
        lists.
    """
    summary = RequestSummary()
    for request in reversed(urlstats.urlrequestlist):
        summary.timestamps.append(request.timestamp)
        summary.totaltimes.append(request.totalresponsetime)
        summary.totalrpctimes.append(request.totalrpctime)
    return summary
| [
"def",
"Summary",
"(",
"urlstats",
")",
":",
"summary",
"=",
"RequestSummary",
"(",
")",
"for",
"request",
"in",
"reversed",
"(",
"urlstats",
".",
"urlrequestlist",
")",
":",
"summary",
".",
"timestamps",
".",
"append",
"(",
"request",
".",
"timestamp",
")... | summarize relevant statistics for requests . | train | false |
def order_query(statement, order, natural_order=None, labels=None):
    """Return ``statement`` with ORDER BY built from explicit and natural order.

    :param statement: SQLAlchemy select statement.
    :param order: list of (attribute, direction) pairs to order by first.
    :param natural_order: mapping of attribute name -> default direction,
        applied to any column not already ordered explicitly.
    :param labels: iterable of attribute names aligned with
        ``statement.columns``.
    :return: the ordered statement.
    """
    order = order or []
    labels = labels or {}
    natural_order = natural_order or []
    final_order = OrderedDict()
    columns = OrderedDict(zip(labels, statement.columns))
    if SPLIT_DIMENSION_NAME in statement.columns:
        # The split dimension always sorts first so result groups stay together.
        split_column = sql.expression.column(SPLIT_DIMENSION_NAME)
        final_order[SPLIT_DIMENSION_NAME] = split_column
    for attribute, direction in order:
        attribute = str(attribute)
        column = order_column(columns[attribute], direction)
        if attribute not in final_order:
            final_order[attribute] = column
    for name, column in columns.items():
        # BUG FIX: the original tested membership in the undefined name
        # `order_by` (NameError for any naturally-ordered column); the
        # intended check is against the order built so far.
        if name in natural_order and name not in final_order:
            final_order[name] = order_column(column, natural_order[name])
    return statement.order_by(*final_order.values())
| [
"def",
"order_query",
"(",
"statement",
",",
"order",
",",
"natural_order",
"=",
"None",
",",
"labels",
"=",
"None",
")",
":",
"order",
"=",
"(",
"order",
"or",
"[",
"]",
")",
"labels",
"=",
"(",
"labels",
"or",
"{",
"}",
")",
"natural_order",
"=",
... | returns a sql statement which is ordered according to the order . | train | false |
@gen.engine
def GetMergedLogsFileList(merged_store, logs_paths, marker, callback):
    """Fetch the sorted list of merged log file names from the object store.

    Filters out the processed-files registry, files whose instance name
    cannot be parsed, and (when --ec2_only is set) non-EC2 instances.

    :param merged_store: object store to list keys from.
    :param logs_paths: helper describing the merged-logs directory layout.
    :param marker: file name relative to the merged directory to start
        after, or None to list from the beginning.
    :param callback: invoked with the final sorted list of file names.
    """
    registry_file = logs_paths.ProcessedRegistryPath()
    def _WantFile(filename):
        # Never analyze the registry bookkeeping file itself.
        if (filename == registry_file):
            return False
        instance = logs_paths.MergedLogPathToInstance(filename)
        if (instance is None):
            logging.error(('Could not extract instance from file name %s' % filename))
            return False
        return ((not options.options.ec2_only) or logs_util.IsEC2Instance(instance))
    base_path = logs_paths.MergedDirectory()
    marker = (os.path.join(base_path, marker) if (marker is not None) else None)
    file_list = (yield gen.Task(store_utils.ListAllKeys, merged_store, prefix=base_path, marker=marker))
    files = [f for f in file_list if _WantFile(f)]
    files.sort()
    logging.info(('found %d merged log files, analyzing %d' % (len(file_list), len(files))))
    callback(files)
| [
"@",
"gen",
".",
"engine",
"def",
"GetMergedLogsFileList",
"(",
"merged_store",
",",
"logs_paths",
",",
"marker",
",",
"callback",
")",
":",
"registry_file",
"=",
"logs_paths",
".",
"ProcessedRegistryPath",
"(",
")",
"def",
"_WantFile",
"(",
"filename",
")",
"... | fetch the list of file names from s3 . | train | false |
def get_data_member(parent, path):
    """Get a data member from a resource via a JMESPath search.

    Lazily loads the parent resource when its data has not been fetched yet.

    :param parent: resource whose ``meta.data`` is searched.
    :param path: JMESPath query string.
    :raises ResourceLoadException: if the data is missing and the resource
        has no ``load`` method.
    :return: the search result (may be None).
    """
    if parent.meta.data is None:
        if not hasattr(parent, 'load'):
            raise ResourceLoadException(
                '{0} has no load method!'.format(parent.__class__.__name__))
        parent.load()
    return jmespath.search(path, parent.meta.data)
| [
"def",
"get_data_member",
"(",
"parent",
",",
"path",
")",
":",
"if",
"(",
"parent",
".",
"meta",
".",
"data",
"is",
"None",
")",
":",
"if",
"hasattr",
"(",
"parent",
",",
"'load'",
")",
":",
"parent",
".",
"load",
"(",
")",
"else",
":",
"raise",
... | get a data member from a parent using a jmespath search query . | train | false |
def no_real_s3_credentials():
    """Return True when the live-S3 integration tests should be skipped.

    Tests require WALE_S3_INTEGRATION_TESTS to be explicitly enabled and
    both AWS credential variables to be present in the environment.
    """
    if parse_boolean_envvar(os.getenv('WALE_S3_INTEGRATION_TESTS')) is not True:
        return True
    required = ('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')
    return any(os.getenv(name) is None for name in required)
| [
"def",
"no_real_s3_credentials",
"(",
")",
":",
"if",
"(",
"parse_boolean_envvar",
"(",
"os",
".",
"getenv",
"(",
"'WALE_S3_INTEGRATION_TESTS'",
")",
")",
"is",
"not",
"True",
")",
":",
"return",
"True",
"for",
"e_var",
"in",
"(",
"'AWS_ACCESS_KEY_ID'",
",",
... | helps skip integration tests without live credentials . | train | false |
@nodes_or_number(0)
def cycle_graph(n, create_using=None):
    """Return the cycle graph C_n of cyclically connected nodes.

    :param n: node count or iterable of nodes; the nodes_or_number decorator
        converts it into an (original_value, nodes) pair.
    :param create_using: optional graph instance determining the graph type.
    :return: the constructed cycle graph.
    """
    (n_orig, nodes) = n
    G = empty_graph(nodes, create_using)
    G.name = ('cycle_graph(%s)' % (n_orig,))
    # Chain consecutive nodes, then close the cycle with a last->first edge.
    G.add_edges_from(nx.utils.pairwise(nodes))
    G.add_edge(nodes[(-1)], nodes[0])
    return G
| [
"@",
"nodes_or_number",
"(",
"0",
")",
"def",
"cycle_graph",
"(",
"n",
",",
"create_using",
"=",
"None",
")",
":",
"(",
"n_orig",
",",
"nodes",
")",
"=",
"n",
"G",
"=",
"empty_graph",
"(",
"nodes",
",",
"create_using",
")",
"G",
".",
"name",
"=",
"... | return the cycle graph c_n of cyclicly connected nodes . | train | false |
def get_xml(vm_):
    """Return the XML description of the given VM's domain.

    CLI Example:

        salt '*' virt.get_xml <vm name>

    :param vm_: name of the virtual machine.
    :return: XML string as produced by the domain's XMLDesc(0).
    """
    dom = _get_domain(vm_)
    return dom.XMLDesc(0)
| [
"def",
"get_xml",
"(",
"vm_",
")",
":",
"dom",
"=",
"_get_domain",
"(",
"vm_",
")",
"return",
"dom",
".",
"XMLDesc",
"(",
"0",
")"
] | returns the xml for a given vm cli example: . | train | false |
def scan_for_null_records(table, col_name, check_fkeys):
    """Raise if ``table`` has NULL values in ``col_name``.

    :param table: SQLAlchemy Table to inspect.
    :param col_name: column to scan; silently skipped if the table lacks it.
    :param check_fkeys: when True, only scan columns that are foreign keys
        into the ``instances`` table.
    :raises exception.ValidationError: if any NULL records are found.
    """
    if (col_name in table.columns):
        if check_fkeys:
            fkey_found = False
            fkeys = (table.c[col_name].foreign_keys or [])
            for fkey in fkeys:
                if (fkey.column.table.name == 'instances'):
                    fkey_found = True
            if (not fkey_found):
                # Not an instances FK: nothing to validate here.
                return
        records = len(list(table.select().where((table.c[col_name] == null())).execute()))
        if records:
            msg = (_("There are %(records)d records in the '%(table_name)s' table where the uuid or instance_uuid column is NULL. These must be manually cleaned up before the migration will pass. Consider running the 'nova-manage db null_instance_uuid_scan' command.") % {'records': records, 'table_name': table.name})
            raise exception.ValidationError(detail=msg)
| [
"def",
"scan_for_null_records",
"(",
"table",
",",
"col_name",
",",
"check_fkeys",
")",
":",
"if",
"(",
"col_name",
"in",
"table",
".",
"columns",
")",
":",
"if",
"check_fkeys",
":",
"fkey_found",
"=",
"False",
"fkeys",
"=",
"(",
"table",
".",
"c",
"[",
... | queries the table looking for null instances of the given column . | train | false |
def get_available_disk_space():
    """Return the free disk space, in bytes, available to unprivileged users
    on the filesystem containing BACKUP_DIR_LOCATION.

    Uses the named attributes of the ``os.statvfs`` result instead of
    indexing with constants from the ``statvfs`` module, which is deprecated
    and removed in Python 3 (the named attributes work on 2.6+ as well).
    """
    stat_struct = os.statvfs(os.path.dirname(BACKUP_DIR_LOCATION))
    return stat_struct.f_bavail * stat_struct.f_bsize
| [
"def",
"get_available_disk_space",
"(",
")",
":",
"stat_struct",
"=",
"os",
".",
"statvfs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"BACKUP_DIR_LOCATION",
")",
")",
"return",
"(",
"stat_struct",
"[",
"statvfs",
".",
"F_BAVAIL",
"]",
"*",
"stat_struct",
... | returns the amount of available disk space under /opt/appscale . | train | false |
11,216 | def _read_annot(annot, annotmap, sfreq, data_length):
pat = '([+/-]\\d+.\\d+),(\\w+)'
annot = open(annot).read()
triggers = re.findall(pat, annot)
(times, values) = zip(*triggers)
times = [(float(time) * sfreq) for time in times]
pat = '(\\w+):(\\d+)'
annotmap = open(annotmap).read()
mappings = re.findall(pat, annotmap)
maps = {}
for mapping in mappings:
maps[mapping[0]] = mapping[1]
triggers = [int(maps[value]) for value in values]
stim_channel = np.zeros(data_length)
for (time, trigger) in zip(times, triggers):
stim_channel[time] = trigger
return stim_channel
| [
"def",
"_read_annot",
"(",
"annot",
",",
"annotmap",
",",
"sfreq",
",",
"data_length",
")",
":",
"pat",
"=",
"'([+/-]\\\\d+.\\\\d+),(\\\\w+)'",
"annot",
"=",
"open",
"(",
"annot",
")",
".",
"read",
"(",
")",
"triggers",
"=",
"re",
".",
"findall",
"(",
"p... | annotation file reader . | train | false |
def init_helper(obj, kw):
    """Set attributes on ``obj`` from the keyword dict ``kw``.

    Rejects any key that is not already an attribute of ``obj``, mimicking
    a constructor's unexpected-keyword error.  Uses ``items()`` instead of
    the Python-2-only ``iteritems()`` so the helper also runs on Python 3.

    :param obj: object whose attributes are initialized.
    :param kw: mapping of attribute name -> value.
    :raises TypeError: if ``kw`` contains a name ``obj`` does not have.
    """
    for key, value in kw.items():
        if not hasattr(obj, key):
            raise TypeError(
                obj.__class__.__name__
                + ' constructor got '
                + "unexpected keyword argument '" + key + "'")
        setattr(obj, key, value)
| [
"def",
"init_helper",
"(",
"obj",
",",
"kw",
")",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"kw",
".",
"iteritems",
"(",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"obj",
",",
"k",
")",
")",
":",
"raise",
"TypeError",
"(",
"(",
"(",
"(",
... | helper for classes with attributes initialized by keyword arguments . | train | false |
def month_crumb(date):
    """Build a breadcrumb Crumb for a month archive page.

    :param date: a date within the target month.
    :return: Crumb whose label is the capitalized month name and whose URL
        is the zinnia month-archive view for that year/month.
    """
    year = date.strftime('%Y')
    month = date.strftime('%m')
    # Django's 'F' date format gives the full, localized month name.
    month_text = format(date, 'F').capitalize()
    return Crumb(month_text, reverse('zinnia:entry_archive_month', args=[year, month]))
| [
"def",
"month_crumb",
"(",
"date",
")",
":",
"year",
"=",
"date",
".",
"strftime",
"(",
"'%Y'",
")",
"month",
"=",
"date",
".",
"strftime",
"(",
"'%m'",
")",
"month_text",
"=",
"format",
"(",
"date",
",",
"'F'",
")",
".",
"capitalize",
"(",
")",
"r... | crumb for a month . | train | true |
def fix_uri_credentials(uri, to_quoted):
    """Fix a swift store URI's embedded credentials by round-tripping it.

    Decrypts the URI, parses it with legacy behaviour monkey-patched onto a
    StoreLocation, and re-encrypts the re-serialized URI in the opposite
    credential-quoting convention.

    :param uri: encrypted swift storage URI; falsy values return None.
    :param to_quoted: True to convert unquoted credentials to quoted form,
        False for the reverse direction.
    :raises exception.Invalid: if the URI cannot be decrypted.
    :return: the re-encrypted URI, or None for a falsy input.
    """
    if (not uri):
        return
    location = glance.store.swift.StoreLocation({})
    if to_quoted:
        # Incoming URI stores credentials unquoted: parse with the old parser.
        location.parse_uri = types.MethodType(legacy_parse_uri, location)
    else:
        # Going back to legacy form: serialize credentials unquoted.
        location._get_credstring = types.MethodType(legacy__get_credstring, location)
    decrypted_uri = None
    try:
        decrypted_uri = decrypt_location(uri)
    except (TypeError, ValueError) as e:
        raise exception.Invalid(str(e))
    location.parse_uri(decrypted_uri)
    return encrypt_location(location.get_uri())
| [
"def",
"fix_uri_credentials",
"(",
"uri",
",",
"to_quoted",
")",
":",
"if",
"(",
"not",
"uri",
")",
":",
"return",
"location",
"=",
"glance",
".",
"store",
".",
"swift",
".",
"StoreLocation",
"(",
"{",
"}",
")",
"if",
"to_quoted",
":",
"location",
".",... | fix the given uris embedded credentials by round-tripping with storelocation . | train | false |
def val_dump(rels, db):
    """Make a valuation from relation metadata bundles and persist it.

    :param rels: iterable of relation metadata bundles.
    :param db: base filename for the shelve database (opened with flag 'n',
        i.e. always created anew).
    """
    concepts = process_bundle(rels).values()
    valuation = make_valuation(concepts, read=True)
    db_out = shelve.open(db, u'n')
    try:
        db_out.update(valuation)
    finally:
        # Always close the shelf, even if updating fails part-way
        # (the original leaked the handle on error).
        db_out.close()
| [
"def",
"val_dump",
"(",
"rels",
",",
"db",
")",
":",
"concepts",
"=",
"process_bundle",
"(",
"rels",
")",
".",
"values",
"(",
")",
"valuation",
"=",
"make_valuation",
"(",
"concepts",
",",
"read",
"=",
"True",
")",
"db_out",
"=",
"shelve",
".",
"open",... | make a valuation from a list of relation metadata bundles and dump to persistent database . | train | false |
def table_to_file(table, file_out, fmt='', dtype='int16'):
    """Write a pyo table's samples to a sound file.

    :param table: pyo table object holding the samples.
    :param file_out: destination path; its extension may determine the format.
    :param fmt: explicit file format, or '' to infer it from ``file_out``.
    :param dtype: sample data type (default 'int16').
    :raises IOError: if pyo fails to save (e.g. permission problems).
    """
    (file_fmt, data_fmt) = _get_pyo_codes(fmt, dtype, file_out)
    try:
        pyo.savefileFromTable(table=table, path=file_out, fileformat=file_fmt, sampletype=data_fmt)
    except Exception:
        # Re-raise as IOError with a readable message instead of pyo's error.
        msg = 'could not save `{0}`; permissions or other issue?'
        raise IOError(msg.format(file_out))
| [
"def",
"table_to_file",
"(",
"table",
",",
"file_out",
",",
"fmt",
"=",
"''",
",",
"dtype",
"=",
"'int16'",
")",
":",
"(",
"file_fmt",
",",
"data_fmt",
")",
"=",
"_get_pyo_codes",
"(",
"fmt",
",",
"dtype",
",",
"file_out",
")",
"try",
":",
"pyo",
"."... | write data to file . | train | false |
def findSmallest(root):
    """Return the smallest (leftmost) node in the BST rooted at ``root``.

    :param root: non-None root node with a ``left`` attribute.
    :return: the node with no left child.
    """
    node = root
    while node.left is not None:
        node = node.left
    return node
| [
"def",
"findSmallest",
"(",
"root",
")",
":",
"if",
"(",
"root",
".",
"left",
"is",
"None",
")",
":",
"return",
"root",
"else",
":",
"return",
"findSmallest",
"(",
"root",
".",
"left",
")"
] | finds the smallest node in the subtree . | train | false |
def getKeysM(prefix=''):
    """Return the sixteen matrix keys of a 4x4 grid, row-major.

    :param prefix: optional key prefix passed through to getKeyM.
    """
    return [getKeyM(row, column, prefix)
            for row in xrange(4) for column in xrange(4)]
| [
"def",
"getKeysM",
"(",
"prefix",
"=",
"''",
")",
":",
"keysM",
"=",
"[",
"]",
"for",
"row",
"in",
"xrange",
"(",
"4",
")",
":",
"for",
"column",
"in",
"xrange",
"(",
"4",
")",
":",
"key",
"=",
"getKeyM",
"(",
"row",
",",
"column",
",",
"prefix... | get the matrix keys . | train | false |
def determineMetaclass(bases, explicit_mc=None):
    """Determine the metaclass from one or more bases and an optional
    explicit ``__metaclass__``.

    :param bases: sequence of base classes.
    :param explicit_mc: metaclass explicitly declared in the class body.
    :return: the single most-derived candidate metaclass.
    :raises TypeError: if the candidate metaclasses are incompatible.
    """
    meta = [getattr(b, '__class__', type(b)) for b in bases]
    if (explicit_mc is not None):
        # The explicit metaclass competes with the bases' metaclasses.
        meta.append(explicit_mc)
    if (len(meta) == 1):
        return meta[0]
    candidates = minimalBases(meta)
    if (not candidates):
        # Only reachable with classic (pre-new-style) classes on Python 2.
        assert (not __python3)
        return ClassType
    elif (len(candidates) > 1):
        raise TypeError('Incompatible metatypes', bases)
    return candidates[0]
| [
"def",
"determineMetaclass",
"(",
"bases",
",",
"explicit_mc",
"=",
"None",
")",
":",
"meta",
"=",
"[",
"getattr",
"(",
"b",
",",
"'__class__'",
",",
"type",
"(",
"b",
")",
")",
"for",
"b",
"in",
"bases",
"]",
"if",
"(",
"explicit_mc",
"is",
"not",
... | determine metaclass from 1+ bases and optional explicit __metaclass__ . | train | false |
def convert_package_name_or_id_to_id(package_name_or_id, context):
    """Return the package id for the given package name or id.

    Looks up by id first, then by name.

    :param package_name_or_id: dataset id or name.
    :param context: dict carrying the SQLAlchemy ``session``.
    :raises df.Invalid: if no matching dataset exists.
    """
    session = context['session']
    result = session.query(model.Package).filter_by(id=package_name_or_id).first()
    if (not result):
        # Not an id: retry the lookup treating the value as a name.
        result = session.query(model.Package).filter_by(name=package_name_or_id).first()
    if (not result):
        raise df.Invalid(('%s: %s' % (_('Not found'), _('Dataset'))))
    return result.id
| [
"def",
"convert_package_name_or_id_to_id",
"(",
"package_name_or_id",
",",
"context",
")",
":",
"session",
"=",
"context",
"[",
"'session'",
"]",
"result",
"=",
"session",
".",
"query",
"(",
"model",
".",
"Package",
")",
".",
"filter_by",
"(",
"id",
"=",
"pa... | return the package id for the given package name or id . | train | false |
def test_time3():
    """Regression test: %%time must not break assignments in the cell.

    Running a cell that creates and increments ``run`` under the %%time cell
    magic must not print a spurious "not found" error on stderr.
    """
    ip = get_ipython()
    # Make sure `run` is absent so the cell genuinely creates it.
    ip.user_ns.pop('run', None)
    with tt.AssertNotPrints('not found', channel='stderr'):
        ip.run_cell('%%time\nrun = 0\nrun += 1')
| [
"def",
"test_time3",
"(",
")",
":",
"ip",
"=",
"get_ipython",
"(",
")",
"ip",
".",
"user_ns",
".",
"pop",
"(",
"'run'",
",",
"None",
")",
"with",
"tt",
".",
"AssertNotPrints",
"(",
"'not found'",
",",
"channel",
"=",
"'stderr'",
")",
":",
"ip",
".",
... | erroneous magic function calls . | train | false |
def func_load(code, defaults=None, closure=None, globs=None):
    """Deserialize a user-defined function (inverse of ``func_dump``).

    :param code: marshalled code string, or a (code, defaults, closure)
        tuple/list produced by the serializer.
    :param defaults: default argument values for the rebuilt function.
    :param closure: closure cells for the rebuilt function.
    :param globs: globals dict to bind; this module's globals when None.
    :return: the reconstructed function object.
    """
    if isinstance(code, (tuple, list)):
        code, defaults, closure = code
    raw = code.encode('raw_unicode_escape')
    code_object = marshal.loads(raw)
    if globs is None:
        globs = globals()
    return python_types.FunctionType(
        code_object, globs, name=code_object.co_name,
        argdefs=defaults, closure=closure)
| [
"def",
"func_load",
"(",
"code",
",",
"defaults",
"=",
"None",
",",
"closure",
"=",
"None",
",",
"globs",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"code",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"(",
"code",
",",
"defaults",
",",
"c... | deserializes a user defined function . | train | true |
def ranks_from_scores(scores, rank_gap=1e-15):
    """Yield (key, rank) pairs, giving near-equal scores the same rank.

    Scores whose absolute difference from the previous score is within
    ``rank_gap`` share the previous rank; otherwise the rank jumps to the
    current position.  Non-numeric scores (including the initial None
    comparison) leave the rank unchanged.

    :param scores: iterable of (key, score) pairs, already sorted by score.
    :param rank_gap: tolerance below which two scores count as tied.
    """
    previous = None
    current_rank = 0
    for position, (key, score) in enumerate(scores):
        try:
            if abs(score - previous) > rank_gap:
                current_rank = position
        except TypeError:
            # First item (previous is None) or non-numeric scores: keep rank.
            pass
        yield key, current_rank
        previous = score
| [
"def",
"ranks_from_scores",
"(",
"scores",
",",
"rank_gap",
"=",
"1e-15",
")",
":",
"prev_score",
"=",
"None",
"rank",
"=",
"0",
"for",
"(",
"i",
",",
"(",
"key",
",",
"score",
")",
")",
"in",
"enumerate",
"(",
"scores",
")",
":",
"try",
":",
"if",... | given a sequence of tuples . | train | false |
11,236 | def onlyif_any_cmd_exists(*commands):
warnings.warn('The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0', DeprecationWarning, stacklevel=2)
for cmd in commands:
if which(cmd):
return null_deco
return skip('This test runs only if one of the commands {0} is installed'.format(commands))
| [
"def",
"onlyif_any_cmd_exists",
"(",
"*",
"commands",
")",
":",
"warnings",
".",
"warn",
"(",
"'The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0'",
",",
"DeprecationWarning",
",",
"stacklevel",
"=",
"2",
")",
"for",
"cmd",
"in",
"commands",
":",
"i... | decorator to skip test unless at least one of commands is found . | train | false |
def get_jids_filter(count, filter_find_job=True):
    """Return the most recent ``count`` jobs from the local job cache.

    Maintains a sorted sliding window of the highest (newest) jids while
    walking the cache directory.

    :param count: maximum number of jobs to return.
    :param filter_find_job: when True, skip internal saltutil.find_job jobs.
    :return: list of formatted job dicts, oldest-to-newest within the window.
    """
    keys = []
    ret = []
    for (jid, job, _, _) in _walk_through(_job_dir()):
        job = salt.utils.jid.format_jid_instance_ext(jid, job)
        if (filter_find_job and (job['Function'] == 'saltutil.find_job')):
            continue
        i = bisect.bisect(keys, jid)
        # Window is full and this jid sorts below everything kept: skip it.
        if ((len(keys) == count) and (i == 0)):
            continue
        keys.insert(i, jid)
        ret.insert(i, job)
        # Evict the oldest entry once the window exceeds `count`.
        if (len(keys) > count):
            del keys[0]
            del ret[0]
    return ret
| [
"def",
"get_jids_filter",
"(",
"count",
",",
"filter_find_job",
"=",
"True",
")",
":",
"keys",
"=",
"[",
"]",
"ret",
"=",
"[",
"]",
"for",
"(",
"jid",
",",
"job",
",",
"_",
",",
"_",
")",
"in",
"_walk_through",
"(",
"_job_dir",
"(",
")",
")",
":"... | return a list of all job ids . | train | true |
def headers_to_account_info(headers, status_int=HTTP_OK):
    """Construct a cacheable dict of account info from response headers.

    :param headers: response headers from the account server.
    :param status_int: HTTP status of the response (defaults to 200).
    :return: dict with status, container/object counts, byte usage, and the
        user/system metadata split out by _prep_headers_to_info.
    """
    (headers, meta, sysmeta) = _prep_headers_to_info(headers, 'account')
    account_info = {'status': status_int, 'container_count': headers.get('x-account-container-count'), 'total_object_count': headers.get('x-account-object-count'), 'bytes': headers.get('x-account-bytes-used'), 'meta': meta, 'sysmeta': sysmeta}
    if is_success(status_int):
        # Autocreate accounts report a fake listing; record real existence.
        account_info['account_really_exists'] = (not config_true_value(headers.get('x-backend-fake-account-listing')))
    return account_info
| [
"def",
"headers_to_account_info",
"(",
"headers",
",",
"status_int",
"=",
"HTTP_OK",
")",
":",
"(",
"headers",
",",
"meta",
",",
"sysmeta",
")",
"=",
"_prep_headers_to_info",
"(",
"headers",
",",
"'account'",
")",
"account_info",
"=",
"{",
"'status'",
":",
"... | construct a cacheable dict of account info based on response headers . | train | false |
def mutEphemeral(individual, mode):
    """Mutate the ephemeral constants of a GP tree by regenerating them.

    :param individual: GP tree (sequence of nodes) to mutate in place.
    :param mode: 'one' to regenerate a single randomly-chosen ephemeral,
        'all' to regenerate every ephemeral constant.
    :raises ValueError: for any other mode.
    :return: a one-tuple containing the mutated individual.
    """
    if mode not in ('one', 'all'):
        raise ValueError('Mode must be one of "one" or "all"')
    ephemeral_positions = [idx for idx, node in enumerate(individual)
                           if isinstance(node, Ephemeral)]
    if ephemeral_positions:
        if mode == 'one':
            ephemeral_positions = (random.choice(ephemeral_positions),)
        for idx in ephemeral_positions:
            # Instantiating the node's class draws a fresh random constant.
            individual[idx] = type(individual[idx])()
    return (individual,)
| [
"def",
"mutEphemeral",
"(",
"individual",
",",
"mode",
")",
":",
"if",
"(",
"mode",
"not",
"in",
"[",
"'one'",
",",
"'all'",
"]",
")",
":",
"raise",
"ValueError",
"(",
"'Mode must be one of \"one\" or \"all\"'",
")",
"ephemerals_idx",
"=",
"[",
"index",
"for... | this operator works on the constants of the tree *individual* . | train | false |
def notrack_this(cls):
    """Class decorator that opts ``cls`` out of plugin instance tracking.

    Marks the class with ``_no_instance_tracking`` pointing at itself; a
    marker inherited from a base class is left untouched so the original
    opt-out owner is preserved.
    """
    if not hasattr(cls, '_no_instance_tracking'):
        cls._no_instance_tracking = cls
    return cls
| [
"def",
"notrack_this",
"(",
"cls",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"cls",
",",
"'_no_instance_tracking'",
")",
")",
":",
"cls",
".",
"_no_instance_tracking",
"=",
"cls",
"return",
"cls"
] | disables instance tracking of plugin instances within plugincontext via get/get_all and similar methods . | train | false |
def x_label(epoch_axis):
    """Return the x-axis label for training plots.

    :param epoch_axis: True for an epoch axis, False for minibatches.
    """
    if epoch_axis:
        return 'Epoch'
    return 'Minibatch'
| [
"def",
"x_label",
"(",
"epoch_axis",
")",
":",
"return",
"(",
"'Epoch'",
"if",
"epoch_axis",
"else",
"'Minibatch'",
")"
] | get the x axis label depending on the boolean epoch_axis . | train | false |
def GC(seq):
    """Calculate the G+C content of ``seq`` as a percentage (0..100).

    Counts G, C and the ambiguous S code, in either case.  An empty
    sequence yields 0.0 instead of dividing by zero.

    :param seq: sequence supporting ``count`` and ``len`` (str, Seq, ...).
    """
    length = len(seq)
    if length == 0:
        return 0.0
    gc_total = sum(seq.count(base) for base in ('G', 'C', 'g', 'c', 'S', 's'))
    return gc_total * 100.0 / length
| [
"def",
"GC",
"(",
"seq",
")",
":",
"gc",
"=",
"sum",
"(",
"(",
"seq",
".",
"count",
"(",
"x",
")",
"for",
"x",
"in",
"[",
"'G'",
",",
"'C'",
",",
"'g'",
",",
"'c'",
",",
"'S'",
",",
"'s'",
"]",
")",
")",
"try",
":",
"return",
"(",
"(",
... | calculates g+c content . | train | false |
def _date_to_unix(arg):
    """Convert a datetime, timetuple, or number to an integer Unix timestamp.

    Numbers are simply truncated via ``int()``; datetimes are interpreted as
    UTC.  The calendar arithmetic assumes no leap seconds.

    :param arg: datetime, 6+-element timetuple, or int/float timestamp.
    """
    if isinstance(arg, datetime):
        arg = arg.utctimetuple()
    elif isinstance(arg, integer_types + (float,)):
        return int(arg)
    year, month, day, hour, minute, second = arg[:6]
    # Days since the epoch, counted from the first of the month's ordinal.
    days = date(year, month, 1).toordinal() - _epoch_ord + day - 1
    return ((days * 24 + hour) * 60 + minute) * 60 + second
| [
"def",
"_date_to_unix",
"(",
"arg",
")",
":",
"if",
"isinstance",
"(",
"arg",
",",
"datetime",
")",
":",
"arg",
"=",
"arg",
".",
"utctimetuple",
"(",
")",
"elif",
"isinstance",
"(",
"arg",
",",
"(",
"integer_types",
"+",
"(",
"float",
",",
")",
")",
... | converts a timetuple . | train | true |
def add_url_parameters(url, *args, **params):
    """Append query parameters to ``url`` as ``url?p1=v1&p2=v2``.

    :param url: base URL (assumed to have no existing query string).
    :param args: dicts of parameters; later dicts override keyword params.
    :param params: parameters given as keyword arguments.
    :return: the URL with an encoded query string, or unchanged when no
        parameters were supplied.
    """
    merged = dict(params)
    for extra in args:
        merged.update(extra)
    if not merged:
        return url
    return '%s?%s' % (url, urlencode(merged))
| [
"def",
"add_url_parameters",
"(",
"url",
",",
"*",
"args",
",",
"**",
"params",
")",
":",
"for",
"arg",
"in",
"args",
":",
"params",
".",
"update",
"(",
"arg",
")",
"if",
"params",
":",
"return",
"(",
"'%s?%s'",
"%",
"(",
"url",
",",
"urlencode",
"... | adds parameters to an url -> url?p1=v1&p2=v2 . | train | false |
@task
@needs('pavelib.prereqs.install_prereqs', 'pavelib.i18n.i18n_validate_gettext', 'pavelib.assets.compile_coffeescript')
@cmdopts([('verbose', 'v', "Sets 'verbose' to True")])
@timed
def i18n_extract(options):
    """Extract localizable strings from sources (paver task).

    Runs ``i18n_tool extract``, adding ``-vv`` when --verbose is given.
    """
    verbose = getattr(options, 'verbose', None)
    cmd = 'i18n_tool extract'
    if verbose:
        cmd += ' -vv'
    sh(cmd)
| [
"@",
"task",
"@",
"needs",
"(",
"'pavelib.prereqs.install_prereqs'",
",",
"'pavelib.i18n.i18n_validate_gettext'",
",",
"'pavelib.assets.compile_coffeescript'",
")",
"@",
"cmdopts",
"(",
"[",
"(",
"'verbose'",
",",
"'v'",
",",
"\"Sets 'verbose' to True\"",
")",
"]",
")",... | extract localizable strings from sources . | train | false |
def write_png_depth(filename, depth):
    """Append a special depth chunk to a PNG file, keeping IEND last.

    Overwrites the trailing IEND chunk with the depth chunk (length, tag,
    payload, CRC) and then re-appends IEND so the file stays a valid PNG.

    :param filename: path of the PNG to modify in place.
    :param depth: signed integer depth to store.
    """
    payload = struct.pack('!i', depth)
    with open(filename, 'r+b') as png:
        # Seek to just before the IEND chunk at the end of the file.
        png.seek(-LEN_IEND, 2)
        png.write(DEPTH_CHUNK_LEN + DEPTH_CHUNK_START + payload)
        # CRC covers the chunk tag plus its data, masked to 32 bits.
        crc = binascii.crc32(DEPTH_CHUNK_START + payload) & 0xffffffff
        png.write(struct.pack('!I', crc))
        png.write(IEND_CHUNK)
| [
"def",
"write_png_depth",
"(",
"filename",
",",
"depth",
")",
":",
"data",
"=",
"struct",
".",
"pack",
"(",
"'!i'",
",",
"depth",
")",
"f",
"=",
"open",
"(",
"filename",
",",
"'r+b'",
")",
"try",
":",
"f",
".",
"seek",
"(",
"(",
"-",
"LEN_IEND",
... | write the special text chunk indicating the depth to a png file . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.