id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
35,658
def list_vdirs(site, app=_DEFAULT_APP): ret = dict() pscmd = list() pscmd.append("Get-WebVirtualDirectory -Site '{0}' -Application '{1}'".format(site, app)) pscmd.append(" | Select-Object PhysicalPath, @{ Name = 'name';") pscmd.append(" Expression = { $_.path.Split('/')[-1] } }") cmd_ret = _srvmgr(func=str().join(pscmd), as_json=True) try: items = json.loads(cmd_ret['stdout'], strict=False) except ValueError: _LOG.error('Unable to parse return data as Json.') for item in items: ret[item['name']] = {'sourcepath': item['physicalPath']} if (not ret): _LOG.warning('No vdirs found in output: %s', cmd_ret) return ret
[ "def", "list_vdirs", "(", "site", ",", "app", "=", "_DEFAULT_APP", ")", ":", "ret", "=", "dict", "(", ")", "pscmd", "=", "list", "(", ")", "pscmd", ".", "append", "(", "\"Get-WebVirtualDirectory -Site '{0}' -Application '{1}'\"", ".", "format", "(", "site", ...
get all configured iis virtual directories for the specified site .
train
false
35,659
def expand_format_text(hosts, text): return direct_format_text(expand_line(text, hosts))
[ "def", "expand_format_text", "(", "hosts", ",", "text", ")", ":", "return", "direct_format_text", "(", "expand_line", "(", "text", ",", "hosts", ")", ")" ]
following redirects in links .
train
false
35,660
def _MakeQuery(query, filters, orders): clone = datastore_pb.Query() clone.CopyFrom(query) clone.clear_filter() clone.clear_order() clone.filter_list().extend(filters) clone.order_list().extend(orders) return datastore_query.Query._from_pb(clone)
[ "def", "_MakeQuery", "(", "query", ",", "filters", ",", "orders", ")", ":", "clone", "=", "datastore_pb", ".", "Query", "(", ")", "clone", ".", "CopyFrom", "(", "query", ")", "clone", ".", "clear_filter", "(", ")", "clone", ".", "clear_order", "(", ")"...
make a datastore_query .
train
false
35,661
def get_event_conditions(doctype, filters=None): from frappe.desk.reportview import build_match_conditions if (not frappe.has_permission(doctype)): frappe.throw(_(u'Not Permitted'), frappe.PermissionError) conditions = build_match_conditions(doctype) conditions = ((conditions and (u' and ' + conditions)) or u'') if filters: filters = json.loads(filters) for key in filters: if filters[key]: conditions += u'and `{0}` = "{1}"'.format(frappe.db.escape(key), frappe.db.escape(filters[key])) return conditions
[ "def", "get_event_conditions", "(", "doctype", ",", "filters", "=", "None", ")", ":", "from", "frappe", ".", "desk", ".", "reportview", "import", "build_match_conditions", "if", "(", "not", "frappe", ".", "has_permission", "(", "doctype", ")", ")", ":", "fra...
returns sql conditions with user permissions and filters for event queries .
train
false
35,665
@mobile_template('questions/{mobile/}product_list.html') def product_list(request, template): return render(request, template, {'products': Product.objects.filter(questions_locales__locale=request.LANGUAGE_CODE)})
[ "@", "mobile_template", "(", "'questions/{mobile/}product_list.html'", ")", "def", "product_list", "(", "request", ",", "template", ")", ":", "return", "render", "(", "request", ",", "template", ",", "{", "'products'", ":", "Product", ".", "objects", ".", "filte...
view to select a product to see related questions .
train
false
35,666
def _parse_record_data(entry_data): ret = {} ipv4addrs = [] aliases = [] if ('canonical' in entry_data): ret['Canonical Name'] = entry_data['canonical'] if ('ipv4addrs' in entry_data): for ipaddrs in entry_data['ipv4addrs']: ipv4addrs.append(ipaddrs['ipv4addr']) ret['IP Addresses'] = ipv4addrs if ('ipv4addr' in entry_data): ret['IP Address'] = entry_data['ipv4addr'] if ('aliases' in entry_data): for alias in entry_data['aliases']: aliases.append(alias) ret['Aliases'] = aliases if ('name' in entry_data): ret['Name'] = entry_data['name'] if ('view' in entry_data): ret['DNS View'] = entry_data['view'] if ('network_view' in entry_data): ret['Network View'] = entry_data['network_view'] if ('comment' in entry_data): ret['Comment'] = entry_data['comment'] if ('network' in entry_data): ret['Network'] = entry_data['network'] if ('_ref' in entry_data): ret['Record ID'] = entry_data['_ref'] return ret
[ "def", "_parse_record_data", "(", "entry_data", ")", ":", "ret", "=", "{", "}", "ipv4addrs", "=", "[", "]", "aliases", "=", "[", "]", "if", "(", "'canonical'", "in", "entry_data", ")", ":", "ret", "[", "'Canonical Name'", "]", "=", "entry_data", "[", "...
returns the right value data wed be interested in for the specified record type .
train
false
35,667
def InitDB(schema=None, callback=None, verify_or_create=True): assert (not hasattr(DBClient, '_instance')), 'instance already initialized' assert (schema is not None) if options.options.localdb: from local_client import LocalClient DBClient.SetInstance(LocalClient(schema, read_only=options.options.readonly_db)) else: from dynamodb_client import DynamoDBClient DBClient._instance = DynamoDBClient(schema, read_only=options.options.readonly_db) if verify_or_create: schema.VerifyOrCreate(DBClient.Instance(), callback) else: callback([])
[ "def", "InitDB", "(", "schema", "=", "None", ",", "callback", "=", "None", ",", "verify_or_create", "=", "True", ")", ":", "assert", "(", "not", "hasattr", "(", "DBClient", ",", "'_instance'", ")", ")", ",", "'instance already initialized'", "assert", "(", ...
sets the db client instance .
train
false
35,668
def is_subclass(cls, classinfo): try: return issubclass(cls, classinfo) except TypeError: return False
[ "def", "is_subclass", "(", "cls", ",", "classinfo", ")", ":", "try", ":", "return", "issubclass", "(", "cls", ",", "classinfo", ")", "except", "TypeError", ":", "return", "False" ]
a more sensible version of the issubclass builtin .
train
false
35,669
def extract_barcodes_from_mapping(labels): barcodes = {} re = compile('(\\w+) ([a-zA-Z0-9.]+) orig_bc=(\\w*) new_bc=\\w* bc_diffs=\\d+') for label in labels: tmatch = search(re, label) flowgram_id = tmatch.group(2) barcode = tmatch.group(3) barcodes[flowgram_id] = barcode return barcodes
[ "def", "extract_barcodes_from_mapping", "(", "labels", ")", ":", "barcodes", "=", "{", "}", "re", "=", "compile", "(", "'(\\\\w+) ([a-zA-Z0-9.]+) orig_bc=(\\\\w*) new_bc=\\\\w* bc_diffs=\\\\d+'", ")", "for", "label", "in", "labels", ":", "tmatch", "=", "search", "(", ...
extract barcodes from split_libraries fasta headers .
train
false
35,670
def long_token(): hash = hashlib.sha1(shortuuid.uuid()) hash.update(settings.SECRET_KEY) return hash.hexdigest()
[ "def", "long_token", "(", ")", ":", "hash", "=", "hashlib", ".", "sha1", "(", "shortuuid", ".", "uuid", "(", ")", ")", "hash", ".", "update", "(", "settings", ".", "SECRET_KEY", ")", "return", "hash", ".", "hexdigest", "(", ")" ]
generate a hash that can be used as an application secret .
train
true
35,672
def hash_password(password): return hash_password_PBKDF2(password)
[ "def", "hash_password", "(", "password", ")", ":", "return", "hash_password_PBKDF2", "(", "password", ")" ]
hash a password .
train
false
35,673
def ready(zone): ret = {'status': True} res = __salt__['cmd.run_all']('zoneadm {zone} ready'.format(zone=('-u {0}'.format(zone) if _is_uuid(zone) else '-z {0}'.format(zone)))) ret['status'] = (res['retcode'] == 0) ret['message'] = (res['stdout'] if ret['status'] else res['stderr']) ret['message'] = ret['message'].replace('zoneadm: ', '') if (ret['message'] == ''): del ret['message'] return ret
[ "def", "ready", "(", "zone", ")", ":", "ret", "=", "{", "'status'", ":", "True", "}", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'zoneadm {zone} ready'", ".", "format", "(", "zone", "=", "(", "'-u {0}'", ".", "format", "(", "zone", ")", ...
prepares a zone for running applications .
train
true
35,674
def equalContents(arr1, arr2): return (frozenset(arr1) == frozenset(arr2))
[ "def", "equalContents", "(", "arr1", ",", "arr2", ")", ":", "return", "(", "frozenset", "(", "arr1", ")", "==", "frozenset", "(", "arr2", ")", ")" ]
checks if the set of unique elements of arr1 and arr2 are equivalent .
train
false
35,676
def generate_milestone_namespace(namespace, course_key=None): if (namespace in NAMESPACE_CHOICES.values()): if (namespace == 'entrance_exams'): return '{}.{}'.format(unicode(course_key), NAMESPACE_CHOICES['ENTRANCE_EXAM'])
[ "def", "generate_milestone_namespace", "(", "namespace", ",", "course_key", "=", "None", ")", ":", "if", "(", "namespace", "in", "NAMESPACE_CHOICES", ".", "values", "(", ")", ")", ":", "if", "(", "namespace", "==", "'entrance_exams'", ")", ":", "return", "'{...
returns a specifically-formatted namespace string for the specified type .
train
false
35,677
def call_on_class_only(*args, **kwargs): raise AttributeError('This method should be called on a class object.')
[ "def", "call_on_class_only", "(", "*", "args", ",", "**", "kwargs", ")", ":", "raise", "AttributeError", "(", "'This method should be called on a class object.'", ")" ]
raise exception when load methods are called on instance .
train
false
35,678
def cigar(individual): return (((individual[0] ** 2) + (1000000.0 * sum(((gene * gene) for gene in individual)))),)
[ "def", "cigar", "(", "individual", ")", ":", "return", "(", "(", "(", "individual", "[", "0", "]", "**", "2", ")", "+", "(", "1000000.0", "*", "sum", "(", "(", "(", "gene", "*", "gene", ")", "for", "gene", "in", "individual", ")", ")", ")", ")"...
cigar test objective function .
train
false
35,679
def format_log(request, message_type, message): from django_socketio.settings import MESSAGE_LOG_FORMAT if (MESSAGE_LOG_FORMAT is None): return None now = datetime.now().replace(microsecond=0) args = dict(request.META, TYPE=message_type, MESSAGE=message, TIME=now) return ((MESSAGE_LOG_FORMAT % args) + '\n')
[ "def", "format_log", "(", "request", ",", "message_type", ",", "message", ")", ":", "from", "django_socketio", ".", "settings", "import", "MESSAGE_LOG_FORMAT", "if", "(", "MESSAGE_LOG_FORMAT", "is", "None", ")", ":", "return", "None", "now", "=", "datetime", "...
formats a log message similar to gevents pywsgi request logging .
train
true
35,680
def strFile(p, f, caseSensitive=True): buf = type(p)() buf_len = max(len(p), (2 ** (2 ** (2 ** 2)))) if (not caseSensitive): p = p.lower() while 1: r = f.read((buf_len - len(p))) if (not caseSensitive): r = r.lower() bytes_read = len(r) if (bytes_read == 0): return False l = ((len(buf) + bytes_read) - buf_len) if (l <= 0): buf = (buf + r) else: buf = (buf[l:] + r) if (buf.find(p) != (-1)): return True
[ "def", "strFile", "(", "p", ",", "f", ",", "caseSensitive", "=", "True", ")", ":", "buf", "=", "type", "(", "p", ")", "(", ")", "buf_len", "=", "max", "(", "len", "(", "p", ")", ",", "(", "2", "**", "(", "2", "**", "(", "2", "**", "2", ")...
find whether string c{p} occurs in a read()able object c{f} .
train
false
35,682
def test_suggested_multiple_column_names(completer, complete_event): text = u'SELECT id, from custom.products' position = len(u'SELECT id, ') result = set(completer.get_completions(Document(text=text, cursor_position=position), complete_event)) assert (set(result) == set(((testdata.columns(u'products', u'custom') + testdata.functions()) + list((testdata.builtin_functions() + testdata.keywords())))))
[ "def", "test_suggested_multiple_column_names", "(", "completer", ",", "complete_event", ")", ":", "text", "=", "u'SELECT id, from custom.products'", "position", "=", "len", "(", "u'SELECT id, '", ")", "result", "=", "set", "(", "completer", ".", "get_completions", "(...
suggest column and function names when selecting multiple columns from table .
train
false
35,683
def encryptARC(s): iv = helpers.randomKey(8) key = helpers.randomKey(8) arc4main = ARC4.new(key) encrypted = arc4main.encrypt(s) return (encrypted, (key, iv))
[ "def", "encryptARC", "(", "s", ")", ":", "iv", "=", "helpers", ".", "randomKey", "(", "8", ")", "key", "=", "helpers", ".", "randomKey", "(", "8", ")", "arc4main", "=", "ARC4", ".", "new", "(", "key", ")", "encrypted", "=", "arc4main", ".", "encryp...
generates a random arc key and iv .
train
false
35,684
def fix_target(target, unit): if (target == []): return (target, []) fixups = [] for (dummy, fix) in AUTOFIXES.items(): (target, fixed) = fix.fix_target(target, unit) if fixed: fixups.append(fix.name) return (target, fixups)
[ "def", "fix_target", "(", "target", ",", "unit", ")", ":", "if", "(", "target", "==", "[", "]", ")", ":", "return", "(", "target", ",", "[", "]", ")", "fixups", "=", "[", "]", "for", "(", "dummy", ",", "fix", ")", "in", "AUTOFIXES", ".", "items...
apply each autofix to the target translation .
train
false
35,685
def _set_sleep_timer_service(service): _apply_service(service, SonosDevice.set_sleep_timer, service.data[ATTR_SLEEP_TIME])
[ "def", "_set_sleep_timer_service", "(", "service", ")", ":", "_apply_service", "(", "service", ",", "SonosDevice", ".", "set_sleep_timer", ",", "service", ".", "data", "[", "ATTR_SLEEP_TIME", "]", ")" ]
set a timer .
train
false
35,686
def _FilesMatching(root, predicate=(lambda f: True)): matches = [] for (path, _, files) in os.walk(root): matches += [os.path.join(path, f) for f in files if predicate(f)] return matches
[ "def", "_FilesMatching", "(", "root", ",", "predicate", "=", "(", "lambda", "f", ":", "True", ")", ")", ":", "matches", "=", "[", "]", "for", "(", "path", ",", "_", ",", "files", ")", "in", "os", ".", "walk", "(", "root", ")", ":", "matches", "...
finds all files under the given root that match the given predicate .
train
false
35,687
def fixAPI(document, url): for node in domhelpers.findElementsWithAttribute(document, 'class', 'API'): fullname = _getAPI(node) anchor = dom.Element('a') anchor.setAttribute('href', (url % (fullname,))) anchor.setAttribute('title', fullname) while node.childNodes: child = node.childNodes[0] node.removeChild(child) anchor.appendChild(child) node.appendChild(anchor) if node.hasAttribute('base'): node.removeAttribute('base')
[ "def", "fixAPI", "(", "document", ",", "url", ")", ":", "for", "node", "in", "domhelpers", ".", "findElementsWithAttribute", "(", "document", ",", "'class'", ",", "'API'", ")", ":", "fullname", "=", "_getAPI", "(", "node", ")", "anchor", "=", "dom", ".",...
replace api references with links to api documentation .
train
false
35,688
def subscriptions_for(username, number=(-1), etag=None): return gh.subscriptions_for(username, number, etag)
[ "def", "subscriptions_for", "(", "username", ",", "number", "=", "(", "-", "1", ")", ",", "etag", "=", "None", ")", ":", "return", "gh", ".", "subscriptions_for", "(", "username", ",", "number", ",", "etag", ")" ]
iterate over repositories subscribed to by username .
train
false
35,689
def random_bytes_hex(bytes_length): return hexlify(os.urandom(bytes_length)).decode('utf-8')
[ "def", "random_bytes_hex", "(", "bytes_length", ")", ":", "return", "hexlify", "(", "os", ".", "urandom", "(", "bytes_length", ")", ")", ".", "decode", "(", "'utf-8'", ")" ]
return a hexstring of bytes_length cryptographic-friendly random bytes .
train
false
35,691
def fill_in(at, basedate, units=adatetime.units): if isinstance(at, datetime): return at args = {} for unit in units: v = getattr(at, unit) if (v is None): v = getattr(basedate, unit) args[unit] = v return fix(adatetime(**args))
[ "def", "fill_in", "(", "at", ",", "basedate", ",", "units", "=", "adatetime", ".", "units", ")", ":", "if", "isinstance", "(", "at", ",", "datetime", ")", ":", "return", "at", "args", "=", "{", "}", "for", "unit", "in", "units", ":", "v", "=", "g...
returns a copy of at with any unspecified units filled in with values from basedate .
train
false
35,692
def xml_dict(data): xml = '' ordered_items = OrderedDict(sorted(data.items())) for (k, v) in ordered_items.items(): if isinstance(v, datetime.datetime): v = date_to_str(v) elif isinstance(v, (datetime.time, datetime.date)): v = v.isoformat() if (not isinstance(v, list)): v = [v] for value in v: if isinstance(value, dict): links = xml_add_links(value) xml += ('<%s>' % k) xml += xml_dict(value) xml += links xml += ('</%s>' % k) else: xml += ('<%s>%s</%s>' % (k, utils.escape(value), k)) return xml
[ "def", "xml_dict", "(", "data", ")", ":", "xml", "=", "''", "ordered_items", "=", "OrderedDict", "(", "sorted", "(", "data", ".", "items", "(", ")", ")", ")", "for", "(", "k", ",", "v", ")", "in", "ordered_items", ".", "items", "(", ")", ":", "if...
renders a dict as xml .
train
false
35,693
def unicode2utf8(arg): if (PY2 and isinstance(arg, unicode)): return arg.encode('utf-8') return arg
[ "def", "unicode2utf8", "(", "arg", ")", ":", "if", "(", "PY2", "and", "isinstance", "(", "arg", ",", "unicode", ")", ")", ":", "return", "arg", ".", "encode", "(", "'utf-8'", ")", "return", "arg" ]
only in python 2 .
train
false
35,695
def homepage(module=None, *match, **attr): settings = current.deployment_settings all_modules = settings.modules layout = S3MainMenuDefaultLayout c = ([module] + list(match)) if ('name' in attr): name = attr['name'] attr.pop('name') else: if (module is None): module = 'default' if (module in all_modules): m = all_modules[module] name = m.name_nice else: name = module if ('f' in attr): f = attr['f'] del attr['f'] else: f = 'index' return layout(name, c=c, f=f, **attr)
[ "def", "homepage", "(", "module", "=", "None", ",", "*", "match", ",", "**", "attr", ")", ":", "settings", "=", "current", ".", "deployment_settings", "all_modules", "=", "settings", ".", "modules", "layout", "=", "S3MainMenuDefaultLayout", "c", "=", "(", ...
shortcut for module homepage menu items using the mm layout .
train
false
35,696
def getDBTables(uri=None): DB_TABLES = [] for table in DB_SCHEMA: if (table.name in TABLES_REPOSITORY): DB_TABLES.append(TABLES_REPOSITORY[table.name]) continue attrs = {'_imdbpyName': table.name, '_imdbpySchema': table, 'addIndexes': addIndexes, 'addForeignKeys': addForeignKeys} for col in table.cols: if (col.name == 'id'): continue attrs[col.name] = MAP_COLS[col.kind](**col.params) cls = type(table.name, (SQLObject,), attrs) DB_TABLES.append(cls) TABLES_REPOSITORY[table.name] = cls return DB_TABLES
[ "def", "getDBTables", "(", "uri", "=", "None", ")", ":", "DB_TABLES", "=", "[", "]", "for", "table", "in", "DB_SCHEMA", ":", "if", "(", "table", ".", "name", "in", "TABLES_REPOSITORY", ")", ":", "DB_TABLES", ".", "append", "(", "TABLES_REPOSITORY", "[", ...
return a list of classes to be used to access the database through the sqlobject orm .
train
false
35,697
def checkSum(): val = 0 for (root, dirs, files) in os.walk(os.getcwd()): for extension in EXTENSIONS: for f in fnmatch.filter(files, extension): stats = os.stat(os.path.join(root, f)) val += (stats[stat.ST_SIZE] + stats[stat.ST_MTIME]) return val
[ "def", "checkSum", "(", ")", ":", "val", "=", "0", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "os", ".", "getcwd", "(", ")", ")", ":", "for", "extension", "in", "EXTENSIONS", ":", "for", "f", "in", "fnmatc...
return a long which can be used to know if any .
train
false
35,698
def mock_tweet(): count = random.randint(70, 140) return ''.join([random.choice(string.ascii_letters) for _ in range(count)])
[ "def", "mock_tweet", "(", ")", ":", "count", "=", "random", ".", "randint", "(", "70", ",", "140", ")", "return", "''", ".", "join", "(", "[", "random", ".", "choice", "(", "string", ".", "ascii_letters", ")", "for", "_", "in", "range", "(", "count...
generate some random tweet text .
train
false
35,700
def get_poetry(address): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect(address) poem = '' while True: data = sock.recv(1024) if (not data): sock.close() break poem += data return poem
[ "def", "get_poetry", "(", "address", ")", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "connect", "(", "address", ")", "poem", "=", "''", "while", "True", ":", "data", ...
download a piece of poetry from the given address .
train
false
35,701
def saml_metadata_view(request): if (not SAMLConfiguration.is_enabled(request.site)): raise Http404 complete_url = reverse('social:complete', args=('tpa-saml',)) if (settings.APPEND_SLASH and (not complete_url.endswith('/'))): complete_url = (complete_url + '/') saml_backend = load_backend(load_strategy(request), 'tpa-saml', redirect_uri=complete_url) (metadata, errors) = saml_backend.generate_metadata_xml() if (not errors): return HttpResponse(content=metadata, content_type='text/xml') return HttpResponseServerError(content=', '.join(errors))
[ "def", "saml_metadata_view", "(", "request", ")", ":", "if", "(", "not", "SAMLConfiguration", ".", "is_enabled", "(", "request", ".", "site", ")", ")", ":", "raise", "Http404", "complete_url", "=", "reverse", "(", "'social:complete'", ",", "args", "=", "(", ...
get the service provider metadata for this edx-platform instance .
train
false
35,702
def _buildNetwork(*layers, **options): bias = (options['bias'] if ('bias' in options) else False) net = FeedForwardNetwork() layerParts = iter(layers) firstPart = iter(next(layerParts)) firstLayer = next(firstPart) net.addInputModule(firstLayer) prevLayer = firstLayer for part in chain(firstPart, layerParts): new_part = True for layer in part: net.addModule(layer) if new_part: ConnectionClass = FullConnection if bias: biasUnit = BiasUnit(('BiasUnit for %s' % layer.name)) net.addModule(biasUnit) net.addConnection(FullConnection(biasUnit, layer)) else: ConnectionClass = IdentityConnection new_part = False conn = ConnectionClass(prevLayer, layer) net.addConnection(conn) prevLayer = layer net.addOutputModule(layer) net.sortModules() return net
[ "def", "_buildNetwork", "(", "*", "layers", ",", "**", "options", ")", ":", "bias", "=", "(", "options", "[", "'bias'", "]", "if", "(", "'bias'", "in", "options", ")", "else", "False", ")", "net", "=", "FeedForwardNetwork", "(", ")", "layerParts", "=",...
this is a helper function to create different kinds of networks .
train
false
35,703
def get_infos(dir_name): infos = [i for i in iter_docstring_info(dir_name)] infos.sort(key=(lambda x: x['source'])) for (num, info) in enumerate(infos): info['num'] = num enhance_info_description(info) return infos
[ "def", "get_infos", "(", "dir_name", ")", ":", "infos", "=", "[", "i", "for", "i", "in", "iter_docstring_info", "(", "dir_name", ")", "]", "infos", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", "[", "'source'", "]", ")", ")", "for", ...
return infos .
train
false
35,704
def at(*args, **kwargs): if (len(args) < 2): return {'jobs': []} if ('tag' in kwargs): stdin = '### SALT: {0}\n{1}'.format(kwargs['tag'], ' '.join(args[1:])) else: stdin = ' '.join(args[1:]) cmd_kwargs = {'stdin': stdin, 'python_shell': False} if ('runas' in kwargs): cmd_kwargs['runas'] = kwargs['runas'] res = __salt__['cmd.run_all']('at "{timespec}"'.format(timespec=args[0]), **cmd_kwargs) if (res['retcode'] > 0): if ('bad time specification' in res['stderr']): return {'jobs': [], 'error': 'invalid timespec'} return {'jobs': [], 'error': res['stderr']} else: jobid = res['stderr'].splitlines()[1] jobid = str(jobid.split()[1]) return atq(jobid)
[ "def", "at", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "len", "(", "args", ")", "<", "2", ")", ":", "return", "{", "'jobs'", ":", "[", "]", "}", "if", "(", "'tag'", "in", "kwargs", ")", ":", "stdin", "=", "'### SALT: {0}\\n{1}...
usage: {{ dictionary|at:key}} .
train
true
35,705
@pytest.mark.usefixtures('clean_system', 'remove_additional_dirs') def test_cookiecutter_templated_context(): main.cookiecutter('tests/fake-repo-tmpl', no_input=True) assert os.path.isdir('fake-project-templated')
[ "@", "pytest", ".", "mark", ".", "usefixtures", "(", "'clean_system'", ",", "'remove_additional_dirs'", ")", "def", "test_cookiecutter_templated_context", "(", ")", ":", "main", ".", "cookiecutter", "(", "'tests/fake-repo-tmpl'", ",", "no_input", "=", "True", ")", ...
call cookiecutter() with no_input=true and templates in the cookiecutter .
train
false
35,707
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
return a form for a new genericimagemodeljob .
train
false
35,708
def load_pack_index(path): with GitFile(path, 'rb') as f: return load_pack_index_file(path, f)
[ "def", "load_pack_index", "(", "path", ")", ":", "with", "GitFile", "(", "path", ",", "'rb'", ")", "as", "f", ":", "return", "load_pack_index_file", "(", "path", ",", "f", ")" ]
load an index file by path .
train
false
35,709
def places_autocomplete_query(client, input_text, offset=None, location=None, radius=None, language=None): return _autocomplete(client, 'query', input_text, offset=offset, location=location, radius=radius, language=language)
[ "def", "places_autocomplete_query", "(", "client", ",", "input_text", ",", "offset", "=", "None", ",", "location", "=", "None", ",", "radius", "=", "None", ",", "language", "=", "None", ")", ":", "return", "_autocomplete", "(", "client", ",", "'query'", ",...
returns place predictions given a textual search query .
train
true
35,710
def create_text_record(text): opos = text.tell() text.seek(0, 2) npos = min(((opos + RECORD_SIZE), text.tell())) extra = 0 last = '' while (not last.decode(u'utf-8', u'ignore')): size = (len(last) + 1) text.seek((npos - size)) last = text.read(size) try: last.decode(u'utf-8', u'strict') except UnicodeDecodeError: prev = len(last) while True: text.seek((npos - prev)) last = text.read((len(last) + 1)) try: last.decode(u'utf-8') except UnicodeDecodeError: pass else: break extra = (len(last) - prev) text.seek(opos) data = text.read(RECORD_SIZE) overlap = text.read(extra) text.seek(npos) return (data, overlap)
[ "def", "create_text_record", "(", "text", ")", ":", "opos", "=", "text", ".", "tell", "(", ")", "text", ".", "seek", "(", "0", ",", "2", ")", "npos", "=", "min", "(", "(", "(", "opos", "+", "RECORD_SIZE", ")", ",", "text", ".", "tell", "(", ")"...
return a palmdoc record of size record_size from the text file object .
train
false
35,712
def ymax(y): y1 = y.ravel() maxy1 = np.amax(y1) e1 = np.exp((y1 - maxy1)) p1 = (e1 / np.sum(e1)) y1 = np.log((1e-20 + p1)) ix = np.argmax(y1) return (ix, y1[ix])
[ "def", "ymax", "(", "y", ")", ":", "y1", "=", "y", ".", "ravel", "(", ")", "maxy1", "=", "np", ".", "amax", "(", "y1", ")", "e1", "=", "np", ".", "exp", "(", "(", "y1", "-", "maxy1", ")", ")", "p1", "=", "(", "e1", "/", "np", ".", "sum"...
simple helper function here that takes unnormalized logprobs .
train
false
35,713
def _GenDiscoveryDocCallback(args, discovery_func=GenDiscoveryDoc): (services, doc_format, output_path, hostname) = (args.service, args.format, args.output, args.hostname) discovery_paths = discovery_func(services, doc_format, output_path, hostname=hostname) for discovery_path in discovery_paths: print ('API discovery document written to %s' % discovery_path)
[ "def", "_GenDiscoveryDocCallback", "(", "args", ",", "discovery_func", "=", "GenDiscoveryDoc", ")", ":", "(", "services", ",", "doc_format", ",", "output_path", ",", "hostname", ")", "=", "(", "args", ".", "service", ",", "args", ".", "format", ",", "args", ...
generate discovery docs to files .
train
false
35,715
def unequal_vals_constraint(A, a, B, b): return (a != b)
[ "def", "unequal_vals_constraint", "(", "A", ",", "a", ",", "B", ",", "b", ")", ":", "return", "(", "a", "!=", "b", ")" ]
a simple constraint: two neighbors must always have different values .
train
false
35,716
def from_dataset(dataset, num_examples): if (dataset.view_converter is not None): try: (V, y) = dataset.get_batch_topo(num_examples, True) except TypeError: if (isinstance(dataset, DenseDesignMatrix) and (dataset.X is None) and (not control.get_load_data())): warnings.warn("from_dataset wasn't able to make subset of dataset, using the whole thing") return DenseDesignMatrix(X=None, view_converter=dataset.view_converter) raise rval = DenseDesignMatrix(topo_view=V, y=y, y_labels=dataset.y_labels) rval.adjust_for_viewer = dataset.adjust_for_viewer else: (X, y) = dataset.get_batch_design(num_examples, True) rval = DenseDesignMatrix(X=X, y=y, y_labels=dataset.y_labels) return rval
[ "def", "from_dataset", "(", "dataset", ",", "num_examples", ")", ":", "if", "(", "dataset", ".", "view_converter", "is", "not", "None", ")", ":", "try", ":", "(", "V", ",", "y", ")", "=", "dataset", ".", "get_batch_topo", "(", "num_examples", ",", "Tru...
constructs a random subset of a densedesignmatrix parameters dataset : densedesignmatrix num_examples : int returns sub_dataset : densedesignmatrix a new dataset containing num_examples examples .
train
false
35,717
def mk_test_name(name, value, index=0): trivial_types = (type(None), bool, str, int, float) try: trivial_types += (unicode,) except NameError: pass def is_trivial(value): if isinstance(value, trivial_types): return True if isinstance(value, (list, tuple)): return all(map(is_trivial, value)) return False if (is_hash_randomized() and (not is_trivial(value))): return '{0}_{1}'.format(name, (index + 1)) try: value = str(value) except UnicodeEncodeError: value = value.encode('ascii', 'backslashreplace') test_name = '{0}_{1}_{2}'.format(name, (index + 1), value) return re.sub('\\W|^(?=\\d)', '_', test_name)
[ "def", "mk_test_name", "(", "name", ",", "value", ",", "index", "=", "0", ")", ":", "trivial_types", "=", "(", "type", "(", "None", ")", ",", "bool", ",", "str", ",", "int", ",", "float", ")", "try", ":", "trivial_types", "+=", "(", "unicode", ",",...
generate a new name for a test case .
train
false
35,718
def apply_request_extensions(request, extensions=None): if (extensions is None): extensions = request.registry.queryUtility(IRequestExtensions) if (extensions is not None): for (name, fn) in iteritems_(extensions.methods): method = fn.__get__(request, request.__class__) setattr(request, name, method) InstancePropertyHelper.apply_properties(request, extensions.descriptors)
[ "def", "apply_request_extensions", "(", "request", ",", "extensions", "=", "None", ")", ":", "if", "(", "extensions", "is", "None", ")", ":", "extensions", "=", "request", ".", "registry", ".", "queryUtility", "(", "IRequestExtensions", ")", "if", "(", "exte...
apply request extensions to an instance of :class:pyramid .
train
false
35,719
@register.filter def fix_jsi18n(extrahead): return mark_safe(unicode(extrahead).replace(u'../../../jsi18n/', reverse('admin:jsi18n')))
[ "@", "register", ".", "filter", "def", "fix_jsi18n", "(", "extrahead", ")", ":", "return", "mark_safe", "(", "unicode", "(", "extrahead", ")", ".", "replace", "(", "u'../../../jsi18n/'", ",", "reverse", "(", "'admin:jsi18n'", ")", ")", ")" ]
hack to rewrite out the jsi18n script tag from an inherited admin template .
train
false
35,721
@tempdir def test_full(): _system_call('virtualenv', '--no-site-packages', '.') _system_call('bin/easy_install', '-q', 'distribute==dev') _system_call('bin/easy_install', '-qU', 'distribute==dev') _system_call('bin/easy_install', '-q', 'pip') _system_call('bin/pip', 'install', '-q', 'zc.buildout') with open('buildout.cfg', 'w') as f: f.write(SIMPLE_BUILDOUT) with open('bootstrap.py', 'w') as f: f.write(urlopen(BOOTSTRAP).read()) _system_call('bin/python', 'bootstrap.py', '--distribute') _system_call('bin/buildout', '-q') eggs = os.listdir('eggs') eggs.sort() assert (len(eggs) == 3) assert eggs[0].startswith('distribute') assert (eggs[1:] == ['extensions-0.3-py2.6.egg', 'zc.recipe.egg-1.2.2-py2.6.egg'])
[ "@", "tempdir", "def", "test_full", "(", ")", ":", "_system_call", "(", "'virtualenv'", ",", "'--no-site-packages'", ",", "'.'", ")", "_system_call", "(", "'bin/easy_install'", ",", "'-q'", ",", "'distribute==dev'", ")", "_system_call", "(", "'bin/easy_install'", ...
virtualenv + pip + buildout .
train
false
35,722
def seq_minibatches(inputs, targets, batch_size, seq_length, stride=1): assert (len(inputs) == len(targets)) n_loads = ((batch_size * stride) + (seq_length - stride)) for start_idx in range(0, ((len(inputs) - n_loads) + 1), (batch_size * stride)): seq_inputs = np.zeros(((batch_size, seq_length) + inputs.shape[1:]), dtype=inputs.dtype) seq_targets = np.zeros(((batch_size, seq_length) + targets.shape[1:]), dtype=targets.dtype) for b_idx in xrange(batch_size): start_seq_idx = (start_idx + (b_idx * stride)) end_seq_idx = (start_seq_idx + seq_length) seq_inputs[b_idx] = inputs[start_seq_idx:end_seq_idx] seq_targets[b_idx] = targets[start_seq_idx:end_seq_idx] flatten_inputs = seq_inputs.reshape((((-1),) + inputs.shape[1:])) flatten_targets = seq_targets.reshape((((-1),) + targets.shape[1:])) (yield (flatten_inputs, flatten_targets))
[ "def", "seq_minibatches", "(", "inputs", ",", "targets", ",", "batch_size", ",", "seq_length", ",", "stride", "=", "1", ")", ":", "assert", "(", "len", "(", "inputs", ")", "==", "len", "(", "targets", ")", ")", "n_loads", "=", "(", "(", "batch_size", ...
generate a generator that return a batch of sequence inputs and targets .
train
true
35,724
def change_file_creation_mask(mask): try: os.umask(mask) except Exception as exc: error = DaemonOSEnvironmentError(('Unable to change file creation mask (%(exc)s)' % vars())) raise error
[ "def", "change_file_creation_mask", "(", "mask", ")", ":", "try", ":", "os", ".", "umask", "(", "mask", ")", "except", "Exception", "as", "exc", ":", "error", "=", "DaemonOSEnvironmentError", "(", "(", "'Unable to change file creation mask (%(exc)s)'", "%", "vars"...
change the file creation mask for this process .
train
false
35,726
def write_(path, key, value, profile=None): result = _query('POST', path, profile=profile, data=json.dumps({key: value})) return read_(path, key, profile)
[ "def", "write_", "(", "path", ",", "key", ",", "value", ",", "profile", "=", "None", ")", ":", "result", "=", "_query", "(", "'POST'", ",", "path", ",", "profile", "=", "profile", ",", "data", "=", "json", ".", "dumps", "(", "{", "key", ":", "val...
set a key/value pair in the vault service .
train
false
35,728
def endmsg(): print " [*] Your payload files have been generated, don't get caught!" print helpers.color(" [!] And don't submit samples to any online scanner! ;)\n", yellow=True)
[ "def", "endmsg", "(", ")", ":", "print", "\" [*] Your payload files have been generated, don't get caught!\"", "print", "helpers", ".", "color", "(", "\" [!] And don't submit samples to any online scanner! ;)\\n\"", ",", "yellow", "=", "True", ")" ]
print the exit message .
train
false
35,729
def retrieve_auth(): def get_kwargs(envargs): return dict([(arg.kwarg, os.environ.get(arg.envarg)) for arg in envargs]) mc_kwargs = get_kwargs(mc_envargs) mm_kwargs = get_kwargs(mm_envargs) if (not all([mc_kwargs[arg] for arg in (u'email', u'password', u'android_id')])): if os.environ.get(u'TRAVIS'): print(u'on Travis but could not read auth from environ; quitting.') sys.exit(1) mc_kwargs.update(zip([u'email', u'password', u'android_id'], prompt_for_mc_auth())) if (mm_kwargs[u'oauth_credentials'] is None): if (not os.path.isfile(OAUTH_FILEPATH)): raise ValueError(u'You must have oauth credentials stored at the default path by Musicmanager.perform_oauth prior to running.') del mm_kwargs[u'oauth_credentials'] else: mm_kwargs[u'oauth_credentials'] = credentials_from_refresh_token(mm_kwargs[u'oauth_credentials']) return (mc_kwargs, mm_kwargs)
[ "def", "retrieve_auth", "(", ")", ":", "def", "get_kwargs", "(", "envargs", ")", ":", "return", "dict", "(", "[", "(", "arg", ".", "kwarg", ",", "os", ".", "environ", ".", "get", "(", "arg", ".", "envarg", ")", ")", "for", "arg", "in", "envargs", ...
searches the env for auth .
train
false
35,732
@pytest.mark.skipif('no_real_s3_credentials()') def test_301_redirect(): import boto.s3.connection bucket_name = bucket_name_mangle('wal-e-test-301-redirect') with pytest.raises(boto.exception.S3ResponseError) as e: with FreshBucket(bucket_name, calling_format=OrdinaryCallingFormat()) as fb: fb.create(location='us-west-1') assert (e.value.status == 301)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "'no_real_s3_credentials()'", ")", "def", "test_301_redirect", "(", ")", ":", "import", "boto", ".", "s3", ".", "connection", "bucket_name", "=", "bucket_name_mangle", "(", "'wal-e-test-301-redirect'", ")", "with", ...
integration test for bucket naming issues this test .
train
false
35,734
def nti(s): if (s[0] != chr(128)): try: n = int((nts(s) or '0'), 8) except ValueError: raise HeaderError('invalid header') else: n = 0L for i in xrange((len(s) - 1)): n <<= 8 n += ord(s[(i + 1)]) return n
[ "def", "nti", "(", "s", ")", ":", "if", "(", "s", "[", "0", "]", "!=", "chr", "(", "128", ")", ")", ":", "try", ":", "n", "=", "int", "(", "(", "nts", "(", "s", ")", "or", "'0'", ")", ",", "8", ")", "except", "ValueError", ":", "raise", ...
convert a number field to a python number .
train
false
35,735
def fourier_shift(input, shift, n=(-1), axis=(-1), output=None): input = numpy.asarray(input) (output, return_value) = _get_output_fourier_complex(output, input) axis = _ni_support._check_axis(axis, input.ndim) shifts = _ni_support._normalize_sequence(shift, input.ndim) shifts = numpy.asarray(shifts, dtype=numpy.float64) if (not shifts.flags.contiguous): shifts = shifts.copy() _nd_image.fourier_shift(input, shifts, n, axis, output) return return_value
[ "def", "fourier_shift", "(", "input", ",", "shift", ",", "n", "=", "(", "-", "1", ")", ",", "axis", "=", "(", "-", "1", ")", ",", "output", "=", "None", ")", ":", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "(", "output", ",", "...
multi-dimensional fourier shift filter .
train
false
35,738
def popValue(): return getCurrentThreadData().valueStack.pop()
[ "def", "popValue", "(", ")", ":", "return", "getCurrentThreadData", "(", ")", ".", "valueStack", ".", "pop", "(", ")" ]
pop value from the stack .
train
false
35,740
def _children_with_tag(element, tag): return itertools.chain(element.iterfind((_OLD_NAMESPACE_PREFIX + tag)), element.iterfind((_NEW_NAMESPACE_PREFIX + tag)))
[ "def", "_children_with_tag", "(", "element", ",", "tag", ")", ":", "return", "itertools", ".", "chain", "(", "element", ".", "iterfind", "(", "(", "_OLD_NAMESPACE_PREFIX", "+", "tag", ")", ")", ",", "element", ".", "iterfind", "(", "(", "_NEW_NAMESPACE_PREFI...
returns child elements of the given element with the given tag .
train
false
35,742
def format_time(seconds, count=3, accuracy=6, simple=False): if simple: periods = [('c', ((((60 * 60) * 24) * 365) * 100)), ('de', ((((60 * 60) * 24) * 365) * 10)), ('y', (((60 * 60) * 24) * 365)), ('mo', (((60 * 60) * 24) * 30)), ('d', ((60 * 60) * 24)), ('h', (60 * 60)), ('m', 60), ('s', 1)] else: periods = [(('century', 'centuries'), ((((60 * 60) * 24) * 365) * 100)), (('decade', 'decades'), ((((60 * 60) * 24) * 365) * 10)), (('year', 'years'), (((60 * 60) * 24) * 365)), (('month', 'months'), (((60 * 60) * 24) * 30)), (('day', 'days'), ((60 * 60) * 24)), (('hour', 'hours'), (60 * 60)), (('minute', 'minutes'), 60), (('second', 'seconds'), 1)] periods = periods[(- accuracy):] strings = [] i = 0 for (period_name, period_seconds) in periods: if (i < count): if (seconds > period_seconds): (period_value, seconds) = divmod(seconds, period_seconds) i += 1 if simple: strings.append('{}{}'.format(period_value, period_name)) elif (period_value == 1): strings.append('{} {}'.format(period_value, period_name[0])) else: strings.append('{} {}'.format(period_value, period_name[1])) else: break if simple: return ' '.join(strings) else: return formatting.get_text_list(strings, 'and')
[ "def", "format_time", "(", "seconds", ",", "count", "=", "3", ",", "accuracy", "=", "6", ",", "simple", "=", "False", ")", ":", "if", "simple", ":", "periods", "=", "[", "(", "'c'", ",", "(", "(", "(", "(", "60", "*", "60", ")", "*", "24", ")...
returns a timestamp formatted from given time using separators .
train
false
35,744
def _setLayerRule(layer, rule=None): layer.ClearField('include') layer.ClearField('exclude') if (rule is not None): layer.include.add().CopyFrom(rule)
[ "def", "_setLayerRule", "(", "layer", ",", "rule", "=", "None", ")", ":", "layer", ".", "ClearField", "(", "'include'", ")", "layer", ".", "ClearField", "(", "'exclude'", ")", "if", "(", "rule", "is", "not", "None", ")", ":", "layer", ".", "include", ...
set a new include rule for this layer if rule is none .
train
false
35,745
def get_module_source_path(modname, basename=None): srcpath = get_module_path(modname) parentdir = osp.join(srcpath, osp.pardir) if osp.isfile(parentdir): srcpath = osp.abspath(osp.join(osp.join(parentdir, osp.pardir), modname)) if (basename is not None): srcpath = osp.abspath(osp.join(srcpath, basename)) return srcpath
[ "def", "get_module_source_path", "(", "modname", ",", "basename", "=", "None", ")", ":", "srcpath", "=", "get_module_path", "(", "modname", ")", "parentdir", "=", "osp", ".", "join", "(", "srcpath", ",", "osp", ".", "pardir", ")", "if", "osp", ".", "isfi...
return module *modname* source path if *basename* is specified .
train
true
35,746
def to_native_str(text, encoding=None, errors='strict'): if six.PY2: return to_bytes(text, encoding, errors) else: return to_unicode(text, encoding, errors)
[ "def", "to_native_str", "(", "text", ",", "encoding", "=", "None", ",", "errors", "=", "'strict'", ")", ":", "if", "six", ".", "PY2", ":", "return", "to_bytes", "(", "text", ",", "encoding", ",", "errors", ")", "else", ":", "return", "to_unicode", "(",...
return str representation of text .
train
false
35,747
def json_load(filename): with open(filename, u'r') as fh: return json.load(fh, object_hook=_json_decode)
[ "def", "json_load", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "u'r'", ")", "as", "fh", ":", "return", "json", ".", "load", "(", "fh", ",", "object_hook", "=", "_json_decode", ")" ]
loads a data structure as json from the named file .
train
false
35,748
def secho(message=None, file=None, nl=True, err=False, color=None, **styles): if (message is not None): message = style(message, **styles) return echo(message, file=file, nl=nl, err=err, color=color)
[ "def", "secho", "(", "message", "=", "None", ",", "file", "=", "None", ",", "nl", "=", "True", ",", "err", "=", "False", ",", "color", "=", "None", ",", "**", "styles", ")", ":", "if", "(", "message", "is", "not", "None", ")", ":", "message", "...
this function combines :func:echo and :func:style into one call .
train
true
35,749
def write_mergecells(worksheet): cells = worksheet._merged_cells if (not cells): return merge = Element('mergeCells', count=('%d' % len(cells))) for range_string in cells: merge.append(Element('mergeCell', ref=range_string)) return merge
[ "def", "write_mergecells", "(", "worksheet", ")", ":", "cells", "=", "worksheet", ".", "_merged_cells", "if", "(", "not", "cells", ")", ":", "return", "merge", "=", "Element", "(", "'mergeCells'", ",", "count", "=", "(", "'%d'", "%", "len", "(", "cells",...
write merged cells to xml .
train
false
35,752
def ULTOSC(barDs, count, timeperiod1=(- (2 ** 31)), timeperiod2=(- (2 ** 31)), timeperiod3=(- (2 ** 31))): return call_talib_with_hlc(barDs, count, talib.ULTOSC, timeperiod1, timeperiod2, timeperiod3)
[ "def", "ULTOSC", "(", "barDs", ",", "count", ",", "timeperiod1", "=", "(", "-", "(", "2", "**", "31", ")", ")", ",", "timeperiod2", "=", "(", "-", "(", "2", "**", "31", ")", ")", ",", "timeperiod3", "=", "(", "-", "(", "2", "**", "31", ")", ...
ultimate oscillator .
train
false
35,755
def network_get_all_by_instance(context, instance_id): return IMPL.network_get_all_by_instance(context, instance_id)
[ "def", "network_get_all_by_instance", "(", "context", ",", "instance_id", ")", ":", "return", "IMPL", ".", "network_get_all_by_instance", "(", "context", ",", "instance_id", ")" ]
get all networks by instance id or raise if none exist .
train
false
35,756
@register.inclusion_tag('inclusion.html', takes_context=True) def inclusion_params_and_context(context, arg): return {'result': ('inclusion_params_and_context - Expected result (context value: %s): %s' % (context['value'], arg))}
[ "@", "register", ".", "inclusion_tag", "(", "'inclusion.html'", ",", "takes_context", "=", "True", ")", "def", "inclusion_params_and_context", "(", "context", ",", "arg", ")", ":", "return", "{", "'result'", ":", "(", "'inclusion_params_and_context - Expected result (...
expected inclusion_params_and_context __doc__ .
train
false
35,757
def _remove_persistent_module(mod, comment): conf = _get_modules_conf() mod_name = _strip_module_name(mod) if ((not mod_name) or (mod_name not in mod_list(True))): return set() escape_mod = re.escape(mod) if comment: __salt__['file.comment'](conf, '^[ DCTB ]*{0}[ DCTB ]?'.format(escape_mod)) else: __salt__['file.sed'](conf, '^[ DCTB ]*{0}[ DCTB ]?'.format(escape_mod), '') return set([mod_name])
[ "def", "_remove_persistent_module", "(", "mod", ",", "comment", ")", ":", "conf", "=", "_get_modules_conf", "(", ")", "mod_name", "=", "_strip_module_name", "(", "mod", ")", "if", "(", "(", "not", "mod_name", ")", "or", "(", "mod_name", "not", "in", "mod_l...
remove module from configuration file .
train
true
35,758
def _generate_storage_paths(app_id): if (sys.platform == 'win32'): user_format = '' else: try: user_name = getpass.getuser() except Exception: user_format = '' else: user_format = ('.%s' % user_name) tempdir = tempfile.gettempdir() (yield os.path.join(tempdir, ('appengine.%s%s' % (app_id, user_format)))) for i in itertools.count(1): (yield os.path.join(tempdir, ('appengine.%s%s.%d' % (app_id, user_format, i))))
[ "def", "_generate_storage_paths", "(", "app_id", ")", ":", "if", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "user_format", "=", "''", "else", ":", "try", ":", "user_name", "=", "getpass", ".", "getuser", "(", ")", "except", "Exception", ":",...
yield an infinite sequence of possible storage paths .
train
false
35,759
def get_sw_version(strip_build_num=False): try: from freenasOS import Configuration except ImportError: Configuration = None global _VERSION if (_VERSION is None): if Configuration: conf = Configuration.Configuration() sys_mani = conf.SystemManifest() if sys_mani: _VERSION = sys_mani.Sequence() if (_VERSION is None): with open(VERSION_FILE) as fd: _VERSION = fd.read().strip() if strip_build_num: return _VERSION.split(' ')[0] return _VERSION
[ "def", "get_sw_version", "(", "strip_build_num", "=", "False", ")", ":", "try", ":", "from", "freenasOS", "import", "Configuration", "except", "ImportError", ":", "Configuration", "=", "None", "global", "_VERSION", "if", "(", "_VERSION", "is", "None", ")", ":"...
return the full version string .
train
false
35,760
def _float_ones_like(x): dtype = x.type.dtype if (dtype not in tensor.float_dtypes): dtype = theano.config.floatX return tensor.ones_like(x, dtype=dtype)
[ "def", "_float_ones_like", "(", "x", ")", ":", "dtype", "=", "x", ".", "type", ".", "dtype", "if", "(", "dtype", "not", "in", "tensor", ".", "float_dtypes", ")", ":", "dtype", "=", "theano", ".", "config", ".", "floatX", "return", "tensor", ".", "one...
like ones_like .
train
false
35,761
def py_factorial(n): inner = outer = 1 for i in reversed(range(n.bit_length())): inner *= partial_product((((n >> (i + 1)) + 1) | 1), (((n >> i) + 1) | 1)) outer *= inner return (outer << (n - count_set_bits(n)))
[ "def", "py_factorial", "(", "n", ")", ":", "inner", "=", "outer", "=", "1", "for", "i", "in", "reversed", "(", "range", "(", "n", ".", "bit_length", "(", ")", ")", ")", ":", "inner", "*=", "partial_product", "(", "(", "(", "(", "n", ">>", "(", ...
factorial of nonnegative integer n .
train
false
35,762
def _is_match(tagged_phrase, cfg): copy = list(tagged_phrase) merge = True while merge: merge = False for i in range((len(copy) - 1)): (first, second) = (copy[i], copy[(i + 1)]) key = (first[1], second[1]) value = cfg.get(key, None) if value: merge = True copy.pop(i) copy.pop(i) match = u'{0} {1}'.format(first[0], second[0]) pos = value copy.insert(i, (match, pos)) break match = any([(t[1] in (u'NNP', u'NNI')) for t in copy]) return match
[ "def", "_is_match", "(", "tagged_phrase", ",", "cfg", ")", ":", "copy", "=", "list", "(", "tagged_phrase", ")", "merge", "=", "True", "while", "merge", ":", "merge", "=", "False", "for", "i", "in", "range", "(", "(", "len", "(", "copy", ")", "-", "...
return whether or not a tagged phrases matches a context-free grammar .
train
false
35,763
def make_alternating_boolean_array(shape, first_value=True): if (len(shape) != 2): raise ValueError('Shape must be 2-dimensional. Given shape was {}'.format(shape)) alternating = np.empty(shape, dtype=np.bool) for row in alternating: row[::2] = first_value row[1::2] = (not first_value) first_value = (not first_value) return alternating
[ "def", "make_alternating_boolean_array", "(", "shape", ",", "first_value", "=", "True", ")", ":", "if", "(", "len", "(", "shape", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "'Shape must be 2-dimensional. Given shape was {}'", ".", "format", "(", "shap...
create a 2d numpy array with the given shape containing alternating values of false .
train
false
35,764
def getargspec(obj): if inspect.isfunction(obj): func_obj = obj elif inspect.ismethod(obj): func_obj = obj.__func__ else: raise TypeError('arg is not a Python function') (args, varargs, varkw) = inspect.getargs(func_obj.__code__) return (args, varargs, varkw, func_obj.__defaults__)
[ "def", "getargspec", "(", "obj", ")", ":", "if", "inspect", ".", "isfunction", "(", "obj", ")", ":", "func_obj", "=", "obj", "elif", "inspect", ".", "ismethod", "(", "obj", ")", ":", "func_obj", "=", "obj", ".", "__func__", "else", ":", "raise", "Typ...
get the names and default values of a functions arguments .
train
false
35,765
def user_list(database=None, user=None, password=None, host=None, port=None): client = _client(user=user, password=password, host=host, port=port) if (not database): return client.get_list_cluster_admins() client.switch_database(database) return client.get_list_users()
[ "def", "user_list", "(", "database", "=", "None", ",", "user", "=", "None", ",", "password", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "client", "=", "_client", "(", "user", "=", "user", ",", "password", "=", "passw...
return a list of users on a mysql server cli example: .
train
true
35,766
def get_cycles(graph_dict, vertices=None): if (not graph_dict): return () result = [] if (vertices is None): vertices = graph_dict.keys() for vertice in vertices: _get_cycles(graph_dict, [], set(), result, vertice) return result
[ "def", "get_cycles", "(", "graph_dict", ",", "vertices", "=", "None", ")", ":", "if", "(", "not", "graph_dict", ")", ":", "return", "(", ")", "result", "=", "[", "]", "if", "(", "vertices", "is", "None", ")", ":", "vertices", "=", "graph_dict", ".", ...
given a dictionary representing an ordered graph .
train
true
35,767
def get_save_notes(dire): return read_in(os.path.join(dire, 'CHANGES_SINCE'))
[ "def", "get_save_notes", "(", "dire", ")", ":", "return", "read_in", "(", "os", ".", "path", ".", "join", "(", "dire", ",", "'CHANGES_SINCE'", ")", ")" ]
read save notes .
train
false
35,768
def package_check(pkg_name, version=None, app='pandas', checker=LooseVersion, exc_failed_import=ImportError, exc_failed_check=RuntimeError): if app: msg = ('%s requires %s' % (app, pkg_name)) else: msg = ('module requires %s' % pkg_name) if version: msg += (' with version >= %s' % (version,)) try: mod = __import__(pkg_name) except ImportError: raise exc_failed_import(msg) if (not version): return try: have_version = mod.__version__ except AttributeError: raise exc_failed_check(('Cannot find version for %s' % pkg_name)) if (checker(have_version) < checker(version)): raise exc_failed_check(msg)
[ "def", "package_check", "(", "pkg_name", ",", "version", "=", "None", ",", "app", "=", "'pandas'", ",", "checker", "=", "LooseVersion", ",", "exc_failed_import", "=", "ImportError", ",", "exc_failed_check", "=", "RuntimeError", ")", ":", "if", "app", ":", "m...
check that the minimal version of the required package is installed .
train
false
35,770
def _detection_limit_index(obs, cohn): if (cohn.shape[0] > 0): (index,) = numpy.where((cohn['lower_dl'] <= obs)) det_limit_index = index[(-1)] else: det_limit_index = 0 return det_limit_index
[ "def", "_detection_limit_index", "(", "obs", ",", "cohn", ")", ":", "if", "(", "cohn", ".", "shape", "[", "0", "]", ">", "0", ")", ":", "(", "index", ",", ")", "=", "numpy", ".", "where", "(", "(", "cohn", "[", "'lower_dl'", "]", "<=", "obs", "...
locates the corresponding detection limit for each observation .
train
false
35,771
def memcache_lock(prefix, expires=(60 * 60)): def decorator(func): @functools.wraps(func) def wrapper(self, *args, **kwargs): name = '_'.join(((prefix, func.__name__) + args)) lock = MemcacheLock(name, expires=expires) if lock.locked(): log.warning(('Lock %s locked; ignoring call.' % name)) return try: lock.acquire() except MemcacheLockException: log.warning(('Aborting %s; lock acquisition failed.' % name)) return else: try: return func(self, *args, **kwargs) finally: lock.release() return wrapper return decorator
[ "def", "memcache_lock", "(", "prefix", ",", "expires", "=", "(", "60", "*", "60", ")", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*...
decorator that only allows one instance of the same command to run at a time .
train
false
35,772
def formstyle_inline(form, fields): if (len(fields) != 2): raise RuntimeError('Not possible') (id, label, controls, help) = fields[0] submit_button = fields[1][2] return CAT(DIV(controls, _style='display:inline'), submit_button)
[ "def", "formstyle_inline", "(", "form", ",", "fields", ")", ":", "if", "(", "len", "(", "fields", ")", "!=", "2", ")", ":", "raise", "RuntimeError", "(", "'Not possible'", ")", "(", "id", ",", "label", ",", "controls", ",", "help", ")", "=", "fields"...
divs only .
train
false
35,773
@handle_response_format @treeio_login_required def document_edit(request, document_id, response_format='html'): document = get_object_or_404(Document, pk=document_id) if (not request.user.profile.has_permission(document, mode='w')): return user_denied(request, message="You don't have access to this Document") if request.POST: if ('cancel' not in request.POST): form = DocumentForm(request.user.profile, None, request.POST, instance=document) if form.is_valid(): document = form.save() return HttpResponseRedirect(reverse('documents_document_view', args=[document.id])) else: return HttpResponseRedirect(reverse('documents_document_view', args=[document.id])) else: form = DocumentForm(request.user.profile, None, instance=document) context = _get_default_context(request) context.update({'form': form, 'document': document}) return render_to_response('documents/document_edit', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "document_edit", "(", "request", ",", "document_id", ",", "response_format", "=", "'html'", ")", ":", "document", "=", "get_object_or_404", "(", "Document", ",", "pk", "=", "document_id", ")", "if...
document edit page .
train
false
35,774
def key_pair_count_by_user(context, user_id): return IMPL.key_pair_count_by_user(context, user_id)
[ "def", "key_pair_count_by_user", "(", "context", ",", "user_id", ")", ":", "return", "IMPL", ".", "key_pair_count_by_user", "(", "context", ",", "user_id", ")" ]
count number of key pairs for the given user id .
train
false
35,777
def renew_by(name, window=None): return _renew_by(name, window).isoformat()
[ "def", "renew_by", "(", "name", ",", "window", "=", "None", ")", ":", "return", "_renew_by", "(", "name", ",", "window", ")", ".", "isoformat", "(", ")" ]
date in iso format when a certificate should first be renewed .
train
false
35,778
def http_date(timestamp=None): return _dump_date(timestamp, ' ')
[ "def", "http_date", "(", "timestamp", "=", "None", ")", ":", "return", "_dump_date", "(", "timestamp", ",", "' '", ")" ]
formats the time to match the rfc1123 date format .
train
false
35,780
def libvlc_media_get_stats(p_md, p_stats): f = (_Cfunctions.get('libvlc_media_get_stats', None) or _Cfunction('libvlc_media_get_stats', ((1,), (1,)), None, ctypes.c_int, Media, ctypes.POINTER(MediaStats))) return f(p_md, p_stats)
[ "def", "libvlc_media_get_stats", "(", "p_md", ",", "p_stats", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_get_stats'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_get_stats'", ",", "(", "(", "1", ",", ")", ",", "("...
get the current statistics about the media .
train
true
35,781
def _get_proxy_auth(proxy_settings): proxy_username = None proxy_password = None proxy_username = os.environ.get('proxy-username') if (not proxy_username): proxy_username = os.environ.get('proxy_username') proxy_password = os.environ.get('proxy-password') if (not proxy_password): proxy_password = os.environ.get('proxy_password') if (not proxy_username): if ('@' in proxy_settings): protocol_and_proxy_auth = proxy_settings.split('@')[0].split(':') if (len(protocol_and_proxy_auth) == 3): proxy_username = protocol_and_proxy_auth[1].lstrip('/') proxy_password = protocol_and_proxy_auth[2] elif (len(protocol_and_proxy_auth) == 2): proxy_username = protocol_and_proxy_auth[0] proxy_password = protocol_and_proxy_auth[1] if proxy_username: user_auth = base64.encodestring(('%s:%s' % (proxy_username, proxy_password))) return ('Basic %s\r\n' % user_auth.strip()) else: return ''
[ "def", "_get_proxy_auth", "(", "proxy_settings", ")", ":", "proxy_username", "=", "None", "proxy_password", "=", "None", "proxy_username", "=", "os", ".", "environ", ".", "get", "(", "'proxy-username'", ")", "if", "(", "not", "proxy_username", ")", ":", "proxy...
returns proxy authentication string for header .
train
false
35,784
def make_params(**kwargs): return {k: v for (k, v) in kwargs.items() if (v is not None)}
[ "def", "make_params", "(", "**", "kwargs", ")", ":", "return", "{", "k", ":", "v", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "items", "(", ")", "if", "(", "v", "is", "not", "None", ")", "}" ]
helper to create a params dict .
train
false
35,785
def test_iht_fit_sample_wrong_class_obj(): from sklearn.cluster import KMeans est = KMeans() iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED) assert_raises(ValueError, iht.fit_sample, X, Y)
[ "def", "test_iht_fit_sample_wrong_class_obj", "(", ")", ":", "from", "sklearn", ".", "cluster", "import", "KMeans", "est", "=", "KMeans", "(", ")", "iht", "=", "InstanceHardnessThreshold", "(", "estimator", "=", "est", ",", "random_state", "=", "RND_SEED", ")", ...
test either if an error is raised while passing a wrong classifier object .
train
false
35,787
def _datetime_to_json(value): if isinstance(value, datetime.datetime): value = _datetime_to_rfc3339(value) return value
[ "def", "_datetime_to_json", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "value", "=", "_datetime_to_rfc3339", "(", "value", ")", "return", "value" ]
coerce value to an json-compatible representation .
train
false
35,788
def is_ccx_course(course_key): return (isinstance(course_key, CCXLocator) or isinstance(course_key, CCXBlockUsageLocator))
[ "def", "is_ccx_course", "(", "course_key", ")", ":", "return", "(", "isinstance", "(", "course_key", ",", "CCXLocator", ")", "or", "isinstance", "(", "course_key", ",", "CCXBlockUsageLocator", ")", ")" ]
check whether the course locator maps to a ccx course; this is important because we dont allow access to ccx courses in studio .
train
false
35,790
def password_data(username, password): return {'username': username, 'password': hashlib.sha1(password.encode('utf-8')).hexdigest(), 'passwordMd5': hashlib.md5(password.encode('utf-8')).hexdigest()}
[ "def", "password_data", "(", "username", ",", "password", ")", ":", "return", "{", "'username'", ":", "username", ",", "'password'", ":", "hashlib", ".", "sha1", "(", "password", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", ",", ...
returns a dict with username and its encoded password .
train
false
35,791
def get_navigator_server_url(): return get_conf().get(_CONF_NAVIGATOR_SERVER_URL, 'http://localhost:7187')
[ "def", "get_navigator_server_url", "(", ")", ":", "return", "get_conf", "(", ")", ".", "get", "(", "_CONF_NAVIGATOR_SERVER_URL", ",", "'http://localhost:7187'", ")" ]
returns the navigator .
train
false
35,792
def _merge_flags(new_flags, old_flags=None, conf='any'): if (not old_flags): old_flags = [] args = [old_flags, new_flags] if (conf == 'accept_keywords'): tmp = (new_flags + [i for i in old_flags if _check_accept_keywords(new_flags, i)]) else: tmp = portage.flatten(args) flags = {} for flag in tmp: if (flag[0] == '-'): flags[flag[1:]] = False else: flags[flag] = True tmp = [] for (key, val) in six.iteritems(flags): if val: tmp.append(key) else: tmp.append(('-' + key)) tmp.sort(cmp=(lambda x, y: cmp(x.lstrip('-'), y.lstrip('-')))) return tmp
[ "def", "_merge_flags", "(", "new_flags", ",", "old_flags", "=", "None", ",", "conf", "=", "'any'", ")", ":", "if", "(", "not", "old_flags", ")", ":", "old_flags", "=", "[", "]", "args", "=", "[", "old_flags", ",", "new_flags", "]", "if", "(", "conf",...
merges multiple lists of flags removing duplicates and resolving conflicts giving priority to lasts lists .
train
true
35,793
def pmonitorTest(N=3, seconds=10): topo = SingleSwitchTopo(N) net = Mininet(topo) net.start() hosts = net.hosts info('Starting test...\n') server = hosts[0] popens = {} for h in hosts: popens[h] = h.popen('ping', server.IP()) info('Monitoring output for', seconds, 'seconds\n') endTime = (time() + seconds) for (h, line) in pmonitor(popens, timeoutms=500): if h: info(('<%s>: %s' % (h.name, line))) if (time() >= endTime): for p in popens.values(): p.send_signal(SIGINT) net.stop()
[ "def", "pmonitorTest", "(", "N", "=", "3", ",", "seconds", "=", "10", ")", ":", "topo", "=", "SingleSwitchTopo", "(", "N", ")", "net", "=", "Mininet", "(", "topo", ")", "net", ".", "start", "(", ")", "hosts", "=", "net", ".", "hosts", "info", "("...
run pings and monitor multiple hosts using pmonitor .
train
false