function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def dict_merge(a, b):
    """Return a new dict with the items of ``a`` overridden by ``b``.

    Neither input is modified; ``b``'s values win on key collisions.
    """
    merged = a.copy()
    merged.update(b)
    return merged
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def swapcase(args):
    """ Swaps the case of the first letter of the argument """
    # NOTE(review): the body is missing in this snapshot — as written the
    # function consists only of its docstring and implicitly returns None.
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def tokenize(term):
    """Very simple tokenizer: split on whitespace, but keep any
    parenthesised group ``(...)`` together as one token.

    Args:
        term: the query string to split.

    Returns:
        List of tokens; parenthesised groups are returned verbatim,
        including the parentheses.
    """
    # Use a raw string: '\s' in a plain string is an invalid escape
    # sequence (DeprecationWarning since 3.6, SyntaxWarning in 3.12).
    return re.findall(r'[^\s(]+|\([^)]*\)', term)
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def merge_result(self):
    """ Returns a merged object (similar to UNION SELECT) """
    # NOTE(review): implementation not present in this snapshot; as
    # written the method returns None.
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def get_relfreq(total, total_freq):
    """Calculate the relative frequency (percent) per year.

    Args:
        total: mapping of year (int-parseable key) -> absolute frequency.
        total_freq: mapping of year -> corpus-wide total for that year.

    Returns:
        List of dicts with keys "x" (year as int), "y" (relative
        frequency in percent) and "f" (absolute frequency as int).
    """
    relfreq_list = []
    # dict.iteritems() is Python 2 only; .items() also works on 2.7.
    for year, freq in total.items():
        # Years before 1810 are skipped — presumably the corpus starts
        # there; TODO confirm against the data source.
        if int(year) >= 1810:
            rel_freq = float(freq) / total_freq[year] * 100
            relfreq_list.append(
                {"x": int(year), "y": rel_freq, "f": int(freq)})
    return relfreq_list
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def return_single_results(sql,args,lang,label,corpus):
    """ Returns the results for single items """
    # NOTE(review): implementation not present in this snapshot; as
    # written the function returns None.
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def get_query_params(request):
    """ Returns a dictionary of query parameters """
    # NOTE(review): implementation not present in this snapshot; as
    # written the function returns None.
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def index():
    # Landing page: renders the static header/footer template only.
    return render_template('header-footer.html')
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def query():
    """Run one sub-query per search term and return the combined
    result set as a JSON HTTP response."""
    entries = []
    # Fill in defaults for any query parameter the request did not set.
    params = dict_merge(default_params, get_query_params(request))
    # Clean up the raw terms string into a list of terms.
    params['terms'] = return_terms(params['terms'])
    for idx in range(len(params['terms'])):
        # Interpret the term, then build the SQL for it.
        ngrams, query_type, term_params = termParser(idx, params)
        sql, args, label, lang, corpus = query_factory(
            ngrams, term_params['lang'], term_params['case_sens'],
            term_params['corpus'])
        n_statements = len(sql)
        if n_statements == 0:
            continue
        if n_statements == 1:
            entries += return_single_results(sql, args, lang, label, corpus)
        elif query_type == 'agg':
            entries += return_agg_results(sql, args, lang, label, corpus)
        elif query_type in ('wildcard', 'trunctated'):
            # 'trunctated' (sic) matches the spelling the parser emits.
            entries += return_single_results(sql, args, lang, label, corpus)
        # Any other multi-statement query type is silently ignored.
    return Response(export_to_json(entries), mimetype='application/json')
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def export_to_json_file(entries, path='static/dump.json'):
    """ Exports result as JSON file.

    Args:
        entries: any JSON-serialisable object.
        path: destination file; defaults to the original hard-coded
            location for backward compatibility.
    """
    # json.dump writes text, so the file must be opened in text mode:
    # the original 'wb' raises TypeError on Python 3.
    with open(path, 'w') as outfile:
        json.dump(entries, outfile, indent=4, separators=(', ', ': '))
NationalLibraryOfNorway/NB-N-gram
[ 18, 5, 18, 1, 1433156314 ]
def message( self ):
    # Lazily complete the object from the API if this attribute is not
    # yet populated, then return it (None when the payload omitted it).
    self._completeIfNotSet( self._message )
    return self._NoneIfNotSet( self._message )
sagarsane/abetterportfolio
[ 6, 3, 6, 8, 1345948005 ]
def object( self ):
    # Lazily complete the object from the API if this attribute is not
    # yet populated, then return it (None when the payload omitted it).
    self._completeIfNotSet( self._object )
    return self._NoneIfNotSet( self._object )
sagarsane/abetterportfolio
[ 6, 3, 6, 8, 1345948005 ]
def sha( self ):
    # Lazily complete the object from the API if this attribute is not
    # yet populated, then return it (None when the payload omitted it).
    self._completeIfNotSet( self._sha )
    return self._NoneIfNotSet( self._sha )
sagarsane/abetterportfolio
[ 6, 3, 6, 8, 1345948005 ]
def tag( self ):
    # Lazily complete the object from the API if this attribute is not
    # yet populated, then return it (None when the payload omitted it).
    self._completeIfNotSet( self._tag )
    return self._NoneIfNotSet( self._tag )
sagarsane/abetterportfolio
[ 6, 3, 6, 8, 1345948005 ]
def tagger( self ):
    # Lazily complete the object from the API if this attribute is not
    # yet populated, then return it (None when the payload omitted it).
    self._completeIfNotSet( self._tagger )
    return self._NoneIfNotSet( self._tagger )
sagarsane/abetterportfolio
[ 6, 3, 6, 8, 1345948005 ]
def url( self ):
    # Lazily complete the object from the API if this attribute is not
    # yet populated, then return it (None when the payload omitted it).
    self._completeIfNotSet( self._url )
    return self._NoneIfNotSet( self._url )
sagarsane/abetterportfolio
[ 6, 3, 6, 8, 1345948005 ]
def fetch(observable):
    """Query the ThreatCrowd v2 API for a report on *observable*.

    Args:
        observable: a Hostname, Email, Ip or Hash instance.

    Returns:
        The decoded JSON report, or None on HTTP failure, request error
        or unsupported observable type.
    """
    base_url_api = "https://www.threatcrowd.org/searchApi/v2"
    # One (type, endpoint, query-param) row per supported observable;
    # replaces four near-identical copy-pasted branches (two of which
    # printed the wrong "email report" error text for ip/hash lookups,
    # and one of which left stray debug prints behind).
    endpoints = [
        (Hostname, "domain", "domain"),
        (Email, "email", "email"),
        (Ip, "ip", "ip"),
        (Hash, "file", "resource"),
    ]
    for obs_type, endpoint, param_name in endpoints:
        if isinstance(observable, obs_type):
            url = "{}/{}/report/".format(base_url_api, endpoint)
            params = {param_name: observable.value}
            try:
                res = requests.get(url, params)
                if res.ok:
                    return res.json()
            except Exception as e:
                # Exception.message was removed in Python 3; format the
                # exception itself instead.
                print("Exception while getting {} report {}".format(
                    endpoint, e))
            return None
    return None
yeti-platform/yeti
[ 1360, 268, 1360, 132, 1450025666 ]
def add_header(r):
    """Set cache-control headers on the response *r* and return it.

    Effective policy: 'public, max-age=0' (revalidate on every request)
    plus the legacy Pragma/Expires headers for old clients.
    """
    r.headers["Pragma"] = "no-cache"
    r.headers["Expires"] = "0"
    # The original assigned "no-cache, no-store, must-revalidate" first
    # and then immediately overwrote it with this value, so only this
    # one ever took effect; the dead assignment (and the docstring that
    # claimed a 10-minute cache) has been removed.
    r.headers['Cache-Control'] = 'public, max-age=0'
    return r
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def serve_json():
    """Serve a stored report as JSON.

    The database name comes from the 'reportdb' query parameter, falling
    back to 'app' when 'reportdb' is missing or empty.
    """
    index = request.args.get('id')
    # `or` gives exactly the original truthiness-based fallback.
    db_name = request.args.get('reportdb') or request.args.get('app')
    return db.read_from_database(db_name, index)
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def monitor_page():
    # Render the monitor view for the app named in the 'app' query param.
    app_name = request.args.get('app')
    return render_template('monitor.html', app_name=app_name)
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def landing_page():
    """Render the landing page listing every dumped app database.

    Recursively scans ./app_dumps for '*.db' files and appends their
    base names to the module-level APP_LIST.

    NOTE(review): APP_LIST is appended to on every request, so repeated
    visits accumulate duplicates — preserved because other code reads
    the global, but worth deduplicating upstream.
    """
    global APP_LIST, DB_MAP
    app_dumps_dir = os.path.join('.', 'app_dumps')
    for root, _dirs, files in os.walk(app_dumps_dir):
        for file_name in files:
            if file_name.endswith('.db'):
                # Strip only the trailing extension; the original
                # replace('.db', '') also mangled names containing
                # '.db' in the middle.
                APP_LIST.append(file_name[:-len('.db')])
    return render_template('index.html', apps=APP_LIST)
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def merge_scripts(path):
    """Concatenate every '*.js' file under *path* into one script.

    Each file's content is prefixed with a banner comment naming its
    parent directory and file name. The merged source is written to the
    module-level ``merged_script_path``, which is returned.
    """
    global merged_script_path
    sources = []
    for root, _dirs, files in os.walk(path):
        for file_name in files:
            script_path = os.path.join(root, file_name)
            if not script_path.endswith('.js'):
                continue
            with codecs.open(script_path, 'r', 'utf-8') as f:
                source = f.read()
            sources.append('/* ____%s/%s____ */\n\n'
                           % (os.path.basename(root), file_name)
                           + source + '\n\n')
    # ''.join avoids the quadratic repeated-concatenation of the
    # original; the unused `path = root.split('/')` was removed.
    script_source = ''.join(sources)
    with codecs.open(merged_script_path, "w", "utf-8") as f:
        f.write(script_source)
    return merged_script_path
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def writeBinFile(fname, data):
    """Append *data* to *fname* as UTF-8, terminated by a blank CRLF line."""
    payload = data + '\r\n\r\n'
    # codecs.open performs no newline translation, so the CRLFs are
    # written verbatim on every platform.
    with codecs.open(fname, "a", "utf-8") as out:
        out.write(payload)
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def on_detached():
    # Frida session callback: fires when the target app dies or the
    # session is otherwise lost. `app_name` and `colored` are module
    # globals.
    print((colored('[WARNING] "%s" has terminated!' % (app_name), 'red')))
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def generate_injection():
    """Read the Frida injection script source.

    The module-level ``script_path`` may name a single .js file or a
    directory; directories are merged via merge_scripts() first. Returns
    '' when script_path is neither.
    """
    injection_source = ''
    source_file = None
    if os.path.isfile(script_path):
        source_file = script_path
    elif os.path.isdir(script_path):
        source_file = merge_scripts(script_path)
    if source_file is not None:
        with codecs.open(source_file, 'r', 'utf-8') as f:
            injection_source = f.read()
    print((colored('[INFO] Building injection...', 'yellow')))
    return injection_source
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def getBundleID(device, app_name, platform): try: session = device.attach(app_name) session.on('detached', on_detached) script = session.create_script("""'use strict';
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def init_session(): try: session = None if platform == 'ios' or platform == 'android': try: device = frida.get_usb_device(3) # added timeout to wait for 3 seconds except Exception as e: print((colored(str(e), "red"))) traceback.print_exc() if platform == 'android': print((colored("Troubleshooting Help", "blue"))) print((colored("HINT: Is USB Debugging enabled?", "blue"))) print((colored("HINT: Is `frida-server` running on mobile device (with +x permissions)?", "blue"))) print((colored("HINT: Is `adb` daemon running?", "blue"))) sys.exit(1) elif platform == "ios": print((colored("Troubleshooting Help", "blue"))) print((colored("HINT: Have you installed `frida` module from Cydia?", "blue"))) print((colored("HINT: Have used `ipa_installer` to inject the `FridaGadget` shared lbrary?", "blue"))) sys.exit(1) elif platform == 'iossim': try: device = frida.get_remote_device() except Exception as e: print((colored("Troubleshooting Help", "blue"))) print((colored("HINT: Have you successfully integrated the FridaGadget dylib with the XCode Project?", "blue"))) print((colored("HINT: Do you see a message similar to \"[Frida INFO] Listening on 127.0.0.1 TCP port 27042\" on XCode console logs?", "blue"))) sys.exit(1) elif platform == 'macos': device = frida.get_local_device() else: print((colored('[ERROR] Unsupported Platform', 'red'))) sys.exit(1) pid = None if app_name: try: if platform == 'android' and spawn == 1: print((colored("Now Spawning %s" % app_name, "green"))) pid = device.spawn([app_name]) #time.sleep(5) session = device.attach(pid) #time.sleep(5) elif (platform == 'ios' or platform == 'macos') and spawn == 1: bundleID = getBundleID(device, app_name, platform) if bundleID: print((colored("Now Spawning %s" % bundleID, "green"))) pid = device.spawn([bundleID]) #time.sleep(5) session = device.attach(pid) else: print((colored("[ERROR] Can't spawn %s" % app_name, "red"))) traceback.print_exc() sys.exit(1) else: arg_to_attach = app_name if app_name.isdigit(): arg_to_attach 
= int(app_name) session = device.attach(arg_to_attach) except Exception as e: print((colored('[ERROR] ' + str(e), 'red'))) traceback.print_exc() if session: print((colored('[INFO] Attached to %s' % (app_name), 'yellow'))) session.on('detached', on_detached) except Exception as e: print((colored('[ERROR] ' + str(e), 'red'))) traceback.print_exc() sys.exit(1) return device, session, pid
dpnishant/appmon
[ 1412, 284, 1412, 36, 1461924973 ]
def test_bit_extraction() -> None:
    """bit_extraction(pos) returns the boolean bit *pos* per element."""
    share = ShareTensor(rank=0, parties_info=[], ring_size=2**32)
    share.child = np.array([[21, 32], [-54, 89]], dtype=np.int32)
    # Bit 31 is the int32 sign bit: only -54 is negative.
    sign_bits = np.array([[False, False], [True, False]], dtype=np.bool_)
    assert (share.bit_extraction(31).child == sign_bits).all()
    # Bit 2 (value 4) is set only in 21 (0b10101).
    low_bits = np.array([[True, False], [False, False]], dtype=np.bool_)
    assert (share.bit_extraction(2).child == low_bits).all()
OpenMined/PySyft
[ 8617, 1908, 8617, 143, 1500410476 ]
def test_with_files(self):
    """_find_ui_uploads returns only the ui entries present in the tar."""
    zapp = {'ui': ['x']}
    tar = mock.Mock(getnames=lambda: ['x', 'y'])
    assert sorted(zpm._find_ui_uploads(zapp, tar)) == ['x']
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__prepare_job():
    """Test for `zpmlib.zpm._prepare_job`."""
    # Contents of `boot/system.map`, which is expected to be in the
    # `myapp.zapp` archive.
    myapp_json = [
        {'exec': {'args': 'myapp.py', 'path': 'file://python2.7:python'},
         'devices': [{'name': 'python2.7'}, {'name': 'stdout'}],
         'name': 'myapp'}
    ]
    zapp = {'meta': {'name': 'myapp'}}
    zapp_swift_url = ('swift://AUTH_469a9cd20b5a4fc5be9438f66bb5ee04/'
                      'test_container/hello.zapp')
    # Expected result: the original job plus the zapp image device.
    exp_job_json = copy.deepcopy(myapp_json)
    exp_job_json[0]['devices'].append(
        {'name': 'image', 'path': zapp_swift_url}
    )
    tempdir = tempfile.mkdtemp()
    try:
        tempzapp = os.path.join(tempdir, 'myapp.zapp')
        # prepare a sample job description
        system_map = os.path.join(tempdir, 'system.map')
        with open(system_map, 'w') as fp:
            json.dump(myapp_json, fp)
        # `with` guarantees the archive is closed even when add()/open()
        # raises; the original explicit open/close leaked on error.
        with tarfile.open(tempzapp, 'w:gz') as tf:
            tf.add(system_map, arcname='boot/system.map')
        with tarfile.open(tempzapp, 'r:gz') as tf:
            job = zpm._prepare_job(tf, zapp, zapp_swift_url)
        assert exp_job_json == job
    finally:
        shutil.rmtree(tempdir)
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def setup_method(self, _method):
    """Create a temp dir with a nested foo/bar subdirectory for tests."""
    self.tempdir = tempfile.mkdtemp()
    nested = os.path.join(self.tempdir, 'foo', 'bar')
    os.makedirs(nested)
    self.subdir = nested
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_zapp_yaml_not_exists(self):
    """find_project_root() raises when no zapp.yaml exists up the tree."""
    try:
        # return_value passed directly instead of assigning on the mock.
        with mock.patch('os.getcwd', return_value=self.subdir):
            with pytest.raises(RuntimeError):
                zpm.find_project_root()
    finally:
        shutil.rmtree(self.tempdir)
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def setup_method(self, _method):
    """Build mock CLI argument objects for both v1 and v2 auth."""
    self.v1_args = mock.Mock()
    self.v1_args.configure_mock(
        auth_version='1.0',
        auth='http://example.com/auth/v1.0',
        user='tenant1:user1',
        key='secret',
    )
    self.v2_args = mock.Mock()
    self.v2_args.configure_mock(
        auth_version='2.0',
        os_auth_url='http://example.com/v2.0',
        os_username='user1',
        os_password='secret',
        os_tenant_name='tenant1',
    )
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_v1_fail(self):
    # v1 auth requires --user; clearing it must raise ZPMException.
    self.v1_args.user = None
    with pytest.raises(zpmlib.ZPMException):
        zpm._get_zerocloud_conn(self.v1_args)
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_v2_fail(self):
    # v2 auth requires --os-tenant-name; clearing it must raise.
    self.v2_args.os_tenant_name = None
    with pytest.raises(zpmlib.ZPMException):
        zpm._get_zerocloud_conn(self.v2_args)
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def setup_class(cls): cls.zapp_yaml_contents = """\
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def teardown_class(cls):
    # Remove the temporary directory created by setup_class.
    shutil.rmtree(cls.temp_dir)
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__generate_uploads(self):
    """_generate_uploads yields (path, content, content-type) triples for
    the zapp tarball, the prepped system.map, and each UI file."""
    uploads = zpm._generate_uploads(self.conn, self.target,
                                    self.zapp_path, self.auth_opts)
    uploads = list(uploads)
    # foo.js is a jinja2 template rendered with the auth options.
    foojs_tmpl = jinja2.Template(self.foojstmpl_contents.decode())
    foojs = foojs_tmpl.render(auth_opts=self.auth_opts)
    expected_uploads = [
        ('%s/zapp.yaml' % self.target, gzip.open(self.zapp_path).read(),
         'application/x-tar'),
        ('%s/boot/system.map' % self.target,
         self.job_json_prepped.decode('utf-8'), 'application/json'),
        ('%s/foo.js' % self.target, foojs, None),
        ('%s/index.html' % self.target, self.indexhtml_contents, None),
    ]
    assert uploads[0] == expected_uploads[0]
    # system.map is compared as parsed JSON: key order is not guaranteed.
    assert uploads[1][0] == expected_uploads[1][0]
    assert json.loads(uploads[1][1]) == json.loads(expected_uploads[1][1])
    assert uploads[2] == expected_uploads[2]
    assert uploads[3] == expected_uploads[3]
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__deploy_zapp_with_index_html(self):
    """_deploy_zapp returns the index path and uploads each item once."""
    fake_uploads = [('cont/dir/index.html', 'data', 'text/html')]
    with mock.patch('zpmlib.zpm._generate_uploads') as gu:
        gu.return_value = iter(fake_uploads)
        index = zpm._deploy_zapp(self.conn, 'cont', None, None)
    assert index == 'cont/dir/index.html'
    put_object = self.conn.put_object
    assert put_object.call_count == 1
    expected_call = mock.call('cont', 'dir/index.html', 'data',
                              content_type='text/html')
    assert put_object.call_args_list == [expected_call]
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__deploy_zapp_container_not_empty(self):
    """Deploying into a non-empty container must fail with a clear error."""
    self.conn.get_container.return_value = (
        {},  # response headers
        # The actual files list response from Swift is a list of
        # dictionaries. For these tests, we don't actually check the
        # content; just length of the file list.
        ['file1'],
    )
    with pytest.raises(zpmlib.ZPMException) as exc:
        zpm._deploy_zapp(self.conn, 'target/dir1/dir2', None, None)
    assert str(exc.value) == (
        "Target container ('target') is not empty.\n"
        "Deploying to a non-empty container can cause consistency "
        "problems with overwritten objects.\n"
        "Specify the flag `--force/-f` to overwrite anyway."
    )
    assert self.conn.get_container.call_args_list == [mock.call('target')]
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__deploy_zapp_container_doesnt_exist(self):
    """A missing container is created before the files are uploaded."""
    self.conn.get_container.side_effect = (
        swiftclient.exceptions.ClientException(None)
    )
    with mock.patch('zpmlib.zpm._generate_uploads') as gu:
        gu.return_value = iter([('target/dir/foo.py', 'data', None)])
        zpm._deploy_zapp(self.conn, 'target/dir', None, None)
    # The container is created exactly once...
    assert self.conn.put_container.call_count == 1
    assert self.conn.put_container.call_args_list == [mock.call('target')]
    # ...and the single file is uploaded with the right object name.
    assert self.conn.put_object.call_count == 1
    expected_put = mock.call('target', 'dir/foo.py', 'data',
                             content_type=None)
    assert self.conn.put_object.call_args_list == [expected_put]
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def post_job(self, job, response_dict=None, response_body_buffer=None):
    """Fake Swift post_job: reports success and verifies the passed job."""
    response_dict['status'] = 200
    response_dict['reason'] = 'OK'
    response_dict['headers'] = {
        'x-nexe-system': 'node-1',
        'x-nexe-cdr-line': (
            '5.121, 4.993, 0.13 3.84 1025 75943662 23 735 8 399 0 '
            '0'
        ),
        'x-nexe-status': 'ok',
        'x-nexe-retcode': '0',
    }
    # Check the job is passed properly here
    assert job == job_dict
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__prepare_auth_v0():
    """_prepare_auth with version 0.0 needs only the connection URL."""
    conn = mock.Mock()
    conn.url = 'http://example.com'
    expected = {
        'version': '0.0',
        'swiftUrl': 'http://example.com',
    }
    assert zpm._prepare_auth('0.0', None, conn) == expected
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__prepare_auth_v2():
    # Test for :func:`zpmlib.zpm._prepare_auth`, with version 2.0
    version = '2.0'
    args = mock.Mock()
    args.os_auth_url = 'http://example.com:5000/v2.0'
    args.os_username = 'user1'
    args.os_tenant_name = 'tenant1'
    args.os_password = 'secret'
    conn = None  # v2 auth uses the CLI args, not an existing connection
    expected = {
        'version': '2.0',
        'authUrl': 'http://example.com:5000/v2.0',
        'tenant': 'tenant1',
        'username': 'user1',
        'password': 'secret',
    }
    assert zpm._prepare_auth(version, args, conn) == expected
    # Make sure that we're robust enough to handle slightly varied version
    # inputs.
    version = '2'
    assert zpm._prepare_auth(version, args, conn) == expected
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def setup_method(self, _method):
    """Mock CLI args with every auth-related attribute cleared."""
    self.args = mock.Mock()
    for attr in ('auth', 'user', 'key', 'os_auth_url',
                 'os_username', 'os_password', 'os_tenant_name'):
        setattr(self.args, attr, None)
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_args_v2(self):
    # With all four OS_* options present, v2 wins even though --auth is
    # also set.
    args = self.args
    args.os_auth_url = 'authurl'
    args.os_username = 'username'
    args.os_password = 'password'
    args.os_tenant_name = 'tenant'
    args.auth = 'auth'
    assert zpm._guess_auth_version(args) == '2.0'
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_env_v1(self):
    """ST_* set and OS_* blank in the environment selects auth v1."""
    env = {
        'ST_AUTH': 'auth',
        'ST_USER': 'user',
        'ST_KEY': 'key',
        'OS_AUTH_URL': '',
        'OS_USERNAME': 'username',
        'OS_PASSWORD': '',
        'OS_TENANT_NAME': '',
    }
    with mock.patch.dict('os.environ', env):
        assert zpm._guess_auth_version(self.args) == '1.0'
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_env_default(self):
    """With both full ST_* and OS_* environments present, v1 is the default."""
    env = {
        'ST_AUTH': 'auth',
        'ST_USER': 'user',
        'ST_KEY': 'key',
        'OS_AUTH_URL': 'authurl',
        'OS_USERNAME': 'username',
        'OS_PASSWORD': 'password',
        'OS_TENANT_NAME': 'tenant',
    }
    with mock.patch.dict('os.environ', env):
        assert zpm._guess_auth_version(self.args) == '1.0'
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test__get_exec_table_data_1_row(self):
    """One set of x-nexe-* headers yields one total time and one table row."""
    headers = {
        'content-length': '20',
        'content-type': 'text/html',
        'date': 'Tue, 26 Aug 2014 09:27:08 GMT',
        'etag': 'af0983cb8fef30642bae9ba0010e7a77',
        'x-chain-total-time': '3.920',
        'x-nexe-cdr-line': (
            '3.920, 3.913, 0.11 3.37 1025 75943644 2 20 0 0 0 0'
        ),
        'x-nexe-etag': 'disabled',
        'x-nexe-policy': 'Policy-0',
        'x-nexe-retcode': '0',
        'x-nexe-status': 'ok',
        'x-nexe-system': 'hello',
        'x-nexe-validation': '0',
        'x-timestamp': '1409045228.85265',
        'x-trans-id': 'tx1d61239ed02a56fbbfe5d-0053fc52e9',
        'x-zerovm-device': 'stdout',
    }
    expected_total_t = '3.920'
    # Row: system name, status, retcode, then the parsed cdr-line fields.
    expected_table = [
        ['hello', 'ok', '0', '3.913', '0.11', '3.37', '1025', '75943644',
         '2', '20', '0', '0', '0', '0']
    ]
    actual_total_t, actual_table = zpm._get_exec_table_data(headers)
    assert actual_total_t == expected_total_t
    assert actual_table == expected_table
zerovm/zerovm-cli
[ 6, 7, 6, 10, 1384778504 ]
def test_resize_and_fuse_features(self):
    """ResizedFuse must fuse features of mixed spatial sizes and channel
    counts into one (batch, height, width, channels) tensor."""
    batch, height, width, channels = 2, 11, 11, 6
    smaller_height, smaller_width, smaller_channels = 6, 6, 3
    larger_height1, larger_width1 = 21, 21  # Stride 2 conv.
    larger_height2, larger_width2 = 22, 22  # Stride 2 conv.
    larger_height3, larger_width3 = 23, 23  # Conv and resize.
    # Cover every resize/conv path: smaller, equal and larger inputs,
    # each with matching and non-matching channel counts.
    feature_list = []
    feature_list.append(tf.zeros([batch, smaller_height, smaller_width,
                                  smaller_channels]))
    feature_list.append(tf.zeros([batch, smaller_height, smaller_width,
                                  channels]))
    feature_list.append(tf.zeros([batch, height, width, smaller_channels]))
    feature_list.append(tf.zeros([batch, height, width, channels]))
    feature_list.append(tf.zeros([batch, larger_height1, larger_width1,
                                  channels]))
    feature_list.append(tf.zeros([batch, larger_height1, larger_width1,
                                  smaller_channels]))
    feature_list.append(tf.zeros([batch, larger_height2, larger_width2,
                                  smaller_channels]))
    feature_list.append(tf.zeros([batch, larger_height3, larger_width3,
                                  smaller_channels]))
    layer = resized_fuse.ResizedFuse(name='fuse',
                                     height=height,
                                     width=width,
                                     num_channels=channels)
    output = layer(feature_list)
    self.assertEqual(output.get_shape().as_list(),
                     [batch, height, width, channels])
google-research/deeplab2
[ 878, 146, 878, 24, 1620859177 ]
def defaults(self):
    """Populate the metadata pcbnew displays for this action plugin."""
    self.name = "Toggle visibility of value/reference (of selected modules)"
    self.category = "A descriptive category name"
    self.description = "This plugin toggles the visibility of any selected module values/references"
mmccoo/kicad_mmccoo
[ 100, 27, 100, 5, 1485771968 ]
def __init__( self, env, policy, max_num_epoch_paths_saved=None, render=False, render_kwargs=None,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def get_epoch_paths(self):
    """Return the rollout paths collected during the current epoch."""
    return self._epoch_paths
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def get_diagnostics(self):
    """Return an ordered dict of collection statistics.

    Includes lifetime totals plus summary statistics over the lengths of
    the paths collected this epoch.
    """
    path_lens = [len(path['actions']) for path in self._epoch_paths]
    stats = OrderedDict([
        ('num steps total', self._num_steps_total),
        ('num paths total', self._num_paths_total),
    ])
    stats.update(create_stats_ordered_dict(
        "path length",
        path_lens,
        always_show_all_stats=True,
    ))
    return stats
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def collect_new_steps( self, max_path_length, num_steps, discard_incomplete_paths,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def collect_one_step( self, max_path_length, discard_incomplete_paths,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def _start_new_rollout(self):
    # Fresh path builder plus a reset environment observation.
    self._current_path_builder = PathBuilder()
    self._obs = self._env.reset()
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def __init__( self, env, policy, max_num_epoch_paths_saved=None, render=False, render_kwargs=None, observation_key='observation', desired_goal_key='desired_goal',
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def get_epoch_paths(self): return self._epoch_paths
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def get_diagnostics(self):
    """Return an ordered dict of collection statistics.

    Includes lifetime totals plus summary statistics over the lengths of
    the paths collected this epoch.
    """
    path_lens = [len(path['actions']) for path in self._epoch_paths]
    stats = OrderedDict([
        ('num steps total', self._num_steps_total),
        ('num paths total', self._num_paths_total),
    ])
    stats.update(create_stats_ordered_dict(
        "path length",
        path_lens,
        always_show_all_stats=True,
    ))
    return stats
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def start_collection(self):
    """Begin a collection phase by starting the first rollout."""
    self._start_new_rollout()
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def collect_new_steps( self, max_path_length, num_steps, discard_incomplete_paths,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def collect_one_step( self, max_path_length, discard_incomplete_paths,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def _start_new_rollout(self):
    # Fresh path builder plus a reset environment observation.
    self._current_path_builder = PathBuilder()
    self._obs = self._env.reset()
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def __init__( self, env, policy, max_num_epoch_paths_saved=None, render=False, render_kwargs=None, observation_key='observation',
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def get_epoch_paths(self):
    """Return the rollout paths collected during the current epoch."""
    return self._epoch_paths
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def get_diagnostics(self):
    """Return an ordered dict of collection statistics.

    Includes lifetime totals plus summary statistics over the lengths of
    the paths collected this epoch.
    """
    path_lens = [len(path['actions']) for path in self._epoch_paths]
    stats = OrderedDict([
        ('num steps total', self._num_steps_total),
        ('num paths total', self._num_paths_total),
    ])
    stats.update(create_stats_ordered_dict(
        "path length",
        path_lens,
        always_show_all_stats=True,
    ))
    return stats
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def start_collection(self):
    """Begin a collection phase by starting the first rollout."""
    self._start_new_rollout()
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def collect_new_steps( self, max_path_length, num_steps, discard_incomplete_paths,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def collect_one_step( self, max_path_length, discard_incomplete_paths,
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def _start_new_rollout(self):
    # Fresh path builder plus a reset environment observation.
    self._current_path_builder = PathBuilder()
    self._obs = self._env.reset()
google-research/DBAP-algorithm
[ 3, 2, 3, 1, 1628553461 ]
def _check_if_unsupported_args_are_present(args: Mapping[str, Any], supported_args: Collection[str], job_type: str) -> None: supported_args = set(supported_args) unsupported_args = set(args.keys()) - supported_args if unsupported_args: raise ValueError( f'Arguments {unsupported_args!r} are not supported by {job_type}. Only ' f'{supported_args!r} are allowed.')
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def _apply_args_to_job_group(job_group: job_blocks.JobGroup,
                             args: Mapping[str, Any]) -> None:
    """Recursively overrides job group properties."""
    if args:
        # Only keys naming jobs inside this group are accepted.
        _check_if_unsupported_args_are_present(args, job_group.jobs.keys(),
                                               'xm.JobGroup')
    for key, job in job_group.jobs.items():
        # A missing key means "no overrides" for that child job.
        _apply_args(job, args.get(key, {}))
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def is_active(self) -> bool:
    """Returns whether the unit is not in terminal state.

    It may be actively running or queued. The unit may produce more results.
    If the unit is stopped by a user it will be neither active, completed
    nor failed.
    """
    # Abstract: concrete experiment-unit statuses implement this.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def is_completed(self) -> bool:
    """Returns whether the unit has completed without failures.

    This is a terminal state. The unit has produced all the intended
    results. But it still may be restarted by an explicit request.
    """
    # Abstract: concrete experiment-unit statuses implement this.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def is_failed(self) -> bool:
    """Returns whether the unit has failed.

    This is a terminal state. Experiment unit will enter this state on any
    fatal failure, such as process exiting with non-zero code, cloud
    rejecting to schedule/queue the job or exceptions in JobGenerator. The
    unit will stay in this state unless explicitly restarted. Intermediate
    failures do not result in this state.
    """
    # Abstract: concrete experiment-unit statuses implement this.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def message(self) -> str:
    """An optional human-readable message providing context for the status.

    This may take the form of explaining why the work unit is in this state,
    or any potentially transient errors the work unit may be experiencing.
    """
    # Abstract: concrete experiment-unit statuses implement this.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def _work_unit_arguments( job: job_blocks.JobType, args: Optional[Mapping[str, Any]],
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def deduce_args_for_job(job: job_blocks.Job) -> Dict[str, Any]:
    """Collect the non-empty launch arguments of *job* as a dict.

    Empty args / env_vars are omitted from the result entirely.
    """
    candidates = {
        'args': job.args.to_dict(kwargs_only=True),
        'env_vars': job.env_vars,
    }
    return {name: value for name, value in candidates.items() if value}
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def __init__(self,
             experiment: 'Experiment',
             create_task: Callable[[Awaitable[Any]], futures.Future],
             args: Optional[Mapping[str, Any]],
             role: ExperimentUnitRole) -> None:
    """Initializes an `ExperimentUnit` instance.

    Args:
      experiment: An experiment this unit belongs to.
      create_task: A callback to register a new asynchronous task.
      args: Arguments to this experiment unit. Most commonly used to
        represent the hyperparameter sweep trial corresponding to a work
        unit.
      role: The role of this unit in the experiment structure.
    """
    self.experiment = experiment
    self._create_task = create_task
    self._args = args
    self._role = role
    # Futures for in-flight launch operations.
    self._launch_tasks: List[futures.Future] = []
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def experiment_id(self) -> int:
    """Returns a unique ID assigned to the experiment."""
    return self.experiment.experiment_id
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def launch_job(job: job_blocks.Job) -> Awaitable[None]:
    # Wrap the single job into a one-element JobGroup keyed by its own
    # name so the group-launch code path can be reused.
    return self._launch_job_group(
        job_blocks.JobGroup(**{job.name: job}),
        _work_unit_arguments(job, self._args))
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def launch_job_generator(
        job_generator: job_blocks.JobGeneratorType) -> Awaitable[None]:
    # Generators must be coroutine functions (possibly via __call__ on a
    # callable object) so that launching can be awaited.
    if (not inspect.iscoroutinefunction(job_generator) and
            not inspect.iscoroutinefunction(job_generator.__call__)):
        raise ValueError(
            'Job generator must be an async function. Signature needs to be '
            '`async def job_generator(work_unit: xm.WorkUnit):`')
    return job_generator(self, **(args or {}))
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def stop(self) -> None:
    """Initiate the process to stop the unit from running.

    This method will synchronously make a request for the unit to stop.
    However, the method does not actually wait for the unit to be in a
    terminal state.

    Use self.wait_until_complete() after self.stop() to guarantee the unit
    is stopped.
    """
    # Abstract: backend-specific units implement the actual stop request.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def experiment_unit_name(self) -> str:
    # Abstract: unique name of this unit within the experiment.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def context(self) -> metadata_context.MetadataContext:
    """Returns metadata context for a unit."""
    # A fresh context is built per call, attributed to the current OS user.
    return metadata_context.MetadataContext(
        creator=getpass.getuser(),
        annotations=metadata_context.ContextAnnotations())
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def work_unit_id(self) -> int:
    # Abstract: numeric ID of this work unit within the experiment.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def __init__(self,
             job: job_blocks.JobType,
             *,
             importance: Importance = Importance.NORMAL,
             termination_delay_secs: int) -> None:
    # Auxiliary units carry their role (importance plus how long to keep
    # the job alive after the experiment stops) alongside the wrapped job.
    self.role = AuxiliaryUnitRole(
        importance=importance,
        termination_delay_secs=termination_delay_secs,
    )
    self._job = job
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def experiment_id(self) -> int:
    """Returns a unique ID assigned to the experiment."""
    # Abstract: backend-specific experiments implement this.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def _wait_for_tasks(self):
    # Drain the queue of background tasks; Future.result() blocks until
    # each finishes and re-raises any exception the task produced.
    while not self._running_tasks.empty():
        self._running_tasks.get_nowait().result()
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def package( cls, packageables: Sequence[job_blocks.Packageable] = ()
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def package_async(
    cls,
    packageable: job_blocks.Packageable) -> Awaitable[job_blocks.Executable]:
    """Queues executable spec to be packaged into executable.

    If gathering all packageables for a single `package()` call is
    inconvenient, one may request packaging with `package_async` and later
    trigger the build for the whole batch with `package()`.

    Usage:

      if eval:
        eval_executable = experiment.package_async(xm.blaze_binary(...))
      if train:
        train_executable = experiment.package_async(xm.blaze_binary(...))
      experiment.package()  # Explicitly trigger packaging.

      jobs = {}
      if eval:
        jobs['eval'] = xm.job(await eval_executable, ...)
      if train:
        jobs['train'] = xm.job(await train_executable, ...)

    Args:
      packageable: Executable spec to package.

    Returns:
      An awaitable for the packaging result.
    """
    # Delegates to the class-level async packager's batch queue.
    return cls._async_packager.add(packageable)
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def add(self,
        job: AuxiliaryUnitJob,
        args: Optional[Mapping[str, Any]] = ...) -> Awaitable[ExperimentUnit]:
    # typing.overload stub: an AuxiliaryUnitJob carries its own role.
    ...
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def add(self,
        job: job_blocks.JobType,
        args: Optional[Mapping[str, Any]] = ...,
        role: WorkUnitRole = ...) -> Awaitable[WorkUnit]:
    # typing.overload stub: a WorkUnitRole yields a WorkUnit.
    ...
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def add(self,
        job: job_blocks.JobType,
        args: Optional[Mapping[str, Any]],
        role: ExperimentUnitRole) -> Awaitable[ExperimentUnit]:
    # typing.overload stub: generic role yields a generic ExperimentUnit.
    ...
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def add( self, job: job_blocks.JobType, args: Optional[Mapping[str, Any]] = ..., *, # parameters after “*” are keyword-only parameters role: ExperimentUnitRole
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def add(self, job, args=None, role=WorkUnitRole()):
    # pyformat: disable
    """Adds a Job / JobGroup to the experiment.

    A new Experiment Unit is created to run the job.

    Args:
      job: A Job or JobGroup to add.
      args: Keyword arguments to be passed to the job. For Job and JobGroup
        args are recursively expanded. For example,

        ```
        wu.add(
            JobGroup(agent=Job(...)),
            args={'agent': {'args': {'learning_rate': 0.1}}},
        )
        ```

        would update `args` field of a job `agent` in the group.
      role: The role of this unit in the experiment structure.

    Returns:
      An awaitable that would be fulfilled when the job is launched.
    """
    # pyformat: enable
    # An AuxiliaryUnitJob overrides the caller-supplied role with its own.
    role = pattern_matching.match(
        pattern_matching.Case([AuxiliaryUnitJob], lambda job: job.role),
        pattern_matching.Case([Any], lambda job: role),
    )(
        job)
    experiment_unit_future = self._create_experiment_unit(args, role)
    async def launch():
        # Wait for the unit to exist, then launch the job on it.
        experiment_unit = await experiment_unit_future
        await experiment_unit.add(job, args)
        return experiment_unit
    return asyncio.wrap_future(self._create_task(launch()))
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def _create_experiment_unit(
        self, args: Optional[Mapping[str, Any]],
        role: ExperimentUnitRole) -> Awaitable[ExperimentUnit]:
    """Creates a new experiment unit.

    Synchronously starts the experiment unit creation, ensuring that IDs
    would be assigned in invocation order. The operation itself may run
    asynchronously in background.

    Args:
      args: Executable unit arguments, to be show as a part of
        hyper-parameter sweep.
      role: Executable unit role: whether to create a work or auxiliary
        unit.

    Returns:
      An awaitable to the creation result.
    """
    # Abstract: backend-specific experiments implement unit creation.
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def work_unit_count(self) -> int:
    """Returns how many work units the experiment has."""
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]
def work_units(self) -> Mapping[int, WorkUnit]:
    """Returns a mapping from work_unit_id to an instance of the work unit."""
    raise NotImplementedError
deepmind/xmanager
[ 669, 33, 669, 13, 1619456631 ]