code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def get_text_async(url_dict, chunk_size, concurrent_requests, headers): <NEW_LINE> <INDENT> monkeypatch_runner() <NEW_LINE> semaphore = Semaphore(concurrent_requests) <NEW_LINE> the_request_threads = [] <NEW_LINE> for filepath, url in get_filepaths_and_urls(url_dict): <NEW_LINE> <INDENT> request_thread = gevent.spawn(_get_text_async_thread_builder, url, filepath, chunk_size, headers, semaphore) <NEW_LINE> the_request_threads.append(request_thread) <NEW_LINE> <DEDENT> for the_response in gevent.iwait(the_request_threads): <NEW_LINE> <INDENT> yield the_response | Asynchronous GET requests for text files | 625941c1fb3f5b602dac3620 |
def test_instant_fail_two_times(self): <NEW_LINE> <INDENT> with patch('select.select') as selectmock: <NEW_LINE> <INDENT> selectmock.return_value = ([], [], []) <NEW_LINE> with patch('subprocess.Popen.__init__'): <NEW_LINE> <INDENT> mon = ProcessMonitor('abc') <NEW_LINE> mon.stdout = 123 <NEW_LINE> mon._buffer = 'aaa ZZZ bbb ccc' <NEW_LINE> with pytest.raises(MatchTimeoutError): <NEW_LINE> <INDENT> mon.wait_for_output('ZZZ', timeout=0, count=2) <NEW_LINE> <DEDENT> <DEDENT> selectmock.assert_called_once() | Test if wait_for_output fails instantly if the buffer
contains the match-string only one times and a two-times
match is requested | 625941c1a79ad161976cc0d4 |
def search_client(client_name): <NEW_LINE> <INDENT> for client in clients: <NEW_LINE> <INDENT> if client != client_name: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True | Search a client. | 625941c1e5267d203edcdc2e |
def update_by_index(self, index_name, query, scripted_patch=None, options=None): <NEW_LINE> <INDENT> if not isinstance(query, IndexQuery): <NEW_LINE> <INDENT> raise ValueError("query must be IndexQuery Type") <NEW_LINE> <DEDENT> path = Utils.build_path(index_name, query, options, with_page_size=False) <NEW_LINE> if scripted_patch: <NEW_LINE> <INDENT> if not isinstance(scripted_patch, ScriptedPatchRequest): <NEW_LINE> <INDENT> raise ValueError("scripted_patch must be ScriptedPatchRequest Type") <NEW_LINE> <DEDENT> scripted_patch = scripted_patch.to_json() <NEW_LINE> <DEDENT> response = self._requests_handler.http_request_handler(path, "EVAL", data=scripted_patch) <NEW_LINE> if response.status_code != 200 and response.status_code != 202: <NEW_LINE> <INDENT> raise response.raise_for_status() <NEW_LINE> <DEDENT> return response.json() | @param index_name: name of an index to perform a query on
:type str
@param query: query that will be performed
:type IndexQuery
@param options: various operation options e.g. AllowStale or MaxOpsPerSec
:type BulkOperationOptions
@param scripted_patch: JavaScript patch that will be executed on query results( Used only when update)
:type ScriptedPatchRequest
@return: json
:rtype: dict | 625941c1f7d966606f6a9f91 |
def export_note(note): <NEW_LINE> <INDENT> if note_exists(note): <NEW_LINE> <INDENT> note_text = load_text(note) <NEW_LINE> with open(note + '.txt', 'w') as write_file: <NEW_LINE> <INDENT> for line in note_text: <NEW_LINE> <INDENT> write_file.write(line + '\n') <NEW_LINE> <DEDENT> <DEDENT> update_component(note) <NEW_LINE> print("\n\tExported: '{}'.\n".format(note)) | Write a copy of note in current directory.
| 625941c116aa5153ce362408 |
def convert_integer(self, values, size): <NEW_LINE> <INDENT> def check(value, size, empty_integer): <NEW_LINE> <INDENT> if value == empty_integer: <NEW_LINE> <INDENT> return ' ' * size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return str(value).rjust(size) <NEW_LINE> <DEDENT> <DEDENT> return [check(value, size, self.empty.empty_integer) for value in values] | Convert value to integer.
| 625941c150812a4eaa59c2b3 |
def chance_dx(rmag): <NEW_LINE> <INDENT> dx = 1.48 * 10**13 * rmag**(-9.53) * units.arcsec <NEW_LINE> return dx | Returns the angular separation for a secure association (1%)
as in https://ui.adsabs.harvard.edu/abs/2014MNRAS.437.1495T/abstract
Args:
rmag (float):
r-band magnitude
Returns:
Quantity: Angular offset in arcsec | 625941c1d53ae8145f87a202 |
def decode(self, data): <NEW_LINE> <INDENT> if not hasattr(self, 'type') and self.type: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> if self.type == 'object': <NEW_LINE> <INDENT> if 'properties' not in self._node: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> rt = {} <NEW_LINE> for k, v in six.iteritems(data): <NEW_LINE> <INDENT> if k not in self.properties: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> _decoder = self.properties._get(k) <NEW_LINE> rt[k] = _decoder.decode(v) <NEW_LINE> <DEDENT> return rt <NEW_LINE> <DEDENT> elif self.type == 'array': <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return [self.items.decode(i) for i in data] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = PROP_DECODERS[self.format or self.type](data) <NEW_LINE> if self._is_enum: <NEW_LINE> <INDENT> m = name_to_model.get(self._path[-1]) <NEW_LINE> if m: <NEW_LINE> <INDENT> r = m(r) <NEW_LINE> <DEDENT> <DEDENT> return r | decode the data as the schema defined
:param data: data to decode
:return: decoded data | 625941c166656f66f7cbc139 |
def GetArrayParamNames(i_node): <NEW_LINE> <INDENT> n_param = GetNArrayParam(i_node) <NEW_LINE> param_name_pp = ctypes.cast(NeuronGPU_GetArrayParamNames( ctypes.c_int(i_node)), ctypes.POINTER(c_char_p)) <NEW_LINE> param_name_list = [] <NEW_LINE> for i in range(n_param): <NEW_LINE> <INDENT> param_name_p = param_name_pp[i] <NEW_LINE> param_name = ctypes.cast(param_name_p, ctypes.c_char_p).value <NEW_LINE> param_name_list.append(to_def_str(param_name)) <NEW_LINE> <DEDENT> if GetErrorCode() != 0: <NEW_LINE> <INDENT> raise ValueError(GetErrorMessage()) <NEW_LINE> <DEDENT> return param_name_list | Get list of scalar parameter names | 625941c163f4b57ef00010ad |
def test_manual_invalid_url(self): <NEW_LINE> <INDENT> bad_urls = [ "http://", "https://", "google.com", "http://google" ] <NEW_LINE> for bad_url in bad_urls: <NEW_LINE> <INDENT> result = crawl(bad_url) <NEW_LINE> self.assertEqual( result, EXPECTED_CRAWL_RESULTS["ConnectionError"](bad_url) ) | Check that when the user provides an invalid URL to the crawler,
the crawler will raise an appropriate error. | 625941c126068e7796caec6a |
def delete_sent_update_request(self, sheet_id, sent_update_request_id): <NEW_LINE> <INDENT> _op = fresh_operation('delete_update_request') <NEW_LINE> _op['method'] = 'DELETE' <NEW_LINE> _op['path'] = '/sheets/' + str(sheet_id) + '/sentupdaterequests/' + str(sent_update_request_id) <NEW_LINE> expected = ['Result', None] <NEW_LINE> prepped_request = self._base.prepare_request(_op) <NEW_LINE> response = self._base.request(prepped_request, expected, _op) <NEW_LINE> return response | Deletes a SentUpdateRequest for the specified Sheet.
Args:
sheet_id (int): Sheet ID
sent_update_request_id (int): SentUpdateRequest ID
Returns:
Result | 625941c15166f23b2e1a50e8 |
def test_obs_dim_return_value(self): <NEW_LINE> <INDENT> env = EasyEnv() <NEW_LINE> env.reset() <NEW_LINE> self.assertEqual(env.obs_dim(), 1) | Check if 1 is returned | 625941c1d4950a0f3b08c2e0 |
def Register_SetDR3(*args): <NEW_LINE> <INDENT> return _x64dbgapi.Register_SetDR3(*args) | Register_SetDR3(duint value) -> bool | 625941c167a9b606de4a7e4a |
def render(self, config, files): <NEW_LINE> <INDENT> extensions = [ _RelativePathExtension(self.file, files, config['strict']) ] + config['markdown_extensions'] <NEW_LINE> md = markdown.Markdown( extensions=extensions, extension_configs=config['mdx_configs'] or {} ) <NEW_LINE> self.content = md.convert(self.markdown) <NEW_LINE> self.toc = get_toc(getattr(md, 'toc', '')) | Convert the Markdown source file to HTML as per the config. | 625941c1cc40096d615958e0 |
def _duplicate_system_metadata(updates): <NEW_LINE> <INDENT> system_metadata = dict(updates_only=True) <NEW_LINE> power_specs = updates.get('power_specs', {}) <NEW_LINE> spec_to_metadata_map = { 'vcpus': 'cpus', 'min_vcpus': 'min_cpus', 'max_vcpus': 'max_cpus', 'proc_units': 'vcpus', 'min_proc_units': 'min_vcpus', 'max_proc_units': 'max_vcpus', 'memory_mb': 'memory_mb', 'min_memory_mb': 'min_memory_mb', 'max_memory_mb': 'max_memory_mb', 'current_compatibility_mode': 'current_compatibility_mode', 'desired_compatibility_mode': 'desired_compatibility_mode', 'rmc_state': 'rmc_state'} <NEW_LINE> for key, val in power_specs.iteritems(): <NEW_LINE> <INDENT> if key in spec_to_metadata_map: <NEW_LINE> <INDENT> system_metadata[spec_to_metadata_map[key]] = str(val) <NEW_LINE> <DEDENT> <DEDENT> updates['system_metadata'] = system_metadata | Temporary Helper method to duplicate the System Meta-data | 625941c1004d5f362079a2c4 |
def test_filter_recipes_by_tags(self): <NEW_LINE> <INDENT> recipe1 = sample_recipe(user=self.user, title='Thai vegetable curry') <NEW_LINE> recipe2 = sample_recipe(user=self.user, title='Aubregine with tahini') <NEW_LINE> tag1 = sample_tag(user=self.user, name='Vegan') <NEW_LINE> tag2 = sample_tag(user=self.user, name='Vegetarian') <NEW_LINE> recipe1.tags.add(tag1) <NEW_LINE> recipe2.tags.add(tag2) <NEW_LINE> recipe3 = sample_recipe(user=self.user, title='Fish and ships') <NEW_LINE> res = self.client.get( RECIPES_URL, {'tags': f'{tag1.id}, {tag2.id}'} ) <NEW_LINE> serializer1 = RecipeSerializer(recipe1) <NEW_LINE> serializer2 = RecipeSerializer(recipe2) <NEW_LINE> serializer3 = RecipeSerializer(recipe3) <NEW_LINE> self.assertIn(serializer1.data, res.data) <NEW_LINE> self.assertIn(serializer2.data, res.data) <NEW_LINE> self.assertNotIn(serializer3.data, res.data) | Test returning recipes with specific tags | 625941c1ad47b63b2c509f0f |
def checksshipcollision(self): <NEW_LINE> <INDENT> if self._ship != None: <NEW_LINE> <INDENT> for bolt in self._bolts: <NEW_LINE> <INDENT> if self._ship.collides(bolt): <NEW_LINE> <INDENT> del bolt <NEW_LINE> self._ship = None <NEW_LINE> self._bolts = [] <NEW_LINE> break | Checks if any bolts from aliens collides with ship.
This method checks for collisions between bolt fired by alien and
ship. If one exists, the round is over so all bolts are erased and
the _ship is set to None. | 625941c1a4f1c619b28affcd |
def from_contre_tableau(self, comps): <NEW_LINE> <INDENT> n = len(comps) <NEW_LINE> M = [ [0 for _ in range(n)] for _ in range(n) ] <NEW_LINE> previous_set = set([]) <NEW_LINE> for col in range(n-1, -1, -1): <NEW_LINE> <INDENT> s = set( comps[col] ) <NEW_LINE> for x in s.difference(previous_set): <NEW_LINE> <INDENT> M[x-1][col] = 1 <NEW_LINE> <DEDENT> for x in previous_set.difference(s): <NEW_LINE> <INDENT> M[x-1][col] = -1 <NEW_LINE> <DEDENT> previous_set = s <NEW_LINE> <DEDENT> return AlternatingSignMatrix(M) | Return an alternating sign matrix from a contre-tableau.
EXAMPLES::
sage: ASM = AlternatingSignMatrices(3)
sage: ASM.from_contre_tableau([[1, 2, 3], [1, 2], [1]])
[0 0 1]
[0 1 0]
[1 0 0]
sage: ASM.from_contre_tableau([[1, 2, 3], [2, 3], [3]])
[1 0 0]
[0 1 0]
[0 0 1] | 625941c1bf627c535bc1315e |
def pluginFactory(self): <NEW_LINE> <INDENT> return self._pluginFactory | Returns the plugin factory that will be used to generate plugins for
the query selector. You can subclass the XOrbQueryPlugin and
XOrbQueryPluginFactory to create custom plugins for schemas and widgets.
:return <XOrbQueryPluginFactory> | 625941c1ff9c53063f47c184 |
def init_table(connection, cursor): <NEW_LINE> <INDENT> if table_exists(cursor): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cursor.execute('CREATE TABLE {tn} ({f1} {ftt} {nn}, {f2} {ftt} {nn}, {f3} {ftb} {nn}, {f4} {fts} {nn}, PRIMARY KEY({f1},{f2}))' .format(tn="proxy_responses", f1="method", f2="url", f3="response", f4="timestamp", ftt="TEXT", ftb="BLOB", fts="INTEGER", nn="NOT NULL")) <NEW_LINE> connection.commit() | Initializes SQLite table | 625941c1b57a9660fec33811 |
def global_lease_update_with_http_info(self, id, body, **kwargs): <NEW_LINE> <INDENT> all_params = ['id', 'body'] <NEW_LINE> all_params.append('async_req') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in six.iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method global_lease_update" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('id' not in params or params['id'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `id` when calling `global_lease_update`") <NEW_LINE> <DEDENT> if ('body' not in params or params['body'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `body` when calling `global_lease_update`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> if 'id' in params: <NEW_LINE> <INDENT> path_params['id'] = params['id'] <NEW_LINE> <DEDENT> query_params = [] <NEW_LINE> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> if 'body' in params: <NEW_LINE> <INDENT> body_params = params['body'] <NEW_LINE> <DEDENT> header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) <NEW_LINE> header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) <NEW_LINE> auth_settings = ['ApiKeyAuth'] <NEW_LINE> return self.api_client.call_api( '/dhcp/global_lease/{id}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='LeasesUpdateGlobalLeaseResponse', auth_settings=auth_settings, async_req=params.get('async_req'), 
_return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | global_lease_update # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.global_lease_update_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: An application specific resource identity of a resource (required)
:param LeasesGlobalLease body: (required)
:return: LeasesUpdateGlobalLeaseResponse
If the method is called asynchronously,
returns the request thread. | 625941c11d351010ab855aac |
def realify_vector(vec): <NEW_LINE> <INDENT> return real_part_of_vector(vec)+imag_part_of_vector(vec) | Is this used anywhere? | 625941c157b8e32f52483429 |
def test_email_router_send_email(self): <NEW_LINE> <INDENT> pass | Test case for email_router_send_email
| 625941c1f548e778e58cd50c |
def update_nested_user(data, user, updating_fields): <NEW_LINE> <INDENT> for field in updating_fields: <NEW_LINE> <INDENT> setattr(user, field, data.get(field, user.username)) <NEW_LINE> <DEDENT> if 'password' in updating_fields: <NEW_LINE> <INDENT> new_password = data.get('password', user.password) <NEW_LINE> password_is_valid = new_password == user.password or user.check_password(new_password) <NEW_LINE> if not password_is_valid: <NEW_LINE> <INDENT> user.set_password(new_password) <NEW_LINE> <DEDENT> <DEDENT> user.save() | Update nested User object
:param data: updating data
:param user: updating User object
:param updating_fields: updating fields list
:return: | 625941c1293b9510aa2c3227 |
def __init__(self, A, G, Q, R): <NEW_LINE> <INDENT> self.A, self.G, self.Q, self.R = map(self.convert, (A, G, Q, R)) <NEW_LINE> self.k, self.n = self.G.shape | Provides initial parameters describing the state space model
x_{t+1} = A x_t + w_{t+1} (w_t ~ N(0, Q))
y_t = G x_t + v_t (v_t ~ N(0, R))
Parameters
============
All arguments should be scalars or array_like
* A is n x n
* Q is n x n, symmetric and nonnegative definite
* G is k x n
* R is k x k, symmetric and nonnegative definite | 625941c15166f23b2e1a50e9 |
def getNumLayers(self): <NEW_LINE> <INDENT> return len(self.layers) | Return the number of layers in this velocity model.
:rtype: int | 625941c171ff763f4b549617 |
def setup(): <NEW_LINE> <INDENT> setFormat() <NEW_LINE> setFilename() <NEW_LINE> setScreenMode() | Convenience function for setting up screen for interactive use. | 625941c18c3a873295158348 |
def recursive_file_count(files): <NEW_LINE> <INDENT> if not isinstance(files, (list, set)): <NEW_LINE> <INDENT> files = [files] <NEW_LINE> <DEDENT> total_files = 0 <NEW_LINE> for f in files: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> is_dir = os.path.isdir(f) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = f[0] <NEW_LINE> is_dir = os.path.isdir(f) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> is_dir = False <NEW_LINE> <DEDENT> <DEDENT> if is_dir: <NEW_LINE> <INDENT> for x, _ in iter_directory(f): <NEW_LINE> <INDENT> total_files += 1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> total_files += 1 <NEW_LINE> <DEDENT> <DEDENT> return total_files | Given a filepath or list of filepaths, return the total number of files. | 625941c1293b9510aa2c3228 |
def rotate_tensor( tensor: tf.Tensor, angles: tf.Tensor, rotation_axis: int, ) -> tf.Tensor: <NEW_LINE> <INDENT> tensor_shape = tensor.shape.as_list() <NEW_LINE> if len(tensor_shape) < 4: <NEW_LINE> <INDENT> raise ValueError( "`tensor` must have rank at least 4, got {}.".format(len(tensor_shape))) <NEW_LINE> <DEDENT> if rotation_axis < 0: <NEW_LINE> <INDENT> raise ValueError( "`rotation_axis` must be positive got {}".format(rotation_axis)) <NEW_LINE> <DEDENT> if len(tensor_shape) - rotation_axis < 4: <NEW_LINE> <INDENT> raise ValueError( "`rotation_axis` must be a batch dimension (last 3 axes, are reserved " "for `[height, width, channel]`)." ) <NEW_LINE> <DEDENT> if angles.shape.ndims != 1: <NEW_LINE> <INDENT> raise ValueError( "`angles` must be a 1D list. Got {}.".format(angles.shape)) <NEW_LINE> <DEDENT> if angles.shape.as_list()[0] != tensor_shape[rotation_axis]: <NEW_LINE> <INDENT> raise ValueError("`angles` length must equal `rotation_axis` shape." "Got {} and {}.".format( angles.shape.as_list(), tensor_shape[rotation_axis])) <NEW_LINE> <DEDENT> tensor_shape = [-1 if dim is None else dim for dim in tensor_shape] <NEW_LINE> axes = [(axis, shape) for axis, shape in enumerate(tensor_shape)] <NEW_LINE> transpose = [axes.pop(rotation_axis)] <NEW_LINE> transpose += [axes.pop(axis) for axis in range(-3, 0)] <NEW_LINE> transpose += axes <NEW_LINE> tensor = tf.transpose(tensor, [axis for axis, _ in transpose]) <NEW_LINE> tensor = tf.reshape( tensor, ([shape for _, shape in transpose[:3]] + [max(reduce(mul, [shape for _, shape in transpose[3:]], 1), -1)]) ) <NEW_LINE> tensor = tf.contrib.image.rotate(tensor, angles, "BILINEAR") <NEW_LINE> tensor = tf.reshape(tensor, [shape for _, shape in transpose]) <NEW_LINE> inverse_transpose = _reverse_transpose_sequence( [axis for axis, _ in transpose]) <NEW_LINE> return tf.transpose(tensor, inverse_transpose) | Rotates a `tf.Tensor` along a given batch dimension.
This function broadcasts a 2D rotation on a `tf.Tensor` with arbitrary
batch dimensions. The rotation is applied to each image or batch of images
where the batching dimension is set by `rotation_axis`. The rotation is
given in radians applied in a counterclockwise manner.
Explicitly:
tensor = ... # Tensor of shape `[B_1, B_2, ... , H, W, C]`
rotate_tensor(tensor, [R_1, R_2 ... ], rotation_axis = 1)
# Returns tensor where:
# [:, 0, ..., H, W, C] `H` and `W` dimensions are rotated by `R_1`.
# [:, 1, ..., H, W, C] `H` and `W` dimensions are rotated by `R_2`.
# Etc.
This function is used when there is a separate the rotation axis along with
one or more batch dimensions.
Args:
tensor: `tf.Tensor` of shape
`batch_dimensions + [height, width, channels]`.
angles: `tf.Tensor` of shape `[rotation_axis_dimension]` describing the
rotations to be applied to each batch along the rotation axis in radians.
rotation_axis: Int indicating rotation axis.
Returns:
`tf.Tensor` of same shape as `tensor` with rotation applied.
Raises:
ValueError: If input parameters are invalid. | 625941c192d797404e304119 |
def swap_results(wdir,name): <NEW_LINE> <INDENT> rank_path = os.path.join(wdir,"results") <NEW_LINE> if not os.path.isdir(rank_path):os.mkdir(rank_path) <NEW_LINE> rank_list = os.path.join(wdir,"rank_results.txt") <NEW_LINE> nrank_list = os.path.join(rank_path,"rank_results_%s.txt" % name) <NEW_LINE> shutil.move(rank_list,nrank_list) | Swap the results file for the current languages
:param wdir: the working directory
:param name: the name | 625941c13c8af77a43ae372e |
def clearBlockCache(self) -> "void": <NEW_LINE> <INDENT> return _PolyVoxCore.LargeVolumeint8_clearBlockCache(self) | clearBlockCache(LargeVolumeint8 self) | 625941c1bde94217f3682d83 |
def get_profile_dict(self): <NEW_LINE> <INDENT> avt = '' <NEW_LINE> if self.avatar.name != '' and self.avatar.name is not None: <NEW_LINE> <INDENT> avt = settings.FILES_PREFIX + '/' + str(self.avatar.name) <NEW_LINE> <DEDENT> ret = {'name': self.fullname, 'phone': self.phone, 'userid': self.userid, 'avatar': avt, 'digits_id': self.digitsid, 'version': self.version, 'email': self.email, 'email_verified': True if self.email_verified == '0' and self.email is not None else False} <NEW_LINE> return ret | :return: Get user profile dictionary | 625941c1283ffb24f3c55893 |
def read_fits_nparray(self, name = 'test.fit', number = 0): <NEW_LINE> <INDENT> import pyfits <NEW_LINE> _file =self. workspace + name <NEW_LINE> _fits = pyfits.open(_file) <NEW_LINE> _header = _fits[number].header <NEW_LINE> _arr = _fits[number].data <NEW_LINE> _arr = _arr[0,:,:] <NEW_LINE> return _arr, _header | Read .fits file from iStar camera
name (str): file name
number (int): number of hdulist (usually 0)
Returns:
_header (pyfits.header.Header): dictionary type something
_arr (numpy.ndarray): numpy array | 625941c1507cdc57c6306c66 |
def should_be_copied(self): <NEW_LINE> <INDENT> return self.status != NodeDiff.DELETED and not self.reprocess_new_node | Indicates whether this node should be copied from the previous recipe during a reprocess
:returns: Whether this node should be copied
:rtype: bool | 625941c1a219f33f346288fc |
def set_node_type(self, id, type): <NEW_LINE> <INDENT> raise NotImplementedError("Method has to be implemented") | Set the type of the node "type". | 625941c1adb09d7d5db6c721 |
def longestUnivaluePath(self, root): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> self.ans = 0 <NEW_LINE> def findLength(root, value): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> left = findLength(root.left, root.val) <NEW_LINE> right = findLength(root.right, root.val) <NEW_LINE> self.ans = max(self.ans, left + right) <NEW_LINE> if root.val != value: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return max(left, right) + 1 <NEW_LINE> <DEDENT> <DEDENT> findLength(root, root.val) <NEW_LINE> return self.ans | :type root: TreeNode
:rtype: int | 625941c14527f215b584c3ea |
def analyze_structures(self, structures, step_freq=10, most_frequent_polyhedra=15): <NEW_LINE> <INDENT> voro_dict = {} <NEW_LINE> step = 0 <NEW_LINE> for structure in structures: <NEW_LINE> <INDENT> step += 1 <NEW_LINE> if step % step_freq != 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> v = [] <NEW_LINE> for n in range(len(structure)): <NEW_LINE> <INDENT> v.append(str(self.analyze(structure, n=n).view())) <NEW_LINE> <DEDENT> for voro in v: <NEW_LINE> <INDENT> if voro in voro_dict: <NEW_LINE> <INDENT> voro_dict[voro] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> voro_dict[voro] = 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return sorted(voro_dict.items(), key=lambda x: (x[1], x[0]), reverse=True)[:most_frequent_polyhedra] | Perform Voronoi analysis on a list of Structures.
Note that this might take a significant amount of time depending on the
size and number of structures.
Args:
structures (list): list of Structures
cutoff (float: cutoff distance around an atom to search for
neighbors
step_freq (int): perform analysis every step_freq steps
qhull_options (str): options to pass to qhull
most_frequent_polyhedra (int): this many unique polyhedra with
highest frequences is stored.
Returns:
A list of tuples in the form (voronoi_index,frequency) | 625941c121bff66bcd6848e5 |
def letter_combinations(self, digits): <NEW_LINE> <INDENT> if not digits: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> TABLE = { '2': 'abc', '3': 'def', '4': 'ghi', '5': 'jkl', '6': 'mno', '7': 'pqrs', '8': 'tuv', '9': 'wxyz', } <NEW_LINE> letters = [TABLE[d] for d in digits] <NEW_LINE> result = [] <NEW_LINE> self.helper(letters, 0, result) <NEW_LINE> return list(filter(lambda x: len(x) == len(letters), result)) | :type digits: str
:rtype: List[str] | 625941c19c8ee82313fbb705 |
def withTestSet(self, testset): <NEW_LINE> <INDENT> self.__specification.setTestSet(testset) <NEW_LINE> return self | Set the test set
@param testset: UQSetting | 625941c1baa26c4b54cb10b2 |
def extentions(app): <NEW_LINE> <INDENT> db.init_app(app) <NEW_LINE> return None | Initialize the Flask-Login extension (mutates the app passed in).
:param app: Flask application instance
:param user_model: Model that contains the authentication information
:type user_model: SQLAlchemy model
:return: None | 625941c1d99f1b3c44c67524 |
def scene(self): <NEW_LINE> <INDENT> return QGraphicsScene | QGraphicsView.scene() -> QGraphicsScene | 625941c1c4546d3d9de729c2 |
def do_show(self, args): <NEW_LINE> <INDENT> tokens = args.split() <NEW_LINE> stored_keys = storage.all() <NEW_LINE> if len(tokens) < 1: <NEW_LINE> <INDENT> print("** class name missing **") <NEW_LINE> return <NEW_LINE> <DEDENT> if len(tokens) == 1: <NEW_LINE> <INDENT> print("** instance id missing **") <NEW_LINE> return <NEW_LINE> <DEDENT> if tokens[0] not in HBNBCommand.all_classes: <NEW_LINE> <INDENT> print("** class doesn't exist **") <NEW_LINE> return <NEW_LINE> <DEDENT> if len(tokens) >= 2: <NEW_LINE> <INDENT> key = tokens[0] + "." + tokens[1] <NEW_LINE> if key in stored_keys: <NEW_LINE> <INDENT> show_output = stored_keys[key] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> print(show_output) <NEW_LINE> <DEDENT> except UnboundLocalError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if key not in stored_keys: <NEW_LINE> <INDENT> print("** no instance found **") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return | show string representation of an instance | 625941c131939e2706e4cdfd |
def convertDict2Attrs(self, *args, **kwargs): <NEW_LINE> <INDENT> for n, c in enumerate(self.attrs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> params = self.params <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> params = {} <NEW_LINE> <DEDENT> kwargs.update(params) <NEW_LINE> try: <NEW_LINE> <INDENT> self.mambuclientclass <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.mambuclientclass = MambuClient <NEW_LINE> <DEDENT> client = self.mambuclientclass(urlfunc=None, entid=None, *args, **kwargs) <NEW_LINE> client.init(c, *args, **kwargs) <NEW_LINE> client._MambuStruct__urlfunc = getclienturl <NEW_LINE> self.attrs[n] = client | The trick for iterable Mambu Objects comes here:
You iterate over each element of the responded List from Mambu,
and create a Mambu Client object for each one, initializing them
one at a time, and changing the attrs attribute (which just
holds a list of plain dictionaries) with a MambuClient just
created.
.. todo:: pass a valid (perhaps default) urlfunc, and its
corresponding id to entid to each MambuClient, telling
MambuStruct not to connect() by default. It's desirable to
connect at any other further moment to refresh some element in
the list. | 625941c123e79379d52ee4f6 |
def running_notify_fun(name, run_id, node_id, node_name, pipeline_obj): <NEW_LINE> <INDENT> print('pipeline [%s] [%s] start run node [%s: %s]' % ( name, run_id, node_id, node_name )) | 节点运行通知函数 | 625941c1498bea3a759b9a40 |
def link_end_point(self, *parents): <NEW_LINE> <INDENT> self.repeater.link_from(*parents) <NEW_LINE> self.end_point.link_from(*parents) <NEW_LINE> return self.end_point | Links the existing :class:`veles.workflow.EndPoint` and
:class:`veles.workflow.Repeater` with \*parents.
Returns :class:`veles.workflow.EndPoint` instance.
Arguments:
parents: units to link this one from. | 625941c17d43ff24873a2c2f |
def create_parameter(samples, sample_period): <NEW_LINE> <INDENT> parm_kind_str = 'USER' <NEW_LINE> parm_kind = _htk_str_to_param(parm_kind_str) <NEW_LINE> parm_kind_base, parm_kind_opts = _htk_str_to_param(parm_kind_str) <NEW_LINE> meta = ParameterMeta(n_samples=len(samples), samp_period=sample_period, samp_size=len(samples[0]) * 4, parm_kind_str=parm_kind_str, parm_kind=parm_kind, parm_kind_base=parm_kind_base, parm_kind_opts=parm_kind_opts) <NEW_LINE> return Parameter(meta=meta, samples=np.array(samples)) | Create a HTK Parameter object from an array of samples and a samples period
:param samples (list of lists or array of floats): The samples to write into the file. Usually feature vectors.
:param sample_period (int): Sample period in 100ns units. | 625941c107f4c71912b11411 |
def write_safe(data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> file.write(data) <NEW_LINE> <DEDENT> except UnicodeEncodeError: <NEW_LINE> <INDENT> if replace_errors: <NEW_LINE> <INDENT> enc = getattr(file, 'encoding', sys.getdefaultencoding()) <NEW_LINE> file.write(data.encode(enc, errors='replace').decode(enc)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise | Write the input str to the file and, if an encoding error occurs and
replace_errors is ``True``, remove invalid code points and print again. | 625941c1dc8b845886cb54c4 |
def entity_container_status_by_id_get_with_http_info(self, id, **kwargs): <NEW_LINE> <INDENT> all_params = ['id', 'fields'] <NEW_LINE> all_params.append('async_req') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in six.iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method entity_container_status_by_id_get" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('id' not in params or params['id'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `id` when calling `entity_container_status_by_id_get`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> if 'id' in params: <NEW_LINE> <INDENT> path_params['id'] = params['id'] <NEW_LINE> <DEDENT> query_params = [] <NEW_LINE> if 'fields' in params: <NEW_LINE> <INDENT> query_params.append(('fields', params['fields'])) <NEW_LINE> <DEDENT> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) <NEW_LINE> header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/xml']) <NEW_LINE> auth_settings = ['basic'] <NEW_LINE> return self.api_client.call_api( '/entity/container_status/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), 
_request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | EntityContainerStatusById_GET # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.entity_container_status_by_id_get_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: (required)
:param str fields:
:return: object
If the method is called asynchronously,
returns the request thread. | 625941c16aa9bd52df036d33 |
def clean_up_launch_config(launch_config): <NEW_LINE> <INDENT> reject = [ "CreatedTime", "KernelId", "LaunchConfigurationARN", "RamdiskId", "BlockDeviceMappings", ] <NEW_LINE> for key in reject: <NEW_LINE> <INDENT> del launch_config[key] <NEW_LINE> <DEDENT> for key in [k for k, v in launch_config.items() if not v]: <NEW_LINE> <INDENT> del launch_config[key] <NEW_LINE> <DEDENT> return launch_config | Remove unused or empty keys from launch config. | 625941c191f36d47f21ac481 |
def compare_databases(self, filename1, filename2): <NEW_LINE> <INDENT> db1, db2, page = None, None, None <NEW_LINE> if filename1 and filename2: <NEW_LINE> <INDENT> db1 = self.load_database(filename1) <NEW_LINE> <DEDENT> if db1: <NEW_LINE> <INDENT> db2 = self.load_database(filename2) <NEW_LINE> <DEDENT> if db1 and db2: <NEW_LINE> <INDENT> dbset = set((db1, db2)) <NEW_LINE> pp = list(filter(lambda i: i and set([i.db1, i.db2]) == dbset, self.merger_pages)) <NEW_LINE> page = pp[0] if pp else None <NEW_LINE> if not page: <NEW_LINE> <INDENT> f1, f2 = filename1, filename2 <NEW_LINE> main.log("Merge page for %s and %s.", db1, db2) <NEW_LINE> page = MergerPage(self.notebook, db1, db2, self.get_unique_tab_title("Database comparison")) <NEW_LINE> self.merger_pages[page] = (db1, db2) <NEW_LINE> self.UpdateAccelerators() <NEW_LINE> conf.save() <NEW_LINE> <DEDENT> <DEDENT> elif db1 or db2: <NEW_LINE> <INDENT> for db in filter(None, [db1, db2]): <NEW_LINE> <INDENT> if not db.has_consumers(): <NEW_LINE> <INDENT> main.log("Closed database %s." % db.filename) <NEW_LINE> del self.dbs[db.filename] <NEW_LINE> db.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if page: <NEW_LINE> <INDENT> for i in range(self.notebook.GetPageCount()): <NEW_LINE> <INDENT> if self.notebook.GetPage(i) == page: <NEW_LINE> <INDENT> self.notebook.SetSelection(i) <NEW_LINE> self.update_notebook_header() <NEW_LINE> break | Opens the two databases for comparison, if possible. | 625941c1cdde0d52a9e52fc1 |
def create_threads(thread_info): <NEW_LINE> <INDENT> titles, links = [], [] <NEW_LINE> x = 0 <NEW_LINE> for item in thread_info: <NEW_LINE> <INDENT> if x % 2 == 0: <NEW_LINE> <INDENT> titles.append(item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> links.append(item) <NEW_LINE> <DEDENT> x += 1 <NEW_LINE> <DEDENT> for thread in range(len(titles)): <NEW_LINE> <INDENT> title, link = titles[thread], links[thread] <NEW_LINE> r.submit(subreddit, title, url=link, captcha=None) | -- Exclusively used by thread posting portion of bot. --
Post thread(s) to desired subreddit and then waits for next loop. | 625941c18c0ade5d55d3e949 |
def config_post_configs_deprecated(self, tenant_id, user_id, data, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> return self.config_post_configs_deprecated_with_http_info(tenant_id, user_id, data, **kwargs) | config_post_configs_deprecated # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.config_post_configs_deprecated(tenant_id, user_id, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str tenant_id: (required)
:param str user_id: (required)
:param NewBatchConfigData data: (required)
:param str sub_tree_path:
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: BatchCreateConfigsResult
If the method is called asynchronously,
returns the request thread. | 625941c107d97122c4178818 |
def ts_min(df, window=10): <NEW_LINE> <INDENT> return df.rolling(window).min().fillna(method='backfill') | Wrapper function to estimate rolling min.
:param df: a pandas DataFrame.
:param window: the rolling window.
:return: a pandas DataFrame with the time-series min over the past 'window' days. | 625941c13539df3088e2e2dc |
def create_New_Toplevel_1(root, *args, **kwargs): <NEW_LINE> <INDENT> global w, w_win, rt <NEW_LINE> rt = root <NEW_LINE> w = Toplevel (root) <NEW_LINE> top = New_Toplevel_1(w) <NEW_LINE> return (w, top) | Starting point when module is imported by another program. | 625941c1cad5886f8bd26f6a |
def __virtual__(): <NEW_LINE> <INDENT> if __grains__['os'] == 'Gentoo' and salt.utils.which('layman'): <NEW_LINE> <INDENT> return 'layman' <NEW_LINE> <DEDENT> return False | Only work on Gentoo systems with layman installed | 625941c1956e5f7376d70dff |
def add_reply(self, reply): <NEW_LINE> <INDENT> self._write_message(self.fs_replies_path, 'message_001.txt', dumps(reply)) | Add an editorial reply to the drop box.
:param reply: the message, must conform to :class:`views.DropboxReplySchema` | 625941c14e696a04525c93dd |
def capacity(self): <NEW_LINE> <INDENT> return _contest._wrap_PlanetList_capacity(self) | capacity(self) -> std::vector< Planet >::size_type
Parameters:
self: std::vector< Planet > const * | 625941c192d797404e30411a |
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'brightspot_project.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv) | Run administrative tasks. | 625941c1d8ef3951e32434ce |
def default_panels(store, case_obj): <NEW_LINE> <INDENT> default_panels = [] <NEW_LINE> for panel in case_obj.get("panels", []): <NEW_LINE> <INDENT> if not panel.get("is_default"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> panel_obj = store.gene_panel(panel["panel_name"], panel.get("version")) <NEW_LINE> if not panel_obj: <NEW_LINE> <INDENT> LOG.warning( "Panel {0} version {1} could not be found".format( panel["panel_name"], panel.get("version") ) ) <NEW_LINE> continue <NEW_LINE> <DEDENT> default_panels.append(panel_obj) <NEW_LINE> <DEDENT> return default_panels | Return the panels that are decided to be default for a case.
Check what panels that are default, fetch those and add them to a list.
Args:
store(scout.adapter.MongoAdapter)
case_obj(scout.models.Case)
Returns:
default_panels(list(dict)) | 625941c160cbc95b062c64d3 |
def endPath(self): <NEW_LINE> <INDENT> assert self.currentPath is not None <NEW_LINE> points = list(self.currentPath) <NEW_LINE> self._flushContour(points) <NEW_LINE> self.currentPath = None | End the current sub path. | 625941c16e29344779a625a5 |
def train_svm(self,file_name): <NEW_LINE> <INDENT> svm_params = dict(kernel_type = cv2.SVM_LINEAR, svm_type = cv2.SVM_NU_SVC, nu=.105) <NEW_LINE> with np.load(self.PARAMS_PATH + '/params/' + file_name + '.npz') as input_data: <NEW_LINE> <INDENT> print('MSG: training data length: ' + str(len( input_data['train']))) <NEW_LINE> print('MSG: training data length: ' + str(len(input_data['train_labels']))) <NEW_LINE> train_data = input_data['train'] <NEW_LINE> data_labels = input_data['train_labels'] <NEW_LINE> <DEDENT> SVM = cv2.SVM() <NEW_LINE> SVM.train(train_data,data_labels,params=svm_params) <NEW_LINE> return SVM | DOCSTRING:
Given .npz file of training data and labels, initializes, sets
parameters/data for, and trains SVM to distinguish between chars in
provided test data; returns SVM | 625941c15fdd1c0f98dc01c3 |
def _prog_comm(self, prog_id, cmd) : <NEW_LINE> <INDENT> xurl = "/rest/programs/" + prog_id + "/" + cmd <NEW_LINE> if self.debug & 0x02 : <NEW_LINE> <INDENT> print("xurl = " + xurl) <NEW_LINE> <DEDENT> resp = self._getXMLetree(xurl) <NEW_LINE> if resp.attrib["succeeded"] != 'true' : <NEW_LINE> <INDENT> raise IsyResponseError("ISY command error : prog_id=" + str(prog_id) + " cmd=" + str(cmd)) | called by prog_comm() after argument validation | 625941c1379a373c97cfaad4 |
def rational_scale( data: Sequence[float], factor: float, direc: Direction = Direction.UP ) -> List[float]: <NEW_LINE> <INDENT> return [_rational_scale(x, factor, direc) for x in data] | Scale a sequence of numbers based on the ``rational_scale``.
The ``rational_scale`` is basically a transformation of the simple rational
function in the form :math:`f(x) = \frac{1}{x}` such that :math:`f(0) = 0`
and :math:`f(1) = 1`. Some math derives the formula of the rational curve
that fits the above conditions as:
.. math:: s(x) = \frac{ax}{p(ax + p)}
where :math:`a` is the scaling factor, and:
.. math:: p = \frac{-a + \sqrt{a(a + 4)}}{2}
for the curve scaling up and:
.. math:: p = \frac{-a - \sqrt{a(a + 4)}}{2}
for the curve scaling down.
This curve is symmetrical on :math:`y = -x + b` which may or may not have a
practical benefit, but the definite benefit is that it looks nice. :D
:param data: The sequence of floats to scale. The floats must be between
zero and one, and will also be scaled within that range.
:param factor: A positive float representing the scaling factor. A larger
number results in more scaling. A scaling factor of 0 has no effect.
:param direc: A Direction type signifying which direction to scale the
marks.
:return: The sequence of floats after scaling. The floats in the sequence
remain in the order they were given and the ``data`` is not modified. | 625941c115fb5d323cde0a9d |
def test_naive_floor_day(self): <NEW_LINE> <INDENT> t = datetime.datetime(2013, 3, 4, 12, 23, 4, 40) <NEW_LINE> t = fleming.floor(t, day=1) <NEW_LINE> self.assertEquals(t, datetime.datetime(2013, 3, 4)) | Tests flooring a naive datetime to a day. Return value is naive. | 625941c1796e427e537b0555 |
def forward(self, enc_hs_pad, enc_hs_len, dec_z, att_prev_states, scaling=2.0): <NEW_LINE> <INDENT> batch = len(enc_hs_pad) <NEW_LINE> if self.pre_compute_enc_h is None: <NEW_LINE> <INDENT> self.enc_h = enc_hs_pad <NEW_LINE> self.h_length = self.enc_h.size(1) <NEW_LINE> self.pre_compute_enc_h = self.mlp_enc(self.enc_h) <NEW_LINE> <DEDENT> if dec_z is None: <NEW_LINE> <INDENT> dec_z = enc_hs_pad.new_zeros(batch, self.dunits) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dec_z = dec_z.view(batch, self.dunits) <NEW_LINE> <DEDENT> if att_prev_states is None: <NEW_LINE> <INDENT> att_prev = to_device(self, (1. - make_pad_mask(enc_hs_len).float())) <NEW_LINE> att_prev = att_prev / att_prev.new(enc_hs_len).unsqueeze(-1) <NEW_LINE> att_h = enc_hs_pad.new_zeros(batch, self.att_dim) <NEW_LINE> att_c = enc_hs_pad.new_zeros(batch, self.att_dim) <NEW_LINE> att_states = (att_h, att_c) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> att_prev = att_prev_states[0] <NEW_LINE> att_states = att_prev_states[1] <NEW_LINE> <DEDENT> att_conv = self.loc_conv(att_prev.view(batch, 1, 1, self.h_length)) <NEW_LINE> att_conv = F.relu(att_conv) <NEW_LINE> att_conv = F.max_pool2d(att_conv, (1, att_conv.size(3))).view(batch, -1) <NEW_LINE> att_h, att_c = self.att_lstm(att_conv, att_states) <NEW_LINE> dec_z_tiled = self.mlp_dec(dec_z).view(batch, 1, self.att_dim) <NEW_LINE> e = self.gvec(torch.tanh(att_h.unsqueeze(1) + self.pre_compute_enc_h + dec_z_tiled)).squeeze(2) <NEW_LINE> if self.mask is None: <NEW_LINE> <INDENT> self.mask = to_device(self, make_pad_mask(enc_hs_len)) <NEW_LINE> <DEDENT> e.masked_fill_(self.mask, -float('inf')) <NEW_LINE> w = F.softmax(scaling * e, dim=1) <NEW_LINE> c = torch.sum(self.enc_h * w.view(batch, self.h_length, 1), dim=1) <NEW_LINE> return c, (w, (att_h, att_c)) | AttLocRec forward
:param torch.Tensor enc_hs_pad: padded encoder hidden state (B x T_max x D_enc)
:param list enc_hs_len: padded encoder hidden state length (B)
:param torch.Tensor dec_z: decoder hidden state (B x D_dec)
:param tuple att_prev_states: previous attention weight and lstm states
((B, T_max), ((B, att_dim), (B, att_dim)))
:param float scaling: scaling parameter before applying softmax
:return: attention weighted encoder state (B, D_enc)
:rtype: torch.Tensor
:return: previous attention weights and lstm states (w, (hx, cx))
((B, T_max), ((B, att_dim), (B, att_dim)))
:rtype: tuple | 625941c1d18da76e23532464 |
def DistributedFairseqModel(args, model, process_group=None): <NEW_LINE> <INDENT> assert isinstance(model, nn.Module) <NEW_LINE> if args.ddp_backend == 'c10d': <NEW_LINE> <INDENT> ddp_class = nn.parallel.DistributedDataParallel <NEW_LINE> init_kwargs = dict( module=model, device_ids=[args.device_id], output_device=args.device_id, broadcast_buffers=args.broadcast_buffers, bucket_cap_mb=args.bucket_cap_mb, process_group=process_group, ) <NEW_LINE> if 'check_reduction' in inspect.getargspec(ddp_class)[0]: <NEW_LINE> <INDENT> init_kwargs['check_reduction'] = True <NEW_LINE> <DEDENT> if 'find_unused_parameters' in inspect.getargspec(ddp_class)[0]: <NEW_LINE> <INDENT> init_kwargs['find_unused_parameters'] = args.find_unused_parameters <NEW_LINE> <DEDENT> <DEDENT> elif args.ddp_backend == 'no_c10d': <NEW_LINE> <INDENT> ddp_class = LegacyDistributedDataParallel <NEW_LINE> init_kwargs = dict( module=model, world_size=args.distributed_world_size, buffer_size=2**28, process_group=process_group, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unknown --ddp-backend: ' + args.ddp_backend) <NEW_LINE> <DEDENT> class _DistributedFairseqModel(ddp_class): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> wrapped_module = super().__getattr__('module') <NEW_LINE> if hasattr(wrapped_module, name): <NEW_LINE> <INDENT> return getattr(wrapped_module, name) <NEW_LINE> <DEDENT> return super().__getattr__(name) <NEW_LINE> <DEDENT> <DEDENT> return _DistributedFairseqModel(**init_kwargs) | Wrap a *model* to support distributed data parallel training.
This is similar to the built-in DistributedDataParallel, but allows
additional configuration of the DistributedDataParallel class to
use, and also provides easier access to the wrapped model by
forwarding requests for missing attributes to the wrapped model.
Args:
args (argparse.Namespace): fairseq args
model (BaseFairseqModel): model to wrap | 625941c18c0ade5d55d3e94a |
def checkUnsettledBet(self, pin_bet_id) : <NEW_LINE> <INDENT> response = self.api.betting.get_bets(betids = pin_bet_id) <NEW_LINE> return response['betStatus'], response | Checking the status of a unsettled bet
Args:
pin_bet_id : Bet id
Returns:
status : the status of the bet
response : the complete response for the bookie | 625941c15e10d32532c5eeb8 |
@app.route('/trump.html') <NEW_LINE> def trump_vis(): <NEW_LINE> <INDENT> with open('/home/ubuntu/flaskapp/trump.html','r') as trump_vis: <NEW_LINE> <INDENT> return trump_vis.read() | Home page for trump visualization | 625941c194891a1f4081ba39 |
def __repr__(self): <NEW_LINE> <INDENT> return str(self) | Returns the a str represnetion of the property
with it's name and value respentively | 625941c1de87d2750b85fd21 |
def test_poller_bad_node(self): <NEW_LINE> <INDENT> polled_response = api_validate_node_pollers(self.__client, ['foo'], all_pollers=True) <NEW_LINE> self.assertFalse(polled_response, msg='Got True for bad poller (foo)') | validates test utility method api_validate_node_pollers() will detect pollers for invalid node | 625941c1aad79263cf3909cf |
def setNumColors(qimage, color_count): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> qimage.setNumColors(color_count) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> qimage.setColorCount(color_count) | Compatibility function btw. PyQt4 and PyQt5 | 625941c1bf627c535bc1315f |
def __list_pack_teams(self, pack_id, **kwargs): <NEW_LINE> <INDENT> kwargs["async_req"] = kwargs.get("async_req", False) <NEW_LINE> kwargs["_return_http_data_only"] = kwargs.get( "_return_http_data_only", True ) <NEW_LINE> kwargs["_preload_content"] = kwargs.get("_preload_content", True) <NEW_LINE> kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) <NEW_LINE> kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) <NEW_LINE> kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) <NEW_LINE> kwargs["_host_index"] = kwargs.get("_host_index") <NEW_LINE> kwargs["pack_id"] = pack_id <NEW_LINE> return self.call_with_http_info(**kwargs) | Fetch all teams assigned to pack # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_pack_teams(pack_id, async_req=True)
>>> result = thread.get()
Args:
pack_id (str): A pack UUID or slug
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[TeamPack]
If the method is called asynchronously, returns the request
thread. | 625941c1a05bb46b383ec7b4 |
def __repr__(self): <NEW_LINE> <INDENT> return """And(%s, %s)""" % (self.value[0], self.value[1]) | String representation of "and" instance.
:rtype: string representation of the complete "and" condition | 625941c14d74a7450ccd4154 |
def reset_dialogue(self): <NEW_LINE> <INDENT> self.dialogue_root = self.xml_root <NEW_LINE> if not self.met: <NEW_LINE> <INDENT> self.met = True <NEW_LINE> <DEDENT> self.loop_message = None | Triggered when player leaves the room. | 625941c13c8af77a43ae372f |
def test_easy_bot_map1(self): <NEW_LINE> <INDENT> game = self.bot_test_map1(Difficulty.easy) <NEW_LINE> self.assertEqual(game.first_player.ask_for_move(), (0, 7)) | Test easy bot on map1 | 625941c2eab8aa0e5d26dae8 |
def set_matrix_spectrum(self): <NEW_LINE> <INDENT> self.logger.debug('set_matrix_spectrum') <NEW_LINE> effect = matrix_effect_spectrum(self.device) <NEW_LINE> effect.run() | Cycle through colors. | 625941c2d18da76e23532465 |
def bulidGradientNodes(self,outputGradientNode,lossNodeName): <NEW_LINE> <INDENT> gradientList=[] <NEW_LINE> for node in self.getInputNodes(): <NEW_LINE> <INDENT> gradientList.append([node,outputGradientNode]) <NEW_LINE> <DEDENT> return gradientList | bulid gradient nodes for BP
return [[input node,corresponding gradient node],...] | 625941c215fb5d323cde0a9e |
@mock_s3 <NEW_LINE> def test_outdated_manifest_warning(tmpdir, example_datasets_with_metadata): <NEW_LINE> <INDENT> bucket_name = 'outdated_manifest_bucket' <NEW_LINE> metadatasets = example_datasets_with_metadata['metadata'] <NEW_LINE> create_bucket(bucket_name, example_datasets_with_metadata['data'], metadatasets=metadatasets) <NEW_LINE> cache_dir = pathlib.Path(tmpdir) / 'cache' <NEW_LINE> cache = S3CloudCache(cache_dir, bucket_name, 'project-x') <NEW_LINE> m_warn_type = 'OutdatedManifestWarning' <NEW_LINE> with pytest.warns(OutdatedManifestWarning) as warnings: <NEW_LINE> <INDENT> cache.load_manifest('project-x_manifest_v7.0.0.json') <NEW_LINE> <DEDENT> ct = 0 <NEW_LINE> for w in warnings.list: <NEW_LINE> <INDENT> if w._category_name == m_warn_type: <NEW_LINE> <INDENT> msg = str(w.message) <NEW_LINE> assert 'is not the most up to date' in msg <NEW_LINE> assert 'S3CloudCache.compare_manifests' in msg <NEW_LINE> assert 'load_latest_manifest' in msg <NEW_LINE> ct += 1 <NEW_LINE> <DEDENT> <DEDENT> assert ct > 0 <NEW_LINE> with pytest.warns(None) as warnings: <NEW_LINE> <INDENT> cache.load_manifest('project-x_manifest_v11.0.0.json') <NEW_LINE> <DEDENT> if len(warnings) > 0: <NEW_LINE> <INDENT> for w in warnings.list: <NEW_LINE> <INDENT> assert w._category_name != 'OutdatedManifestWarning' | Test that a warning is raised the first time you try to load an outdated
manifest | 625941c2460517430c39411b |
def lint(filename, settings): <NEW_LINE> <INDENT> if not filename or not os.path.exists(filename): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> warnings = [] <NEW_LINE> if settings.get('pyflakes', True): <NEW_LINE> <INDENT> builtins = settings.get('builtins') <NEW_LINE> if builtins: <NEW_LINE> <INDENT> old_builtins = pyflakes.checker.Checker.builtIns <NEW_LINE> pyflakes.checker.Checker.builtIns = old_builtins.union(builtins) <NEW_LINE> <DEDENT> flakes_reporter = FlakesReporter() <NEW_LINE> pyflakes.api.checkPath(filename, flakes_reporter) <NEW_LINE> warnings.extend(flakes_reporter.errors) <NEW_LINE> <DEDENT> if settings.get('pep8', True): <NEW_LINE> <INDENT> pep8style = pep8.StyleGuide( reporter=Pep8Report, ignore=settings.get('ignore', []), max_line_length=settings.get('pep8_max_line_length') ) <NEW_LINE> pep8style.input_file(filename) <NEW_LINE> warnings.extend(pep8style.options.report.errors) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> complexity = int(settings.get('complexity', -1)) <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> complexity = -1 <NEW_LINE> <DEDENT> if complexity > -1: <NEW_LINE> <INDENT> warnings.extend(mccabe.get_module_complexity(filename, complexity)) <NEW_LINE> <DEDENT> return warnings | Run flake8 lint with internal interpreter. | 625941c2d10714528d5ffc72 |
def test_basic(self): <NEW_LINE> <INDENT> self.assertTrue(issubclass(sqlstr.MySQL, sqlstr.Base), "sqlstr.MySQL does not sub-class sqlstr.Base") | Test basic properties of sqlstr.MySQL class | 625941c2ac7a0e7691ed4062 |
def findRepeatedDnaSequences(self, s): <NEW_LINE> <INDENT> dictionary = {} <NEW_LINE> length = len(s) <NEW_LINE> result = [] <NEW_LINE> if length <= 10: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> for i in range(length-9): <NEW_LINE> <INDENT> seg = s[i:i+10] <NEW_LINE> if seg not in dictionary: <NEW_LINE> <INDENT> dictionary[seg] = True <NEW_LINE> <DEDENT> elif dictionary[seg]: <NEW_LINE> <INDENT> result.append(seg) <NEW_LINE> dictionary[seg] = False <NEW_LINE> <DEDENT> <DEDENT> return result | :type s: str
:rtype: List[str] | 625941c215baa723493c3f05 |
def __init__(self, encoding='utf8'): <NEW_LINE> <INDENT> self.encoding = encoding | Constructor.
| 625941c2d486a94d0b98e0d6 |
def get(self): <NEW_LINE> <INDENT> is_cloud_admin = self.helper.is_user_cloud_admin() <NEW_LINE> apps_user_is_admin_on = self.helper.get_owned_apps() <NEW_LINE> app_name = self.request.get("appid") <NEW_LINE> if (not is_cloud_admin) and (app_name not in apps_user_is_admin_on): <NEW_LINE> <INDENT> response = json.dumps({"error": True, "message": "Not authorized"}) <NEW_LINE> self.response.out.write(response) <NEW_LINE> return <NEW_LINE> <DEDENT> instance_info = self.helper.get_instance_info(app_id=app_name) <NEW_LINE> self.response.out.write(json.dumps(instance_info)) | Makes sure the user is allowed to see instance data for the named
application, and if so, retrieves it for them. | 625941c23d592f4c4ed1d004 |
def get(self, request, pk=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> id = int(pk) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Bad request."}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> url = host + 'api/services/id/' + str(id) + '/' <NEW_LINE> result = proxy(path=url, method='GET') <NEW_LINE> if result.status_code == 200: <NEW_LINE> <INDENT> return Response(status=status.HTTP_200_OK, data=result.data) <NEW_LINE> <DEDENT> elif result.status_code >= 400: <NEW_LINE> <INDENT> return Response(status=status.HTTP_404_NOT_FOUND, data={"detail": "Service not found."}) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Bad request."}) | Gets Service by given ID
<b>Details</b>
METHODS : GET
<b>RETURNS:</b>
- 200 OK
- 400 BAD REQUEST
- 404 NOT FOUND
---
omit_parameters:
- form | 625941c27047854f462a139d |
def __init__(self, argument): <NEW_LINE> <INDENT> config.log.critical("glbase expression objects must have unique names") <NEW_LINE> config.log.critical("for each condition.") <NEW_LINE> c = Counter(argument) <NEW_LINE> for k in c: <NEW_LINE> <INDENT> if c[k] == 2: <NEW_LINE> <INDENT> config.log.critical(' Duplicate condition Name: %s' % k) <NEW_LINE> <DEDENT> elif c[k] > 2: <NEW_LINE> <INDENT> config.log.critical(' Duplicate condition Name: %s, used %s times' % (k, c[k])) | Output the error message and tidy up the traceback, and perform other stuff. | 625941c2ff9c53063f47c186 |
def test_user_can_see_error_message_for_missing_data(self): <NEW_LINE> <INDENT> self.verify_and_navigate_to_create_team_page() <NEW_LINE> self.team_management_page.submit_form() <NEW_LINE> self.assertEqual( self.team_management_page.validation_message_text, 'Check the highlighted fields below and try again.' ) <NEW_LINE> self.assertTrue(self.team_management_page.error_for_field(field_id='name')) <NEW_LINE> self.assertTrue(self.team_management_page.error_for_field(field_id='description')) | Scenario: The user should be able to see error message in case of missing required field.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Create Team page for that topic
Then I should see the Create Team header and form
And When I click create team button without filling required fields
Then I should see the error message and highlighted fields. | 625941c25fc7496912cc390f |
def process_log_file(cur, filepath): <NEW_LINE> <INDENT> df = pd.read_json(filepath, lines=True) <NEW_LINE> df = df.query("page == 'NextSong'") <NEW_LINE> t = pd.to_datetime(df['ts'], unit = 'ms') <NEW_LINE> time_data = list((t, t.dt.hour, t.dt.day, t.dt.weekofyear, t.dt.month, t.dt.year, t.dt.weekday)) <NEW_LINE> column_labels = list(('start_time', 'hour', 'day', 'week', 'month', 'year', 'weekday')) <NEW_LINE> time_df = pd.DataFrame.from_dict(dict(zip(column_labels, time_data))) <NEW_LINE> for i, row in time_df.iterrows(): <NEW_LINE> <INDENT> cur.execute(time_table_insert, list(row)) <NEW_LINE> <DEDENT> user_df = df[["userId", "firstName", "lastName", "gender", "level"]] <NEW_LINE> for i, row in user_df.iterrows(): <NEW_LINE> <INDENT> cur.execute(user_table_insert, row) <NEW_LINE> <DEDENT> for index, row in df.iterrows(): <NEW_LINE> <INDENT> cur.execute(song_select, (row.song, row.artist, row.length)) <NEW_LINE> results = cur.fetchone() <NEW_LINE> if results: <NEW_LINE> <INDENT> songid, artistid = results <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> songid, artistid = None, None <NEW_LINE> <DEDENT> songplay_data = (pd.to_datetime(row.ts, unit='ms'), row.userId, row.level, songid, artistid, row.sessionId, row.location, row.userAgent) <NEW_LINE> cur.execute(songplay_table_insert, songplay_data) | process log files
input:
cur: the cursor object
filepath: the log file path
reuturn:
None | 625941c21d351010ab855aae |
def isSymmetric(self, root): <NEW_LINE> <INDENT> if root == None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> result = [] <NEW_LINE> singleLevel = [] <NEW_LINE> queue = [] <NEW_LINE> queue.append(root) <NEW_LINE> pos = 1 <NEW_LINE> while not len(queue) == 0: <NEW_LINE> <INDENT> curRoot = queue[0] <NEW_LINE> singleLevel.append(curRoot) <NEW_LINE> if not curRoot == None: <NEW_LINE> <INDENT> queue.append(curRoot.left) <NEW_LINE> queue.append(curRoot.right) <NEW_LINE> <DEDENT> del queue[0] <NEW_LINE> pos -= 1 <NEW_LINE> if pos == 0: <NEW_LINE> <INDENT> result.append(singleLevel) <NEW_LINE> singleLevel = [] <NEW_LINE> pos = len(queue) <NEW_LINE> <DEDENT> <DEDENT> for level in result: <NEW_LINE> <INDENT> i = 0 <NEW_LINE> j = len(level) - 1 <NEW_LINE> while i < j: <NEW_LINE> <INDENT> if (not level[i] == None) and (not level[j] == None): <NEW_LINE> <INDENT> if not level[i].val == level[j].val: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not level[i] == level[j]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> i += 1 <NEW_LINE> j -= 1 <NEW_LINE> <DEDENT> <DEDENT> return True | :type root: TreeNode
:rtype: List[List[int]] | 625941c2d486a94d0b98e0d7 |
def constant_coefficient(self): <NEW_LINE> <INDENT> return self._monomial_coefficients.get(self.parent().one_basis(), self.base_ring().zero()) | Return the constant coefficient of ``self``.
.. TODO::
Define a similar method for general Clifford algebras once
the morphism to exterior algebras is implemented.
EXAMPLES::
sage: E.<x,y,z> = ExteriorAlgebra(QQ)
sage: elt = 5*x + y + x*z + 10
sage: elt.constant_coefficient()
10
sage: x.constant_coefficient()
0 | 625941c26aa9bd52df036d34 |
def node_set_source_route(self, node_id_list):
    """Build the command that sets a source route to a remote node.

    Args:
        node_id_list: node ids (4 hexadecimal chars each), joined with ",".

    Returns:
        String with "AT+SR:<node_id>,<node_id>,...".

    Raises:
        ETRX3xATCommandException: on invalid node list type or content.
    """
    try:
        self.validate_node_list(node_id_list)
    except (ValueError, TypeError) as err:
        # Wrap validation failures in the AT-command exception type.
        raise ETRX3xATCommandException(
            "node_set_source_route: {}".format(err))
    return "AT+SR:{}".format(",".join(node_id_list))
def test_pm_write_auto_moderators_rejected(self):
    """Test the auto_moderators parameter, moderate as rejected. Test the parameter as a tuple."""
    # A moderator callable returning False rejects the message outright.
    pm_write(sender=self.user1, recipient=self.user2, subject='s', auto_moderators=(lambda m: False, ))
    m = Message.objects.get()
    # Rejection stamps both the moderation date and the recipient deletion date.
    self.check_status(m, status=STATUS_REJECTED, moderation_date=True, recipient_deleted_at=True)
    self.check_now(m.moderation_date)
    self.check_now(m.recipient_deleted_at)
    # A rejected message must not trigger any notification e-mail.
    self.assertEqual(len(mail.outbox), 0)
def show_stack(self, context, stack_identity):
    """Return the attributes of one stack, or of every tenant stack.

    arg1 -> RPC context.
    arg2 -> Name of the stack you want to see, or None to see all
    """
    if stack_identity is not None:
        stacks = [self._get_stack(context, stack_identity)]
    else:
        stacks = db_api.stack_get_by_tenant(context) or []

    def detail(s):
        # Reload the full stack from the DB and render it via the API layer.
        return api.format_stack(parser.Stack.load(context, s.id))

    return {'stacks': [detail(s) for s in stacks]}
def test_rings_detect_08(self):
    """Ring detection with param = [depth, capacity].

    Expects the asynchronous task to complete and report exactly 18 rings.
    """
    body = {"depth": 5, "capacity": 50}
    code, res = Algorithm().post_rings_detect(body, auth=auth)
    # Renamed from `id` to avoid shadowing the builtin.
    task_id = res["task_id"]
    if task_id > 0:
        result = get_task_res(task_id, 120, auth=auth)
        print(result)
        assert len(result['rings']) == 18
    else:
        # Fail loudly with context instead of the opaque `assert 0`.
        assert False, "post_rings_detect returned no valid task_id: {}".format(res)
def most_popular_gender(data_list):
    """Return which gender is most frequent in a list.

    Args:
        data_list: iterable with 'Male' and 'Female' entries, passed on
            to ``count_gender``.

    Returns:
        "Masculino" when males dominate, "Feminino" when females do,
        "Igual" on a tie.
    """
    counts = count_gender(data_list)
    # counts[0] is the male tally, counts[1] the female tally.
    if counts[0] > counts[1]:
        return "Masculino"
    if counts[0] == counts[1]:
        return "Igual"
    return "Feminino"
def shell_icon_changed(self, icon):
    """The change handler for the 'icon' attribute.

    Forwards the new icon to the toolkit widget and notifies the shell
    object that its size hint may have changed.
    """
    self.set_icon(icon)
    self.shell_obj.size_hint_updated()
def test_leave_one(self):
    """Ensure that max-old of 1 removes all but one files"""
    with temporary_dir() as cache_dir:
        # Artifact-cache directory for the zinc compile of this one target.
        artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main')
        # Seed the cache with five stale entries that should all be evicted.
        touch(os.path.join(artifact_dir, 'old_cache_test1'))
        touch(os.path.join(artifact_dir, 'old_cache_test2'))
        touch(os.path.join(artifact_dir, 'old_cache_test3'))
        touch(os.path.join(artifact_dir, 'old_cache_test4'))
        touch(os.path.join(artifact_dir, 'old_cache_test5'))
        config = {'cache.compile.zinc': {'write_to': [cache_dir]}}
        # First compile (platform 6): with --cache-max-entries-per-target=1
        # only a single cache entry may remain.
        pants_run = self.run_pants(self.create_platform_args(6) + ['compile.zinc', 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', '--cache-max-entries-per-target=1'], config=config)
        self.assert_success(pants_run)
        self.assertEqual(len(os.listdir(artifact_dir)), 1)
        # Second compile on a different platform (7) writes a new artifact;
        # the limit must still cap the directory at one entry.
        pants_run = self.run_pants(self.create_platform_args(7) + ['compile.zinc', 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', '--cache-max-entries-per-target=1'], config)
        self.assert_success(pants_run)
        self.assertEqual(len(os.listdir(artifact_dir)), 1)
def generate_board(self, size, start, gold_loc):
    """Build a ``size`` x ``size`` board with the agent and gold placed.

    Empty locations are 0, the agent is "A", and the gold is "g".  When
    ``gold_loc`` is None a random cell distinct from ``start`` is drawn.
    The chosen gold location is also recorded on ``self.gold_loc``.

    Args:
        size: board dimension (n rows by n columns).
        start: (row, col) of the agent; list or tuple.
        gold_loc: (row, col) of the gold, or None to place it randomly.

    Returns:
        The board as a list of lists.
    """
    board = [[0 for _ in range(size)] for _ in range(size)]
    while gold_loc is None:
        x, y = choice(range(size)), choice(range(size))
        # BUG FIX: the original compared the list [x, y] against `start`,
        # which is never equal when `start` is a tuple, so the gold could
        # land on the agent's start square.  Normalise both to tuples.
        if (x, y) != tuple(start):
            gold_loc = (x, y)
    board[gold_loc[0]][gold_loc[1]] = "g"
    self.gold_loc = gold_loc
    board[start[0]][start[1]] = "A"
    return board
def search_for_different_person(self):
    """Find the web element for Search for a different person link"""
    # Locate the link via the page-object locator tuple, then follow it.
    search_for_different_person_link = self.find_element(
        *Locators.SEARCH_FOR_DIFFERENT_PERSON)
    search_for_different_person_link.click()
def get_shortest_routes(
    route_db: fib_types.RouteDatabase
) -> (List[network_types.UnicastRoute], List[network_types.MplsRoute]):
    """Find all shortest routes for each prefix in routeDb

    For every route, keep only the next-hops whose metric equals the
    minimum metric of that route; routes with no next-hops are dropped.

    :param route_db: RouteDatabase

    :return (
        list of UnicastRoute of prefix & corresponding shortest nexthops
        list of MplsRoute of prefix & corresponding shortest nexthops
    )
    """
    def _min_metric_nexthops(next_hops):
        # Next-hops tied for the smallest metric (shared by both route kinds).
        min_metric = min(next_hops, key=lambda nh: nh.metric).metric
        return [nh for nh in next_hops if nh.metric == min_metric]

    # Sort for deterministic output.  (Removed the dead
    # `unicast_routes, mpls_routes = None, None` pre-assignment.)
    unicast_routes = sorted(
        route_db.unicastRoutes, key=lambda x: x.dest.prefixAddress.addr
    )
    mpls_routes = sorted(route_db.mplsRoutes, key=lambda x: x.topLabel)

    shortest_unicast_routes = []
    for route in unicast_routes:
        if not route.nextHops:
            continue
        next_hops = _min_metric_nexthops(route.nextHops)
        shortest_unicast_routes.append(
            network_types.UnicastRoute(
                dest=route.dest,
                deprecatedNexthops=[nh.address for nh in next_hops],
                nextHops=next_hops,
            )
        )

    shortest_mpls_routes = []
    for route in mpls_routes:
        if not route.nextHops:
            continue
        shortest_mpls_routes.append(
            network_types.MplsRoute(
                topLabel=route.topLabel,
                nextHops=_min_metric_nexthops(route.nextHops),
            )
        )

    return (shortest_unicast_routes, shortest_mpls_routes)
def del_buddy(self, jid):
    """Remove a buddy and/or deny authorization request"""
    roster = self.client.Roster
    # Revoke any granted authorization first, then drop the subscription.
    roster.Unauthorize(jid)
    roster.Unsubscribe(jid)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.