code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def compute_output(self): <NEW_LINE> <INDENT> x,y=self.input_nodes <NEW_LINE> self.output_value=np.dot(x.output_value,y.output_value) <NEW_LINE> return self.output_value | 计算并返回Multiplication operation 值
:return: | 625941c0a17c0f6771cbdfc1 |
def get_tags(self, **kwargs): <NEW_LINE> <INDENT> tags = iterate_all( 'Tags', self.connection.describe_tags, **kwargs ) <NEW_LINE> return dict((tag['Key'], tag['Value']) for tag in tags) | Returns tag list for selected instance of EFS | 625941c0e1aae11d1e749c24 |
@app.route('/list', methods=['GET']) <NEW_LINE> def list_stories(): <NEW_LINE> <INDENT> db = get_db() <NEW_LINE> query = """SELECT * FROM app""" <NEW_LINE> cur = db.execute(query) <NEW_LINE> stories = cur.fetchall() <NEW_LINE> return render_template('list.html', entries=stories) | Show stories | 625941c0aad79263cf3909ac |
def add_server(params): <NEW_LINE> <INDENT> db = api.common.get_conn() <NEW_LINE> validate(server_schema, params) <NEW_LINE> if isinstance(params["port"], str): <NEW_LINE> <INDENT> params["port"] = int(params["port"]) <NEW_LINE> <DEDENT> if isinstance(params.get("server_number"), str): <NEW_LINE> <INDENT> params["server_number"] = int(params["server_number"]) <NEW_LINE> <DEDENT> if safe_fail(get_server, name=params["name"]) is not None: <NEW_LINE> <INDENT> raise WebException("Shell server with this name already exists") <NEW_LINE> <DEDENT> params["sid"] = api.common.hash(params["name"]) <NEW_LINE> if db.shell_servers.count() == 0: <NEW_LINE> <INDENT> params["server_number"] = params.get("server_number", 1) <NEW_LINE> <DEDENT> db.shell_servers.insert(params) <NEW_LINE> return params["sid"] | Add a shell server to the pool of servers. First server is
automatically assigned server_number 1 (yes, 1-based numbering)
if not otherwise specified.
Args:
params: A dict containing:
host
port
username
password
server_number
Returns:
The sid. | 625941c05e10d32532c5ee96 |
def check_existence_by_id( self, resource_id, api_version, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> url = '/{resourceId}' <NEW_LINE> path_format_arguments = { 'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True) } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.head(url, query_parameters) <NEW_LINE> response = self._client.send(request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [204, 404]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> deserialized = (response.status_code == 204) <NEW_LINE> if raw: <NEW_LINE> <INDENT> client_raw_response = ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized | Checks by ID whether a resource exists.
:param resource_id: The fully qualified ID of the resource, including
the resource name and resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
:type resource_id: str
:param api_version: The API version to use for the operation.
:type api_version: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: bool or ClientRawResponse if raw=true
:rtype: bool or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` | 625941c07cff6e4e811178f4 |
def _third_party_auth_context(request, redirect_to): <NEW_LINE> <INDENT> context = { "currentProvider": None, "providers": [], "secondaryProviders": [], "finishAuthUrl": None, "errorMessage": None, } <NEW_LINE> if third_party_auth.is_enabled(): <NEW_LINE> <INDENT> for enabled in third_party_auth.provider.Registry.accepting_logins(): <NEW_LINE> <INDENT> info = { "id": enabled.provider_id, "name": enabled.name, "iconClass": enabled.icon_class or None, "iconImage": enabled.icon_image.url if enabled.icon_image else None, "loginUrl": pipeline.get_login_url( enabled.provider_id, pipeline.AUTH_ENTRY_LOGIN, redirect_url=redirect_to, ), "registerUrl": pipeline.get_login_url( enabled.provider_id, pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to, ), } <NEW_LINE> context["providers" if not enabled.secondary else "secondaryProviders"].append(info) <NEW_LINE> <DEDENT> running_pipeline = pipeline.get(request) <NEW_LINE> if running_pipeline is not None: <NEW_LINE> <INDENT> current_provider = third_party_auth.provider.Registry.get_from_pipeline(running_pipeline) <NEW_LINE> if current_provider is not None: <NEW_LINE> <INDENT> context["currentProvider"] = current_provider.name <NEW_LINE> context["finishAuthUrl"] = pipeline.get_complete_url(current_provider.backend_name) <NEW_LINE> if current_provider.skip_registration_form: <NEW_LINE> <INDENT> context["autoSubmitRegForm"] = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for msg in messages.get_messages(request): <NEW_LINE> <INDENT> if msg.extra_tags.split()[0] == "social-auth": <NEW_LINE> <INDENT> context['errorMessage'] = _(unicode(msg)) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return context | Context for third party auth providers and the currently running pipeline.
Arguments:
request (HttpRequest): The request, used to determine if a pipeline
is currently running.
redirect_to: The URL to send the user to following successful
authentication.
Returns:
dict | 625941c0f9cc0f698b14056c |
def open(idstr): <NEW_LINE> <INDENT> contact = profile.blist.contact_for_idstr(idstr) <NEW_LINE> if contact is not None: <NEW_LINE> <INDENT> return begin_conversation(contact) | Opens an IM window (or raises an existing one) for a buddy id string. | 625941c0442bda511e8be38a |
def get_properties(self): <NEW_LINE> <INDENT> return RevisionProperties(self._rev_id, self._repo, self) | Get the RevisionProperties for this revision. | 625941c091f36d47f21ac45f |
def fit(self, X, y=None): <NEW_LINE> <INDENT> mask = _get_mask(X, self.missing_values) <NEW_LINE> X = X[~mask] <NEW_LINE> if self.strategy == 'most_frequent': <NEW_LINE> <INDENT> modes = pd.Series(X).mode() <NEW_LINE> <DEDENT> elif self.strategy == 'constant': <NEW_LINE> <INDENT> modes = np.array([self.fill_value]) <NEW_LINE> <DEDENT> if modes.shape[0] == 0: <NEW_LINE> <INDENT> raise ValueError('Data is empty or all values are null') <NEW_LINE> <DEDENT> elif modes.shape[0] > 1: <NEW_LINE> <INDENT> raise ValueError('No value is repeated more than ' 'once in the column') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fill_ = modes[0] <NEW_LINE> <DEDENT> return self | Get the most frequent value.
Parameters
----------
X : np.ndarray or pd.Series
Training data.
y : Passthrough for ``Pipeline`` compatibility.
Returns
-------
self: CategoricalImputer | 625941c0f548e778e58cd4eb |
def compile_funcs(self, feed_shapes): <NEW_LINE> <INDENT> self.node_to_compiled_func = {} <NEW_LINE> for node in self.node_to_shape_map: <NEW_LINE> <INDENT> if node in feed_shapes: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> shape = self.node_to_shape_map[node] <NEW_LINE> inputs = node.inputs <NEW_LINE> input_shapes = [] <NEW_LINE> for input in inputs: <NEW_LINE> <INDENT> input_shapes += [self.node_to_shape_map[input]] <NEW_LINE> <DEDENT> self.node_to_compiled_func[node] = node.op.compiled_func( node, input_shapes, self.tgt, self.tgt_host) | Compile tvm ops to native code.
Must be called after infer_shape(...) since op compilation requires
knowledge of tensor shapes.
Parameters
----------
feed_shapes: node->shapes mapping for feed_dict nodes. | 625941c0b545ff76a8913d85 |
def execute(self, lf_raw: str) -> int: <NEW_LINE> <INDENT> logical_form = re.sub(r"\(a:", r"(", lf_raw) <NEW_LINE> parse = semparse_util.lisp_to_nested_expression(logical_form) <NEW_LINE> if len(parse) < 2: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> if parse[0] == "infer": <NEW_LINE> <INDENT> args = [self._exec_and(arg) for arg in parse[1:]] <NEW_LINE> if None in args: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> return self._exec_infer(*args) <NEW_LINE> <DEDENT> return -1 | Very basic model for executing friction logical forms. For now returns answer index (or
-1 if no answer can be concluded) | 625941c0cc40096d615958c0 |
def get_email(obj): <NEW_LINE> <INDENT> pass | Return the subscriber's e-mail address. If not found,
return None, and the notification message will be skipped.
The `obj` parameter exists because subscriptions have no
awearness of their context; some of them (e.g. the Naaya
`AccountSubscription`) need to access the site in order to
find the email address. `obj` is the subject of the current
notification (the object that has been created/changed). | 625941c0cc0a2c11143dcdff |
def search(self, query=None, *args, **kwargs): <NEW_LINE> <INDENT> sqs = SearchQuerySet() <NEW_LINE> user = kwargs.get('user', None) <NEW_LINE> if hasattr(user, 'impersonated_user'): <NEW_LINE> <INDENT> if isinstance(user.impersonated_user, User): <NEW_LINE> <INDENT> user = user.impersonated_user <NEW_LINE> <DEDENT> <DEDENT> if query: <NEW_LINE> <INDENT> sqs = sqs.auto_query(sqs.query.clean(query)) <NEW_LINE> <DEDENT> return sqs.models(self.model) | Uses haystack to query events.
Returns a SearchQuerySet | 625941c0a934411ee3751601 |
def reopen_task(conn, task_id, start_date, end_date): <NEW_LINE> <INDENT> checker.date_validate(start_date) <NEW_LINE> checker.date_validate(end_date) <NEW_LINE> checker.date_compare(start_date, end_date) <NEW_LINE> with conn: <NEW_LINE> <INDENT> cursor = conn.execute(SQL_OPEN_TASK, ('opened', start_date, end_date, task_id)) | Переоткрывает задачу | 625941c0283ffb24f3c55872 |
def calc_sim_collector(self, key, values): <NEW_LINE> <INDENT> (rest1, rest2), common_ratings = key, values <NEW_LINE> common_ratings_list = list(common_ratings) <NEW_LINE> n_common = len(common_ratings_list) <NEW_LINE> if n_common==0: <NEW_LINE> <INDENT> rho=0. <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> diff1 = [] <NEW_LINE> diff2 = [] <NEW_LINE> for i in common_ratings_list: <NEW_LINE> <INDENT> diff1_item=float(i[0][0])-float(i[0][2]) <NEW_LINE> diff1.append(diff1_item) <NEW_LINE> diff2_item=float(i[1][0])-float(i[1][2]) <NEW_LINE> diff2.append(diff2_item) <NEW_LINE> <DEDENT> rho=pearsonr(diff1, diff2)[0] <NEW_LINE> if np.isnan(rho)==True: <NEW_LINE> <INDENT> rho = 0. <NEW_LINE> <DEDENT> <DEDENT> yield (rest1, rest2), (rho, n_common) | Pick up the information from the previous yield as shown. Compute
the pearson correlation and yield the final information as in the
last line here. | 625941c0379a373c97cfaab2 |
def setup_widget(self, widgetns): <NEW_LINE> <INDENT> globs = self.globalns <NEW_LINE> globs['ctx'] = Context.from_request(globs['req']) <NEW_LINE> globs['auth_ctx'] = Context.from_request(globs['auth_req']) <NEW_LINE> for wp in self.dbsys.providers : <NEW_LINE> <INDENT> if widgetns in set(wp.get_widgets()) : <NEW_LINE> <INDENT> globs['widget'] = wp <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else : <NEW_LINE> <INDENT> raise InvalidIdentifier('Cannot load widget provider for %s' % widgetns) | (Insert | update) the IWidgetProvider in the global
namespace.
@param widgetns widget name.
@throws RuntimeError if a widget with requested name cannot
be found. | 625941c07d847024c06be228 |
def get_regional_data(country): <NEW_LINE> <INDENT> iso3 = country['iso3'] <NEW_LINE> level = country['regional_level'] <NEW_LINE> gid_level = 'GID_{}'.format(level) <NEW_LINE> path_output = os.path.join(DATA_INTERMEDIATE, iso3, 'regional_data_uba.csv') <NEW_LINE> path_country = os.path.join(DATA_INTERMEDIATE, iso3, 'national_outline.shp') <NEW_LINE> single_country = gpd.read_file(path_country) <NEW_LINE> path_settlements = os.path.join(DATA_INTERMEDIATE, iso3, 'settlements.tif') <NEW_LINE> filename = 'regions_{}_{}.shp'.format(level, iso3) <NEW_LINE> folder = os.path.join(DATA_INTERMEDIATE, iso3, 'regions') <NEW_LINE> path = os.path.join(folder, filename) <NEW_LINE> regions = gpd.read_file(path) <NEW_LINE> results = [] <NEW_LINE> for index, region in regions.iterrows(): <NEW_LINE> <INDENT> with rasterio.open(path_settlements) as src: <NEW_LINE> <INDENT> affine = src.transform <NEW_LINE> array = src.read(1) <NEW_LINE> array[array <= 0] = 0 <NEW_LINE> population_summation = [d['sum'] for d in zonal_stats( region['geometry'], array, stats=['sum'], affine=affine, nodata=0, )][0] <NEW_LINE> <DEDENT> area_km2 = round(area_of_polygon(region['geometry']) / 1e6) <NEW_LINE> if area_km2 == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> results.append({ 'GID_0': region['GID_0'], 'GID_id': region[gid_level], 'GID_level': gid_level, 'population': population_summation, 'area_km2': area_km2, 'population_km2': population_summation / area_km2 if population_summation else 0, }) <NEW_LINE> <DEDENT> results_df = pd.DataFrame(results) <NEW_LINE> results_df.to_csv(path_output, index=False) <NEW_LINE> print('Completed {}'.format(single_country.NAME_0.values[0])) <NEW_LINE> return print('Completed night lights data querying') | Extract regional data including luminosity and population.
Parameters
----------
country : string
Three digit ISO country code. | 625941c063f4b57ef000108d |
def update(self, start: int, end: int, iteration: int, **kwargs) -> None: <NEW_LINE> <INDENT> value = self(**kwargs) <NEW_LINE> if isinstance(value, torch.Tensor): <NEW_LINE> <INDENT> value = value.cpu() <NEW_LINE> <DEDENT> self.values[start:end, iteration] = value.squeeze() | Update the internal value table of the metric | 625941c0b830903b967e987c |
def evaluate(self, trials, games, xoro, testNet = False, goodNet = 0): <NEW_LINE> <INDENT> if testNet: <NEW_LINE> <INDENT> answers = Game.run(1,trials, games, self, xoro, True, goodNet) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> answers = Game.run(1,trials,games,self, xoro) <NEW_LINE> <DEDENT> return answers | Tests the network and returns the results | 625941c0d8ef3951e32434ac |
def newfiles(self, user=None, start=None, end=None, reverse=False, step=None, total=None): <NEW_LINE> <INDENT> for event in self.logevents(logtype="upload", user=user, start=start, end=end, reverse=reverse, step=step, total=total): <NEW_LINE> <INDENT> filepage = pywikibot.FilePage(event.title()) <NEW_LINE> date = event.timestamp() <NEW_LINE> user = event.user() <NEW_LINE> comment = event.comment() or u'' <NEW_LINE> yield (filepage, date, user, comment) | Yield information about newly uploaded files.
Yields a tuple of FilePage, Timestamp, user(unicode), comment(unicode).
N.B. the API does not provide direct access to Special:Newimages, so
this is derived from the "upload" log events instead. | 625941c08a43f66fc4b53fd6 |
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.secret: <NEW_LINE> <INDENT> self.secret = heliosutils.random_string(12) <NEW_LINE> self.election.append_log("Trustee %s added" % self.name) <NEW_LINE> <DEDENT> super(Trustee, self).save(*args, **kwargs) | override this just to get a hook | 625941c07047854f462a137b |
def setOpacity(self, newOpacity, operation='', log=None): <NEW_LINE> <INDENT> if operation in ['', '=']: <NEW_LINE> <INDENT> self.opacity = newOpacity <NEW_LINE> <DEDENT> elif operation in ['+']: <NEW_LINE> <INDENT> self.opacity += newOpacity <NEW_LINE> <DEDENT> elif operation in ['-']: <NEW_LINE> <INDENT> self.opacity -= newOpacity <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error(f"Operation '{operation}' not recognised.") <NEW_LINE> <DEDENT> self.updateOpacity() | Hard setter for opacity, allows the suppression of log messages and calls the update method
| 625941c023e79379d52ee4d5 |
def nextSpace(row, col, vertical): <NEW_LINE> <INDENT> if vertical is True: <NEW_LINE> <INDENT> return (row + 1), col <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return row, (col + 1) | Returns the position of the next space, whether its vertical or horizontal | 625941c0a79ad161976cc0b4 |
def sparse_apply_adadelta(var, accum, accum_update, lr, rho, epsilon, grad, indices, use_locking=False, name=None): <NEW_LINE> <INDENT> _ctx = _context._context <NEW_LINE> if _ctx is None or not _ctx._eager_context.is_eager: <NEW_LINE> <INDENT> if use_locking is None: <NEW_LINE> <INDENT> use_locking = False <NEW_LINE> <DEDENT> use_locking = _execute.make_bool(use_locking, "use_locking") <NEW_LINE> _, _, _op = _op_def_lib._apply_op_helper( "SparseApplyAdadelta", var=var, accum=accum, accum_update=accum_update, lr=lr, rho=rho, epsilon=epsilon, grad=grad, indices=indices, use_locking=use_locking, name=name) <NEW_LINE> _result = _op.outputs[:] <NEW_LINE> _inputs_flat = _op.inputs <NEW_LINE> _attrs = ("T", _op.get_attr("T"), "Tindices", _op.get_attr("Tindices"), "use_locking", _op.get_attr("use_locking")) <NEW_LINE> _execute.record_gradient( "SparseApplyAdadelta", _inputs_flat, _attrs, _result, name) <NEW_LINE> _result, = _result <NEW_LINE> return _result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError("sparse_apply_adadelta op does not support eager execution. Arg 'out' is a ref.") <NEW_LINE> <DEDENT> raise RuntimeError("sparse_apply_adadelta op does not support eager execution. Arg 'out' is a ref.") | var: Should be from a Variable().
Args:
var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
accum: A mutable `Tensor`. Must have the same type as `var`.
Should be from a Variable().
accum_update: A mutable `Tensor`. Must have the same type as `var`.
: Should be from a Variable().
lr: A `Tensor`. Must have the same type as `var`.
Learning rate. Must be a scalar.
rho: A `Tensor`. Must have the same type as `var`.
Decay factor. Must be a scalar.
epsilon: A `Tensor`. Must have the same type as `var`.
Constant factor. Must be a scalar.
grad: A `Tensor`. Must have the same type as `var`. The gradient.
indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
A vector of indices into the first dimension of var and accum.
use_locking: An optional `bool`. Defaults to `False`.
If True, updating of the var and accum tensors will be protected by
a lock; otherwise the behavior is undefined, but may exhibit less contention.
name: A name for the operation (optional).
Returns:
A mutable `Tensor`. Has the same type as `var`. | 625941c055399d3f05588622 |
def javascript_tag(content): <NEW_LINE> <INDENT> return content_tag("script", javascript_cdata_section(content), type="text/javascript") | Returns a JavaScript tag with the ``content`` inside.
Example::
>>> javascript_tag("alert('All is good')"
'<script type="text/javascript">alert('All is good')</script>' | 625941c0287bf620b61d39d4 |
def _initialize_protocols(self): <NEW_LINE> <INDENT> self._default_protocol = self.process_class.get_default_protocol() <NEW_LINE> self._protocols = self.process_class.get_available_protocols() | Initialize the protocols class attribute by parsing them from the configuration file. | 625941c0dc8b845886cb54a3 |
def unique_conn_id(self): <NEW_LINE> <INDENT> next_id = self._next_conn_id <NEW_LINE> self._next_conn_id += 1 <NEW_LINE> return next_id | Generate a new unique connection id.
See :meth:`AbstractDeviceAdapter.unique_conn_id`.
Returns:
int: A new, unique integer suitable for use as a conn_id. | 625941c0cad5886f8bd26f49 |
def do_restart(request): <NEW_LINE> <INDENT> if request.user.is_staff: <NEW_LINE> <INDENT> reload_method = get_auto_reload_method() <NEW_LINE> reload_log = get_auto_reload_log() <NEW_LINE> reload_time = get_auto_reload_time() <NEW_LINE> command = "echo no script" <NEW_LINE> if reload_method == 'test': <NEW_LINE> <INDENT> command = 'touch settings.py' <NEW_LINE> <DEDENT> elif reload_method == 'apache2': <NEW_LINE> <INDENT> command = 'sudo apache2ctl restart' <NEW_LINE> <DEDENT> elif reload_method == 'httpd': <NEW_LINE> <INDENT> command = 'sudo service httpd restart' <NEW_LINE> <DEDENT> elif reload_method.startswith('restart_script'): <NEW_LINE> <INDENT> script = reload_method.split(" ")[1] <NEW_LINE> command = "%s &" % script <NEW_LINE> <DEDENT> os.system("sleep 2 && %s &> %s & " % (command, reload_log)) <NEW_LINE> return render_to_response('inlinetrans/response.html', {'message': reload_time}, context_instance=RequestContext(request)) | * "test" for a django instance (this do a touch over settings.py for reload)
* "apache"
* "httpd"
* "wsgi"
* "restart_script <script_path_name>" | 625941c0fff4ab517eb2f3a9 |
def test_convert_project(self): <NEW_LINE> <INDENT> result = convert_project(self.session, 'hf2') <NEW_LINE> assert_equals(result, {'id':3, 'type':'Project'}) <NEW_LINE> assert_raises(GrenadeValidationError, convert_project, self.session, 'mm4') | Test that the project converter transforms the supplied test data correctly.
.. versionadded:: v00_03_00
.. versionchanged:: 0.11.0
Update to use nose asserts statements. | 625941c0eab8aa0e5d26dac6 |
def set_roles(self): <NEW_LINE> <INDENT> self.roles = [ Role.objects.create(type='security'), Role.objects.create(type='audit', rid=15), Role.objects.create(type='data') ] | Add custom roles to self. | 625941c076d4e153a657ea9f |
def pc_noutput_items_var(self): <NEW_LINE> <INDENT> return _filter_swig.interp_fir_filter_scc_sptr_pc_noutput_items_var(self) | pc_noutput_items_var(interp_fir_filter_scc_sptr self) -> float | 625941c08a349b6b435e80e3 |
def sarsa(env, gamma, n_episode, alpha): <NEW_LINE> <INDENT> n_action = env.action_space.n <NEW_LINE> Q = defaultdict(lambda: torch.zeros(n_action)) <NEW_LINE> for episode in range(n_episode): <NEW_LINE> <INDENT> state = env.reset() <NEW_LINE> is_done = False <NEW_LINE> action = epsilon_greedy_policy(state, Q) <NEW_LINE> while not is_done: <NEW_LINE> <INDENT> next_state, reward, is_done, info = env.step(action) <NEW_LINE> next_action = epsilon_greedy_policy(next_state, Q) <NEW_LINE> td_delta = reward + gamma * Q[next_state][next_action] - Q[state][action] <NEW_LINE> Q[state][action] += alpha * td_delta <NEW_LINE> length_episode[episode] += 1 <NEW_LINE> total_reward_episode[episode] += reward <NEW_LINE> if is_done: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> state = next_state <NEW_LINE> action = next_action <NEW_LINE> <DEDENT> <DEDENT> policy = {} <NEW_LINE> for state, actions in Q.items(): <NEW_LINE> <INDENT> policy[state] = torch.argmax(actions).item() <NEW_LINE> <DEDENT> return Q, policy | Obtain the optimal policy with on-policy SARSA algorithm
@param env: OpenAI Gym environment
@param gamma: discount factor
@param n_episode: number of episodes
@return: the optimal Q-function, and the optimal policy | 625941c066656f66f7cbc119 |
def test_dtypes(self): <NEW_LINE> <INDENT> for dtype in self.supported_dtypes: <NEW_LINE> <INDENT> result = extrema.local_maxima(self.image.astype(dtype)) <NEW_LINE> assert result.dtype == np.bool <NEW_LINE> assert_equal(result, self.expected_default) | Test results with default configuration for all supported dtypes. | 625941c0796e427e537b0533 |
def _compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, precision=None): <NEW_LINE> <INDENT> if taxes and taxes[0].company_id.tax_calculation_rounding_method[:7] == 'swedish': <NEW_LINE> <INDENT> if not precision: <NEW_LINE> <INDENT> precision = self.pool.get('decimal.precision' ).precision_get(cr, uid, 'Account') <NEW_LINE> <DEDENT> precision += 5 <NEW_LINE> <DEDENT> return super(AccountTax, self )._compute(cr, uid, taxes, price_unit, quantity, product=product, partner=partner, precision=precision) | Using swedish rounding we want to keep standard global precision
so we add precision to do global computation | 625941c07d43ff24873a2c0e |
def loc_pref(r_loc, s_loc, loc_fidelity=0, n_reg=1): <NEW_LINE> <INDENT> loc_pref = 0 <NEW_LINE> if loc_fidelity > 0: <NEW_LINE> <INDENT> rreg = region(r_loc, n_reg=n_reg) <NEW_LINE> sreg = region(s_loc, n_reg=n_reg) <NEW_LINE> loc_pref = math.exp(-np.abs(rreg - sreg)) <NEW_LINE> <DEDENT> if loc_fidelity > 1: <NEW_LINE> <INDENT> loc_pref = (loc_pref + math.exp(-np.abs(r_loc - s_loc))) / 2 <NEW_LINE> <DEDENT> return loc_pref | returns the location-based preference between a requester and supplier
for a commodity | 625941c0d18da76e23532443 |
def test_pass(self): <NEW_LINE> <INDENT> with raises(NotImplementedError): <NEW_LINE> <INDENT> mol.occupied_per_atom(self.bas) | Contains atom Z=37, which is not implemented | 625941c07cff6e4e811178f5 |
def mark_regions_image(self, image, stats): <NEW_LINE> <INDENT> return image | Creates a new image with computed stats
takes as input
image: a list of pixels in a region
stats: stats regarding location and area
returns: image marked with center and area | 625941c0ac7a0e7691ed4040 |
def __getstate__(self): <NEW_LINE> <INDENT> state = super(ExecComp, self).__getstate__() <NEW_LINE> state['codes'] = None <NEW_LINE> return state | Return dict representing this container's state. | 625941c08e05c05ec3eea2e2 |
def _load(self) -> None: <NEW_LINE> <INDENT> with open(self._filename, 'r') as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> if '#' in line: <NEW_LINE> <INDENT> line, _ = line.split('#', 1) <NEW_LINE> <DEDENT> if '=' in line: <NEW_LINE> <INDENT> key, val = map(lambda s: s.strip() , line.split('=', 1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key, val = (line.strip(), '') <NEW_LINE> <DEDENT> if len(key): <NEW_LINE> <INDENT> self._dict[key] = val <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> f.close() | Load the config file. | 625941c04f88993c3716bfd9 |
def set_mode(self, on_done, **kwargs): <NEW_LINE> <INDENT> mode = util.input_choice(kwargs, 'mode', ['latching', 'momentary_a', 'momentary_b', 'momentary_c']) <NEW_LINE> if mode is None: <NEW_LINE> <INDENT> LOG.error("Invalid mode.") <NEW_LINE> on_done(False, 'Invalid mode.', None) <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> mode = IOLinc.Modes[mode.upper()] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> mode = IOLinc.Modes.LATCHING <NEW_LINE> <DEDENT> self.mode = mode <NEW_LINE> if mode == IOLinc.Modes.LATCHING: <NEW_LINE> <INDENT> type_a = IOLinc.OperatingFlags.MOMENTARY_A_OFF <NEW_LINE> type_b = IOLinc.OperatingFlags.MOMENTARY_B_OFF <NEW_LINE> type_c = IOLinc.OperatingFlags.MOMENTARY_C_OFF <NEW_LINE> <DEDENT> elif mode == IOLinc.Modes.MOMENTARY_A: <NEW_LINE> <INDENT> type_a = IOLinc.OperatingFlags.MOMENTARY_A_ON <NEW_LINE> type_b = IOLinc.OperatingFlags.MOMENTARY_B_OFF <NEW_LINE> type_c = IOLinc.OperatingFlags.MOMENTARY_C_OFF <NEW_LINE> <DEDENT> elif mode == IOLinc.Modes.MOMENTARY_B: <NEW_LINE> <INDENT> type_a = IOLinc.OperatingFlags.MOMENTARY_A_ON <NEW_LINE> type_b = IOLinc.OperatingFlags.MOMENTARY_B_ON <NEW_LINE> type_c = IOLinc.OperatingFlags.MOMENTARY_C_OFF <NEW_LINE> <DEDENT> elif mode == IOLinc.Modes.MOMENTARY_C: <NEW_LINE> <INDENT> type_a = IOLinc.OperatingFlags.MOMENTARY_A_ON <NEW_LINE> type_b = IOLinc.OperatingFlags.MOMENTARY_B_ON <NEW_LINE> type_c = IOLinc.OperatingFlags.MOMENTARY_C_ON <NEW_LINE> <DEDENT> seq = CommandSeq(self.protocol, "Set mode complete", on_done, name="SetMode") <NEW_LINE> for cmd2 in (type_a, type_b, type_c): <NEW_LINE> <INDENT> msg = Msg.OutExtended.direct(self.addr, 0x20, cmd2, bytes([0x00] * 14)) <NEW_LINE> callback = self.generic_ack_callback("Mode updated.") <NEW_LINE> msg_handler = handler.StandardCmd(msg, callback) <NEW_LINE> seq.add_msg(msg, msg_handler) <NEW_LINE> <DEDENT> seq.run() | Set momentary seconds.
Set the momentary mode
Args:
kwargs: Key=value pairs of the flags to change.
on_done: Finished callback. This is called when the command has
completed. Signature is: on_done(success, msg, data) | 625941c0097d151d1a222dcb |
def talk_to(self, person): <NEW_LINE> <INDENT> if type(person) != str: <NEW_LINE> <INDENT> print('Person has to be a string.') <NEW_LINE> <DEDENT> elif person in [character.name for character in self.place.characters.values()]: <NEW_LINE> <INDENT> current_person = self.place.characters[person] <NEW_LINE> print(current_person.name,'says:',current_person.talk()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(person,'is not here.') <NEW_LINE> <DEDENT> "*** YOUR CODE HERE ***" | Talk to person if person is at player's current place.
>>> john = Character('John', 'Have to run for lecture!')
>>> sather_gate = Place('Sather Gate', 'You are at Sather Gate', [john], [])
>>> me = Player('player', sather_gate)
>>> me.talk_to(john)
Person has to be a string.
>>> me.talk_to('John')
John says: Have to run for lecture!
>>> me.talk_to('Albert')
Albert is not here. | 625941c0d7e4931a7ee9de8c |
def _test(self): <NEW_LINE> <INDENT> pass | These tests are not useful as documentation, but are here to ensure
everything works as expected. The following was broken, now fixed.
>>> from fipy import *
>>> mesh = SphericalUniformGrid1D(nx=3., dx=1.)
>>> var = CellVariable(mesh=mesh)
>>> var.constrain(0., where=mesh.facesRight)
>>> DiffusionTerm().solve(var)
This test is for https://github.com/usnistgov/fipy/issues/372. Cell
volumes were being returned as `binOps` rather than arrays.
>>> m = SphericalUniformGrid1D(dx=1., nx=4)
>>> print(isinstance(m.cellVolumes, numerix.ndarray))
True
>>> print(isinstance(m._faceAreas, numerix.ndarray))
True
If the above types aren't correct, the divergence operator's value can be a `binOp`
>>> print(isinstance(CellVariable(mesh=m).arithmeticFaceValue.divergence.value, numerix.ndarray))
True | 625941c0d6c5a10208143fb8 |
def set_direction(self, direction): <NEW_LINE> <INDENT> if type(direction) is not int: <NEW_LINE> <INDENT> raise HacInvalidTypeException( "Projectile.set_direction " "requires an int from the Constants module as" "direction." ) <NEW_LINE> <DEDENT> self.model = self.directional_model(direction) <NEW_LINE> self.animation = self.directional_animation(direction) <NEW_LINE> self.direction = direction <NEW_LINE> self.actuator = UnidirectionalActuator(direction=direction) | Set the direction of a projectile
This method will set a UnidirectionalActuator with the direction.
It will also take care of updating the model and animation for the given
direction if they are specified.
:param direction: A direction from the Constants module.
:type direction: int
Example::
fireball.set_direction(Constants.UP) | 625941c0ff9c53063f47c164 |
def get_blob(self, format, compression=None, quality=None, factory=None): <NEW_LINE> <INDENT> blob = io.get_blob(self, format, compression, quality) <NEW_LINE> if factory: <NEW_LINE> <INDENT> blob = factory(blob) <NEW_LINE> <DEDENT> return blob | Return a blob representing an image
:param format: format of the output such as :term:`JPEG`
:type format: ``str``
:param compression: compression supported by format
:type compression: :class:`pystacia.lazyenum.EnumValue`
:param quality: output quality
:rtype: ``str`` (Python 2.x) / ``bytes`` (Python 3.x)
Returns blob carrying data representing an image along its header
in the given format. Compression is one of compression algorithms.
Some formats like :term:`TIFF` supports more then one compression
algorithms but typically this parameter is not used.
The interpretation of quality parameter depends
on the chosen format. E.g. for :term:`JPEG` it's integer number
between 1 (worst) and 100 (best). The default value is to choose
best available compression that preserves good quality image.
The details are in the `ImageMagick documentation
<http://www.imagemagick.org/script/
command-line-options.php#quality>`. | 625941c0167d2b6e31218b05 |
def rowViewportPosition(self, p_int): <NEW_LINE> <INDENT> return 0 | rowViewportPosition(self, int) -> int | 625941c04c3428357757c299 |
def get_key_list(self) -> list: <NEW_LINE> <INDENT> return self.__key_column.list_value() | Get the list of key values. | 625941c03617ad0b5ed67e68 |
def RemoveTopologyCategory(self, topologyFullPath='', categoryName=''): <NEW_LINE> <INDENT> return self.generateAPIRequest( OrderedDict([('method_name', 'RemoveTopologyCategory'), ('topologyFullPath', topologyFullPath), ('categoryName', categoryName)])) | Removes a category from given topology.
:param str topologyFullPath: Specify the topology we want to remove the given category from.
:param str categoryName: Specify the category's name which we want to remove.
:rtype: str | 625941c082261d6c526ab40c |
@remoteserviceHandle('gate') <NEW_LINE> def get_role_list_811(data, player): <NEW_LINE> <INDENT> response = GuildRoleListProto() <NEW_LINE> m_g_id = player.guild.g_id <NEW_LINE> if m_g_id == 0: <NEW_LINE> <INDENT> response.result = False <NEW_LINE> response.message = "没有公会" <NEW_LINE> return response.SerializeToString() <NEW_LINE> <DEDENT> data1 = tb_guild_info.getObjData(m_g_id) <NEW_LINE> if not data1: <NEW_LINE> <INDENT> response.result = False <NEW_LINE> response.message = "公会ID错误" <NEW_LINE> return response.SerializeToString() <NEW_LINE> <DEDENT> guild_obj = Guild() <NEW_LINE> guild_obj.init_data(data1) <NEW_LINE> guild_p_list = guild_obj.p_list <NEW_LINE> if guild_p_list.values(): <NEW_LINE> <INDENT> for p_list in guild_p_list.values(): <NEW_LINE> <INDENT> for role_id in p_list: <NEW_LINE> <INDENT> character_guild = tb_character_guild.getObjData(role_id) <NEW_LINE> character_info = tb_character_info.getObjData(role_id) <NEW_LINE> if character_info and character_guild: <NEW_LINE> <INDENT> guild_info = character_guild.get("info") <NEW_LINE> role_info = response.role_info.add() <NEW_LINE> role_info.p_id = role_id <NEW_LINE> if character_info.get('nickname'): <NEW_LINE> <INDENT> role_info.name = character_info['nickname'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> role_info.name = u'无名' <NEW_LINE> <DEDENT> role_info.level = character_info['level'] <NEW_LINE> role_info.position = guild_info.get("position") <NEW_LINE> role_info.all_contribution = guild_info.get("all_contribution") <NEW_LINE> role_info.k_num = guild_info.get("k_num") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> response.result = True <NEW_LINE> return response.SerializeToString() | 角色列表 | 625941c056b00c62f0f145c8 |
def __init__(self): <NEW_LINE> <INDENT> self.host = "https://api.phaxio.com/v2" <NEW_LINE> self.api_client = None <NEW_LINE> self.temp_folder_path = None <NEW_LINE> self.api_key = {} <NEW_LINE> self.api_key_prefix = {} <NEW_LINE> self.username = "" <NEW_LINE> self.password = "" <NEW_LINE> self.logger = {} <NEW_LINE> self.logger["package_logger"] = logging.getLogger("swagger_client") <NEW_LINE> self.logger["urllib3_logger"] = logging.getLogger("urllib3") <NEW_LINE> self.logger_format = '%(asctime)s %(levelname)s %(message)s' <NEW_LINE> self.logger_stream_handler = None <NEW_LINE> self.logger_file_handler = None <NEW_LINE> self.logger_file = None <NEW_LINE> self.debug = False <NEW_LINE> self.verify_ssl = True <NEW_LINE> self.ssl_ca_cert = None <NEW_LINE> self.cert_file = None <NEW_LINE> self.key_file = None | Constructor | 625941c097e22403b379cf09 |
def GetPointer(self): <NEW_LINE> <INDENT> return _itkImagePython.itkImageD2_GetPointer(self) | GetPointer(self) -> itkImageD2 | 625941c07b180e01f3dc4771 |
def lock(name, zk_hosts=None, identifier=None, max_concurrency=1, timeout=None, ephemeral_lease=False, profile=None, scheme=None, username=None, password=None, default_acl=None): <NEW_LINE> <INDENT> ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} <NEW_LINE> conn_kwargs = {'profile': profile, 'scheme': scheme, 'username': username, 'password': password, 'default_acl': default_acl} <NEW_LINE> if __opts__['test']: <NEW_LINE> <INDENT> ret['result'] = None <NEW_LINE> ret['comment'] = 'Attempt to acquire lock' <NEW_LINE> return ret <NEW_LINE> <DEDENT> if identifier is None: <NEW_LINE> <INDENT> identifier = __grains__['id'] <NEW_LINE> <DEDENT> locked = __salt__['zk_concurrency.lock'](name, zk_hosts, identifier=identifier, max_concurrency=max_concurrency, timeout=timeout, ephemeral_lease=ephemeral_lease, **conn_kwargs) <NEW_LINE> if locked: <NEW_LINE> <INDENT> ret['result'] = True <NEW_LINE> ret['comment'] = 'lock acquired' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret['comment'] = 'Unable to acquire lock' <NEW_LINE> <DEDENT> return ret | Block state execution until you are able to get the lock (or hit the timeout) | 625941c015baa723493c3ee3 |
def test_case_04_legal_triangle(self): <NEW_LINE> <INDENT> self.__assert_not_equal_test_case([(4, 4, 8), (4, 5, 8)], 'NotATriangle') | Verify that the sides form a legal triangle (Triangle Inequality) R3.1 | 625941c024f1403a92600ad8 |
def test_get_resource(self): <NEW_LINE> <INDENT> print('(' + self.test_get_resource.__name__ + ')', self.test_get_resource.__doc__) <NEW_LINE> resource = self.connection.get_resource(VALID_RESOURCE_IDS[0]) <NEW_LINE> self.assertDictContainsSubset(resource, VALID_RESOURCES[0]) <NEW_LINE> resource = self.connection.get_resource(VALID_RESOURCE_IDS[3]) <NEW_LINE> self.assertDictContainsSubset(resource, VALID_RESOURCES[3]) | Test get_resource with id 1 and 4 | 625941c0d486a94d0b98e0b5 |
def peek(self): <NEW_LINE> <INDENT> if len(self.items) > 0: <NEW_LINE> <INDENT> return self.items[len(self.items)-1] <NEW_LINE> <DEDENT> return None | Return the data at the top of the stack | 625941c063b5f9789fde7055 |
def _extract_differences(self, replay): <NEW_LINE> <INDENT> if replay.double_time: <NEW_LINE> <INDENT> time_coefficient = 1000 * 2 / 3 <NEW_LINE> <DEDENT> elif replay.half_time: <NEW_LINE> <INDENT> time_coefficient = 1000 * 4 / 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time_coefficient = 1000 <NEW_LINE> <DEDENT> clicks = np.array([ ( second.position.x, second.position.y, time_coefficient * second.offset.total_seconds(), ) for first, second in toolz.sliding_window(2, replay.actions) if ((second.key1 and not first.key1 or second.key2 and not first.key2) and second.position != (0, 0)) ]) <NEW_LINE> double_time = replay.double_time <NEW_LINE> half_time = replay.half_time <NEW_LINE> hard_rock = replay.hard_rock <NEW_LINE> hit_object_coords = [] <NEW_LINE> append_coord = hit_object_coords.append <NEW_LINE> for hit_object in replay.beatmap.hit_objects_no_spinners: <NEW_LINE> <INDENT> if double_time: <NEW_LINE> <INDENT> hit_object = hit_object.double_time <NEW_LINE> <DEDENT> elif half_time: <NEW_LINE> <INDENT> hit_object = hit_object.half_time <NEW_LINE> <DEDENT> if hard_rock: <NEW_LINE> <INDENT> hit_object = hit_object.hard_rock <NEW_LINE> <DEDENT> position = hit_object.position <NEW_LINE> append_coord(( position.x, position.y, hit_object.time.total_seconds() * 1000, )) <NEW_LINE> <DEDENT> hit_object_coords = np.array(hit_object_coords) <NEW_LINE> hit_object_times = hit_object_coords[:, 2] <NEW_LINE> click_times = clicks[:, [2]] <NEW_LINE> nearest_click_ix = np.abs( hit_object_times - click_times, ).argmin(axis=0) <NEW_LINE> matched_clicks = clicks[nearest_click_ix] <NEW_LINE> squared_distance = ( hit_object_coords[:, :2] - matched_clicks[:, :2] ) ** 2 <NEW_LINE> aim_error = np.sqrt(squared_distance[:, 0] + squared_distance[:, 1]) <NEW_LINE> np.clip( aim_error, 0, 2 * sl.mod.circle_radius(replay.beatmap.cs(hard_rock=hard_rock)), out=aim_error, ) <NEW_LINE> accuracy_error = np.abs(hit_object_times - matched_clicks[:, 2]) <NEW_LINE> np.clip( accuracy_error, 0, 1.5 * 
sl.mod.od_to_ms(replay.beatmap.od( hard_rock=hard_rock, double_time=double_time, half_time=half_time, )).hit_50, out=accuracy_error, ) <NEW_LINE> return aim_error, accuracy_error | Extract the time and position differences for each hit object.
Parameters
----------
replay : Replay
The replay to get differences for.
Returns
-------
differences : np.ndarray
An array of shape (len(hit_objects), 2) where the first column
is the time offset in milliseconds and the second column is the
magnitude of (x, y) error in osu! pixels. | 625941c029b78933be1e561f |
def complexidade_sentenca(texto): <NEW_LINE> <INDENT> sentencas = separa_sentencas(texto) <NEW_LINE> numero_frases = 0 <NEW_LINE> '''laço que lê as sentenças e cria um array de frases pra cada sentença''' <NEW_LINE> for sentenca in sentencas: <NEW_LINE> <INDENT> frases = separa_frases(sentenca) <NEW_LINE> '''var que vai somando o numero de frases em cada sentença''' <NEW_LINE> numero_frases = numero_frases + len(frases) <NEW_LINE> <DEDENT> '''var que receberá a soma das palavras''' <NEW_LINE> numero_sentencas = len(sentencas) <NEW_LINE> '''complexidade''' <NEW_LINE> complexidade = numero_frases / numero_sentencas <NEW_LINE> return complexidade | array recebendo lista de sentencas | 625941c0b57a9660fec337f1 |
def calc_normal_equation_fit(xvec, yvec): <NEW_LINE> <INDENT> xlen = len(xvec) <NEW_LINE> assert xlen >= 3 and len(yvec) == xlen, 'xvec and yvec should have the same length. However, len(xvec)=%i, len(yvec)=%i.' % (xlen,len(yvec)) <NEW_LINE> xvec = np.matrix(xvec) <NEW_LINE> xvec = xvec.transpose() <NEW_LINE> unitVec = np.ones((xlen, 1), dtype='float') <NEW_LINE> X = np.concatenate((unitVec, xvec), axis=1) <NEW_LINE> XX = np.dot(X, np.transpose(X)) <NEW_LINE> normalFit = np.linalg.pinv(XX) <NEW_LINE> normalFit = np.dot(normalFit, X) <NEW_LINE> normalFit = np.transpose(normalFit) <NEW_LINE> res = np.dot(normalFit, yvec) <NEW_LINE> return { 'slope': res[0, 1], 'intercept': res[0, 0] } | Computes polynomial 1st degree fit
using normal equation. | 625941c0498bea3a759b9a1f |
def skip_block(self): <NEW_LINE> <INDENT> for line in self.config_fd: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if line == '}': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> raise SyntaxError('skipped block does not end with "}"') | skip configuration block
| 625941c0009cb60464c63323 |
def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self.display_menu() <NEW_LINE> choice = input("Enter an option: ") <NEW_LINE> action = self.choices.get(choice) <NEW_LINE> if action: <NEW_LINE> <INDENT> action() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("{0} is not a valid choice".format(choice)) | Display the menu and repsond to choices. | 625941c0a8370b7717052810 |
def __init__( self, source_workdir: str, dest_workdir: str = "", prefix: dict = "mirror", ) -> None: <NEW_LINE> <INDENT> self.source_workdir = source_workdir <NEW_LINE> self.dest_prefix = prefix <NEW_LINE> self.stats = defaultdict(lambda: 0) <NEW_LINE> self._init_source_repo() <NEW_LINE> self.prior_dest_exists = False <NEW_LINE> self.dest_head_commit = None <NEW_LINE> self.dest_has_tree = False <NEW_LINE> self.dest_commit_hashes = [] <NEW_LINE> self.dest_is_mirror = False <NEW_LINE> if not dest_workdir: <NEW_LINE> <INDENT> self._init_empty_dest_repo() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if source_workdir == dest_workdir: <NEW_LINE> <INDENT> raise ValueError( "Source repo must not be the same as destination repo" ) <NEW_LINE> <DEDENT> self._init_existing_dest_repo(dest_workdir) <NEW_LINE> <DEDENT> logger.debug("Initialized mirror; mirror ready for use") | Sets up and initializes the mirror session.
Args:
source_workdir (str): Absolute path to the source repo
dest_workdir (str): Absolute path to the destination
repo (optional); if not provided, the destination
working directory will be automatically created
prefix (str): The prefixof the automatically generated
destination working directory (defaults to 'mirror')
Raises:
ValueError: if source_workdir is the same as dest_workdir | 625941c094891a1f4081ba18 |
def predict(image_path, checkpoint_path, idx_to_class, top_k, gpu): <NEW_LINE> <INDENT> image = Image.open(image_path) <NEW_LINE> model = load_checkpoint(checkpoint_path) <NEW_LINE> image = process_image(image) <NEW_LINE> device = "cpu" <NEW_LINE> if (gpu): <NEW_LINE> <INDENT> device = "cuda" <NEW_LINE> <DEDENT> model.to(device) <NEW_LINE> image = image.to(device) <NEW_LINE> results = model.forward(image.unsqueeze(0).float()) <NEW_LINE> ps = torch.exp(results) / torch.sum(torch.exp(results)) <NEW_LINE> probs, idx = torch.topk(ps, top_k) <NEW_LINE> probs = [float(p) for p in probs[0]] <NEW_LINE> classes = [idx_to_class[x] for x in np.array(idx[0])] <NEW_LINE> return probs, classes | Predict the class (or classes) of an image using a trained deep learning model.
| 625941c0e64d504609d747b0 |
def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> sprite_sheet = SpriteSheet("player.png") <NEW_LINE> self.image = sprite_sheet.get_image(35, 420, 57, 90).convert() <NEW_LINE> self.x_speed = 0 <NEW_LINE> self.rect = self.image.get_rect() | Constructor function | 625941c026238365f5f0eddb |
def chars_cli(ngrams, weights): <NEW_LINE> <INDENT> with open(ngrams) as f: <NEW_LINE> <INDENT> CharacterModel(map(parse_ngram, f), weights).run_loop() | Start a character model prediction loop.
| 625941c0be383301e01b53fa |
def __import__(module_name, persist_value = True): <NEW_LINE> <INDENT> module = None <NEW_LINE> index = 1 <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> caller = sys._getframe(index) <NEW_LINE> if module_name in caller.f_globals.get("_globals", {}): <NEW_LINE> <INDENT> module = caller.f_globals["_globals"][module_name] <NEW_LINE> break <NEW_LINE> <DEDENT> elif module_name in caller.f_globals.get("_locals", {}): <NEW_LINE> <INDENT> module = caller.f_globals["_locals"][module_name] <NEW_LINE> break <NEW_LINE> <DEDENT> index += 1 <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ImportError("No module named '%s' found in global or local references" % module_name) <NEW_LINE> <DEDENT> if not module: raise ImportError("No module named '%s' found in global or local references" % module_name) <NEW_LINE> if index > 1 and persist_value: <NEW_LINE> <INDENT> caller = sys._getframe(1) <NEW_LINE> globals_reference = caller.f_globals.get("_globals", {}) <NEW_LINE> globals_reference[module_name] = module <NEW_LINE> caller.f_globals["_globals"] = globals_reference <NEW_LINE> <DEDENT> return module | Importer function to be used in the process of importing
a module referred in inverted way.
The optional persist value may be used to control if the
globals/locals reference value must be set in the caller module
in case it has been retrieved from a parent caller (cache).
This function should be used in cases where the inversion injection
was made using the data helper.
:type module_name: String
:param module_name: The name of the module to be imported.
:type persist_value: bool
:param persist_value: If the globals/locals value shall be
persisted in the caller in case it's is not available there.
:rtype: module
:return: The imported module. | 625941c0cc40096d615958c1 |
def find_log_decorator(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapper(*args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = func(*args) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(func.__name__ + ' run failed') <NEW_LINE> raise Exception("Cannot find element by [%s]:under:\n %s \n" % (args[1], args[2])) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> return wrapper | 输出Exception的装饰器
:param func:
:return: | 625941c1099cdd3c635f0bcc |
def test_case4(self): <NEW_LINE> <INDENT> driver = self.driver <NEW_LINE> po = login(self.driver,self.url) <NEW_LINE> po.open() <NEW_LINE> po.customer() <NEW_LINE> driver.find_element_by_xpath("//input[@name='devCode']").send_keys("867967025947526") <NEW_LINE> driver.find_element_by_xpath("//button[1]").click() <NEW_LINE> sleep(2) <NEW_LINE> verity = driver.find_element_by_xpath("//table/tbody/tr[1]/td[3]").text <NEW_LINE> try: <NEW_LINE> <INDENT> assert verity == "867967025947526" <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> Page.get_screenshot(self) <NEW_LINE> <DEDENT> driver.quit() <NEW_LINE> sleep(2) | 查询设备号
验证查询的设备号是否与查询的一致
| 625941c0fb3f5b602dac3601 |
def scan_for_open_port(port, base_ip_address = None, range_start = 1, range_end = 254, connection_timeout_sec = 0.5, n_workers = 32): <NEW_LINE> <INDENT> if base_ip_address is None: <NEW_LINE> <INDENT> base_ip_address = get_own_ip() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ip_components = base_ip_address.split(".") <NEW_LINE> base_ip_address = ".".join(ip_components[0:3]) <NEW_LINE> <DEDENT> except (NameError, IndexError, AttributeError): <NEW_LINE> <INDENT> base_ip_address = "192.160.0" <NEW_LINE> <DEDENT> _, scan_start, scan_end, _ = sorted([0, int(range_start), int(range_end), 255]) <NEW_LINE> ip_scan_list = ["{}.{}".format(base_ip_address, k) for k in range(scan_start, 1 + scan_end)] <NEW_LINE> num_ips = len(ip_scan_list) <NEW_LINE> port_scan_list = [port] * num_ips <NEW_LINE> timeout_scan_list = [connection_timeout_sec] * num_ips <NEW_LINE> localhost_valid_scan_list = [False] * num_ips <NEW_LINE> args_iter = zip(ip_scan_list, port_scan_list, timeout_scan_list, localhost_valid_scan_list) <NEW_LINE> n_workers = min(n_workers, num_ips) <NEW_LINE> with Pool(n_workers) as worker_pool: <NEW_LINE> <INDENT> connection_success_list = worker_pool.starmap(check_connection, args_iter) <NEW_LINE> <DEDENT> check_open_ips_iter = zip(ip_scan_list, connection_success_list) <NEW_LINE> open_ips_list = [each_ip for each_ip, port_is_open in check_open_ips_iter if port_is_open] <NEW_LINE> report_base_ip_address = "{}.*".format(base_ip_address) <NEW_LINE> return report_base_ip_address, open_ips_list | Function used to check connections on a target port, across all ips (#.#.#.start -to- #.#.#.end) using
a base ip (if not provided, uses the ip of the host machine). Returns a list of ips which have
the target port open as well as the base IP address that was used
Inputs:
port -> (Integer) Port to be scanned
base_ip_address -> (String) Base ip to use for scan (i.e. the first 3 ip components to use for scan)
If not provided, will use the machines own ip as the base
range_start -> (Integer) Start of IP range to scan. Must be a positive number
range_end -> (Integer) End of IP range to scan. Must be less than 256
connection_timeout_sec -> (Float) Amount of time to wait (per ip address) for a connection attempt
n_workers -> (Integer) Number of parallel workers to use for the scan
Outputs:
report_base_ip_address, open_ips_list (list of strings)
Note:
The combined value of timeout and number of workers can greatly alter the amount of time
needed for this function to complete.
Roughly, the amount of time needed for this function to complete is given by:
time to complete (seconds) = 1 + connection_timeout_sec * (256 / n_workers) | 625941c1e8904600ed9f1e9b |
def do_ecdsa_sign(G, priv, data, kinv_rp=None): <NEW_LINE> <INDENT> ec_key = _C.EC_KEY_new() <NEW_LINE> _check(_C.EC_KEY_set_group(ec_key, G.ecg)) <NEW_LINE> _check(_C.EC_KEY_set_private_key(ec_key, priv.bn)) <NEW_LINE> if kinv_rp is None: <NEW_LINE> <INDENT> ecdsa_sig = _C.ECDSA_do_sign(data, len(data), ec_key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kinv, rp = kinv_rp <NEW_LINE> ecdsa_sig = _C.ECDSA_do_sign_ex( data, len(data), kinv.bn, rp.bn, ec_key) <NEW_LINE> <DEDENT> r = Bn() <NEW_LINE> s = Bn() <NEW_LINE> if _OPENSSL_VERSION == OpenSSLVersion.V1_0: <NEW_LINE> <INDENT> _C.BN_copy(r.bn, ecdsa_sig.r) <NEW_LINE> _C.BN_copy(s.bn, ecdsa_sig.s) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rptr = _FFI.new("BIGNUM **") <NEW_LINE> sptr = _FFI.new("BIGNUM **") <NEW_LINE> _C.ECDSA_SIG_get0(ecdsa_sig, rptr, sptr) <NEW_LINE> _C.BN_copy(r.bn, rptr[0]) <NEW_LINE> _C.BN_copy(s.bn, sptr[0]) <NEW_LINE> <DEDENT> _C.ECDSA_SIG_free(ecdsa_sig) <NEW_LINE> _C.EC_KEY_free(ec_key) <NEW_LINE> return (r, s) | A quick function to ECDSA sign a hash.
Args:
G (EcGroup): the group in which math is done.
priv (Bn): the secret key.
data (str): the string to sign.
kinv_rp (opaque): optional setup parameters.
Returns:
Bn, Bn: The (r, s) signature | 625941c10fa83653e4656f2c |
def test_coordinators_not_equals_each_other(self): <NEW_LINE> <INDENT> daemon_coord = coordinator.Coordinator(self.conf, self.fake_callback) <NEW_LINE> daemon_coord_2 = coordinator.Coordinator(self.conf, self.fake_callback) <NEW_LINE> daemon_coord._im_leader = mock.Mock(return_value=True) <NEW_LINE> daemon_coord_2._im_leader = mock.Mock(return_value=True) <NEW_LINE> self.assertNotEqual(daemon_coord, daemon_coord_2) <NEW_LINE> self.assertEqual(daemon_coord, daemon_coord) <NEW_LINE> self.assertEqual(daemon_coord_2, daemon_coord_2) | Test cases regarding the __init__ method of the Coordinator class
Test flow:
>>> Create two objects of the class;
>>> Mock method _im_leader to avoid infinite loop in the test;
>>> Test if an object is not equal to each other;
>>> Test if an object is equal to itself; | 625941c13317a56b86939bce |
def authorizeGCS(secretJsonFile): <NEW_LINE> <INDENT> FLOW = flow_from_clientsecrets(secretJsonFile, scope='https://www.googleapis.com/auth/devstorage.read_only') <NEW_LINE> storageFile = 'gcs.dat' <NEW_LINE> http = _authorizeCredentials(FLOW, storageFile) <NEW_LINE> return http | :type secretJsonFile: basestring
:rtype : httplib2.Http | 625941c14428ac0f6e5ba761 |
def apwidth(img, ap_uorder_interp, offsetlim=(-8, 8), ap_npix=10, method='max'): <NEW_LINE> <INDENT> img = np.array(img) <NEW_LINE> ap_width_max = offsetlim[1] - offsetlim[0] + 1 <NEW_LINE> assert 1 <= ap_npix <= ap_width_max <NEW_LINE> ofst, medsnr_lnsum = apoffset_snr(img, ap_uorder_interp, offsetlim=offsetlim) <NEW_LINE> if method is "max": <NEW_LINE> <INDENT> istart = np.argmax(medsnr_lnsum) <NEW_LINE> sys_offset = ofst[istart] <NEW_LINE> i_ap_width_lo, i_ap_width_hi = istart, istart <NEW_LINE> i_ap_width_min, i_ap_width_max = 0, len(ofst) - 1 <NEW_LINE> while (i_ap_width_hi - i_ap_width_lo + 1) < ap_npix: <NEW_LINE> <INDENT> if i_ap_width_lo - 1 < i_ap_width_min: <NEW_LINE> <INDENT> i_ap_width_hi += 1 <NEW_LINE> <DEDENT> elif i_ap_width_hi + 1 > i_ap_width_max: <NEW_LINE> <INDENT> i_ap_width_lo -= 1 <NEW_LINE> <DEDENT> elif medsnr_lnsum[i_ap_width_hi + 1] >= medsnr_lnsum[ i_ap_width_lo - 1]: <NEW_LINE> <INDENT> i_ap_width_hi += 1 <NEW_LINE> <DEDENT> elif medsnr_lnsum[i_ap_width_hi + 1] < medsnr_lnsum[ i_ap_width_lo - 1]: <NEW_LINE> <INDENT> i_ap_width_lo -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "@SONG: value error when automatically finding ap_width\n" "{0}".format(medsnr_lnsum.__repr__())) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif method is "min": <NEW_LINE> <INDENT> istart = np.argmin(medsnr_lnsum) <NEW_LINE> sys_offset = ofst[istart] <NEW_LINE> i_ap_width_lo, i_ap_width_hi = istart, istart <NEW_LINE> i_ap_width_min, i_ap_width_max = 0, len(ofst) - 1 <NEW_LINE> while (i_ap_width_hi - i_ap_width_lo + 1) < ap_npix: <NEW_LINE> <INDENT> if i_ap_width_lo - 1 < i_ap_width_min: <NEW_LINE> <INDENT> i_ap_width_hi += 1 <NEW_LINE> <DEDENT> elif i_ap_width_hi + 1 > i_ap_width_max: <NEW_LINE> <INDENT> i_ap_width_lo -= 1 <NEW_LINE> <DEDENT> elif medsnr_lnsum[i_ap_width_hi + 1] <= medsnr_lnsum[ i_ap_width_lo - 1]: <NEW_LINE> <INDENT> i_ap_width_hi += 1 <NEW_LINE> <DEDENT> elif medsnr_lnsum[i_ap_width_hi + 1] > medsnr_lnsum[ i_ap_width_lo - 1]: 
<NEW_LINE> <INDENT> i_ap_width_lo -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "@SONG: value error when automatically finding ap_width\n" "{0}".format(medsnr_lnsum.__repr__())) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("@SONG: invalid method! method={0}".format(method)) <NEW_LINE> <DEDENT> ap_width = (ofst[i_ap_width_lo], ofst[i_ap_width_hi]) <NEW_LINE> return ap_width, sys_offset | automatically find ap_width for a given ap_npix | 625941c13617ad0b5ed67e69 |
def start_control(dB): <NEW_LINE> <INDENT> start_menu_display() <NEW_LINE> response = start_user_input() <NEW_LINE> try: <NEW_LINE> <INDENT> return startmenulogic[response](dB) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print("You have input an invalid instruction. Please enter 1, 2, 3, or 4") <NEW_LINE> return | Creates the control flow for the start menu. | 625941c199cbb53fe6792b57 |
@when_not('zookeeper.joined') <NEW_LINE> def wait_for_zkjoin(): <NEW_LINE> <INDENT> status_set('waiting', 'Waiting for Zookeeper to become joined') | Wait for Zookeeper | 625941c1c432627299f04bb5 |
def format_data_timestamp(self, msg): <NEW_LINE> <INDENT> self._format_msg = msg <NEW_LINE> return self._format_msg.timestamp | only get timestamp from msg | 625941c1507cdc57c6306c46 |
def validator(func: const.WebSocketCommandHandler) -> const.WebSocketCommandHandler: <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def check_current_user( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: <NEW_LINE> <INDENT> def output_error(message_id: str, message: str) -> None: <NEW_LINE> <INDENT> connection.send_message( messages.error_message(msg["id"], message_id, message) ) <NEW_LINE> <DEDENT> if only_owner and not connection.user.is_owner: <NEW_LINE> <INDENT> output_error("only_owner", "Only allowed as owner") <NEW_LINE> return <NEW_LINE> <DEDENT> if only_system_user and not connection.user.system_generated: <NEW_LINE> <INDENT> output_error("only_system_user", "Only allowed as system user") <NEW_LINE> return <NEW_LINE> <DEDENT> if not allow_system_user and connection.user.system_generated: <NEW_LINE> <INDENT> output_error("not_system_user", "Not allowed as system user") <NEW_LINE> return <NEW_LINE> <DEDENT> if only_active_user and not connection.user.is_active: <NEW_LINE> <INDENT> output_error("only_active_user", "Only allowed as active user") <NEW_LINE> return <NEW_LINE> <DEDENT> if only_inactive_user and connection.user.is_active: <NEW_LINE> <INDENT> output_error("only_inactive_user", "Not allowed as active user") <NEW_LINE> return <NEW_LINE> <DEDENT> if only_supervisor and connection.user.name != HASSIO_USER_NAME: <NEW_LINE> <INDENT> output_error("only_supervisor", "Only allowed as Supervisor") <NEW_LINE> return <NEW_LINE> <DEDENT> return func(hass, connection, msg) <NEW_LINE> <DEDENT> return check_current_user | Decorate func. | 625941c11f5feb6acb0c4ac4 |
def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(AssetPolicy, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result | Returns the model properties as a dict | 625941c126238365f5f0eddc |
def get_metric_schema(resource_root): <NEW_LINE> <INDENT> return call(resource_root.get, METRIC_SCHEMA_PATH, ApiMetricSchema, True) | Get the schema for all of the metrics.
@return: List of metric schema. | 625941c1d6c5a10208143fb9 |
def std(self) -> Union["Series", "DataFrame"]: <NEW_LINE> <INDENT> return super().std() | Calculate rolling standard deviation.
Returns
-------
Series or DataFrame
Returns the same object type as the caller of the rolling calculation.
See Also
--------
Series.rolling : Calling object with Series data.
DataFrame.rolling : Calling object with DataFrames.
Series.std : Equivalent method for Series.
DataFrame.std : Equivalent method for DataFrame.
numpy.std : Equivalent method for Numpy array. | 625941c17cff6e4e811178f6 |
@app.task <NEW_LINE> def create_repo(repository_url, dir_name, to_fetch, user): <NEW_LINE> <INDENT> pth = path.join(REPOS_PATH, dir_name) <NEW_LINE> flag = 0 <NEW_LINE> if not path.exists(pth): <NEW_LINE> <INDENT> rep = Repository() <NEW_LINE> try: <NEW_LINE> <INDENT> if not is_rep(repository_url): <NEW_LINE> <INDENT> local = repo.Repo.init(pth, mkdir=True) <NEW_LINE> client = HttpGitClient(repository_url) <NEW_LINE> remote_refs = client.fetch( to_fetch, local, determine_wants=local.object_store.determine_wants_all, ) <NEW_LINE> local["HEAD"] = remote_refs["HEAD"] <NEW_LINE> local._build_tree() <NEW_LINE> rep.repo_dir_name = pth <NEW_LINE> rep.dir_name = dir_name <NEW_LINE> rep.url = '/'.join((repository_url, dir_name)) <NEW_LINE> rep.save() <NEW_LINE> flag = 1 <NEW_LINE> UserRepository(repo=rep, user=user).save() <NEW_LINE> rep.last_check = get_head_commit(rep) <NEW_LINE> rep.save() <NEW_LINE> create_analysis(dir_name) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> rmtree(pth) <NEW_LINE> if flag == 1: <NEW_LINE> <INDENT> rep.delete() <NEW_LINE> <DEDENT> raise RuntimeError("Something went wrong.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> rep = get_by_dir_name(dir_name) <NEW_LINE> if rep: <NEW_LINE> <INDENT> UserRepository(repo=rep, user=user).save() | Check on valid state repository url and try download it into. | 625941c14428ac0f6e5ba762 |
def test_remove_fields(self): <NEW_LINE> <INDENT> fs = Fieldsets(self.fieldsets) <NEW_LINE> fields = ('ever_taken_arv', 'why_no_arv', 'why_no_arv_other') <NEW_LINE> fs.remove_fields(*fields, section=None) <NEW_LINE> self.assertTrue( fields[0] not in fs.fieldsets[0][1]['fields']) <NEW_LINE> self.assertTrue( fields[1] not in fs.fieldsets[0][1]['fields']) <NEW_LINE> self.assertTrue( fields[2] not in fs.fieldsets[0][1]['fields']) | Asserts removes fields from an existing section.
| 625941c1d268445f265b4ddf |
def uniq(values): <NEW_LINE> <INDENT> uniq_values = [] <NEW_LINE> visited = set() <NEW_LINE> for v in values: <NEW_LINE> <INDENT> if v not in visited: <NEW_LINE> <INDENT> visited.add(v) <NEW_LINE> uniq_values.append(v) <NEW_LINE> <DEDENT> <DEDENT> return uniq_values | Returns uniq values while preserving the order.
| 625941c1d58c6744b4257bd1 |
def _prefix_operator(self, operation: str, expr) -> ast.Call: <NEW_LINE> <INDENT> expr = self.to_python(expr) <NEW_LINE> return self.setlx_function(operation, [expr]) | Turns a prefix operator into a function call
The function translates the given expression and calls the function *operation*
with the translated expression as argument.
e.g. +/[1,2] -> sum([1,2])
Parameters
----------
operation : str
The name of the function
expr :
The Setlx-Expression that is translated and used as argument for the function | 625941c1dd821e528d63b11b |
def logout(self): <NEW_LINE> <INDENT> if self.logined == True: <NEW_LINE> <INDENT> self.telnet.close() <NEW_LINE> self.logined = False <NEW_LINE> self.status = TELNET_NO_LOGIN <NEW_LINE> del self.telnet <NEW_LINE> self.telnet = None | 退出telent | 625941c1ad47b63b2c509ef1 |
def CAP_Update_FocusObject(self, context): <NEW_LINE> <INDENT> preferences = context.preferences <NEW_LINE> addon_prefs = preferences.addons['Capsule'].preferences <NEW_LINE> bpy.ops.object.select_all(action= 'DESELECT') <NEW_LINE> select_utils.SelectObject(self.object) <NEW_LINE> for area in bpy.context.screen.areas: <NEW_LINE> <INDENT> if area.type == 'VIEW_3D': <NEW_LINE> <INDENT> for region in area.regions: <NEW_LINE> <INDENT> if region.type == 'WINDOW': <NEW_LINE> <INDENT> override = {'area': area, 'region': region, 'edit_object': bpy.context.edit_object, 'scene': bpy.context.scene, 'screen': bpy.context.screen, 'window': bpy.context.window} <NEW_LINE> bpy.ops.view3d.view_selected(override) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return None | Focuses the camera to a particular object, ensuring the object is clearly within the camera frame. | 625941c1d53ae8145f87a1e4 |
def convertModel(keras_model): <NEW_LINE> <INDENT> return keras.estimator.model_to_estimator(keras_model) | 将keras的模型转为estimator
:param keras_model: keras模型
:return: estimator | 625941c1e5267d203edcdc10 |
def do_get_bindings(self): <NEW_LINE> <INDENT> greeting, err = run_fish_cmd(" __fish_config_interactive") <NEW_LINE> out, err = run_fish_cmd("__fish_config_interactive; bind") <NEW_LINE> out = out[len(greeting) :] <NEW_LINE> bindings = [] <NEW_LINE> command_to_binding = {} <NEW_LINE> binding_parser = BindingParser() <NEW_LINE> for line in out.split("\n"): <NEW_LINE> <INDENT> comps = line.split(" ", 2) <NEW_LINE> if len(comps) < 3: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if comps[1] == "--preset": <NEW_LINE> <INDENT> preset = True <NEW_LINE> comps = line.split(" ", 3)[1:] <NEW_LINE> <DEDENT> elif comps[1] == "--user": <NEW_LINE> <INDENT> preset = False <NEW_LINE> comps = line.split(" ", 3)[1:] <NEW_LINE> <DEDENT> if len(comps) < 3: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if comps[1] == "-k": <NEW_LINE> <INDENT> key_name, command = comps[2].split(" ", 1) <NEW_LINE> binding_parser.set_buffer(key_name.capitalize()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key_name = None <NEW_LINE> command = comps[2] <NEW_LINE> binding_parser.set_buffer(comps[1]) <NEW_LINE> <DEDENT> if command in bindings_blacklist: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> readable_binding = binding_parser.get_readable_binding() <NEW_LINE> if command in command_to_binding: <NEW_LINE> <INDENT> fish_binding = command_to_binding[command] <NEW_LINE> fish_binding.add_binding(line, readable_binding) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fish_binding = FishBinding(command, line, readable_binding) <NEW_LINE> bindings.append(fish_binding) <NEW_LINE> command_to_binding[command] = fish_binding <NEW_LINE> <DEDENT> <DEDENT> return [binding.get_json_obj() for binding in bindings] | Get key bindings | 625941c1a05bb46b383ec794 |
def hasPath(self, maze, start, destination):
    """Return True if a rolling ball can stop at *destination* from *start*.

    The ball rolls in a straight line until it hits a wall (cell value 1)
    or the edge of the maze; each stopping point is one BFS node.

    :type maze: List[List[int]]
    :type start: List[int]
    :type destination: List[int]
    :rtype: bool
    """
    # Fixed: the original computed maze_c = zip(*maze) and never used it;
    # the dead assignment has been removed.
    R, C = len(maze), len(maze[0])
    queue = collections.deque([tuple(start)])
    moves = [(0, 1), (0, -1), (1, 0), (-1, 0)]
    visited = set()
    while queue:
        i, j = queue.popleft()
        if i == destination[0] and j == destination[1]:
            return True
        if (i, j) in visited:
            continue
        visited.add((i, j))
        for di, dj in moves:
            ni, nj = i, j
            # Roll until the next cell is a wall or out of bounds.
            while 0 <= ni + di < R and 0 <= nj + dj < C and maze[ni + di][nj + dj] == 0:
                ni, nj = ni + di, nj + dj
            if (ni, nj) != (i, j):
                queue.append((ni, nj))
    return False
# _id: 625941c18e7ae83300e4af3d
def get(path):
    """Decorator ``@get('/path')`` that marks a handler as a GET route.

    :param path: the URL route to attach to the handler
    :return: the decorating function
    """
    def mark(handler):
        @functools.wraps(handler)
        def proxy(*args, **kw):
            return handler(*args, **kw)
        # Route metadata consumed by the web framework's registration code.
        proxy.__method__ = 'GET'
        proxy.__route__ = path
        return proxy
    return mark
# _id: 625941c1cc0a2c11143dce01
def mean_longitude(longitudes):
    """Compute the circular sample mean of longitudes in degrees.

    Assumes longitude values in the range -180 to 180.

    >>> lons = (-170.5, -178.3, 166)
    >>> np.mean(lons)  # doctest: +SKIP
    -60.933
    >>> mean_longitude(lons)  # doctest: +ELLIPSIS
    179.08509...

    :type longitudes: :class:`~numpy.ndarray` (or list, ..)
    :param longitudes: Geographical longitude values ranging from -180 to 180
        in degrees.
    """
    from scipy.stats import circmean
    # circmean handles the wrap-around at the +/-180 meridian; a plain
    # arithmetic mean would not.
    mean = circmean(np.array(longitudes), low=-180, high=180)
    return _normalize_longitude(mean)
# _id: 625941c107d97122c41787f7
def incr_float(self, key, amount=1.0):
    """Increment the float stored at hash field *key* by *amount*.

    :param key: hash field to increment
    :param amount: increment value; must be a ``float``, anything else
        raises ``TypeException``
    """
    if isinstance(amount, float):
        self.database.hincrbyfloat(self.cache_key, key, amount)
    else:
        raise TypeException(u'类型错误')
# _id: 625941c14e696a04525c93bd
def execute_cec_command(self, command, new_line=True): <NEW_LINE> <INDENT> self.connect() <NEW_LINE> self.cec_logger.debug('> %s', command.rstrip('\n ')) <NEW_LINE> self.cecclient.stdin.write(command) <NEW_LINE> if new_line: <NEW_LINE> <INDENT> self.cecclient.stdin.write('\n') <NEW_LINE> <DEDENT> self.cecclient.stdin.flush() | write a command to stdin of cec-client | 625941c19b70327d1c4e0d45 |
def write(self, item): <NEW_LINE> <INDENT> if self.closed: <NEW_LINE> <INDENT> raise Exception('Write to a pipe that was already closed') <NEW_LINE> <DEDENT> self.notFull.acquire() <NEW_LINE> self.mutex.acquire() <NEW_LINE> self.contents.append(item) <NEW_LINE> self.mutex.release() <NEW_LINE> self.notEmpty.release() | Blocking write | 625941c1379a373c97cfaab4 |
def endMyTurn(self): <NEW_LINE> <INDENT> global DEBUG <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> print("Ending my turn") | End my turn, yield to someone else | 625941c16e29344779a62585 |
def __init__(cls, *args, **_):
    """Initializer.

    Forwards to the base-class initializer with all keyword arguments
    deliberately dropped (only positional arguments are passed on).
    """
    super().__init__(*args)
# _id: 625941c1656771135c3eb7dd
def directeur(self): <NEW_LINE> <INDENT> return copy.copy(self.vecteur) | retourne une copie du vecteur directeur | 625941c1711fe17d825422e1 |
def set_user_env(reg, parent=None): <NEW_LINE> <INDENT> reg = listdict2envdict(reg) <NEW_LINE> types = dict() <NEW_LINE> key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Environment") <NEW_LINE> for name in reg: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _x, types[name] = winreg.QueryValueEx(key, name) <NEW_LINE> <DEDENT> except WindowsError: <NEW_LINE> <INDENT> types[name] = winreg.REG_EXPAND_SZ <NEW_LINE> <DEDENT> <DEDENT> key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Environment", 0, winreg.KEY_SET_VALUE) <NEW_LINE> for name in reg: <NEW_LINE> <INDENT> winreg.SetValueEx(key, name, 0, types[name], reg[name]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> from win32gui import SendMessageTimeout <NEW_LINE> from win32con import (HWND_BROADCAST, WM_SETTINGCHANGE, SMTO_ABORTIFHUNG) <NEW_LINE> SendMessageTimeout(HWND_BROADCAST, WM_SETTINGCHANGE, 0, "Environment", SMTO_ABORTIFHUNG, 5000) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> QMessageBox.warning(parent, _("Warning"), _("Module <b>pywin32 was not found</b>.<br>" "Please restart this Windows <i>session</i> " "(not the computer) for changes to take effect.")) | Set HKCU (current user) environment variables | 625941c1460517430c3940fb |
def __init__(self, **kwargs):
    """Initializes MaltParser's wrapper.

    Parameters
    -----------
    maltparser_dir : str
        Directory that contains the MaltParser jar file and model file;
        also used for storing temporary files, so writing must be allowed
        in it.
    model_name : str
        Name of the MaltParser's model.
    maltparser_jar : str
        Name of the MaltParser jar file (e.g. 'maltparser-1.8.jar').
    feature_generator : estnltk.syntax.maltparser_support.CONLLFeatGenerator
        The feature generator used for generating morphological features
        for tokens.  NB! This must be the same feature generator that was
        used for training the MaltParser model.
    """
    recognized = ('maltparser_dir', 'model_name', 'maltparser_jar',
                  'feature_generator')
    for argName, argVal in kwargs.items():
        if argName not in recognized:
            raise Exception(' Unsupported argument given: '+argName)
        setattr(self, argName, argVal)
    # Validate the configuration; each missing/invalid value aborts early.
    if not self.maltparser_dir:
        raise Exception('Missing input argument: MaltParser directory')
    elif not os.path.exists(self.maltparser_dir):
        raise Exception('Invalid MaltParser directory:', self.maltparser_dir)
    elif not self.maltparser_jar:
        raise Exception('Missing input argument: MaltParser jar file name')
    elif not self.model_name:
        raise Exception('Missing input argument: MaltParser model name')
    if not self.feature_generator:
        self.feature_generator = MaltParser.load_default_feature_generator()
# _id: 625941c16fb2d068a760f00c
def get_interfaces_counters(self):
    """Return per-interface packet counters merged with error counters.

    Each interface maps to a dict with:
        tx_errors, rx_errors, tx_discards, rx_discards,
        tx_octets, rx_octets, tx_unicast_packets, rx_unicast_packets,
        tx_multicast_packets, rx_multicast_packets,
        tx_broadcast_packets, rx_broadcast_packets (all int)

    Currently doesn't determine output broadcasts, multicasts.
    """
    port_range = len(self.get_interfaces())

    # Packet counters, keyed by interface name.
    packet_output = self._send_command("show packet ports 1-{}".format(port_range))
    packet_rows = textfsm_extractor(self, "get_interfaces_counters", packet_output)
    packets_by_iface = {row.pop("interface"): row for row in packet_rows}

    # Error counters; merge each row with its packet counters.
    error_output = self._send_command("show error ports 1-{}".format(port_range))
    error_rows = textfsm_extractor(self, "get_interfaces_errors", error_output)

    merged = {}
    for error_row in error_rows:
        iface = error_row.pop("interface")
        merged[iface] = {**packets_by_iface[iface], **error_row}
    return merged
# _id: 625941c1ff9c53063f47c165
def csi_to_conky(match: re.Match) -> str:
    r"""Convert a matched ANSI CSI (SGR) sequence into conky ${color}/${font} markup.

    This function must be called as ``repl`` in ``re.sub(pattern, repl, string)``.
    The pattern must match the whole ANSI sequence and capture one group:
    the list of parameters between ``[`` and ``m``.

    Example:
        converted = re.sub('\[([0-9;]*)m', csi_to_conky, original)

    Args:
        match (re.Match): the matched object
    Returns:
        (str) the string that will replace the pattern
    """
    try:
        # Empty parameters count as 0, the ANSI default.
        codes = [0 if _ == '' else int(_) for _ in match.group(1).split(';')]
    except IndexError:
        print('csi_to_conky called with no group match', file=sys.stderr)
        return match.group(0)
    except ValueError as err:
        print(f'csi_to_conky: {err}', file=sys.stderr)
        return match.group(0)
    result = ''
    while len(codes) > 0:
        code = codes.pop(0)
        if code == 0:
            # Full reset: note '=' (not '+='), deliberately discarding
            # anything accumulated so far.
            result = '${color}${font}'
            continue
        elif code == 1:
            result += '${font DejaVu Sans Mono:style=bold}'  # bold
            continue
        elif code > 29 and code < 38:
            result += conky_set_fg(code - 30)  # basic foreground colors 30-37
            continue
        elif code == 38:
            # Extended foreground: "38;2;r;g;b" (unsupported, skipped)
            # or "38;5;n" (256-color palette).
            try:
                type = codes.pop(0)
                if type == 2:
                    del codes[0:3]  # discard the 24-bit r;g;b triple
                    continue
                elif type == 5:
                    result += eightbit_to_conky(codes.pop(0))
                    continue
                else:
                    raise ValueError(f'Improper value {type} after code 38')
            except (IndexError, ValueError) as err:
                print(f'csi_to_conky: {err} while parsing advanced ANSI sequence {code};{type}', file=sys.stderr)
                continue
        elif code == 39:
            result += '${color}'  # default foreground color
            continue
        else:
            print(f'code {code} not implemented', file=sys.stderr)
            continue
    return result
# _id: 625941c1a219f33f346288dd
def read_info():
    """Single source of version number and other info.

    Inspiration:
    - https://packaging.python.org/guides/single-sourcing-package-version/
    - https://github.com/psf/requests/blob/master/setup.py
    """
    namespace = {}
    version_path = os.path.join(THIS_DIR, "physt", "version.py")
    with open(version_path, "r") as fh:
        source = fh.read()
    # Execute version.py so its assignments populate the namespace dict.
    exec(source, namespace)
    return namespace
# _id: 625941c1379a373c97cfaab5
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.