code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def step(self):
    """Run one step of this routine by delegating to ``source_func``.

    Propagates StopIteration from the source when the routine has
    nothing more to do.
    """
    produce = self.source_func
    return produce()
Raises StopIteration if the routine has nothing more to do
625941bfab23a570cc2500c4
def judgeCircle(self, moves):
    """Return True if the robot's moves bring it back to the origin.

    :type moves: str -- each character is one of 'U', 'D', 'L', 'R'
    :rtype: bool
    """
    # The robot is back at the origin iff vertical moves cancel out and
    # horizontal moves cancel out. (Replaces the `False if ... else True`
    # anti-pattern with the direct boolean expression.)
    return moves.count("U") == moves.count("D") and moves.count("L") == moves.count("R")
:type moves: str :rtype: bool
625941bfdd821e528d63b0ee
def individual_stats(df):
    """Interactively display raw rows of *df*, five at a time.

    Prompts once to start, then keeps showing batches of five rows until
    the user answers anything other than 'y'/'Y'.

    NOTE(review): like the original, there is no bounds check — df.loc
    raises if the frame runs out of rows; confirm whether callers
    guarantee enough rows.
    """
    prompt = input('Would you like to see stats at an individual level? Y/N\n')
    row = 0
    while prompt.lower() == 'y':
        # The original repeated this block five times verbatim; a loop
        # preserves the exact output while removing the duplication.
        for _ in range(5):
            print('\n', df.loc[row])
            row += 1
            print('-' * 40)
        more = input('\nWould you like to see the next 5? Y/N\n')
        if more.lower() != 'y':
            break
Displays statistics on bikeshare users.
625941bf30c21e258bdfa3df
def test_adhoc_sign_package_one_app(package_command, first_app):
    """With one app and the --adhoc argument, packaging ad-hoc signs the app."""
    package_command.apps = {"first": first_app}
    parsed = package_command.parse_options(["--adhoc"])
    package_command(**parsed)
    expected_package_options = {
        'packaging_format': 'pkg',
        'adhoc_sign': True,
        'identity': None,
        'sign_app': True,
    }
    assert package_command.actions == [
        ("verify",),
        ("package", "first", expected_package_options),
    ]
If there is one app, and an --adhoc argument, package signs the app using the ad hoc option
625941bfc4546d3d9de72975
def delete(self):
    """Delete this language: detach it from its owning `.Ruleset`."""
    owner = self._ruleset
    owner.remove_language(self)
    self._ruleset = None
Deletes this language, removing it from its `.Ruleset`.
625941bfff9c53063f47c138
def add_to_assignees(self, *assignees):
    """Add one or more assignees to this issue.

    :calls: `POST /repos/{owner}/{repo}/issues/{number}/assignees <https://docs.github.com/en/rest/reference/issues#assignees>`_
    :param assignees: :class:`github.NamedUser.NamedUser` instances or login strings
    :rtype: None
    """
    # Accept either NamedUser objects or plain login strings.
    assert all(
        isinstance(element, (github.NamedUser.NamedUser, str))
        for element in assignees
    ), assignees
    post_parameters = {
        "assignees": [
            # NamedUser objects contribute their login; strings pass through.
            assignee.login
            if isinstance(assignee, github.NamedUser.NamedUser)
            else assignee
            for assignee in assignees
        ]
    }
    headers, data = self._requester.requestJsonAndCheck(
        "POST", f"{self.url}/assignees", input=post_parameters
    )
    # Refresh this issue's attributes from the API response.
    self._useAttributes(data)
:calls: `POST /repos/{owner}/{repo}/issues/{number}/assignees <https://docs.github.com/en/rest/reference/issues#assignees>`_ :param assignees: :class:`github.NamedUser.NamedUser` or string :rtype: None
625941bf7d43ff24873a2be2
def RegQueryValueEx(key, valueName=None):
    """Retrieve the type and data for the specified registry value.

    :param key: a handle to an open registry key (must have been opened
        with KEY_QUERY_VALUE access)
    :param valueName: name of the registry value; optional
    :return: an object built by RegistryValueFactory for the value's type
    :raises errors.RegistryBaseException: for unclassified Windows errors;
        catch_and_raise_general_errors may raise more specific exceptions
        first (e.g. for invalid handle / access denied — see its docs)
    """
    try:
        # First call with no buffer: query the required data length.
        (dataType, data, dataLength) = c_api.RegQueryValueExW(key=key, name=valueName)
        # Allocate a buffer of the reported size, then fetch the real data.
        data = (dtypes.BYTE * dataLength.value)()
        (dataType, data, dataLength) = c_api.RegQueryValueExW(key=key, name=valueName, data=data, dataLength=dataLength)
        return RegistryValueFactory().by_type(dataType)(data)
    except errors.WindowsError as exception:
        # Give the generic translator a chance to raise a specific error;
        # if it returns, fall through to the base registry exception.
        errors.catch_and_raise_general_errors(exception)
        logging.exception(exception)
        raise errors.RegistryBaseException(exception.winerror, exception.strerror)
Retrieves the type and data for the specified registry value. Parameters key A handle to an open registry key. The key must have been opened with the KEY_QUERY_VALUE access right valueName The name of the registry value. It is optional. Return Value If the function succeeds, it returns a tuple of the value's name and a RegistryValue object. If the function fails, a RegistryBaseException exception is raised, unless: If the key is not open, an InvalidHandleException is raised; if access is denied, an AccessDeniedException is raised; if the value does not exist, the function raises KeyError
625941bfb830903b967e9851
def _get_fp(y_pred, y_true):
    """Count false positives: predicted positive where ground truth is negative.

    args:
        y_pred: tensor of predictions, same shape as y_true
        y_true: tensor of ground-truth labels in {0, 1}
    return:
        scalar float tensor holding the false-positive count
    """
    negatives = 1 - y_true
    false_pos = torch.sum(negatives * y_pred)
    return false_pos.float()
args: y_true : 3-d ndarray in [batch_size, img_rows, img_cols] y_pred : 3-d ndarray in [batch_size, img_rows, img_cols] return [float] false_positive
625941bf1f5feb6acb0c4a97
def cap(self):
    """Return the least-attenuated capability available for this node.

    Prefers the read-write cap; falls back to the read-only cap when no
    read-write cap is set.
    """
    # Fixed: identity comparison with None (`is None`, not `== None`).
    if self.rwcap is None:
        return self.rocap
    return self.rwcap
Get the least attenuated sparse capability available for this Node
625941bfe64d504609d74783
def genReadAsyncDataOutputType(*args):
    """SWIG wrapper around the native Packet_genReadAsyncDataOutputType.

    C++ overloads (from the generated signature):
      genReadAsyncDataOutputType(errorDetectionMode, buffer, size, port) -> size_t
      genReadAsyncDataOutputType(errorDetectionMode, buffer, size) -> size_t
    """
    return _libvncxx.Packet_genReadAsyncDataOutputType(*args)
genReadAsyncDataOutputType(vn::protocol::uart::ErrorDetectionMode errorDetectionMode, char * buffer, size_t size, uint8_t port) -> size_t genReadAsyncDataOutputType(vn::protocol::uart::ErrorDetectionMode errorDetectionMode, char * buffer, size_t size) -> size_t
625941bf67a9b606de4a7dff
def test_add_test_record():
    """Check that add_test_record forwards a parsed junit result to the test run.

    Patches betelgeuse's OBJ_CACHE and its TestCase/datetime/testimony
    collaborators, then verifies the record lands on
    test_run.add_test_record_by_fields with the expected field mapping
    (junit 'failure' -> Polarion 'failed', time string -> float duration).
    """
    test_run = mock.MagicMock()
    obj_cache = {
        'test_run': test_run,
        'user': 'testuser',
        'testcases': {
            'module.NameTestCase.test_name': 'caffa7b0-fb9e-430b-903f-3f37fa28e0da',
        },
    }
    with mock.patch.dict('betelgeuse.OBJ_CACHE', obj_cache):
        with mock.patch.multiple(
            'betelgeuse',
            TestCase=mock.DEFAULT,
            datetime=mock.DEFAULT,
            testimony=mock.DEFAULT,
        ) as patches:
            # One matching Polarion test case is found for the junit result.
            test_case = mock.MagicMock()
            patches['TestCase'].query.return_value = [test_case]
            testimony_test_function = mock.MagicMock()
            testimony_test_function.testmodule = 'module.py'
            testimony_test_function.parent_class = 'NameTestCase'
            testimony_test_function.name = 'test_name'
            patches['testimony'].get_testcases.return_value = {
                'module.py': [testimony_test_function],
            }
            add_test_record({
                'classname': 'module.NameTestCase',
                'message': u'Test failed because it not worked',
                'name': 'test_name',
                'status': 'failure',
                'time': '3.1415',
            })
            test_run.add_test_record_by_fields.assert_called_once_with(
                duration=3.1415,
                executed=patches['datetime'].datetime.now(),
                executed_by='testuser',
                test_case_id=test_case.work_item_id,
                test_comment='Test failed because it not worked',
                test_result='failed'
            )
Check if test record creation works.
625941bf956e5f7376d70db2
def __ne__(self, other):
    """Return True when ``self`` and ``other`` are not equal.

    Defined as the logical negation of ``__eq__``.
    """
    equal = (self == other)
    return not equal
Check whether ``self`` and ``other`` are not equal. TESTS:: sage: F3 = GF(3).algebraic_closure() sage: F3 != F3 False sage: F5 = GF(5).algebraic_closure() sage: F3 != F5 True
625941bfeab8aa0e5d26da9b
def test_coerce_number_set(self):
    """Coercing a mixed list into a number set converts every member to int."""
    field = Field(data_type=set_(int), coerce=True)
    coerced = field.coerce([2, '4'])
    self.assertEqual(coerced, {2, 4})
Coerce to number set
625941bfd6c5a10208143f8c
@register.tag
def bookmark_form(parser, token):
    """Template tag: render (or bind to a context variable) a bookmark form.

    Usage: {% bookmark_form for *instance* [using *key*] [as *varname*] %}

    Parses the tag arguments against BOOKMARK_FORM_EXPRESSION and returns
    a BookmarkFormNode configured with them; the node handles key
    resolution, rendering and the *as varname* case.
    """
    return BookmarkFormNode(**_parse_args(parser, token, BOOKMARK_FORM_EXPRESSION))
Return, as html or as a template variable, a Django form to add or remove a bookmark for the given instance and key, and for current user. Usage: .. code-block:: html+django {% bookmark_form for *instance* [using *key*] [as *varname*] %} The key can be given hardcoded (surrounded by quotes) or as a template variable. Note that if the key is not given, it will be generated using the handler's *get_key* method, that, if not overridden, returns the default key. If the *varname* is used then it will be a context variable containing the form. Otherwise the form is rendered using the first template found in the order that follows:: bookmarks/[app_name]/[model_name]/[key]/form.html bookmarks/[app_name]/[model_name]/form.html bookmarks/[app_name]/[key]/form.html bookmarks/[app_name]/form.html bookmarks/[key]/form.html bookmarks/form.html The *app_name* and *model_name* refer to the instance given as argument to this templatetag. Example: .. code-block:: html+django {% bookmark_form for myinstance using 'mykey' as form %} {% if form %} {% if user.is_authenticated %} <form action="{% url bookmarks_bookmark %}" method="post" accept-charset="UTF-8" class="bookmarks_form"> {% csrf_token %} {{ form }} {% with form.bookmark_exists as exists %} {# another hidden input is created to handle javascript submit event #} <input class="bookmarks_toggle" type="submit" value="add"{% if exists %} style="display: none;"{% endif %}/> <input class="bookmarks_toggle" type="submit" value="remove"{% if not exists %} style="display: none;"{% endif %}/> {% endwith %} <span class="error" style="display: none;">Error during process</span> </form> {% else %} <a href="{{ login_url }}?{{ next }}={{ request.get_full_path }}">add</a> {% endif %} {% endif %} The template variable (or the html) will be None if: - the user is not authenticated - the instance is not bookmarkable - the key is not allowed AJAX is also supported using jQuery, e.g.: .. 
code-block:: html+django {% load bookmarks_tags %} <script src="path/to/jquery.js" type="text/javascript"></script> <script src="{{ STATIC_URL }}bookmarks/bookmarks.js" type="text/javascript"></script> {% bookmark_form for article %}
625941bf44b2445a33931fdb
def _runtime_initialize(self):
    """Hook called once per ``engine.run`` invocation.

    Meant as a final setup point for all constraints; this default
    implementation intentionally does nothing — subclasses override it.
    """
    pass
This is called once everytime engine.run method is executed. It is meant to be used as a final setup call for all constraints.
625941bf5fdd1c0f98dc0176
def __init__(self, persistent=False, virsh_dargs=None):
    """Initialize this instance's internal virsh interface.

    :param persistent: use a persistent virsh connection when True
    :param virsh_dargs: keyword dict forwarded to the virsh module's
        Virsh/VirshPersistent class; defaults to an empty dict
    """
    if virsh_dargs is None:
        virsh_dargs = {}
    if persistent:
        # super_set bypasses this class's own attribute handling —
        # presumably property magic in LibvirtXMLBase; confirm in base class.
        self.super_set('__virsh__', virsh.VirshPersistent(**virsh_dargs))
    else:
        self.super_set('__virsh__', virsh.Virsh(**virsh_dargs))
    super(LibvirtXMLBase, self).__init__()
Initialize instance's internal virsh interface from virsh_dargs @param: persistent: Use persistent virsh connection for this instance @param: virsh_dargs: virsh module Virsh class dargs API keywords
625941bf293b9510aa2c31dc
def test_validTabStructure_2():
    """A row with nine tab-separated fields (8 tabs) passes validation."""
    line = "1\t2\t3\t4\t5\t6\t7\t8\t9\n"
    assert validTabStructure(line) == True
passes 8 tab structure
625941bf15fb5d323cde0a50
def compile_statements(self):
    """Compile a sequence of statements, excluding the enclosing braces."""
    self.write_non_terminal_start('statements')
    # Keyword -> compiler dispatch, checked in the original order.
    handlers = (
        ('do', self.compile_do),
        ('let', self.compile_let),
        ('if', self.compile_if),
        ('while', self.compile_while),
        ('return', self.compile_return),
    )
    while True:
        for keyword, handler in handlers:
            if self.is_token(KEYWORD, keyword):
                handler()
                break
        else:
            # Current token starts no statement: the sequence is done.
            break
    self.write_non_terminal_end()
Compiles a sequence of statements, not including the enclosing {}.
625941bf0383005118ecf528
def __init__(self, filepath):
    """Class constructor: initialize a file wrapper from its access path.

    Opens *filepath* for writing (truncating any existing content).

    :param filepath: access path of the file
    :type filepath: str
    """
    self.chemin = filepath
    self.contenu = []
    self.nbLigne = 0
    self.ro = False
    self.file = open(filepath, 'w')
Constructeur de classe. Un fichier est initialisé à partir de son chemin d'accès :param filepath: chemin d'accès du fichier :type filepath: str
625941bf55399d3f055885f7
def __init__(self, num_units, num_mels, outputs_per_step):
    """Attention decoder module (Tacotron-style — TODO confirm).

    :param num_units: dimension of hidden units
    :param num_mels: mel filterbank channels per output frame
    :param outputs_per_step: mel frames predicted per decoder step
    """
    super(AttentionDecoder, self).__init__()
    self.num_units = num_units
    # Additive-attention scorer parameters: v, W1 (query), W2 (keys) —
    # presumably combined as v^T f(W1 q + W2 k) in forward(); confirm there.
    self.v = nn.Linear(num_units, 1, bias=False)
    self.W1 = nn.Linear(num_units, num_units, bias=False)
    self.W2 = nn.Linear(num_units, num_units, bias=False)
    # Attention RNN takes a half-width input (num_units // 2).
    self.attn_grucell = nn.GRUCell(num_units // 2, num_units)
    self.gru1 = nn.GRUCell(num_units, num_units)
    self.gru2 = nn.GRUCell(num_units, num_units)
    # Projects [context; hidden] (2 * num_units) back to num_units.
    self.attn_projection = nn.Linear(num_units * 2, num_units)
    self.out = nn.Linear(num_units, num_mels * outputs_per_step)
    # Stop-token predictor, one logit per output frame.
    self.stop = nn.Linear(num_units, outputs_per_step)
    self.num_mels = num_mels
    self.outputs_per_step = outputs_per_step
:param num_units: dimension of hidden units
625941bfd10714528d5ffc24
def validate_email(value):
    """Validate a single email address.

    Runs Django's format validation, then the disposable-domain check.
    Returns False for empty values or on any ValidationError.
    """
    if not value:
        return False
    try:
        django_validate_email(value)
        validate_disposable_email(value)
    except ValidationError:
        return False
    return True
Validate a single email.
625941bf26068e7796caec1e
def is_empty(self):
    """Return True when the stack holds no nodes (head is unset)."""
    has_nodes = self.head is not None
    return not has_nodes
Returns True if stack is empty
625941bf283ffb24f3c55847
def get_test_set(self):
    """Return the 'test' split of FULLVIDTIMIT using this dataset's settings.

    Forwards this instance's center/rescale/axes preprocessing options.

    .. todo:: WRITEME
    """
    return FULLVIDTIMIT(which_set='test', center=self.center, rescale=self.rescale, axes=self.axes)
.. todo:: WRITEME
625941bf9b70327d1c4e0d18
def write_to_parquet(cls, self):
    """Write this object's dataframe to a local parquet file at self.fpath.

    NOTE(review): the original docstring said "json file", but the call
    delegates to the module-level write_to_parquet helper. Also, the
    (cls, self) signature looks like a misapplied classmethod — confirm
    how callers invoke this.
    """
    # Resolves to the module-level helper of the same name, not recursion.
    write_to_parquet(self.df, self.fpath)
Write data to local json file.
625941bf07f4c71912b113c4
def IsDisplayValid(*args):
    """SWIG wrapper: IsDisplayValid(Image argument_1) -> bool."""
    return _DigitalMicrograph.IsDisplayValid(*args)
IsDisplayValid(Image argument_1) -> bool
625941bf3346ee7daa2b2cae
def _build_loss(self, y_pred, y_true, **kwargs):
    """Build the training loss: softmax cross-entropy plus L2 weight decay.

    :param y_pred: logits tensor.
    :param y_true: one-hot labels tensor (gradient is stopped through it).
    :param kwargs: extra arguments for the regularization term.
        - weight_decay: float, L2 weight decay coefficient (default 0.0005).
    :return: scalar tf.Tensor.
    """
    decay_coeff = kwargs.pop('weight_decay', 0.0005)
    l2_terms = [tf.nn.l2_loss(var) for var in self.trainable_variables()]
    reg_loss = tf.add_n(l2_terms)
    per_example_losses = tf.nn.softmax_cross_entropy_with_logits(
        labels=tf.stop_gradient(y_true), logits=y_pred)
    data_loss = tf.reduce_mean(input_tensor=per_example_losses)
    return data_loss + decay_coeff * reg_loss
Build loss function for the model training. :param kwargs: dict, extra arguments for regularization term. - weight_decay: float, L2 weight decay regularization coefficient. :return tf.Tensor.
625941bf31939e2706e4cdb1
def test_initialisation(self):
    """The player initialises with empty history and default attributes."""
    # The abstract TestPlayer base has no concrete player to construct.
    if self.__class__ == TestPlayer:
        return
    player = self.player()
    expected_attributes = {'length': -1, 'game': DefaultGame, 'noise': 0}
    self.assertEqual(player.history, [])
    self.assertEqual(player.tournament_attributes, expected_attributes)
    self.assertEqual(player.cooperations, 0)
    self.assertEqual(player.defections, 0)
    self.classifier_test()
Test that the player initiates correctly.
625941bf30dc7b76659018ad
def test(D, P, fields=fields, values=values):
    """Return the fraction of respondents in D that predict() gets right.

    D is a list of respondent dictionaries (each with a 'result' key);
    P is the output of train(); fields/values default to the module-level
    globals of the same names.
    """
    hits = sum(
        1 for respondent in D
        if predict(respondent, P, fields, values) == respondent['result']
    )
    return hits / len(D)
returns the % our train/predict functions got correct vs the results D is list of respondants dictionaries P is result of train
625941bf2eb69b55b151c7f0
def Optimization(self, L0, maxiter):
    """Run trust-region constrained optimization from the initial guess.

    param L0: initial guess (matrix form; converted via self.to_vector)
    param maxiter: maximum number of iterations
    return: scipy result object with .x converted back to matrix form
            (the Laplacian)
    """
    solver_options = {'maxiter': maxiter, 'verbose': 3, 'gtol': 1e-8}
    result = minimize(
        self.objective_function,
        self.to_vector(L0),
        method='trust-constr',
        constraints=self.cons,
        options=solver_options,
    )
    result.x = self.to_matrix(result.x)
    return result
Optimization, method a Trust region param L0: initial guess param maxiter: maximum of iterations return: result (Laplacian)
625941bfd4950a0f3b08c295
def un_gz(self, infile, outfile):
    """Decompress a gzip file with the system gunzip.

    :param infile: path of the .gz input file
    :param outfile: path for the decompressed output
    :raises subprocess.CalledProcessError: if gunzip exits non-zero
    """
    import subprocess
    # BUG FIX: the original passed the bare format string
    # 'gunzip -c %s > %s' to the shell without ever substituting
    # infile/outfile, so it could never decompress anything.
    # NOTE(review): paths are interpolated into a shell command; callers
    # must not pass untrusted filenames (shell injection risk).
    subprocess.check_call('gunzip -c %s > %s' % (infile, outfile), shell=True)
Decompress gzip file.
625941bfd268445f265b4db2
def parse_td(row_html):
    """Return the cell elements of a row.

    Parameters
    ----------
    row_html : node-like
        A DOM <tr> node.

    Returns
    -------
    list of node-like
        The direct-child td/th elements, i.e. the row's columns.
    """
    cell_tags = ("td", "th")
    return row_html.find_all(cell_tags, recursive=False)
Return the td elements from a row element. Parameters ---------- obj : node-like A DOM <tr> node. Returns ------- list of node-like These are the elements of each row, i.e., the columns.
625941bffb3f5b602dac35d4
def __eq__(self, other):
    """Return True when *other* is the same model type with equal attributes."""
    if isinstance(other, GETAccountSummaryTypeTaxInfo):
        return self.__dict__ == other.__dict__
    return False
Returns true if both objects are equal
625941bf99cbb53fe6792b2b
def _get_best_move(_board: Board, _depth: int, _base_move: Move, _board_eval: Callable):
    """Score one candidate root move; helper for running alpha-beta on threads.

    Makes _base_move, runs _alpha_beta_max on the resulting position, then
    unmakes the move so the board is left unchanged.

    :return: (_base_move, value for the original mover, node counter)
    """
    alpha = LOW_BOUND
    beta = HIGH_BOUND
    _counter = 0
    _player = _board.turn
    _board.make_move(_base_move)
    _value, _counter = _alpha_beta_max(_board, _depth - 1, alpha, beta, _player, _board_eval, _counter)
    # Negate: the search scored the position for the side to move after
    # _base_move (negamax-style — confirm against _alpha_beta_max).
    _value *= -1
    _board.unmake_move()
    return _base_move, _value, _counter
Helper function for running alpha_beta on multiple threads.
625941bf3d592f4c4ed1cfb8
def hashes(self, trust_internet=True):
    """Return a hash-comparer seeded with this requirement's known-good hashes.

    Combines hashes from the requirement's options with the hash embedded
    in its link's URL fragment (e.g. #md5=...), always OR-ing them all
    together.

    :param trust_internet: when False, only the original (requirements-file)
        link's hash is considered, not one populated from an index server.
    """
    known_good = self.options.get('hashes', {}).copy()
    link = self.link if trust_internet else self.original_link
    if link and link.hash:
        known_good.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(known_good)
Return a hash-comparer that considers my option- and URL-based hashes to be known-good. Hashes in URLs--ones embedded in the requirements file, not ones downloaded from an index server--are almost peers with ones from flags. They satisfy --require-hashes (whether it was implicitly or explicitly activated) but do not activate it. md5 and sha224 are not allowed in flags, which should nudge people toward good algos. We always OR all hashes together, even ones from URLs. :param trust_internet: Whether to trust URL-based (#md5=...) hashes downloaded from the internet, as by populate_link()
625941bf6fece00bbac2d681
def output_multiple(self):
    """SWIG wrapper: output_multiple(or_ii_sptr self) -> int."""
    return _blocks_swig4.or_ii_sptr_output_multiple(self)
output_multiple(or_ii_sptr self) -> int
625941bf21a7993f00bc7c30
@pytest.fixture()
def create_db_instance(null_db_instances, clean_db):
    """Create and save a singleton MongoDB Database instance.

    The null_db_instances and clean_db fixture arguments are not used
    directly; they run first to reset/clean database state.
    """
    database = Database(of_type='MongoDB', name='orion_test', username='user', password='pass')
    return database
Create and save a singleton database instance.
625941bf71ff763f4b5495cb
def getCommunity(self, communityId):
    """Return the community with the given id from the community repository.

    @param communityId: ID of the community to return
    @type communityId: C{String}
    @return: the matching community
    @rtype: L{Community}
    """
    repository = self._communities
    return repository[communityId]
Getter @param communityId: ID of the community to return @type communityId: C{String} @return: The community with the given id from the community repository. @rtype: L{Community}
625941bf6e29344779a62558
def get_roc(self):
    """Alias for :meth:`get_statistics`; see that method for details."""
    return self.get_statistics()
See :meth:`get_statistics`
625941bfb57a9660fec337c5
def _create_chord_entry(task_id, task_class, message_body, user_id):
    """Create/update status records for a new UserTaskMixin task in a Celery chord.

    Ensures a container status row exists for the chord's group (and, on
    first sight of the group, for the chord itself), records the header
    task, and — once per group — records the chord body task.

    :param task_id: id of the header task being recorded
    :param task_class: the task's class (a UserTaskMixin subclass)
    :param message_body: Celery message body (args/kwargs/chord/taskset)
    :param user_id: owner of all created status records
    """
    args = message_body['args']
    kwargs = message_body['kwargs']
    arguments_dict = task_class.arguments_as_dict(*args, **kwargs)
    name = task_class.generate_name(arguments_dict)
    total_steps = task_class.calculate_total_steps(arguments_dict)
    parent_name = kwargs.get('user_task_name', '')
    chord_data = message_body['chord']
    group_id = message_body['taskset']
    with transaction.atomic():
        # One container row per Celery group; created on first header task.
        group, created = UserTaskStatus.objects.get_or_create(
            task_id=group_id,
            defaults={'is_container': True, 'name': parent_name, 'task_class': 'celery.group',
                      'total_steps': total_steps, 'user_id': user_id})
        if created:
            # First task of the group: also create the chord container and
            # make it the group's parent.
            chord = UserTaskStatus.objects.create(
                is_container=True, name=parent_name, task_class='celery.chord',
                task_id=str(uuid4()), total_steps=total_steps, user_id=user_id)
            group.parent = chord
            group.save(update_fields={'parent', 'modified'})
        else:
            chord = None
            # Group already known: just grow its step count and backfill
            # the name if it was empty.
            group.increment_total_steps(total_steps)
            if parent_name and not group.name:
                group.set_name(parent_name)
        # Status row for this header task, under the group container.
        UserTaskStatus.objects.create(
            name=name, parent=group, task_class=task_class, task_id=task_id,
            total_steps=total_steps, user_id=user_id)
        if not created:
            # Chord body was already recorded when the group was created.
            return
        # Record the chord body task (only if it is a UserTaskMixin).
        task_id = chord_data['options']['task_id']
        body_task = chord_data['task']
        body_class = import_string(body_task).__class__
        if not issubclass(body_class, UserTaskMixin):
            return
        args = chord_data['args']
        kwargs = chord_data['kwargs']
        arguments_dict = body_class.arguments_as_dict(*args, **kwargs)
        name = body_class.generate_name(arguments_dict)
        total_steps = body_class.calculate_total_steps(arguments_dict)
        UserTaskStatus.objects.get_or_create(
            task_id=task_id,
            defaults={'name': name, 'parent': chord, 'task_class': body_task,
                      'total_steps': total_steps, 'user_id': user_id})
        chord.increment_total_steps(total_steps)
Create and update status records for a new :py:class:`UserTaskMixin` in a Celery chord.
625941bf925a0f43d2549db9
def numWays(self, n, k):
    """Count ways to paint n fence posts with k colors, with no three
    adjacent posts sharing the same color.

    :type n: int
    :type k: int
    :rtype: int
    """
    if n == 0:
        return 0
    if n == 1:
        return k
    # ways(i) = (k-1) * (ways(i-1) + ways(i-2)): the i-th post either
    # differs from post i-1, or matches it (then i-1 must differ from i-2).
    two_back = k
    one_back = k * k
    ans = k * k
    for _ in range(2, n):  # fixed: xrange is Python 2 only
        ans = (k - 1) * (two_back + one_back)
        two_back, one_back = one_back, ans
    return ans
:type n: int :type k: int :rtype: int
625941bf5fcc89381b1e1601
def _paste(self, box):
    """Execute the pasting phase of PRIM on *box*.

    Generates candidate pastes for every restricted dimension via the
    dtype-specific PASTE_OPERATIONS helpers, picks the best-scoring one,
    and recurses while the paste improves mass, objective and mean.

    :param box: the box to (possibly) grow
    :return: the (possibly updated) box
    :raises PrimError: if a restricted dimension's dtype has no paste op
    """
    x = self.x[self.yi_remaining]
    res_dim = determine_restricted_dims(box._box_lims[-1], self._box_init)
    possible_pastes = []
    for u in res_dim:
        logging.getLogger(__name__).info("pasting "+u)
        dtype = self.x.dtype.fields.get(u)[0].name
        if dtype not in Prim.PASTE_OPERATIONS:
            raise PrimError("no paste operation defined for type %s" % dtype)
        pastes = Prim.PASTE_OPERATIONS[dtype](self, box, u)
        # Flatten this dimension's candidates into the shared list.
        [possible_pastes.append(entry) for entry in pastes]
    if not possible_pastes:
        # No candidate paste exists; the box cannot grow further.
        return box
    # Score each candidate: (objective, nr. of unrestricted dims, limits, indices).
    scores = []
    for entry in possible_pastes:
        i, box_lim = entry
        obj = self.obj_func(self.y[box.yi], self.y[i])
        non_res_dim = len(x.dtype.descr)- determine_nr_restricted_dims(box_lim, self._box_init)
        score = (obj, non_res_dim, box_lim, i)
        scores.append(score)
    # Best objective first; ties broken by fewer restricted dimensions.
    scores.sort(key=operator.itemgetter(0,1), reverse=True)
    obj, _, box_new, indices = scores[0]
    mass_old = box.yi.shape[0]/self.n
    mass_new = self.y[indices].shape[0]/self.n
    mean_old = np.mean(self.y[box.yi])
    mean_new = np.mean(self.y[indices])
    # Accept the paste only if it grows mass while improving the mean
    # and the objective; otherwise stop recursing.
    if mass_new >= self.mass_min and mass_new > mass_old and obj > 0 and mean_new > mean_old:
        box.update(box_new, indices)
        return self._paste(box)
    else:
        return box
Executes the pasting phase of the PRIM. Delegates pasting to data type specific helper methods.
625941bf60cbc95b062c6487
def _dvr_router_notifications_for_live_migration(self, with_floatingip=False):
    """Check DVR router notifications target the right hosts on live migration.

    Sets up two DVR L3 agents, a router with an external gateway, and a VM
    port bound to HOST1; then marks the port as migrating to HOST2 via the
    port binding profile (no host binding change yet) and asserts the L3
    RPC notifier was told to update routers on HOST2 while the port stays
    bound to HOST1.

    :param with_floatingip: also create a floating IP on the VM port and
        assert the FIP agent gateway port is requested for HOST2
    """
    HOST1, HOST2 = 'host1', 'host2'
    for host in [HOST1, HOST2]:
        helpers.register_l3_agent(host=host, agent_mode=constants.L3_AGENT_MODE_DVR)
    router = self._create_router()
    arg_list = (portbindings.HOST_ID,)
    with self.subnet() as ext_subnet, self.subnet(cidr='20.0.0.0/24') as subnet1, self.port(subnet=subnet1, device_owner=DEVICE_OWNER_COMPUTE, arg_list=arg_list, **{portbindings.HOST_ID: HOST1}) as vm_port:
        # Make ext_subnet's network external and use it as the router gateway.
        ext_net_id = ext_subnet['subnet']['network_id']
        self._update('networks', ext_net_id, {'network': {extnet_apidef.EXTERNAL: True}})
        self.l3_plugin.update_router(
            self.context, router['id'],
            {'router': {'external_gateway_info': {'network_id': ext_net_id}}})
        self.l3_plugin.add_router_interface(
            self.context, router['id'], {'subnet_id': subnet1['subnet']['id']})
        if with_floatingip:
            floating_ip = {'floating_network_id': ext_net_id,
                           'router_id': router['id'],
                           'port_id': vm_port['port']['id'],
                           'tenant_id': vm_port['port']['tenant_id'],
                           'dns_name': '', 'dns_domain': ''}
            floating_ip = self.l3_plugin.create_floatingip(
                self.context, {'floatingip': floating_ip})
        with mock.patch.object(self.l3_plugin, '_l3_rpc_notifier') as l3_notifier, mock.patch.object(
                self.l3_plugin, 'create_fip_agent_gw_port_if_not_exists'
        ) as fip_agent:
            # Simulate nova pre-live-migration: only the binding profile
            # changes; the port's host binding remains HOST1.
            live_migration_port_profile = {
                'migrating_to': HOST2
            }
            updated_port = self.core_plugin.update_port(
                self.context, vm_port['port']['id'],
                {'port': {portbindings.PROFILE: live_migration_port_profile}})
            # Destination host must be told to update the router...
            l3_notifier.routers_updated_on_host.assert_any_call(
                self.context, {router['id']}, HOST2)
            # ...while the port is still bound to the source host.
            self.assertEqual(updated_port[portbindings.HOST_ID], HOST1)
            self.assertNotEqual(updated_port[portbindings.HOST_ID], HOST2)
            if with_floatingip:
                fip_agent.return_value = True
                # FIP agent gateway port must be prepared on the destination.
                fip_agent.assert_any_call(
                    mock.ANY, floating_ip['floating_network_id'], HOST2)
Check the router notifications go to the right hosts with live migration without hostbinding on the port.
625941bf15baa723493c3eb8
def test_caches_init_2(self):
    """A freshly constructed Caches(True) starts with an rmse of 0."""
    self.assertEqual(Caches(True).rmse, 0)
Tests to see that Caches' rmse value is initialized to 0
625941bf56b00c62f0f1459c
def exp(x):
    """Element-wise exponential.

    :param x: a Variable.
    :return: a Variable wrapping the JVM-side result of the BigDL call.
    """
    return Variable.from_jvalue(callBigDlFunc("float", "exp", x))
Element-wise exponential. :param x: A variable. :return: A variable.
625941bfd164cc6175782c92
def sort(read_filename, write_filename):
    """Sort user-behavior records per (user, item) by timestamp and rewrite them.

    Reads CSV rows of (user_id, item_id, behavior, geo, item_category,
    'YYYY-MM-DD HH' time string), skipping the header row; groups rows by
    user then item; sorts each group by the parsed timestamp; and writes
    the flattened result (no header, time as a Unix timestamp) to
    *write_filename*.

    :param read_filename: path of the input CSV (with header)
    :param write_filename: path for the sorted output CSV
    """
    formatter = '%Y-%m-%d %H'
    data = {}
    # Py3 fix: the original used the removed `file()` builtin and binary
    # modes; the csv module wants text mode with newline='' in Python 3.
    with open(read_filename, newline='') as read_fp:
        reader = csv.reader(read_fp)
        for line in reader:
            if reader.line_num == 1:  # skip the header row
                continue
            user_id = int(line[0])
            item_id = int(line[1])
            behavior = int(line[2])
            geo = line[3]
            item_category = int(line[4])
            # Local-time parse, matching the original's mktime/strptime.
            timestamp = int(time.mktime(time.strptime(line[5], formatter)))
            data.setdefault(user_id, {}).setdefault(item_id, []).append(
                (behavior, geo, item_category, timestamp))
    with open(write_filename, 'w', newline='') as write_fp:
        writer = csv.writer(write_fp)
        for user_id in data:
            for item_id in data[user_id]:
                records = data[user_id][item_id]
                records.sort(key=lambda r: r[3])
                for behavior, geo, item_category, timestamp in records:
                    writer.writerow([user_id, item_id, behavior, geo,
                                     item_category, timestamp])
Extract features from the dataset.
625941bfcc40096d61595896
def execute_and_get(self, query):
    """Execute *query* and return the cursor that ran it.

    Used when the query's output matters. The connection context manager
    commits on success.

    :param query: SQL text to execute (not parameterized — caller beware)
    :return: the cursor, or None if the query raised sqlite3.Error
    """
    try:
        with self.connection:
            cur = self.connection.cursor()
            cur.execute(query)
        return cur
    except sqlite3.Error as e:
        print("Error executing query: \n{}\n{}".format(query, e.args[0]))
        return None
Unsafe query executor. Used when query output matters. :param query: query to execute :return: Cursor object that was used to execute the query. None if query was unsuccessful
625941bfa8370b77170527e5
def take_fast_steps():
    """Walk to all eligible candidates until enough candidates are active.

    Closure: relies on names from the enclosing scope (self,
    switch_to_normal_walking, get_eligible_candidates, FAST_WALKER_*).
    Stops fast-walking when the active-candidate target is reached or the
    maximum number of fast steps has been taken.
    """
    active_canidates = list(self.dispersy_yield_verified_candidates())
    if len(active_canidates) > FAST_WALKER_CANDIDATE_TARGET:
        # Enough candidates already; drop back to the normal walker.
        self._logger.debug("there are %d active candidates available, "
                           "quitting fast walker", len(active_canidates))
        switch_to_normal_walking()
    else:
        self._logger.debug("%d candidates active, target is %d walking a bit more... (step %d of %d)",
                           len(active_canidates), FAST_WALKER_CANDIDATE_TARGET,
                           self._fast_steps_taken, FAST_WALKER_STEPS)
        eligible_candidates = get_eligible_candidates(time())
        self._logger.debug("Found %d eligible_candidates", len(eligible_candidates))
        # Issue an introduction request to every eligible candidate.
        for count, candidate in enumerate(eligible_candidates, 1):
            self._logger.debug("%d of %d extra walk to %s", count, len(eligible_candidates), candidate)
            self.create_introduction_request(candidate, allow_sync=False, is_fast_walker=True)
        self._fast_steps_taken += 1
        if self._fast_steps_taken >= FAST_WALKER_STEPS:
            switch_to_normal_walking()
Walk to all the initial and new eligible candidates. Stop if we got enough active candidates.
625941bfa05bb46b383ec768
def refreshBrowser(self):
    """Reload the current page via self.driver (presumably a Selenium
    WebDriver — confirm against the class's setup).

    :return: None
    """
    self.driver.refresh()
:return:
625941bf7c178a314d6ef39f
def get_session_seats(self, session_number, availability=0):
    """Request a snapshot of the current seating status for a session.

    VIF request code 20; the body is a q20 record and the response is a
    pl4 record (per the protocol description).

    :param session_number: the session to query
    :param availability: 0=all seats, 1=available only, 2=unavailable only
    :return: whatever send_message returns for this request
    """
    message = VIFMessage()
    message.set_request_header(request_code=20, **self.header_data())
    body_data = {
        'session_number': session_number,
        'availability': availability
    }
    body_record = VIFRecord(record_code='q20', data=body_data)
    message.add_body_record(body_record)
    return self.send_message(message)
Record code: 20 Description: Can be used to retrieve a snapshot of the current seating status for the specified session. Availability: 0=Get All Seats 1=Available 2=Unavailable Body: q20 record Response: pl4 record
625941bf4e4d5625662d431f
def test_check_haproxy_percents_for_backend_test_ok(self):
    """Checking only backend-1 ignores degraded backend-2 and returns OK.

    warning/critical up-ratio thresholds are 0.90 and 0.60.
    """
    backend_counts = {
        'backend-1': {
            'count': 2,
            'up_count': 2,
        },
        'backend-2': {
            'count': 100,
            'up_count': 3,
        }
    }
    with patch("check_haproxy_stats.check_haproxy_stats_up.get_haproxy_services_up_count_for_backends") as get_backend_counts:
        get_backend_counts.return_value = backend_counts
        result = check_haproxy_stats_up.check_haproxy_up_rates(
            base_url_path="127.0.0.1/haproxy/stats",
            warning_percent=0.90,
            critical_percent=0.60,
            backends=['backend-1'],
        )
        self.assertEqual(result, check_haproxy_stats_up.SensuCheckStatus.RETURN_CODES["OK"])
Test backend-1 check only tests backend-1 up percent with warning ratio and critical ratio of 0.90 and 0.60. check_haproxy_up_rates should return OK code.
625941bfd268445f265b4db3
def test_run_entire_full_x_full_y(init_entire, create_db_instance):
    """A branched experiment re-runs without triggering another branching event.

    Starts from 23 trials in the experiment tree (4 in this experiment),
    runs a hunt for 20 max trials, and expects the totals to grow to
    39/20 — i.e. 16 new trials in the tree and 20 total in the experiment,
    with no new branch created.
    """
    experiment = EVCBuilder().build_view_from({'name': 'full_x_full_y'})
    assert len(experiment.fetch_trials_tree({})) == 23
    assert len(experiment.fetch_trials({})) == 4
    kleio.core.cli.main(("-vv hunt --max-trials 20 --pool-size 1 -n full_x_full_y "
                         "./black_box_with_y.py "
                         "-x~uniform(-10,10) "
                         "-y~uniform(-10,10,default_value=1)").split(" "))
    assert len(experiment.fetch_trials_tree({})) == 39
    assert len(experiment.fetch_trials({})) == 20
Test if branched experiment can be executed without triggering a branching event again
625941bf71ff763f4b5495cc
def run_experiment(self):
    """Run the configured prediction experiment and return its results.

    Looks up the callable registered under self.algorithm in
    self.algorithms and invokes it.

    :return: whatever the selected algorithm callable returns
    """
    algorithm_fn = self.algorithms[self.algorithm]
    return algorithm_fn()
Runs the specified prediction experiment for the given user and returns the results :return: A dict containing a list of predictions, the corresponding ground truth values, and patient level performance measures TODO: refine
625941bf30c21e258bdfa3e0
def create_lun(self): <NEW_LINE> <INDENT> path = '/vol/%s/%s' % (self.flexvol_name, self.name) <NEW_LINE> lun_create = netapp_utils.zapi.NaElement.create_node_with_children( 'lun-create-by-size', **{'path': path, 'size': str(self.size), 'ostype': self.ostype, 'space-reservation-enabled': str(self.space_reserve), 'space-allocation-enabled': str(self.space_allocation)}) <NEW_LINE> try: <NEW_LINE> <INDENT> self.server.invoke_successfully(lun_create, enable_tunneling=True) <NEW_LINE> <DEDENT> except netapp_utils.zapi.NaApiError as e: <NEW_LINE> <INDENT> self.module.fail_json(msg="Error provisioning lun %s of size %s: %s" % (self.name, self.size, to_native(e)), exception=traceback.format_exc())
Create LUN with requested name and size
625941bf7b25080760e3939e
def diff_cost(diff): <NEW_LINE> <INDENT> return tf.reduce_mean(tf.square(diff))
Returns a tf scalar that's the cost due to the difference between the two images.
625941bf9c8ee82313fbb6b9
def main_c2(): <NEW_LINE> <INDENT> application = tornado.web.Application([ (ROOT_PATH_QUERY, RPQ), (SITE_PATH_QUERY, SPQ), (r'/.*', UnknownPageHandler) ]) <NEW_LINE> if SSL: <NEW_LINE> <INDENT> http_server = tornado.httpserver.HTTPServer( application, ssl_options={'certfile': CERT_FILE, 'ssl_version': ssl.PROTOCOL_TLSv1}) <NEW_LINE> http_server.listen(443) <NEW_LINE> tornado.ioloop.IOLoop.instance().start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> http_server = tornado.httpserver.HTTPServer(application) <NEW_LINE> http_server.listen(80) <NEW_LINE> tornado.ioloop.IOLoop.instance().start()
Start C2 Server.
625941bfdc8b845886cb5478
def sign(self, digest): <NEW_LINE> <INDENT> raise NotImplementedError()
Sign the specified message. :param digest: An AMQP message digest. :type digest: str :return: The message signature. :rtype: str
625941bf462c4b4f79d1d615
def test_rule_create_after_invalid(self): <NEW_LINE> <INDENT> rule = dict(name='one_rule', source='any', destination='any', interface='lan', after='admin_bypass') <NEW_LINE> self.do_rule_creation_test(rule, failed=True, msg='Failed to insert after rule=admin_bypass interface=lan')
test creation of a new rule after an invalid rule
625941bf0a50d4780f666dd5
def sigcheck(a_key, a_hash_for_sig, a_sig): <NEW_LINE> <INDENT> r, s = sigdecode_der(a_sig) <NEW_LINE> return ecdsa_verify(generator_secp256k1, a_key.public_pair(), a_hash_for_sig, ( r, s ))
Returns True if a_key was used to generate a_sig from a_hash_for_sig; False otherwise.
625941bfc432627299f04b89
def draw_dt_gt_dets(image, dt_boxes, dt_label, gt_boxes, gt_label, vis_diff=False): <NEW_LINE> <INDENT> if vis_diff: <NEW_LINE> <INDENT> assert len(gt_label) == len(dt_label) <NEW_LINE> assert len(gt_boxes) == len(dt_boxes) <NEW_LINE> for i in range(len(gt_label)): <NEW_LINE> <INDENT> if dt_label[i] == gt_label[i]: <NEW_LINE> <INDENT> image = draw_image_bboxes_text(image, [gt_boxes[i]], [gt_label[i]], color=(0, 255, 0)) <NEW_LINE> image = draw_image_bboxes_text(image, [dt_boxes[i]], [dt_label[i]], color=(0, 255, 0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> image = draw_image_bboxes_text(image, [gt_boxes[i]], [gt_label[i]], color=(0, 255, 0)) <NEW_LINE> image = draw_image_bboxes_text(image, [dt_boxes[i]], [dt_label[i]], color=(255, 0, 0)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> image = draw_image_bboxes_text(image, gt_boxes, gt_label, color=(0, 255, 0)) <NEW_LINE> image = draw_image_bboxes_text(image, dt_boxes, dt_label, color=(255, 0, 0)) <NEW_LINE> <DEDENT> return image
显示ground true和Detection bbox :param image: :param dt_boxes: :param dt_label: :param gt_boxes: :param gt_label: :param vis_diff: 是否显示差异, vis_diff=True :使用不同颜色标记gt_label和dt_label的差异 要求len(gt_label) == len(dt_label) vis_diff=False:使用不同颜色显示dt_boxes和gt_boxes dt_boxes和gt_boxes的长度任意 :return:
625941bf2eb69b55b151c7f1
def format_element(bfo): <NEW_LINE> <INDENT> pagination = bfo.field('300__a') <NEW_LINE> return pagination
Prints the record pagination @see: date.py, publisher.py, reprints.py, imprint.py, place.py
625941bf1b99ca400220a9f6
def plot_redblue(party, x='date', y='daily_positives', days=0): <NEW_LINE> <INDENT> df = get_state_data() <NEW_LINE> fig, ax = plt.subplots(figsize=(8, 8)) <NEW_LINE> fig.autofmt_xdate() <NEW_LINE> ax.fmt_xdata = mdates.DateFormatter('%m-%d') <NEW_LINE> df = df[df['Governor'] == party] <NEW_LINE> if party == 'republican': <NEW_LINE> <INDENT> df = df[3:] <NEW_LINE> color = 'red' <NEW_LINE> <DEDENT> if party == 'democrat': <NEW_LINE> <INDENT> df = df[36:] <NEW_LINE> color = 'blue' <NEW_LINE> <DEDENT> df = df[['date', 'state', 'daily_positives']].dropna() <NEW_LINE> grouper = df.groupby(x)[y].sum() <NEW_LINE> print(grouper.tail()) <NEW_LINE> if days > 0: <NEW_LINE> <INDENT> plot_data = plot_data[-1*days:] <NEW_LINE> <DEDENT> ax.plot(df[x].unique(), grouper, linestyle='-', label=party, color=color) <NEW_LINE> ax.grid() <NEW_LINE> fig.text(0.15, 0.86, 'data source: covidtracking.com', fontsize=11, color='gray') <NEW_LINE> ax.set_title("{} States {}".format(party.title(), y)) <NEW_LINE> ax.yaxis.grid(True) <NEW_LINE> plt.style.use('seaborn') <NEW_LINE> plt.show()
plot states with red or blue governors
625941bf004d5f362079a27a
def test_exclude_should_work_on_sequence_too(self): <NEW_LINE> <INDENT> class PersonTable(tables.Table): <NEW_LINE> <INDENT> first_name = tables.Column() <NEW_LINE> last_name = tables.Column() <NEW_LINE> occupation = tables.Column() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> sequence = ('first_name', 'last_name', 'occupation') <NEW_LINE> <DEDENT> <DEDENT> class AnotherPersonTable(PersonTable): <NEW_LINE> <INDENT> class Meta(PersonTable.Meta): <NEW_LINE> <INDENT> exclude = ('first_name', 'last_name') <NEW_LINE> <DEDENT> <DEDENT> tableA = PersonTable([]) <NEW_LINE> self.assertEqual(tableA.columns.names(), ['first_name', 'last_name', 'occupation']) <NEW_LINE> tableB = AnotherPersonTable([]) <NEW_LINE> self.assertEqual(tableB.columns.names(), ['occupation']) <NEW_LINE> tableC = PersonTable([], exclude=('first_name')) <NEW_LINE> self.assertEqual(tableC.columns.names(), ['last_name', 'occupation'])
It should be possible to define a sequence on a table and exclude it in a child of that table.
625941bfeab8aa0e5d26da9c
def __init__(self, lexer, grammar): <NEW_LINE> <INDENT> self.lexer = lexer <NEW_LINE> self.grammar = grammar <NEW_LINE> self.cur_token = None <NEW_LINE> self.parser_tree = []
Create a grammar parser. Arguments --------- lexer -- Lexer instance to use as input grammar -- A list of rules Each entry is a rule which is a tuple (`symbol`, `expression`, `action`) `symbol` is the name of a nonterminal symbol to be defined. `expression` is one of the expression which defines the `symbol`. If additional expressions can define the same symbol, additional rules with identical `symbol` must be defined. `action` is the method to call upon expression matches. It can be None if no action is associated. For instance the grammar: STMT : (EXPR " ")+ EXPR : TERM "+" EXPR | TERM "-" EXPR | TERM TERM : NUMBER | "(" EXPR ")" must be represented as such: [ ("STMT", E(Q(E("EXPR", " "), "+")), None), ("EXPR", E("TERM", "+", "EXPR"), None), ("EXPR", E("TERM", "-", "EXPR"), None), ("EXPR", E("TERM"), None), ("TERM", E("NUMBER"), None), ("TERM", E("(", "EXPR", ")"), None), ] The associated lexer should yield all terminal symbols: " ", "+", "-", "(", ")" and "NUMBER" The `action` method is called with the following arguments: symbols -- list of matched symbols parser_tree -- list of nonterminal symbols currently being parsed The method must return the value to replace the nonterminal symbol with. Note ---- Caution with the provided grammar! This parser only support LL(1) grammar. Other kind of grammar, including left-recursive grammar will end up in a infinite recursion! There are no watchdogs to prevent such infinite recursion. Just be careful with what you are parsing.
625941bf24f1403a92600aad
def generate_char(dark, mode, char_set): <NEW_LINE> <INDENT> import random <NEW_LINE> if len(char_set[dark]) > 0: <NEW_LINE> <INDENT> if "l" in mode: <NEW_LINE> <INDENT> char = char_set[dark][0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> char = char_set[dark][random.randint(0, len(char_set[dark]) - 1)] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if dark in (0, 1): <NEW_LINE> <INDENT> char = " " <NEW_LINE> return char <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i = 1 <NEW_LINE> done = False <NEW_LINE> while not done: <NEW_LINE> <INDENT> if len(char_set[dark + i]) > 0: <NEW_LINE> <INDENT> new_dark = dark + i <NEW_LINE> done = True <NEW_LINE> <DEDENT> elif len(char_set[dark - i]) > 0: <NEW_LINE> <INDENT> new_dark = dark - i <NEW_LINE> done = True <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> <DEDENT> if "l" in mode: <NEW_LINE> <INDENT> char = char_set[new_dark][0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> char = char_set[new_dark][ random.randint(0, len(char_set[new_dark]) - 1) ] <NEW_LINE> <DEDENT> <DEDENT> return char
Select a courier character based on a darkness level.
625941bf2ae34c7f2600d076
def ParticleFilterSearch_ExperimentalTests(pop_size = 50, iterations = 40): <NEW_LINE> <INDENT> if True: <NEW_LINE> <INDENT> imf, imd , pat_list, pose_list = pattern_utils.make_test_image_1(True) <NEW_LINE> ipat = 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> imf, imd , pat_list, pose_list = pattern_utils.make_test_image_2(True) <NEW_LINE> ipat = 0 <NEW_LINE> <DEDENT> pat = pat_list[ipat] <NEW_LINE> xs, ys = pose_list[ipat][:2] <NEW_LINE> region = (xs-20, xs+20, ys-20, ys+20) <NEW_LINE> scale = pose_list[ipat][3] <NEW_LINE> W = initial_population(region, scale , pop_size) <NEW_LINE> pop = PatternPosePopulation(W, pat) <NEW_LINE> pop.set_distance_image(imd) <NEW_LINE> pop.temperature = 5 <NEW_LINE> Lw, Lc = pop.particle_filter_search(iterations,log=True) <NEW_LINE> plt.plot(Lc) <NEW_LINE> plt.title('Cost vs generation index') <NEW_LINE> plt.show() <NEW_LINE> print(pop.best_w) <NEW_LINE> print(pop.best_cost) <NEW_LINE> pattern_utils.display_solution(pat_list, pose_list, pat, pop.best_w) <NEW_LINE> return (pop.best_cost, pop.best_w)
We created this function in order to be able to test the particle filter seach and find the experimental results. This function is very similar to the test_particle_filter_search function with minor changes to make testing simpler.
625941bf656771135c3eb7b1
def get_xpath(path: str, tree: html.HtmlElement): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> element = tree.xpath(path)[0] <NEW_LINE> <DEDENT> except IndexError as e: <NEW_LINE> <INDENT> ebay_logger.error(f'{e.__class__}: {path}: {e}') <NEW_LINE> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return element
Looks for path/element in tree :param path: str, valid xpath search string :param tree: html.HtmlElement from lxml :return: element, based on path; or None if not found
625941bf8a43f66fc4b53fac
def upload_example_old(): <NEW_LINE> <INDENT> response.files.append(URL(r=request, c='static/js', f='fileuploader.js')) <NEW_LINE> response.files.append(URL(r=request, c='static/css', f='fileuploader.css')) <NEW_LINE> response.files.append(URL(r=request, c='static/js/thatsit/global', f='use_fileuploader.js')) <NEW_LINE> form = FORM(TABLE( TR('Filename:', INPUT(_type='text', _name='arg_filename', requires=IS_NOT_EMPTY())), TR('Description:', TEXTAREA(_name='arg_description', value='write something here')), TR('Time (in secs):', INPUT(_type='int', _name='arg_time', requires=IS_NOT_EMPTY(), value=20)), TR('Position:', INPUT(_type='int', _name='arg_position', requires=IS_NOT_EMPTY(), value=0)), TR('Your email:', INPUT(_type='text', _name='email', requires=IS_EMAIL())), TR('Admin', INPUT(_type='checkbox', _name='admin')), TR('Sure?', SELECT('yes', 'no', _name='sure', requires=IS_IN_SET(['yes', 'no']))), TR('Profile', TEXTAREA(_name='profile', value='write something here')), TR('', INPUT(_type='submit', _value='SUBMIT')), )) <NEW_LINE> return dict(message = "test message from controller", form=form)
Renomeei em 2013/1/14 para old, para utilizar o SQLFORM disponibilizado out-of-the-box pelo web2py (permite nao guardar imediatamente na BD, pelo que podemos em vez disso fazer o upload via WebService para o cliente - realizado nas actions upload_first() e upload_new()
625941bf187af65679ca5063
def event_data_dict_to_list(event_data_dict): <NEW_LINE> <INDENT> run_dicts = event_data_dict.values() <NEW_LINE> return reduce(lambda x, y: x + y, [d.values() for d in run_dicts])
Converts the dict-based representation of event data for a label to a flat list of event data objects.
625941bfab23a570cc2500c5
def register_filter(filter_dir, filter_type, filter_name): <NEW_LINE> <INDENT> cmd = ['git', 'config', '--local', 'filter.rcs-keywords.%s' % filter_type, '%s %s' % (os.path.join(filter_dir, filter_name), '%f')] <NEW_LINE> execute_cmd(cmd=cmd)
Register a git filter for rcs-keywords functionality Arguments: filter_dir: Directory to hold the filter program filter_type: Type of the filter program filter_name: Source program of the filter to be copied Returns: None
625941bf2ae34c7f2600d077
def removeRemote(self, remote, clear=True): <NEW_LINE> <INDENT> super(RoadStack, self).removeRemote(remote=remote, clear=clear) <NEW_LINE> for transaction in remote.transactions.values(): <NEW_LINE> <INDENT> transaction.nack()
Remove remote at key uid. If clear then also remove from disk
625941bf5fc7496912cc38c3
def is_relative_to(path1: pathlib.Path, path2: os.PathLike) -> bool: <NEW_LINE> <INDENT> result = False <NEW_LINE> try: <NEW_LINE> <INDENT> path1.relative_to(path2) <NEW_LINE> result = True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return result
Returns whether or not `path1` is relative to `path2`. Parameters ---------- path1 path compared with `path2` path2 path compared with `path1` Returns ------- bool True, if `path1` is relative to `path2`
625941bf56b00c62f0f1459d
def configure(self, default_settings=global_settings, **options): <NEW_LINE> <INDENT> if self._wrapped is not empty: <NEW_LINE> <INDENT> raise RuntimeError('Settings already configured.') <NEW_LINE> <DEDENT> holder = UserSettingsHolder(default_settings) <NEW_LINE> for name, value in options.items(): <NEW_LINE> <INDENT> setattr(holder, name, value) <NEW_LINE> <DEDENT> self._wrapped = holder
Called to manually configure the settings. The 'default_settings' parameter sets where to retrieve any unspecified values from (its argument must support attribute access (__getattr__)).
625941bf7cff6e4e811178cb
def _get_version_family(): <NEW_LINE> <INDENT> current_version = CASSANDRA_VERSION_FROM_BUILD <NEW_LINE> version_family = 'unknown' <NEW_LINE> if current_version.vstring.startswith('2.0'): <NEW_LINE> <INDENT> version_family = '2.0.x' <NEW_LINE> <DEDENT> elif current_version.vstring.startswith('2.1'): <NEW_LINE> <INDENT> version_family = '2.1.x' <NEW_LINE> <DEDENT> elif current_version.vstring.startswith('2.2'): <NEW_LINE> <INDENT> version_family = '2.2.x' <NEW_LINE> <DEDENT> elif current_version.vstring.startswith('3.0'): <NEW_LINE> <INDENT> version_family = '3.0.x' <NEW_LINE> <DEDENT> elif '3.1' <= current_version < '4.0': <NEW_LINE> <INDENT> version_family = '3.x' <NEW_LINE> <DEDENT> elif '4.0' <= current_version < '4.1': <NEW_LINE> <INDENT> version_family = 'trunk' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError("4.1+ not yet supported on upgrade tests!") <NEW_LINE> <DEDENT> return version_family
Detects the version family (line) using dtest.py:CASSANDRA_VERSION_FROM_BUILD
625941bf5f7d997b871749da
def __init__(self): <NEW_LINE> <INDENT> Language.__init__(self) <NEW_LINE> self.name = 'lexicon'
Initializes lexicon according to super class RuleDict.
625941bf91f36d47f21ac435
def keys(self): <NEW_LINE> <INDENT> queue = Queue() <NEW_LINE> x = self._first <NEW_LINE> while x is not None: <NEW_LINE> <INDENT> queue.enqueue(x._key) <NEW_LINE> x = x._next <NEW_LINE> <DEDENT> return queue
Returns all keys in the symbol table as an Iterable.
625941bfa219f33f346288b2
def mangle_build_prop_hook(prop, overrides): <NEW_LINE> <INDENT> prop.put("ro.com.android.dateformat", "yyyy-MM-dd") <NEW_LINE> pass
call mangle_build_prop_hook
625941bf3346ee7daa2b2caf
def _is_iter(lst): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return list(lst) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False
Is ``lst`` an iterator?
625941bf009cb60464c632f9
def set_cron_job(backup_time,backup_dir,cloud_loc): <NEW_LINE> <INDENT> os.system("sudo touch cron_container.txt") <NEW_LINE> os.system("crontab -l > cron_container.txt") <NEW_LINE> crontab_cmd = backup_time + " sudo backuputil -p " + backup_dir + " -b " + cloud_loc <NEW_LINE> os.system('echo "' + crontab_cmd + '" >> cron_container.txt') <NEW_LINE> os.system("crontab cron_container.txt") <NEW_LINE> os.system("rm cron_container.txt")
Sets a cron job at the specified backup_time to back up the files in the specified directory at the specified cloud location
625941bf55399d3f055885f8
def projector(detector_name, inj, hp, hc, distance_scale=1): <NEW_LINE> <INDENT> detector = Detector(detector_name) <NEW_LINE> hp /= distance_scale <NEW_LINE> hc /= distance_scale <NEW_LINE> try: <NEW_LINE> <INDENT> tc = inj.tc <NEW_LINE> ra = inj.ra <NEW_LINE> dec = inj.dec <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> tc = inj.get_time_geocent() <NEW_LINE> ra = inj.longitude <NEW_LINE> dec = inj.latitude <NEW_LINE> <DEDENT> hp.start_time += tc <NEW_LINE> hc.start_time += tc <NEW_LINE> try: <NEW_LINE> <INDENT> hp_tapered = wfutils.taper_timeseries(hp, inj.taper) <NEW_LINE> hc_tapered = wfutils.taper_timeseries(hc, inj.taper) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> hp_tapered = hp <NEW_LINE> hc_tapered = hc <NEW_LINE> <DEDENT> projection_method = 'lal' <NEW_LINE> if hasattr(inj, 'detector_projection_method'): <NEW_LINE> <INDENT> projection_method = inj.detector_projection_method <NEW_LINE> <DEDENT> logging.info('Injecting at %s, method is %s', tc, projection_method) <NEW_LINE> signal = detector.project_wave(hp_tapered, hc_tapered, ra, dec, inj.polarization, method=projection_method, reference_time=tc,) <NEW_LINE> return signal
Use the injection row to project the polarizations into the detector frame
625941bf99cbb53fe6792b2c
def _best_affine_patch(self, point): <NEW_LINE> <INDENT> point = list(point) <NEW_LINE> try: <NEW_LINE> <INDENT> abs_point = [abs(_) for _ in point] <NEW_LINE> <DEDENT> except ArithmeticError: <NEW_LINE> <INDENT> abs_point = point <NEW_LINE> <DEDENT> i_max = 0 <NEW_LINE> p_max = abs_point[i_max] <NEW_LINE> for i in range(1,len(point)): <NEW_LINE> <INDENT> if abs_point[i]>p_max: <NEW_LINE> <INDENT> i_max = i <NEW_LINE> p_max = abs_point[i_max] <NEW_LINE> <DEDENT> <DEDENT> return i_max
Return the best affine patch of the ambient projective space. The "best" affine patch is where you end up dividing by the homogeneous coordinate with the largest absolute value. Division by small numbers is numerically unstable. INPUT: - ``point`` -- a point of the algebraic subscheme. OUTPUT: Integer. The index of the patch. See :meth:`affine_patch`. EXAMPLES:: sage: P.<x,y,z>= ProjectiveSpace(QQ,2) sage: S = P.subscheme(x+2*y+3*z) sage: S._best_affine_patch(P.point([0,-3,2])) 1 sage: S._best_affine_patch([0,-3,2]) 1 TESTS:: sage: F = GF(3) sage: P.<x,y,z>= ProjectiveSpace(F,2) sage: S._best_affine_patch([0,1,2]) 2
625941bf76d4e153a657ea75
def validate_name(self, name): <NEW_LINE> <INDENT> chat = Chat.query.filter_by(name=name.data).first() <NEW_LINE> if chat is not None: <NEW_LINE> <INDENT> raise ValidationError('Пожалуста, используйте другое название для беседы.')
метод проверки названия чата :param name: название чата :type name: строка :return: ничего не возвращает
625941bf63b5f9789fde702a
def check_hardlinked_pkg(env:Environment, Pkg:Package) -> list: <NEW_LINE> <INDENT> bad_linked = [] <NEW_LINE> expected_linked = Pkg.files - Pkg.has_prefix.keys() - Pkg.no_link <NEW_LINE> for f in expected_linked: <NEW_LINE> <INDENT> src = join(Pkg.path, f) <NEW_LINE> tgt = join(env.path, f) <NEW_LINE> if not is_hardlinked(src, tgt): <NEW_LINE> <INDENT> bad_linked.append(f) <NEW_LINE> <DEDENT> <DEDENT> return bad_linked
Check that pkg in cache is correctly (or completely) hardlinked into env. Returns a list of improperly hardlinked files.
625941bfcb5e8a47e48b79f2
def correctInPlace(x): <NEW_LINE> <INDENT> s = np.sum(x, axis=1) <NEW_LINE> s /= np.mean(s[s != 0]) <NEW_LINE> s[s == 0] = 1 <NEW_LINE> s2 = np.sum(x, axis=0) <NEW_LINE> s2 /= np.mean(s2[s2 != 0]) <NEW_LINE> s2[s2 == 0] = 1 <NEW_LINE> x /= (s2[None, :] * s[:, None])
works for non-symmetric and symmetric data
625941bf090684286d50ec28
def filter_bad_stats(stats, settings): <NEW_LINE> <INDENT> mmr = stats['minor_axis_length'] / stats['major_axis_length'] <NEW_LINE> stats = stats[mmr > settings.Process.min_deformation] <NEW_LINE> stats = stats[(stats['major_axis_length'] * settings.PostProcess.pix_size) < settings.Process.max_length] <NEW_LINE> return stats
remove unacceptable particles from the stats Note that for oil and gas analysis, this filtering is handled by the functions in the pysilcam.oilgas module. Args: stats (DataFrame) : particle statistics from silcam process settings (PySilcamSettings) : settings associated with the data, loaded with PySilcamSettings Returns: stats (DataFrame) : particle statistics from silcam process
625941bf10dbd63aa1bd2aeb
def modified_We_model(D, rho_gas, m_gas, mu_gas, sigma_gas, rho_oil, m_oil, mu_oil, sigma_oil, rho): <NEW_LINE> <INDENT> if not isinstance(m_gas, np.ndarray): <NEW_LINE> <INDENT> if not isinstance(m_gas, list): <NEW_LINE> <INDENT> m_gas = np.array([m_gas]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m_gas = np.array(m_gas) <NEW_LINE> <DEDENT> <DEDENT> if not isinstance(m_oil, np.ndarray): <NEW_LINE> <INDENT> if not isinstance(m_oil, list): <NEW_LINE> <INDENT> m_oil = np.array([m_oil]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m_oil = np.array(m_oil) <NEW_LINE> <DEDENT> <DEDENT> mu = 0.0012 <NEW_LINE> de_gas, de_max, k, alpha = psf.sintef(D, m_gas, rho_gas, m_oil, rho_oil, mu_gas, sigma_gas, rho, mu, fp_type=0, use_d95=False) <NEW_LINE> de_oil, de_max, k, alpha = psf.sintef(D, m_gas, rho_gas, m_oil, rho_oil, mu_oil, sigma_oil, rho, mu, fp_type=1, use_d95=False) <NEW_LINE> return (de_gas, de_oil)
This function is deprecated: Use psf.sintef() instead. Compute the initial oil droplet and gas bubble sizes from the SINTEF model Apply the SINTEF modified Weber number model to estimate the initial oil and gas particle sizes. This function calculates the adjusted exit velocity based on a void fraction and buoyancy adjustment per the method suggested by SINTEF. Parameters ---------- D : float Diameter of the release point (m) rho_gas : float In-situ density of the gas (kg/m^3) m_gas : ndarray Array of mass fluxes for each component of the gas object (kg/s) mu_gas : float Dynamic viscosity of gas (Pa s) sigma_gas : float Interfacial tension between gas and seawater (N/m) rho_oil : float In-situ density of the oil m_oil : ndarray Array of mass fluxes for each component of the oil object (kg/s) mu_oil : float Dynamic viscosity of oil (Pa s) sigma_oil : float Interfacial tension between oil and seawater (N/m) rho : float Density of the continuous phase fluid (kg/m^3) Returns ------- A tuple containing: de_gas : float The volume mean diameter of the gas bubbles (m) de_oil : float The volume mean diameter of the oil droplets (m)
625941bf3cc13d1c6d3c72c0
@manager.command <NEW_LINE> def load_initial_data(): <NEW_LINE> <INDENT> with open("states.csv", "r") as states_file: <NEW_LINE> <INDENT> preload_data(csv.reader(states_file))
Load initial data into database.
625941bf67a9b606de4a7e00
def union(self, x, y): <NEW_LINE> <INDENT> parent_x = self.find(x) <NEW_LINE> parent_y = self.find(y) <NEW_LINE> if parent_x != parent_y: <NEW_LINE> <INDENT> merge_from, merge_to = sorted([parent_x, parent_y], key=lambda i: self._items[i][1]) <NEW_LINE> self._items[merge_from][0] = merge_to <NEW_LINE> self._items[merge_to][1] += self._items[merge_from][1]
Merges sets containing two different items together. If items already belong to same set does nothing. Args: x: First item. y: Second item.
625941bf97e22403b379cede
def render_and_write(template_name, context, output_name, output_dir): <NEW_LINE> <INDENT> template = templates_env.get_template(template_name) <NEW_LINE> f = open(path.join(output_dir, output_name), "w") <NEW_LINE> f.write(template.render(**context)) <NEW_LINE> f.close()
Render `template_name` with `context` and write the result in the file `output_dir`/`output_name`.
625941bfe64d504609d74785
def resolve_bg_path(bgpth0): <NEW_LINE> <INDENT> if not os.path.isfile(bgpth0): <NEW_LINE> <INDENT> bgpth = os.path.join(gt.BACKGROUND_PATH, os.path.basename(bgpth0)) <NEW_LINE> if not os.path.isfile(bgpth): <NEW_LINE> <INDENT> raise IOError("Not a valid background file: %s" % bgpth) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> bgpth = bgpth0 <NEW_LINE> <DEDENT> return bgpth
Finds a valid background path.
625941bf4c3428357757c26f
def constants(self): <NEW_LINE> <INDENT> const_dict = {} <NEW_LINE> constants_ = self.objective.constants() <NEW_LINE> for constr in self.constraints: <NEW_LINE> <INDENT> constants_ += constr.constants() <NEW_LINE> <DEDENT> const_dict = {id(constant): constant for constant in constants_} <NEW_LINE> return list(const_dict.values())
Accessor method for parameters. Returns ------- list of :class:`~cvxpy.expressions.constants.constant.Constant` A list of the constants in the problem.
625941bf566aa707497f44b2
def typecheck(value, type: t.Union[Type, type], value_name: str = None): <NEW_LINE> <INDENT> if not bool(isinstance(value, type)): <NEW_LINE> <INDENT> ret = verbose_isinstance(value, type, value_name) <NEW_LINE> if not ret: <NEW_LINE> <INDENT> raise TypeError(str(ret))
Like verbose_isinstance but raises an error if the value hasn't the expected type. :param value: passed value :param type: expected type of the value :param value_name: optional description of the value :raises: TypeError
625941bfb830903b967e9853
def __sub__(self, other): <NEW_LINE> <INDENT> return Vector2(self.x - other.x, self.y - other.y)
Subtract the two vectors component wise. Example: .. code-block:: python from pygorithm.geometry import vector2 vec1 = vector2.Vector2(5, 5) vec2 = vector2.Vector2(2, 3) vec3 = vec1 - vec2 vec4 = vec2 - vec1 # prints <3, 2> print(vec3) # prints <2, 3> print(vec4) :param other: the vector to subtract from this one :type other: :class:`pygorithm.geometry.vector2.Vector2` :returns: a new vector two that is the difference of self and other :rtype: :class:`pygorithm.geometry.vector2.Vector2`
625941bfeab8aa0e5d26da9d
def list_topics(self, page_size=None, page_token=None): <NEW_LINE> <INDENT> api = self.publisher_api <NEW_LINE> return api.list_topics( self.project, page_size, page_token)
List topics for the project associated with this client. See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list Example: .. literalinclude:: pubsub_snippets.py :start-after: [START client_list_topics] :end-before: [END client_list_topics] :type page_size: int :param page_size: maximum number of topics to return, If not passed, defaults to a value set by the API. :type page_token: str :param page_token: opaque marker for the next "page" of topics. If not passed, the API will return the first page of topics. :rtype: :class:`~google.cloud.iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` accessible to the current API.
625941bf07d97122c41787cc
def post_payload_to_ingest_service(insights_upload, local_path): <NEW_LINE> <INDENT> insights_account_id = os.environ.get("INSIGHTS_ACCOUNT_ID") <NEW_LINE> insights_org_id = os.environ.get("INSIGHTS_ORG_ID") <NEW_LINE> insights_user = os.environ.get("INSIGHTS_USER") <NEW_LINE> insights_password = os.environ.get("INSIGHTS_PASSWORD") <NEW_LINE> content_type = "application/vnd.redhat.hccm.tar+tgz" <NEW_LINE> if os.path.isfile(local_path): <NEW_LINE> <INDENT> file_info = os.stat(local_path) <NEW_LINE> filesize = _convert_bytes(file_info.st_size) <NEW_LINE> <DEDENT> LOG.info(f"Upload File: ({local_path}) filesize is {filesize}.") <NEW_LINE> with open(local_path, "rb") as upload_file: <NEW_LINE> <INDENT> if insights_account_id and insights_org_id: <NEW_LINE> <INDENT> header = { "identity": { "account_number": insights_account_id, "org_id": insights_org_id, "internal": {"org_id": insights_org_id}, "type": content_type, } } <NEW_LINE> headers = {"x-rh-identity": base64.b64encode(json.dumps(header).encode("UTF-8"))} <NEW_LINE> return requests.post( insights_upload, data={}, files={"file": ("payload.tar.gz", upload_file, content_type)}, headers=headers, ) <NEW_LINE> <DEDENT> return requests.post( insights_upload, data={}, files={"file": ("payload.tar.gz", upload_file, content_type)}, auth=(insights_user, insights_password), verify=False, )
POST the payload to Insights via header or basic auth.
625941bf21a7993f00bc7c31
def update_gui (self): <NEW_LINE> <INDENT> self.update_stbv_list() <NEW_LINE> self.update_package_list() <NEW_LINE> return
Update the GUI. This method should be called upon package loading, or when a new view or type is created, or when an existing one is modified, in order to reflect changes.
625941bfd18da76e23532419
def run(self): <NEW_LINE> <INDENT> jobs = self.store.get_jobs() <NEW_LINE> for job in jobs: <NEW_LINE> <INDENT> id_ = job['id'] <NEW_LINE> try: <NEW_LINE> <INDENT> self._initialize_job(id_, load_events=True) <NEW_LINE> <DEDENT> except JobDeleted: <NEW_LINE> <INDENT> print('Warning: job', id_, 'has vanished') <NEW_LINE> <DEDENT> <DEDENT> self.status_ready.set() <NEW_LINE> while True: <NEW_LINE> <INDENT> time.sleep(5) <NEW_LINE> datetime_ = datetime.datetime.now(pytz.UTC) <NEW_LINE> events = self.store.get_events_since(self.max_startid, self.max_alarmid, self.max_finishid) <NEW_LINE> for event in events: <NEW_LINE> <INDENT> id_ = event['jobid'] <NEW_LINE> self._update_max_id_values(event) <NEW_LINE> try: <NEW_LINE> <INDENT> if id_ not in self.status: <NEW_LINE> <INDENT> self._initialize_job(id_) <NEW_LINE> <DEDENT> self._process_event(id_, event) <NEW_LINE> self._compute_reliability(id_) <NEW_LINE> <DEDENT> except JobDeleted: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.num_error = 0; <NEW_LINE> self.num_warning = 0; <NEW_LINE> for id_ in self.status: <NEW_LINE> <INDENT> jobstatus = self.status[id_]['status'] <NEW_LINE> if (jobstatus is None or CrabStatus.is_ok(jobstatus)): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif (CrabStatus.is_warning(jobstatus)): <NEW_LINE> <INDENT> self.num_warning += 1; <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.num_error += 1; <NEW_LINE> <DEDENT> <DEDENT> if events: <NEW_LINE> <INDENT> with self.new_event: <NEW_LINE> <INDENT> self.new_event.notify_all() <NEW_LINE> <DEDENT> <DEDENT> self._check_minute() <NEW_LINE> for id_ in list(self.miss_timeout.keys()): <NEW_LINE> <INDENT> if self.miss_timeout[id_] < datetime_: <NEW_LINE> <INDENT> self._write_alarm(id_, CrabStatus.MISSED) <NEW_LINE> del self.miss_timeout[id_] <NEW_LINE> <DEDENT> <DEDENT> for id_ in list(self.timeout.keys()): <NEW_LINE> <INDENT> if self.timeout[id_] < datetime_: <NEW_LINE> <INDENT> self._write_alarm(id_, CrabStatus.TIMEOUT) <NEW_LINE> del 
self.timeout[id_]
Monitor thread main run function. When the thread is started, this function will run. It begins by fetching a list of jobs and using them to populate its data structures. When this is complete, the Event status_ready is fired. It then goes into a loop, and every few seconds it checks for new events, processing any which are found. The new_event Condition is fired if there were any new events. We call _check_minute from CrabMinutely to check whether the minute has changed since the last time round the loop.
625941bf76e4537e8c3515b6
def test_state_sls_id_test(self): <NEW_LINE> <INDENT> self._add_runtime_pillar(pillar={'test': True}) <NEW_LINE> ret = self.run_function('state.sls', ['core']) <NEW_LINE> for key, val in ret.items(): <NEW_LINE> <INDENT> self.assertEqual(val['comment'], 'The file /tmp/salt-tests-tmpdir/testfile is set to be changed') <NEW_LINE> self.assertEqual(val['changes'], {})
test state.sls_id when test is set to true in pillar data
625941bf6fece00bbac2d682
def build_data(self, training_instances="all_events", log=True): <NEW_LINE> <INDENT> variables = self.extractor.get_variable_names() <NEW_LINE> self.training_instances = training_instances <NEW_LINE> self.data = self.extractor.build_dataframe(training_instances) <NEW_LINE> self.log("There are " + str(len(variables)) + " variables in the network: " + " ".join(variables), log) <NEW_LINE> self.log("Library used: pgmpy", log) <NEW_LINE> self.log("There are " + str(len(self.data)) + " 'training' instances in the dataframe.", log) <NEW_LINE> return self.data
(3) Method that builds the data to be used by the graphical model library. ----------------- Parameters: training_instances : support -- to use duplicated training instances based on the support of the frequent sets all_events -- to generate one training instance per event (in "distinct devices after 5 minutes") all_events_with_causes -- like all_events, but also considers the 6 causes variables all_events_priority -- like all_event but instead of using [0, 1] as values for variables, uses the priority related to the event: [0, L0, L1, L2, L3] priority_node : True if you want the priority node, false otherwise. log : "True" if you want to print debug information in the console
625941bf5fdd1c0f98dc0178
def fixProgram(p): <NEW_LINE> <INDENT> ret = None <NEW_LINE> for i,val in enumerate(program): <NEW_LINE> <INDENT> cmd = program[i][0] <NEW_LINE> if cmd == 'nop': <NEW_LINE> <INDENT> program[i][0] = 'jmp' <NEW_LINE> <DEDENT> elif cmd == 'jmp': <NEW_LINE> <INDENT> program[i][0] = 'nop' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ret = run(program) <NEW_LINE> if ret[1] == True: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> program[i][0] = cmd <NEW_LINE> <DEDENT> if ret is None: <NEW_LINE> <INDENT> ret = run(program) <NEW_LINE> <DEDENT> return(ret)
A program `p` is correct, if it terminates without a cycle detected. This function tries to fix a corrupted program changing exactly one jmp instruction to nop or nop to jmp.
625941bf293b9510aa2c31de
def get(self, request): <NEW_LINE> <INDENT> alloy_type_query_set = alloy_model.AlloyType.objects.defer("version", "create_time", "update_time").filter( is_delete=False) <NEW_LINE> total_alloy_type = len(alloy_type_query_set) <NEW_LINE> try: <NEW_LINE> <INDENT> page_num = int(request.GET.get("page", 1)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.info("页码格式错误:{}".format(e)) <NEW_LINE> page_num = 1 <NEW_LINE> <DEDENT> page_obj = Paginator(alloy_type_query_set, constant.PER_PAGE_NUMBER) <NEW_LINE> try: <NEW_LINE> <INDENT> alloy_type_info = page_obj.page(page_num) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.info("当前页面数据获取异常:{}".format(e)) <NEW_LINE> alloy_type_info = page_obj.page(page_obj.num_pages) <NEW_LINE> <DEDENT> pages_data = per_page.get_page_data(page_obj, alloy_type_info) <NEW_LINE> data = { "alloy_type_info": alloy_type_info, "paginator": page_obj, "total_alloy_type": total_alloy_type, } <NEW_LINE> data.update(pages_data) <NEW_LINE> return render(request, 'admin/alloy/alloy_type_index.html', context=data)
合金展示
625941bfd164cc6175782c93