code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def create_app(config_object=ProdConfig): <NEW_LINE> <INDENT> app = Flask(__name__, static_url_path='/static', static_folder=os.path.join('static', 'dist')) <NEW_LINE> app.config.from_object(config_object) <NEW_LINE> register_extensions(app) <NEW_LINE> register_blueprints(app) <NEW_LINE> register_errorhandlers(app) <NEW_LINE> return app
An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/. :param config_object: The configuration object to use.
625941c1dc8b845886cb54c0
def clean(self, value): <NEW_LINE> <INDENT> if self.required and not value: <NEW_LINE> <INDENT> raise ValidationError(self.error_messages['required'], code='required') <NEW_LINE> <DEDENT> elif not self.required and not value: <NEW_LINE> <INDENT> return self.queryset.none() <NEW_LINE> <DEDENT> if not isinstance(value, (list, tuple)): <NEW_LINE> <INDENT> raise ValidationError(self.error_messages['list'], code='list') <NEW_LINE> <DEDENT> key = self.to_field_name or 'pk' <NEW_LINE> qs = [] <NEW_LINE> value = filter(lambda x: x, value) <NEW_LINE> for pk in value: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> qs.append(self.queryset.get(**{key: pk})) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValidationError(self.error_messages['invalid_pk_value'] % {'pk': pk}) <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> pks = set(force_text(getattr(o, key)) for o in qs) <NEW_LINE> for val in value: <NEW_LINE> <INDENT> if force_text(val) not in pks: <NEW_LINE> <INDENT> raise ValidationError(self.error_messages['invalid_choice'] % {'value': val}) <NEW_LINE> <DEDENT> <DEDENT> self.run_validators(value) <NEW_LINE> return qs
Same as the original implementation, but we guarantee that the resulting list of assets is in the correct order.
625941c1462c4b4f79d1d65c
def __init__(self, systemdef, **kwargs): <NEW_LINE> <INDENT> super(SimulationDefinition, self).__init__(**kwargs) <NEW_LINE> self.sys = systemdef
Creates a new definition or loads one from a file. Parameters ---------- systemdef: :py:class:`SystemDefinition` A system definition for which this simulation is defined. This system definition is stored in :py:attr:`sys`, from where it can be modified. fname : str, optional See :py:class:`IDefinition`. verbose : bool, optional See :py:class:`IDefinition`.
625941c129b78933be1e563b
def validate(self): <NEW_LINE> <INDENT> allowed_backbones = ['resnet18','resnet34','resnet50', 'resnet101', 'resnet152'] <NEW_LINE> backbone = self.backbone.split('_')[0] <NEW_LINE> if backbone not in allowed_backbones: <NEW_LINE> <INDENT> raise ValueError('Backbone (\'{}\') not in allowed backbones ({}).'.format(backbone, allowed_backbones))
Checks whether the backbone string is correct.
625941c191f36d47f21ac47d
def game_won(self, board, player): <NEW_LINE> <INDENT> score = {'1': 0, '2': 0} <NEW_LINE> if (board[0] == board[4] == board[8] and board[0] != 0) or (board[2] == board[4] == board[6] and board[2] != 0): <NEW_LINE> <INDENT> self.running = False <NEW_LINE> <DEDENT> for i in range(0, 3): <NEW_LINE> <INDENT> if (board[i * 3] == board[i * 3 + 1] == board[i * 3 + 2] and board[i * 3] != 0) or (board[i] == board[i + 3] == board[i + 6] and board[i] != 0): <NEW_LINE> <INDENT> self.running = False <NEW_LINE> <DEDENT> <DEDENT> if self.running is False: <NEW_LINE> <INDENT> if self.display_option: <NEW_LINE> <INDENT> pygame.time.wait(1000) <NEW_LINE> myfont_bold = pygame.font.SysFont('Segoe UI', 20, True) <NEW_LINE> text_won = myfont_bold.render(f'PLAYER {player.index} WON', True, (0, 0, 0)) <NEW_LINE> self.gameDisplay.fill([255, 255, 255]) <NEW_LINE> self.gameDisplay.blit(text_won, (120, 180)) <NEW_LINE> pygame.display.update() <NEW_LINE> pygame.time.wait(1000) <NEW_LINE> print(f'{player} won!') <NEW_LINE> <DEDENT> if isinstance(player, Computer): <NEW_LINE> <INDENT> score['1'] = 1 <NEW_LINE> score['2'] = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> score['1'] = -1 <NEW_LINE> score['2'] = 1 <NEW_LINE> <DEDENT> <DEDENT> elif self.running and len(self.get_available_actions(board=board)) == 0: <NEW_LINE> <INDENT> if self.display_option: <NEW_LINE> <INDENT> pygame.time.wait(1000) <NEW_LINE> myfont_bold = pygame.font.SysFont('Segoe UI', 20, True) <NEW_LINE> text_won = myfont_bold.render(f'DRAW', True, (0, 0, 0)) <NEW_LINE> self.gameDisplay.fill([255, 255, 255]) <NEW_LINE> self.gameDisplay.blit(text_won, (160, 180)) <NEW_LINE> pygame.display.update() <NEW_LINE> pygame.time.wait(1000) <NEW_LINE> print(f'draw') <NEW_LINE> <DEDENT> self.running = False <NEW_LINE> <DEDENT> return score
Return the score of game, and set game.running = False if the game is done. The score is a dictionary, where the key '1' is the score of the Computer, and key '2' is the score of the Human. The winning player receives a score equal to 1, otherwise 0.
625941c18c0ade5d55d3e945
def deepnn(x): <NEW_LINE> <INDENT> with tf.name_scope('reshape'): <NEW_LINE> <INDENT> x_image = tf.reshape(x, [-1, 28, 28, 1]) <NEW_LINE> <DEDENT> with tf.name_scope('conv1'): <NEW_LINE> <INDENT> W_conv1 = weight_variable([5, 5, 1, 32]) <NEW_LINE> b_conv1 = bias_variable([32]) <NEW_LINE> h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1) <NEW_LINE> studio.AddRecordVariable(W_conv1, "conv1") <NEW_LINE> <DEDENT> with tf.name_scope('pool1'): <NEW_LINE> <INDENT> h_pool1 = max_pool_2x2(h_conv1) <NEW_LINE> <DEDENT> with tf.name_scope('conv2'): <NEW_LINE> <INDENT> W_conv2 = weight_variable([5, 5, 32, 64]) <NEW_LINE> b_conv2 = bias_variable([64]) <NEW_LINE> h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2) <NEW_LINE> <DEDENT> with tf.name_scope('pool2'): <NEW_LINE> <INDENT> h_pool2 = max_pool_2x2(h_conv2) <NEW_LINE> <DEDENT> with tf.name_scope('fc1'): <NEW_LINE> <INDENT> W_fc1 = weight_variable([7 * 7 * 64, 1024]) <NEW_LINE> b_fc1 = bias_variable([1024]) <NEW_LINE> h_pool2_flat = tf.reshape(h_pool2, [-1, 7*7*64]) <NEW_LINE> h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1) <NEW_LINE> <DEDENT> with tf.name_scope('dropout'): <NEW_LINE> <INDENT> keep_prob = tf.placeholder(tf.float32) <NEW_LINE> h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob) <NEW_LINE> <DEDENT> with tf.name_scope('fc2'): <NEW_LINE> <INDENT> W_fc2 = weight_variable([1024, 10]) <NEW_LINE> b_fc2 = bias_variable([10]) <NEW_LINE> y_conv = tf.matmul(h_fc1_drop, W_fc2) + b_fc2 <NEW_LINE> <DEDENT> return y_conv, keep_prob
deepnn builds the graph for a deep net for classifying digits. Args: x: an input tensor with the dimensions (N_examples, 784), where 784 is the number of pixels in a standard MNIST image. Returns: A tuple (y, keep_prob). y is a tensor of shape (N_examples, 10), with values equal to the logits of classifying the digit into one of 10 classes (the digits 0-9). keep_prob is a scalar placeholder for the probability of dropout.
625941c115baa723493c3f00
def results(race_id): <NEW_LINE> <INDENT> return _request("Results", {"RaceId": race_id})
Race results :param race_id: race identifier, get it by calling competitions(event_id) :return: list of races :rtype: dict
625941c1187af65679ca50aa
def _clean_book_items(self): <NEW_LINE> <INDENT> output_name = self.convert.name <NEW_LINE> settings = get_sections_settings(self.original_book) <NEW_LINE> count = 1 <NEW_LINE> for toc_item in parse_toc_nav(self.original_book): <NEW_LINE> <INDENT> if isinstance(toc_item[1], list): <NEW_LINE> <INDENT> section_title, chapters = toc_item <NEW_LINE> key = self.build_section_key(section_title, count) <NEW_LINE> section_settings = json.loads(settings.get(key, '{}')) <NEW_LINE> show_in_outputs = section_settings.get('show_in_outputs', {}) <NEW_LINE> if not show_in_outputs.get(output_name, True): <NEW_LINE> <INDENT> self.chapters_to_remove += [x[1] for x in chapters] <NEW_LINE> self.sections_to_remove.append(key) <NEW_LINE> <DEDENT> count += 1 <NEW_LINE> <DEDENT> <DEDENT> new_items = [] <NEW_LINE> for i, item in enumerate(list(self.original_book.items)): <NEW_LINE> <INDENT> if item.get_name() not in self.chapters_to_remove: <NEW_LINE> <INDENT> new_items.append(item) <NEW_LINE> <DEDENT> <DEDENT> self.original_book.items = new_items
Removes the items that are not supposed to be shown according to section settings
625941c1f548e778e58cd509
def pickPhase(self, evt): <NEW_LINE> <INDENT> _thisPick = {'time': self._pickTime(evt.pos()), 'amplitude': self.traceItem.getData()[1][int(evt.pos().x())], 'station_id': self.tr.id, 'station_lat': self.station.getCoordinates()[0], 'station_lon': self.station.getCoordinates()[1]} <NEW_LINE> self.station.parent.parent.events.pickSignal(_thisPick) <NEW_LINE> self.plotPickItems()
Evoked when the trace graph is clicked
625941c1566aa707497f44f9
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> return _algorithms.SpatialPooler_save(self, *args, **kwargs)
save(self, outStream)
625941c11b99ca400220aa3e
def get_season(self): <NEW_LINE> <INDENT> new_year_day = dt.date(year=self.searchdate.year, month=1, day=1) <NEW_LINE> intday = (self.searchdate - new_year_day).days + 1 <NEW_LINE> spring = range(80, 172) <NEW_LINE> summer = range(172, 264) <NEW_LINE> fall = range(264, 355) <NEW_LINE> if intday in spring: <NEW_LINE> <INDENT> return self.seasons[0] <NEW_LINE> <DEDENT> elif intday in summer: <NEW_LINE> <INDENT> return self.seasons[1] <NEW_LINE> <DEDENT> elif intday in fall: <NEW_LINE> <INDENT> return self.seasons[2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.seasons[3]
Returns the Season of the date sent
625941c1d4950a0f3b08c2dd
def extract_Z2(self, method='regular', frac=0.5): <NEW_LINE> <INDENT> nz = self.Z.shape[1] <NEW_LINE> ind_z2 = np.zeros(nz) <NEW_LINE> if method == 'regular': <NEW_LINE> <INDENT> ix = np.arange(1 / frac - 1, nz, 1 / frac).astype(int) <NEW_LINE> ind_z2[ix] = 1 <NEW_LINE> Z2 = self.Z[:, ind_z2 == 1] <NEW_LINE> Z1 = self.Z[:, ind_z2 == 0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError('Method not implemented yet') <NEW_LINE> <DEDENT> return Z1, Z2
This function is used to split a big sample Z (dimension: nz x p, containing nz iid realisation of a random vector of size p) into two samples Z1 and Z2 (respectively of dimension nz1 x p and nz2 x p, with nz = nz1 + nz2). Further explanations in Ribes et al. (2012). :param method: str type of sampling used, for now may be only 'regular' :param frac: float fraction of realizations to put in Z2, the remaining is used in Z1 :return: Z1: numpy.ndarray Array of size (nz1 x p) Z2: numpy.ndarray Array of size (nz2 x p)
625941c160cbc95b062c64cf
def export_metahuman_json2(ob, outputpath): <NEW_LINE> <INDENT> dict_channel = {} <NEW_LINE> for i in range(bpy.context.scene.frame_start, bpy.context.scene.frame_end): <NEW_LINE> <INDENT> bpy.context.scene.frame_set(i) <NEW_LINE> for block in ob.data.shape_keys.key_blocks: <NEW_LINE> <INDENT> if block.name != "Basis": <NEW_LINE> <INDENT> tmp = {} <NEW_LINE> tmp["frameNum"] = i <NEW_LINE> tmp['value'] = block.value <NEW_LINE> if block.name in dict_channel.keys(): <NEW_LINE> <INDENT> dict_channel[block.name].append(tmp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict_channel[block.name] = [] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print("export {}".format(i)) <NEW_LINE> <DEDENT> write_json(outputpath, dict_channel)
export metahuman curve to json export the selected frame start frame end curve
625941c192d797404e304116
def eval(self, x, x_contains_var_shape=False): <NEW_LINE> <INDENT> x = np.asarray(x).copy() <NEW_LINE> if not x_contains_var_shape: <NEW_LINE> <INDENT> x = broadcast_to_shape(x, shape=self._shape, scheme='expand_left') <NEW_LINE> <DEDENT> x = cycle_axes(x, -self.ndim) <NEW_LINE> _y = None <NEW_LINE> if _y is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _y = self._dist_func.pdf(x) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if _y is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _y = self._dist_func.pmf(x) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return cycle_axes(_y, self.ndim)
Evaluate the probability distribution/mass at a given point/an array of given points. If `x` is a scalar, the probability distribution/mass function will be evaluated at the single point `x` for *all* different parameter values specified when constructing the object. If the parameter values are specified as *p*-dimensional arrays, the resulting array will also have *p* dimensions, and the shape will match that of the parameter arrays. For example:: par_shape = (a, b, c) # ndim = 3 (= p) x.shape = () # ndim = 0 (scalar, n = 0) result.shape = (a, b, c) # ndim = 3 If `x` is an *n*-dimensional array with shape `x.shape`, the probability distribution/mass function will be evaluated at every value of `x`. In this case, the resulting array will have *p+n* dimensions, with the first *p* corresponding to the parameter arrays and the last *n* corresponding to `x.shape`. For example:: par_shape = (a, b, c) # ndim = 3 (= p) x.shape = (m, n) # ndim = 2 (= n) result.shape = (a, b, c, m, n) # ndim = p+n = 5 Alternatively, if the flag ``x_contains_var_shape`` is set, the first dimensions of ``x`` are assumed to have the same shape as the parameter arrays. If this is the case, no broadcasting of ``x`` to the parameter arrays is done and the resulting array will have the same dimension and shape as the input array ``x``. For example:: par_shape = (a, b, c) # ndim = 3 (= p) x.shape = (a, b, c, m, n) # ndim = p + 2 = 5 result.shape = (a, b, c, m, n) # ndim = p + 2 = 5 :param x: value (array of values) at which to evaluate the pdf/pmf :type x: `numpy.ndarray`. If the alternative method above is used, the *first* entries of `x.shape` must match the ensemble variable's shape :param x_contains_var_shape: if ``True``, the alternative method described above is used. :type x_contains_var_shape: bool :return:
625941c1bd1bec0571d905bb
def request(self, method, url, query_params=None, headers=None, post_params=None, files_params=None): <NEW_LINE> <INDENT> response = None <NEW_LINE> if method == "GET": <NEW_LINE> <INDENT> response = requests.get(url, params=query_params, headers=headers, timeout=self._request_timeout) <NEW_LINE> <DEDENT> elif method == "HEAD": <NEW_LINE> <INDENT> response = requests.head(url, params=query_params, headers=headers, timeout=self._request_timeout) <NEW_LINE> <DEDENT> elif method == "POST": <NEW_LINE> <INDENT> response = requests.post(url, params=query_params, headers=headers, data=post_params, files=files_params, timeout=self._request_timeout) <NEW_LINE> <DEDENT> elif method == "PUT": <NEW_LINE> <INDENT> response = requests.put(url, params=query_params, headers=headers, data=post_params, files=files_params, timeout=self._request_timeout) <NEW_LINE> <DEDENT> elif method == "DELETE": <NEW_LINE> <INDENT> response = requests.delete(url, params=query_params, headers=headers, timeout=self._request_timeout) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "http method must be `GET`, `HEAD`," " `POST`, `PATCH`, `PUT` or `DELETE`." ) <NEW_LINE> <DEDENT> return response
Makes the HTTP request using requests library. :raise requests.exceptions.RequestException :raise sphere_engine.exceptions.SphereEngineException
625941c13eb6a72ae02ec464
def symlink(self, src, dst): <NEW_LINE> <INDENT> self.debug("linking: '%s' -> '%s'" % (self.relpath(dst), self.relpath(src))) <NEW_LINE> if os.path.lexists(dst): <NEW_LINE> <INDENT> os.remove(dst) <NEW_LINE> <DEDENT> os.symlink(src, dst)
Like os.symlink, but overwrites dst and logs
625941c1d8ef3951e32434ca
def __getitem__(self, name): <NEW_LINE> <INDENT> return self._resources[name]
Get a Resource by name.
625941c124f1403a92600af5
def save(self): <NEW_LINE> <INDENT> click_css(self, '.save-button')
Clicks save button.
625941c101c39578d7e74dc8
def initialize(context): <NEW_LINE> <INDENT> content_types, constructors, ftis = atapi.process_types( atapi.listTypes(config.PROJECTNAME), config.PROJECTNAME) <NEW_LINE> for atype, constructor in zip(content_types, constructors): <NEW_LINE> <INDENT> utils.ContentInit( "%s: %s" % (config.PROJECTNAME, atype.portal_type), content_types=(atype,), permission=config.ADD_PERMISSIONS[atype.portal_type], extra_constructors=(constructor,), ).initialize(context)
Register content types through Archetypes with Zope and the CMF.
625941c18c0ade5d55d3e946
def get( self, resource_group_name, cross_connection_name, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2019-09-01" <NEW_LINE> accept = "application/json" <NEW_LINE> url = self.get.metadata['url'] <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response) <NEW_LINE> if cls: <NEW_LINE> <INDENT> return cls(pipeline_response, deserialized, {}) <NEW_LINE> <DEDENT> return deserialized
Gets details about the specified ExpressRouteCrossConnection. :param resource_group_name: The name of the resource group (peering location of the circuit). :type resource_group_name: str :param cross_connection_name: The name of the ExpressRouteCrossConnection (service key of the circuit). :type cross_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ExpressRouteCrossConnection, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_09_01.models.ExpressRouteCrossConnection :raises: ~azure.core.exceptions.HttpResponseError
625941c115fb5d323cde0a99
def syrk(self, *args): <NEW_LINE> <INDENT> return _ndlml.matrix_syrk(self, *args)
syrk(matrix self, matrix A, double alpha, double beta, char const * type, char const * trans)
625941c1091ae35668666eef
def delete(self, user_id): <NEW_LINE> <INDENT> method_url = 'DELETE /api/users/{user_id}' <NEW_LINE> method_url = method_url.format(user_id=user_id) <NEW_LINE> return self._client._request(method_url)
Delete user. https://www.appveyor.com/docs/api/team/#delete-user
625941c1d18da76e23532460
def _pack(self, W1, W2, b1, b2): <NEW_LINE> <INDENT> return np.hstack((W1.ravel(), W2.ravel(), b1.ravel(), b2.ravel()))
Pack the coefficients and intercepts (W1,W2,b1,b2) from theta Parameters ---------- theta : array-like, shape (size(W1)*size(W2)*size(b1)*size(b2), 1) Contains concatenated flattened weights that represent the parameters "W1, W2, b1, b2" n_features : int Number of features
625941c11d351010ab855aa9
def __call__(self, entry): <NEW_LINE> <INDENT> new_entry = copy.copy(entry) <NEW_LINE> new_entry.clear() <NEW_LINE> new_entry.extend(self._process_tag(tag, value) for tag, value in entry) <NEW_LINE> return new_entry
Swap in-line links for markdown-style links. :returns: a modified copy of `entry`
625941c12eb69b55b151c83a
@skipif_yask <NEW_LINE> def test_forward_unroll(a, c, nt=5): <NEW_LINE> <INDENT> a.data[0, :] = 1. <NEW_LINE> c.data[0, :] = 1. <NEW_LINE> eqn_c = Eq(c.forward, c + 1.) <NEW_LINE> eqn_a = Eq(a.forward, c.forward) <NEW_LINE> Operator([eqn_c, eqn_a])(time=nt) <NEW_LINE> for i in range(nt): <NEW_LINE> <INDENT> assert np.allclose(a.data[i, :], 1. + i, rtol=1.e-12)
Test forward time marching with a buffered and an unrolled t
625941c10383005118ecf571
def filter_tag(self, trs): <NEW_LINE> <INDENT> q = Q() <NEW_LINE> if not self.slug: <NEW_LINE> <INDENT> q &= ~Q(resource__tag__isnull=True) <NEW_LINE> <DEDENT> if self.slug: <NEW_LINE> <INDENT> q &= Q(resource__tag__slug__contains=self.slug) <NEW_LINE> <DEDENT> if self.priority is not None: <NEW_LINE> <INDENT> if self.priority is False: <NEW_LINE> <INDENT> q &= Q(resource__tag__priority__isnull=True) <NEW_LINE> <DEDENT> elif self.priority is True: <NEW_LINE> <INDENT> q &= Q(resource__tag__priority__isnull=False) <NEW_LINE> <DEDENT> elif isinstance(self.priority, int): <NEW_LINE> <INDENT> q &= Q(resource__tag__priority=self.priority) <NEW_LINE> <DEDENT> <DEDENT> return trs.filter(q)
Filters on tag.slug and tag.priority
625941c1de87d2750b85fd1d
def move_asynchronously(self, session, source_space, source_offset, source_width, destination_space, destination_offset, destination_width, length): <NEW_LINE> <INDENT> raise NotImplementedError
Moves a block of data asynchronously. Corresponds to viMoveAsync function of the VISA library. :param session: Unique logical identifier to a session. :param source_space: Specifies the address space of the source. :param source_offset: Offset of the starting address or register from which to read. :param source_width: Specifies the data width of the source. :param destination_space: Specifies the address space of the destination. :param destination_offset: Offset of the starting address or register to which to write. :param destination_width: Specifies the data width of the destination. :param length: Number of elements to transfer, where the data width of the elements to transfer is identical to the source data width. :return: Job identifier of this asynchronous move operation, return value of the library call. :rtype: jobid, :class:`pyvisa.constants.StatusCode`
625941c1097d151d1a222de8
def _planePositionChanged(self, source, *args, **kwargs): <NEW_LINE> <INDENT> if self.__plane.visible: <NEW_LINE> <INDENT> self._updated(ItemChangedType.POSITION)
Handle update of cut plane position and normal
625941c17b25080760e393e7
def translate_lines(self): <NEW_LINE> <INDENT> self.turing_code = ["# -*- coding: utf-8 -*-"] <NEW_LINE> self.turing_code.append("") <NEW_LINE> for s in self.raw_lines: <NEW_LINE> <INDENT> if s.strip() == "": <NEW_LINE> <INDENT> self.turing_code.append(s) <NEW_LINE> <DEDENT> elif s[0] == "/": <NEW_LINE> <INDENT> self.turing_code.append("# " + s) <NEW_LINE> <DEDENT> elif s.split(":")[0] in ["name", "init", "accept"]: <NEW_LINE> <INDENT> self.turing_code.append("# " + s) <NEW_LINE> <DEDENT> elif len(s.split(",")) == 3: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif len(s.split(",")) == 2: <NEW_LINE> <INDENT> code_line_1 = s.split(",") <NEW_LINE> code_line_2 = self.raw_lines[self.raw_lines.index(s) + 1].split(",") <NEW_LINE> self.turing_code.append(self._convert_code(code_line_1 + code_line_2)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.turing_code.append("# BAD LINE " + s)
Takes the list of lines in the input file and classifies each line as either a comment line or a code line. Comment lines have '#' appended, while code lines are reformatted for use in constructing _CodeLine objects.
625941c116aa5153ce362405
def wheelEvent(self, event): <NEW_LINE> <INDENT> if event.modifiers() & QtCore.Qt.ControlModifier == QtCore.Qt.ControlModifier: <NEW_LINE> <INDENT> if event.delta() > 0: <NEW_LINE> <INDENT> self.zoom_in() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.zoom_out() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> super(ZoomableImage, self).wheelEvent(event)
Overloaded to zoom/unzoom the image when the Ctrl key is pressed.
625941c1442bda511e8be3a8
def isSubtree(self, s, t): <NEW_LINE> <INDENT> def convert(p): <NEW_LINE> <INDENT> return "#" + str(p.val) + convert(p.left) + convert(p.right) if p else "$" <NEW_LINE> <DEDENT> return convert(t) in convert(s) <NEW_LINE> stack = [] <NEW_LINE> res = [] <NEW_LINE> stack.append(s) <NEW_LINE> node = None <NEW_LINE> x = 0 <NEW_LINE> while stack: <NEW_LINE> <INDENT> node = stack.pop() <NEW_LINE> if node: <NEW_LINE> <INDENT> if node.val == t.val: <NEW_LINE> <INDENT> res.append(node) <NEW_LINE> <DEDENT> stack.append(node.left) <NEW_LINE> stack.append(node.right) <NEW_LINE> <DEDENT> <DEDENT> for i in range(len(res)): <NEW_LINE> <INDENT> print(res[i].val) <NEW_LINE> <DEDENT> if res: <NEW_LINE> <INDENT> for i in range(len(res)): <NEW_LINE> <INDENT> x = x or self.compare_val(res[i],t) <NEW_LINE> <DEDENT> return bool(x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
:type s: TreeNode :type t: TreeNode :rtype: bool
625941c132920d7e50b2815b
def generate_nagios_service_cfg(): <NEW_LINE> <INDENT> pass
Returns a body of the configuraton file for the services currently monitored by the agent.
625941c1377c676e91272136
def _texture(func): <NEW_LINE> <INDENT> def inner(name, *args, **kwargs): <NEW_LINE> <INDENT> if isinstance(name, types.Texture): <NEW_LINE> <INDENT> texture = name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> texture = data.textures[name] <NEW_LINE> <DEDENT> return func(texture, *args, **kwargs) <NEW_LINE> <DEDENT> return inner
:param func:
625941c182261d6c526ab429
def items(self, category): <NEW_LINE> <INDENT> addons, _, _ = addon_listing(self.request, [self.TYPE], default='updated') <NEW_LINE> if category: <NEW_LINE> <INDENT> addons = addons.filter(categories__id=category.id) <NEW_LINE> <DEDENT> return addons[:30]
Return the Addons for this Category to be output as RSS <item>'s
625941c150812a4eaa59c2b1
def string2number(i): <NEW_LINE> <INDENT> return int(i.encode('hex'), 16)
Convert a string to a number Input: string (big-endian) Output: long or integer
625941c14c3428357757c2b7
def test_invalidate_existing_token_twice(self): <NEW_LINE> <INDENT> first_response = self.client.post(self.api_url) <NEW_LINE> second_response = self.client.post(self.api_url) <NEW_LINE> self.assertEqual(first_response.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(second_response.status_code, status.HTTP_401_UNAUTHORIZED)
Try to invalidate a token twice and expect an error
625941c1460517430c394117
def get_sub_items(item_url): <NEW_LINE> <INDENT> bf = visit_web(item_url) <NEW_LINE> sub_items = {} <NEW_LINE> try: <NEW_LINE> <INDENT> all_sub = bf.find('div', class_='sub-items') <NEW_LINE> sub_list = all_sub.find_all('a') <NEW_LINE> for sub in sub_list: <NEW_LINE> <INDENT> sub_items[sub.string] = sub.get('href') <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> print('error') <NEW_LINE> <DEDENT> return sub_items
二级区域
625941c150485f2cf553cd26
def test_unknown_offset_raises_error(): <NEW_LINE> <INDENT> with pytest.raises(ValueError): <NEW_LINE> <INDENT> assert parse_date("(2018-01-01) + 1century", "%Y-%m-%d")
It should raise an exception when an invalid offset is used.
625941c197e22403b379cf26
def _live(city): <NEW_LINE> <INDENT> return add_metadata(parse_html(city, get_html(city)))
Scrape data for a given city pulling all data now This function is only used in development mode for debugging the server without a database present.
625941c126068e7796caec68
def insertForwardIndexEntry(self, entry, documentId): <NEW_LINE> <INDENT> length = self._length <NEW_LINE> index_length = self._index_length <NEW_LINE> if index_length is None: <NEW_LINE> <INDENT> self._inline_migration() <NEW_LINE> length = self._length <NEW_LINE> index_length = self._index_length <NEW_LINE> <DEDENT> if length.value == 0: <NEW_LINE> <INDENT> self._index_value = int(not bool(entry)) <NEW_LINE> <DEDENT> if bool(entry) is bool(self._index_value): <NEW_LINE> <INDENT> if (index_length.value + 1) >= ((length.value + 1) * 0.6): <NEW_LINE> <INDENT> self._invert_index() <NEW_LINE> return <NEW_LINE> <DEDENT> self._index.insert(documentId) <NEW_LINE> index_length.change(1)
If the value matches the indexed one, insert into treeset
625941c1099cdd3c635f0be9
def _random_pd_matrix(n, rng): <NEW_LINE> <INDENT> temp = rng.randn(n, n) <NEW_LINE> return temp.dot(temp.T)
Random postive definite matrix.
625941c1eab8aa0e5d26dae5
@check_mlist_private <NEW_LINE> def thread_index(request, mlist_fqdn, threadid, month=None, year=None): <NEW_LINE> <INDENT> mlist = get_object_or_404(MailingList, name=mlist_fqdn) <NEW_LINE> thread = get_object_or_404(Thread, mailinglist=mlist, thread_id=threadid) <NEW_LINE> starting_email = thread.starting_email <NEW_LINE> sort_mode = request.GET.get("sort", "thread") <NEW_LINE> if request.user.is_authenticated(): <NEW_LINE> <INDENT> starting_email.myvote = starting_email.votes.filter( user=request.user).first() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> starting_email.myvote = None <NEW_LINE> <DEDENT> tag_form = AddTagForm() <NEW_LINE> fav_action = "add" <NEW_LINE> if request.user.is_authenticated() and Favorite.objects.filter( thread=thread, user=request.user).exists(): <NEW_LINE> <INDENT> fav_action = "rm" <NEW_LINE> <DEDENT> category, category_form = get_category_widget(request, thread.category) <NEW_LINE> today = datetime.date.today() <NEW_LINE> days_old = today - starting_email.date.date() <NEW_LINE> days_inactive = today - thread.date_active.date() <NEW_LINE> subject = stripped_subject(mlist, starting_email.subject) <NEW_LINE> last_view = None <NEW_LINE> if request.user.is_authenticated(): <NEW_LINE> <INDENT> last_view_obj, created = LastView.objects.get_or_create( thread=thread, user=request.user) <NEW_LINE> if not created: <NEW_LINE> <INDENT> last_view = last_view_obj.view_date <NEW_LINE> last_view_obj.save() <NEW_LINE> <DEDENT> <DEDENT> if last_view is None: <NEW_LINE> <INDENT> if request.user.is_authenticated(): <NEW_LINE> <INDENT> unread_count = thread.emails_count <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> unread_count = 0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> unread_count = thread.emails.filter(date__gt=last_view).count() <NEW_LINE> <DEDENT> user_agent = request.META.get('HTTP_USER_AGENT', None) <NEW_LINE> if user_agent: <NEW_LINE> <INDENT> is_bot = robot_detection.is_robot(user_agent) <NEW_LINE> <DEDENT> else: <NEW_LINE> 
<INDENT> is_bot = True <NEW_LINE> <DEDENT> export = { "url": "%s?thread=%s" % ( reverse("hk_list_export_mbox", kwargs={ "mlist_fqdn": mlist.name, "filename": "%s-%s" % (mlist.name, thread.thread_id)}), thread.thread_id), "message": _("Download"), "title": _("This thread in gzipped mbox format"), } <NEW_LINE> context = { 'mlist': mlist, 'thread': thread, 'starting_email': starting_email, 'subject': subject, 'addtag_form': tag_form, 'month': thread.date_active, 'months_list': get_months(mlist), 'days_inactive': days_inactive.days, 'days_old': days_old.days, 'sort_mode': sort_mode, 'fav_action': fav_action, 'reply_form': get_posting_form(ReplyForm, request, mlist), 'is_bot': is_bot, 'num_comments': thread.emails_count - 1, 'last_view': last_view, 'unread_count': unread_count, 'category_form': category_form, 'category': category, 'export': export, } <NEW_LINE> if is_bot: <NEW_LINE> <INDENT> context["replies"] = _get_thread_replies(request, thread, limit=1000) <NEW_LINE> <DEDENT> return render(request, "hyperkitty/thread.html", context)
Displays all the email for a given thread identifier
625941c1de87d2750b85fd1e
def disallow_discussion(self, request, queryset): <NEW_LINE> <INDENT> queryset.update(allow_discussion=False) <NEW_LINE> self.message_user(request, u'Document(s) no longer allow discussion.')
Disallow discussion on several documents.
625941c130bbd722463cbd51
def capture(self, args): <NEW_LINE> <INDENT> commit = self.__get_newest_commit() <NEW_LINE> print(self.__make_macro(**commit))
Capture the most recent commit and macro it!
625941c1ac7a0e7691ed405d
def test_my_implementation(): <NEW_LINE> <INDENT> from implementation import CarDynamics, CarCommands, CarParameters <NEW_LINE> from check import check_car_dynamics_correct <NEW_LINE> klass = CarDynamics <NEW_LINE> commands = CarCommands <NEW_LINE> params = CarParameters <NEW_LINE> check_car_dynamics_correct(klass,commands,params)
This will be run by nose
625941c1009cb60464c63340
def result_summary(self): <NEW_LINE> <INDENT> return result_summary(self.results.all())
Return a dict summarizing status of results.
625941c17047854f462a1399
def set_mew(self, val): <NEW_LINE> <INDENT> self.set_markeredgewidth(val)
alias for set_markeredgewidth
625941c1004d5f362079a2c2
def get_selector_config(self): <NEW_LINE> <INDENT> if "selector" not in self._suite_config: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> selector = self._suite_config["selector"].copy() <NEW_LINE> if self.options.include_tags is not None: <NEW_LINE> <INDENT> if "include_tags" in selector: <NEW_LINE> <INDENT> selector["include_tags"] = {"$allOf": [ selector["include_tags"], self.options.include_tags, ]} <NEW_LINE> <DEDENT> elif "exclude_tags" in selector: <NEW_LINE> <INDENT> selector["exclude_tags"] = {"$anyOf": [ selector["exclude_tags"], {"$not": self.options.include_tags}, ]} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> selector["include_tags"] = self.options.include_tags <NEW_LINE> <DEDENT> <DEDENT> return selector
Returns the "selector" section of the YAML configuration.
625941c126068e7796caec69
def _receive_message(self, queue_name: str) -> typing.Union[None, str]: <NEW_LINE> <INDENT> response = self._get_per_thread_client().receive_message( QueueUrl=self._get_per_queue_url(queue_name=queue_name), AttributeNames=["All"], MessageAttributeNames=["All"], MaxNumberOfMessages=self.MaxNumberOfMessages, VisibilityTimeout=self.VisibilityTimeout, WaitTimeSeconds=self.ReceiveMessageWaitTimeSeconds, ) <NEW_LINE> response.update({"event": "wijisqs.SqsBroker._receive_message", "queue_name": queue_name}) <NEW_LINE> self.logger.log(logging.DEBUG, response) <NEW_LINE> if not response.get("Messages"): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.long_poll: <NEW_LINE> <INDENT> if len(response["Messages"]) >= 1: <NEW_LINE> <INDENT> for msg in response["Messages"]: <NEW_LINE> <INDENT> ReceiptHandle = msg["ReceiptHandle"] <NEW_LINE> MessageAttributes = msg["MessageAttributes"] <NEW_LINE> task_id = MessageAttributes["task_id"]["StringValue"] <NEW_LINE> _ = MessageAttributes["task_eta"]["StringValue"] <NEW_LINE> _ = MessageAttributes["task_hook_metadata"]["StringValue"] <NEW_LINE> self._get_per_queue_task_receipt(queue_name=queue_name)[task_id] = ReceiptHandle <NEW_LINE> self._get_per_queue_recieveBuf(queue_name=queue_name).put(new_item=msg["Body"]) <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(response["Messages"]) >= 1: <NEW_LINE> <INDENT> msg = response["Messages"][0] <NEW_LINE> ReceiptHandle = msg["ReceiptHandle"] <NEW_LINE> MessageAttributes = msg["MessageAttributes"] <NEW_LINE> task_id = MessageAttributes["task_id"]["StringValue"] <NEW_LINE> _ = MessageAttributes["task_eta"]["StringValue"] <NEW_LINE> _ = MessageAttributes["task_hook_metadata"]["StringValue"] <NEW_LINE> self._get_per_queue_task_receipt(queue_name=queue_name)[task_id] = ReceiptHandle <NEW_LINE> return msg["Body"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Retrieves one or more messages (up to 10), from the specified queue. Using the WaitTimeSeconds parameter enables long-poll support. 1. https://botocore.amazonaws.com/v1/documentation/api/latest/reference/services/sqs.html#SQS.Client.receive_message
625941c1a8ecb033257d305b
def update_sheet_activity(sheet_name, force=False): <NEW_LINE> <INDENT> name = 'ActivityBuffer' <NEW_LINE> sheet = topo.sim.objects(Sheet)[sheet_name] <NEW_LINE> view = sheet.views.Maps.get(name, False) <NEW_LINE> time = topo.sim.time() <NEW_LINE> metadata = AttrDict(precedence=sheet.precedence, row_precedence=sheet.row_precedence, src_name=sheet.name, shape=sheet.activity.shape, timestamp=time) <NEW_LINE> if not view: <NEW_LINE> <INDENT> sv = SheetView(np.array(sheet.activity), sheet.bounds) <NEW_LINE> sv.metadata=metadata <NEW_LINE> view = SheetStack((time, sv), dimensions=[Time]) <NEW_LINE> view.metadata = metadata <NEW_LINE> sheet.views.Maps[name] = view <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if force or view.dim_range('Time')[1] < time: <NEW_LINE> <INDENT> sv = SheetView(np.array(sheet.activity), sheet.bounds) <NEW_LINE> sv.metadata=metadata <NEW_LINE> view[time] = sv <NEW_LINE> <DEDENT> <DEDENT> return view
Update the '_activity_buffer' SheetStack for a given sheet by name. If force is False and the existing Activity SheetView isn't stale, the existing view is returned.
625941c199fddb7c1c9de31f
def test_group_by_params_with_and_string_success(self): <NEW_LINE> <INDENT> group_by_params = {'and:account': 'account1,account2'} <NEW_LINE> serializer = GroupBySerializer(data=group_by_params) <NEW_LINE> self.assertTrue(serializer.is_valid())
Test that the and: prefix is allowed with a string of items.
625941c110dbd63aa1bd2b32
def check_reboot(): <NEW_LINE> <INDENT> return os.path.exists("/run/reboot-required")
Return true if the computer has a pending reboot.
625941c1d53ae8145f87a201
def colour_code_segmentation(image, label_values): <NEW_LINE> <INDENT> colour_codes = np.array(label_values) <NEW_LINE> x = colour_codes[image.astype(int)] <NEW_LINE> return x
Given a 1-channel array of class keys, colour code the segmentation results. # Arguments image: single channel array where each value represents the class key. label_values # Returns Colour coded image for segmentation visualization
625941c11d351010ab855aaa
def fix_files( in_dir: pathlib.Path, out_dir: pathlib.Path, *, transformer=networkconnectivityCallTransformer(), ): <NEW_LINE> <INDENT> pyfile_gen = ( pathlib.Path(os.path.join(root, f)) for root, _, files in os.walk(in_dir) for f in files if os.path.splitext(f)[1] == ".py" ) <NEW_LINE> for fpath in pyfile_gen: <NEW_LINE> <INDENT> with open(fpath, 'r') as f: <NEW_LINE> <INDENT> src = f.read() <NEW_LINE> <DEDENT> tree = cst.parse_module(src) <NEW_LINE> updated = tree.visit(transformer) <NEW_LINE> updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) <NEW_LINE> updated_path.parent.mkdir(parents=True, exist_ok=True) <NEW_LINE> with open(updated_path, 'w') as f: <NEW_LINE> <INDENT> f.write(updated.code)
Duplicate the input dir to the output dir, fixing file method calls. Preconditions: * in_dir is a real directory * out_dir is a real, empty directory
625941c14e4d5625662d4368
def __init__(self, email=None, password=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._email = None <NEW_LINE> self._password = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.email = email <NEW_LINE> self.password = password
LoginParams - a model defined in OpenAPI
625941c1d486a94d0b98e0d3
def ParseFileObject(self, parser_mediator, file_object): <NEW_LINE> <INDENT> regf_file = pyregf.file() <NEW_LINE> try: <NEW_LINE> <INDENT> regf_file.open_file_object(file_object) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> root_key = regf_file.get_key_by_path('Root') <NEW_LINE> if root_key: <NEW_LINE> <INDENT> file_key = root_key.get_sub_key_by_path('File') <NEW_LINE> if file_key: <NEW_LINE> <INDENT> self._ParseFileKey(parser_mediator, file_key) <NEW_LINE> <DEDENT> programs_key = root_key.get_sub_key_by_path('Programs') <NEW_LINE> if programs_key: <NEW_LINE> <INDENT> self._ParseProgramsKey(parser_mediator, programs_key) <NEW_LINE> <DEDENT> <DEDENT> regf_file.close()
Parses an AMCache.hve file-like object for events. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object.
625941c13617ad0b5ed67e86
def get_commit(deployment): <NEW_LINE> <INDENT> version = deployment_util.get_application_version(deployment) <NEW_LINE> return version[(version.find('_') + 1):]
The version is in format "8.11.0_713eef5s", the last part is the git commit hash.
625941c1167d2b6e31218b23
def __load_config(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(self.file, 'r') as f: <NEW_LINE> <INDENT> return yaml.safe_load(f) <NEW_LINE> <DEDENT> <DEDENT> except yaml.YAMLError as ye: <NEW_LINE> <INDENT> log.log(logging.ERROR, f'Error in YAML file {self.file}') <NEW_LINE> if hasattr(ye, 'problem_mark'): <NEW_LINE> <INDENT> log.log(logging.INFO, f'Error at position ({ye.problem_mark.line + 1}, ' f'{ye.problem_mark.column + 1})') <NEW_LINE> <DEDENT> raise SystemExit
Loads YAML configuration file to Python dictionary. Does some basic error checking to help with debugging bad configuration files.
625941c13cc13d1c6d3c7308
@app.route('/genre/new/', methods=['GET', 'POST']) <NEW_LINE> def newGenre(): <NEW_LINE> <INDENT> if 'username' not in login_session: <NEW_LINE> <INDENT> return redirect('/login') <NEW_LINE> <DEDENT> if request.method == 'POST': <NEW_LINE> <INDENT> if not (session .query(Genre).filter_by(name=request.form['name']).first()): <NEW_LINE> <INDENT> session.add(Genre(name=request.form['name'], user_id=login_session['user_id'])) <NEW_LINE> session.commit() <NEW_LINE> flash('New Genre added!') <NEW_LINE> return redirect(url_for('showGenres')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flash('This genre already exists!') <NEW_LINE> return render_template('newgenre.html') <NEW_LINE> <DEDENT> <DEDENT> if request.method == 'GET': <NEW_LINE> <INDENT> return render_template('newgenre.html')
Render template to add new genre and handle post request.
625941c1cc40096d615958df
def to_binary(self): <NEW_LINE> <INDENT> c = containerize(exclude_fields(self)) <NEW_LINE> self.payload = MsgLog._parser.build(c) <NEW_LINE> return self.pack()
Produce a framed/packed SBP message.
625941c14f88993c3716bff7
def parse(self): <NEW_LINE> <INDENT> CommentPat = re.compile(r'^\s*#.*') <NEW_LINE> LinePat = re.compile(r'^\s*(.+):\s*(.+)?\s*') <NEW_LINE> with self.path.open() as f: <NEW_LINE> <INDENT> for lineno, line in enumerate(f): <NEW_LINE> <INDENT> lineno += 1 <NEW_LINE> if line == "\n": continue <NEW_LINE> if CommentPat.match(line): continue <NEW_LINE> m = LinePat.match(line) <NEW_LINE> if m is None: <NEW_LINE> <INDENT> yield KeyValue(lineno, error="Could not parse line number: %d" % lineno) <NEW_LINE> continue <NEW_LINE> <DEDENT> g = m.groups() <NEW_LINE> if len(g) != 2: <NEW_LINE> <INDENT> yield KeyValue(lineno, error="No value found") <NEW_LINE> continue <NEW_LINE> <DEDENT> if g[1] is None: <NEW_LINE> <INDENT> g = (g[0], "") <NEW_LINE> <DEDENT> yield KeyValue(lineno, g[0], g[1], error=None)
Parses the file at self.path and yields all found 'key: value' pairs as a stream of KeyValue objects
625941c1cc0a2c11143dce1e
def remap(self, src): <NEW_LINE> <INDENT> return cv2.remap(src, self.mapx, self.mapy, cv2.INTER_LINEAR)
:param src: source image :type src: :class:`cvMat` Apply the post-calibration undistortion to the source image
625941c1293b9510aa2c3225
def create_member(self, pool_id, address, protocol_port, prefix = '', member_body = None, **kwargs): <NEW_LINE> <INDENT> _body = { "pool_id":pool_id, "address":address, "protocol_port":protocol_port } <NEW_LINE> if member_body and type(member_body) == dict : <NEW_LINE> <INDENT> _body.update(member_body) <NEW_LINE> <DEDENT> body = {"member":_body} <NEW_LINE> url = prefix + self.members_path <NEW_LINE> return self.base_create(resource_url = url, body = body, **kwargs)
Creates a new load balancer member. :param pool_id: ID of the pool. :param address: IP address. :param protocol_port: Port number. :param kwargs: other values.
625941c144b2445a33932025
def test_post_email_already_verified(self): <NEW_LINE> <INDENT> user = UserFactory.create(email_verified=True) <NEW_LINE> data = {'email': user.email} <NEW_LINE> request = self.create_request('post', auth=False, data=data) <NEW_LINE> view = self.view_class.as_view() <NEW_LINE> response = view(request) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> self.assertIn('email', response.data)
Assert email already verified does not trigger another email.
625941c163b5f9789fde7073
def json_serial_datetime(obj): <NEW_LINE> <INDENT> if isinstance(obj, datetime): <NEW_LINE> <INDENT> serial = obj.strftime('%d/%m/%Y - %H:%M:%S') <NEW_LINE> return serial
Converts a datetime object to a string :param obj: datetime object :return:
625941c145492302aab5e24f
def label_encoder(column): <NEW_LINE> <INDENT> return LabelEncoder().fit_transform(column)
Converts categorical data to numerical data
625941c15f7d997b87174a23
def _get_date_filter(self): <NEW_LINE> <INDENT> ranges = { 'year': [ "EXTRACT('year' FROM {}.{}) = EXTRACT('year' FROM {})", ], 'month': [ "EXTRACT('year' FROM {}.{}) = EXTRACT('year' FROM NOW())", "EXTRACT('month' FROM {}.{}) = EXTRACT('month' FROM {})", ], 'week': [ "EXTRACT('year' FROM {}.{}) = EXTRACT('year' FROM NOW())", "EXTRACT('week' FROM {}.{}) = EXTRACT('week' FROM {})", ], 'day': [ "EXTRACT('year' FROM {}.{}) = EXTRACT('year' FROM NOW())", "EXTRACT('month' FROM {}.{}) = EXTRACT('month' FROM NOW())", "EXTRACT('day' FROM {}.{}) = EXTRACT('day' FROM {})", ], } <NEW_LINE> try: <NEW_LINE> <INDENT> show_range = ranges[self.partition_range] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise PartitionRangeError(model=self.model, current_value=self.partition_range, allowed_values=ranges.keys()) <NEW_LINE> <DEDENT> shows = { 'current': 'NOW()', 'previous': "NOW() - '1 {}'::interval", } <NEW_LINE> try: <NEW_LINE> <INDENT> show = shows[self.partition_show] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise PartitionShowError(model=self.model, current_value=self.partition_show, allowed_values=shows.keys()) <NEW_LINE> <DEDENT> return [item.format(self.table, self.partition_column, show.format(self.partition_range)) for item in show_range]
Contains a partition filter for date partition subtype
625941c1507cdc57c6306c64
def hasNext(self): <NEW_LINE> <INDENT> return bool(self.vectors)
:rtype: bool
625941c1f8510a7c17cf9689
def containment_radius(self, fraction, factor=20, **kwargs): <NEW_LINE> <INDENT> from gammapy.datasets.map import RAD_AXIS_DEFAULT <NEW_LINE> output = np.broadcast(*kwargs.values(), fraction) <NEW_LINE> try: <NEW_LINE> <INDENT> rad_axis = self.axes["rad"] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> rad_axis = RAD_AXIS_DEFAULT <NEW_LINE> <DEDENT> rad = rad_axis.upsample(factor=factor).center <NEW_LINE> axis = tuple(range(output.ndim)) <NEW_LINE> rad = np.expand_dims(rad, axis=axis).T <NEW_LINE> containment = self.containment(rad=rad, **kwargs) <NEW_LINE> fraction_idx = np.argmin(np.abs(containment - fraction), axis=0) <NEW_LINE> return rad[fraction_idx].reshape(output.shape)
Containment radius at given axes coordinates Parameters ---------- fraction : float or `~numpy.ndarray` Containment fraction factor : int Up-sampling factor of the rad axis, determines the precision of the computed containment radius. **kwargs : dict Other coordinates Returns ------- radius : `~astropy.coordinates.Angle` Containment radius
625941c121bff66bcd6848e2
def get_localities(self): <NEW_LINE> <INDENT> return list(self.instances.keys())
Returns the list of available localities that are currently in the collection
625941c17047854f462a139a
def analyzemymeeting(indir, nspeakers, sr=16000, frame_size=50e-3, frame_step=25e-3, outdir=False): <NEW_LINE> <INDENT> fn = os.path.join(indir, os.listdir(indir)[0]) <NEW_LINE> y, sr = librosa.load(fn, sr=sr) <NEW_LINE> duration = librosa.get_duration(y, sr=sr) <NEW_LINE> y = (y - y.mean()) / ((np.abs(y)).max() + 0.0000000001) <NEW_LINE> mfccs = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=13, n_fft=int( frame_size * sr), hop_length=int(frame_step * sr)) <NEW_LINE> mfccs_strided = handy_functions.sliding_window(mfccs, size=80, stepsize=8) <NEW_LINE> _mean = np.mean(mfccs_strided, axis=2) <NEW_LINE> _std = np.std(mfccs_strided, axis=2) <NEW_LINE> X = np.concatenate((_mean, _std)) <NEW_LINE> X = X.T <NEW_LINE> scaler = StandardScaler().fit(X) <NEW_LINE> X_scaled = scaler.transform(X) <NEW_LINE> sr_X = (X_scaled.shape[0] - 1) / duration <NEW_LINE> t = librosa.samples_to_time(np.arange(X_scaled.shape[0]), sr=sr_X) <NEW_LINE> kmeans = KMeans(n_clusters=nspeakers, random_state=0) <NEW_LINE> labels = kmeans.fit_predict(X_scaled) <NEW_LINE> df = pd.DataFrame(index=t, data={'speaker_number_id': labels}) <NEW_LINE> stats = get_meeting_stats(df, 'speaker_number_id') <NEW_LINE> if outdir: <NEW_LINE> <INDENT> save_meeting_stats(stats, outdir) <NEW_LINE> <DEDENT> os.remove(fn) <NEW_LINE> pass
Parameters ---------- indir : .wav file location e.g. 'data/diarization/diarizationExample.wav' sr : sampling freq, default=16kHz frame_size : n_fft in librosa, default 50 ms frame_step : hop_length in librosa, default 25 ms nspeakers : number of speakers in meeting (int)
625941c12c8b7c6e89b35750
def GetInstance(self, request, timeout, metadata=None, with_call=False, protocol_options=None): <NEW_LINE> <INDENT> raise NotImplementedError()
Gets instance information.
625941c1adb09d7d5db6c71f
def create_dataset(data, seq_len=3, tt_split=0.90, normalise=True, pad=None): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> sequence_length = seq_len + 1 <NEW_LINE> if (pad): <NEW_LINE> <INDENT> sequence_length = pad+1 <NEW_LINE> <DEDENT> result = [] <NEW_LINE> data_np = np.array(data) <NEW_LINE> data_fl = data_np.astype(np.float) <NEW_LINE> bounds = [np.amin(data_fl), np.amax(data_fl)] <NEW_LINE> for index in range(len(data) - sequence_length): <NEW_LINE> <INDENT> if (pad): <NEW_LINE> <INDENT> x = [] <NEW_LINE> for i in range(0, pad-seq_len): <NEW_LINE> <INDENT> x.append(data[index]) <NEW_LINE> <DEDENT> for i in range(0, seq_len+1): <NEW_LINE> <INDENT> x.append(data[index + i]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> x = data[index: index + sequence_length] <NEW_LINE> <DEDENT> result.append(x) <NEW_LINE> <DEDENT> if normalise: <NEW_LINE> <INDENT> result = normalise_all(result) <NEW_LINE> <DEDENT> result = np.array(result) <NEW_LINE> row = round(tt_split * result.shape[0]) <NEW_LINE> train = result[:int(row), :] <NEW_LINE> if (pad): <NEW_LINE> <INDENT> offset = seq_len <NEW_LINE> <DEDENT> x_train = train[:, :-1] <NEW_LINE> y_train = train[:, -1] <NEW_LINE> x_test = result[int(row):, :-1] <NEW_LINE> y_test = result[int(row):, -1] <NEW_LINE> x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1)) <NEW_LINE> x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1)) <NEW_LINE> return x_train, y_train, x_test, y_test, bounds
Convert an array of data into LSTM format test and train sequences :param data: array of data to convert :param seq_len: lookback value for number of entries per LSTM timestep: default 3 :param tt_split: ratio of training data to test data: default = .90 :param normalise_window: optional normalize :param pad: optional add padding of 0 to match dataset lengths :return: four arrays: x_train, y_train, x_test, y_test
625941c131939e2706e4cdfa
def _silent_restorecon(self, path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if selinux.is_selinux_enabled(): <NEW_LINE> <INDENT> selinux.restorecon(path) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.error("restorecon %s failed" % path, exc_info=True)
Execute selinux restorecon cmd to determined file Args path -- full path to file
625941c13346ee7daa2b2cf8
def frm_idx2angle(bw, i, j, k): <NEW_LINE> <INDENT> from math import pi, floor <NEW_LINE> phe=i*pi/bw <NEW_LINE> phc=j*pi/bw <NEW_LINE> ohm=k*pi/bw <NEW_LINE> phi = pi-ohm; phi -= 2.0*pi*floor(phi/2.0/pi) <NEW_LINE> the = pi-phc <NEW_LINE> psi = -phe; psi -= 2.0*pi*floor(psi/2.0/pi) <NEW_LINE> return [psi*180.0/pi, phi*180.0/pi, the*180.0/pi]
Transfer the index from correlation volume to the actual Euler angles in ZXZ convention (degree). Note the order of the returned Euler angle is [Phi, Psi, Theta], or [Z1, Z2, X] in Pytom format. Parameters ---------- bw: Bandwidth of the spherical harmonics. Integer i: First index of the correlation volume. Integer j: Second index of the correlation volume. Integer k: Second index of the correlation volume. Integer Returns ------- Euler angle in degrees.
625941c13346ee7daa2b2cf9
def smoke_test(self): <NEW_LINE> <INDENT> input_size = 4 <NEW_LINE> input_length = 4 <NEW_LINE> batch_size = 2 <NEW_LINE> n_units = 4 <NEW_LINE> cell = ESNCell(n_units) <NEW_LINE> inputs = np.random.random([input_length, batch_size, input_size]) <NEW_LINE> state = cell.zero_state(batch_size, tf.float64) <NEW_LINE> for i in range(input_length): <NEW_LINE> <INDENT> if i > 0 : tf.get_variable_scope().reuse_variables() <NEW_LINE> state, _ = cell(inputs[i, :, :], state) <NEW_LINE> <DEDENT> with self.test_session() as sess: <NEW_LINE> <INDENT> sess.run(tf.global_variables_initializer()) <NEW_LINE> final_states = sess.run(state) <NEW_LINE> <DEDENT> expected_final_states = [[-0.56735968, -0.21625957, 0.69647415, -0.91361383], [-0.22654705, -0.15751715, 0.85077971, -0.89757621]] <NEW_LINE> self.assertAllClose(final_states, expected_final_states)
A simple smoke test with random initialization
625941c1be8e80087fb20bd4
def run(game=Hangman(), flash=FlashMessage()): <NEW_LINE> <INDENT> GameClass, FlashClass = game.__class__, flash.__class__ <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> game_loop(game=game, flash=flash) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if not view.prompt_play_again(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> game, flash = GameClass(), FlashClass() <NEW_LINE> <DEDENT> return view.say_goodbye()
Run ``game_loop`` and handle exiting. Logic is separated from game_loop to cleanly avoid python recursion limits. :param hangman.model.Hangman game: Hangman game instance. :param hangman.utils.FlashMessage flash: FlashMessage utility
625941c18a43f66fc4b53ff5
def dp(): <NEW_LINE> <INDENT> global lis <NEW_LINE> ways = [1 for i in range(30001)] <NEW_LINE> for i in range(4): <NEW_LINE> <INDENT> for k in range(30001): <NEW_LINE> <INDENT> if k >= lis[i]: <NEW_LINE> <INDENT> ways[k] += ways[k-lis[i]] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ways
Se utiliza dinamica bottom-up para calcular todas las maneras hasta 30000 La funcion retorna la lista de maneras de obtener el cambio hasta 30000
625941c130bbd722463cbd52
def create_nodes(self, node, depth): <NEW_LINE> <INDENT> for index in range(0, self.depth): <NEW_LINE> <INDENT> work = [] <NEW_LINE> self.logger.info('Creating tree level %s' % (index + 1)) <NEW_LINE> for node in self.traverse( self.rootNode, node_filter=lambda n: n.level == index ): <NEW_LINE> <INDENT> work.append((node, index + 1, self.rating)) <NEW_LINE> <DEDENT> self.logger.info( 'Starting worker on level %s, %s tasks' % ( index + 1, len(work) ) ) <NEW_LINE> with multiprocessing.Pool(4) as pool: <NEW_LINE> <INDENT> pool.starmap_async( create_children, work, callback=self.result_callback, chunksize=10 ) <NEW_LINE> pool.close() <NEW_LINE> pool.join() <NEW_LINE> <DEDENT> self.logger.info('Worker finished on level %s' % (index + 1))
Build up tree
625941c1be383301e01b5418
def compare_full_name(list_of_records, email, first_name, last_name): <NEW_LINE> <INDENT> for rec in list_of_records: <NEW_LINE> <INDENT> if rec[1].get("first_name") == first_name and rec[1].get("last_name") == last_name: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
fill in docstring
625941c10383005118ecf572
def gen_cups_display(cups, current, position): <NEW_LINE> <INDENT> index = cups.index(current) <NEW_LINE> if index != position: <NEW_LINE> <INDENT> cups = rotate_string(cups, position - index) <NEW_LINE> <DEDENT> string = "" <NEW_LINE> for i in cups: <NEW_LINE> <INDENT> if i == current: <NEW_LINE> <INDENT> string += f"({i}) " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> string += f"{i} " <NEW_LINE> <DEDENT> <DEDENT> return string.strip()
Return a string to display of cups.
625941c1627d3e7fe0d68ddd
def calculate_adv_and_value_targets(self, vals, rews, lam=None): <NEW_LINE> <INDENT> if self.adv_estimation_method == 'gae': <NEW_LINE> <INDENT> lam = self.lam if lam is None else lam <NEW_LINE> deltas = rews[:-1] + self.gamma * vals[1:] - vals[:-1] <NEW_LINE> adv = discount_cumsum(deltas, self.gamma * lam) <NEW_LINE> value_net_targets = adv + vals[:-1] <NEW_LINE> <DEDENT> elif self.adv_estimation_method == 'vtrace': <NEW_LINE> <INDENT> path_slice = slice(self.path_start_idx, self.ptr) <NEW_LINE> obs = self.actor_critic.obs_oms(self.obs_buf[path_slice], clip=False) if self.standardize_env_obs else self.obs_buf[path_slice] <NEW_LINE> obs = torch.as_tensor(obs, dtype=torch.float32) <NEW_LINE> act = self.act_buf[path_slice] <NEW_LINE> act = torch.as_tensor(act, dtype=torch.float32) <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> dist = self.actor_critic.pi.dist(obs) <NEW_LINE> log_p = self.actor_critic.pi.log_prob_from_dist(dist, act) <NEW_LINE> <DEDENT> value_net_targets, adv, _ = calculate_v_trace( policy_action_probs=np.exp(log_p.numpy()), values=vals, rewards=rews, behavior_action_probs=np.exp(self.logp_buf[path_slice]), gamma=self.gamma, rho_bar=1.0, c_bar=1.0 ) <NEW_LINE> <DEDENT> elif self.adv_estimation_method == 'plain': <NEW_LINE> <INDENT> adv = rews[:-1] + self.gamma * vals[1:] - vals[:-1] <NEW_LINE> value_net_targets = discount_cumsum(rews, self.gamma)[:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> return adv, value_net_targets
Compute the estimated advantage
625941c13539df3088e2e2da
def _post(self, which_port, msg): <NEW_LINE> <INDENT> return _ccsds_swig.rs_decode_sptr__post(self, which_port, msg)
_post(rs_decode_sptr self, swig_int_ptr which_port, swig_int_ptr msg)
625941c1b830903b967e989c
def isStringStyle(self, style): <NEW_LINE> <INDENT> return style in [QsciLexerIDL.DoubleQuotedString, QsciLexerIDL.SingleQuotedString, QsciLexerIDL.UnclosedString, QsciLexerIDL.VerbatimString]
Public method to check, if a style is a string style. @param style style to check (integer) @return flag indicating a string style (boolean)
625941c11b99ca400220aa3f
def getProximityIntEnable(self): <NEW_LINE> <INDENT> return (self._read_byte_data(REG_ENABLE) >> 5) & 0b00000001
Gets if proximity interrupts are enabled or not. Returns: bool: True if interrupts are enabled, False if not
625941c1851cf427c661a4a0
def pre_translation(self, query): <NEW_LINE> <INDENT> pass
In order to keep the API the same, Bugzilla4 needs to process the query and the result. This also applies to the refresh() function
625941c1956e5f7376d70dfd
def update(self, instance, validated_data): <NEW_LINE> <INDENT> current_password = validated_data.get('current_password') <NEW_LINE> for attr, value in validated_data.items(): <NEW_LINE> <INDENT> if attr == 'password': <NEW_LINE> <INDENT> if not instance.check_password(current_password): <NEW_LINE> <INDENT> raise serializers.ValidationError( {'current_password': 'invalid password'}) <NEW_LINE> <DEDENT> instance.set_password(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(instance, attr, value) <NEW_LINE> <DEDENT> <DEDENT> instance.save() <NEW_LINE> return instance
Override update method to handle password change.
625941c1d8ef3951e32434cc
def _show_message(self, message, message_color=(255,255,255), background_color=(0, 0, 0)): <NEW_LINE> <INDENT> self._sense_hat.rotation = 0 <NEW_LINE> self._sense_hat.show_message(message, self.config.getfloat("PI_HAT_DISPLAY", "SCROLL_TEXT_SPEED"), message_color, background_color)
Internal. Shows message by scrolling it over HAT screen.
625941c18a43f66fc4b53ff6
def defineVariable(self, name='', value=''): <NEW_LINE> <INDENT> if not pmta.PmtaRcptDefineVariable(self.recipient, c_char_p(name), c_char_p(value)): <NEW_LINE> <INDENT> raise PmtaRecipientError(self.recipient)
used to bind mailmerge variable to a recipient. name: string for mailmerge variable's name value: string for mailmerge variable's value
625941c1dd821e528d63b139
def test_init(self): <NEW_LINE> <INDENT> msg = 'Object is not an top.Xlwriter' <NEW_LINE> self.assertIsInstance(self._x, top.Xlwriter, msg)
Initialise a Xlwriter object.
625941c18a349b6b435e8102
def emit(self, record): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import smtplib <NEW_LINE> import ssl <NEW_LINE> from email.utils import formatdate <NEW_LINE> port = self.mailport <NEW_LINE> if not port: <NEW_LINE> <INDENT> port = smtplib.SMTP_SSL_PORT <NEW_LINE> <DEDENT> smtp = smtplib.SMTP_SSL(self.mailhost, port, timeout=5) <NEW_LINE> msg = MIMEMultipart() <NEW_LINE> msg['Subject'] = self.getSubject(record) <NEW_LINE> msg['From'] = self.fromaddr <NEW_LINE> msg['To'] = self.toaddrs[0] <NEW_LINE> msg['Date'] = formatdate() <NEW_LINE> text = MIMEText('Screenshot attached') <NEW_LINE> msg.attach(text) <NEW_LINE> image_data = SessionManager.get_screenshot() <NEW_LINE> if image_data: <NEW_LINE> <INDENT> image = MIMEImage(image_data, 'screenshot.png') <NEW_LINE> msg.attach(image) <NEW_LINE> <DEDENT> if self.username: <NEW_LINE> <INDENT> smtp.ehlo() <NEW_LINE> smtp.login(self.username, self.password) <NEW_LINE> <DEDENT> smtp.sendmail(self.fromaddr, [self.toaddrs], msg.as_string()) <NEW_LINE> smtp.quit() <NEW_LINE> <DEDENT> except (KeyboardInterrupt, SystemExit): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.handleError(record)
Overwrite the logging.handlers.SMTPHandler.emit function with SMTP_SSL. Emit a record. Format the record and send it to the specified addressees.
625941c124f1403a92600af7
def render_to_window(self): <NEW_LINE> <INDENT> if not self._onscreen_renderer: <NEW_LINE> <INDENT> self._onscreen_renderer = mujoco_py.MjViewer(self._sim) <NEW_LINE> self._update_camera_properties(self._onscreen_renderer.cam) <NEW_LINE> <DEDENT> self._onscreen_renderer.render()
Renders the simulation to a window.
625941c115fb5d323cde0a9b
def percent(self, key): <NEW_LINE> <INDENT> return float(self[key])/sum(self.values())
Returns what percentage a certain key is of all entries. >>> c = counter() >>> c.add('x') >>> c.add('x') >>> c.add('x') >>> c.add('y') >>> c.percent('x') 0.75 >>> c.percent('y') 0.25
625941c123849d37ff7b301f
def test_graphics_screen_rotation(): <NEW_LINE> <INDENT> print('\nTest the 4 different screen rotations, accept each one') <NEW_LINE> for rotation in ['landscape', 'landscape reverse', 'portrait', 'portrait reverse']: <NEW_LINE> <INDENT> PICASO.clear_screen() <NEW_LINE> print('= ' + rotation) <NEW_LINE> PICASO.screen_mode(rotation) <NEW_LINE> PICASO.put_string('CINF') <NEW_LINE> accept()
Test the different screen rotations
625941c1b545ff76a8913da5
def __init__(self, movie_title, movie_year, imdb_rating, movie_storyline, poster_image, trailer_youtube): <NEW_LINE> <INDENT> self.title = movie_title <NEW_LINE> self.year = movie_year <NEW_LINE> self.imdbRating = imdb_rating <NEW_LINE> self.storyline = movie_storyline <NEW_LINE> self.poster_image_url = poster_image <NEW_LINE> self.trailer_youtube_url = trailer_youtube
Init Movie instance
625941c1bf627c535bc1315d
def hit_string(target_str,str_list): <NEW_LINE> <INDENT> hit = False <NEW_LINE> for str in str_list: <NEW_LINE> <INDENT> if target_str in str: <NEW_LINE> <INDENT> hit = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return hit
验证target_str是否在str_list中匹配到(字符串patial匹配)
625941c18e7ae83300e4af5b
def p_factor(p):
    # ply/yacc action for:  factor : factor_prefix postfix_expr
    # (No docstring added on purpose: in ply the docstring IS the grammar
    # rule, which this dataset row stores separately.)
    #
    # p[1] appears to be a list of (operand, operator) pairs collected by
    # factor_prefix, and p[2] the trailing postfix_expr operand — TODO
    # confirm against the factor_prefix rule.
    if p[1] == []:
        # No prefix operators: the factor is just the postfix expression.
        p[0] = p[2]
    else:
        # Select the typed IR opcode suffix from the operand's type.
        # NOTE(review): ``ending``/``operand_type`` are unbound if get_type
        # returns anything other than INT/FLOAT — presumably unreachable
        # after type checking; verify upstream.
        if get_type(p[2]) == 'INT':
            operand_type = 'INT'
            ending = 'I'
        elif get_type(p[2]) == 'FLOAT':
            operand_type = 'FLOAT'
            ending = 'F'
        if len(p[1]) == 1:
            # Single prefix operator: one IR instruction into a fresh temp.
            t = next_temp(operand_type)
            irnodes.append(p[1][0][1] + ending + ' ' + p[1][0][0] + ' ' + p[2] + ' ' + t)
        else:
            # Chain the prefix operators left-to-right through temporaries.
            t = next_temp(operand_type)
            fp = p[1][0]
            fpnext = p[1][1]
            op = fp[1] + ending
            # BUG FIX: list.append takes exactly one argument; the original
            # called irnodes.append(op, fp[0], fpnext[0], t), which raises
            # TypeError.  Join the fields into a single IR string, matching
            # every other append in this rule.
            irnodes.append(op + ' ' + fp[0] + ' ' + fpnext[0] + ' ' + t)
            op = fpnext[1] + ending
            for fp in p[1][2:]:
                lastop = op
                op = fp[1] + ending
                lastt = t
                t = next_temp(operand_type)
                irnodes.append(lastop + ' ' + lastt + ' ' + fp[0] + ' ' + t)
            # Final instruction folds in the postfix_expr operand.
            lastt = t
            t = next_temp(operand_type)
            irnodes.append(op + ' ' + lastt + ' ' + p[2] + ' ' + t)
        p[0] = t
factor : factor_prefix postfix_expr
625941c163d6d428bbe4447e
def add_candidates(self, sample, seed):
    """Attach a 16-way candidate set: the gold label plus 15 random negatives.

    Negatives are drawn (with replacement) from ``self.all_candidates``,
    skipping draws equal to the gold label.  No-op when the sample already
    carries candidates.  Intended for train time only.

    :param sample: example dict with a ``'labels'`` list; mutated in place.
    :param seed: seed for a local RNG, for reproducible sampling.
    """
    rng = random.Random(seed)
    if 'label_candidates' in sample:
        return
    gold = sample['labels'][0]
    last_index = len(self.all_candidates) - 1
    negatives = []
    while len(negatives) < 15:
        pick = self.all_candidates[rng.randint(0, last_index)]
        if pick != gold:
            negatives.append(pick)
    sample['label_candidates'] = [gold] + negatives
Add 16 candidates. Should be called only at train time.
625941c10383005118ecf573
def pop(self):
    """Remove and return the item on top of this stack.

    Runs in O(1): removal is from the tail of the backing list.

    :raises ValueError: if the stack is empty.
    """
    if self.is_empty():
        raise ValueError("No values in stack to POP")
    top = self.list.pop()
    self.size -= 1
    return top
Remove and return the item on the top of this stack, or raise ValueError if this stack is empty. Running time: O(1) – Why? Popping from the tail of a Python list is constant time.
625941c1507cdc57c6306c65
def jobconf(self):
    """Build the ``-jobconf`` properties to pass to Hadoop streaming.

    Combines :py:attr:`JOBCONF` with command-line ``jobconf`` options
    (command line wins), then normalizes values: booleans become the
    strings ``'true'``/``'false'``, and a float ``hadoop_version`` is
    coerced to its string form with a warning.  When ``SORT_VALUES`` is
    set, the ``_SORT_VALUES_JOBCONF`` defaults are merged in underneath
    the filtered values.

    :return: dict mapping jobconf property name to (normalized) value.
    """
    # Command-line jobconf overrides class-level JOBCONF.
    unfiltered_jobconf = combine_dicts(self.JOBCONF, self.options.jobconf)
    filtered_jobconf = {}

    def format_hadoop_version(v_float):
        # e.g. 1.0 -> '1.0', but 0.2 -> '0.20' (Hadoop's two-decimal
        # pre-1.0 version convention).
        if v_float >= 1.0:
            return '%.1f' % v_float
        else:
            return '%.2f' % v_float

    for key in unfiltered_jobconf:
        unfiltered_val = unfiltered_jobconf[key]
        filtered_val = unfiltered_val
        # Hadoop expects lowercase string booleans, not Python bools.
        if isinstance(unfiltered_val, bool):
            if unfiltered_val:
                filtered_val = 'true'
            else:
                filtered_val = 'false'
        elif (key == 'hadoop_version' and isinstance(unfiltered_val, float)):
            # A float hadoop_version (e.g. 0.2) loses trailing zeros;
            # warn and re-format it as the intended version string.
            log.warn('hadoop_version should be a string, not %s' % unfiltered_val)
            filtered_val = format_hadoop_version(unfiltered_val)
        filtered_jobconf[key] = filtered_val

    if self.SORT_VALUES:
        # SORT_VALUES defaults sit underneath, so explicit jobconf values
        # (including blanking overrides) still take precedence.
        filtered_jobconf = combine_dicts(
            _SORT_VALUES_JOBCONF, filtered_jobconf)

    return filtered_jobconf
``-jobconf`` args to pass to hadoop streaming. This should be a map from property name to value. By default, this combines :option:`jobconf` options from the command lines with :py:attr:`JOBCONF`, with command line arguments taking precedence. If :py:attr:`SORT_VALUES` is set, we also set these jobconf values:: stream.num.map.output.key.fields=2 mapred.text.key.partitioner.options=k1,1 We also blank out ``mapred.output.key.comparator.class`` and ``mapred.text.key.comparator.options`` to prevent interference from :file:`mrjob.conf`. :py:attr:`SORT_VALUES` *can* be overridden by :py:attr:`JOBCONF`, the command line, and step-specific ``jobconf`` values. For example, if you know your values are numbers, and want to sort them in reverse, you could do:: SORT_VALUES = True JOBCONF = { 'mapred.output.key.comparator.class': 'org.apache.hadoop.mapred.lib.KeyFieldBasedComparator', 'mapred.text.key.comparator.options': '-k1 -k2nr', } If you want to re-define this, it's strongly recommended that do something like this, so as not to inadvertently disable the :option:`jobconf` option:: def jobconf(self): orig_jobconf = super(MyMRJobClass, self).jobconf() custom_jobconf = ... return mrjob.conf.combine_dicts(orig_jobconf, custom_jobconf)
625941c1de87d2750b85fd1f