code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def left_strip(data): <NEW_LINE> <INDENT> out = {"high": [], "low": [], "open": [], "close": [], "volume": [], "unix": []} <NEW_LINE> begin = False <NEW_LINE> for idx, item in enumerate(data["volume"]): <NEW_LINE> <INDENT> if item or begin: <NEW_LINE> <INDENT> begin = True <NEW_LINE> out["high"].append(data["high"][idx]) <NEW_LINE> out["low"].append(data["low"][idx]) <NEW_LINE> out["open"].append(data["open"][idx]) <NEW_LINE> out["close"].append(data["close"][idx]) <NEW_LINE> out["volume"].append(data["volume"][idx]) <NEW_LINE> out["unix"].append(data["unix"][idx]) <NEW_LINE> <DEDENT> <DEDENT> return out
Remove no volume candles in beginning of dataset
625941c330c21e258bdfa461
def is_goal_state(state): <NEW_LINE> <INDENT> return state.state == GOAL_STATE
Whether current state is the goal state
625941c3dd821e528d63b170
def create(self, ml_model_dto: MLModelDTO): <NEW_LINE> <INDENT> self.name = ml_model_dto.name <NEW_LINE> self.source = ml_model_dto.source <NEW_LINE> self.dockerhub_url = ml_model_dto.dockerhub_url <NEW_LINE> db.session.add(self) <NEW_LINE> db.session.commit() <NEW_LINE> return self
Creates and saves the current model to the DB
625941c3a8ecb033257d3093
def constrain_encoding(self): <NEW_LINE> <INDENT> accept_encoding = self.headers.get("accept-encoding") <NEW_LINE> if accept_encoding: <NEW_LINE> <INDENT> self.headers["accept-encoding"] = ( ', '.join( e for e in {"gzip", "identity", "deflate", "br", "zstd"} if e in accept_encoding ) )
Limits the permissible Accept-Encoding values, based on what we can decode appropriately.
625941c391f36d47f21ac4b6
def _change_setting4( self ): <NEW_LINE> <INDENT> self.settings[ "smooth_scrolling" ] = not self.settings[ "smooth_scrolling" ] <NEW_LINE> self._set_controls_values()
changes settings #4
625941c3ac7a0e7691ed4095
def interior_decorator(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> with Timer(timer): <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> return wrapper
The actual function wrapper.
625941c31d351010ab855ae2
def size(self, ctype=gu.PEDESTALS) : <NEW_LINE> <INDENT> if self.pbits & 128 : print(self.msgw() % 'size (%s)' % gu.dic_calib_type_to_name[ctype]) <NEW_LINE> if ctype == gu.COMMON_MODE : return self.cbase.size_cm <NEW_LINE> else : <NEW_LINE> <INDENT> self.retrieve_shape() <NEW_LINE> return self._size
Returns size
625941c3d7e4931a7ee9dee3
def list_by_resource( self, resource_group_name, vault_name, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2021-06-01-preview" <NEW_LINE> accept = "application/json" <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> if not next_link: <NEW_LINE> <INDENT> url = self.list_by_resource.metadata['url'] <NEW_LINE> path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'vaultName': self._serialize.url("vault_name", vault_name, 'str', pattern=r'^[a-zA-Z0-9-]{3,24}$'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return deserialized.next_link or None, iter(list_of_elem) <NEW_LINE> <DEDENT> def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = 
self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return ItemPaged( get_next, extract_data )
The List operation gets information about the private endpoint connections associated with the vault. :param resource_group_name: Name of the resource group that contains the key vault. :type resource_group_name: str :param vault_name: The name of the key vault. :type vault_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.keyvault.v2021_06_01_preview.models.PrivateEndpointConnectionListResult] :raises: ~azure.core.exceptions.HttpResponseError
625941c3004d5f362079a2fa
def benchmark_data(self, agent, world): <NEW_LINE> <INDENT> reward = 0 <NEW_LINE> collisions = 0 <NEW_LINE> occupied_landmarks = 0 <NEW_LINE> min_dists = 0 <NEW_LINE> for l in world.landmarks: <NEW_LINE> <INDENT> dists = [np.sqrt(np.sum(np.square(a.state.p_pos - l.state.p_pos))) for a in world.agents] <NEW_LINE> min_dists += min(dists) <NEW_LINE> reward -= min(dists) <NEW_LINE> if min(dists) < 0.1: <NEW_LINE> <INDENT> occupied_landmarks += 1 <NEW_LINE> <DEDENT> <DEDENT> if agent.collide: <NEW_LINE> <INDENT> for a in world.agents: <NEW_LINE> <INDENT> if world.is_collision(a, agent): <NEW_LINE> <INDENT> reward -= 1 <NEW_LINE> collisions += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return tuple([reward, collisions, min_dists, occupied_landmarks])
Returns data for benchmarking purposes. Args: agent (multiagent_particle_env.core.Agent): Agent object world (multiagent_particle_env.core.World): World object with agents and landmarks Returns: (tuple) The agent's reward, number of collisions, total minimum distance to landmarks, and number of occupied landmarks.
625941c32c8b7c6e89b35787
def start_external_search(self, selection, state: "State", paths: List[Path]): <NEW_LINE> <INDENT> def letter_split(q): <NEW_LINE> <INDENT> return r"[*/'_~]*".join((re.escape(c) for c in list(q))) <NEW_LINE> <DEDENT> sub_queries = state.query.split(" ") <NEW_LINE> queries = [(q, re.compile(letter_split(q), re.IGNORECASE)) for q in sub_queries] <NEW_LINE> exact_query = re.compile(letter_split(state.query), re.IGNORECASE) if len(sub_queries) > 1 else None <NEW_LINE> header_queries = [re.compile("(\n=+ .*)?" + letter_split(q), re.IGNORECASE) for q in sub_queries] <NEW_LINE> link = re.compile(r"\[\[(.*?)\]\]", re.IGNORECASE) <NEW_LINE> start = perf_counter() <NEW_LINE> for path in paths: <NEW_LINE> <INDENT> if path not in file_cache: <NEW_LINE> <INDENT> contents = path.read_text() <NEW_LINE> if contents.startswith('Content-Type: text/x-zim-wiki'): <NEW_LINE> <INDENT> contents = contents[contents.find("\n\n"):] <NEW_LINE> <DEDENT> zim_path = self._path2zim(path) <NEW_LINE> file_cache[path] = _FileCache(zim_path, contents) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> zim_path, contents = file_cache[path].path, file_cache[path].contents <NEW_LINE> <DEDENT> matched_links = [] <NEW_LINE> def matched_link(match): <NEW_LINE> <INDENT> matched_links.append(match.group(1)) <NEW_LINE> return "" <NEW_LINE> <DEDENT> txt_body = link.sub(matched_link, contents) <NEW_LINE> txt_links = "".join(matched_links) <NEW_LINE> wanted = [(None, reg) for q, reg in queries if q not in str(zim_path).casefold()] <NEW_LINE> def found(it): <NEW_LINE> <INDENT> return (reg.search(txt_body) or reg.search(txt_links) for _, reg in it) <NEW_LINE> <DEDENT> if wanted and all(found(wanted)) or not wanted and any(found(queries)): <NEW_LINE> <INDENT> score = sum([len(m.group(1)) * 3 if m.group(1) else 1 for q in header_queries for m in q.finditer(txt_body)]) <NEW_LINE> if exact_query: <NEW_LINE> <INDENT> score += 50 * len(exact_query.findall(txt_body)) <NEW_LINE> <DEDENT> selection._count_score(zim_path, score 
or 1) <NEW_LINE> state.matching_files.append(path) <NEW_LINE> <DEDENT> elif not wanted: <NEW_LINE> <INDENT> state.matching_files.append(path) <NEW_LINE> <DEDENT> <DEDENT> logger.info("[Instantsearch] External search: %g s", perf_counter() - start) <NEW_LINE> self._update_results(selection, state, force=True)
Zim internal search is not able to find out text with markup. Ex: 'economical' is not recognized as 'economi**cal**' (however highlighting works great), as 'economi[[inserted link]]cal' as 'any text with [[http://economical.example.com|link]]' This fulltext search loops all .txt files in the notebook directory and tries to recognize the patterns.
625941c33eb6a72ae02ec49e
def formatDist(distance, mapunits): <NEW_LINE> <INDENT> if mapunits == 'metres': <NEW_LINE> <INDENT> mapunits = 'meters' <NEW_LINE> <DEDENT> outunits = mapunits <NEW_LINE> distance = float(distance) <NEW_LINE> divisor = 1.0 <NEW_LINE> if mapunits == 'meters': <NEW_LINE> <INDENT> if distance > 2500.0: <NEW_LINE> <INDENT> outunits = 'km' <NEW_LINE> divisor = 1000.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outunits = 'm' <NEW_LINE> <DEDENT> <DEDENT> elif mapunits == 'feet': <NEW_LINE> <INDENT> if distance > 5280.0: <NEW_LINE> <INDENT> outunits = 'miles' <NEW_LINE> divisor = 5280.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outunits = 'ft' <NEW_LINE> <DEDENT> <DEDENT> elif 'degree' in mapunits: <NEW_LINE> <INDENT> if distance < 1: <NEW_LINE> <INDENT> outunits = 'min' <NEW_LINE> divisor = (1 / 60.0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outunits = 'deg' <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return (distance, 'units') <NEW_LINE> <DEDENT> if (distance / divisor) >= 2500.0: <NEW_LINE> <INDENT> outdistance = round(distance / divisor) <NEW_LINE> <DEDENT> elif (distance / divisor) >= 1000.0: <NEW_LINE> <INDENT> outdistance = round(distance / divisor, 1) <NEW_LINE> <DEDENT> elif (distance / divisor) > 0.0: <NEW_LINE> <INDENT> outdistance = round(distance / divisor, int(math.ceil(3 - math.log10(distance / divisor)))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outdistance = float(distance / divisor) <NEW_LINE> <DEDENT> return (outdistance, outunits)
Formats length numbers and units in a nice way. Formats length numbers and units as a function of length. >>> formatDist(20.56915, 'metres') (20.57, 'm') >>> formatDist(6983.4591, 'metres') (6.983, 'km') >>> formatDist(0.59, 'feet') (0.59, 'ft') >>> formatDist(8562, 'feet') (1.622, 'miles') >>> formatDist(0.48963, 'degrees') (29.38, 'min') >>> formatDist(20.2546, 'degrees') (20.25, 'deg') >>> formatDist(82.146, 'unknown') (82.15, 'units') Accepted map units are 'meters', 'metres', 'feet', 'degree'. Returns 'units' instead of unrecognized units. :param distance: map units :param mapunits: map units From code by Hamish Bowman Grass Development Team 2006.
625941c3462c4b4f79d1d696
def update(self, params={}): <NEW_LINE> <INDENT> return self.__update__( 'CardTimeLog', params )
Updates the card time log. :param params: Dictionary with parametrs for request. Full list of avalible parameters is avalible on https://faq.kaiten.io/docs/api#card-time-logs,-not-stable-yet-patch :type params: dict
625941c301c39578d7e74e01
def new_get_model(model_identifier): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return models.get_model(model_identifier) <NEW_LINE> <DEDENT> except (LookupError, TypeError): <NEW_LINE> <INDENT> raise base.DeserializationError( "Invalid model identifier: '%s'" % model_identifier)
Helper to look up a model from an "app_label.model_name" string. From
625941c37cff6e4e8111794c
def ReloadDeviceOs(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> params = request._serialize() <NEW_LINE> body = self.call("ReloadDeviceOs", params) <NEW_LINE> response = json.loads(body) <NEW_LINE> if "Error" not in response["Response"]: <NEW_LINE> <INDENT> model = models.ReloadDeviceOsResponse() <NEW_LINE> model._deserialize(response["Response"]) <NEW_LINE> return model <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code = response["Response"]["Error"]["Code"] <NEW_LINE> message = response["Response"]["Error"]["Message"] <NEW_LINE> reqid = response["Response"]["RequestId"] <NEW_LINE> raise TencentCloudSDKException(code, message, reqid) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if isinstance(e, TencentCloudSDKException): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TencentCloudSDKException(e.message, e.message)
重装操作系统 :param request: Request instance for ReloadDeviceOs. :type request: :class:`tencentcloud.bm.v20180423.models.ReloadDeviceOsRequest` :rtype: :class:`tencentcloud.bm.v20180423.models.ReloadDeviceOsResponse`
625941c39f2886367277a854
def MigrateV2(self, v2Item, log): <NEW_LINE> <INDENT> itemBody = v2Item.get_or_add_child(qtiv2.content.ItemBody) <NEW_LINE> if self.GotPosition(): <NEW_LINE> <INDENT> log.append( "Warning: discarding absolute positioning information on presentation") <NEW_LINE> <DEDENT> if self.InlineChildren(): <NEW_LINE> <INDENT> p = itemBody.add_child( html.P, (qtiv2.core.IMSQTI_NAMESPACE, 'p')) <NEW_LINE> if self.label is not None: <NEW_LINE> <INDENT> p.set_attribute('label', self.label) <NEW_LINE> <DEDENT> self.MigrateV2Content(p, html.InlineMixin, log) <NEW_LINE> <DEDENT> elif self.label is not None: <NEW_LINE> <INDENT> div = itemBody.add_child( html.Div, (qtiv2.core.IMSQTI_NAMESPACE, 'div')) <NEW_LINE> div.set_attribute('label', self.label) <NEW_LINE> self.MigrateV2Content(div, html.BlockMixin, log) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.MigrateV2Content(itemBody, html.BlockMixin, log) <NEW_LINE> <DEDENT> self.CleanHotspotImages(itemBody)
Presentation maps to the main content in itemBody.
625941c3a4f1c619b28b0003
def get_mo_overlap_matrix(mo_a,mo_b,ao_overlap_matrix,numproc=1): <NEW_LINE> <INDENT> if isinstance(mo_a, MOClass): <NEW_LINE> <INDENT> mo_a = mo_a.get_coeffs() <NEW_LINE> <DEDENT> elif isinstance(mo_a,dict): <NEW_LINE> <INDENT> mo_a = numpy.array(mo_a['coeffs']) <NEW_LINE> <DEDENT> if isinstance(mo_b, MOClass): <NEW_LINE> <INDENT> mo_b = mo_b.get_coeffs() <NEW_LINE> <DEDENT> elif isinstance(mo_b,dict): <NEW_LINE> <INDENT> mo_b = numpy.array(mo_b['coeffs']) <NEW_LINE> <DEDENT> global_args = {'mo_a': mo_a, 'mo_b': mo_b, 'ao_overlap_matrix': ao_overlap_matrix} <NEW_LINE> if ((global_args['mo_a'].shape[1] != ao_overlap_matrix.shape[0]) or (global_args['mo_b'].shape[1] != ao_overlap_matrix.shape[1])): <NEW_LINE> <INDENT> raise ValueError('mo_a and mo_b have to correspond to the same basis set, '+ 'i.e., shape_a[1] != shape_b[1]') <NEW_LINE> <DEDENT> numproc = min(len(global_args['mo_a']),max(1,numproc)) <NEW_LINE> ij = numpy.array(numpy.linspace(0, len(global_args['mo_a']), num=numproc+1, endpoint=True), dtype=numpy.intc) <NEW_LINE> ij = list(zip(ij[:-1],ij[1:])) <NEW_LINE> if numproc > 1: <NEW_LINE> <INDENT> pool = Pool(processes=numproc, initializer=initializer, initargs=(global_args,)) <NEW_LINE> it = pool.imap(get_slice, ij) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> initializer(global_args) <NEW_LINE> <DEDENT> mo_overlap_matrix = numpy.zeros((len(mo_a),len(mo_b)),dtype=numpy.float64) <NEW_LINE> for l,[m,n] in enumerate(ij): <NEW_LINE> <INDENT> mo_overlap_matrix[m:n,:] = it.next() if numproc > 1 else get_slice(ij[l]) <NEW_LINE> <DEDENT> if numproc > 1: <NEW_LINE> <INDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> <DEDENT> return mo_overlap_matrix
Computes the overlap of two sets of molecular orbitals. **Parameters:** mo_a : numpy.ndarray with shape = (NMO,NAO), dict, or MOClass instance Contains the molecular orbital coefficients of all `Bra` orbitals. mo_b : numpy.ndarray with shape = (NMO,NAO), dict, or MOClass instance Contains the molecular orbital coefficients of all `Ket` orbitals. ao_overlap_matrix : numpy.ndarray, shape = (NAO,NAO) Contains the overlap matrix of the basis set. numproc : int Specifies number of subprocesses for multiprocessing. **Returns:** mo_overlap_matrix : numpy.ndarray, shape = (NMO,NMO) Contains the overlap matrix between the two sets of input molecular orbitals.
625941c38e71fb1e9831d770
def Efield_static_uniform(x): <NEW_LINE> <INDENT> Ex = 0 <NEW_LINE> Ey = 0 <NEW_LINE> Ez = 0.1 <NEW_LINE> return np.array((Ex, Ey, Ez))
To simplify the example, impose a constant, uniform E field
625941c3b7558d58953c4edd
def _update_figure(self): <NEW_LINE> <INDENT> self._axes.clear() <NEW_LINE> if (self._parameters['x_axis'].list != [] and self._parameters['y_axis'].list != []): <NEW_LINE> <INDENT> tb = self._tb_dict[self._parameters['tb'].selected]['tb'] <NEW_LINE> ts = (self._tb_ts_dict[self._parameters['tb'].selected] [self._parameters['y_axis'].selected]['ts']) <NEW_LINE> if np.all(np.isreal(tb)) and np.all(np.isreal(ts)): <NEW_LINE> <INDENT> ts_new, ts_trend = detrend_data( tb, ts, self._parameters['detrend_function'].value[0]) <NEW_LINE> self._axes.plot( tb, ts_trend, '--', label='Trend') <NEW_LINE> self._axes.plot( tb, ts, '-', label='Original') <NEW_LINE> self._axes.plot( tb, ts_new, '-', label='Detrended') <NEW_LINE> self._axes.legend() <NEW_LINE> self._axes.set_xlabel( self._tb_dict[self._parameters['x_axis'].selected] ['raster_name']) <NEW_LINE> self._axes.set_ylabel(self._parameters['y_axis'].selected) <NEW_LINE> <DEDENT> <DEDENT> self._canvas.draw()
Update figure.
625941c37047854f462a13d2
def test_update_page_state(self): <NEW_LINE> <INDENT> pageStateObj = PageState() <NEW_LINE> response = self.client.open( '/rui-support/page-state/{keycloakId}'.format(keycloakId='keycloakId_example'), method='PATCH', data=json.dumps(pageStateObj), content_type='application/ld+json') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))
Test case for update_page_state Updates the page state for a user session
625941c3287bf620b61d3a2b
def purchase(self, ticker, fromMidPrice=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.hTicks.append(ticker) <NEW_LINE> if fromMidPrice: <NEW_LINE> <INDENT> self.midTicks.remove(ticker) <NEW_LINE> tPrice = fromMidPrice <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tPrice = ticker.AP <NEW_LINE> self.qTicks.remove(ticker) <NEW_LINE> self.transTable.bought(ticker) <NEW_LINE> <DEDENT> self._dtCost += float(ticker.Q * tPrice) <NEW_LINE> self.totalCost.setText( '%.2f' % (float(self.totalCost.text()) + float(ticker.Q * tPrice))) <NEW_LINE> logging.info( '---- Bought {} shares of {} at {}, SL: {} ----'.format( ticker.Q, ticker.T, tPrice, ticker.SL)) <NEW_LINE> return True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> logging.error('~~~~ Error With Purchase, Reverting Back ~~~~') <NEW_LINE> self.revert(ticker, self.hTicks, self.qTicks) <NEW_LINE> <DEDENT> return False
"Purchases" the stock by removing it from the Queue, placing it on the Holding table Args: ticker (Tick): Tick object of ticker we're actually purchasing Returns: None
625941c36aa9bd52df036d69
def _refresh_locale_store(self, lang): <NEW_LINE> <INDENT> self._localeStore.clear() <NEW_LINE> locales = localization.get_language_locales(lang) <NEW_LINE> for locale in locales: <NEW_LINE> <INDENT> if self._only_existing_locales and not localization.locale_has_translation(locale): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self._add_locale(self._localeStore, localization.get_native_name(locale), locale) <NEW_LINE> <DEDENT> set_treeview_selection(self._localeView, locales[0], col=1)
Refresh the localeStore with locales for the given language.
625941c3b7558d58953c4ede
def compute_buckets(self): <NEW_LINE> <INDENT> from collections import defaultdict <NEW_LINE> dicts = pan, pap, pbm, pbq = defaultdict(list), defaultdict(list), defaultdict(list), defaultdict(list) <NEW_LINE> for dic, lis in zip(dicts, (self.an, self.ap, self.bm, self.bq)): <NEW_LINE> <INDENT> for x in lis: <NEW_LINE> <INDENT> dic[Mod(x, 1)].append(x) <NEW_LINE> <DEDENT> <DEDENT> for dic, flip in zip(dicts, (True, False, False, True)): <NEW_LINE> <INDENT> for m, items in dic.items(): <NEW_LINE> <INDENT> x0 = items[0] <NEW_LINE> items.sort(key=lambda x: x - x0, reverse=flip) <NEW_LINE> dic[m] = items <NEW_LINE> <DEDENT> <DEDENT> return tuple([dict(w) for w in dicts])
Compute buckets for the fours sets of parameters. We guarantee that any two equal Mod objects returned are actually the same, and that the buckets are sorted by real part (an and bq descendending, bm and ap ascending). >>> from sympy.simplify.hyperexpand import IndexQuadruple >>> from sympy.abc import y >>> from sympy import S >>> a, b = [1, 3, 2, S(3)/2], [1 + y, y, 2, y + 3] >>> IndexQuadruple(a, b, [2], [y]).compute_buckets() ({0: [3, 2, 1], 1/2: [3/2]}, {0: [2], Mod(y, 1): [y, y + 1, y + 3]}, {0: [2]}, {Mod(y, 1): [y]})
625941c3d164cc6175782d14
def eliminate(self, *vs): <NEW_LINE> <INDENT> for v in vs: <NEW_LINE> <INDENT> for f in self.N(v): <NEW_LINE> <INDENT> fac = self.node[f] <NEW_LINE> i = fac['vars'].index(v) <NEW_LINE> fac['vars'].remove(v) <NEW_LINE> if len(fac['pmf'].shape) > 1: <NEW_LINE> <INDENT> fac['pmf'] = sum( fac['pmf'], axis=i ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.remove_node(f) <NEW_LINE> self.graph['facs'].remove(f) <NEW_LINE> <DEDENT> <DEDENT> self.remove_node(v) <NEW_LINE> self.graph['vars'].remove(v)
vs : [var]
625941c38a349b6b435e8139
def findPorts(self): <NEW_LINE> <INDENT> com_port_combo = [] <NEW_LINE> for port in serialutils.enumerate_serial_ports(): <NEW_LINE> <INDENT> com_port_combo.append(port) <NEW_LINE> <DEDENT> return com_port_combo
Search for available Ports
625941c3435de62698dfdc12
def get_and_organise_product_codes(self): <NEW_LINE> <INDENT> if self.sale_info["note"]: <NEW_LINE> <INDENT> self.notes.append(self.sale_info["note"]) <NEW_LINE> <DEDENT> for product in self.sale_info["line_items"]: <NEW_LINE> <INDENT> if product["product_id"] == "549e099d-a641-7141-c907-cdd9d0266175": <NEW_LINE> <INDENT> self.update_monday = True <NEW_LINE> continue <NEW_LINE> <DEDENT> if product["product_id"] == "02d59481-b6ab-11e5-f667-e9f1a04c6e04": <NEW_LINE> <INDENT> self.repair_type = "Diagnostic" <NEW_LINE> self.notes.append(product["note"]) <NEW_LINE> continue <NEW_LINE> <DEDENT> if product["product_id"] == "02dcd191-aeab-11e6-f485-aea7f2c0a90a": <NEW_LINE> <INDENT> self.client = "Warranty" <NEW_LINE> self.notes.append(product["note"]) <NEW_LINE> continue <NEW_LINE> <DEDENT> if product["product_id"] == "6ce9883a-dfd1-e137-1596-d7c3c97fb450": <NEW_LINE> <INDENT> self.passcode = product["note"] <NEW_LINE> continue <NEW_LINE> <DEDENT> if product["note"]: <NEW_LINE> <INDENT> if any(option in product["note"] for option in ["IMEI", "SN", "S/N"]): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.imei_sn = product["note"].split(":")[1].strip() <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> self.pre_checks.append(product["note"]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if product["product_id"] in keys.vend.pre_checks: <NEW_LINE> <INDENT> self.pre_checks.append(keys.vend.pre_checks[product["product_id"]]) <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.products.append(product["product_id"])
Go through products on sale organise into pre-checks, actual repairs, extract passcode/data/notification preferences
625941c367a9b606de4a7e81
def contour(self, *args, **kwargs): <NEW_LINE> <INDENT> plt.figure() <NEW_LINE> plt.gca().set_aspect('equal') <NEW_LINE> plt.tricontour(self.triang, self.values, *args, **kwargs) <NEW_LINE> plt.colorbar() <NEW_LINE> plt.show()
Show contours of values.
625941c356ac1b37e6264199
def stat_sum(self): <NEW_LINE> <INDENT> return Dataset.stat_sum(self)
Total likelihood given the current model parameters.
625941c3293b9510aa2c325e
def size(lst): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return lt.size(lst) <NEW_LINE> <DEDENT> except Exception as exp: <NEW_LINE> <INDENT> error.reraise (exp, 'TADList->size: ')
Informa el número de elementos de la lista. Args lst: La lista a examinar Raises: Exception
625941c307d97122c417884e
def __init__(self,original): <NEW_LINE> <INDENT> self.original_frame=original <NEW_LINE> tk.Toplevel.__init__(self) <NEW_LINE> self.geometry("400x300") <NEW_LINE> self.title("newAccount") <NEW_LINE> newusername=StringVar() <NEW_LINE> newpassword=StringVar() <NEW_LINE> def createact(*args): <NEW_LINE> <INDENT> newact=[] <NEW_LINE> newun=unentry.get() <NEW_LINE> newpw=pwentry.get() <NEW_LINE> newact.append(newun) <NEW_LINE> newact.append(newpw) <NEW_LINE> file=open('quizlet_logins.txt','a') <NEW_LINE> newact=str(newact).replace('[','') <NEW_LINE> newact=str(newact).replace(']','') <NEW_LINE> newact=str(newact).replace("'",'') <NEW_LINE> file.write(newact+'\n') <NEW_LINE> self.onClose() <NEW_LINE> <DEDENT> def test(*args): <NEW_LINE> <INDENT> print(newusername.get()) <NEW_LINE> file=open('quizlet_logins.txt','r') <NEW_LINE> for line in file: <NEW_LINE> <INDENT> line=line.replace('\n','').split(',') <NEW_LINE> print(line) <NEW_LINE> if unentry==line[0]: <NEW_LINE> <INDENT> messagebox.showinfo(title='ERROR',message='Your username is already being used, please choose another username.') <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> root=Tk() <NEW_LINE> createlbl=tk.Label(self, text='Create an Account').grid(column=0, row=1) <NEW_LINE> newunlbl=tk.Label(self, text='Username: ').grid(column=0, row=2) <NEW_LINE> unentry=tk.Entry(self, textvariable=newusername) <NEW_LINE> unentry.grid(column=1, row=2, padx=5, pady=5) <NEW_LINE> newpwlbl=tk.Label(self, text='Password: ').grid(column=0, row=3) <NEW_LINE> pwentry=tk.Entry(self, textvariable=newpassword) <NEW_LINE> pwentry.grid(column=1, row=3) <NEW_LINE> createbtn = tk.Button(self, text="Create", command=self.onClose).grid(column=1, row=4, padx=5, pady=5)
Constructor
625941c3e76e3b2f99f3a7d4
def save_credential(credential): <NEW_LINE> <INDENT> Credential.save_credentials(credential)
function that saves a newly created credential
625941c35510c4643540f3af
def __init__(self, message = None, previous = None, code = 0): <NEW_LINE> <INDENT> ConsoleException.__init__(self, 127, message, previous, code);
Constructor. @param string message The internal exception message @param Exception previous The previous exception @param integer code The internal exception code
625941c3f548e778e58cd543
def __init__(self, keyid=None, comptage='1', date_envoi=None, sms=None, num=None, emetteur=None, tracker=None, smslong='999', nostop=None, ucs2=None): <NEW_LINE> <INDENT> self.swagger_types = { 'keyid': 'str', 'comptage': 'str', 'date_envoi': 'str', 'sms': 'str', 'num': 'str', 'emetteur': 'str', 'tracker': 'str', 'smslong': 'str', 'nostop': 'str', 'ucs2': 'str' } <NEW_LINE> self.attribute_map = { 'keyid': 'keyid', 'comptage': 'comptage', 'date_envoi': 'date_envoi', 'sms': 'sms', 'num': 'num', 'emetteur': 'emetteur', 'tracker': 'tracker', 'smslong': 'smslong', 'nostop': 'nostop', 'ucs2': 'ucs2' } <NEW_LINE> self._keyid = keyid <NEW_LINE> self._comptage = comptage <NEW_LINE> self._date_envoi = date_envoi <NEW_LINE> self._sms = sms <NEW_LINE> self._num = num <NEW_LINE> self._emetteur = emetteur <NEW_LINE> self._tracker = tracker <NEW_LINE> self._smslong = smslong <NEW_LINE> self._nostop = nostop <NEW_LINE> self._ucs2 = ucs2
ComptageRequest - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition.
625941c32eb69b55b151c874
def test_retrieve_list_no_redirect_if_logged_in(self): <NEW_LINE> <INDENT> self.client.login(username="user", password="abcd123456") <NEW_LINE> response = self.client.get("/payee-payer/retrieve-payee-payer-list/") <NEW_LINE> self.assertEqual(str(response.context['user']), 'user') <NEW_LINE> self.assertEqual(response.status_code, 200)
Checks that request is redirected if user is not logged in
625941c329b78933be1e5675
def as_command(self): <NEW_LINE> <INDENT> return "(dynamic test case)"
Provide base method.
625941c3cdde0d52a9e52ff8
def get_cl_parameters(argv): <NEW_LINE> <INDENT> program_function = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> opts, args = getopt.getopt(argv, "hf:b:r:", ["help", "function=", "b_qcd=", "qcd_ratio="]) <NEW_LINE> <DEDENT> except getopt.GetoptError as err: <NEW_LINE> <INDENT> print(err) <NEW_LINE> usage() <NEW_LINE> sys.exit(2) <NEW_LINE> <DEDENT> b_qcd = 0 <NEW_LINE> ratio = 1 <NEW_LINE> for opt, arg in opts: <NEW_LINE> <INDENT> if opt in ("-f", "--function"): <NEW_LINE> <INDENT> if arg == "gen": <NEW_LINE> <INDENT> program_function = 0 <NEW_LINE> <DEDENT> elif arg == "u": <NEW_LINE> <INDENT> program_function = 1 <NEW_LINE> <DEDENT> elif arg == "l": <NEW_LINE> <INDENT> program_function = 2 <NEW_LINE> <DEDENT> <DEDENT> elif opt in ("-b", "--b_qcd"): <NEW_LINE> <INDENT> b_qcd = float(arg) <NEW_LINE> <DEDENT> elif opt in ("-r", "--qcd_ratio"): <NEW_LINE> <INDENT> ratio = float(arg) <NEW_LINE> <DEDENT> elif opt == '-h': <NEW_LINE> <INDENT> usage() <NEW_LINE> exit(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, "Unhandled exception." <NEW_LINE> <DEDENT> <DEDENT> return program_function, b_qcd, ratio
Extracts the command line parameters. :param argv: :return:
625941c3442bda511e8be3e1
def load_data_file_dark(filename): <NEW_LINE> <INDENT> filename_dark = filename.with_suffix(".dark13.txt") <NEW_LINE> counts_dark = load_csv(filename_dark) <NEW_LINE> return counts_dark
Load a dark pixel file from a spectrum filename.
625941c3091ae35668666f28
def _findfieldnameindex(self, fieldname): <NEW_LINE> <INDENT> return self.fields.index(fieldname)
Internal use only.
625941c33eb6a72ae02ec49f
def compare_files(service, downloaded_file, used_file): <NEW_LINE> <INDENT> LOG.debug(f'Comparing downloaded via {service} file with original file ...') <NEW_LINE> assert filecmp.cmp(downloaded_file, used_file, shallow=False), 'Files are not equal. | FAIL | ' <NEW_LINE> LOG.info(f'{service} Downloaded file is equal to the original file. | PASS |') <NEW_LINE> if os.path.isfile(downloaded_file): <NEW_LINE> <INDENT> os.remove(downloaded_file) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.error(f"Error: %s file not found {downloaded_file}")
Compare Downloaded file with original.
625941c3adb09d7d5db6c757
def euler(self, bestside): <NEW_LINE> <INDENT> if np.allclose(bestside[0], np.array([0, 0, -1]), atol=VECTOR_TOL): <NEW_LINE> <INDENT> rotation_axis = [1, 0, 0] <NEW_LINE> phi = np.pi <NEW_LINE> <DEDENT> elif np.allclose(bestside[0], np.array([0, 0, 1]), atol=VECTOR_TOL): <NEW_LINE> <INDENT> rotation_axis = [1, 0, 0] <NEW_LINE> phi = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> phi = float("{:2f}".format(np.pi - np.arccos(-bestside[0][2]))) <NEW_LINE> rotation_axis = [-bestside[0][1], bestside[0][0], 0] <NEW_LINE> rotation_axis = [i / np.sum(np.power(rotation_axis, 2), axis=-1)**0.5 for i in rotation_axis] <NEW_LINE> rotation_axis = np.array([float("{:2f}".format(i)) for i in rotation_axis]) <NEW_LINE> <DEDENT> v = rotation_axis <NEW_LINE> rotational_matrix = [[v[0] * v[0] * (1 - math.cos(phi)) + math.cos(phi), v[0] * v[1] * (1 - math.cos(phi)) - v[2] * math.sin(phi), v[0] * v[2] * (1 - math.cos(phi)) + v[1] * math.sin(phi)], [v[1] * v[0] * (1 - math.cos(phi)) + v[2] * math.sin(phi), v[1] * v[1] * (1 - math.cos(phi)) + math.cos(phi), v[1] * v[2] * (1 - math.cos(phi)) - v[0] * math.sin(phi)], [v[2] * v[0] * (1 - math.cos(phi)) - v[1] * math.sin(phi), v[2] * v[1] * (1 - math.cos(phi)) + v[0] * math.sin(phi), v[2] * v[2] * (1 - math.cos(phi)) + math.cos(phi)]] <NEW_LINE> rotational_matrix = np.around(rotational_matrix, decimals=6) <NEW_LINE> sleep(0) <NEW_LINE> return rotation_axis, phi, rotational_matrix
Calculating euler rotation parameters and rotational matrix. Args: bestside (np.array): vector of the best orientation (3 x 3). Returns: rotation axis, rotation angle, rotational matrix.
625941c3379a373c97cfab0a
def serialize(self): <NEW_LINE> <INDENT> json_dict = [] <NEW_LINE> for dessert in self.desserts: <NEW_LINE> <INDENT> json_dict.append(dessert.serialize()) <NEW_LINE> <DEDENT> return json_dict
Convert dessert list data to a list of dictionaries, which will play nice with JSON
625941c38a43f66fc4b5402d
def test_format_count_gt_total(self): <NEW_LINE> <INDENT> ctr = Counter(stream=self.tty.stdout, total=10, desc='Test', unit='ticks') <NEW_LINE> ctr.count = 50 <NEW_LINE> ctr.start = time.time() - 50 <NEW_LINE> rtn = ctr.format(width=80) <NEW_LINE> self.assertEqual(len(rtn), 80) <NEW_LINE> self.assertRegex(rtn, r'Test 50 ticks \[00:5\d, \d.\d\d ticks/s\]')
Counter should fall back to no-total format if count is greater than total
625941c376e4537e8c351637
def Email_Search(mail, emailAddressToSearch, numDaysToSearchBeforeToday=0): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> listOfUIDLists = [] <NEW_LINE> FullUIDListRaw = [] <NEW_LINE> while i <= numDaysToSearchBeforeToday: <NEW_LINE> <INDENT> date_search = datetime.strftime(datetime.now() - timedelta(i), "%d %b %Y") <NEW_LINE> uidListRaw = list(mail.uid('search', None, '(HEADER From \"<' + emailAddressToSearch + '>\")', '(HEADER Date ' + '\"' + date_search + '\"' + ')'))[1][0].decode("utf-8") <NEW_LINE> uidListCooked = uidListRaw.split(" ") <NEW_LINE> listOfUIDLists.append(uidListCooked) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> allUIDs = mail.uid('search', None, "ALL") <NEW_LINE> print("\nAll UIDs:") <NEW_LINE> print(allUIDs) <NEW_LINE> unread = mail.search(None, '(UNSEEN)') <NEW_LINE> print("UNREAD MESSAGES:") <NEW_LINE> print(unread) <NEW_LINE> scUIDs = mail.search(None, '(HEADER From \"<' + emailAddressToSearch + '>\")') <NEW_LINE> print("\nSC UIDs:") <NEW_LINE> print(scUIDs) <NEW_LINE> for uidList in listOfUIDLists: <NEW_LINE> <INDENT> FullUIDListRaw = FullUIDListRaw + uidList <NEW_LINE> <DEDENT> print("\nFullUIDListRaw:") <NEW_LINE> print(FullUIDListRaw) <NEW_LINE> FullUIDListFiltered = list(filter(lambda a: a != '', FullUIDListRaw)) <NEW_LINE> print("\nFullUIDListFiltered:") <NEW_LINE> print(FullUIDListFiltered) <NEW_LINE> return FullUIDListFiltered
Default: Searches all emails from the current day's date, creates a list of email uids of all the emails matching the search criteria Parameters: ---------- mail : object emailAddressToSearch : str numDaysToSearchBeforeToday : int Default : 0 -> Searches the current date only > 0 -> Searches that many days before the current date Example: numDaysToSearchBeforeToday = 1 returns uids from yesterday numDaysToSearchBeforeToday = 2 returns uids from 2 days ago ---------- Returns: ---------- List of UIDs that match search criteria
625941c3e64d504609d74806
def test_permanent_redirect(self): <NEW_LINE> <INDENT> response = RedirectView.as_view(url='/bar/')( DummyRequest(path='/foo/', method='GET') ) <NEW_LINE> self.assertEqual(response.status_code, 301) <NEW_LINE> self.assertEqual(response.location, '/bar/')
Default is a permanent redirect
625941c3a219f33f34628932
def compute_one_vertex_feature_score(self, object_tracker): <NEW_LINE> <INDENT> return object_tracker.tracker_feature_score()
Compute and return the feature (or appearance) score of one vertex (tracker).
625941c39c8ee82313fbb73b
def add_armor(self, armor): <NEW_LINE> <INDENT> armory = self.armors.append(armor) <NEW_LINE> return armory
This method will add the armor object that is passed in to the list of armor objects definied in the initializer as self.armors.
625941c3627d3e7fe0d68e15
def get_alb_target_groups(alb_arn): <NEW_LINE> <INDENT> target_group_descriptions = ALB_CLIENT.describe_target_groups(LoadBalancerArn=alb_arn)[ 'TargetGroups'] <NEW_LINE> return [tg_desc['TargetGroupArn'] for tg_desc in target_group_descriptions]
Takes an application load balancer amazon resource name and returns a list of target group amazon resource names. args: alb_arn - the amazon resource name of an ALB
625941c3d8ef3951e3243504
def inverse(x: int, n: int) -> int: <NEW_LINE> <INDENT> (divider, inv, _) = extended_gcd(x, n) <NEW_LINE> if divider != 1: <NEW_LINE> <INDENT> raise NotRelativePrimeError(x, n, divider) <NEW_LINE> <DEDENT> return inv
Returns the inverse of x % n under multiplication, a.k.a x^-1 (mod n) >>> inverse(7, 4) 3 >>> (inverse(143, 4) * 143) % 4 1
625941c326068e7796caeca2
def set_encoder_enable(self, id): <NEW_LINE> <INDENT> for i in self._parse_id(id): <NEW_LINE> <INDENT> self._write_bytes(self._REG_ENCODER1_EN + 5 * (i - 1), [0x01])
@brief Set dc motor encoder enable @param id: list Encoder list, items in range 1 to 2, or id = self.ALL
625941c326068e7796caeca3
def set_fee_per_period(self, fee, alias_type): <NEW_LINE> <INDENT> storage = Storage() <NEW_LINE> keys = self.options() <NEW_LINE> key = keys[3] <NEW_LINE> key = concat(key, alias_type) <NEW_LINE> storage.delete(key) <NEW_LINE> storage.save(key, fee) <NEW_LINE> ConfigurationUpdatedEvent(key, fee) <NEW_LINE> return True
:param fee: :param type: sets fee per period for given type
625941c324f1403a92600b2e
def get_cash_flow(self, **kwargs): <NEW_LINE> <INDENT> def format(out): <NEW_LINE> <INDENT> results = {} <NEW_LINE> for symbol in out: <NEW_LINE> <INDENT> if out[symbol]: <NEW_LINE> <INDENT> results[symbol] = pd.DataFrame.from_dict( {d["reportDate"]: d for d in out[symbol]["cashflow"]}, orient="index", ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results[symbol] = pd.DataFrame() <NEW_LINE> <DEDENT> <DEDENT> return results[self.symbols[0]].T if self.single_symbol else results <NEW_LINE> <DEDENT> return self._get_endpoint("cash-flow", format=format, params=kwargs)
Cash Flow Pulls cash flow data. Available quarterly (4 quarters) or annually (4 years). Reference: https://iexcloud.io/docs/api/#cash-flow Data Weighting: ``1000`` per symbol per period Parameters ---------- period: str, default 'quarter', optional Allows you to specify annual or quarterly cash flows. Value should be `annual` or `quarter`. last: int, default 1, optional Specify the number of quarters or years to return. You can specify up to 12 quarters or 4 years.
625941c35fdd1c0f98dc01f9
def _set_safi_name(self, v, load=False): <NEW_LINE> <INDENT> parent = getattr(self, "_parent", None) <NEW_LINE> if parent is not None and load is False: <NEW_LINE> <INDENT> raise AttributeError( "Cannot set keys directly when" + " within an instantiated list" ) <NEW_LINE> <DEDENT> if hasattr(v, "_utype"): <NEW_LINE> <INDENT> v = v._utype(v) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> t = YANGDynClass( v, base=six.text_type, is_leaf=True, yang_name="safi-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="leafref", is_config=True, ) <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> raise ValueError( { "error-string": """safi_name must be of a type compatible with leafref""", "defined-type": "leafref", "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="safi-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""", } ) <NEW_LINE> <DEDENT> self.__safi_name = t <NEW_LINE> if hasattr(self, "_set"): <NEW_LINE> <INDENT> self._set()
Setter method for safi_name, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/interfaces/interface/afi_safi/af/safi_name (leafref) If this variable is read-only (config: false) in the source YANG file, then _set_safi_name is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_safi_name() directly. YANG Description: Reference to subsequent address-family type
625941c3099cdd3c635f0c22
def __init__(self, centre: Tuple[int, int]) -> None: <NEW_LINE> <INDENT> self._centre = int(centre[0]), int(centre[1]) <NEW_LINE> self._name = None <NEW_LINE> self._point = None <NEW_LINE> self._ne = None <NEW_LINE> self._nw = None <NEW_LINE> self._se = None <NEW_LINE> self._sw = None
Initialize this QuadTree instance. === Precondition === - <centre> must contain only positive integers or zero. >>> tree = QuadTree((100, 100)) >>> tree.__getattribute__('_centre') == (100, 100) True >>> tree.is_empty() True Runtime: O(1)
625941c30fa83653e4656f83
def convert_to_id(vocab, sentences): <NEW_LINE> <INDENT> return np.stack([[vocab[word] if word in vocab else vocab[UNK_TOKEN] for word in sentence] for sentence in sentences])
DO NOT CHANGE Convert sentences to indexed :param vocab: dictionary, word --> unique index :param sentences: list of lists of words, each representing padded sentence :return: list of list of integers, with each row representing the word indices in the corresponding sentences
625941c3d6c5a10208144010
def __iter__(self): <NEW_LINE> <INDENT> for s in self._list: <NEW_LINE> <INDENT> yield s
!Iterates over the set's elements in order.
625941c31d351010ab855ae3
def swissPairings(tournament): <NEW_LINE> <INDENT> standings = playerStandings(tournament) <NEW_LINE> id_names = [(elem[0], elem[1]) for elem in standings] <NEW_LINE> return [(elem[0][0], elem[0][1], elem[1][0], elem[1][1]) for elem in zip(id_names[::2], id_names[1::2])]
Returns a list of pairs of players for the next round of a match. Assuming that there are an even number of players registered, each player appears exactly once in the pairings. Each player is paired with another player with an equal or nearly-equal win record, that is, a player adjacent to him or her in the standings. Args: tournament: the id number of the tournament Returns: A list of tuples, each of which contains (id1, name1, id2, name2) id1: the first player's unique id name1: the first player's name id2: the second player's unique id name2: the second player's name
625941c3aad79263cf390a05
def test_pushn_fail(self): <NEW_LINE> <INDENT> pass
no way to test this function's failure (pass)
625941c3d486a94d0b98e10c
def get_raw_x_intensities(self): <NEW_LINE> <INDENT> return self.__get_generic_array_numpy(GenotypeCalls.__ID_RAW_X, uint16)
Returns: The raw x intensities of assay bead types as a list of integers
625941c37d43ff24873a2c66
def fix_python_cmd(cmd, env=None): <NEW_LINE> <INDENT> if cmd[0] == 'python': <NEW_LINE> <INDENT> cmd = cmd[1:] <NEW_LINE> <DEDENT> elif cmd[0].endswith('.py'): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return cmd <NEW_LINE> <DEDENT> if sys.platform == 'win32': <NEW_LINE> <INDENT> python_exe = 'python.exe' <NEW_LINE> check = os.path.isfile <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> python_exe = 'python' <NEW_LINE> def check(candidate): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return bool(os.stat(candidate).st_mode | os.path.stat.S_IEXEC) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> found_python = sys.executable <NEW_LINE> paths = (os.environ if env is None else env).get('PATH', '').split(os.pathsep) <NEW_LINE> for path in paths: <NEW_LINE> <INDENT> if path == '': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> candidate = os.path.join(path, python_exe) <NEW_LINE> if check(candidate): <NEW_LINE> <INDENT> found_python = candidate <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return [found_python] + cmd
Returns a fixed command line to explicitly invoke python if cmd is running 'python' or a '.py' script. This will probe $PATH in `env` to see if there's an available python in the current $PATH (allowing tasks to bring their own python). If there's no python (or python.exe) in $PATH, this will fall back to sys.executable. NOTE: This should only be used for python2. If tasks want to include python3, they should make sure that their task explicitly invokes python3.
625941c382261d6c526ab463
def get_response(self, environ): <NEW_LINE> <INDENT> resp = super(HTTPExceptionMixin, self).get_response(environ) <NEW_LINE> if is_json_request(environ.get('HTTP_ACCEPT')): <NEW_LINE> <INDENT> resp.mimetype = environ['HTTP_ACCEPT'] <NEW_LINE> <DEDENT> return resp
Return a json response for a json request otherwise an html response
625941c316aa5153ce36243f
@cliutils.arg('instance_id', metavar='<instance_id>', help="Instance to remove the virtual interface from") <NEW_LINE> @cliutils.arg('interface_id', metavar='<interface_id>', help='ID of the virtual interface to delete') <NEW_LINE> def do_virtual_interface_delete(cs, args): <NEW_LINE> <INDENT> cs.os_virtual_interfacesv2_python_novaclient_ext.delete(args.instance_id, args.interface_id)
Removes the specified virtual interface from an instance
625941c30383005118ecf5ab
def get_name(self): <NEW_LINE> <INDENT> return lib.appnet_application_get_name(self._as_parameter_)
get application name
625941c37d847024c06be281
def gradient(self, x): <NEW_LINE> <INDENT> xm = x[1:-1] <NEW_LINE> xm_m1 = x[:-2] <NEW_LINE> xm_p1 = x[2:] <NEW_LINE> der = numpy.zeros(x.shape, x.dtype) <NEW_LINE> der[1:-1] = 200. * (xm - xm_m1**2.) - 400. * (xm_p1 - xm**2.) * xm - 2. * (1. - xm) <NEW_LINE> der[0] = -400. * x[0] * (x[1] - x[0]**2.) - 2. * (1. - x[0]) <NEW_LINE> der[-1] = 200. * (x[-1] - x[-2]**2.) <NEW_LINE> return der
Evaluates the gradient of the function
625941c37c178a314d6ef423
def selectiveDescendantGen(self, openOnly=False): <NEW_LINE> <INDENT> if not openOnly or self.isExpanded(): <NEW_LINE> <INDENT> for child in self.childList: <NEW_LINE> <INDENT> yield child <NEW_LINE> for node in child.selectiveDescendantGen(openOnly): <NEW_LINE> <INDENT> yield node
Return a generator to step through nodes in this branch. Does not include the root node. Arguments: openOnly -- if True, only include children open in the current view
625941c3bf627c535bc13195
def test_bm_response(self): <NEW_LINE> <INDENT> xyz = (.05,0,1) <NEW_LINE> self.ant.select_chans([0]) <NEW_LINE> resp = self.ant.bm_response(xyz, pol='x') <NEW_LINE> self.assertAlmostEqual(resp, n.sqrt(n.exp(-1)), 3) <NEW_LINE> resp = self.ant.bm_response(xyz, pol='y') <NEW_LINE> self.assertAlmostEqual(resp, n.sqrt(n.exp(-4)), 3)
Test the Antenna beam response
625941c332920d7e50b28195
def __init__(self, acc_metric='accuracy_score', num_cv=5, **kwargs): <NEW_LINE> <INDENT> self.run_types = {} <NEW_LINE> for k in kwargs: <NEW_LINE> <INDENT> self.run_types[k] = kwargs.get(k, None) <NEW_LINE> <DEDENT> self.acc_metric = acc_metric <NEW_LINE> self.num_cv = num_cv <NEW_LINE> self.summary_df_cv = None <NEW_LINE> self.models = None <NEW_LINE> self.params = None <NEW_LINE> self.best_model = None <NEW_LINE> self.best_params = None
kwargs: *something* must be passed for each type or it will be ignored. Can have just check, just ignore, just params, or any combination thereof. kwarg inputs: bucket: check, ignore, bucket_list. min_max_scale: check, ignore, feature_range. one_hot_encode: check, ignore, categories, bucket_list. raw: check, ignore. Example: kwargs = { 'min_max_scale': { 'ignore': ['RandForest'], 'feature_range': (0, 0,5) }, 'one_hot_encode': { 'check': ['GaussNB', 'MultiNB'], 'categories': [ list(range(1, 11)) if c not in ['multiworld'] else [0, 1] for c in df_train.columns ], 'bucket_list': [(col, 10) for col in df.columns if col not in ['multiworld']] }, raw: { 'check': ['LogRegress'] } }
625941c399cbb53fe6792bae
def test_no_arguments(self): <NEW_LINE> <INDENT> with self.assertRaises(TypeError): <NEW_LINE> <INDENT> recipe()
No arguments
625941c367a9b606de4a7e82
def getUsernameFromSiteDB_wrapped(logger, quiet = False): <NEW_LINE> <INDENT> from CRABClient.UserUtilities import getUsernameFromSiteDB <NEW_LINE> username = None <NEW_LINE> msg = "Retrieving username from SiteDB..." <NEW_LINE> if quiet: <NEW_LINE> <INDENT> logger.debug(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info(msg) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> username = getUsernameFromSiteDB() <NEW_LINE> <DEDENT> except ProxyException as ex: <NEW_LINE> <INDENT> msg = "%sError%s: %s" % (colors.RED, colors.NORMAL, ex) <NEW_LINE> if quiet: <NEW_LINE> <INDENT> logger.debug(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error(msg) <NEW_LINE> <DEDENT> <DEDENT> except UsernameException as ex: <NEW_LINE> <INDENT> msg = "%sError%s: %s" % (colors.RED, colors.NORMAL, ex) <NEW_LINE> if quiet: <NEW_LINE> <INDENT> logger.debug(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error(msg) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> msg = "%sError%s: Failed to retrieve username from SiteDB." % (colors.RED, colors.NORMAL) <NEW_LINE> msg += "\n%s" % (traceback.format_exc()) <NEW_LINE> if quiet: <NEW_LINE> <INDENT> logger.debug(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error(msg) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> msg = "Username is: %s" % (username) <NEW_LINE> if quiet: <NEW_LINE> <INDENT> logger.debug(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info(msg) <NEW_LINE> <DEDENT> <DEDENT> return username
Wrapper function for getUsernameFromSiteDB, catching exceptions and printing messages.
625941c30a50d4780f666e58
def read_global_config() -> Dict[Text, Any]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return rasa.utils.io.read_config_file(GLOBAL_USER_CONFIG_PATH) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return {}
Read global Rasa configuration.
625941c3d268445f265b4e35
def wind_LC(lcName,deckLineSet,vectWindDeck,windwardPileSet,vectWindwardPile,leewardPileSet=None,vectLeewardPile=None): <NEW_LINE> <INDENT> preprocessor=deckLineSet.getPreprocessor <NEW_LINE> lc=lcases.LoadCase(preprocessor,lcName,"default","constant_ts") <NEW_LINE> lc.create() <NEW_LINE> lc.addLstLoads([loads.UniformLoadOnLines(name= lcName+'deck', xcSet=deckLineSet, loadVector=xc.Vector([vectWindDeck[0],vectWindDeck[1],0,0,0,0]))]) <NEW_LINE> lc.addLstLoads([loads.UniformLoadOnLines(name= lcName+'pilewindward', xcSet=windwardPileSet, loadVector=xc.Vector([vectWindwardPile[0],vectWindwardPile[1],0,0,0,0]))]) <NEW_LINE> if leewardPileSet: <NEW_LINE> <INDENT> lc.addLstLoads([loads.UniformLoadOnLines(name= lcName+'pileleeward', xcSet=leewardPileSet, loadVector=xc.Vector([vectLeewardPile[0],vectLeewardPile[1],0,0,0,0]))]) <NEW_LINE> <DEDENT> return lc
Return the dead load case (asphalt, sidewalk, barriers, ...) :param lcName: load case name :param deckLineSet: set of lines to apply wind on deck. :param vectWindDeck: components [vx,vy] of the uniform linear load due to wind on deck. :param windwardPileSet:set of lines to apply on piles windward. :param vectWindwardPile: components [vx,vy] of the uniform linear load due to wind on piles (windward side). :param leewardPileSet:set of lines to apply wind on pilesleeward (defaults to None). :param vectLeewardPile: components [vx,vy] of the uniform linear load due to wind on piles (leeward side) (defaults to None).
625941c345492302aab5e289
def get(self, counter): <NEW_LINE> <INDENT> return self.counts[counter]
Return total count associated with group
625941c36e29344779a625db
def __init__(self, orig: str, url: str = "https://api.bart.gov/", key: str = "MW9S-E7SL-26DU-VV8V", json: str = "y", cmd: str = "etd"): <NEW_LINE> <INDENT> BartAPI.__init__(self, url, key,json,cmd) <NEW_LINE> self.response = self.get_train_estimates(orig=orig) <NEW_LINE> self.station_list = [] <NEW_LINE> if self.response: <NEW_LINE> <INDENT> self.time = self.response["root"]["time"] <NEW_LINE> self.station_name = self.response["root"]["station"][0]["name"]
Constructor for class. It performs the rest operation on the URL and get the response back It also set the variable which retrieves current time and all the destination station details :param orig: Origin station :param url: URL :param key: Key to validate request :param json: Whether response is needed in JSON or not. :param cmd: etd
625941c3627d3e7fe0d68e16
def glob_files_locally(folder_path, pattern): <NEW_LINE> <INDENT> pattern = os.path.join(folder_path, pattern.lstrip('/')) if pattern else None <NEW_LINE> len_folder_path = len(folder_path) + 1 <NEW_LINE> for root, _, files in os.walk(folder_path): <NEW_LINE> <INDENT> for f in files: <NEW_LINE> <INDENT> full_path = os.path.join(root, f) <NEW_LINE> if not pattern or _match_path(full_path, pattern): <NEW_LINE> <INDENT> yield (full_path, full_path[len_folder_path:])
glob files in local folder based on the given pattern
625941c3009cb60464c6337a
def __init__(self, array_size, readnoise, dark_current, gain): <NEW_LINE> <INDENT> self.array_size = array_size <NEW_LINE> self.readnoise = readnoise <NEW_LINE> self.gain = gain <NEW_LINE> self.dark_current = dark_current <NEW_LINE> self.tint = 1.0 <NEW_LINE> self.coadds = 1 <NEW_LINE> self.fowler = 1 <NEW_LINE> self.scale = 0.1 <NEW_LINE> return
array_size - in units of pixels (2D) readnoise - in units of electrons per read dark_current - in units of electrons per second per pixel gain - in units of electrons per DN
625941c3656771135c3eb834
def doRollover(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super().doRollover() <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self._mail_summary() <NEW_LINE> self._log_state.last_rollover = time()
Overridden method. Perform all inherited behavior and mail any content just rolled out of the current log file.
625941c332920d7e50b28196
def get_recording_requests(self): <NEW_LINE> <INDENT> requests = {} <NEW_LINE> for site in self.record_sites: <NEW_LINE> <INDENT> requests[site] = RecordingRequest(time_start=self.time_start, time_stop=self.time_stop, conditions=self.exp_conditions, record_variable=site) <NEW_LINE> <DEDENT> return requests
Returns a RecordingRequest object or a dictionary of RecordingRequest objects with unique keys representing the recordings that are required from the simulation controller
625941c3c4546d3d9de729f9
def errorString(self): <NEW_LINE> <INDENT> return QString()
QString QStateMachine.errorString()
625941c350812a4eaa59c2ea
def inception_v4(pretrained=False, **kwargs): <NEW_LINE> <INDENT> model = InceptionV4(**kwargs) <NEW_LINE> if pretrained: <NEW_LINE> <INDENT> model.load_state_dict(model_zoo.load_url(model_urls['inceptionv4'])) <NEW_LINE> <DEDENT> for param in model.parameters(): <NEW_LINE> <INDENT> param.requires_grad = True <NEW_LINE> <DEDENT> return model
InceptionV4 model architecture from the `"Inception-v4, Inception-ResNet..." <https://arxiv.org/abs/1602.07261>`_ paper. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
625941c3ff9c53063f47c1bb
def print_options(): <NEW_LINE> <INDENT> print('---------- Documentation bot ----------') <NEW_LINE> print('---------------------------------------------') <NEW_LINE> print('The application must be executed with at ') <NEW_LINE> print('least one argument, accepted arguments are: ') <NEW_LINE> print('help - shows this information pannel ') <NEW_LINE> print('make - checks if config file exists ') <NEW_LINE> print(' generating an default file ') <NEW_LINE> print(' if it doesn\'t ') <NEW_LINE> print('make-db - builds base DB ') <NEW_LINE> print('new-inten - prompts for data to create a new ') <NEW_LINE> print(' intention to the program ') <NEW_LINE> print('startbot - starts the bot ')
Function utilized to print possible commandline options for this application
625941c3287bf620b61d3a2c
def get_or_bust(data_dict, keys): <NEW_LINE> <INDENT> if isinstance(keys, basestring): <NEW_LINE> <INDENT> keys = [keys] <NEW_LINE> <DEDENT> import ckan.logic.schema as schema <NEW_LINE> schema = schema.create_schema_for_required_keys(keys) <NEW_LINE> data_dict, errors = _validate(data_dict, schema) <NEW_LINE> if errors: <NEW_LINE> <INDENT> raise ValidationError(errors) <NEW_LINE> <DEDENT> values = [data_dict[key] for key in keys] <NEW_LINE> if len(values) == 1: <NEW_LINE> <INDENT> return values[0] <NEW_LINE> <DEDENT> return tuple(values)
Return the value(s) from the given data_dict for the given key(s). Usage:: single_value = get_or_bust(data_dict, 'a_key') value_1, value_2 = get_or_bust(data_dict, ['key1', 'key2']) :param data_dict: the dictionary to return the values from :type data_dict: dictionary :param keys: the key(s) for the value(s) to return :type keys: either a string or a list :returns: a single value from the dict if a single key was given, or a tuple of values if a list of keys was given :raises: :py:exc:`ckan.logic.ValidationError` if one of the given keys is not in the given dictionary
625941c3d164cc6175782d15
def extract_total_num(self, smiles_list): <NEW_LINE> <INDENT> return len(smiles_list)
Extracts total number of data which can be parsed We can use this method to determine the value fed to `target_index` option of `parse` method. For example, if we want to extract input feature from 10% of whole dataset, we need to know how many samples are in a file. The returned value of this method may not to be same as the final dataset size. Args: smiles_list (list): list of strings of smiles Returns (int): total number of dataset can be parsed.
625941c3090684286d50ecab
def tshark_supports_json(tshark_version): <NEW_LINE> <INDENT> return tshark_version >= LooseVersion("2.2.0")
Returns boolean indicating json support.
625941c35166f23b2e1a5120
def fetch_stream_from_url(url, config, data=None, handlers=None): <NEW_LINE> <INDENT> return_code, return_message, response = open_url(url, config, data=data, handlers=handlers) <NEW_LINE> if return_code and return_code == http_client_.OK: <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise URLFetchError(return_message)
Returns data retrieved from a URL. @param url: URL to attempt to open @type url: basestring @param config: SSL context configuration @type config: Configuration @param data: HTTP POST data @type data: str @param handlers: list of custom urllib2 handlers to add to the request @type handlers: iterable @return: data retrieved from URL or None @rtype: file derived type
625941c3507cdc57c6306c9e
@shared_task <NEW_LINE> def update_resource_from_row(row): <NEW_LINE> <INDENT> r = Resource(url=row[0]) <NEW_LINE> r._hash = row[1] <NEW_LINE> r.protocol = row[2] <NEW_LINE> r.contenttype = row[3] <NEW_LINE> r.host = row[4] <NEW_LINE> r.port = row[5] <NEW_LINE> r.path = row[6] <NEW_LINE> r.lastFetchDateTime = row[7] <NEW_LINE> r.save()
ORM lookup then update No input validation and foolishly assumes the lookup won't miss.
625941c3711fe17d82542336
def get_qd_conf_value(userid=None, mode='coupon', key='promotion_url', **kw): <NEW_LINE> <INDENT> def _get_default(): <NEW_LINE> <INDENT> if 'default' in kw: <NEW_LINE> <INDENT> return kw['default'] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> default_key = kw.get('default_key', 0) <NEW_LINE> if mode: <NEW_LINE> <INDENT> return (qd_confs[default_key].get(key) or {}).get(mode) or '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return qd_confs[default_key].get(key) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> if 'qd_confs' in kw: <NEW_LINE> <INDENT> qd_confs = kw['qd_confs'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> qd_confs = get_qd_conf() <NEW_LINE> <DEDENT> if 'groupid' in kw: <NEW_LINE> <INDENT> groupid = kw['groupid'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user = apcli.user_by_id(int(userid)) <NEW_LINE> groupid = user['groupid'] if user else 0 <NEW_LINE> <DEDENT> default = _get_default() <NEW_LINE> if mode: <NEW_LINE> <INDENT> if (groupid in qd_confs and key in qd_confs[groupid] and qd_confs[groupid][key]): <NEW_LINE> <INDENT> return qd_confs[groupid][key].get(mode, default) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if groupid in qd_confs: <NEW_LINE> <INDENT> return qd_confs[groupid].get(key) or default <NEW_LINE> <DEDENT> <DEDENT> return default
获取物料的链接 会区分渠道id返回url Args: userid: 商户userid. mode: coupon,红包的物料链接; card,集点的物料链接. key: qd_conf的key值
625941c3d4950a0f3b08c318
def _RunGsUtilWithAnalyticsOutput(self, cmd, expected_status=0): <NEW_LINE> <INDENT> stderr = self.RunGsUtil(['-d'] + cmd, return_stderr=True, expected_status=expected_status, env_vars={'GSUTIL_TEST_ANALYTICS': '2'}) <NEW_LINE> return METRICS_LOG_RE.search(stderr).group()
Runs the gsutil command to check for metrics log output. The env value is set so that the metrics collector in the subprocess will use testing parameters and output the metrics collected to the debugging log, which lets us check for proper collection in the stderr. Args: cmd: The command to run, as a list. expected_status: The expected return code. Returns: The string of metrics output.
625941c3f8510a7c17cf96c3
def add(self, word, excerpt, freq, difficulty): <NEW_LINE> <INDENT> token = word.token <NEW_LINE> self.least_freq = min(freq, self.least_freq) <NEW_LINE> self.tokens.add(token) <NEW_LINE> self.token_with_difficulty[token] = difficulty <NEW_LINE> self.token_with_lang_freq[token] = freq <NEW_LINE> self.token_with_movie_freq[token] += 1 <NEW_LINE> self.token_with_POS[token].add(word.POS) <NEW_LINE> self.word_with_excerpts[word].append(excerpt) <NEW_LINE> self.word_with_movie_freq[word] += 1
Add a word with meta data to the report.
625941c3aad79263cf390a06
def reset(self): <NEW_LINE> <INDENT> self.state = {'blocks':np.zeros((self.num_row,self.num_col)), 'X':0} <NEW_LINE> self.pos_list = [[0,self.min_y]] <NEW_LINE> self.close() <NEW_LINE> self.generateNewRow() <NEW_LINE> self.generateNewRow() <NEW_LINE> self.render_blocks = self.state['blocks'].copy() <NEW_LINE> return self._get_obs('reset')
Reset the environment
625941c3bd1bec0571d905f6
def setParent(self, parent): <NEW_LINE> <INDENT> self.parent=parent <NEW_LINE> self._addPrerequisites()
Sets the parent Page to add requisites on css and js files
625941c3c4546d3d9de729fa
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> return create_packages_and_items(self.shipment, self.cleaned_data['name'], self.cleaned_data['description'], self.cleaned_data['package_quantity'], {})
:return: QuerySet of the packages that were created.
625941c338b623060ff0adb5
def test_run_environment(self):
    """Smoke test: run_mbased should at least execute when given an environment script."""
    fn = ps.general.run_mbased(
        'test.in',
        'path/to/out',
        'test',
        environment='test.sh',
        is_phased=False,
        num_sim=1000000,
        threads=6,
        shell=False,
    )
    # Remove the directory tree the call created.
    shutil.rmtree('path')
Test to make sure the function at least runs with an environment
625941c34d74a7450ccd418b
def stats_data(self):
    """Return stats data for this server.

    Tries the sid-qualified lookup first and falls back to a lookup by
    backend name only.  See the documentation of ``stats_data`` in
    :class:`_Frontend` for the format of the returned data.

    :rtype: ``utils.CSVLine`` object

    Raises:
        KeyError: if this server is unknown to both lookups.
    """
    try:
        data = self.backend.hap_process.servers_stats(
            self.backend.name, self.backend.iid, self._sid)[self.name]
    except KeyError:
        # Fall back to the unqualified lookup.  The original wrapped this
        # in a second try/except KeyError whose handler was a bare
        # ``raise`` -- a no-op -- so a KeyError here simply propagates to
        # the caller, exactly as before.
        data = self.backend.hap_process.servers_stats(
            self.backend.name)[self.name]
    return data
Return stats data Check documentation of ``stats_data`` method in :class:`_Frontend`. :rtype: ``utils.CSVLine`` object
625941c3eab8aa0e5d26db1f
def get_last_option(sct):
    """Return the last (key, value) option pair from section *sct* of trace.ini.

    The ini file lives under the CONFIG_DIR environment directory and is
    created on first use; a missing section is created as well.

    Returns:
        The last ``(key, value)`` tuple of the section, ``[]`` when the
        section is empty or was just created, or ``None`` on any error
        (the error is logged).
    """
    c = ConfigParser.ConfigParser()
    # Build the path once instead of re-joining it at every use.
    ini_path = os.path.join(os.getenv("CONFIG_DIR"), 'trace.ini')
    try:
        if not c.read(ini_path):
            # File missing or unreadable: create an empty one, then take
            # the error path (the original used a bare ``raise`` here).
            with open(ini_path, "w") as fh:
                c.write(fh)
            raise IOError("could not read %s" % ini_path)
        if not c.has_section(sct):
            c.add_section(sct)
            with open(ini_path, "w") as fh:
                c.write(fh)
            return []
        options = c.items(sct)
        # Empty section behaves like a freshly created one.
        return options[-1] if options else []
    except Exception:
        # Narrowed from a bare ``except:``; still best-effort -- log and
        # fall through, returning None as before.
        logging.exception('get last_option file')
Return the last (key, value) option pair from section ``sct`` of trace.ini, or [] if the section is empty or newly created.
625941c366673b3332b92058
def stocGradAscent1(dataMat, classLabels, numIters=150):
    """Improved stochastic gradient ascent for logistic regression.

    Each pass visits every sample exactly once in random order (sampling
    without replacement), and the learning rate ``alpha`` decays with the
    iteration count to damp oscillation.

    Args:
        dataMat: m x n matrix of samples; rows are feature vectors.
        classLabels: sequence of m class labels (0/1).
        numIters: number of full passes over the data.

    Returns:
        Weight vector of length n.
    """
    m, n = shape(dataMat)
    weights = ones(n)
    for j in range(numIters):
        # list() is required: Python 3 ranges do not support ``del``.
        dataIndex = list(range(m))
        for i in range(m):
            # Decaying learning rate; the 0.1 constant keeps it nonzero.
            alpha = 4 / (1.0 + j + i) + 0.1
            randPos = int(random.uniform(0, len(dataIndex)))
            # Bug fix: dereference through dataIndex so each sample is
            # used exactly once per pass.  The original indexed dataMat
            # with the position itself, which allowed repeats and made
            # the ``del`` pointless.
            sampleIdx = dataIndex[randPos]
            h = sigmod(sum(dataMat[sampleIdx] * weights))
            error = classLabels[sampleIdx] - h
            weights = weights + alpha * error * dataMat[sampleIdx]
            del dataIndex[randPos]
    return weights
改进的随机梯度算法 设置迭代次数;每次更新选择的训练样本是随机的 学习率alpha随着迭代次数的增加,逐渐减小 :param dataMat: :param classLabels: :param numIters: :return:
625941c371ff763f4b549650
def get_word_count(string):
    """Return the number of whitespace-separated words in *string*.

    Uses ``str.split()`` with no argument so runs of whitespace count as
    one separator and an empty or all-blank string yields 0.  The
    original split on a literal ``' '``, which counted empty fragments
    for consecutive spaces and returned 1 for ``""``.

    Input:
    - string - The string whose words are counted.
    """
    return len(string.split())
Returns the number of words in a string. Input: - string - The string whose words are counted.
625941c30c0af96317bb81af
def initialize_chain(test_dir):
    """Create (or copy from cache) a 200-block-long chain and 4 wallets.

    blibraryd and blibrary-cli must be in the search path.  The freshly
    mined chain is cached under ``cache/node*`` and copied into
    *test_dir* on every call.
    """
    if not os.path.isdir(os.path.join("cache", "node0")):
        # No cache yet: start four daemons, mine the chain, then cache it.
        # NOTE(review): "/dev/null" is POSIX-only -- presumably fine for
        # this test harness, but os.devnull would be portable.
        devnull = open("/dev/null", "w+")
        for i in range(4):
            datadir=initialize_datadir("cache", i)
            args = [ os.getenv("BITCOIND", "blibraryd"), "-keypool=1", "-datadir="+datadir, "-discover=0" ]
            if i > 0:
                # Nodes 1-3 connect to node 0 so the four form one network.
                args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
            bitcoind_processes[i] = subprocess.Popen(args)
            # -rpcwait blocks until the daemon's RPC interface is up.
            subprocess.check_call([ os.getenv("BITCOINCLI", "blibrary-cli"), "-datadir="+datadir, "-rpcwait", "getblockcount"], stdout=devnull)
        devnull.close()
        rpcs = []
        for i in range(4):
            try:
                url = "http://rt:rt@127.0.0.1:%d"%(rpc_port(i),)
                rpcs.append(AuthServiceProxy(url))
            except:
                sys.stderr.write("Error connecting to "+url+"\n")
                sys.exit(1)
        # Mine 25 blocks per node, two rounds (200 blocks total),
        # advancing the mocked node clocks 10 minutes per block.
        block_time = 1388534400
        for i in range(2):
            for peer in range(4):
                for j in range(25):
                    set_node_times(rpcs, block_time)
                    rpcs[peer].setgenerate(True, 1)
                    block_time += 10*60
                # Sync before the next peer mines, so the chain doesn't fork.
                sync_blocks(rpcs)
        stop_nodes(rpcs)
        wait_bitcoinds()
        # Strip volatile per-run files so the cache is reusable.
        for i in range(4):
            os.remove(log_filename("cache", i, "debug.log"))
            os.remove(log_filename("cache", i, "db.log"))
            os.remove(log_filename("cache", i, "peers.dat"))
            os.remove(log_filename("cache", i, "fee_estimates.dat"))
    for i in range(4):
        # Copy the cached chain into the test directory, then rewrite the
        # config files for this test's port numbers.
        from_dir = os.path.join("cache", "node"+str(i))
        to_dir = os.path.join(test_dir, "node"+str(i))
        shutil.copytree(from_dir, to_dir)
        initialize_datadir(test_dir, i)
Create (or copy from cache) a 200-block-long chain and 4 wallets. blibraryd and blibrary-cli must be in search path.
625941c3b830903b967e98d4
def generate_DeepConvLSTM_model(
        x_shape, class_number, filters, lstm_dims, learning_rate=0.01,
        regularization_rate=0.01, metrics=['accuracy']):
    """Build and compile a DeepConvLSTM network.

    Convolutional feature extraction followed by stacked LSTM layers, as
    in Ordonez et al., 2016 (http://dx.doi.org/10.3390/s16010115).

    Parameters
    ----------
    x_shape : tuple
        Input shape: (num_samples, num_timesteps, num_channels).
    class_number : int
        Number of classes for the classification task.
    filters : list of ints
        Number of filters for each convolutional layer.
    lstm_dims : list of ints
        Number of hidden nodes for each LSTM layer.
    learning_rate : float
        Learning rate for the Adam optimizer.
    regularization_rate : float
        L2 regularization rate for the convolutions.
    metrics : list
        Metrics to calculate on the validation set.

    Returns
    -------
    model : Keras model
        The compiled Keras model.
    """
    seq_len = x_shape[1]
    n_channels = x_shape[2]
    n_classes = class_number
    init = 'lecun_uniform'

    model = Sequential()
    model.add(BatchNormalization(input_shape=(seq_len, n_channels)))
    # Add a singleton axis so Conv2D slides along the time axis only.
    model.add(Reshape(target_shape=(seq_len, n_channels, 1)))
    for n_filters in filters:
        model.add(Convolution2D(n_filters, kernel_size=(3, 1),
                                padding='same',
                                kernel_regularizer=l2(regularization_rate),
                                kernel_initializer=init))
        model.add(BatchNormalization())
        model.add(Activation('relu'))
    # Fold the filter axis back into the channels for the recurrent part.
    model.add(Reshape(target_shape=(seq_len, filters[-1] * n_channels)))
    for n_units in lstm_dims:
        model.add(CuDNNLSTM(units=n_units, return_sequences=True))
    model.add(Dropout(0.5))
    model.add(GlobalAveragePooling1D())
    model.add(Dense(units=n_classes, kernel_initializer=init))
    model.add(BatchNormalization())
    model.add(Activation("softmax"))

    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(lr=learning_rate),
                  metrics=metrics)
    return model
Generate a model with convolution and LSTM layers. See Ordonez et al., 2016, http://dx.doi.org/10.3390/s16010115 Parameters ---------- x_shape : tuple Shape of the input dataset: (num_samples, num_timesteps, num_channels) class_number : int Number of classes for classification task filters : list of ints number of filters for each convolutional layer lstm_dims : list of ints number of hidden nodes for each LSTM layer learning_rate : float learning rate regularization_rate : float regularization rate metrics : list Metrics to calculate on the validation set. See https://keras.io/metrics/ for possible values. Returns ------- model : Keras model The compiled Keras model
625941c399fddb7c1c9de359
@define(ns, 'dump_stack')
def dumpStack(cat):
    # Non-destructively dump the entire contents of the stack to the
    # console, rendered in the namespace's info colour.  Most useful when
    # stack echoing in the REPL is turned off.
    stack_repr = str(cat.stack)
    cat.output(stack_repr, cat.ns.info_colour)
dump_stack : (-- -> --) desc: Non-destructively dumps the entire contents of the stack to the console Most useful if stack output in the REPL is turned off Example: dump_stack tags: debug,console,stack,dump,display
625941c34f88993c3716c030
def remove_symbol_from_dist(dist, index):
    """Return a copy of *dist* renormalized as if symbol *index* were removed.

    Args:
        dist: Distribution whose ``prob`` ndarray holds the probabilities.
        index: position (0 .. len(prob)-1) of the symbol to zero out.

    Returns:
        A new Distribution with probability 0 at *index* and the
        remaining entries rescaled to sum to 1.

    Raises:
        TypeError: if *dist* is not a Distribution.
    """
    if type(dist) is not Distribution:
        # Fixed typo in the original message ("an object ot type").
        raise TypeError(
            "remove_symbol_from_dist got an object of type {0}".format(type(dist)))
    new_prob = dist.prob.copy()
    new_prob[index] = 0
    new_prob /= sum(new_prob)
    return Distribution(new_prob)
prob is an ndarray representing a probability distribution. index is a number between 0 and the number of symbols minus one ( len(prob)-1 ). Return the probability distribution as if the element at 'index' were no longer available.
625941c3e8904600ed9f1ef2
def read_from_file(filename="object_data.csv"):
    """Print the first column of a header-less CSV file, one value per line.

    Test helper to check how the stored object data reads back.  The
    filename is now a parameter (defaulting to the original hard-coded
    "object_data.csv") so other files can be inspected too.

    Args:
        filename: path of the CSV file to read.

    Returns:
        None; output goes to stdout.
    """
    with open(filename, 'r') as data:
        file_data = pd.read_csv(data, header=None)
        # .values is already iterable; no need to materialize a list.
        for row in file_data.values:
            print(row[0])
This is just a test function to determine how the file data will be manipulated or if the data was stored correctly :return:
625941c323e79379d52ee52d
@server.route('/smdj')
def smdj():
    """Use bottle to serve up the modified webpage named in query param ``x``.

    Returns the filtered page content, or a usage message when ``x`` is
    missing.
    """
    url = bottle.request.query.get('x')
    svrurl = "http://{}:{}/smdj".format(servername, portnum)
    # Bug fix: check for a missing parameter BEFORE parsing.  The
    # original called urlparse(url) first, which raises on None and made
    # this usage branch unreachable.
    if url is None:
        return "Usage: {}?x=http://example.com/something".format(svrurl)
    urldict = urllib.parse.urlparse(url)
    netloc = urldict.netloc
    webcontent = filter_content(get_webpage(url), svrurl, netloc)
    return '{}'.format(webcontent)
Use bottle to serve up the modified webpage
625941c310dbd63aa1bd2b6c