code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def split_by_sentences(self): <NEW_LINE> <INDENT> return self.split_by(SENTENCES)
Split the text into individual sentences.
625941bf3346ee7daa2b2ca6
def generate_url(self, expires_in, method='GET', headers=None, query_auth=True, force_http=False, response_headers=None): <NEW_LINE> <INDENT> return self.bucket.connection.generate_url(expires_in, method, self.bucket.name, self.name, headers, query_auth, force_http, response_headers)
Generate a URL to access this key. :type expires_in: int :param expires_in: How long the url is valid for, in seconds :type method: string :param method: The method to use for retrieving the file (default is GET) :type headers: dict :param headers: Any headers to pass along in the request :type query_auth: bool :param query_auth: :rtype: string :return: The URL to access the key
625941bf07d97122c41787c2
def generate_feynman_rule(self, fields, momentum, *args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert len(fields)==2 <NEW_LINE> <DEDENT> except AssertionError as e: <NEW_LINE> <INDENT> logger.error("Error when generating the feynman rule for Propagator {}".format(self)) <NEW_LINE> logger.error("Input field list length is not 2. See below") <NEW_LINE> logger.error(fields) <NEW_LINE> logger.error(e) <NEW_LINE> raise <NEW_LINE> <DEDENT> from_field = fields[0] <NEW_LINE> to_field = fields[1] <NEW_LINE> feynman_rule = self.feynman_rule(from_field,to_field,momentum,*args) <NEW_LINE> return feynman_rule
Create a string corresponding to the Feynman rule for the qgraf_parser Propagator Parameters ---------- fields : fields: list of qgraf_parser.diagram_elements.DiagramField list of fields connected to this propagator as [from_field,to_field] in terms of particle flow (i.e. opposite to Dirac algebra index order). momentum : str args : Returns ------- str the expression of the feynman rule for this interaction with a specific choice of fields and momentum.
625941bf15fb5d323cde0a48
def influence(session, parent_id, child_id): <NEW_LINE> <INDENT> inf = Influence(parent=parent_id, child=child_id) <NEW_LINE> session.add(inf)
Creates an Influence row, linking the parent genre to the child genre by genre database id
625941bf07f4c71912b113bc
def get_word_to_display(word, letters_to_show,): <NEW_LINE> <INDENT> output_characters = [] <NEW_LINE> for letter in word.lower(): <NEW_LINE> <INDENT> if letter in letters_to_show: <NEW_LINE> <INDENT> output_characters.append(letter.upper()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output_characters.append("_") <NEW_LINE> <DEDENT> <DEDENT> return " ".join(output_characters)
Given a word and guesses show the user "what the word looks like with correct guesses filled in and incorrect guesses left blank as "_".
625941bf5f7d997b871749d1
def test_base(self): <NEW_LINE> <INDENT> self.render_config_template( path=os.path.abspath(self.working_dir) + "/log/*" ) <NEW_LINE> stocksbeat_proc = self.start_beat() <NEW_LINE> self.wait_until(lambda: self.log_contains("stocksbeat is running")) <NEW_LINE> exit_code = stocksbeat_proc.kill_and_wait() <NEW_LINE> assert exit_code == 0
Basic test with exiting stocksbeat normally
625941bf73bcbd0ca4b2bfb2
def matplotlib2svgwrite(fig, svg, insert, size=None, method="firm", image_format=None, **kwargs): <NEW_LINE> <INDENT> if image_format is None: <NEW_LINE> <INDENT> image_format = dict(loose="png", firm="svg")[method] <NEW_LINE> <DEDENT> if method == "firm" and image_format != "svg": <NEW_LINE> <INDENT> raise ValueError("Only SVG images can be embedded with the 'firm' method") <NEW_LINE> <DEDENT> if method == "firm" and svg._parameter.profile != "full": <NEW_LINE> <INDENT> raise ValueError("'firm' method requires a full svg profile") <NEW_LINE> <DEDENT> image_bin = StringIO() <NEW_LINE> fig.savefig(image_bin, format=image_format, **kwargs) <NEW_LINE> image_bin.seek(0) <NEW_LINE> if method == "loose": <NEW_LINE> <INDENT> image_str = "data:image/{};base64,".format(image_format) + base64.b64encode(image_bin.read()) <NEW_LINE> svg.add(svg.image( image_str, insert=insert, size=size, )) <NEW_LINE> <DEDENT> elif method == "firm": <NEW_LINE> <INDENT> root = ElementTree.fromstring(image_bin.read()) <NEW_LINE> root.attrib["x"], root.attrib["y"] = map(str, insert) <NEW_LINE> if size is not None: <NEW_LINE> <INDENT> root.attrib["width"], root.attrib["height"] = map(str, size) <NEW_LINE> <DEDENT> esvg = svg.g() <NEW_LINE> esvg.get_xml = lambda: root <NEW_LINE> svg.add(esvg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Illegal 'method' value")
Saves a matplotlib image to an existing svgwrite object. Args: fig (matplotlib.figure.Figure): a figure to save; svg (svgwrite.Drawing): an svg drawing to save to; insert (tuple): a tuple of ints defining destination to insert a drawing; size (tuple): size of the inserted image; method (str): the embedding method: either 'loose' (the plot is rasterized) or 'firm' (the plot's svg is embedded via <svg> tag); Kwargs: The kwargs are passed to ``fig.savefig`` used to print the plot.
625941bfbf627c535bc1310a
def __init__(self, logger, mongos_options): <NEW_LINE> <INDENT> self.mongos_executable = utils.default_if_none(config.MONGOS_EXECUTABLE, config.DEFAULT_MONGOS_EXECUTABLE) <NEW_LINE> TestCase.__init__(self, logger, "mongos", self.mongos_executable) <NEW_LINE> self.options = mongos_options.copy()
Initializes the mongos test and saves the options.
625941bf283ffb24f3c55840
def psi(self, z): <NEW_LINE> <INDENT> z = np.asarray(z) <NEW_LINE> a = self.a; b = self.b; c = self.c <NEW_LINE> t1, t2, t3 = self._subset(z) <NEW_LINE> s = np.sign(z) <NEW_LINE> z = np.fabs(z) <NEW_LINE> v = s * (t1 * z + t2 * a*s + t3 * a*s * (c - z) / (c - b)) <NEW_LINE> return v
The psi function for Hampel's estimator The analytic derivative of rho Parameters ---------- z : array-like 1d array Returns ------- array psi(z) = z for |z| <= a psi(z) = a*sign(z) for a < |z| <= b psi(z) = a*sign(z)*(c - |z|)/(c-b) for b < |z| <= c psi(z) = 0 for |z| > c
625941bf460517430c3940c7
def search_library(): <NEW_LINE> <INDENT> kw = input('library keyword: ') <NEW_LINE> resp = request_get('https://api.cdnjs.com/libraries?search={0}'.format(kw)) <NEW_LINE> if resp.ok: <NEW_LINE> <INDENT> data = json.loads(resp.text) <NEW_LINE> if isinstance(data, dict) and 'results' in data: <NEW_LINE> <INDENT> results = data['results'] <NEW_LINE> results = sorted(list(map(lambda item: item['name'], results)), reverse=False) <NEW_LINE> completer = WordCompleter(results, ignore_case=True, match_middle=True) <NEW_LINE> selected = prompt('choose library: ', completer=completer, complete_while_typing=True) <NEW_LINE> if selected in results: <NEW_LINE> <INDENT> print('your choice is:', end=' ') <NEW_LINE> print_formatted_text(ANSI('\x1b[91m{0}'.format(selected))) <NEW_LINE> return selected <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('canceled')
search library with keyword inputed
625941bfd58c6744b4257b9c
def test_dict_of_applications(self): <NEW_LINE> <INDENT> config = dict( version=1, applications={ 'mysql-hybridcluster': dict( image='flocker/mysql:v1.0.0', volume={'mountpoint': b'/var/mysql/data'} ), 'site-hybridcluster': { 'image': 'flocker/wordpress:v1.0.0', 'ports': [dict(internal=80, external=8080)], 'environment': { 'MYSQL_PORT_3306_TCP': 'tcp://172.16.255.250:3306', 'WP_ADMIN_USERNAME': 'administrator', }, } } ) <NEW_LINE> parser = FlockerConfiguration(config) <NEW_LINE> applications = parser.applications() <NEW_LINE> expected_applications = { 'mysql-hybridcluster': Application( name='mysql-hybridcluster', image=DockerImage(repository='flocker/mysql', tag='v1.0.0'), ports=frozenset(), links=frozenset(), volume=AttachedVolume( name='mysql-hybridcluster', mountpoint=FilePath(b'/var/mysql/data'))), 'site-hybridcluster': Application( name='site-hybridcluster', image=DockerImage(repository='flocker/wordpress', tag='v1.0.0'), ports=frozenset([Port(internal_port=80, external_port=8080)]), links=frozenset(), environment=frozenset({ 'MYSQL_PORT_3306_TCP': 'tcp://172.16.255.250:3306', 'WP_ADMIN_USERNAME': 'administrator' }.items()) ), } <NEW_LINE> self.assertEqual(expected_applications, applications)
``Configuration.applications`` returns a ``dict`` of ``Application`` instances, one for each application key in the supplied configuration.
625941bfa17c0f6771cbdf8f
@deprecate_with_version('read_img_data deprecated. ' 'Please use ``img.dataobj.get_unscaled()`` instead.', '3.2', '5.0') <NEW_LINE> def read_img_data(img, prefer='scaled'): <NEW_LINE> <INDENT> if prefer not in ('scaled', 'unscaled'): <NEW_LINE> <INDENT> raise ValueError(f'Invalid string "{prefer}" for "prefer"') <NEW_LINE> <DEDENT> hdr = img.header <NEW_LINE> if not hasattr(hdr, 'raw_data_from_fileobj'): <NEW_LINE> <INDENT> if prefer == 'unscaled': <NEW_LINE> <INDENT> raise ValueError("Can only do unscaled for Analyze types") <NEW_LINE> <DEDENT> return np.array(img.dataobj) <NEW_LINE> <DEDENT> img_fh = img.file_map['image'] <NEW_LINE> img_file_like = (img_fh.filename if img_fh.fileobj is None else img_fh.fileobj) <NEW_LINE> if img_file_like is None: <NEW_LINE> <INDENT> raise ImageFileError('No image file specified for this image') <NEW_LINE> <DEDENT> hdr = img.header <NEW_LINE> dao = img.dataobj <NEW_LINE> default_offset = hdr.get_data_offset() == 0 <NEW_LINE> default_scaling = hdr.get_slope_inter() == (None, None) <NEW_LINE> if is_proxy(dao) and (default_offset or default_scaling): <NEW_LINE> <INDENT> hdr = hdr.copy() <NEW_LINE> if default_offset and dao.offset != 0: <NEW_LINE> <INDENT> hdr.set_data_offset(dao.offset) <NEW_LINE> <DEDENT> if default_scaling and (dao.slope, dao.inter) != (1, 0): <NEW_LINE> <INDENT> hdr.set_slope_inter(dao.slope, dao.inter) <NEW_LINE> <DEDENT> <DEDENT> with ImageOpener(img_file_like) as fileobj: <NEW_LINE> <INDENT> if prefer == 'scaled': <NEW_LINE> <INDENT> return hdr.data_from_fileobj(fileobj) <NEW_LINE> <DEDENT> return hdr.raw_data_from_fileobj(fileobj)
Read data from image associated with files If you want unscaled data, please use ``img.dataobj.get_unscaled()`` instead. If you want scaled data, use ``img.get_fdata()`` (which will cache the loaded array) or ``np.array(img.dataobj)`` (which won't cache the array). If you want to load the data as for a modified header, save the image with the modified header, and reload. Parameters ---------- img : ``SpatialImage`` Image with valid image file in ``img.file_map``. Unlike the ``img.get_fdata()`` method, this function returns the data read from the image file, as specified by the *current* image header and *current* image files. prefer : str, optional Can be 'scaled' - in which case we return the data with the scaling suggested by the format, or 'unscaled', in which case we return, if we can, the raw data from the image file, without the scaling applied. Returns ------- arr : ndarray array as read from file, given parameters in header Notes ----- Summary: please use the ``get_data`` method of `img` instead of this function unless you are sure what you are doing. In general, you will probably prefer ``prefer='scaled'``, because this gives the data as the image format expects to return it. Use `prefer` == 'unscaled' with care; the modified Analyze-type formats such as SPM formats, and nifti1, specify that the image data array is given by the raw data on disk, multiplied by a scalefactor and maybe with the addition of a constant. This function, with ``unscaled`` returns the data on the disk, without these format-specific scalings applied. Please use this function only if you absolutely need the unscaled data, and the magnitude of the data, as given by the scalefactor, is not relevant to your application. The Analyze-type formats have a single scalefactor +/- offset per image on disk. 
If you do not care about the absolute values, and will be removing the mean from the data, then the unscaled values will have preserved intensity ratios compared to the mean-centered scaled data. However, this is not necessarily true of other formats with more complicated scaling - such as MINC.
625941bfbf627c535bc1310b
@vary_on_headers('X-Requested-With') <NEW_LINE> def extension_detail(request, addon): <NEW_LINE> <INDENT> comp_apps = addon.compatible_apps <NEW_LINE> if comp_apps and request.APP not in comp_apps: <NEW_LINE> <INDENT> prefixer = urlresolvers.get_url_prefix() <NEW_LINE> prefixer.app = comp_apps.keys()[0].short <NEW_LINE> return redirect('addons.detail', addon.slug, permanent=True) <NEW_LINE> <DEDENT> recommended = Addon.objects.listed(request.APP).filter( recommended_for__addon=addon)[:6] <NEW_LINE> collections = Collection.objects.listed().filter( addons=addon, application__id=request.APP.id) <NEW_LINE> ctx = { 'addon': addon, 'src': request.GET.get('src', 'dp-btn-primary'), 'version_src': request.GET.get('src', 'dp-btn-version'), 'tags': addon.tags.not_blacklisted(), 'grouped_ratings': GroupedRating.get(addon.id), 'recommendations': recommended, 'review_form': ReviewForm(), 'reviews': Review.objects.valid().filter(addon=addon, is_latest=True), 'get_replies': Review.get_replies, 'collections': collections.order_by('-subscribers')[:3], 'abuse_form': AbuseForm(request=request), } <NEW_LINE> if request.is_ajax(): <NEW_LINE> <INDENT> ctx['author_addons'] = addon.authors_other_addons(app=request.APP)[:6] <NEW_LINE> return render(request, 'addons/impala/details-more.html', ctx) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render(request, 'addons/impala/details.html', ctx)
Extensions details page.
625941bf15baa723493c3eb0
def handle_list_streams(args): <NEW_LINE> <INDENT> bucket = args.bucket <NEW_LINE> def fun(conn): <NEW_LINE> <INDENT> return conn.list_streams(bucket) <NEW_LINE> <DEDENT> do_when_authenticated(args, fun)
get events
625941bfe64d504609d7477c
def set_restitution_coeff(self, coeff = 0.05): <NEW_LINE> <INDENT> assert(coeff < 0.1) <NEW_LINE> self.restitution = coeff
Sets the restitution coefficient of the net. For ball hitting the net during simulation.
625941bfb57a9660fec337bd
def get_branch_hash(self, branch_name): <NEW_LINE> <INDENT> return self._send({'name': 'getBranchHash', 'args': [branch_name]})
Retrieves the commit hash for the head of the branch. :param branch_name: Name of branch. :type branch_name: str :returns: The commit hash. :rtype: str :raises JSError: The result of the execution.
625941bfd164cc6175782c8a
def planwatch(self, hours=12): <NEW_LINE> <INDENT> post = {'mytime': str(hours)} <NEW_LINE> response = self._get_page('planwatch.php', post=post) <NEW_LINE> soup = bs4.BeautifulSoup(response.text, 'html5lib') <NEW_LINE> results = soup.find('ul', {'id': 'new_plan_list'}) <NEW_LINE> new_plans = results.findAll('div', {'class': 'newplan'}) <NEW_LINE> resultlist = [] <NEW_LINE> for div in new_plans: <NEW_LINE> <INDENT> user = div.find('a', {'class': 'planlove'}).contents[0] <NEW_LINE> time = div.find('span').contents[0] <NEW_LINE> time = parse_plans_date(time, tz_name=self.server_tz) <NEW_LINE> resultlist.append((user, time)) <NEW_LINE> <DEDENT> return resultlist
Return plans updated in the last ``hours`` hours. The result is a list of (username, timestamp) 2-tuples.
625941bf4c3428357757c266
def LNKS_bnds(theta=None, pathway=1, bnd_mode=0): <NEW_LINE> <INDENT> if bnd_mode == 0: <NEW_LINE> <INDENT> bnd_S = _sb.SC1DF_bnds() <NEW_LINE> bnd_LNK = LNK_bnds(pathway) <NEW_LINE> bnds = bnd_LNK + bnd_S <NEW_LINE> <DEDENT> elif bnd_mode == 1: <NEW_LINE> <INDENT> bnd_LNK = LNK_bnds(pathway) <NEW_LINE> if pathway == 1: <NEW_LINE> <INDENT> bnd_S = tuple([(theta[i],theta[i]) for i in range(17,theta.size)]) <NEW_LINE> <DEDENT> elif pathway == 2: <NEW_LINE> <INDENT> bnd_S = tuple([(theta[i],theta[i]) for i in range(36,theta.size)]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('The pathway parameter should be 1 or 2') <NEW_LINE> <DEDENT> bnds = bnd_LNK + bnd_S <NEW_LINE> <DEDENT> elif bnd_mode == 2: <NEW_LINE> <INDENT> if pathway == 1: <NEW_LINE> <INDENT> bnd_LNK = tuple([(theta[i],theta[i]) for i in range(17)]) <NEW_LINE> <DEDENT> elif pathway == 2: <NEW_LINE> <INDENT> bnd_LNK = tuple([(theta[i],theta[i]) for i in range(36)]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('The pathway parameter should be 1 or 2') <NEW_LINE> <DEDENT> bnd_S = _sb.SC1DF_bnds() <NEW_LINE> bnds = bnd_LNK + bnd_S <NEW_LINE> <DEDENT> return bnds
LNKS parameter bounds for optimization Input ----- theta (ndarray): initial LNKS parameters (theta) bnd_mode (int): Different modes of LNKS model parameter boundary for optimization 0: fit LNKS model 1: fit LNK (S fixed) 2: fit S (LNK fixed) this is used with fitmodel.py (3x-optim/LNKS/program/)
625941bf046cf37aa974cc86
def get_token(self): <NEW_LINE> <INDENT> return create_access_token(self._token)
Return the the serialized JWT token.
625941bfac7a0e7691ed400d
def collect_values( privacy=True, types=True, licenses=True, is_about=True, formats=True, keywords=True, ): <NEW_LINE> <INDENT> privacy_values = set() <NEW_LINE> licenses_values = set() <NEW_LINE> types_datatype_values = set() <NEW_LINE> is_about_values = set() <NEW_LINE> distributions_formats = set() <NEW_LINE> keywords_values = set() <NEW_LINE> dats_files_count = 0 <NEW_LINE> for path, _, files in os.walk(PROJECTS_DIR): <NEW_LINE> <INDENT> if "DATS.json" in files: <NEW_LINE> <INDENT> dats_files_count += 1 <NEW_LINE> dats_file = os.path.join(path, "DATS.json") <NEW_LINE> with open(dats_file, encoding="utf-8") as json_file: <NEW_LINE> <INDENT> dats_data = json.load(json_file) <NEW_LINE> if privacy and "privacy" in dats_data: <NEW_LINE> <INDENT> privacy_values.add(dats_data["privacy"]) <NEW_LINE> <DEDENT> if types: <NEW_LINE> <INDENT> for typ in dats_data["types"]: <NEW_LINE> <INDENT> datatype_schemas = [ "information", "method", "platform", "instrument", ] <NEW_LINE> types_datatype_values.update( {typ[t]["value"] for t in datatype_schemas if t in typ}, ) <NEW_LINE> <DEDENT> <DEDENT> if licenses: <NEW_LINE> <INDENT> licenses_values.update( {licence["name"] for licence in dats_data["licenses"]}, ) <NEW_LINE> <DEDENT> if is_about and "isAbout" in dats_data: <NEW_LINE> <INDENT> for each_is_about in dats_data["isAbout"]: <NEW_LINE> <INDENT> if "name" in each_is_about: <NEW_LINE> <INDENT> is_about_values.add(each_is_about["name"]) <NEW_LINE> <DEDENT> elif "value" in each_is_about: <NEW_LINE> <INDENT> is_about_values.add(each_is_about["value"]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if formats: <NEW_LINE> <INDENT> for dist in dats_data["distributions"]: <NEW_LINE> <INDENT> if "formats" in dist: <NEW_LINE> <INDENT> distributions_formats.update({f for f in dist["formats"]}) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if keywords: <NEW_LINE> <INDENT> keywords_values.update({k["value"] for k in dats_data["keywords"]}) <NEW_LINE> 
<DEDENT> <DEDENT> <DEDENT> <DEDENT> report = {} <NEW_LINE> for key, value in zip( ["privacy", "licenses", "types", "is_about", "formats", "keywords"], [ privacy_values, licenses_values, types_datatype_values, is_about_values, distributions_formats, keywords_values, ], ): <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> report[key] = { "count": len(value), "values": list(value), } <NEW_LINE> <DEDENT> <DEDENT> return report, dats_files_count
Iterates over the projects directory content retrieving DATS file for each project. Aggregates all values and their count for selected properties in the report object. :param : set to False in order to exclude the property from the final report :return: dict object report, int how many DATS files were processed
625941bf26068e7796caec17
def checkGlobalSelection(self, elements): <NEW_LINE> <INDENT> from ..gui import selection <NEW_LINE> globalSelection = selection.getGlobalSelection() <NEW_LINE> if globalSelection and globalSelection.level is self and any(elem in globalSelection.elements() for elem in elements): <NEW_LINE> <INDENT> selection.setGlobalSelection(None)
Erases global selection when elements of it have been removed.
625941bf63f4b57ef000105c
def click_css(page, css, source_index=0, require_notification=True): <NEW_LINE> <INDENT> def _is_visible(element): <NEW_LINE> <INDENT> return element.is_displayed() and all(size > 0 for size in element.size.itervalues()) <NEW_LINE> <DEDENT> disable_animations(page) <NEW_LINE> page.q(css=css).filter(_is_visible).nth(source_index).click() <NEW_LINE> if require_notification: <NEW_LINE> <INDENT> sync_on_notification(page) <NEW_LINE> <DEDENT> page.wait_for_ajax()
Click the button/link with the given css and index on the specified page (subclass of PageObject). Will only consider elements that are displayed and have a height and width greater than zero. If require_notification is False (default value is True), the method will return immediately. Otherwise, it will wait for the "mini-notification" to appear and disappear.
625941bf82261d6c526ab3d8
def update_site_forward(apps, schema_editor): <NEW_LINE> <INDENT> Site = apps.get_model("sites", "Site") <NEW_LINE> Site.objects.update_or_create( id=settings.SITE_ID, defaults={ "domain": "example.com", "name": "kgraph" } )
Set site domain and name.
625941bf711fe17d825422ad
def main(argv): <NEW_LINE> <INDENT> if len(argv) > 1: <NEW_LINE> <INDENT> props_to_get = argv[1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> props_to_get = DEFAULT_PROPS_TO_GET <NEW_LINE> <DEDENT> adb = FindADB() <NEW_LINE> if not adb: <NEW_LINE> <INDENT> raise Exception('Could not find ADB!') <NEW_LINE> <DEDENT> proc = subprocess.Popen([adb, 'devices'], stdout=subprocess.PIPE) <NEW_LINE> code = proc.wait() <NEW_LINE> if code != 0: <NEW_LINE> <INDENT> raise Exception('Failure in ADB: could not find attached devices.') <NEW_LINE> <DEDENT> header = ['Serial', 'Status'] <NEW_LINE> header.extend(props_to_get) <NEW_LINE> output_lines = [header] <NEW_LINE> for line in proc.stdout: <NEW_LINE> <INDENT> line = line.rstrip() <NEW_LINE> if line != 'List of devices attached' and line != '': <NEW_LINE> <INDENT> line_list = shlex.split(line) <NEW_LINE> serial = line_list[0] <NEW_LINE> status = line_list[1] <NEW_LINE> device_info = [serial, status] <NEW_LINE> device_info.extend(GetDeviceInfo(adb, serial, props_to_get)) <NEW_LINE> output_lines.append(device_info) <NEW_LINE> <DEDENT> <DEDENT> PrintPrettyTable(output_lines)
Print out information about connected Android devices. By default, print the serial number, status, device name, OS version, and build type of each device. If any arguments are supplied on the command line, print the serial number and status for each device along with values for those arguments interpreted as properties.
625941bf498bea3a759b99ec
def test_delete_multiple_objects(self): <NEW_LINE> <INDENT> self.bos.put_object_from_string(self.BUCKET, 'hello1', 'Hello World') <NEW_LINE> self.bos.put_object_from_string(self.BUCKET, 'hello2', u'hello world') <NEW_LINE> key_list = ['hello1', 'hello2'] <NEW_LINE> response = self.bos.delete_multiple_objects(self.BUCKET, key_list) <NEW_LINE> self.check_headers(response)
test delete_multiple_objects function normally
625941bf7c178a314d6ef397
def test_three_nodes_1(self): <NEW_LINE> <INDENT> node_1 = (0, 0) <NEW_LINE> node_2 = (0, 1) <NEW_LINE> node_3 = (1, 0) <NEW_LINE> graph = { node_1: {"neighbors": [node_2, node_3]}, node_2: {"neighbors": [node_1, node_3]}, node_3: {"neighbors": [node_1, node_2]} } <NEW_LINE> path, distance = dijkstra(graph, node_1, node_2) <NEW_LINE> self.assertEqual(path, [(0, 0), (0, 1)]) <NEW_LINE> self.assertEqual(distance, 1.0)
Tres nodos equidistantes.
625941bf63b5f9789fde7021
def hay_superficie(juego, x, y): <NEW_LINE> <INDENT> return juego[y][x][0]
Devuelve True si la celda (x, y) está ocupada por la superficie consolidada. La coordenada (0, 0) se refiere a la posición que está en la esquina superior izquierda de la grilla.
625941bfa934411ee37515cf
def fc2(self): <NEW_LINE> <INDENT> image = Input(shape=(4096,)) <NEW_LINE> layer = Dense(4096, activation='relu')(image) <NEW_LINE> self.set_weights_for(layer, 'vgg_16-fc7') <NEW_LINE> layer = Dense(1000, activation='softmax') <NEW_LINE> self.set_weights_for(layer, 'vgg_16-fc8') <NEW_LINE> model = Model(image, layer) <NEW_LINE> return model
Block 7 is the last two fully connected layer.
625941bf8c3a8732951582f4
def appel(self): <NEW_LINE> <INDENT> s = StatsCartes() <NEW_LINE> s.calcul(self.cartes) <NEW_LINE> points = s.info.pourcentPoints <NEW_LINE> objPoints = constantes.POINT_CONTRAT[s.info.nbOutdlers] <NEW_LINE> limit = (objPoints * 100) / constantes.POINT['total'] <NEW_LINE> if points < limit * self.algo['petite']: <NEW_LINE> <INDENT> return 'passe' <NEW_LINE> <DEDENT> if (points >= limit * self.algo['petite'] and points < limit * self.algo['garde']): <NEW_LINE> <INDENT> return 'petite' <NEW_LINE> <DEDENT> if (points >= limit * self.algo['garde'] and points < limit * self.algo['garde_sans']): <NEW_LINE> <INDENT> return 'garde' <NEW_LINE> <DEDENT> if (points >= limit * self.algo['garde_sans'] and points < limit * self.algo['garde_contre']): <NEW_LINE> <INDENT> return 'garde_sans' <NEW_LINE> <DEDENT> return 'garde_contre'
analyse du jeu et determine le type d'appel
625941bf435de62698dfdb88
def check_handtrd(self): <NEW_LINE> <INDENT> hastrd = False <NEW_LINE> handlst = self.read_trdlist(handtrd=True) <NEW_LINE> if handlst['in']: <NEW_LINE> <INDENT> self.update_holdlist(handlst['in'], 'T0') <NEW_LINE> Portfolio.add_pool(self._holdings['T0'], self._pofname) <NEW_LINE> self._noposition = False <NEW_LINE> hastrd = True <NEW_LINE> print('%s : trading by hand found : In' % self._pofname) <NEW_LINE> <DEDENT> if handlst['out']: <NEW_LINE> <INDENT> self.update_holdlist(handlst['out'], 'T1') <NEW_LINE> hastrd = True <NEW_LINE> print('%s :trading by hand found : Out' % self._pofname) <NEW_LINE> <DEDENT> if hastrd: <NEW_LINE> <INDENT> data_subscribe(gv.SUBSCRIBE_SOURCE) <NEW_LINE> time.sleep(Portfolio.CHARGE_TIME)
扫描手动交易单子
625941bf23e79379d52ee4a3
def clear_all_interfaces(): <NEW_LINE> <INDENT> Interface.objects.all().delete()
Top level function Main function for Clear all interfaces information in database Args: NONE
625941bfbde94217f3682d30
def generate_import_info(self, mtype=None, fpath=None, ftype=None, idx=None, series=None): <NEW_LINE> <INDENT> out = {'mtype': mtype, 'fpath': fpath, 'ftype': ftype, 'idx': idx, 'series': series} <NEW_LINE> file_info = dict() <NEW_LINE> with RockPy3.ignored(ValueError): <NEW_LINE> <INDENT> file_info = RockPy3.core.file_operations.get_info_from_fname(path=fpath) <NEW_LINE> <DEDENT> if not file_info: <NEW_LINE> <INDENT> self.log.warning( 'CANNOT readin fpath automatically.', extra='See RockPy naming conventions for proper naming scheme.') <NEW_LINE> fname = RockPy3.get_fname_from_info(samplegroup='SG', sample_name=self.name, mtype=mtype, ftype=ftype, series=series) <NEW_LINE> self.log.info('FILE NAME proposition:') <NEW_LINE> self.log.info('-'.join('' for i in range(50))) <NEW_LINE> self.log.info('%s' % fname) <NEW_LINE> self.log.info('-'.join('' for i in range(50))) <NEW_LINE> self.log.info('Please check for consistency and add samplegroup and mass, height, diameter') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for check in ['mtype', 'ftype', 'series']: <NEW_LINE> <INDENT> if check in file_info and locals()[check]: <NEW_LINE> <INDENT> if not out[check] == file_info[check]: <NEW_LINE> <INDENT> self.log.warning( '!!! INPUT != file_name: info does not match. Please check input, assuming filename correct') <NEW_LINE> self.log.warning('!!! {} != {}'.format(locals()[check], file_info[check])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> out.update(file_info) <NEW_LINE> out.pop('name', None) <NEW_LINE> return out
First generate the file info. It is read from the fname, if possible. After that the mtype, ftype, fpath and idx will be overwritten, assuming that you gave a proper filname.
625941bf29b78933be1e55ed
def tangent_plane_to_xyz(lat, lon, *args): <NEW_LINE> <INDENT> xyz = _get_vector(*args) <NEW_LINE> Ve = np.matrix(((-np.sin(lat) * np.cos(lon), -np.sin(lat)*np.sin(lon), np.cos(lat)), (-np.sin(lon), np.cos(lon), 0.), (-np.cos(lat)*np.cos(lon), -np.cos(lat)*np.sin(lon), -np.sin(lat)))) <NEW_LINE> v = xyz <NEW_LINE> temp = np.dot(Ve.T,v) <NEW_LINE> out = np.column_stack((lon, lat, temp[0], temp[1], temp[2])) <NEW_LINE> return out
***Use with caution, not properly tested >>> Converts tangent plane (east, north, down) to x,y, z rotations.tangent_plane_to_xyz(90., 0.0, (1.,1., 1.)) http://what-when-how.com/gps-with-high-rate-sensors/specific-vector-transformations-gps-part-1/
625941bfcdde0d52a9e52f6d
def verify_decode_jwt(token): <NEW_LINE> <INDENT> jsonurl = urlopen(f'https://{AUTH0_DOMAIN}/.well-known/jwks.json') <NEW_LINE> jwks = json.loads(jsonurl.read()) <NEW_LINE> unverified_header = jwt.get_unverified_header(token) <NEW_LINE> rsa_key = {} <NEW_LINE> if 'kid' not in unverified_header: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'invalid_header', 'description': 'Authorization malformed.' }, 401) <NEW_LINE> <DEDENT> for key in jwks['keys']: <NEW_LINE> <INDENT> if key['kid'] == unverified_header['kid']: <NEW_LINE> <INDENT> rsa_key = { 'kty': key['kty'], 'kid': key['kid'], 'use': key['use'], 'n': key['n'], 'e': key['e'] } <NEW_LINE> <DEDENT> <DEDENT> if rsa_key: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> payload = jwt.decode( token, rsa_key, algorithms=ALGORITHMS, audience=API_AUDIENCE, issuer='https://' + AUTH0_DOMAIN + '/' ) <NEW_LINE> return payload <NEW_LINE> <DEDENT> except jwt.ExpiredSignatureError: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'token_expired', 'description': 'Token expired.' }, 401) <NEW_LINE> <DEDENT> except jwt.JWTClaimsError: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'invalid_claims', 'description': 'Incorrect claims' }, 401) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise AuthError({ 'code': 'invalid_header', 'description': 'Unable to parse authentication token.' }, 400) <NEW_LINE> <DEDENT> <DEDENT> raise AuthError({ 'code': 'invalid_header', 'description': 'Unable to find the appropriate key.' }, 400)
verify_decode_jwt(token) method @INPUTS token: a json web token (string) it should be an Auth0 token with key id (kid) it should verify the token using Auth0 /.well-known/jwks.json it should decode the payload from the token it should validate the claims return the decoded payload
625941bf5e10d32532c5ee64
def test_no_landscape_server_unit(self): <NEW_LINE> <INDENT> del self.units[0] <NEW_LINE> script.get_units.return_value = self.units[:] <NEW_LINE> script.collect_inner_logs(self.juju) <NEW_LINE> script.get_units.assert_called_once_with(self.juju) <NEW_LINE> script.check_output.assert_not_called() <NEW_LINE> script.call.assert_not_called() <NEW_LINE> script.check_call.assert_not_called() <NEW_LINE> self.assert_clean()
collect_inner_logs() is a noop if the landscape unit isn't found.
625941bf24f1403a92600aa5
def test_not_audio_file(self): <NEW_LINE> <INDENT> test_file = "tests/unit/test_files/not_audio_file.txt" <NEW_LINE> result = _get_size_in_kb(test_file) <NEW_LINE> self.assertEqual(result, 62.884_765_625)
Test not audio file
625941bfc432627299f04b81
def import_constants_section(self, filename_suffix='con'): <NEW_LINE> <INDENT> with open('%s/%s.%s' % (self.model_path, self.model_name, filename_suffix)) as f: <NEW_LINE> <INDENT> for lnum, l in enumerate(f): <NEW_LINE> <INDENT> if re.match('^\s*(;|$)', l): continue <NEW_LINE> l = l.strip().partition(';')[0].strip() <NEW_LINE> t = re.split('\s+', l) <NEW_LINE> self.constants[t[0].lower()] = float(t[1])
Imports CONSTANTS section from a Forest model.
625941bfeab8aa0e5d26da94
@app.route('/ip') <NEW_LINE> @app.route('/IP') <NEW_LINE> def ip(*args, **kwargs): <NEW_LINE> <INDENT> flash(request.remote_addr) <NEW_LINE> if request.referrer and (request.referrer.lower().find('pegman.space') != -1): <NEW_LINE> <INDENT> return redirect(request.referrer) <NEW_LINE> <DEDENT> return redirect(url_for('home'))
Returns the client their IP address.
625941bf7d847024c06be1f6
def test_summary(self): <NEW_LINE> <INDENT> mock_lst = MagicMock(return_value=[]) <NEW_LINE> with patch.dict(puppet.__salt__, {'cmd.run': mock_lst}): <NEW_LINE> <INDENT> with patch('salt.utils.fopen', mock_open(read_data="resources: 1")): <NEW_LINE> <INDENT> self.assertDictEqual(puppet.summary(), {'resources': 1}) <NEW_LINE> <DEDENT> with patch('salt.utils.fopen', mock_open()) as m_open: <NEW_LINE> <INDENT> helper_open = m_open() <NEW_LINE> helper_open.write.assertRaises(CommandExecutionError, puppet.summary)
Test to show a summary of the last puppet agent run
625941bfd18da76e23532410
def is_valid(self): <NEW_LINE> <INDENT> if not re.match(r'^[ox ]{9}$', self.state): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> counts = Counter(self.state) <NEW_LINE> if abs(counts['o'] - counts['x']) > 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> wins = self.get_wins() <NEW_LINE> if len(wins) == 2: <NEW_LINE> <INDENT> if not wins[0] == wins[1]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if len(wins) > 2: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
Validates the state of the game. Assumption: it is always 'o's turn.
625941bfd486a94d0b98e082
def order_deck(self): <NEW_LINE> <INDENT> self.deck.sort(key=operator.attrgetter("value", "suit")) <NEW_LINE> logging.debug("_Deck sorted_\n{}".format( [ card.name for card in self.deck ] )) <NEW_LINE> return self.deck
Orders the cards in the deck by value and rank :return: deck object
625941bf1d351010ab855a59
def get_unique_field_value(candidate, object_manager, field_name): <NEW_LINE> <INDENT> pat = re.compile(r'(.*\()(\d+)(\))$') <NEW_LINE> def generate_new_candidate(candidate): <NEW_LINE> <INDENT> match = re.match(pat, candidate) <NEW_LINE> if match: <NEW_LINE> <INDENT> num = int(match.groups()[1]) + 1 <NEW_LINE> new_candidate = match.groups()[0] + str(num) + match.groups()[2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_candidate = "{0} (1)".format(candidate) <NEW_LINE> <DEDENT> return new_candidate <NEW_LINE> <DEDENT> while object_manager.filter(**{field_name: candidate}): <NEW_LINE> <INDENT> candidate = generate_new_candidate(candidate) <NEW_LINE> <DEDENT> return candidate
Return unique version of field_name candidate, altering it if necessary by adding (or incrementing) a suffixed integer in brackets. For example, if the matching candidate 'About' already exists, return 'About (1)'.
625941bf50812a4eaa59c261
def check_cmaq_units(df, param="O3", aqs_param="OZONE"): <NEW_LINE> <INDENT> aunit = df[df.variable == aqs_param].Units.unique()[0] <NEW_LINE> if aunit == "UG/M3": <NEW_LINE> <INDENT> fac = 1.0 <NEW_LINE> <DEDENT> elif aunit == "PPB": <NEW_LINE> <INDENT> fac = 1000.0 <NEW_LINE> <DEDENT> elif aunit == "ppbC": <NEW_LINE> <INDENT> fac = 1000.0 <NEW_LINE> if aqs_param == "ISOPRENE": <NEW_LINE> <INDENT> fac *= 5.0 <NEW_LINE> <DEDENT> elif aqs_param == "BENZENE": <NEW_LINE> <INDENT> fac *= 6.0 <NEW_LINE> <DEDENT> elif aqs_param == "TOLUENE": <NEW_LINE> <INDENT> fac *= 7.0 <NEW_LINE> <DEDENT> elif aqs_param == "O-XYLENE": <NEW_LINE> <INDENT> fac *= 8.0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> fac = 1.0 <NEW_LINE> <DEDENT> return fac
Short summary. Parameters ---------- df : type Description of parameter `df`. param : type Description of parameter `param` (the default is 'O3'). aqs_param : type Description of parameter `aqs_param` (the default is 'OZONE'). Returns ------- type Description of returned object.
625941bf3346ee7daa2b2ca7
def __str__(self) -> str: <NEW_LINE> <INDENT> return " ".join([str(t) for t in self._rhs]) if self._rhs else "0"
Return a representation of this production
625941bf96565a6dacc8f609
def get_script_hash_from_wif(wif): <NEW_LINE> <INDENT> pk = KeyPair.PrivateKeyFromWIF(wif) <NEW_LINE> keypair = KeyPair(pk) <NEW_LINE> logger.debug("Public Address is {}".format(keypair.GetAddress())) <NEW_LINE> return get_script_hash_from_address(keypair.GetAddress())
Fetch the script hash of the public key from a wif represented in string format. Args: wif (str) : wif from which we need to extract the public key script hash Returns: public key script hash in string format
625941bf3cc13d1c6d3c72b8
def files_exist(*args): <NEW_LINE> <INDENT> do_exit = False <NEW_LINE> for f in args: <NEW_LINE> <INDENT> if not os.path.isfile(f): <NEW_LINE> <INDENT> do_exit = True <NEW_LINE> print("No such file: '{}'".format(f)) <NEW_LINE> <DEDENT> <DEDENT> if do_exit: <NEW_LINE> <INDENT> sys.exit(1)
Check if files exists in the file system.
625941bf76d4e153a657ea6d
def ipv6(test): <NEW_LINE> <INDENT> return None
Check for valid IPv6 address and return if found. :param test: A string, string to test for IPv6 address format. :returns: NoneType, this test is not yet implemented.
625941bf67a9b606de4a7df9
def __getitem__(self, index): <NEW_LINE> <INDENT> return self.data[index]
Get item from dataset. :param index: index in the dataset :return: (audio, target) where target is index of the target class. :rtype: tuple[dict, int]
625941bfbe7bc26dc91cd542
def new_transaction(self, sender, recipient, amount, signature=None, msg=None): <NEW_LINE> <INDENT> transaction = { 'sender': sender, 'recipient': recipient, 'amount': amount, } <NEW_LINE> if msg and signature: <NEW_LINE> <INDENT> transaction['signature'] = signature <NEW_LINE> transaction['message'] = msg <NEW_LINE> <DEDENT> self.current_transactions.append(transaction) <NEW_LINE> return self.last_block['index'] + 1
Creates a new transaction to go into the next mined Block :param sender: Address of the Sender :param recipient: Address of the Recipient :param amount: Amount :return: The index of the Block that will hold this transaction
625941bf15fb5d323cde0a49
def main (): <NEW_LINE> <INDENT> global DISPLAYSURF , FPSCLOCK , BASICFONT , HELP_SURF , HELP_RECT , NEW_SURF , NEW_RECT , SHOTS_SURF , SHOTS_RECT , BIGFONT , COUNTER_SURF , COUNTER_RECT , HBUTTON_SURF , EXPLOSION_IMAGES <NEW_LINE> pygame.init ( ) <NEW_LINE> FPSCLOCK = pygame.time.Clock ( ) <NEW_LINE> DISPLAYSURF = pygame.display.set_mode ( (WINDOWWIDTH , WINDOWHEIGHT) ) <NEW_LINE> BASICFONT = pygame.font.Font ( 'freesansbold.ttf' , 20 ) <NEW_LINE> BIGFONT = pygame.font.Font ( 'freesansbold.ttf' , 50 ) <NEW_LINE> HELP_SURF = BASICFONT.render ( "HELP" , True , WHITE ) <NEW_LINE> HELP_RECT = HELP_SURF.get_rect ( ) <NEW_LINE> HELP_RECT.topleft = (WINDOWWIDTH - 180 , WINDOWHEIGHT - 350) <NEW_LINE> NEW_SURF = BASICFONT.render ( "NEW GAME" , True , WHITE ) <NEW_LINE> NEW_RECT = NEW_SURF.get_rect ( ) <NEW_LINE> NEW_RECT.topleft = (WINDOWWIDTH - 200 , WINDOWHEIGHT - 200) <NEW_LINE> SHOTS_SURF = BASICFONT.render ( "Shots: " , True , WHITE ) <NEW_LINE> SHOTS_RECT = SHOTS_SURF.get_rect ( ) <NEW_LINE> SHOTS_RECT.topleft = (WINDOWWIDTH - 750 , WINDOWHEIGHT - 570) <NEW_LINE> EXPLOSION_IMAGES = [ pygame.image.load ( "img/blowup1.png" ) , pygame.image.load ( "img/blowup2.png" ) , pygame.image.load ( "img/blowup3.png" ) , pygame.image.load ( "img/blowup4.png" ) , pygame.image.load ( "img/blowup5.png" ) , pygame.image.load ( "img/blowup6.png" ) ] <NEW_LINE> pygame.display.set_caption ( 'Battleship' ) <NEW_LINE> while True: <NEW_LINE> <INDENT> shots_taken = run_game ( ) <NEW_LINE> show_gameover_screen ( shots_taken )
The main function intializes the variables which will be used by the game.
625941bf0383005118ecf521
def augment_queryset(self, state, queryset): <NEW_LINE> <INDENT> return queryset
Augments a queryset with new queries. Subclasses can override this to extend the queryset to provide additional information, usually using queryset.extra(). This must return a queryset based on the original queryset. This should not restrict the query in any way, or the datagrid may not operate properly. It must only add additional data to the queryset.
625941bfcb5e8a47e48b79ea
def control_3_5_ensure_log_metric_cloudtrail_configuration_changes(cloudtrails): <NEW_LINE> <INDENT> result = False <NEW_LINE> failReason = "" <NEW_LINE> offenders = [] <NEW_LINE> control = "3.5" <NEW_LINE> description = "Ensure a log metric filter and alarm exist for IAM policy changes" <NEW_LINE> scored = True <NEW_LINE> failReason = "Ensure a log metric filter and alarm exist for IAM policy changes" <NEW_LINE> for m, n in cloudtrails.iteritems(): <NEW_LINE> <INDENT> for o in n: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if o['CloudWatchLogsLogGroupArn']: <NEW_LINE> <INDENT> group = re.search('log-group:(.+?):', o['CloudWatchLogsLogGroupArn']).group(1) <NEW_LINE> client = boto3.client('logs', region_name=m) <NEW_LINE> filters = client.describe_metric_filters( logGroupName=group ) <NEW_LINE> for p in filters['metricFilters']: <NEW_LINE> <INDENT> if "{ ($.eventName = CreateTrail) || ($.eventName = UpdateTrail) || ($.eventName = DeleteTrail) || ($.eventName = StartLogging) || ($.eventName = StopLogging) }": <NEW_LINE> <INDENT> cwclient = boto3.client('cloudwatch', region_name=m) <NEW_LINE> response = cwclient.describe_alarms_for_metric( MetricName=p['metricTransformations'][0]['metricName'], Namespace="CloudTrailMetrics" ) <NEW_LINE> snsClient = boto3.client('sns', region_name=m) <NEW_LINE> subscribers = snsClient.list_subscriptions_by_topic( TopicArn=response['MetricAlarms'][0]['AlarmActions'][0] ) <NEW_LINE> if not len(subscribers['Subscriptions']) == 0: <NEW_LINE> <INDENT> result = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return {'Result': result, 'failReason': failReason, 'Offenders': offenders, 'ScoredControl': scored, 'Description': description, 'ControlId': control}
Summary Returns: TYPE: Description
625941bf7d43ff24873a2bdb
def optimize_vertex_cover_greedy_single_pass(G, C): <NEW_LINE> <INDENT> C = list(C) <NEW_LINE> shuffle(C) <NEW_LINE> while True: <NEW_LINE> <INDENT> for u in C: <NEW_LINE> <INDENT> if all(v in C for v in G[u]): <NEW_LINE> <INDENT> C.remove(u) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return C
usuwanie w losowej kolejności wierzchołków z rozwiązania obliczonego przez dany algorytm, o ile nie powoduje to, że tracimy pokrycie wierzchołkowe
625941bf91f36d47f21ac42d
def card_ranks(hand): <NEW_LINE> <INDENT> card_ranks_dict = {'T':10, 'J':11, 'Q':12, 'K':13, 'A':14} <NEW_LINE> list_ranks = [card_ranks_dict[item[0]] if item[0] in card_ranks_dict.keys() else int(item[0]) for item in hand] <NEW_LINE> return sorted(list_ranks)
Возвращает список рангов (его числовой эквивалент), отсортированный от большего к меньшему
625941bfaad79263cf39097b
def tex_to_pdf(template, resources): <NEW_LINE> <INDENT> with tempfile.TemporaryDirectory() as tmpdirname: <NEW_LINE> <INDENT> pth_template = path.join(tmpdirname, 'template.tex') <NEW_LINE> pth_pdf = path.join(tmpdirname, 'template.pdf') <NEW_LINE> with open(pth_template, 'w') as fh: <NEW_LINE> <INDENT> fh.write(template) <NEW_LINE> <DEDENT> for res in resources: <NEW_LINE> <INDENT> res_path = path.join(tmpdirname, res.filename) <NEW_LINE> with open(res_path, 'wb') as fh: <NEW_LINE> <INDENT> fh.write(res.data) <NEW_LINE> <DEDENT> <DEDENT> pdflatex = sys.PdfLaTeX() <NEW_LINE> pdflatex.call(filename=pth_template) <NEW_LINE> with open(pth_pdf, 'rb') as fh: <NEW_LINE> <INDENT> pdf_bin = fh.read() <NEW_LINE> pdf_b64 = base64.b64encode(pdf_bin) <NEW_LINE> pdf = str(pdf_b64, 'ascii') <NEW_LINE> <DEDENT> <DEDENT> return pdf
Convert TeX Template to PDF. :param template: String buffer containing the TeX Template. :param resources: A list of Resources as defined in `sii.printing.TemplateElement`. :return: PDF string in base64 encoding. :type template: str :type resources: list of :class:sii.printing.TemplateElement.Resource
625941bfe64d504609d7477d
def exception(self, msg, *args, **kwargs): <NEW_LINE> <INDENT> msg, kwargs = self.process(msg, kwargs) <NEW_LINE> kwargs["exc_info"] = 1 <NEW_LINE> self.logger.error(msg, *args, **kwargs)
Delegate an exception call to the underlying logger, after adding contextual information from this adapter instance.
625941bf090684286d50ec20
def get_binary_data(rows, cols, scale = 50, add_noise = False, noise_var = 1.0): <NEW_LINE> <INDENT> Z, X, weights = get_dense_data(rows = rows, cols = cols, scale = scale, add_noise = add_noise, noise_var = noise_var) <NEW_LINE> Y = (Z >= 0) * 1 <NEW_LINE> return Y, X, weights
Generate a dense matrix X with the given number of rows and columns, and a vector of binary labels Y that depend linearly on the features. Args: rows (int): the number of observations in the data set cols (int): the number of features in the data set add_noise (bool): whether to include noise (random error) in the labels with mean 0 and variance noise_var (default: False) noise_var (float): the variance of the noise, if included (default = 1.0) Returns: Y (array): a 1D array of binary labels for the data X (array): a rows by cols ndarray of random i.i.d. data weights (array): the weights applied to the columns of X to generate Y
625941bf596a897236089a01
def filter_any_value(c, value, keys=None, inclusion=None, exclusion=None): <NEW_LINE> <INDENT> return filter_any_with(c, lambda v: v == value, keys=keys, inclusion=inclusion, exclusion=exclusion)
Returns the entries of the specified collection whose values are equal to the specified value for at least one specified key.
625941bfb5575c28eb68df3c
def memDim(S,dim,mCount): <NEW_LINE> <INDENT> for i in range(dim): <NEW_LINE> <INDENT> if 0 > S - np.sum(mCount[:i+2]): <NEW_LINE> <INDENT> Sdim = dim-i <NEW_LINE> break <NEW_LINE> <DEDENT> elif i == dim-1: <NEW_LINE> <INDENT> print('error in memDim') <NEW_LINE> Sdim = 0 <NEW_LINE> <DEDENT> <DEDENT> return Sdim
Function returns the dimensions of each member S
625941bf26068e7796caec18
def test_menu__person_list_menu__2(pl_menu): <NEW_LINE> <INDENT> assert pl_menu.item_selected(pl_menu.menu_item_URL)
The person list menu item is selected on the person list.
625941bffb3f5b602dac35ce
def test_uptodate_no_changes(self): <NEW_LINE> <INDENT> list_upgrades = MagicMock(return_value={}) <NEW_LINE> upgrade = MagicMock(return_value={}) <NEW_LINE> with patch.dict(pkg.__salt__, {'pkg.list_upgrades': list_upgrades, 'pkg.upgrade': upgrade}): <NEW_LINE> <INDENT> with patch.dict(pkg.__opts__, {'test': False}): <NEW_LINE> <INDENT> ret = pkg.uptodate('dummy', test=True) <NEW_LINE> self.assertTrue(ret['result']) <NEW_LINE> self.assertDictEqual(ret['changes'], {}) <NEW_LINE> <DEDENT> with patch.dict(pkg.__opts__, {'test': True}): <NEW_LINE> <INDENT> ret = pkg.uptodate('dummy', test=True) <NEW_LINE> self.assertTrue(ret['result']) <NEW_LINE> self.assertDictEqual(ret['changes'], {})
Test pkg.uptodate with no changes
625941bf99cbb53fe6792b25
def rasmify(text: str) -> str: <NEW_LINE> <INDENT> result = text.translate(RASMIFY_TRANSLATION_TABLE) <NEW_LINE> result = result.replace( '\N{ARABIC LETTER LAM}\N{ARABIC LETTER LAM}\N{ARABIC LETTER HEH}', '\N{ARABIC LETTER LAM}\N{ARABIC LETTER LAM}\N{ZERO WIDTH JOINER}\N{ARABIC LETTER HEH}') <NEW_LINE> return result.strip()
Reduces an arabic string to its rasm.
625941bf50485f2cf553ccd6
def write(version_file_path, version): <NEW_LINE> <INDENT> with open(version_file_path, 'wb') as f: <NEW_LINE> <INDENT> f.write(version)
Writes a version string to version_file. Args: version(str): The version as a string. version_file_path(str): File path to write version to.
625941bfcc40096d6159588f
def receive(self): <NEW_LINE> <INDENT> with socket.socket(type=socket.SOCK_DGRAM) as s: <NEW_LINE> <INDENT> s.bind(self.address) <NEW_LINE> while True: <NEW_LINE> <INDENT> data, client_address = s.recvfrom(1024) <NEW_LINE> kick_list = set() <NEW_LINE> now = datetime.datetime.now().timestamp() <NEW_LINE> if data == b'': <NEW_LINE> <INDENT> self.clients[client_address] = datetime.datetime.now().timestamp() <NEW_LINE> continue <NEW_LINE> <DEDENT> elif data == b'quit': <NEW_LINE> <INDENT> self.clients.pop(client_address) <NEW_LINE> continue <NEW_LINE> <DEDENT> print('{}:{} :\t{}'.format(client_address[0], client_address[1], data.decode())) <NEW_LINE> for c, t in self.clients.items(): <NEW_LINE> <INDENT> if now - t > self.interval: <NEW_LINE> <INDENT> kick_list.add(c) <NEW_LINE> continue <NEW_LINE> <DEDENT> elif c == client_address: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s.sendto(data, c) <NEW_LINE> <DEDENT> <DEDENT> for c in kick_list: <NEW_LINE> <INDENT> self.clients.pop(c)
Receive the message from Client, send it to all connected clients except sender, kick the client out who have not send heart beat package.
625941bf283ffb24f3c55841
def test_account_exists(self): <NEW_LINE> <INDENT> self.assertTrue(self.stoken.account_exists(TEST_ACC))
Test account_exists returns true for TEST_ACC
625941bf796e427e537b0501
def conv_backward(dZ, cache): <NEW_LINE> <INDENT> (A_prev, W, b, hparameters) = cache <NEW_LINE> (m, n_H_prev, n_W_prev, n_C_prev) = A_prev.shape <NEW_LINE> (f, f, n_C_prev, n_C) = W.shape <NEW_LINE> stride = hparameters['stride'] <NEW_LINE> pad = hparameters['pad'] <NEW_LINE> (m, n_H, n_W, n_C) = dZ.shape <NEW_LINE> dA_prev = np.zeros(A_prev.shape) <NEW_LINE> dW = np.zeros(W.shape) <NEW_LINE> db = np.zeros(b.shape) <NEW_LINE> A_prev_pad = zero_pad(A_prev, pad) <NEW_LINE> dA_prev_pad = zero_pad(dA_prev, pad) <NEW_LINE> for i in range(m): <NEW_LINE> <INDENT> a_prev_pad = A_prev_pad[i,:,:,:] <NEW_LINE> da_prev_pad = dA_prev_pad[i,:,:,:] <NEW_LINE> for h in range(n_H-f+1): <NEW_LINE> <INDENT> for w in range(n_W-f+1): <NEW_LINE> <INDENT> for c in range(n_C): <NEW_LINE> <INDENT> vert_start = h <NEW_LINE> vert_end = h+f <NEW_LINE> horiz_start = w <NEW_LINE> horiz_end = w+f <NEW_LINE> a_slice = a_prev_pad[vert_start:vert_end,horiz_start:horiz_end,:] <NEW_LINE> da_prev_pad[vert_start:vert_end, horiz_start:horiz_end, :] += W[:,:,:,c] * dZ[i, h, w, c] <NEW_LINE> dW[:,:,:,c] += a_slice * dZ[i, h, w, c] <NEW_LINE> db[:,:,:,c] += dZ[i, h, w, c] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> dA_prev[i, :, :, :] = da_prev_pad[pad:-pad, pad:-pad, :] <NEW_LINE> <DEDENT> assert(dA_prev.shape == (m, n_H_prev, n_W_prev, n_C_prev)) <NEW_LINE> return dA_prev, dW, db
Implement the backward propagation for a convolution function Arguments: dZ -- gradient of the cost with respect to the output of the conv layer (Z), numpy array of shape (m, n_H, n_W, n_C) cache -- cache of values needed for the conv_backward(), output of conv_forward() Returns: dA_prev -- gradient of the cost with respect to the input of the conv layer (A_prev), numpy array of shape (m, n_H_prev, n_W_prev, n_C_prev) dW -- gradient of the cost with respect to the weights of the conv layer (W) numpy array of shape (f, f, n_C_prev, n_C) db -- gradient of the cost with respect to the biases of the conv layer (b) numpy array of shape (1, 1, 1, n_C)
625941bf0383005118ecf522
def get_options(self, ctx): <NEW_LINE> <INDENT> self.wool = WOOLS[ctx.opts.wool] or WOOLS.default <NEW_LINE> self.wool.output_path = ctx.opts.output or "" <NEW_LINE> self.wool.parse_config(ctx.opts.config_file)
Extract option parameters from the context. :param ctx: the context
625941bf30dc7b76659018a7
def test_cbr_zoo(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.blackbox_method_int('cbr_zoo') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return
Integration test for kabam.cbr_zoo
625941bffbf16365ca6f60fc
def refine_abs(expr, assumptions): <NEW_LINE> <INDENT> arg = expr.args[0] <NEW_LINE> if ask(arg, Q.real, assumptions) and fuzzy_not(ask(arg, Q.negative, assumptions)): <NEW_LINE> <INDENT> return arg <NEW_LINE> <DEDENT> if ask(arg, Q.negative, assumptions): <NEW_LINE> <INDENT> return -arg
Handler for the absolute value. Examples:: >>> from sympy import Symbol, Assume, Q, refine >>> from sympy.assumptions.refine import refine_abs >>> from sympy.abc import x >>> refine_abs(abs(x), Assume(x, Q.real)) >>> refine_abs(abs(x), Assume(x, Q.positive)) x >>> refine_abs(abs(x), Assume(x, Q.negative)) -x
625941bf0fa83653e4656efa
def forgot_password(): <NEW_LINE> <INDENT> if request.json: <NEW_LINE> <INDENT> form = ForgotPasswordForm(MultiDict(request.json)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form = ForgotPasswordForm() <NEW_LINE> <DEDENT> if form.validate_on_submit(): <NEW_LINE> <INDENT> send_reset_password_instructions(form.user) <NEW_LINE> if request.json: <NEW_LINE> <INDENT> return _render_json(form, False) <NEW_LINE> <DEDENT> do_flash(*get_message('PASSWORD_RESET_REQUEST', email=form.user.email)) <NEW_LINE> <DEDENT> return render_template('security/forgot_password.html', forgot_password_form=form, **_ctx('forgot_password'))
View function that handles a forgotten password request.
625941bf3d592f4c4ed1cfb2
def get_glgcm_features(mat): <NEW_LINE> <INDENT> sum_mat = mat.sum() <NEW_LINE> small_grads_dominance = big_grads_dominance = gray_asymmetry = grads_asymmetry = energy = gray_mean = grads_mean = 0 <NEW_LINE> gray_variance = grads_variance = corelation = gray_entropy = grads_entropy = entropy = inertia = differ_moment = 0 <NEW_LINE> sum_of_squares = 0 <NEW_LINE> for i in range(mat.shape[0]): <NEW_LINE> <INDENT> gray_variance_temp = 0 <NEW_LINE> for j in range(mat.shape[1]): <NEW_LINE> <INDENT> small_grads_dominance += mat[i][j] / ((j + 1) ** 2) <NEW_LINE> big_grads_dominance += mat[i][j] * j ** 2 <NEW_LINE> energy += mat[i][j] ** 2 <NEW_LINE> if mat[i].sum() != 0: <NEW_LINE> <INDENT> gray_entropy -= mat[i][j] * np.log(mat[i].sum()) <NEW_LINE> <DEDENT> if mat[:, j].sum() != 0: <NEW_LINE> <INDENT> grads_entropy -= mat[i][j] * np.log(mat[:, j].sum()) <NEW_LINE> <DEDENT> if mat[i][j] != 0: <NEW_LINE> <INDENT> entropy -= mat[i][j] * np.log(mat[i][j]) <NEW_LINE> inertia += (i - j) ** 2 * np.log(mat[i][j]) <NEW_LINE> <DEDENT> differ_moment += mat[i][j] / (1 + (i - j) ** 2) <NEW_LINE> gray_variance_temp += mat[i][j] ** 0.5 <NEW_LINE> <DEDENT> gray_asymmetry += mat[i].sum() ** 2 <NEW_LINE> gray_mean += i * mat[i].sum() ** 2 <NEW_LINE> gray_variance += (i - gray_mean) ** 2 * gray_variance_temp <NEW_LINE> <DEDENT> for j in range(mat.shape[1]): <NEW_LINE> <INDENT> grads_variance_temp = 0 <NEW_LINE> for i in range(mat.shape[0]): <NEW_LINE> <INDENT> grads_variance_temp += mat[i][j] ** 0.5 <NEW_LINE> <DEDENT> grads_asymmetry += mat[:, j].sum() ** 2 <NEW_LINE> grads_mean += j * mat[:, j].sum() ** 2 <NEW_LINE> grads_variance += (j - grads_mean) ** 2 * grads_variance_temp <NEW_LINE> <DEDENT> small_grads_dominance /= sum_mat <NEW_LINE> big_grads_dominance /= sum_mat <NEW_LINE> gray_asymmetry /= sum_mat <NEW_LINE> grads_asymmetry /= sum_mat <NEW_LINE> gray_variance = gray_variance ** 0.5 <NEW_LINE> grads_variance = grads_variance ** 0.5 <NEW_LINE> for i in range(mat.shape[0]): 
<NEW_LINE> <INDENT> for j in range(mat.shape[1]): <NEW_LINE> <INDENT> corelation += (i - gray_mean) * (j - grads_mean) * mat[i][j] <NEW_LINE> <DEDENT> <DEDENT> glgcm_features = [small_grads_dominance, big_grads_dominance, gray_asymmetry, grads_asymmetry, energy, gray_mean, grads_mean, gray_variance, grads_variance, corelation, gray_entropy, grads_entropy, entropy, inertia, differ_moment] <NEW_LINE> return np.round(glgcm_features, 4)
We base on Gray Level-Gradient Co-occurrence Matrix to calculate texture features,which includes small gradients dominance, big gradients dominance, gray level asymmetry, gradients asymmetry, energy, gray level mean, gradients mean, gray level variance, gradients variance, correlation, gray level entropy, gradients entropy, mixed entropy, inertia and inverse difference moment
625941bf21a7993f00bc7c29
def remove_place(self, place): <NEW_LINE> <INDENT> place = place.strip().lower() <NEW_LINE> for i in reversed(range(len(self._places))): <NEW_LINE> <INDENT> if self._places[i].lower() == place: <NEW_LINE> <INDENT> del self._places[i]
Remove `place` from the list of places.
625941bf656771135c3eb7aa
def example_response(self, status_code=None, content_type=None): <NEW_LINE> <INDENT> status_code = status_code or sorted(self._responses.keys())[0] <NEW_LINE> content_type = content_type or self.get_mimetype() <NEW_LINE> examples_path = [str(status_code), 'content', content_type, 'examples'] <NEW_LINE> example_path = [str(status_code), 'content', content_type, 'example'] <NEW_LINE> schema_example_path = [ str(status_code), 'content', content_type, 'schema', 'example' ] <NEW_LINE> schema_path = [str(status_code), 'content', content_type, 'schema'] <NEW_LINE> try: <NEW_LINE> <INDENT> status_code = int(status_code) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> status_code = 200 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return ( list(deep_get(self._responses, examples_path).values())[0]['value'], status_code ) <NEW_LINE> <DEDENT> except (KeyError, IndexError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return (deep_get(self._responses, example_path), status_code) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return (deep_get(self._responses, schema_example_path), status_code) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return (self._nested_example(deep_get(self._responses, schema_path)), status_code) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return (None, status_code)
Returns example response from spec
625941bf9b70327d1c4e0d12
def permission_add_role(name): <NEW_LINE> <INDENT> if name: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> role = Role() <NEW_LINE> role.name = name <NEW_LINE> db.session.add(role) <NEW_LINE> db.session.commit() <NEW_LINE> return {"status": True, "msg": "add role success"} <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> db.session.rollback() <NEW_LINE> return {"status": False, "msg": "add role fail"} <NEW_LINE> <DEDENT> <DEDENT> return {"status": False, "msg": "need role name"}
add role :param name: :return:
625941bf60cbc95b062c6480
def get_reduce_action(self, machine, top=True): <NEW_LINE> <INDENT> if machine.action_history == []: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> action = machine.action_history[-1] <NEW_LINE> fetch = arc_regex.match(action) <NEW_LINE> if fetch is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if top: <NEW_LINE> <INDENT> node_id = machine.node_stack[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> index = int(fetch.groups()[0]) <NEW_LINE> if self.absolute_stack_pos: <NEW_LINE> <INDENT> node_id = index <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> index = len(machine.node_stack) - index - 2 <NEW_LINE> node_id = machine.node_stack[index] <NEW_LINE> <DEDENT> <DEDENT> gold_node_id = self.node_reverse_map[node_id] <NEW_LINE> return self.pend_edges_by_node[gold_node_id] == []
If last action is an arc, check if any involved node (top or not top) has no pending edges
625941bfcdde0d52a9e52f6e
def test_to_dataframe(): <NEW_LINE> <INDENT> results = table.to_dataframe() <NEW_LINE> assert(type(results) == DataFrame)
Tests that results are returned as a pandas.DataFrame
625941bfd268445f265b4dac
def up(self): <NEW_LINE> <INDENT> with self.schema.create('testcases') as table: <NEW_LINE> <INDENT> table.increments('id') <NEW_LINE> table.primary('id') <NEW_LINE> table.increments('experiment_id') <NEW_LINE> table.foreign('experiment_id') .references('id').on('experiments') .on_delete('cascade') .on_update('cascade')
Run the migrations.
625941bf2eb69b55b151c7ea
def getsize_multiline( self, text, direction=None, spacing=4, features=None, language=None ): <NEW_LINE> <INDENT> max_width = 0 <NEW_LINE> lines = self._multiline_split(text) <NEW_LINE> line_spacing = self.getsize("A")[1] + spacing <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> line_width, line_height = self.getsize(line, direction, features, language) <NEW_LINE> max_width = max(max_width, line_width) <NEW_LINE> <DEDENT> return max_width, len(lines) * line_spacing - spacing
Returns width and height (in pixels) of given text if rendered in font with provided direction, features, and language, while respecting newline characters. :param text: Text to measure. :param direction: Direction of the text. It can be 'rtl' (right to left), 'ltr' (left to right) or 'ttb' (top to bottom). Requires libraqm. :param spacing: The vertical gap between lines, defaulting to 4 pixels. :param features: A list of OpenType font features to be used during text layout. This is usually used to turn on optional font features that are not enabled by default, for example 'dlig' or 'ss01', but can be also used to turn off default font features for example '-liga' to disable ligatures or '-kern' to disable kerning. To get all supported features, see https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist Requires libraqm. :param language: Language of the text. Different languages may use different glyph shapes or ligatures. This parameter tells the font which language the text is in, and to apply the correct substitutions as appropriate, if available. It should be a `BCP 47 language code <https://www.w3.org/International/articles/language-tags/>` Requires libraqm. .. versionadded:: 6.0.0 :return: (width, height)
625941bf090684286d50ec21
def funplot(func, xlims = None, n_points = 100, keep_ylims = False, ax=None, **plot_args): <NEW_LINE> <INDENT> if ax is None: <NEW_LINE> <INDENT> ax = plt.gca() <NEW_LINE> <DEDENT> if xlims is None: <NEW_LINE> <INDENT> xlims = ax.get_xbound() <NEW_LINE> <DEDENT> xs, xe = xlims <NEW_LINE> x = np.logspace(np.log10(xs), np.log10(xe), n_points) if ax.get_xscale() else np.linspace(xs, xe, n_points) <NEW_LINE> if keep_ylims: <NEW_LINE> <INDENT> ylims = ax.get_ybound() <NEW_LINE> <DEDENT> h=ax.plot(x, func(x), **plot_args) <NEW_LINE> if keep_ylims: <NEW_LINE> <INDENT> ax.set_ybound(*ylims) <NEW_LINE> <DEDENT> ax.set_xbound(*xlims) <NEW_LINE> return h
Plot a function :param func: :param xlims: :param n_points: :return:
625941bf507cdc57c6306c13
def register_api(view, endpoint, url, id='id', id_type='int'): <NEW_LINE> <INDENT> view_func = view.as_view(endpoint) <NEW_LINE> app.add_url_rule(url, defaults={id: None}, view_func=view_func, methods=['GET']) <NEW_LINE> app.add_url_rule(url, view_func=view_func, methods=['POST']) <NEW_LINE> app.add_url_rule('%s<%s:%s>' % (url, id_type, id), view_func=view_func, methods=['GET', 'PUT', 'DELETE'])
Covenience function for building APIs.
625941bf73bcbd0ca4b2bfb5
def unsubscribe(self, stock_code, data_type): <NEW_LINE> <INDENT> query_processor = self._get_sync_query_processor(SubscriptionQuery.pack_unsubscribe_req, SubscriptionQuery.unpack_unsubscribe_rsp) <NEW_LINE> kargs = {'stock_str': stock_code, 'data_type': data_type} <NEW_LINE> ret_code, msg, _ = query_processor(**kargs) <NEW_LINE> if ret_code != RET_OK: <NEW_LINE> <INDENT> return RET_ERROR, msg <NEW_LINE> <DEDENT> return RET_OK, None
unsubcribe a sort of data for a stock :param stock_code: string stock_code . For instance, "HK.00700", "US.AAPL" :param data_type: string data type. For instance, "K_1M", "K_MON" :return: (ret_code, ret_data). ret_code: RET_OK or RET_ERROR.
625941bf71ff763f4b5495c5
def FunctionDef_enter(self, node, parent): <NEW_LINE> <INDENT> assert type(parent) is ast.Module, 'nested functions not implemented' <NEW_LINE> node.locals = {} <NEW_LINE> self.locals = node.locals
FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns)
625941bf60cbc95b062c6481
@app.route("/", methods=["GET", "POST"]) <NEW_LINE> def index(): <NEW_LINE> <INDENT> form = PasswordForm(request.form) <NEW_LINE> if form.validate_on_submit(): <NEW_LINE> <INDENT> password = form.password.data <NEW_LINE> hashed_pass = generate_password_hash(password) <NEW_LINE> return redirect(url_for("check", hashed_pass=hashed_pass)) <NEW_LINE> <DEDENT> return render_template("input.html", form=form)
パスワード入力
625941bfa8ecb033257d300c
def testMcaCtime(self): <NEW_LINE> <INDENT> mca0_preset_time = self.sfh5["/1.1/instrument/mca_0/preset_time"] <NEW_LINE> mca1_preset_time = self.sfh5["/1.1/instrument/mca_1/preset_time"] <NEW_LINE> self.assertLess(mca0_preset_time - 123.4, 10**-5) <NEW_LINE> self.assertLess(mca1_preset_time - 10, 10**-5) <NEW_LINE> mca0_live_time = self.sfh5["/1.1/instrument/mca_0/live_time"] <NEW_LINE> mca1_live_time = self.sfh5["/1.1/instrument/mca_1/live_time"] <NEW_LINE> self.assertLess(mca0_live_time - 234.5, 10**-5) <NEW_LINE> self.assertLess(mca1_live_time - 11, 10**-5) <NEW_LINE> mca0_elapsed_time = self.sfh5["/1.1/instrument/mca_0/elapsed_time"] <NEW_LINE> mca1_elapsed_time = self.sfh5["/1.1/instrument/mca_1/elapsed_time"] <NEW_LINE> self.assertLess(mca0_elapsed_time - 345.6, 10**-5) <NEW_LINE> self.assertLess(mca1_elapsed_time - 12, 10**-5)
Tests for #@CTIME mca header
625941bfdc8b845886cb5472
def revert_name(str): <NEW_LINE> <INDENT> if str.startswith('custom_amount_') or str.startswith('custom_units_'): <NEW_LINE> <INDENT> str = str.replace('custom_', '') <NEW_LINE> if str.startswith('amount_'): <NEW_LINE> <INDENT> str = str.replace('amount_', '') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> str = str.replace('units_', '') <NEW_LINE> <DEDENT> <DEDENT> return str
Remove the text 'custom_amount_' or 'custom_units_' from the start of a string
625941bf0a50d4780f666dce
def rainbow_text(x, y, strings, colors, orientation='horizontal', ax=None, **kwargs): <NEW_LINE> <INDENT> if ax is None: <NEW_LINE> <INDENT> ax = plt.gca() <NEW_LINE> <DEDENT> t = ax.transData <NEW_LINE> canvas = ax.figure.canvas <NEW_LINE> assert orientation in ['horizontal', 'vertical'] <NEW_LINE> if orientation == 'vertical': <NEW_LINE> <INDENT> kwargs.update(rotation=90, verticalalignment='bottom') <NEW_LINE> <DEDENT> for s, c in zip(strings, colors): <NEW_LINE> <INDENT> text = ax.text(x, y, s + " ", color=c, transform=t, **kwargs) <NEW_LINE> text.draw(canvas.get_renderer()) <NEW_LINE> ex = text.get_window_extent() <NEW_LINE> if orientation == 'horizontal': <NEW_LINE> <INDENT> t = transforms.offset_copy( text.get_transform(), x=ex.width, units='dots') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> t = transforms.offset_copy( text.get_transform(), y=ex.height, units='dots')
Take a list of *strings* and *colors* and place them next to each other, with text strings[i] being shown in colors[i]. Parameters ---------- x, y : float Text position in data coordinates. strings : list of str The strings to draw. colors : list of color The colors to use. orientation : {'horizontal', 'vertical'} ax : Axes, optional The Axes to draw into. If None, the current axes will be used. **kwargs All other keyword arguments are passed to plt.text(), so you can set the font size, family, etc.
625941bf63b5f9789fde7023
def double_check_ctg(tg, ctg): <NEW_LINE> <INDENT> for task in tg.nodes(): <NEW_LINE> <INDENT> cluster = tg.node[task]['task'].cluster <NEW_LINE> if cluster in ctg.nodes(): <NEW_LINE> <INDENT> if task not in ctg.node[cluster]['TaskList']: <NEW_LINE> <INDENT> print("DOUBLE CHECKING CTG with TG: \t\033[31mFAILED\033[0m") <NEW_LINE> print("TASK", task, "DOES NOT EXIST IN CLUSTER:", cluster) <NEW_LINE> Clustering_Reports.report_ctg(ctg, "CTG_DoubleCheckError.png") <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("DOUBLE CHECKING CTG with TG: \t\033[31mFAILED\033[0m") <NEW_LINE> print("CLUSTER", cluster, " DOESNT EXIST...!!!") <NEW_LINE> Clustering_Reports.report_ctg(ctg, "CTG_DoubleCheckError.png") <NEW_LINE> raise ValueError("DOUBLE CHECKING CTG with TG FAILED") <NEW_LINE> <DEDENT> <DEDENT> return True
Checks if the clusters info in TG matches with the information in the CTG. :param tg: Task Graph :param ctg: Clustered Task Graph :return: True if CTG information is the same as TG, False if otherwise
625941bff7d966606f6a9f40
def retrieve_show(self, show_id: int) -> Show: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resp = self._request(f'tv/{show_id}') <NEW_LINE> <DEDENT> except requests.HTTPError as e: <NEW_LINE> <INDENT> if e.response.status_code == 404: <NEW_LINE> <INDENT> raise Http404(f'Show {show_id} does not exist') from e <NEW_LINE> <DEDENT> raise <NEW_LINE> <DEDENT> data: dict = resp.json() <NEW_LINE> parser = self._get_show_parser() <NEW_LINE> return parser.for_detail(data)
Retrieve details of a show. :param show_id: id of the show in the API. :return: a Show object
625941bf4e4d5625662d4319
def publish_config_updated(self, type, name, host): <NEW_LINE> <INDENT> self.log.debug("Publish configuration update notification for {0}-{1}.{2}".format(type, name, host)) <NEW_LINE> self._pub.send_event('plugin.configuration', {"type" : type, "name" : name, "host" : host, "event" : "updated"})
Publish over the MQ a message to inform that a plugin configuration has been updated @param type : package type (plugin) @param name : package name @param host : host
625941bf711fe17d825422ae
def curr_dt(loc=None): <NEW_LINE> <INDENT> if loc == None: curr = dt.datetime.now() <NEW_LINE> elif loc in all_timezones: curr = dt.datetime.now(loc) <NEW_LINE> elif loc.lower() == 'utc': curr = dt.datetime.now(timezone('UTC')) <NEW_LINE> elif loc.lower() == 'eastern': curr = dt.datetime.now(timezone('US/Eastern')) <NEW_LINE> elif loc.lower() == 'central': curr = dt.datetime.now(timezone('US/Central')) <NEW_LINE> elif loc.lower() == 'pacific': curr = dt.datetime.now(timezone('US/Pacific')) <NEW_LINE> else: <NEW_LINE> <INDENT> logger.error('Unsupported location: %s', loc) <NEW_LINE> return dt.datetime(1900,1,1,tzinfo=timezone('UTC')) <NEW_LINE> <DEDENT> return curr
Returns the current date&time as a datetime object. The calling API is similar to curr_time and is timezone aware via loc. See the curr_time() function for description of loc. HOWEVER, the return type is a dt.datetime tuple format w/timezone spec
625941bfb545ff76a8913d54
def goto_adv(self): <NEW_LINE> <INDENT> with allure.step("点击横幅广告"): <NEW_LINE> <INDENT> self.steps("../page/main.yaml") <NEW_LINE> <DEDENT> self.tsleep(2) <NEW_LINE> return self
点击横幅广告 :return:
625941bf15baa723493c3eb2
def __init__(self, algoEngine, parent=None): <NEW_LINE> <INDENT> super(BlWidget, self).__init__(algoEngine, parent) <NEW_LINE> self.templateName = BlAlgo.templateName
Constructor
625941bffb3f5b602dac35cf
def fromSecureString(self, str): <NEW_LINE> <INDENT> kparams = KalturaParams() <NEW_LINE> kparams.addStringIfDefined("str", str) <NEW_LINE> self.client.queueServiceActionCall("kalturainternaltools_kalturainternaltoolssystemhelper", "fromSecureString", KalturaInternalToolsSession, kparams) <NEW_LINE> if self.client.isMultiRequest(): <NEW_LINE> <INDENT> return self.client.getMultiRequestResult() <NEW_LINE> <DEDENT> resultNode = self.client.doQueue() <NEW_LINE> return KalturaObjectFactory.create(resultNode, KalturaInternalToolsSession)
KS from Secure String
625941bf435de62698dfdb8a
def petals(size, color): <NEW_LINE> <INDENT> turtle.color(color) <NEW_LINE> turtle.pencolor("black") <NEW_LINE> turtle.begin_fill() <NEW_LINE> for x in range(6): <NEW_LINE> <INDENT> turtle.forward(size) <NEW_LINE> turtle.right(60) <NEW_LINE> <DEDENT> turtle.end_fill() <NEW_LINE> for x in range(5): <NEW_LINE> <INDENT> turtle.color(color) <NEW_LINE> turtle.pencolor("black") <NEW_LINE> turtle.begin_fill() <NEW_LINE> turtle.right(120) <NEW_LINE> for y in range(5): <NEW_LINE> <INDENT> turtle.forward(size) <NEW_LINE> turtle.right(60) <NEW_LINE> <DEDENT> turtle.end_fill()
This function draws the outside hexagons :param size: :param color: :return:
625941bf76d4e153a657ea6e
def colon(r1, inc, r2): <NEW_LINE> <INDENT> s = np.sign(inc) <NEW_LINE> if s == 0: <NEW_LINE> <INDENT> return np.zeros(1) <NEW_LINE> <DEDENT> elif s == 1: <NEW_LINE> <INDENT> n = ((r2 - r1) + 2 * np.spacing(r2 - r1)) // inc <NEW_LINE> return np.linspace(r1, r1 + inc * n, n + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n = ((r1 - r2) + 2 * np.spacing(r1 - r2)) // np.abs(inc) <NEW_LINE> temp = np.linspace(r2, r2 + np.abs(inc) * n, n + 1) <NEW_LINE> return temp[::-1]
Matlab's colon operator, although unlike Matlab's, inc is required
625941bfac7a0e7691ed400f
def get_dt_fromtimestamp(timestamp, utc=True, multiplier=1): <NEW_LINE> <INDENT> if isinstance(timestamp, str): <NEW_LINE> <INDENT> timestamp = float(timestamp) <NEW_LINE> <DEDENT> timestamp = timestamp * multiplier <NEW_LINE> if utc: <NEW_LINE> <INDENT> dt = datetime.utcfromtimestamp(timestamp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dt = datetime.fromtimestamp(timestamp) <NEW_LINE> <DEDENT> return dt
根据timestamp获得对应的datetime对象
625941bf7c178a314d6ef39a
def setup(i): <NEW_LINE> <INDENT> s='' <NEW_LINE> cus=i.get('customize',{}) <NEW_LINE> full_path=cus.get('full_path','') <NEW_LINE> hosd=i['host_os_dict'] <NEW_LINE> env=i['env'] <NEW_LINE> ep=cus['env_prefix'] <NEW_LINE> pi=os.path.dirname(full_path) <NEW_LINE> while True: <NEW_LINE> <INDENT> if os.path.isdir(os.path.join(pi,'lib')): <NEW_LINE> <INDENT> found=True <NEW_LINE> break <NEW_LINE> <DEDENT> pix=os.path.dirname(pi) <NEW_LINE> if pix==pi: <NEW_LINE> <INDENT> found=False <NEW_LINE> break <NEW_LINE> <DEDENT> pi=pix <NEW_LINE> <DEDENT> if not found: <NEW_LINE> <INDENT> return {'return':1, 'error':'can\'t find root dir of this installation'} <NEW_LINE> <DEDENT> cus['path_lib'] = os.path.join(pi,'lib') <NEW_LINE> cus['path_include'] = os.path.join(pi,'include') <NEW_LINE> r = ck.access({'action': 'lib_path_export_script', 'module_uoa': 'os', 'host_os_dict': hosd, 'lib_path': cus.get('path_lib', '')}) <NEW_LINE> if r['return']>0: return r <NEW_LINE> s += r['script'] <NEW_LINE> static_lib_name = 'libgtest.a' <NEW_LINE> cus['static_lib'] = static_lib_name <NEW_LINE> env[ep+'_STATIC_NAME'] = static_lib_name <NEW_LINE> env[ep] = pi <NEW_LINE> return {'return':0, 'bat':s}
Input: { cfg - meta of this soft entry self_cfg - meta of module soft ck_kernel - import CK kernel module (to reuse functions) host_os_uoa - host OS UOA host_os_uid - host OS UID host_os_dict - host OS meta target_os_uoa - target OS UOA target_os_uid - target OS UID target_os_dict - target OS meta target_device_id - target device ID (if via ADB) tags - list of tags used to search this entry env - updated environment vars from meta customize - updated customize vars from meta deps - resolved dependencies for this soft interactive - if 'yes', can ask questions, otherwise quiet } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 bat - prepared string for bat file }
625941bfd10714528d5ffc1f
def re_install_net_ctrl_paths(self, vrf_table): <NEW_LINE> <INDENT> assert vrf_table <NEW_LINE> for dest in vrf_table.itervalues(): <NEW_LINE> <INDENT> for path in dest.known_path_list: <NEW_LINE> <INDENT> if path.source is None: <NEW_LINE> <INDENT> vrf_table.insert_vrf_path( path.nlri, path.nexthop, gen_lbl=True ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> LOG.debug('Re-installed NC paths with current policy for table %s.', vrf_table)
Re-installs paths from NC with current BGP policy. Iterates over known paths from NC installed in `vrf4_table` and adds new path with path attributes as per current VRF configuration.
625941bf32920d7e50b2810c
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'asame.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941bfb545ff76a8913d55
def GetSignature(self): <NEW_LINE> <INDENT> src_digest = ''.join([hashlib.sha256(open(f, 'rb').read()).hexdigest() for f in self.all_files]) <NEW_LINE> dep_digest = ''.join([Brewery.Signature(d) for d in self.deps]) <NEW_LINE> command_digest = str(self.command_groups) <NEW_LINE> return hashlib.sha256( (src_digest + dep_digest + command_digest).encode('utf-8')).hexdigest()
Generate the signature of the build object, and see if we need to rebuild it.
625941bfbe383301e01b53ca