code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def synchronize_metadata(self): <NEW_LINE> <INDENT> self._set_meta('id', self.id) <NEW_LINE> self._set_meta('source_template_id', self.template.id) <NEW_LINE> if self.appliance_pool is not None: <NEW_LINE> <INDENT> self._set_meta('pool_id', self.appliance_pool.id) <NEW_LINE> self._set_meta('pool_total_count', self.appliance_pool.total_count) <NEW_LINE> self._set_meta('pool_group', self.appliance_pool.group.id) <NEW_LINE> if self.appliance_pool.provider is not None: <NEW_LINE> <INDENT> self._set_meta('pool_provider', self.appliance_pool.provider.id) <NEW_LINE> <DEDENT> self._set_meta('pool_version', self.appliance_pool.version) <NEW_LINE> self._set_meta( 'pool_appliance_date', apply_if_not_none(self.appliance_pool.date, "isoformat")) <NEW_LINE> self._set_meta('pool_owner_id', self.appliance_pool.owner.id) <NEW_LINE> self._set_meta('pool_owner_username', self.appliance_pool.owner.username) <NEW_LINE> self._set_meta('pool_preconfigured', self.appliance_pool.preconfigured) <NEW_LINE> self._set_meta('pool_description', self.appliance_pool.description) <NEW_LINE> self._set_meta('pool_not_needed_anymore', self.appliance_pool.not_needed_anymore) <NEW_LINE> self._set_meta('pool_finished', self.appliance_pool.finished) <NEW_LINE> self._set_meta('pool_yum_update', self.appliance_pool.yum_update) <NEW_LINE> <DEDENT> self._set_meta('datetime_leased', apply_if_not_none(self.datetime_leased, "isoformat")) <NEW_LINE> self._set_meta('leased_until', apply_if_not_none(self.leased_until, "isoformat")) <NEW_LINE> self._set_meta('status_changed', apply_if_not_none(self.status_changed, "isoformat")) <NEW_LINE> self._set_meta('ready', self.ready) <NEW_LINE> self._set_meta('description', self.description) <NEW_LINE> self._set_meta('lun_disk_connected', self.lun_disk_connected) <NEW_LINE> self._set_meta('swap', self.swap) <NEW_LINE> self._set_meta('ssh_failed', self.ssh_failed)
If possible, uploads some metadata to the provider VM object to be able to recover.
625941c0099cdd3c635f0bc0
def _pull_runs(self): <NEW_LINE> <INDENT> data = "min_time=%s" % datetime.datetime.now().isoformat() <NEW_LINE> data = data.encode('utf-8') <NEW_LINE> res = request.urlopen(db_receiver_url, data) <NEW_LINE> return json.loads(res.read().decode('utf-8'))
This is a helper method that pulls data from the server-side database. @return string A string representing the URL request response, which will contain data concerning users' patterns and the datetime at which they are to run.
625941c057b8e32f524833fd
def __init__(self): <NEW_LINE> <INDENT> self.Text = "MainMenu control" <NEW_LINE> self.label = Label() <NEW_LINE> self.label.Text = "Click a sub-menu of Main menu" <NEW_LINE> self.label.Dock = DockStyle.Bottom <NEW_LINE> self.mainmenu = MainMenu() <NEW_LINE> self.menu_file = MenuItem() <NEW_LINE> self.menu_file.Text = "&File" <NEW_LINE> self.menu_edit = MenuItem() <NEW_LINE> self.menu_edit.Text = "&Edit" <NEW_LINE> self.menu_help = MenuItem() <NEW_LINE> self.menu_help.Text = "&Help" <NEW_LINE> self.menu_file_new = MenuItem() <NEW_LINE> self.menu_file_new.Text = "&New" <NEW_LINE> self.menu_file_new.Click += self.on_click <NEW_LINE> self.menu_file_new_doc = MenuItem() <NEW_LINE> self.menu_file_new_doc.Text = "&Document" <NEW_LINE> self.menu_file_new_doc.Click += self.on_click <NEW_LINE> self.menu_file_open = MenuItem() <NEW_LINE> self.menu_file_open.Text = "&Open" <NEW_LINE> self.menu_file_open.Click += self.on_click <NEW_LINE> self.menu_file_exit = MenuItem() <NEW_LINE> self.menu_file_exit.Text = "E&xit" <NEW_LINE> self.menu_file_exit.Click += self.on_click <NEW_LINE> self.menu_edit_undo = MenuItem() <NEW_LINE> self.menu_edit_undo.Text = "&Undo" <NEW_LINE> self.menu_edit_undo.Click += self.on_click <NEW_LINE> self.menu_edit_redo = MenuItem() <NEW_LINE> self.menu_edit_redo.Text = "&Redo" <NEW_LINE> self.menu_edit_redo.Click += self.on_click <NEW_LINE> self.menu_help_about = MenuItem() <NEW_LINE> self.menu_help_about.Text = "&About" <NEW_LINE> self.menu_help_about.Click += self.on_click <NEW_LINE> self.mainmenu.MenuItems.Add(self.menu_file) <NEW_LINE> self.mainmenu.MenuItems.Add(self.menu_edit) <NEW_LINE> self.mainmenu.MenuItems.Add(self.menu_help) <NEW_LINE> self.menu_file.MenuItems.Add(self.menu_file_new) <NEW_LINE> self.menu_file_new.MenuItems.Add(self.menu_file_new_doc) <NEW_LINE> self.menu_file.MenuItems.Add(self.menu_file_open) <NEW_LINE> self.menu_file.MenuItems.Add(self.menu_file_exit) <NEW_LINE> self.menu_edit.MenuItems.Add(self.menu_edit_undo) <NEW_LINE> 
self.menu_edit.MenuItems.Add(self.menu_edit_redo) <NEW_LINE> self.menu_help.MenuItems.Add(self.menu_help_about) <NEW_LINE> self.Menu = self.mainmenu <NEW_LINE> self.Controls.Add(self.label)
MainMenuSample class init function.
625941c0bf627c535bc13132
def replace_alt(text, alt_txtlist, new_txt): <NEW_LINE> <INDENT> for txt in alt_txtlist: <NEW_LINE> <INDENT> text = text.replace(txt, new_txt) <NEW_LINE> <DEDENT> return text
Method to do replace multiple alternative texts with one specific text.
625941c091af0d3eaac9b97a
def set_default_subparser(self, defname): <NEW_LINE> <INDENT> self._default_subparser = defname
Set default subparser (subcommand name).
625941c00a50d4780f666df4
def is_event_processor_valid(event_processor): <NEW_LINE> <INDENT> return _has_method(event_processor, 'process')
Given an event_processor, determine if it is valid or not i.e. provides a process method. Args: event_processor: Provides a process method to create user events and then send requests. Returns: Boolean depending upon whether event_processor is valid or not.
625941c0b545ff76a8913d7a
def get_cwts_stocked_mu(mu_poly, year=None, strain=None): <NEW_LINE> <INDENT> events = (Event.objects.filter(taggingevent__tag_type=6). filter(geom__within=mu_poly.geom)) <NEW_LINE> if year: <NEW_LINE> <INDENT> events = events.filter(year=year) <NEW_LINE> <DEDENT> if strain: <NEW_LINE> <INDENT> events = events.filter(lot__strain__strain_code='SI') <NEW_LINE> <DEDENT> cwt_numbers = [] <NEW_LINE> for event in events: <NEW_LINE> <INDENT> tmp = event.get_cwts() <NEW_LINE> if tmp: <NEW_LINE> <INDENT> cwt_numbers.extend([x.cwt for x in tmp]) <NEW_LINE> <DEDENT> <DEDENT> cwt_numbers=list(set(cwt_numbers)) <NEW_LINE> recoveries = CWT_recovery.objects.filter(cwt__in=cwt_numbers) <NEW_LINE> cwts = CWT.objects.filter(cwt__in=cwt_numbers) <NEW_LINE> foo = [x.cwt for x in recoveries] <NEW_LINE> recovery_counts = dict([(i, foo.count(i)) for i in set(foo)]) <NEW_LINE> for x in cwts: <NEW_LINE> <INDENT> x.recovery_count = recovery_counts.get(x.cwt, 0) <NEW_LINE> <DEDENT> return(dict( cwts=cwts, events = events, recoveries = recoveries))
A helper function to actually get the cwt stocking and recovery data for cwts stocked in a management unit. Given a management unit polygon, find all of the stocking events that have occured inside of it, and then get all of their associated recovery events. Returns a dictionary containing the following keys: cwts = set of unique cwt numbers recovered in this management unit recoveries = recovery events for this management unit (year and strain) events = Ontario stocking events associated with the cwts recovered
625941c055399d3f05588617
def solve_part_two(self): <NEW_LINE> <INDENT> count = 1 <NEW_LINE> a = self.molecule <NEW_LINE> while True: <NEW_LINE> <INDENT> for k, r in self.replacements: <NEW_LINE> <INDENT> lr = len(r) <NEW_LINE> index = a.rfind(r) <NEW_LINE> if index == -1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> a = a[0:index] + k + a[index + lr:] <NEW_LINE> count += 1 <NEW_LINE> for e in self.electrons: <NEW_LINE> <INDENT> if a == e[1]: <NEW_LINE> <INDENT> return count <NEW_LINE> <DEDENT> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> return None
Solution for part two. Greedy solution works here (possibly this can be seen in the input grammar) but generally we would have to do a BFS to find out shortest number of steps. This is "impossible" here due to branching factor.
625941c09b70327d1c4e0d38
def globally_show_author(self): <NEW_LINE> <INDENT> if self.anonymous: <NEW_LINE> <INDENT> return getToolByName(self.context, 'portal_properties').site_properties.getProperty('allowAnonymousViewAbout', True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
View author column
625941c0be8e80087fb20baa
def spec_features_keywd(gff_parts): <NEW_LINE> <INDENT> for t_id in ["transcript_id", "transcriptId", "proteinId"]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> gff_parts["info"]["Parent"] = gff_parts["info"][t_id] <NEW_LINE> break <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> for g_id in ["gene_id", "geneid", "geneId", "name", "gene_name", "genename"]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> gff_parts["info"]["GParent"] = gff_parts["info"][g_id] <NEW_LINE> break <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> for flat_name in ["Transcript", "CDS"]: <NEW_LINE> <INDENT> if gff_parts["info"].has_key(flat_name): <NEW_LINE> <INDENT> if gff_parts['type'] in [flat_name] or re.search(r'transcript', gff_parts['type'], re.IGNORECASE): <NEW_LINE> <INDENT> if not gff_parts['id']: <NEW_LINE> <INDENT> gff_parts['id'] = gff_parts['info'][flat_name][0] <NEW_LINE> <DEDENT> <DEDENT> elif gff_parts["type"] in ["intron", "exon", "three_prime_UTR", "coding_exon", "five_prime_UTR", "CDS", "stop_codon", "start_codon"]: <NEW_LINE> <INDENT> gff_parts["info"]["Parent"] = gff_parts["info"][flat_name] <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> return gff_parts
Specify the feature key word according to the GFF specifications @args gff_parts: attribute field key @type gff_parts: str
625941c06fb2d068a760efff
def _get_slaves_dict(hostname, port): <NEW_LINE> <INDENT> url = 'http://%s:%i/master/state.json' % (hostname, port) <NEW_LINE> response = urllib2.urlopen(url) <NEW_LINE> state_dict = json.load(response) <NEW_LINE> return state_dict['slaves']
Queries the Mesos master REST API to get information for the given slave :param hostname: The hostname of the master :type hostname: str :param port: The port of the master :type port: int :returns: A dictionary structure representing the slave information. :rtype: dict
625941c050485f2cf553ccfd
def old_stars_map_name_for_model(self, model_name): <NEW_LINE> <INDENT> index = self.index_for_model(model_name) <NEW_LINE> return self["Old stars map"][index]
THis function ... :param model_name: :return:
625941c030c21e258bdfa400
def pushBinaryNums(self): <NEW_LINE> <INDENT> self.chars.update(range(1))
rief 插入二进制数字
625941c076d4e153a657ea94
def backward(self, lstm_scores: torch.Tensor, word_seq_lens: torch.Tensor) -> torch.Tensor: <NEW_LINE> <INDENT> batch_size = lstm_scores.size(0) <NEW_LINE> seq_len = lstm_scores.size(1) <NEW_LINE> dev_num = lstm_scores.get_device() <NEW_LINE> curr_dev = torch.device(f"cuda:{dev_num}") if dev_num >= 0 else torch.device("cpu") <NEW_LINE> beta = torch.zeros(batch_size, seq_len, self.label_size, device=curr_dev) <NEW_LINE> rev_score = self.transition.transpose(0, 1).view(1, 1, self.label_size, self.label_size).expand(batch_size, seq_len, self.label_size, self.label_size) + lstm_scores.view(batch_size, seq_len, 1, self.label_size).expand(batch_size, seq_len, self.label_size, self.label_size) <NEW_LINE> perm_idx = torch.zeros(batch_size, seq_len, device=curr_dev) <NEW_LINE> for batch_idx in range(batch_size): <NEW_LINE> <INDENT> perm_idx[batch_idx][:word_seq_lens[batch_idx]] = torch.range(word_seq_lens[batch_idx] - 1, 0, -1) <NEW_LINE> <DEDENT> perm_idx = perm_idx.long() <NEW_LINE> for i, length in enumerate(word_seq_lens): <NEW_LINE> <INDENT> rev_score[i, :length] = rev_score[i, :length][perm_idx[i, :length]] <NEW_LINE> <DEDENT> beta[:, 0, :] = rev_score[:, 0, self.end_idx, :] <NEW_LINE> for word_idx in range(1, seq_len): <NEW_LINE> <INDENT> before_log_sum_exp = beta[:, word_idx - 1, :].view(batch_size, self.label_size, 1).expand(batch_size, self.label_size, self.label_size) + rev_score[:, word_idx, :, :] <NEW_LINE> beta[:, word_idx, :] = log_sum_exp_pytorch(before_log_sum_exp) <NEW_LINE> <DEDENT> last_beta = torch.gather(beta, 1, word_seq_lens.view(batch_size, 1, 1).expand(batch_size, 1, self.label_size) - 1).view(batch_size, self.label_size) <NEW_LINE> last_beta += self.transition.transpose(0, 1)[:, self.start_idx].view(1, self.label_size).expand(batch_size, self.label_size) <NEW_LINE> last_beta = log_sum_exp_pytorch(last_beta.view(batch_size, self.label_size, 1)).view(batch_size) <NEW_LINE> for i, length in enumerate(word_seq_lens): <NEW_LINE> <INDENT> beta[i, 
:length] = beta[i, :length][perm_idx[i, :length]] <NEW_LINE> <DEDENT> return torch.sum(last_beta)
Backward algorithm. A benchmark implementation which is ready to use. :param lstm_scores: shape: (batch_size, sent_len, label_size) NOTE: the score from LSTMs, not `all_scores` (which add up the transtiion) :param word_seq_lens: shape: (batch_size,) :return: Backward variable
625941c0d7e4931a7ee9de81
def __init__(self): <NEW_LINE> <INDENT> self.wit_name = ".wit" <NEW_LINE> self.img_name = "images" <NEW_LINE> self.staging_name = 'staging_area' <NEW_LINE> self.wit_parent = os.getcwd() <NEW_LINE> self.wit_path = Path(os.path.join(self.wit_parent, self.wit_name)) <NEW_LINE> self.img_path = Path(os.path.join(self.wit_path, self.img_name)) <NEW_LINE> self.staging = os.path.join(self.wit_path, self.staging_name) <NEW_LINE> if '.wit' not in listdir(self.wit_parent): <NEW_LINE> <INDENT> os.mkdir(self.wit_path) <NEW_LINE> os.mkdir(self.img_path) <NEW_LINE> os.mkdir(self.staging) <NEW_LINE> <DEDENT> os.chdir(self.wit_path) <NEW_LINE> with open("activated.txt", "w") as f: <NEW_LINE> <INDENT> f.write("master") <NEW_LINE> f.close()
making a directory named "wit" and directory named "images
625941c0d4950a0f3b08c2b5
def render_html(string, **kwargs): <NEW_LINE> <INDENT> string = Template(string).render(**kwargs) <NEW_LINE> display(HTML(string))
html_string = """ <svg width="400" height="200"></svg> """
625941c0a8ecb033257d3032
def size(self, name: str) -> int: <NEW_LINE> <INDENT> return self._ipfs_client.object_stat(name)['CumulativeSize']
Total size, in bytes, of IPFS content with multihash `name`.
625941c0cdde0d52a9e52f95
def get_excel_data(name: str): <NEW_LINE> <INDENT> excel2json.convert_from_file(name)
Convert Excel file to JSON files. The excel file is converted into separate JSON files based on the spreadsheet names. :param name: Type string, the name of the file including .xlsx
625941c0a17c0f6771cbdfb7
def _validate_response(operation, is_create, item_type="Permission"): <NEW_LINE> <INDENT> if not islambda(operation): <NEW_LINE> <INDENT> raise TypeError("invalid use of method") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> _resp = operation() <NEW_LINE> if _resp is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> except HTTPException as exc: <NEW_LINE> <INDENT> _resp = exc <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> if is_create: <NEW_LINE> <INDENT> if _resp.status_code in [200, 201]: <NEW_LINE> <INDENT> _log_permission("{} successfully created.".format(item_type), entry_index, level=logging.INFO, trail="") <NEW_LINE> <DEDENT> elif _resp.status_code == 409: <NEW_LINE> <INDENT> _log_permission("{} already exists.".format(item_type), entry_index, level=logging.INFO) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _log_permission("Unknown response [{}]".format(_resp.status_code), entry_index, permission=permission_config_entry, level=logging.ERROR) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if _resp.status_code == 200: <NEW_LINE> <INDENT> _log_permission("{} successfully removed.".format(item_type), entry_index, level=logging.INFO, trail="") <NEW_LINE> <DEDENT> elif _resp.status_code == 404: <NEW_LINE> <INDENT> _log_permission("{} already removed.".format(item_type), entry_index, level=logging.INFO) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _log_permission("Unknown response [{}]".format(_resp.status_code), entry_index, permission=permission_config_entry, level=logging.ERROR)
Validate action/operation applied and handles raised ``HTTPException`` as returned response.
625941c0377c676e9127210d
def infer(self, model_name, inputs, model_version="", outputs=None, request_id="", sequence_id=0, sequence_start=False, sequence_end=False, priority=0, timeout=None, headers=None): <NEW_LINE> <INDENT> if headers is not None: <NEW_LINE> <INDENT> metadata = headers.items() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> metadata = () <NEW_LINE> <DEDENT> if type(model_version) != str: <NEW_LINE> <INDENT> raise_error("model version must be a string") <NEW_LINE> <DEDENT> request = _get_inference_request(model_name=model_name, inputs=inputs, model_version=model_version, request_id=request_id, outputs=outputs, sequence_id=sequence_id, sequence_start=sequence_start, sequence_end=sequence_end, priority=priority, timeout=timeout) <NEW_LINE> if self._verbose: <NEW_LINE> <INDENT> print("infer, metadata {}\n{}".format(metadata, request)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> response = self._client_stub.ModelInfer(request=request, metadata=metadata) <NEW_LINE> if self._verbose: <NEW_LINE> <INDENT> print(response) <NEW_LINE> <DEDENT> result = InferResult(response) <NEW_LINE> return result <NEW_LINE> <DEDENT> except grpc.RpcError as rpc_error: <NEW_LINE> <INDENT> raise_error_grpc(rpc_error)
Run synchronous inference using the supplied 'inputs' requesting the outputs specified by 'outputs'. Parameters ---------- model_name: str The name of the model to run inference. inputs : list A list of InferInput objects, each describing data for a input tensor required by the model. model_version : str The version of the model to run inference. The default value is an empty string which means then the server will choose a version based on the model and internal policy. outputs : list A list of InferRequestedOutput objects, each describing how the output data must be returned. If not specified all outputs produced by the model will be returned using default settings. request_id : str Optional identifier for the request. If specified will be returned in the response. Default value is an empty string which means no request_id will be used. sequence_id : int The unique identifier for the sequence being represented by the object. Default value is 0 which means that the request does not belong to a sequence. sequence_start : bool Indicates whether the request being added marks the start of the sequence. Default value is False. This argument is ignored if 'sequence_id' is 0. sequence_end : bool Indicates whether the request being added marks the end of the sequence. Default value is False. This argument is ignored if 'sequence_id' is 0. priority : int Indicates the priority of the request. Priority value zero indicates that the default priority level should be used (i.e. same behavior as not specifying the priority parameter). Lower value priorities indicate higher priority levels. Thus the highest priority level is indicated by setting the parameter to 1, the next highest is 2, etc. If not provided, the server will handle the request using default setting for the model. timeout : int The timeout value for the request, in microseconds. If the request cannot be completed within the time the server can take a model-specific action such as terminating the request. 
If not provided, the server will handle the request using default setting for the model. headers : dict Optional dictionary specifying additional HTTP headers to include in the request. Returns ------- InferResult The object holding the result of the inference. Raises ------ InferenceServerException If server fails to perform inference.
625941c07b25080760e393be
def check_schema(body, schema): <NEW_LINE> <INDENT> validator = jsonschema.Draft4Validator( schema, format_checker=jsonschema.FormatChecker()) <NEW_LINE> try: <NEW_LINE> <INDENT> validator.validate(body) <NEW_LINE> <DEDENT> except jsonschema.ValidationError as exc: <NEW_LINE> <INDENT> raise exception.InvalidParameterValue(_('Invalid create body: %s') % exc)
Ensure all necessary keys are present and correct in create body. Check that the user-specified create body is in the expected format and include the required information. :param body: create body :raises InvalidParameterValue: if validation of create body fails.
625941c0ad47b63b2c509ee5
def cost(self, child): <NEW_LINE> <INDENT> cost = 0 <NEW_LINE> while child != self.x_init: <NEW_LINE> <INDENT> cost += self.graph._node[child] <NEW_LINE> child = self.parent(child) <NEW_LINE> if child is None: <NEW_LINE> <INDENT> return float('inf') <NEW_LINE> <DEDENT> <DEDENT> return cost
Calculates the cost between nodes rootward until the root node is reached.
625941c0e5267d203edcdc04
def sanitized_games(games): <NEW_LINE> <INDENT> g_vec = [] <NEW_LINE> print("Cleaning games...") <NEW_LINE> for g in games: <NEW_LINE> <INDENT> if g.white.user_id is None or g.black.user_id is None: <NEW_LINE> <INDENT> print(' ', g) <NEW_LINE> <DEDENT> elif g.handicap > 1 and (-1 > g.komi > 1): <NEW_LINE> <INDENT> print(' ', g) <NEW_LINE> <DEDENT> elif g.handicap > 6: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif g.komi < 0 or g.komi > 8.6: <NEW_LINE> <INDENT> print(' ', g) <NEW_LINE> <DEDENT> elif not (g.result.startswith('W+') or g.result.startswith('B+')): <NEW_LINE> <INDENT> print(' ', g) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> g_vec.append( (g.white.user_id, g.black.user_id, 1.0 if g.result.startswith('W') else 0.0, g.date_played, g.handicap, g.komi)) <NEW_LINE> <DEDENT> <DEDENT> return g_vec
Sanitizes a list of games into result tuples for rating. A result tuple is the form (w, b, result, date, handicap, komi) Where 'result' is 1 if w won and 0 otherwise.
625941c066673b3332b91ff5
def test_stream_decode_values_types(self): <NEW_LINE> <INDENT> pdu = A_ASSOCIATE_AC() <NEW_LINE> pdu.Decode(a_associate_ac) <NEW_LINE> app_context = pdu.variable_items[0] <NEW_LINE> self.assertEqual(app_context.item_type, 0x10) <NEW_LINE> self.assertEqual(app_context.item_length, 21) <NEW_LINE> self.assertEqual(app_context.application_context_name, '1.2.840.10008.3.1.1.1') <NEW_LINE> self.assertTrue(isinstance(app_context.item_type, int)) <NEW_LINE> self.assertTrue(isinstance(app_context.item_length, int)) <NEW_LINE> self.assertTrue(isinstance(app_context.application_context_name, UID)) <NEW_LINE> self.assertEqual(app_context.application_context_name, '1.2.840.10008.3.1.1.1') <NEW_LINE> self.assertTrue(isinstance(app_context.application_context_name, UID))
Check decoding an assoc_ac produces the correct application context
625941c07cff6e4e811178ea
def access_account(where_from=False, user=None): <NEW_LINE> <INDENT> if not where_from: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> account = input('Please enter your username (required): ') <NEW_LINE> password = getpass(prompt='Master Password (required): ') <NEW_LINE> main_cur.execute(f"SELECT master_pwd FROM Users WHERE name='{account}'") <NEW_LINE> result = main_cur.fetchone() <NEW_LINE> if result is None: <NEW_LINE> <INDENT> print(f'The username {account} is not valid') <NEW_LINE> continue <NEW_LINE> <DEDENT> if bcrypt.checkpw(password.encode('utf-8'), result[0].encode('utf-8')): <NEW_LINE> <INDENT> actions(account) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Password is not correct!') <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> actions(user)
To access the account you need to provide username and master password. If coming from registration it will be included in the process, otherwise a direct access will need the entire process
625941c07d43ff24873a2c03
def _processFeature(self, obj, shape, sub, pNum): <NEW_LINE> <INDENT> p = None <NEW_LINE> dYdX = None <NEW_LINE> cat = sub[:4] <NEW_LINE> PathLog.debug("sub-feature is {}".format(cat)) <NEW_LINE> Ref = getattr(obj, "Reference" + str(pNum)) <NEW_LINE> if cat == "Face": <NEW_LINE> <INDENT> BE = self._getBottomEdge(shape) <NEW_LINE> if BE: <NEW_LINE> <INDENT> self.bottomEdges.append(BE) <NEW_LINE> <DEDENT> V0 = shape.Vertexes[0] <NEW_LINE> v1 = shape.CenterOfMass <NEW_LINE> temp = FreeCAD.Vector(v1.x - V0.X, v1.y - V0.Y, 0.0) <NEW_LINE> dYdX = self._normalizeVector(temp) <NEW_LINE> norm = shape.normalAt(0.0, 0.0) <NEW_LINE> if norm.z != 0: <NEW_LINE> <INDENT> msg = translate( "Path_Slot", "The selected face is not oriented vertically:" ) <NEW_LINE> FreeCAD.Console.PrintError(msg + " {}.\n".format(sub)) <NEW_LINE> return False <NEW_LINE> <DEDENT> if Ref == "Center of Mass": <NEW_LINE> <INDENT> comS = shape.CenterOfMass <NEW_LINE> p = FreeCAD.Vector(comS.x, comS.y, 0.0) <NEW_LINE> <DEDENT> elif Ref == "Center of BoundBox": <NEW_LINE> <INDENT> comS = shape.BoundBox.Center <NEW_LINE> p = FreeCAD.Vector(comS.x, comS.y, 0.0) <NEW_LINE> <DEDENT> elif Ref == "Lowest Point": <NEW_LINE> <INDENT> p = self._getLowestPoint(shape) <NEW_LINE> <DEDENT> elif Ref == "Highest Point": <NEW_LINE> <INDENT> p = self._getHighestPoint(shape) <NEW_LINE> <DEDENT> <DEDENT> elif cat == "Edge": <NEW_LINE> <INDENT> featDetIdx = pNum - 1 <NEW_LINE> if shape.Curve.TypeId == "Part::GeomCircle": <NEW_LINE> <INDENT> self.featureDetails[featDetIdx] = "arc" <NEW_LINE> <DEDENT> v0 = shape.Edges[0].Vertexes[0] <NEW_LINE> v1 = shape.Edges[0].Vertexes[1] <NEW_LINE> temp = FreeCAD.Vector(v1.X - v0.X, v1.Y - v0.Y, 0.0) <NEW_LINE> dYdX = self._normalizeVector(temp) <NEW_LINE> if Ref == "Center of Mass": <NEW_LINE> <INDENT> comS = shape.CenterOfMass <NEW_LINE> p = FreeCAD.Vector(comS.x, comS.y, 0.0) <NEW_LINE> <DEDENT> elif Ref == "Center of BoundBox": <NEW_LINE> <INDENT> comS = shape.BoundBox.Center <NEW_LINE> p 
= FreeCAD.Vector(comS.x, comS.y, 0.0) <NEW_LINE> <DEDENT> elif Ref == "Lowest Point": <NEW_LINE> <INDENT> p = self._findLowestPointOnEdge(shape) <NEW_LINE> <DEDENT> elif Ref == "Highest Point": <NEW_LINE> <INDENT> p = self._findHighestPointOnEdge(shape) <NEW_LINE> <DEDENT> <DEDENT> elif cat == "Vert": <NEW_LINE> <INDENT> V = shape.Vertexes[0] <NEW_LINE> p = FreeCAD.Vector(V.X, V.Y, 0.0) <NEW_LINE> <DEDENT> if p: <NEW_LINE> <INDENT> return (p, dYdX, cat) <NEW_LINE> <DEDENT> return False
_processFeature(obj, shape, sub, pNum)... This function analyzes a shape and returns a three item tuple containing: working point, shape orientation/slope, shape category as face, edge, or vert.
625941c05f7d997b871749fa
def create_cloned_volume(self, volume, src_vref): <NEW_LINE> <INDENT> LOG.info('create_cloned_volume, ' 'target volume id: %(tid)s, ' 'source volume id: %(sid)s, Enter method.', {'tid': volume['id'], 'sid': src_vref['id']}) <NEW_LINE> element_path, metadata = ( self.common.create_cloned_volume(volume, src_vref)) <NEW_LINE> v_metadata = volume.get('volume_metadata') <NEW_LINE> if v_metadata: <NEW_LINE> <INDENT> for data in v_metadata: <NEW_LINE> <INDENT> metadata[data['key']] = data['value'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> v_metadata = volume.get('metadata', {}) <NEW_LINE> metadata.update(v_metadata) <NEW_LINE> <DEDENT> LOG.info('create_cloned_volume, info: %s, Exit method.', metadata) <NEW_LINE> return {'provider_location': six.text_type(element_path), 'metadata': metadata}
Create cloned volume.
625941c0460517430c3940ef
def setInitialState(self, QAbstractState): <NEW_LINE> <INDENT> pass
QState.setInitialState(QAbstractState)
625941c08e71fb1e9831d70f
def scene_pgm(unsused, args, scene_idx: int): <NEW_LINE> <INDENT> scene = scenes_names[scene_idx] <NEW_LINE> obs_ws.call(requests.SetCurrentScene(scene))
sends scene from preview into pgm
625941c0cc0a2c11143dcdf5
def getMainGroup(create=True, fromControl=None): <NEW_LINE> <INDENT> if fromControl: <NEW_LINE> <INDENT> path = fromControl.fullPath().split('|')[1:] <NEW_LINE> for i, name in enumerate(path): <NEW_LINE> <INDENT> if attributeQuery( config.FOSSIL_MAIN_CONTROL, ex=1, node=name ): <NEW_LINE> <INDENT> return PyNode('|' + '|'.join( path[:i + 1] )) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> existing = node.mainGroup(create=False) <NEW_LINE> if existing: <NEW_LINE> <INDENT> return existing <NEW_LINE> <DEDENT> if create: <NEW_LINE> <INDENT> main = node.mainGroup() <NEW_LINE> addRootMotion(main) <NEW_LINE> return main
Wraps lib.anim.getMainGroup() so code that simply needs to obtain the group can do so while this function actually builds all the needed parts. fromControl ensures the right mainGroup is found.
625941c063f4b57ef0001084
@set_module('mxnet.symbol.numpy') <NEW_LINE> def dsplit(ary, indices_or_sections): <NEW_LINE> <INDENT> indices = [] <NEW_LINE> sections = 0 <NEW_LINE> if isinstance(indices_or_sections, int): <NEW_LINE> <INDENT> sections = indices_or_sections <NEW_LINE> <DEDENT> elif isinstance(indices_or_sections, (list, set, tuple)): <NEW_LINE> <INDENT> indices = [0] + list(indices_or_sections) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('indices_or_sections must either int or tuple of ints') <NEW_LINE> <DEDENT> ret = _npi.dsplit(ary, indices, 2, False, sections) <NEW_LINE> return ret
Split array into multiple sub-arrays along the 3rd axis (depth). Please refer to the `split` documentation. `dsplit` is equivalent to `split` with ``axis=2``, the array is always split along the third axis provided the array dimension is greater than or equal to 3. Parameters ---------- ary : _Symbol Array to be divided into sub-arrays. indices_or_sections : int or 1-D Python tuple, list or set. If `indices_or_sections` is an integer, N, the array will be divided into N equal arrays along axis 2. If such a split is not possible, an error is raised. If `indices_or_sections` is a 1-D array of sorted integers, the entries indicate where along axis 2 the array is split. For example, ``[2, 3]`` would result in - ary[:, :, :2] - ary[:, :, 2:3] - ary[:, :, 3:] If an index exceeds the dimension of the array along axis 2, an error will be thrown.
625941c0adb09d7d5db6c6f6
def get_median(number_list): <NEW_LINE> <INDENT> number_list.sort() <NEW_LINE> n=len(number_list) <NEW_LINE> mid = n//2 <NEW_LINE> median = (number_list[mid]+number_list[mid-1])/2 if n%2==0 else number_list[mid] <NEW_LINE> return median
주어진 리스트 숫자들의 중간값을 구함. Parameters: number_list (list): integer로 값으로만 구성된 list ex - [10, 33, 22, 99, 33] Returns: median (int): parameter number_list 숫자들의 중간값 Examples: >>> number_list = [39, 54, 32, 11, 99] >>> import basic_math as bm >>> bm.get_median(number_list) 39 >>> number_list2 = [39, 54, 32, 11, 99, 5] >>> bm.get_median(number_list2) 35.5
625941c0009cb60464c63318
def test_update_task_duration_to_scheduled(self): <NEW_LINE> <INDENT> task = Task.objects.create( user=self.user, name='Testtask', duration=Decimal(2)) <NEW_LINE> TaskChunk.objects.create( task=task, day=date(2000, 1, 2), day_order=1, duration=Decimal('0.5')) <NEW_LINE> resp = self.client.patch('/task/task/{}/'.format(task.pk), { 'duration': '0.5', }) <NEW_LINE> self.assertEqual( resp.status_code, status.HTTP_200_OK) <NEW_LINE> task.refresh_from_db() <NEW_LINE> self.assertEqual( task.duration, Decimal('0.5'))
Test setting the duration of a task to exactly the scheduled duration.
625941c007d97122c41787eb
def on_exit(self): <NEW_LINE> <INDENT> def wrap_exit(func): <NEW_LINE> <INDENT> self._exit_callback = func <NEW_LINE> <DEDENT> return wrap_exit
A decorator function to register the event with one argument to specify interval
625941c073bcbd0ca4b2bfdb
def saveproj(self): <NEW_LINE> <INDENT> projdata = {} <NEW_LINE> if self.radiobutton_interval.isChecked(): <NEW_LINE> <INDENT> projdata['type'] = 'interval' <NEW_LINE> <DEDENT> elif self.radiobutton_mean.isChecked(): <NEW_LINE> <INDENT> projdata['type'] = 'mean' <NEW_LINE> <DEDENT> elif self.radiobutton_median.isChecked(): <NEW_LINE> <INDENT> projdata['type'] = 'median' <NEW_LINE> <DEDENT> elif self.radiobutton_8bit.isChecked(): <NEW_LINE> <INDENT> projdata['type'] = '8bit' <NEW_LINE> <DEDENT> return projdata
Save project data from class. Returns ------- projdata : dictionary Project data to be saved to JSON project file.
625941c010dbd63aa1bd2b09
def DeleteDigitalAssetAddress(self, label, username, headers=None, query_params=None):
    """Remove a digital-wallet address.

    Issues ``DELETE /users/{username}/digitalwallet/{label}`` against the
    configured client base URL and returns the raw response.
    """
    uri = '{}/users/{}/digitalwallet/{}'.format(
        self.client.base_url, username, label)
    return self.client.session.delete(
        uri, headers=headers, params=query_params)
625941c0b830903b967e9872
def p_type_def_2(t):
    """type_def : ENUM ID enum_body SEMI"""
    # NOTE: the docstring above IS the grammar rule for ply.yacc — keep exact.
    enum_id = t[2]
    enum_body = t[3]
    lineno = t.lineno(1)
    # Place the sort key just after the closing SEMI line.
    sortno = t.lineno(4) + 0.5
    if id_unique(enum_id, 'enum', lineno):
        name_dict[enum_id] = enum_info(enum_id, enum_body, lineno, sortno)
625941c094891a1f4081ba0d
def sort_pattern(f_pattern):
    """Sort frequent patterns by support (descending) then pattern (ascending).

    Parameters
    ----------
    f_pattern : dict
        Maps a pattern (an iterable of items) to its support count.

    Returns
    -------
    list
        ``(sorted_item_list, support)`` pairs in the required order.
    """
    pairs = [(sorted(pattern), support)
             for pattern, support in f_pattern.items()]
    pairs.sort(key=lambda entry: (-entry[1], entry[0]))
    return pairs
625941c0d99f1b3c44c674f9
def get_link_path(info, image_link_order=None):
    """Pick the thumbnail link out of ``info``.

    Tries the keys of ``image_link_order`` (default: 'web', 'main',
    '.swf') in order; otherwise falls back to the first key that is not
    'icon' or '_repo', and finally to the 'icon' entry itself.

    Returns the chosen link, or None when nothing usable is present.
    """
    order = image_link_order if image_link_order else ['web', 'main', '.swf']
    image_link = None
    found = False
    for key in order:
        if key in info:
            image_link = info[key]
            found = True
            break
    if not found:
        # Fall back to the first non-metadata entry (dict insertion order).
        for key, value in info.items():
            if key in ["icon", "_repo"]:
                continue
            image_link = info[key]
            break
    if not image_link:
        image_link = info.get('icon')
    return image_link
625941c055399d3f05588618
def plot(self, timestep="AUTO", metric="AUTO", **kwargs):
    """Plot the scoring history for an H2ORegressionModel.

    :param timestep: unit of measurement for the x-axis.
    :param metric: unit of measurement for the y-axis; for tree and
        deeplearning algorithms, one of "AUTO", "rmse", "deviance", "mae"
        ("AUTO" resolves to "rmse").
    :raises ValueError: if ``metric`` is not supported by the algorithm.
    """
    if self._model_json["algo"] in ("deeplearning", "deepwater", "drf", "gbm"):
        if metric == "AUTO":
            metric = "rmse"
        elif metric not in ("rmse", "deviance", "mae"):
            raise ValueError(
                "metric for H2ORegressionModel must be one of: AUTO, rmse, deviance, or mae")
    self._plot(timestep=timestep, metric=metric, **kwargs)
625941c016aa5153ce3623dd
def riemann(self, name=None, latex_name=None):
    """Return the Riemann curvature tensor associated with the metric.

    Shortcut for ``self.connection().riemann(name, latex_name)``.

    INPUT:

    - ``name`` -- (default: ``None``) name given to the Riemann tensor;
      if none, it defaults to "Riem(g)" downstream
    - ``latex_name`` -- (default: ``None``) LaTeX symbol for the tensor

    OUTPUT:

    - the Riemann curvature tensor, a tensor field of type (1,3)
    """
    connection = self.connection()
    return connection.riemann(name, latex_name)
625941c0eab8aa0e5d26dabc
def post(self, alias, uri, data=None, headers=None, files=None, cassette=None,
         allow_redirects=None):
    """Send a POST request on the session identified by ``alias``.

    `alias` identifies the Session object in the cache.
    `uri` is the target of the POST request.
    `data` is a dict of key-value pairs (urlencoded before sending) or
    raw body content; `headers` is a dict of extra request headers;
    `files` maps file names to file data to upload.
    When `cassette` is given the exchange is recorded/replayed with VCR
    under ``cassettes/POST``.
    """
    # Fix: the original used mutable default arguments (data={}, files={});
    # default to None and normalise here so calls never share state.
    if data is None:
        data = {}
    if files is None:
        files = {}
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    if cassette:
        with vcr.use_cassette(cassette, serializer='json',
                              cassette_library_dir='cassettes/POST',
                              record_mode='new_episodes',
                              match_on=['url', 'method', 'headers', 'body']):
            response = self.post_request(session, uri, data, headers, files, redir)
    else:
        response = self.post_request(session, uri, data, headers, files, redir)
    return response
625941c0097d151d1a222dc0
def regular_tracker_axis_ticks_float(
        start: float, stop: float, num: int,
        endpoint: bool = True) -> List[TrackerAxisTick]:
    """Build evenly spaced tracker-axis ticks labelled with their value.

    Args:
        start: starting value
        stop: stopping value
        num: number of ticks to return
        endpoint: include ``stop`` itself?

    Returns:
        a list of simple numerical TrackerAxisTick objects
    """
    return [TrackerAxisTick(value, str(value))
            for value in equally_spaced_ndarray(start, stop, num,
                                                endpoint=endpoint)]
625941c016aa5153ce3623de
def divide(self, dividend, divisor):
    """Integer division without */, truncating toward zero.

    :type dividend: int
    :type divisor: int
    :rtype: int

    The result is clamped to INT_MAX only on the ``divisor == -1``
    shortcut, matching the original behaviour.
    """
    if divisor == 1:
        return dividend
    if divisor == -1:
        negated = -dividend
        # Clamp the -INT_MIN overflow to INT_MAX.
        return 2**31 - 1 if negated >= 2**31 - 1 else negated
    numerator = dividend if dividend >= 0 else -dividend
    denominator = divisor if divisor >= 0 else -divisor
    # Iterative repeated doubling: subtract the largest shifted divisor
    # each round (replaces the original inner recursion).
    quotient = 0
    while numerator >= denominator:
        shifted, chunk = denominator, 1
        while numerator >= (shifted << 1):
            shifted <<= 1
            chunk <<= 1
        numerator -= shifted
        quotient += chunk
    same_sign = (dividend > 0 and divisor > 0) or (dividend < 0 and divisor < 0)
    return quotient if same_sign else -quotient
625941c0fff4ab517eb2f39f
def open(self):
    """Open the file backing this object.

    Creates ``self.filename`` first (via ``self.createFile``) when it
    does not exist yet, then opens it with mode ``self.type`` and keeps
    the handle in ``self.file``.
    """
    if not os.path.isfile(self.filename):
        self.createFile()
    # NOTE: `open` here resolves to the builtin (module globals), not to
    # this method — the method only shadows it on the class namespace.
    self.file = open(self.filename, self.type)
625941c076d4e153a657ea95
def active_thread_priority(self):
    """active_thread_priority(shared_tuner_sptr self) -> int

    Delegate to the SWIG-generated binding and return the active
    scheduler thread priority for this tuner block.
    """
    return _fm_debug_swig.shared_tuner_sptr_active_thread_priority(self)
625941c0bd1bec0571d90594
def get_queryset(self):
    """Build the Transaction queryset from the request's query parameters.

    Supports ``groupby`` params (plain field names, date fields extracted
    via SQL, or ``<field>_bin`` entries bucketed with ``width_bucket``
    using a matching ``bins`` param), then annotates price aggregates.

    Raises:
        GroupByFieldError: unknown group field, or ``_bin`` without bins.
        NoDataError: when the filtered set has no min/max for a bin field.
    """
    queryset = Transaction.objects
    groupby = []
    extras = {}
    groups = self.request.QUERY_PARAMS.getlist('groupby', None)
    bins = self.request.QUERY_PARAMS.getlist('bins', None)
    if groups:
        for field in groups:
            # "<name>_bin" means: bucket <name> instead of grouping raw values.
            group_by_bins = re.match("^(.*)_bin$", field)
            if group_by_bins:
                groupby_field, field = field, group_by_bins.group(1)
            else:
                groupby_field = field
            if field not in ALLOWED_GROUPS:
                raise GroupByFieldError
            groupby.append(groupby_field)
            if field in Transaction.DATE_FILEDS:
                # Group date fields by the extracted date part.
                extras[field] = "extract('" + field + "' from date)"
            if group_by_bins:
                if not bins:
                    raise GroupByFieldError
                # Consume one bins param per "_bin" group field.
                field_bins = bins.pop()
                qs = Transaction.objects
                f = TransactionFilter(self.request.QUERY_PARAMS, queryset=qs)
                min_field = f.qs.aggregate(Min(field)).get(field + '__min')
                max_field = f.qs.aggregate(Max(field)).get(field + '__max')
                if not min_field or not max_field:
                    raise NoDataError
                # width_bucket(value, min, max+1, n_buckets) in SQL.
                extras[groupby_field] = "width_bucket(" + field + ", " + str(min_field) + ", " + str(max_field + 1) + ", " + field_bins + ")"
    if extras:
        queryset = queryset.extra(extras)
    if groupby:
        queryset = queryset.values(*groupby)
        queryset = queryset.order_by(*groupby)
    queryset = queryset.annotate(Avg('price'), Min('price'), Max('price'), Count('id'))
    return queryset
625941c0236d856c2ad4473c
def get(self, key, timestamp):
    """Time-keyed lookup (unimplemented stub).

    Presumably returns the value stored for ``key`` at the latest time
    not exceeding ``timestamp`` — TODO confirm against the setter.

    :type key: str
    :type timestamp: int
    :rtype: str
    """
    pass
625941c04f6381625f1149a1
def defineDynamicExplicitCoupledThermalDisplacementStep(myModel, outputs):
    """Create an explicit dynamic coupled thermal-stress analysis step.

    Adds the step to ``myModel``, restricts field output to ``outputs``
    at the last increment, and registers a tabular load amplitude.

    Returns
    -------
    (step, amplitude_name, step_name)
    """
    logger = logging.getLogger()
    logger.info("Defining dynamic explicit coupled temperature-displacement step" + '\n')
    analysisStepName = 'dynamicExCoupledTempDispl'
    myStep = myModel.TempDisplacementDynamicsStep(
        name=analysisStepName, previous='Initial', timePeriod=1.0,
        nlgeom=ON, maxIncrement=100000)
    myModel.fieldOutputRequests['F-Output-1'].setValues(
        variables=outputs, frequency=LAST_INCREMENT)
    # Trapezoidal load profile: ramp up 0-5 s, hold to 10 s, ramp down by 15 s.
    amp = 'dynamicExplicitAmplitude'
    myModel.TabularAmplitude(name=amp,
                             data=((0, 0), (5, 1), (10, 1), (15, 0)))
    logger.info("**********************************************************************************\n")
    return myStep, amp, analysisStepName
625941c066656f66f7cbc10f
def _set_index(self, v, load=False):
    """Setter method for index, mapped from YANG variable
    /components/component/transceiver/physical_channels/channel/state/index (uint16)

    If this variable is read-only (config: false) in the source YANG file,
    then _set_index is considered as a private method. Backends looking to
    populate this variable should do so via calling thisObj._set_index()
    directly.

    YANG Description: Index of the physical channel or lane within a
    physical client port
    """
    if hasattr(v, "_utype"):
        # Unwrap an already-typed value back to its base type first.
        v = v._utype(v)
    try:
        # Wrap the value in the generated restricted uint16 type.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int,
                    restriction_dict={"range": ["0..65535"]},
                    int_size=16,
                ),
                restriction_dict={"range": ["0..max"]},
            ),
            is_leaf=True,
            yang_name="index",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/platform/transceiver",
            defining_module="openconfig-platform-transceiver",
            yang_type="uint16",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Re-raise with the pyangbind error payload expected by callers.
        raise ValueError(
            {
                "error-string": """index must be of a type compatible with uint16""",
                "defined-type": "uint16",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['0..max']}), is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/platform/transceiver', defining_module='openconfig-platform-transceiver', yang_type='uint16', is_config=False)""",
            }
        )
    self.__index = t
    if hasattr(self, "_set"):
        self._set()
625941c04f88993c3716bfcf
def clean(self):
    """Validate the signup form data as a whole.

    Checks password confirmation, email/username uniqueness, drops an
    empty picture entry, and runs Django's email and password validators.

    Raises:
        forms.ValidationError: on any mismatch, duplicate or invalid value.
    """
    data = super().clean()
    # Fix: use .get() — a field that failed its own validation is absent
    # from cleaned_data, and direct indexing raised KeyError (HTTP 500).
    if data.get('password') != data.get('password_confirmation'):
        raise forms.ValidationError('Passwords did not match.')
    if Users.objects.filter(email=data.get('email')).exists():
        raise forms.ValidationError('Email is already in use.')
    if Users.objects.filter(username=data.get('username')).exists():
        raise forms.ValidationError('Username is already in use.')
    if not data.get('picture'):
        data.pop('picture', None)
    try:
        validate_email(data['email'])
        password_validation.validate_password(data['password'])
    except (ValidationError, KeyError):
        raise forms.ValidationError('Invalid email or password')
    return data
625941c038b623060ff0ad53
def addStdButtons(self, frame):
    """Add the standard Go/Hide buttons to a listBox dialog frame."""
    self.ok = Tk.Button(frame, text="Go", width=6, command=self.go)
    self.hideButton = Tk.Button(frame, text="Hide", width=6, command=self.hide)
    for button in (self.ok, self.hideButton):
        button.pack(side="left", pady=2, padx=5)
625941c021a7993f00bc7c51
def make_single_image(data):
    """Stack three band-filtered copies of ``data`` into an RGB-like image.

    input: [channel, SampFreq]
    return: [3, channel, SampFreq] — R = 0.01-7 Hz, G = 8-13 Hz,
    B = 13-30 Hz bands.
    """
    bands = ((0.01, 7), (8, 13), (13, 30))
    return [get_band_filter(data, low, high) for low, high in bands]
625941c0a05bb46b383ec789
def get_user(self):
    """Build the User for this loader's path from contestants.txt.

    Each line of the sibling contestants.txt file is
    ``username;password;first;last[;hidden]``; the last matching line
    wins. Returns None (and logs critically) when the user is missing.
    """
    username = os.path.basename(self.path)
    users_path = os.path.join(os.path.dirname(self.path), 'contestants.txt')
    userdata = None
    if os.path.exists(users_path):
        with open(users_path, "rt", encoding="utf-8") as users_file:
            for line in users_file.readlines():
                fields = [part.strip() for part in line.strip().split(';')]
                if fields[0] == username:
                    userdata = fields
    if userdata is None:
        logger.critical(
            "User %s not found in contestants.txt file.", username)
        return None
    logger.info("Loading parameters for user %s.", username)
    args = {
        'username': userdata[0],
        'password': build_password(userdata[1]),
        'first_name': userdata[2],
        'last_name': userdata[3],
        # Optional fifth column marks hidden contestants.
        'hidden': len(userdata) > 4 and userdata[4] == '1',
    }
    logger.info("User parameters loaded.")
    return User(**args)
625941c0d18da76e23532439
def _build_tree_from_dict(self, struct, values, nodename):
    """Recursively build the tree rooted at ``nodename``.

    :param struct: dict mapping node name -> list of child names
    :param values: dict mapping leaf name -> utility value
    :param nodename: name of the node to build
    :return: the created Node (the first node built becomes self.root)
    """
    node = Node(nodename, values.get(nodename, None))
    if self.root is None:
        # The very first node constructed is the tree root.
        self.root = node
    node.child_list = [
        self._build_tree_from_dict(struct, values, child)
        for child in struct.get(nodename, [])
    ]
    return node
625941c056b00c62f0f145bd
def sample(self, nsamples=1):
    """Draw samples from the variational Gamma distributions.

    Parameters
    ----------
    nsamples : int (optional, default 1)
        Number of samples to take.

    Returns
    -------
    np.ndarray
        Samples from the variational distributions, stacked so that the
        last dimension has length ``nsamples``.
    """
    # One gamma draw per sample, in the same order as the original loop
    # (keeps the RNG stream identical).
    draws = [np.random.gamma(self.vi_shape, 1 / self.vi_rate).T
             for _ in range(nsamples)]
    return np.stack(draws).T
625941c0c4546d3d9de72997
def add_modify_listener(self):
    """Register this object as the modify listener of its cell."""
    cell = self.cell
    cell.addModifyListener(self)
625941c0097d151d1a222dc1
def filter_dict(d, keys):
    """Return a copy of ``d`` containing only the entries whose key is in ``keys``.

    :param d: source mapping
    :param keys: iterable of keys to keep (keys missing from ``d`` are ignored)
    :return: a new dict
    """
    # `k in d` is the idiom (the original's `k in d.keys()` built a key view
    # per test); a dict comprehension replaces dict(generator-of-pairs).
    return {k: d[k] for k in keys if k in d}
625941c0d53ae8145f87a1d9
def simple_calculator(calculation):
    """Evaluate a simple "<int> <op> <int>" expression.

    Receives 'calculation' and returns the calculated result, e.g.
    '2 * 3' -> 6 and '2 + 6' -> 8. Supports +, -, * and /, using "true"
    division (so '2 / 3' is 0.66... rather than 0). Both operands are
    converted to ints.

    Raises:
        ValueError: if bad data is passed in (unparseable input, an
            unsupported operator, or division by zero).
    """
    operations = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }
    try:
        left, op, right = calculation.split()
        return operations[op](int(left), int(right))
    except (AttributeError, KeyError, ValueError, ZeroDivisionError) as exc:
        raise ValueError('bad data passed in: {!r}'.format(calculation)) from exc
625941c0d4950a0f3b08c2b6
def ship_hit(ai_settings, screen, stats, scoreboard, ship, aliens, bullets):
    """Respond to the ship being hit by an alien.

    Consumes one remaining ship and restarts the wave, or ends the game
    (and shows the mouse cursor) when no ships are left.
    """
    if stats.ships_left <= 0:
        stats.game_active = False
        pygame.mouse.set_visible(True)
        return
    stats.ships_left -= 1
    scoreboard.prep_ships()
    # Clear the board and start a fresh wave with the ship recentred.
    aliens.empty()
    bullets.empty()
    create_fleet(ai_settings, screen, ship, aliens)
    ship.center_ship()
    # Brief pause so the player notices the hit.
    sleep(0.5)
625941c04e4d5625662d4340
def TurnOnLed(self, color):
    """Turn on a single LED.

    @param color The color of the LED to turn on: "red", "yellow",
                 or "green".
    """
    if self._pin_states[color] == 1:
        # Already lit; skip the redundant GPIO write.
        return
    GPIO.output(self._output_pins[color], True)
    self._pin_states[color] = 1
625941c0796e427e537b0529
@pytest.fixture(scope="function")
def report_user_message(local_table: DbManager, admin_group: Chat) -> Message:
    """Run once at the beginning of each test function.

    Simulates an existing user report against 'BadUser' in the meme
    group, then returns a fresh message in the admin group for the
    handler under test to work with.

    Returns:
        user report message
    """
    # Persist a pre-existing report so handlers see it as already filed.
    Report(1, config_map['meme']['group_id'], 0, target_username='BadUser',
           date=datetime.now()).save_report()
    return Message(message_id=0, date=datetime.now(), chat=admin_group)
625941c05fcc89381b1e1622
def text_lives(self):
    """Draw the remaining-lives counter at the centre of the ball."""
    font_size = 30
    font = pygame.font.SysFont('calibri', font_size)
    rendered = font.render(str(self.lives), 1, BLACK)
    # Offset by fractions of the font size so the digits sit centred.
    screen.blit(rendered, (self.x - int(font_size / 3),
                           self.y - int(font_size / 2)))
625941c0d6c5a10208143fae
def convertToTitle(self, n):
    """Convert a positive column number to its Excel column title.

    :type n: int
    :rtype: str  (1 -> 'A', 26 -> 'Z', 27 -> 'AA', ...)
    """
    result = ""
    while n != 0:
        # Fix: use integer divmod — the original's int((n-1)/26) goes
        # through float division and loses precision for very large n.
        n, rem = divmod(n - 1, 26)
        result = chr(rem + 65) + result
    return result
625941c056ac1b37e6264139
def pwrs(self):
    """pwrs : factor ((PWR) factor)*

    Parse right-associative exponentiation: a ^ b ^ c == a ^ (b ^ c).
    """
    result = self.factor()
    # Fix: compare the token text with ==, not `is` — identity of equal
    # string objects is an interpreter detail and must not be relied on.
    while self.current_token.value == '^':
        self.eatOp(0)
        result = result ** self.pwrs()
    return result
625941c085dfad0860c3adbf
def rob(self, root):
    """Return the maximum loot obtainable from the house tree.

    :type root: TreeNode
    :rtype: int

    ``self.robsub`` returns the candidate totals for the root; the best
    of them is the answer.
    """
    return max(self.robsub(root))
625941c0cc0a2c11143dcdf6
def __iter__(self):
    """Iterate over this location's coordinates.

    Examples
    --------
    >>> l = Atom.Location(' ', [1, 2, 3], 1.0, 20.0, "XXX")
    >>> [c for c in l]
    [1, 2, 3]
    """
    # `yield from` replaces the original manual loop over self.position.
    yield from self.position
625941c0167d2b6e31218afc
def _remove_from_sets(self, test, dict_of_sets_of_tests):
    """Remove ``test`` from every set in the dictionary's values.

    Args:
        test: test to look for
        dict_of_sets_of_tests: dict mapping keys to sets of tests
    """
    # .values() works on both Python 2 and 3 (itervalues was py2-only),
    # and discard() is a no-op when absent, replacing the membership test.
    for set_of_tests in dict_of_sets_of_tests.values():
        set_of_tests.discard(test)
625941c0a79ad161976cc0ab
def askUserForScrapeYears():
    """Ask on the command line which years to scrape statistics for.

    Output:
        scrape_year (str): a single year ('2009'..'2021'), 'None', or
        'all' when the user wants every available year (the default).
    """
    all_prompt = ['Would you like to scrape all years?', 'Yes', 'No']
    choice = all_prompt[cutie.select(all_prompt, caption_indices=[0],
                                     selected_index=1)]
    if choice == 'Yes':
        print('Proceeding to scrape all available years.')
        return 'all'
    years = ['Select the year you would like to scrape:',
             '2021', '2020', '2019', '2018', '2017', '2016', '2015',
             '2014', '2013', '2012', '2011', '2010', '2009', 'None']
    scrape_year = years[cutie.select(years, caption_indices=[0],
                                     selected_index=1)]
    print(f'Proceeding to scrape statistics for {scrape_year}')
    return scrape_year
625941c0566aa707497f44d2
def lp_assignment_from_pd(df, attr_name=['supply'], dummy_name='DummySinkNode'):
    """Workhorse function for LP solution of transport problems represented
    in a graph.

    Currently only supports a single node attribute, but will be expanded
    in the future to operate on a list of node attributes.
    """
    # NOTE(review): the mutable default attr_name=['supply'] is shared
    # across calls; it is only read here, but confirm before mutating it.
    g = build_shifts_graph(df)
    g = add_dummy_supply_sink(g, attr_name)
    # Index edges by head and tail node for the constraints below.
    in_paths = defaultdict(list)
    out_paths = defaultdict(list)
    for p, q in g.edges_iter():
        out_paths[p].append((p, q))
        in_paths[q].append((p, q))
    vols = list(df.index)
    vols.remove('shift_needs')
    shifts = df.columns
    prob = pulp.LpProblem("Supply Chain", pulp.LpMinimize)
    # Binary decision variable: volunteer v takes shift s.
    x = pulp.LpVariable.dicts("takeShift", (vols, shifts), lowBound=0, upBound=1, cat=pulp.LpInteger)
    # Minimise total assignment cost over real (non-dummy) edges only.
    prob += sum(x[v][s] * g[v][s]['cost'] for v, s in g.edges_iter() if dummy_name not in v and dummy_name not in s), "objective"
    for shift in shifts:
        demand = nx.get_node_attributes(g, 'supply')[shift]
        # Each shift must receive exactly its demanded number of volunteers.
        prob += demand - sum([x[v][s] for (v, s) in in_paths[shift]]) == 0, "satisfiedShiftConstr_{}".format(shift)
    prob.solve()
    return prob
625941c0fbf16365ca6f6125
def read_string(self, addr):
    """Read a nil-terminated string from the Child process.

    :param addr: address in the child's memory to read from
    :return: the string read via the tracy C binding
    """
    return _tracy.tracy_read_string(self.child, addr)
625941c0925a0f43d2549ddb
def _add_optional_values(values: dict, **kwargs) -> dict:
    """Return a copy of ``values`` extended with the non-None keyword args.

    Given a dict of values for the insert statement, add entries for the
    values which are not None.

    :param values: the existing dict (left unmodified)
    :param kwargs: candidate entries; None values are skipped
    :return: the merged dict
    """
    extras = {key: value for key, value in kwargs.items() if value is not None}
    return {**values, **extras}
625941c0be383301e01b53f0
def __init__(self):
    """Initialise an empty Pulsar consumer configuration.

    :param Topic: topic to receive messages from; prefer the full path
        tenant/namespace/topic. Defaults to public/default when unset.
    :type Topic: str
    :param SubscriptionName: name of the subscriber.
    :type SubscriptionName: str
    :param ReceiverQueueSize: default 1000; received messages are first
        buffered in a queue of this size, used to tune the receive rate.
    :type ReceiverQueueSize: int
    :param SubInitialPosition: default Latest; where the consumer starts
        receiving messages. One of: Earliest, Latest.
    :type SubInitialPosition: str
    """
    self.Topic = None
    self.SubscriptionName = None
    self.ReceiverQueueSize = None
    self.SubInitialPosition = None
625941c076e4537e8c3515d7
def __parse_JSON(self, json):
    """Extract the device ID and coordinates from a decoded JSON payload.

    :param json: decoded JSON dict with a 'data' section
    :return: (device_ID, latitude, longitude)
    """
    payload = json['data']
    return payload['device_ID'], payload['latitude'], payload['longitude']
625941c0baa26c4b54cb1088
def _do_plot(x, y, dist=None, line=False, ax=None, fmt='bo', **kwargs):
    """Boilerplate plotting for ProbPlot's ppplot/qqplot/probplot methods.

    Parameters
    ----------
    x, y : array_like
        Data to be plotted.
    dist : scipy.stats distribution, optional
        Needed when ``line`` is 'q'.
    line : str {'45', 's', 'r', 'q'} or None
        Reference line to compare the data against.
    ax : AxesSubplot, optional
        Plot into this subplot instead of creating a new figure.
    fmt : str, optional
        matplotlib format string for the data markers.
    kwargs : keywords
        Passed through to matplotlib.plot.

    Returns
    -------
    fig : Figure
        The figure containing ``ax``.
    ax : AxesSubplot
        The original axes if provided, otherwise a new instance.
    """
    fig, ax = utils.create_mpl_ax(ax)
    ax.set_xmargin(0.02)
    ax.plot(x, y, fmt, **kwargs)
    if line:
        if line not in ('r', 'q', '45', 's'):
            raise ValueError("%s option for line not understood" % line)
        qqline(ax, line, x=x, y=y, dist=dist)
    return fig, ax
625941c0004d5f362079a29b
def _init_session(self):
    """Create an aiohttp session pre-loaded with the KuCoin auth headers.

    :return: a configured aiohttp.ClientSession
    """
    headers = {
        'Accept': 'application/json',
        'User-Agent': 'KuCoin-BOT/HuangJacky',
        'KC-API-KEY': self.API_KEY,
        'KC-API-PASSPHRASE': self.API_PASSPHRASE,
    }
    return aiohttp.ClientSession(headers=headers)
625941c0f7d966606f6a9f68
def get_name(self):
    """Return the symbolic enum name when possible, else str(self.enum).

    Only 4-byte values are looked up in Enum; anything else (or a value
    Enum does not recognise) falls back to its plain string form.
    """
    value = self.enum
    if len(value) == 4:
        try:
            return Enum(value).name
        except ValueError:
            pass
    return str(value)
625941c03346ee7daa2b2cd0
def InitializeJaccardMatrix(self):
    """Precompute the Jaccard distance for every pair of tweets.

    Dynamic-programming style: trades memory for time by caching the
    full symmetric matrix. If the data is too large, the number of
    pairs computed before the MemoryError is printed.
    """
    pairs_done = 0
    try:
        for first in self.tweets:
            self.JaccardMatrix[first] = {}
            for second in self.tweets:
                if second not in self.JaccardMatrix:
                    self.JaccardMatrix[second] = {}
                distance = self.JaccardDistance(self.tweets[first],
                                                self.tweets[second])
                # Store symmetrically so either lookup order works.
                self.JaccardMatrix[first][second] = distance
                self.JaccardMatrix[second][first] = distance
                pairs_done += 1
    except MemoryError:
        print(pairs_done)
625941c09c8ee82313fbb6da
def one_to_group(name):
    """Render the HTML input cell for one field of a ModelList item.

    Args:
        name: field name

    Returns:
        HTML string whose value attribute is a '%(name)s' placeholder
        for later %-interpolation with the item's values.
    """
    placeholder = '%%(%s)s' % name
    return (u'<td><input class="inp" type="text" name="%s" value="%s" />'
            u'</td>') % (name, placeholder)
625941c05510c4643540f350
def get_binaries():
    """Download and return paths of all platform-specific binaries.

    Fetches both the non-ARP and the ARP variant via get_binary().
    """
    return [get_binary(arp=flag) for flag in (False, True)]
625941c0c432627299f04baa
def __init__(
    self,
    continuation_token=None,
    total_item_count=None,
    items=None,
):
    """Keyword args:
        continuation_token (str): Token for the `continuation_token` query
            param to get the next page of data. Paging with it yields
            every item exactly once for unmodified items; items added or
            deleted during pagination may or may not appear. Generated
            only when `limit` is below the remaining item count and the
            default sort is used (no sort specified).
        total_item_count (int): Total number of items after applying
            `filter` params.
        items (list[ArrayNfsSpecificPerformance]): A list of NFS specific
            array performance metrics objects.
    """
    # Attributes are only set when explicitly provided, so the SDK can
    # distinguish "unset" from an explicit None during serialisation.
    if continuation_token is not None:
        self.continuation_token = continuation_token
    if total_item_count is not None:
        self.total_item_count = total_item_count
    if items is not None:
        self.items = items
625941c0d8ef3951e32434a3
def OnUpdateSessionInfo(self, event):
    """Refresh the session panels from a session-info event.

    event.infos contains:
        last_commit_client_id: string or None
        last_commit_client_hostname: string or None
        last_commit_client_platform: string or None
        last_commit_timestamp: unix time
        user_quota: number (space in bytes) or None
        used_space: number (space in bytes) or None
        basis, plan, status, expires_on
    """
    # Quota display needs both numbers at once.
    if event.infos['user_quota'] is not None and event.infos['used_space'] is not None:
        self.setSpaceInfo(int(event.infos['user_quota']), int(event.infos['used_space']))
    # 'basis' is optional in the payload.
    if 'basis' in event.infos:
        self.panel_1.updateHash(event.infos['basis'])
    if event.infos['last_commit_timestamp'] is not None:
        self.panel_1.updateTimestamp(event.infos['last_commit_timestamp'])
    # Contractual info requires all four keys to be present together.
    if ('plan' in event.infos and 'status' in event.infos and 'expires_on' in event.infos and 'user_quota' in event.infos):
        self.setContractualInfo(event.infos['plan'], event.infos['status'], event.infos['expires_on'], event.infos['user_quota'])
625941c091af0d3eaac9b97c
def main():
    """Demonstrate the central limit theorem with uniform random data.

    Plots the raw uniform sample next to histograms of means over 2 and
    over 10 draws, saves the figure, and shows it.
    """
    # Fix: sizes must be ints — 1e5 is a float, and float shapes are
    # rejected by np.random.random / ndarray.reshape on modern numpy.
    ndata = 100000
    nbins = 50
    data = np.random.random(ndata)
    fig, axs = plt.subplots(1, 3)
    sns.set(context='talk')
    axs[0].hist(data, bins=nbins)
    axs[0].set_title('Random data')
    axs[0].set_ylabel('Counts')
    # Integer division keeps the reshape dimensions integral.
    axs[1].hist(np.mean(data.reshape((ndata // 2, 2)), axis=1), bins=nbins)
    axs[1].set_title(' Average over 2')
    axs[2].hist(np.mean(data.reshape((ndata // 10, 10)), axis=1), bins=nbins)
    axs[2].set_title(' Average over 10')
    curDir = os.path.abspath(os.path.curdir)
    outFile = 'CentralLimitTheorem.png'
    plt.savefig(outFile, dpi=200)
    print('Data written to {0}'.format(os.path.join(curDir, outFile)))
    plt.show()
625941c055399d3f05588619
def setParameter(self, key, value):
    """Store a metadata parameter under the provided key.

    Parameters
    ----------
    key : str
        The name of the new bit of metadata.
    value : object
        Any value acceptable as a dictionary value.
    """
    params = self._parameters
    params[key] = value
625941c032920d7e50b28134
def embeddedformset_factory(document, parent_document, form=EmbeddedDocumentForm,
                            formset=EmbeddedDocumentFormSet, fields=None,
                            exclude=None, extra=1, can_order=False,
                            can_delete=True, max_num=None,
                            formfield_callback=None):
    """Build a formset class for *document* embedded in *parent_document*.

    Delegates to ``documentformset_factory`` and attaches the parent
    document so the formset can save back into it.
    """
    FormSet = documentformset_factory(
        document,
        form=form,
        formfield_callback=formfield_callback,
        formset=formset,
        extra=extra,
        can_delete=can_delete,
        can_order=can_order,
        fields=fields,
        exclude=exclude,
        max_num=max_num,
    )
    FormSet.parent_document = parent_document
    return FormSet
Returns an ``InlineFormSet`` for the given kwargs. You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey`` to ``parent_model``.
625941c0e8904600ed9f1e90
def get_user(self, safe=True):
    """Extract the current user from the session.

    Returns {} when the session holds no valid user dict with an "id".
    If the session stores only the user's id and a db is available, the
    remaining readable auth_user fields are loaded from the database
    (restricted to readable fields when *safe* is true).
    """
    user = self.session.get("user")
    if not (user and isinstance(user, dict) and "id" in user):
        return {}
    if len(user) == 1 and self.db:
        record = self.db.auth_user(user["id"])
        if not record:
            return {}
        user = record
        if safe:
            user = {
                field.name: record[field.name]
                for field in self.db.auth_user
                if field.readable
            }
    return user
Extracts the user from the session. Returns {} if there is no user in the session. If the session contains only a user['id'], retrieves the other readable user info from auth_user.
625941c0d58c6744b4257bc6
def hierarchy_pos(G, root, width=600., vert_gap=175, vert_loc=0, xcenter=0.0,
                  pos=None, parent=None):
    """Compute positions for drawing a tree rooted at *root*.

    From: http://stackoverflow.com/questions/29586520/can-one-get-hierarchical-graphs-from-networkx-with-python-3
    If there is a cycle that is reachable from root, then this will see
    infinite recursion.

    G: the graph
    root: the root node of current branch
    width: horizontal space allocated for this branch - avoids overlap
    vert_gap: gap between levels of hierarchy
    vert_loc: vertical location of root
    xcenter: horizontal location of root
    pos: a dict saying where all nodes go if they have been assigned
    parent: parent of this branch.
    """
    # `is None`, not `== None`: identity test is the correct idiom and
    # avoids surprises with custom __eq__ on dict-like pos objects.
    if pos is None:
        pos = {root: (xcenter, vert_loc)}
    else:
        pos[root] = (xcenter, vert_loc)
    neighbors = list(G.neighbors(root))
    if neighbors:
        # Split this branch's width evenly among the children.
        dx = width / len(neighbors)
        nextx = xcenter - width / 2 - dx / 2
        for neighbor in neighbors:
            nextx += dx
            pos = hierarchy_pos(G, neighbor, width=dx, vert_gap=vert_gap,
                                vert_loc=vert_loc - vert_gap, xcenter=nextx,
                                pos=pos, parent=root)
    return pos
from: http://stackoverflow.com/questions/29586520/can-one-get-hierarchical-graphs-from-networkx-with-python-3 If there is a cycle that is reachable from root, then this will see infinite recursion. G: the graph root: the root node of current branch width: horizontal space allocated for this branch - avoids overlap with other branches vert_gap: gap between levels of hierarchy vert_loc: vertical location of root xcenter: horizontal location of root pos: a dict saying where all nodes go if they have been assigned parent: parent of this branch.
625941c05e10d32532c5ee8d
def __init__(self, fieldname, name='component.transfer.sort'):
    """Create a transfer-sort component.

    Required Parameters
    fieldname :
        Field name according to which the sorting is done.
    Extra Parameters
    name :
        Name of the component.
    """
    super(sort, self).__init__(name)
    self.fieldname = fieldname
    self._type = 'component.transfer.sort'
Required Parameters fieldname : Specifies the field name according to which sorting process will be done. Extra Parameters name : Name of the component.
625941c076d4e153a657ea96
def _decode_hook(self, hook): <NEW_LINE> <INDENT> if hook.HasField('modfun'): <NEW_LINE> <INDENT> return self._decode_modfun(hook.modfun) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {'name': bytes_to_str(hook.name)}
Decodes a protobuf commit hook message into a dict. Used in bucket properties. :param hook: the hook to decode :type hook: riak_pb.RpbCommitHook :rtype dict
625941c021bff66bcd6848bb
def get_driver(pci_handle):
    """Return the name of the kernel driver assigned to the given NIC.

    :param pci_handle: PCI slot identifier with domain part.
    :returns: string with the assigned kernel driver, None otherwise.
    """
    driver_link = os.path.join(_PCI_DIR, _PCI_DRIVER).format(pci_handle)
    if not os.path.isdir(driver_link):
        return None
    # The driver entry is a symlink; its resolved basename is the driver.
    return os.path.basename(os.path.realpath(driver_link))
Returns name of kernel driver assigned to given NIC :param pci_handle: PCI slot identifier with domain part. :returns: string with assigned kernel driver, None otherwise
625941c03d592f4c4ed1cfd9
def update_campaign_retrieve(self, campaign_id, **kwargs):
    """Get a campaign. # noqa: E501

    Get an update campaign. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.update_campaign_retrieve(campaign_id, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str campaign_id: The campaign ID (required)
    :return: UpdateCampaign
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both sync and async paths simply forward to the _with_http_info
    # variant: async returns the request thread, sync the unpacked data.
    return self.update_campaign_retrieve_with_http_info(campaign_id, **kwargs)
Get a campaign. # noqa: E501 Get an update campaign. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.update_campaign_retrieve(campaign_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str campaign_id: The campaign ID (required) :return: UpdateCampaign If the method is called asynchronously, returns the request thread.
625941c0d7e4931a7ee9de83
def generate_zero_bets(self):
    """Generate the five-number (00-0-1-2-3) Outcome and add it to the
    corresponding wheel bins."""
    outcome = self.fact.make('00-0-1-2-3', 6)
    # Bin 37 represents '00'; the rest are the literal numbers.
    for bin_number in (37, 0, 1, 2, 3):
        self.wheel.add_outcome(bin_number, outcome)
Generates zero bet Outcomes
625941c0442bda511e8be381
def __iadd__(self, other):
    """Merge a Book or a Sheet into this book in place.

    Operator overloading, example::

        book += book2
        book += book2["Sheet1"]

    Sheet names that collide with existing ones get a uuid suffix; a
    single-sheet book contributes under its filename instead.
    """
    if isinstance(other, Book):
        names = other.sheet_names()
        for name in names:
            key = other.filename if len(names) == 1 else name
            if key in self.name_array:
                key = "%s_%s" % (name, uuid.uuid4().hex)
            self.sheets[key] = self.get_sheet(other[name].array, key)
    elif isinstance(other, Sheet):
        key = other.name
        if key in self.name_array:
            key = "%s_%s" % (other.name, uuid.uuid4().hex)
        self.sheets[key] = self.get_sheet(other.array, key)
    else:
        raise TypeError
    self.name_array = list(self.sheets.keys())
    return self
Operator overloading += example:: book += book2 book += book2["Sheet1"]
625941c0d4950a0f3b08c2b7
def _set_positives(self, positives):
    """Setter for the positive fluents.

    :param positives: the Fluents instance to store; any other type is
        rejected with a console message (original behaviour: no raise).
    """
    if not isinstance(positives, Fluents):
        print("L'argument pour le set positives n'est pas un Fluent, veuillez reesayer")
        return
    self._positives = positives
Setter for the positives. :param positives: the Fluents instance we want to set as the positives
625941c029b78933be1e5616
def rotmatz(angle):
    """Generate a rotation matrix along the Z axis.

    Parameters
    ----------
    angle : float
        Rotation angle in degrees.

    Returns
    ----------
    rotz : ndarray
        3x3 rotation matrix along the Z axis.
    """
    angle = np.radians(angle)
    # dtype=np.float raised AttributeError on NumPy >= 1.24 (the alias
    # was removed); the builtin float yields the same float64 dtype.
    rotz = np.array([
        [math.cos(angle), -math.sin(angle), 0.],
        [math.sin(angle), math.cos(angle), 0.],
        [0., 0., 1.],
    ], dtype=float)
    return rotz
Definition to generate a rotation matrix along Z axis. Parameters ---------- angles : list Rotation angles in degrees. Returns ---------- rotz : ndarray Rotation matrix along Z axis.
625941c09f2886367277a7f5
def test_CH4_RGWP(self): <NEW_LINE> <INDENT> array_res = self._tested_inst.co2eq_yields_trajectory_computer( {'CO2':.0, 'N2O':.0, 'CH4':1.} )['as_array'] <NEW_LINE> array_ref = np.array( [[ self._tested_inst.GHGS_BASE_DATA['CH4']['GWP100'] ]] ) <NEW_LINE> np.testing.assert_equal( np.round(array_res,12), np.round(array_ref,12) )
Method which tests the CH4 recomputed relative GWP100
625941c0507cdc57c6306c3c
def media_seek(self, position):
    """Seek the media to a specific location."""
    controller = self._chromecast.media_controller
    controller.seek(position)
Seek the media to a specific location.
625941c0796e427e537b052a
@app.route("/list_albums")
def list_albums():
    """Return every known album as a JSON response."""
    albums = mpd_command('list_albums')
    return jsonify(albums)
List all albums.
625941c0cad5886f8bd26f40
def top(self):
    """Return the element at the top of the stack without removing it.

    Raises StackUnderflow when the stack is empty.
    """
    if not self.is_empty_stack():
        return self.head.element
    raise StackUnderflow("stack is empty")
查询栈顶元素
625941c0d58c6744b4257bc7
def is_task_file(task):
    """Tell whether the dataset file for the specified task already
    exists on disk (i.e. has been downloaded)."""
    task_path = get_task_filename(task)
    return os.path.isfile(task_path)
Return whether or not the dataset ids corresponding to the specified task have already been downloaded.
625941c0ad47b63b2c509ee6