code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def count_items(column_list): <NEW_LINE> <INDENT> dictionary = {} <NEW_LINE> for data in column_list: <NEW_LINE> <INDENT> if not dictionary.get(data): <NEW_LINE> <INDENT> dictionary[data] = 0 <NEW_LINE> <DEDENT> dictionary[data] += 1 <NEW_LINE> <DEDENT> return list(dictionary.keys()), list(dictionary.values())
Função para contar a quantidade itens em uma lista de dados. Argumentos: data_list: Lista de dados. Retorna: Uma tupla contendo os types dos items e quantidades dos itens respectivamente.
625941c3627d3e7fe0d68e06
def test_as_extension(self) -> None: <NEW_LINE> <INDENT> for config in self.test_values.values(): <NEW_LINE> <INDENT> if config["extension_type"] is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ext = self.ext(config["expected"]) <NEW_LINE> cg_ext = x509.extensions.Extension( oid=self.ext_class.oid, critical=self.ext_class.default_critical, value=config["extension_type"], ) <NEW_LINE> self.assertEqual(ext.as_extension(), cg_ext) <NEW_LINE> for critical in self.critical_values: <NEW_LINE> <INDENT> ext = self.ext(config["expected"], critical=critical) <NEW_LINE> self.assertEqual( ext.as_extension(), x509.extensions.Extension( oid=self.ext_class.oid, critical=critical, value=config["extension_type"] ), )
Test the as_extension property.
625941c30a50d4780f666e48
def test_output_merge_by_attribute_inner(self): <NEW_LINE> <INDENT> domainA, domainB = self.dataA.domain, self.dataB.domain <NEW_LINE> result_d = Domain(domainA.attributes + domainB.attributes[1:], domainA.class_vars + domainB.class_vars, domainA.metas + domainB.metas) <NEW_LINE> result_X = np.array([[0, 0, 0], [1, 1, 1], [2, 0, np.nan]]) <NEW_LINE> result_Y = np.array([[0, np.nan], [1, 1], [2, 0]]) <NEW_LINE> result_M = np.array([[0.0, "m1", np.nan], [1.0, "m2", 1.0], [np.nan, "m3", 0.0]]).astype(object) <NEW_LINE> result = Table(result_d, result_X, result_Y, result_M) <NEW_LINE> self.send_signal(self.widget.Inputs.data, self.dataA) <NEW_LINE> self.send_signal(self.widget.Inputs.extra_data, self.dataB) <NEW_LINE> self.widget.attr_boxes.set_state([(domainA[0], domainB[0])]) <NEW_LINE> self.widget.controls.merging.buttons[self.widget.InnerJoin].click() <NEW_LINE> self.assertTablesEqual(self.get_output(self.widget.Outputs.data), result)
Check output for merging option 'Find matching rows' by attribute
625941c3d164cc6175782d05
def set_interfaces_ethernet_eth0_bridgegroup_priority(self, *options, **def_args ): <NEW_LINE> <INDENT> arguments= '' <NEW_LINE> for option in options: <NEW_LINE> <INDENT> arguments = arguments + option +' ' <NEW_LINE> <DEDENT> prompt = def_args.setdefault('prompt',self.prompt) <NEW_LINE> timeout = def_args.setdefault('timeout',self.timeout) <NEW_LINE> self.execute( cmd= "set interfaces ethernet eth0 bridge-group priority "+ arguments, prompt = prompt, timeout = timeout ) <NEW_LINE> return main.TRUE
Possible Options :[]
625941c3046cf37aa974cd01
def find_by_email(addr, users): <NEW_LINE> <INDENT> return find_by_tuple_index(1, addr, users)
Retrieve details for user by their email address. Returns the user tuple, or None if the user could not be found.
625941c310dbd63aa1bd2b5b
def get( self, resource_group_name, network_watcher_name, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}' <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send(request, header_parameters, stream=False, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> deserialized = None <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> deserialized = self._deserialize('NetworkWatcher', response) <NEW_LINE> <DEDENT> if raw: <NEW_LINE> <INDENT> client_raw_response = 
ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
Gets the specified network watcher by resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_watcher_name: The name of the network watcher. :type network_watcher_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: NetworkWatcher or ClientRawResponse if raw=true :rtype: ~azure.mgmt.network.v2017_08_01.models.NetworkWatcher or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
625941c30c0af96317bb819f
def start_instance(instanceid, hibernate = False): <NEW_LINE> <INDENT> response = ec2.start_instances( InstanceIds = [instanceid] ) <NEW_LINE> logging.info(f"Got response from EC2 api {response}")
Start and EC2 instance
625941c35fc7496912cc3935
def start_opcua_server(self): <NEW_LINE> <INDENT> logging.log(logging.INFO, "Weather station has started") <NEW_LINE> self.server.start() <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.update_prediction() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> time.sleep(self.frequency_of_fetching)
starts server and consecutive updates in case of any client errors creates a delay, leaving previous values as are with specified time
625941c36fece00bbac2d6f5
def slider_wav_onpress(self): <NEW_LINE> <INDENT> self.wav_slider_moving = True
Callback when the GUI wave player slider is pressed.
625941c31f037a2d8b9461b6
def watch_endpoints_list(self, **kwargs): <NEW_LINE> <INDENT> all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds'] <NEW_LINE> all_params.append('callback') <NEW_LINE> params = locals() <NEW_LINE> for key, val in iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method watch_endpoints_list" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> resource_path = '/api/v1/watch/endpoints'.replace('{format}', 'json') <NEW_LINE> path_params = {} <NEW_LINE> query_params = {} <NEW_LINE> if 'pretty' in params: <NEW_LINE> <INDENT> query_params['pretty'] = params['pretty'] <NEW_LINE> <DEDENT> if 'label_selector' in params: <NEW_LINE> <INDENT> query_params['labelSelector'] = params['label_selector'] <NEW_LINE> <DEDENT> if 'field_selector' in params: <NEW_LINE> <INDENT> query_params['fieldSelector'] = params['field_selector'] <NEW_LINE> <DEDENT> if 'watch' in params: <NEW_LINE> <INDENT> query_params['watch'] = params['watch'] <NEW_LINE> <DEDENT> if 'resource_version' in params: <NEW_LINE> <INDENT> query_params['resourceVersion'] = params['resource_version'] <NEW_LINE> <DEDENT> if 'timeout_seconds' in params: <NEW_LINE> <INDENT> query_params['timeoutSeconds'] = params['timeout_seconds'] <NEW_LINE> <DEDENT> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> header_params['Accept'] = self.api_client. select_header_accept(['application/json']) <NEW_LINE> if not header_params['Accept']: <NEW_LINE> <INDENT> del header_params['Accept'] <NEW_LINE> <DEDENT> header_params['Content-Type'] = self.api_client. 
select_header_content_type(['*/*']) <NEW_LINE> auth_settings = [] <NEW_LINE> response = self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='JsonWatchEvent', auth_settings=auth_settings, callback=params.get('callback')) <NEW_LINE> return response
watch individual changes to a list of Endpoints This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.watch_endpoints_list(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str pretty: If 'true', then the output is pretty printed. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. :param int timeout_seconds: Timeout for the list/watch call. :return: JsonWatchEvent If the method is called asynchronously, returns the request thread.
625941c3377c676e91272161
def process_queue(queue, spam_substrings, file): <NEW_LINE> <INDENT> messages = [(queue[0].user_id, [])] <NEW_LINE> seen_users = set() <NEW_LINE> for m in queue: <NEW_LINE> <INDENT> if m.user_id == messages[-1][0]: <NEW_LINE> <INDENT> messages[-1] = (m.user_id, messages[-1][1] + [m.text]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.append((m.user_id, [m.text])) <NEW_LINE> <DEDENT> seen_users.add(m.user_id) <NEW_LINE> <DEDENT> if len(messages) < 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> distinct_messages = set(','.join(lines) for _, lines in messages) <NEW_LINE> if len(distinct_messages) * 2 < len(messages): <NEW_LINE> <INDENT> print(f'Spam skipped: {messages}') <NEW_LINE> return <NEW_LINE> <DEDENT> for m in distinct_messages: <NEW_LINE> <INDENT> for s in spam_substrings: <NEW_LINE> <INDENT> if s in m: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if ''.join(distinct_messages).count('/') > 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> seen_users = list(seen_users) <NEW_LINE> reindexed_msgs = [ ( seen_users.index(u) if u != queue[-1].user_id else 'ME', '\n'.join(text) ) for u, text in messages ] <NEW_LINE> obj = dict( context=reindexed_msgs[:-1], reply=reindexed_msgs[-1][1], reply_id=queue[-1].id, timestamp=queue[-1].timestamp.isoformat()[:19], ) <NEW_LINE> file.write(json.dumps(obj)) <NEW_LINE> file.write('\n')
Adds the latest conversation atom in the queue to the file. The queue is expected to be ordered, from the same chat group and without missing messages from the same user which came later in the log. This function evaluates whether the last messages in the queue are a valid conversation atom, and if so write in in a single JSON line to the file.
625941c391af0d3eaac9b9ce
def putexhaust(idf, zone, exhaust_fan): <NEW_LINE> <INDENT> if hasexhaust(idf, zone): <NEW_LINE> <INDENT> raise HasExhaustFanError <NEW_LINE> <DEDENT> exhaust_fan.Air_Inlet_Node_Name = f"{exhaust_fan.Name} Node" <NEW_LINE> exhaust_fan.Air_Outlet_Node_Name = f"{exhaust_fan.Name} Outlet Node" <NEW_LINE> nodelist = idf.newidfobject( "NodeList", Name=f"{exhaust_fan.Name} Node List", Node_1_Name=f"{exhaust_fan.Name} Node", ) <NEW_LINE> econns = findequipmentconnection(idf, zone) <NEW_LINE> econns.Zone_Air_Exhaust_Node_or_NodeList_Name = f"{exhaust_fan.Name} Node List" <NEW_LINE> eqlist = findequipmentlist(idf, zone) <NEW_LINE> extlist = extfields.extensiblefields2list(eqlist) <NEW_LINE> cooling_sequence_max = max([item[2] for item in extlist]) <NEW_LINE> heating_sequence_max = max([item[3] for item in extlist]) <NEW_LINE> exfanitem = [ "Fan:ZoneExhaust", exhaust_fan.Name, cooling_sequence_max + 1, heating_sequence_max + 1, ] <NEW_LINE> extlist.append(exfanitem) <NEW_LINE> extfields.list2extensiblefields(eqlist, extlist) <NEW_LINE> return exhaust_fan
plug the exhaust_fan into the zone Will raise HasExhaustFanError exception, the zone has and exhaust fan Usage:: greatexhaust = idf.newidfobject(FAN:ZONEEXHAUST, Name='Great Exhaust', ) # with other fields zones = idf.idfobjects['zone'] zone = zones[2] # the 3rd zone putexhaust(idf, zone, greatexhaust) Parameters ---------- idf: eppy.modeleditor.IDF this idf model zone : eppy.bunch_subclass.EpBunch the zone you are checking for exhaust (type ZONE) exhaust_fan: eppy.bunch_subclass.EpBunch type FAN:ZONEEXHAUST. This should be made or copied to this idf Returns ------- exhaust_fan: eppy.bunch_subclass.EpBunch FAN:ZONEEXHAUST object for this zone
625941c344b2445a3393204e
def s3_pr_presence_onvalidation(form): <NEW_LINE> <INDENT> table = db.pr_presence <NEW_LINE> location = form.vars.location_id <NEW_LINE> shelter = form.vars.shelter_id <NEW_LINE> if location or shelter: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> condition = form.vars.presence_condition <NEW_LINE> if condition: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> condition = int(condition) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> condition = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> condition = table.presence_condition.default <NEW_LINE> form.vars.condition = condition <NEW_LINE> <DEDENT> if condition: <NEW_LINE> <INDENT> if condition in vita.PERSISTANT_PRESENCE or condition in vita.ABSENCE: <NEW_LINE> <INDENT> if not form.vars.id: <NEW_LINE> <INDENT> if table.location_id.default or table.shelter_id.default: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> record = db(table.id == form.vars.id).select(table.location_id, table.shelter_id, limitby=(0, 1)).first() <NEW_LINE> if record and record.location_id or record.shelter_id: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> form.errors.location_id = form.errors.shelter_id = T("Either a shelter or a location must be specified") <NEW_LINE> return
Presence record validation
625941c3c4546d3d9de729ea
def process_file(input_file_name, output_file_name, delay_in_ms, line_search, line_replace): <NEW_LINE> <INDENT> input_file = open(input_file_name) <NEW_LINE> output_file = open(output_file_name, 'w') <NEW_LINE> for line in input_file: <NEW_LINE> <INDENT> new_line = line.replace(line_search, line_replace) <NEW_LINE> output_file.write(new_line) <NEW_LINE> sleep(delay_in_ms / 1000.0) <NEW_LINE> <DEDENT> output_file.close()
Cat (echo) one file in a separated thread at the rate you specify.
625941c35166f23b2e1a5111
@tf_export('math.acos', 'acos') <NEW_LINE> @deprecated_endpoints('acos') <NEW_LINE> def acos(x, name=None): <NEW_LINE> <INDENT> _ctx = _context._context <NEW_LINE> if _ctx is None or not _ctx._eager_context.is_eager: <NEW_LINE> <INDENT> _, _, _op = _op_def_lib._apply_op_helper( "Acos", x=x, name=name) <NEW_LINE> _result = _op.outputs[:] <NEW_LINE> _inputs_flat = _op.inputs <NEW_LINE> _attrs = ("T", _op.get_attr("T")) <NEW_LINE> _execute.record_gradient( "Acos", _inputs_flat, _attrs, _result, name) <NEW_LINE> _result, = _result <NEW_LINE> return _result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _result = _pywrap_tensorflow.TFE_Py_FastPathExecute( _ctx._context_handle, _ctx._eager_context.device_name, "Acos", name, _ctx._post_execution_callbacks, x) <NEW_LINE> return _result <NEW_LINE> <DEDENT> except _core._FallbackException: <NEW_LINE> <INDENT> return acos_eager_fallback( x, name=name, ctx=_ctx) <NEW_LINE> <DEDENT> except _core._NotOkStatusException as e: <NEW_LINE> <INDENT> if name is not None: <NEW_LINE> <INDENT> message = e.message + " name: " + name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = e.message <NEW_LINE> <DEDENT> _six.raise_from(_core._status_to_exception(e.code, message), None)
Computes acos of x element-wise. Args: x: A `Tensor`. Must be one of the following types: `bfloat16`, `half`, `float32`, `float64`, `int32`, `int64`, `complex64`, `complex128`. name: A name for the operation (optional). Returns: A `Tensor`. Has the same type as `x`.
625941c33617ad0b5ed67eb0
def main(args): <NEW_LINE> <INDENT> print ("This is Tic-Tak-Toe player.") <NEW_LINE> if restoreall(): <NEW_LINE> <INDENT> return playall() <NEW_LINE> <DEDENT> return 1
main dispatcher
625941c363f4b57ef00010d5
def isSameTree(self, p, q): <NEW_LINE> <INDENT> if not p or not q: <NEW_LINE> <INDENT> return p is q <NEW_LINE> <DEDENT> return p.val == q.val and self.isSameTree(p.left, q.left) and self.isSameTree(p.right, q.right)
:type p: TreeNode :type q: TreeNode :rtype: bool
625941c366673b3332b92048
def check_differences( self ): <NEW_LINE> <INDENT> v = self.values_ <NEW_LINE> groups = { 'ldap' : 3 if v[ 'ldap' ] == DiffItem.Unknown else 0 , } <NEW_LINE> from aolpsync.utils import multivalued_check_equals as mce <NEW_LINE> if v[ 'db' ] == DiffItem.Unknown: <NEW_LINE> <INDENT> groups[ 'db' ] = 3 <NEW_LINE> <DEDENT> elif mce( v[ 'ldap' ] , v[ 'db' ] ): <NEW_LINE> <INDENT> groups[ 'db' ] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> groups[ 'db' ] = 1 <NEW_LINE> <DEDENT> if v[ 'bss' ] == DiffItem.Unknown: <NEW_LINE> <INDENT> groups[ 'bss' ] = 3 <NEW_LINE> <DEDENT> elif mce( v[ 'bss' ] , v[ 'ldap' ] ): <NEW_LINE> <INDENT> groups[ 'bss' ] = groups[ 'ldap' ] <NEW_LINE> <DEDENT> elif mce( v[ 'bss' ] , v[ 'db' ] ): <NEW_LINE> <INDENT> groups[ 'bss' ] = groups[ 'db' ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> groups[ 'bss' ] = 2 <NEW_LINE> <DEDENT> return ( bool( set( groups.values( ) ) - set(( 0 , 3 )) ) , groups )
Vérifie si des différences existent et assigne chaque donnée à un groupe en fonction de sa valeur. Les données 'inconnues' (i.e. celles qui n'existent pas pour cette source de données) seront ignorées lors de la comparaison. :return: un tuple contenant un booléen qui indique si des modifications ont été trouvées, et un dictionnaire associant à chaque source de données un groupe (sous la forme d'un entier entre 0 et 3 - cette dernière valeur indiquant un champ inconnu).
625941c3a79ad161976cc0fd
def find_vars_0(filepath, paths): <NEW_LINE> <INDENT> filepath = jinja2_cli.render.template_path(filepath, paths) <NEW_LINE> def find_undecls_0(fpath, paths=paths): <NEW_LINE> <INDENT> ast_ = get_ast(fpath, paths) <NEW_LINE> if ast_: <NEW_LINE> <INDENT> return [find_attrs(ast_, v) for v in jinja2.meta.find_undeclared_variables(ast_)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> return [(f, find_undecls_0(f)) for f in find_templates(filepath, paths)]
Find and return variables in given template. see also: http://jinja.pocoo.org/docs/api/#the-meta-api :param filepath: (Base) filepath of template file :param paths: Template search paths :return: [(template_abs_path, [var])]
625941c3adb09d7d5db6c748
def process_image(image): <NEW_LINE> <INDENT> img_loader = transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]) <NEW_LINE> pil_img = Image.open(image) <NEW_LINE> pil_img = img_loader(pil_img).float() <NEW_LINE> np_image = np.array(pil_img) <NEW_LINE> return np_image
Scales, crops, and normalizes a PIL image for a PyTorch model, returns an Numpy array
625941c330c21e258bdfa453
def semestri(G): <NEW_LINE> <INDENT> n = len(G) <NEW_LINE> assert n > 0 <NEW_LINE> s = [1] * n <NEW_LINE> for u in toporder(G): <NEW_LINE> <INDENT> for v in G[u]: <NEW_LINE> <INDENT> if s[v] <= s[u]: <NEW_LINE> <INDENT> s[v] = s[u] + 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return (max(s), s)
Vrne najmanjše število semestrov, potrebnih za dokončanje študija, in seznam s prvim semestrom, v katerem lahko opravljamo določen predmet. Časovna zahtevnost: O(m)
625941c321bff66bcd68490c
def _update_cluster_status(self): <NEW_LINE> <INDENT> params = {} <NEW_LINE> results = self._issue_api_request('GetClusterCapacity', params) <NEW_LINE> results = results['result']['clusterCapacity'] <NEW_LINE> free_capacity = ( results['maxProvisionedSpace'] - results['usedSpace']) <NEW_LINE> data = {} <NEW_LINE> backend_name = self.configuration.safe_get('volume_backend_name') <NEW_LINE> data["volume_backend_name"] = backend_name or self.__class__.__name__ <NEW_LINE> data["vendor_name"] = 'SolidFire Inc' <NEW_LINE> data["driver_version"] = self.VERSION <NEW_LINE> data["storage_protocol"] = 'iSCSI' <NEW_LINE> data['consistencygroup_support'] = True <NEW_LINE> data['replication_enabled'] = self.replication_enabled <NEW_LINE> if self.replication_enabled: <NEW_LINE> <INDENT> data['replication'] = 'enabled' <NEW_LINE> <DEDENT> data['active_cluster_mvip'] = self.active_cluster_info['mvip'] <NEW_LINE> data['total_capacity_gb'] = ( float(results['maxProvisionedSpace'] / units.Gi)) <NEW_LINE> data['free_capacity_gb'] = float(free_capacity / units.Gi) <NEW_LINE> data['reserved_percentage'] = self.configuration.reserved_percentage <NEW_LINE> data['QoS_support'] = True <NEW_LINE> data['compression_percent'] = ( results['compressionPercent']) <NEW_LINE> data['deduplicaton_percent'] = ( results['deDuplicationPercent']) <NEW_LINE> data['thin_provision_percent'] = ( results['thinProvisioningPercent']) <NEW_LINE> self.cluster_stats = data
Retrieve status info for the Cluster.
625941c35fcc89381b1e1675
def twoSum(self, numbers, target): <NEW_LINE> <INDENT> a = 0 <NEW_LINE> b = len(numbers) - 1 <NEW_LINE> c = True <NEW_LINE> while c: <NEW_LINE> <INDENT> if numbers[a] + numbers[b] > target: <NEW_LINE> <INDENT> b -= 1 <NEW_LINE> <DEDENT> elif numbers[a] + numbers[b] < target: <NEW_LINE> <INDENT> a += 1 <NEW_LINE> <DEDENT> elif numbers[a] + numbers[b] == target: <NEW_LINE> <INDENT> return [a+1,b+1]
:type numbers: List[int] :type target: int :rtype: List[int]
625941c363d6d428bbe444a7
def show_results_in_labels(tab_no): <NEW_LINE> <INDENT> for i in range(15): <NEW_LINE> <INDENT> entry_parsed_data[tab_no][i].delete(0, 'end') <NEW_LINE> entry_parsed_data[tab_no][i].insert(0, ms_variables_values[tab_no][i])
updating text in GUI
625941c3d99f1b3c44c67549
def path(p): <NEW_LINE> <INDENT> return os.path.normpath(os.path.join( os.environ.get('HOST_PREFIX', '/host'), './{}'.format(p)))
Build the corresponding path `p` inside the container.
625941c392d797404e304141
def getEnum(self, name): <NEW_LINE> <INDENT> return self.getType(name, Type.ENUM)
Gets the enum object with given name.
625941c399fddb7c1c9de34a
def analyze_results(self, results): <NEW_LINE> <INDENT> first_fail = set() <NEW_LINE> pre_fail = set() <NEW_LINE> post_fail = set() <NEW_LINE> always_pre_fail = set() <NEW_LINE> always_post_fail = set() <NEW_LINE> before_fail = set() <NEW_LINE> print("Analyzing {f} failed and {p} passed test runs...".format( f=len(results['failed']), p=len(results['passed']) )) <NEW_LINE> for test_run in results['failed']: <NEW_LINE> <INDENT> failed = False <NEW_LINE> tmp_first_fail = None <NEW_LINE> tmp_pre_fail = [] <NEW_LINE> tmp_post_fail = [] <NEW_LINE> tmp_before_fail = [] <NEW_LINE> last_test = '' <NEW_LINE> for testcase in test_run: <NEW_LINE> <INDENT> if testcase.result == 'success': <NEW_LINE> <INDENT> if failed: <NEW_LINE> <INDENT> tmp_post_fail.append(testcase.methodname) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tmp_pre_fail.append(testcase.methodname) <NEW_LINE> <DEDENT> last_test = testcase.methodname <NEW_LINE> continue <NEW_LINE> <DEDENT> if failed: <NEW_LINE> <INDENT> last_test = testcase.methodname <NEW_LINE> continue <NEW_LINE> <DEDENT> tmp_first_fail = testcase.methodname <NEW_LINE> tmp_before_fail = last_test <NEW_LINE> last_test = testcase.methodname <NEW_LINE> failed = True <NEW_LINE> <DEDENT> first_fail.add(tmp_first_fail) <NEW_LINE> before_fail.add(tmp_before_fail) <NEW_LINE> always_pre_fail = set(always_pre_fail).intersection(tmp_pre_fail) <NEW_LINE> always_post_fail = set(always_post_fail).intersection(tmp_post_fail) <NEW_LINE> pre_fail.update(tmp_pre_fail) <NEW_LINE> post_fail.update(tmp_post_fail) <NEW_LINE> <DEDENT> print("\n\nAnalysis of {l} failed test runs:".format(l=len(results['failed']))) <NEW_LINE> print("Tests that failed first:") <NEW_LINE> for x in first_fail: <NEW_LINE> <INDENT> print("\t" + x) <NEW_LINE> <DEDENT> print("") <NEW_LINE> print("Tests immediately before failure:") <NEW_LINE> for x in before_fail: <NEW_LINE> <INDENT> print("\t" + x) <NEW_LINE> <DEDENT> print("") <NEW_LINE> print("Tests that **always** succeeded before first 
failure:") <NEW_LINE> for x in always_pre_fail: <NEW_LINE> <INDENT> print("\t" + x) <NEW_LINE> <DEDENT> print("") <NEW_LINE> print("All tests that succeeded before first failure:") <NEW_LINE> for x in pre_fail: <NEW_LINE> <INDENT> print("\t" + x) <NEW_LINE> <DEDENT> print("") <NEW_LINE> print("Tests that **always** succeeded after first failure:") <NEW_LINE> for x in always_post_fail: <NEW_LINE> <INDENT> print("\t" + x) <NEW_LINE> <DEDENT> print("") <NEW_LINE> print("All tests that succeeded after first failure:") <NEW_LINE> for x in post_fail: <NEW_LINE> <INDENT> print("\t" + x)
very simple results analysis
625941c3be8e80087fb20bfd
def download_chunk(args): <NEW_LINE> <INDENT> global counter <NEW_LINE> x, y, latest = args <NEW_LINE> url_format = "http://himawari8.nict.go.jp/img/D531106/{}d/{}/{}_{}_{}.png" <NEW_LINE> url = url_format.format(level, width, strftime("%Y/%m/%d/%H%M%S", latest), x, y) <NEW_LINE> tile_w = urllib2.urlopen(url) <NEW_LINE> tiledata = tile_w.read() <NEW_LINE> with counter.get_lock(): <NEW_LINE> <INDENT> counter.value += 1 <NEW_LINE> print("\rDownloading tiles: %s/%s completed" % (counter.value, level * level)) <NEW_LINE> <DEDENT> return x, y, tiledata
download a picture section from website x - coordinate x y - coordinate y latest - requested time
625941c35fc7496912cc3936
def longestPalindromeSubseq(self, s): <NEW_LINE> <INDENT> X = s <NEW_LINE> Y = s[::-1] <NEW_LINE> m = len(X) <NEW_LINE> n = len(Y) <NEW_LINE> arr = [] <NEW_LINE> for i in range(m+1): <NEW_LINE> <INDENT> arr.append([0]*(n+1)) <NEW_LINE> <DEDENT> for i in range(m+1): <NEW_LINE> <INDENT> for j in range(n+1): <NEW_LINE> <INDENT> if i == 0 or j == 0 : <NEW_LINE> <INDENT> arr[i][j] = 0 <NEW_LINE> <DEDENT> elif X[i-1] == Y[j-1]: <NEW_LINE> <INDENT> arr[i][j] = 1 + arr[i-1][j-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> arr[i][j] = max(arr[i-1][j], arr[i][j-1]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return arr[m][n]
:type s: str :rtype: int
625941c3462c4b4f79d1d688
def convert_image(input_path, output_path, o_ext=None): <NEW_LINE> <INDENT> supported_exts = ('png', 'jpeg', 'webp', 'bmp') <NEW_LINE> if not o_ext: <NEW_LINE> <INDENT> _, ext = splitext(output_path) <NEW_LINE> o_ext = ext[1:].lower().replace('jpg', 'jpeg') <NEW_LINE> <DEDENT> if o_ext not in supported_exts: <NEW_LINE> <INDENT> raise UnsupportedExtension(f"Can't convert image to {o_ext}") <NEW_LINE> <DEDENT> im = Image.open(input_path).convert("RGB") <NEW_LINE> im.save(output_path, o_ext)
Use PIL.Image to convert image format. The output extension will be determined from output_path. You can also specify it by using o_ext. Supported extensions: png, jpeg, webp Note: Every image will be convert to RGB, which mean the output image will not have alpha value. Args: input_path (str): [description] output_path (str): [description] o_ext (str, optional): output extension, If you don't specific it, the return image will be JPG by default. Defaults to None. Raises: UnsupportedExtension: [description]
625941c330c21e258bdfa454
def get_number_rows(ai_settings,ship_height,alien_height): <NEW_LINE> <INDENT> available_space_y=(ai_settings.screen_height-(2*alien_height)-ship_height) <NEW_LINE> number_rows=int(available_space_y/(3*alien_height)) <NEW_LINE> return number_rows
计算屏幕可容纳多少行外星人
625941c3fbf16365ca6f6178
def _copy_file_time(source_file, destination_file): <NEW_LINE> <INDENT> file1, file2 = source_file, destination_file <NEW_LINE> try: <NEW_LINE> <INDENT> stat1 = os.stat(file1) <NEW_LINE> <DEDENT> except os.error: <NEW_LINE> <INDENT> sys.stderr.write(file1 + ' : cannot stat\n') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> os.utime(file2, (stat1[ST_ATIME], stat1[ST_MTIME])) <NEW_LINE> <DEDENT> except os.error: <NEW_LINE> <INDENT> sys.stderr.write(file2 + ' : cannot change time\n') <NEW_LINE> sys.exit(2)
Copy one file's atime and mtime to another. :param source_file: Source file. :param destination_file: Destination file.
625941c37b25080760e39412
def mfh_entry(self): <NEW_LINE> <INDENT> if self.address > 0xffffffff or self.address < (0xffffffff - FLASH_SIZE): <NEW_LINE> <INDENT> print('ERROR: Address {:#x} not valid for {}'.format(self.address, self.name), file=sys.stderr) <NEW_LINE> die('Valid range is {:#x} - 0xffffffff'.format(0xffffffff - FLASH_SIZE)) <NEW_LINE> <DEDENT> mfh_type = MFH_ITEM_TYPE[self.stype.replace('mfh.', '')] <NEW_LINE> return struct.pack(MFH_ENTRY_STRUCT, mfh_type, self.address, self.size, 0xf3f3f3f3)
Struct to use in the MFH when it is being generated
625941c33eb6a72ae02ec490
def spawn_worker_threads(self): <NEW_LINE> <INDENT> if self.coordinator.settings['debug']: <NEW_LINE> <INDENT> print("emailsendhandler.spawn_worker_threads: creating work pool") <NEW_LINE> <DEDENT> for i in range(len(self.worker_amounts)): <NEW_LINE> <INDENT> worker = EmailSender(self, i) <NEW_LINE> worker.name = "Thread #" + str(i) <NEW_LINE> self.workers.append(worker)
Create the required number of worker threads.
625941c363b5f9789fde709d
def getFlightCmds(): <NEW_LINE> <INDENT> flight_cmds = RoboCaller().call("getFlightCmds", "int") <NEW_LINE> for i in range(len(flight_cmds)): <NEW_LINE> <INDENT> flight_cmds[i] = ((flight_cmds[i] + 2**15) % 2**16 - 2**15) <NEW_LINE> <DEDENT> return flight_cmds
Get intermediate values from calculateFlightErrors. return: list
625941c357b8e32f52483451
def setErrorStatus( self, newStatusText ) -> None: <NEW_LINE> <INDENT> fnPrint( debuggingThisModule, "setErrorStatus( {!r} )".format( newStatusText ) ) <NEW_LINE> self.setStatus( newStatusText ) <NEW_LINE> Style().configure( 'MainStatusBar.TLabel', foreground='yellow', background='red' ) <NEW_LINE> self.update()
Set the status bar text and change the cursor to the wait/hourglass cursor.
625941c38a349b6b435e812b
def test_get_Image_comments(self): <NEW_LINE> <INDENT> self.jorim = User(username="hulk") <NEW_LINE> self.jorim.save() <NEW_LINE> self.jorim = User(username="thanos") <NEW_LINE> self.jorim.save() <NEW_LINE> self.test_profile = Profile(user=self.jorim, bio="Another Profile") <NEW_LINE> self.test_Image = Image(user=self.jorim, caption="Another Profile") <NEW_LINE> self.test_comment = Comment( Image=self.test_Image, comment_content="Wow") <NEW_LINE> gotten_comments = Comment.get_Image_comments(self.test_Image.id) <NEW_LINE> comments = Comment.objects.all() <NEW_LINE> self.assertTrue(len(gotten_comments) == len(comments))
Test case to check if get Image comments is getting comments for a specific Image
625941c3d164cc6175782d06
def test_create_token_no_user(self): <NEW_LINE> <INDENT> payload = {"email": "test@test.com", "password": "testpass"} <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertNotIn("token", res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
Test that token
625941c3293b9510aa2c3250
def __init__(self, name, value, type_name): <NEW_LINE> <INDENT> super(StaticVariableDeclarationNode, self).__init__(name, value) <NEW_LINE> self._type_name = type_name
Inicializa la clase C{StaticVariableDeclarationNode}. Para obtener información acerca de los parámetros recibidos por este método consulte la documentación del método C{__init__} en la clase C{BinaryOperatorNode}. @type type_name: C{str} @param type_name: Nombre del tipo que se expresa explícitamente para esta variable
625941c331939e2706e4ce24
def emitComment(self, comment): <NEW_LINE> <INDENT> if comment is None or comment == "": <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "/* " + comment + "*/"
Emit a comment
625941c3bd1bec0571d905e7
def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer,trunc_medium=-2): <NEW_LINE> <INDENT> features = [] <NEW_LINE> for (ex_index, example) in enumerate(examples): <NEW_LINE> <INDENT> if ex_index % 10000 == 0: <NEW_LINE> <INDENT> tf.logging.info("Writing example %d of %d" % (ex_index, len(examples))) <NEW_LINE> <DEDENT> feature = convert_single_example(ex_index, example, label_list, max_seq_length, tokenizer,trunc_medium) <NEW_LINE> features.append(feature) <NEW_LINE> <DEDENT> return features
Convert a set of `InputExample`s to a list of `InputFeatures`.
625941c3a219f33f34628924
def _getRecord(self, maxdate=None): <NEW_LINE> <INDENT> if not maxdate: <NEW_LINE> <INDENT> maxdate = self.maxdate <NEW_LINE> <DEDENT> print('record called') <NEW_LINE> record = self.EZ.read( self.EZ.esearch( db='pubmed', term='tcga', retmax=2000, mindate='2000/01/01', maxdate=maxdate.strftime('%Y/%m/%d'), usehistory='y' ) ) <NEW_LINE> self.recordObj = record
Call the initial pubmed search and set up the search history. :param maxdate: :return: set the pubmed records object
625941c307d97122c4178840
def get_name (self): <NEW_LINE> <INDENT> raise NotImplementedError
Return the descriptive name of the item
625941c3f548e778e58cd535
def intercept2(data, b): <NEW_LINE> <INDENT> n = len(data) <NEW_LINE> y = reduce(lambda a, b: a + b, [d[1] for d in data]) <NEW_LINE> xx = reduce(lambda a, b: a + b, [d[0]**2 for d in data]) <NEW_LINE> return (y - b * xx) / n
intercept a = (ΣY - b(ΣP)) / N >>> data = [(60, 3.1), (61, 3.6), (62, 3.8), (63, 4), (65, 4.1)]
625941c38c0ade5d55d3e972
def test_upload_file_public(self): <NEW_LINE> <INDENT> upload('test.py', test_bucket, acl='public-read', s3_dest='foo/bar.txt') <NEW_LINE> s3_url = 'https://s3.amazonaws.com/{}/foo/bar.txt'.format(test_bucket) <NEW_LINE> r = requests.get(s3_url) <NEW_LINE> self.assertEquals(r.status_code, 200)
Test that objects uploaded with a public ACL are accessible to the public.
625941c31f5feb6acb0c4b0b
def test_wrong_node_type_single(self): <NEW_LINE> <INDENT> asset_md = self.course_assets[0] <NEW_LINE> root = etree.Element("assets") <NEW_LINE> asset = etree.SubElement(root, "smashset") <NEW_LINE> with pytest.raises(ContractNotRespected): <NEW_LINE> <INDENT> asset_md.to_xml(asset)
Ensure single asset blocks with the wrong tag are detected.
625941c33539df3088e2e303
def open(self): <NEW_LINE> <INDENT> super(Keithley622x, self).open() <NEW_LINE> if (self._kiss488): <NEW_LINE> <INDENT> sleep(1.5) <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> bytes = self._inst.read_raw() <NEW_LINE> if (self._kiss488): <NEW_LINE> <INDENT> if ('KISS-488'.encode() in bytes): <NEW_LINE> <INDENT> print(bytes.decode('utf-8').strip()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except visa.errors.VisaIOError as err: <NEW_LINE> <INDENT> if (err.error_code != visa.constants.StatusCode.error_timeout): <NEW_LINE> <INDENT> print("ERROR: {}, {}".format(err, type(err))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif (self._prologix): <NEW_LINE> <INDENT> self._instWrite('++mode 1') <NEW_LINE> self._instWrite('++auto 0') <NEW_LINE> self._instWrite('++addr {}'.format(self._gaddr)) <NEW_LINE> self._instWrite('++eos 2') <NEW_LINE> self._instWrite('++eoi 1') <NEW_LINE> self._instWrite('++read_tmo_ms 600') <NEW_LINE> self._instWrite('++eot_enable 0') <NEW_LINE> self._instWrite('++ver') <NEW_LINE> sleep(self._query_delay) <NEW_LINE> print(self._inst.read())
Overloaded open() so can handle GPIB interfaces after opening the connection
625941c31b99ca400220aa6a
def register_all(): <NEW_LINE> <INDENT> reg = FactoryStore().register <NEW_LINE> reg(AC.CommFile, CommFile(AS.File)) <NEW_LINE> reg(AC.MedFile, MedFile(AS.File)) <NEW_LINE> reg(AC.ExportFile, ExportFile(AS.File)) <NEW_LINE> reg(AC.ExportFname, ExportFname(AS.File)) <NEW_LINE> reg(AC.WorkingDir, WorkingDir("working-dir")) <NEW_LINE> reg(AC.RemoveRmed, RemoveRmed()) <NEW_LINE> reg(AC.HasBaseResult, HasBaseResult()) <NEW_LINE> reg(AC.InteractivFollowUp, InteractivFollowUp()) <NEW_LINE> reg(AC.Value, Value()) <NEW_LINE> reg(AC.CommEntry, FileEntry(AC.CommFile, astk.CommFile)) <NEW_LINE> reg(AC.MedEntry, FileEntry(AC.MedFile, astk.MedFile)) <NEW_LINE> reg(AC.SMeshEntry, SMeshEntry()) <NEW_LINE> reg(AC.AstkProfil, AstkProfil()) <NEW_LINE> reg(AC.DataSection, Section(AS.DataSection)) <NEW_LINE> reg(AC.AstkParams, AstkParams()) <NEW_LINE> outputs = ( AC.MessFile, AC.ResuFile, AC.RMedFile, AC.BaseResult, ) <NEW_LINE> reg(AC.ResultsSection, ResultsSection(outputs)) <NEW_LINE> reg(AC.JobId, JobId()) <NEW_LINE> reg(AC.MessFile, ResultFile(astk.MessFile, AS.File)) <NEW_LINE> reg(AC.ResuFile, ResultFile(astk.ResuFile, AS.File)) <NEW_LINE> reg(AC.RMedFile, VisuMedFile(astk.RMedFile, AS.File)) <NEW_LINE> reg(AC.BaseResult, ResultDirectory("base-result", astk.BaseResult)) <NEW_LINE> inputs = ( AC.CommFile, AC.CommEntry, AC.MedFile, AC.MedEntry, AC.SMeshEntry, AC.WorkingDir, AC.RemoveRmed, AC.HasBaseResult, AC.InteractivFollowUp, ) <NEW_LINE> reg(AC.FromComm, FromComm(inputs, outputs)) <NEW_LINE> inputs = ( AC.ExportFile, ) <NEW_LINE> reg(AC.FromExport, Case(inputs, outputs, AS.FromExport)) <NEW_LINE> inputs = ( AC.AstkProfil, AC.ExportFname, ) <NEW_LINE> reg(AC.FromProfil, Case(inputs, outputs, AS.FromProfil))
Register all the factories
625941c3a8ecb033257d3086
def check_valid_input(letter_guessed, old_letters_guessed): <NEW_LINE> <INDENT> if len(letter_guessed) != 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not(letter_guessed.isalpha()): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if letter_guessed.lower() in list(map(str.lower, old_letters_guessed)): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
Return whether the input is a new, single letter
625941c324f1403a92600b20
def test_guest_no_tasks(self): <NEW_LINE> <INDENT> self.logger.info('setup: Setting task policy to all team members') <NEW_LINE> self.limited_access_team.task_assign_policy=10 <NEW_LINE> self.limited_access_team.save() <NEW_LINE> vids = self.add_some_team_videos() <NEW_LINE> self.videos_tab.open_videos_tab(self.limited_access_team.slug) <NEW_LINE> self.assertFalse(self.videos_tab.video_has_link(vids[0].title, 'Tasks'))
Task policy: all team members; guest sees no tasks.
625941c30383005118ecf59c
def _generate_token(self): <NEW_LINE> <INDENT> enc_data = urlencode(self.data) <NEW_LINE> digest_txt = enc_data[9:35] <NEW_LINE> return md5(digest_txt.encode('utf-8')).hexdigest()
Cleverbot tries to prevent unauthorized access to its API by obfuscating how it generates the 'icognocheck' token, so we have to URLencode the data twice: once to generate the token, and twice to add the token to the data we're sending to Cleverbot.
625941c3462c4b4f79d1d689
def draw_interaction(self, end=False): <NEW_LINE> <INDENT> self.gameDisplay.blit(self.backGround, self.backGroundRect) <NEW_LINE> self.gameDisplay.blit(self.chest, self.chestRect) <NEW_LINE> self.draw_dialog() <NEW_LINE> if not end: <NEW_LINE> <INDENT> self.gameDisplay.blit(self.button, self.damageButtonRect) <NEW_LINE> self.gameDisplay.blit(self.button, self.chanceButtonRect) <NEW_LINE> self.gameDisplay.blit(self.damageText, self.damageTextRect) <NEW_LINE> self.gameDisplay.blit(self.chanceText, self.chanceTextRect) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.gameDisplay.blit(self.button, self.continueButtonRect) <NEW_LINE> self.gameDisplay.blit(self.continueText, self.continueTextRect)
Hero is shown 2 buttons - must choose one Draw chest image in the middle of the window Buttons under chest
625941c330dc7b7665901921
@app.route('/logout') <NEW_LINE> def logout(): <NEW_LINE> <INDENT> if 'username' in session: <NEW_LINE> <INDENT> del session['username'] <NEW_LINE> <DEDENT> return '退出成功'
退出
625941c36fece00bbac2d6f6
@fixtures.fixture <NEW_LINE> def base_env(home_path, make_env, request): <NEW_LINE> <INDENT> env = make_env(home=home_path) <NEW_LINE> if isinstance(request.instance, subprocesstest.SubprocessTestCase): <NEW_LINE> <INDENT> request.instance.injected_test_env = env <NEW_LINE> <DEDENT> return env
A modified environment to ensure reproducible tests. Tests can modify this environment as they see fit.
625941c3d10714528d5ffc9a
def visit(i,snap,particles,visited,members,level,remaining): <NEW_LINE> <INDENT> if level >= sys.getrecursionlimit()/2: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> idx = int(np.argwhere(particles==i)) <NEW_LINE> if visited[idx] == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> members.append(i) <NEW_LINE> visited[idx] = 1 <NEW_LINE> nn = [x for x in snap.neighbors[i] if x in particles] <NEW_LINE> for j in nn: <NEW_LINE> <INDENT> jdx = np.argwhere(particles==j) <NEW_LINE> if visited[jdx] == 0: <NEW_LINE> <INDENT> result = visit(j,snap,particles,visited,members,level+1,remaining) <NEW_LINE> if not result: <NEW_LINE> <INDENT> remaining += [j] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True
traverse neighborlist to find clusters Args: i (int): particle index snap (Snapshot): Snapshot to work on particles (array): list of particles with the target signature visited (array): nodes which have already been visited members (array): nodes which belong to this cluster level (int): recursion depth remaining (list): particles yet to be visited in this search Returns: completed (bool): returns True if traversal completed or False if interrupted
625941c35fdd1c0f98dc01eb
def get_environment_by_id(self, account_id, environment_id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.get_environment_by_id_with_http_info(account_id, environment_id, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.get_environment_by_id_with_http_info(account_id, environment_id, **kwargs) <NEW_LINE> return data
get specific environment on account # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_environment_by_id(account_id, environment_id, async_req=True) >>> result = thread.get() :param async_req bool :param int account_id: Numeric ID of the account to retrieve (required) :param int environment_id: Numeric ID of the run to retrieve (required) :return: JobResponse If the method is called asynchronously, returns the request thread.
625941c39b70327d1c4e0d8d
def _handler_autosample_acquire_status(self, *args, **kwargs): <NEW_LINE> <INDENT> result = [] <NEW_LINE> self._wakeup(timeout=WAKEUP_TIMEOUT, delay=0.3) <NEW_LINE> self._wakeup(timeout=WAKEUP_TIMEOUT, delay=0.3) <NEW_LINE> result.append(self._do_cmd_resp(Command.GET_SD, response_regex=SBE43StatusParticle.regex_compiled(), timeout=TIMEOUT)) <NEW_LINE> log.debug("_handler_autosample_acquire_status: GetSD Response: %s", result) <NEW_LINE> result.append(self._do_cmd_resp(Command.GET_HD, response_regex=SBE43HardwareParticle.regex_compiled(), timeout=TIMEOUT)) <NEW_LINE> log.debug("_handler_autosample_acquire_status: GetHD Response: %s", result) <NEW_LINE> result.append(self._do_cmd_resp(Command.GET_CD, response_regex=SBE43ConfigurationParticle.regex_compiled(), timeout=TIMEOUT)) <NEW_LINE> log.debug("_handler_autosample_acquire_status: GetCD Response: %s", result) <NEW_LINE> result.append(self._do_cmd_resp(Command.GET_CC, response_regex=SBE43CalibrationParticle.regex_compiled(), timeout=TIMEOUT)) <NEW_LINE> log.debug("_handler_autosample_acquire_status: GetCC Response: %s", result) <NEW_LINE> result.append(self._do_cmd_resp(Command.GET_EC, timeout=TIMEOUT)) <NEW_LINE> log.debug("_handler_autosample_acquire_status: GetEC Response: %s", result) <NEW_LINE> self._do_cmd_no_resp(Command.RESET_EC) <NEW_LINE> return None, (None, ''.join(result))
Get device status in autosample mode
625941c33eb6a72ae02ec491
def test_invalid_size_dna_matrix(self): <NEW_LINE> <INDENT> checker = MutantChecker() <NEW_LINE> self.assertRaises(ValueError, checker.isMutant, self.invalid_size_dna)
Ensures the MutantChecker algorithm raises an exception when the matrix is not square.
625941c30a50d4780f666e49
def test_timeit_return(): <NEW_LINE> <INDENT> res = _ip.run_line_magic('timeit','-n10 -r10 -o 1') <NEW_LINE> assert(res is not None)
test whether timeit -o return object
625941c33d592f4c4ed1d02b
def _get_string_value(self, context, tag_name): <NEW_LINE> <INDENT> val = context.get(tag_name) <NEW_LINE> if not val and val != 0: <NEW_LINE> <INDENT> if tag_name != '.': <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> val = context.top() <NEW_LINE> <DEDENT> if callable(val): <NEW_LINE> <INDENT> template = val() <NEW_LINE> if not isinstance(template, basestring): <NEW_LINE> <INDENT> template = str(template) <NEW_LINE> <DEDENT> if type(template) is not unicode: <NEW_LINE> <INDENT> template = self.literal(template) <NEW_LINE> <DEDENT> val = self._render(template, context) <NEW_LINE> <DEDENT> if not isinstance(val, basestring): <NEW_LINE> <INDENT> val = str(val) <NEW_LINE> <DEDENT> return val
Get a value from the given context as a basestring instance.
625941c3236d856c2ad44791
def create_count_array(base): <NEW_LINE> <INDENT> outArray = [] <NEW_LINE> for i in range(base): <NEW_LINE> <INDENT> outArray.append([]) <NEW_LINE> <DEDENT> return outArray
This function returns an list of size base and having an empty list as each element in the list. :param base: an integer to determine the size of the returned list :return: a list of size base containing empty lists as each element @time complexity : O(b) whereby b is the value of base. The for loop performs base number of iterations and appending an empty list to the generated empty list in each iteration. It is assumed that appending would require O(1) time. @space complexity : Total - O(b) whereby b represents the memory space required to generate a list of size base. The input base would always occupy a constant amount of space since it's an integer so the space required by the input is O(1). Auxiliary - O(b) since the outArray would take up an extra b amount of space in memory.
625941c326068e7796caec95
def calc_correlation(dist): <NEW_LINE> <INDENT> range_length = 10.0 <NEW_LINE> return 1.0 - np.exp(-np.square(dist)/(range_length ** 2.0))
a very simple semivariogram is defined. like: 1.0 Gaus (10.0) TODO: this should be replaced after useing the semivariogram class
625941c31d351010ab855ad5
def handle_chunk_IEND(self, chunk, length): <NEW_LINE> <INDENT> if self.plte: <NEW_LINE> <INDENT> self._build_palette() <NEW_LINE> <DEDENT> self.done_reading = True
IEND is the last chunk, so stop reading and actually process IDAT
625941c3925a0f43d2549e2e
def skip_last(source: Observable) -> Observable: <NEW_LINE> <INDENT> def subscribe(observer, scheduler=None): <NEW_LINE> <INDENT> q = [] <NEW_LINE> def on_next(value): <NEW_LINE> <INDENT> front = None <NEW_LINE> with source.lock: <NEW_LINE> <INDENT> q.append(value) <NEW_LINE> if len(q) > count: <NEW_LINE> <INDENT> front = q.pop(0) <NEW_LINE> <DEDENT> <DEDENT> if front is not None: <NEW_LINE> <INDENT> observer.on_next(front) <NEW_LINE> <DEDENT> <DEDENT> return source.subscribe_(on_next, observer.on_error, observer.on_completed, scheduler) <NEW_LINE> <DEDENT> return Observable(subscribe)
Bypasses a specified number of elements at the end of an observable sequence. This operator accumulates a queue with a length enough to store the first `count` elements. As more elements are received, elements are taken from the front of the queue and produced on the result sequence. This causes elements to be delayed. Args: count: Number of elements to bypass at the end of the source sequence. Returns: An observable sequence containing the source sequence elements except for the bypassed ones at the end.
625941c3be383301e01b5442
def check_product_flow(self, flow, termination): <NEW_LINE> <INDENT> if termination is None: <NEW_LINE> <INDENT> raise ValueError('Must supply a termination') <NEW_LINE> <DEDENT> k = (flow.uuid, termination.uuid) <NEW_LINE> if k in self._product_flows: <NEW_LINE> <INDENT> return self.product_flow(self._product_flows[k]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
returns the product flow if it exists, or None if it doesn't :param flow: :param termination: the process whose reference flow is flow :return:
625941c3cad5886f8bd26f92
def enable_receiver(self, receiver_name, period_ms): <NEW_LINE> <INDENT> receiver = self.getReceiver(receiver_name) <NEW_LINE> receiver.enable(period_ms)
Enables the receiver.
625941c3cb5e8a47e48b7a65
def list_of_items(items): <NEW_LINE> <INDENT> item_list = [] <NEW_LINE> for i in items: <NEW_LINE> <INDENT> item_list.append(i['name']) <NEW_LINE> <DEDENT> final_list = ", ".join(item_list) <NEW_LINE> return final_list
This function takes a list of items (see items.py for the definition) and returns a comma-separated list of item names (as a string). For example:
625941c3d18da76e2353248d
def start_to_timestamp(start): <NEW_LINE> <INDENT> if isinstance(start, datetime.datetime): <NEW_LINE> <INDENT> offset_time = (start - EPOCH).total_seconds() <NEW_LINE> <DEDENT> elif isinstance(start, datetime.timedelta): <NEW_LINE> <INDENT> now = calendar.timegm(time.gmtime()) <NEW_LINE> offset_time = now - start.total_seconds() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> offset_time = start <NEW_LINE> <DEDENT> return offset_time
Helper method for translating "start" values into offset api values. Valid values are instances of ``datetime.datetime``, ``datetime.timedelta`` or one of `SingleConsumer.BEGINNING` or `SingleConsumer.END`.
625941c315fb5d323cde0ac6
def show(self): <NEW_LINE> <INDENT> pass
This function is to be called to show the food. Exactly how show is defined depends on the environment using it Override if necessary
625941c3dd821e528d63b163
def add_key(self, key, peer_key_id, commit = True): <NEW_LINE> <INDENT> add = Row.add_object(Key, 'peer_key') <NEW_LINE> add(self, key, {'peer_id': self['peer_id'], 'key_id': key['key_id'], 'peer_key_id': peer_key_id}, commit = commit)
Associate a local key entry with this peer.
625941c3b545ff76a8913dcf
def fol_bc_and(knowledge_dict, goals, theta, level=0): <NEW_LINE> <INDENT> if theta is None: <NEW_LINE> <INDENT> yield None <NEW_LINE> <DEDENT> elif len(goals) == 0: <NEW_LINE> <INDENT> yield theta.copy() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (first, rest) = (goals[0], goals[1:]) <NEW_LINE> for thetap in fol_bc_or(knowledge_dict, subst(first, theta), theta, level + 1): <NEW_LINE> <INDENT> for thetapp in fol_bc_and(knowledge_dict, rest, thetap, level + 1): <NEW_LINE> <INDENT> if thetapp: <NEW_LINE> <INDENT> yield thetapp.copy() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return
Resolves and-statements :param knowledge_dict: dictionary of facts :param goal: dictionary of question asked :param theta: current resolved variables :param level: :return:
625941c3004d5f362079a2ed
def to_ports(self, service_search=[], sreg=False, *args, **kwargs): <NEW_LINE> <INDENT> if not service_search: <NEW_LINE> <INDENT> return self.get_ports() <NEW_LINE> <DEDENT> ports = PortList() <NEW_LINE> for service in service_search: <NEW_LINE> <INDENT> if sreg: <NEW_LINE> <INDENT> ports += self.ports.get(attr='service',value=service, regexp=True,value_attr='name') <NEW_LINE> ports += self.ports.get(attr='service',value=service, regexp=True,value_attr='extrainfo') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ports += self.ports.get(attr='service',value=service) <NEW_LINE> ports += self.ports.get('service',value=service, value_attr='extrainfo') <NEW_LINE> <DEDENT> <DEDENT> return [str(p.number) for p in set(ports)]
Translate the host to a list of port numbers.
625941c394891a1f4081ba61
def testTranslationMemoryEntry(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
Test TranslationMemoryEntry
625941c3711fe17d82542328
def _parse_self_depends(self, depends): <NEW_LINE> <INDENT> depends_list = list() <NEW_LINE> for depend in depends.split(','): <NEW_LINE> <INDENT> re_match = re_depends.match(depend.strip()) <NEW_LINE> if re_match is not None: <NEW_LINE> <INDENT> name = re_match.group(1) <NEW_LINE> comparator = re_match.group(3) <NEW_LINE> version = re_match.group(4) <NEW_LINE> if name.lower() != 'octave': <NEW_LINE> <INDENT> depends_list.append((name, comparator, version)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.error('Invalid dependency atom: %s' % depend) <NEW_LINE> raise DescriptionException('Invalid dependency atom: %s' % depend) <NEW_LINE> <DEDENT> <DEDENT> return depends_list
returns a list of tuples (name, comparator, version) for the other octave-forge packages.
625941c36aa9bd52df036d5c
def make_param(name, value): <NEW_LINE> <INDENT> return theano.shared( value=value, name=name, borrow=True )
Function that makes parameters into theano shared variables for DNN layers.
625941c34e4d5625662d4393
def _check_rules(dataset, ruleset): <NEW_LINE> <INDENT> error_log = ValidationErrorLog() <NEW_LINE> error_found = False <NEW_LINE> for rule in ruleset.rules: <NEW_LINE> <INDENT> validation_status = rule.is_valid_for(dataset) <NEW_LINE> if validation_status is None: <NEW_LINE> <INDENT> error = ValidationError('warn-rule-skipped', locals()) <NEW_LINE> error_log.add(error) <NEW_LINE> <DEDENT> elif validation_status is False: <NEW_LINE> <INDENT> error = _create_error_for_rule(rule) <NEW_LINE> error_log.add(error) <NEW_LINE> error_found = True <NEW_LINE> <DEDENT> <DEDENT> if error_found: <NEW_LINE> <INDENT> error = ValidationError('err-ruleset-conformance-fail', locals()) <NEW_LINE> error_log.add(error) <NEW_LINE> <DEDENT> return error_log
Determine whether a given Dataset conforms with a provided Ruleset. Args: dataset (iati.data.Dataset): The Dataset to check Ruleset conformance with. ruleset (iati.code.Ruleset): The Ruleset to check conformance with. Returns: iati.validator.ValidationErrorLog: A log of the errors that occurred.
625941c396565a6dacc8f685
def fopen(ref): <NEW_LINE> <INDENT> return [i.split() for i in open(ref).readlines()]
Open a file and return a list of lines.
625941c3cc40096d6159590a
def segment_craters(mids, all_points): <NEW_LINE> <INDENT> if isinstance(all_points, str): <NEW_LINE> <INDENT> input_df = pd.read_csv(all_points, sep=";") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> input_df = all_points <NEW_LINE> <DEDENT> if isinstance(mids, str): <NEW_LINE> <INDENT> mid_df = pd.read_csv(mids, sep=";") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mid_df = mids <NEW_LINE> <DEDENT> input_df = input_df.rename(columns={'//X': 'X'}) <NEW_LINE> xyzArray = input_df.filter(items=['X', 'Y', 'Z']).to_numpy() <NEW_LINE> tree3D = KDTree(xyzArray) <NEW_LINE> ncr_array = input_df['Normal change rate (3)'].to_numpy() <NEW_LINE> segmentIds = [-1]*len(xyzArray) <NEW_LINE> segmentArray = np.array(segmentIds) <NEW_LINE> mid_array = mid_df.to_numpy() <NEW_LINE> searchRad = 0.6 <NEW_LINE> SegID = 0 <NEW_LINE> for i in range(len(mid_array)): <NEW_LINE> <INDENT> MidPt = [mid_array[i]] <NEW_LINE> IdxList = tree3D.query_radius(MidPt, searchRad) <NEW_LINE> SegID += 1 <NEW_LINE> Shoots = IdxList[0] <NEW_LINE> count = 1 <NEW_LINE> while len(Shoots) != 0 and count < 2000: <NEW_LINE> <INDENT> Seeds = Shoots <NEW_LINE> Shoots = [] <NEW_LINE> for seedID in Seeds: <NEW_LINE> <INDENT> segmentArray[seedID] = SegID <NEW_LINE> SeedPoint = xyzArray[seedID] <NEW_LINE> SearchPt = [SeedPoint] <NEW_LINE> IdxList = tree3D.query_radius(SearchPt, searchRad) <NEW_LINE> for j in range(len(IdxList[0])): <NEW_LINE> <INDENT> idx = IdxList[0][j] <NEW_LINE> dist = sqrt((xyzArray[idx][0]-MidPt[0][0])**2+(xyzArray[idx][1]-MidPt[0][1])**2) <NEW_LINE> if (segmentArray[idx] != -1): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if ncr_array[idx] > 0.0028 and dist < 7: <NEW_LINE> <INDENT> Shoots.append(idx) <NEW_LINE> segmentArray[idx] = SegID <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> segmented_df = input_df <NEW_LINE> segmented_df['SegmentID'] = segmentArray <NEW_LINE> segmented_df.loc[segmented_df['SegmentID'] < 0, 'SegmentID'] = 0 <NEW_LINE> is_seg = 
segmented_df['SegmentID'] != 0 <NEW_LINE> segmented_df = segmented_df[is_seg] <NEW_LINE> df = filter_planarity.cut_planes(segmented_df) <NEW_LINE> print("Segments: ", len(set(df['SegmentID']))) <NEW_LINE> return df
:param mids: input middle point of craters :param all_points: point cloud continaing total AOI :return: df with segmented pointcloud segments will be built based on distance to nearest point default = 0.6
625941c30a50d4780f666e4a
def search(self, query=''): <NEW_LINE> <INDENT> return CategoryList(digital_object_list=self.repository.search('objatt_type:category AND (%s)' % query), repository=self)
do a search of the repository, filtered by type=category
625941c38c3a873295158372
def _structure_from_instance(self): <NEW_LINE> <INDENT> struct = super()._structure_from_instance() <NEW_LINE> struct.update({ 'type': self.TYPE, 'data': { 'coin': MinterHelper.encode_coin_name(self.coin), 'to': bytes.fromhex(MinterHelper.prefix_remove(self.to)), 'value': MinterHelper.to_pip(self.value) } }) <NEW_LINE> return struct
Override parent method to add tx special data.
625941c3a17c0f6771cbe00b
def __init__(self): <NEW_LINE> <INDENT> self.smaller = [] <NEW_LINE> self.larger = []
Initialize your data structure here.
625941c3925a0f43d2549e2f
def subtract(self): <NEW_LINE> <INDENT> return self._do_calc(self.subtracter)
Subtract method which calls the calc method on the subtracter object
625941c38e05c05ec3eea32c
def train(self, i, crf_data, crf_ranges): <NEW_LINE> <INDENT> mpath = '{}.{}'.format(self.model_path, i) <NEW_LINE> path = '{}.{}'.format(self.train_path, i) <NEW_LINE> self.write_file(path, crf_data, crf_ranges) <NEW_LINE> return subprocess.Popen([self.crf, 'learn', '-m', mpath, path], stdout=subprocess.DEVNULL)
call CRFsuite to train a model
625941c35fc7496912cc3937
def add_string(self, s, obj): <NEW_LINE> <INDENT> curState = self.startState <NEW_LINE> statesAdded = 0 <NEW_LINE> if self.verbose > 1: <NEW_LINE> <INDENT> print('Starting', s, '...') <NEW_LINE> <DEDENT> for i in range(len(s)): <NEW_LINE> <INDENT> c = s[i] <NEW_LINE> if self.verbose > 1: <NEW_LINE> <INDENT> print('Current character:', c, ', current state:', curState) <NEW_LINE> <DEDENT> if c == '.': <NEW_LINE> <INDENT> if i == len(s) - 1: <NEW_LINE> <INDENT> curState.add_obj(obj) <NEW_LINE> <DEDENT> nextStates = self.get_next_states_strict(curState, '') <NEW_LINE> if len(nextStates) != 1 or not nextStates[0].loopState: <NEW_LINE> <INDENT> nextState = MorphFSTState(loopState=True) <NEW_LINE> self.add_transition(curState, '', nextState) <NEW_LINE> curState = nextState <NEW_LINE> statesAdded += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> curState = nextStates[0] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> nextStates = self.get_next_states_strict(curState, c) <NEW_LINE> if len(nextStates) != 1 or nextStates[0].loopState: <NEW_LINE> <INDENT> nextState = MorphFSTState() <NEW_LINE> self.add_transition(curState, c, nextState) <NEW_LINE> curState = nextState <NEW_LINE> statesAdded += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> curState = nextStates[0] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> curState.add_obj(obj) <NEW_LINE> return statesAdded
Add a string s (representing either a stem or an affix) to the transducer. obj is the transducer's output given that string. Return number of states added during the operation.
625941c3f8510a7c17cf96b5
def insert(self, data): <NEW_LINE> <INDENT> node = BTNode(data) <NEW_LINE> cur = self._root <NEW_LINE> while (cur is not None): <NEW_LINE> <INDENT> parent = cur <NEW_LINE> if (data < cur.get_data()): <NEW_LINE> <INDENT> cur = cur.get_left() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur = cur.get_right() <NEW_LINE> <DEDENT> <DEDENT> if (data < parent.get_data()): <NEW_LINE> <INDENT> parent.set_left(node) <NEW_LINE> node.set_parent(parent) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parent.set_right(node) <NEW_LINE> node.set_parent(parent)
(BST,obj) -> None create a node containing data and insert it in a right place
625941c38e05c05ec3eea32d
@task(alias='m') <NEW_LINE> def manage(manage_task): <NEW_LINE> <INDENT> if env.env == 'local': <NEW_LINE> <INDENT> env.runner('python manage.py {task}'.format(task=manage_task)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> env.runner( 'bin/python manage.py --env {env} {task}'.format( env=env.env, task=manage_task ) )
Run a manage task
625941c36fb2d068a760f055
def grad_curl_div(self, u): <NEW_LINE> <INDENT> A = self.grad(u) <NEW_LINE> e = np.zeros((3, 3, 3)) <NEW_LINE> e[0, 1, 2] = e[1, 2, 0] = e[2, 0, 1] = 1 <NEW_LINE> e[0, 2, 1] = e[2, 1, 0] = e[1, 0, 2] = -1 <NEW_LINE> omega = np.einsum('ijk,jk...->i...', e, A) <NEW_LINE> Aii = np.einsum('ii...', A) <NEW_LINE> return A, omega, Aii
Uses numpy.einsum which can be dramatically faster than alternative routines for many use cases
625941c371ff763f4b549642
def __init__(self, parent): <NEW_LINE> <INDENT> Tk.__init__(self, parent) <NEW_LINE> self.parent = parent <NEW_LINE> self.file_dic = OrderedDict() <NEW_LINE> self.orig_patch_obj_dic = OrderedDict() <NEW_LINE> self.patch_obj_dic = OrderedDict() <NEW_LINE> self.cb_dic = OrderedDict() <NEW_LINE> self.container = [] <NEW_LINE> self.init_completed = False <NEW_LINE> self.apply_button = None <NEW_LINE> self.disable_all_button = None <NEW_LINE> self.defaults_button = None <NEW_LINE> fn_list = askopenfilenames(filetypes=[('Patch Files', '*.patch')], parent=self.parent) <NEW_LINE> if fn_list: <NEW_LINE> <INDENT> fd = {fn: None for fn in fn_list} <NEW_LINE> self.file_dic, error = read_patch_files(fd) <NEW_LINE> if error: <NEW_LINE> <INDENT> messagebox.showerror('Read Error!', error) <NEW_LINE> self.init_completed = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for (fn, patch_text) in iterDic(self.file_dic): <NEW_LINE> <INDENT> self.orig_patch_obj_dic[fn] = gen_patch_obj_list(fn, patch_text) <NEW_LINE> <DEDENT> self.init_completed = True <NEW_LINE> self.initialize() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.init_completed = False
Sets initial GUI variable state only if one or more patch files are selected by the user. :param parent: :return:
625941c3e1aae11d1e749c6f
def n_elements(my_list, start, n): <NEW_LINE> <INDENT> last = start + n <NEW_LINE> return my_list[start:last]
returns n elements in a list, starting at the position "start". EX: n_elements([0, 1, 2, 3, 4, 5], 2, 3) should return [2, 3, 4] :param my_list: a non-empty list :param start: a non-negative integer :param n: an integer greater than 0 :return: a list of length n
625941c3dd821e528d63b164
def load(self, weights_file, strict=False): <NEW_LINE> <INDENT> if os.path.splitext(weights_file)[1] == '.pt': <NEW_LINE> <INDENT> log.debug('Loading weights from pytorch file') <NEW_LINE> super().load(weights_file, strict) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.debug('Loading weights from darknet file') <NEW_LINE> self._load_darknet_weights(weights_file)
This function will load the weights from a file. If the file extension is ``.pt``, it will be considered as a `pytorch pickle file <http://pytorch.org/docs/stable/notes/serialization.html#recommended-approach-for-saving-a-model>`_. Otherwise, the file is considered to be a darknet binary weight file. Args: weights_file (str): path to file strict (Boolean, optional): Whether the weight file should contain all layers of the model; Default **False** Note: The ``strict`` parameter only works for pytorch pickle files. See :class:`~lightnet.network.module.Lightnet` for more information.
625941c3ff9c53063f47c1ae
def display_board(board): <NEW_LINE> <INDENT> print("\n{}|{}|{}\n-----\n{}|{}|{}\n-----\n{}|{}|{}\n".format(board[7],board[8],board[9],board[4],board[5],board[6],board[1],board[2],board[3]))
Display the current board for each round. INPUT: board
625941c3c4546d3d9de729ec
def test_path_3(test): <NEW_LINE> <INDENT> p = Path("not_a_real_dir") <NEW_LINE> test.assertFalse(p.exists()) <NEW_LINE> test.assertFalse(p.isfile()) <NEW_LINE> test.assertFalse(p.isdir()) <NEW_LINE> test.assertTrue(None == p.created()) <NEW_LINE> test.assertTrue(None == p.modified()) <NEW_LINE> test.assertTrue(None == p.size()) <NEW_LINE> test.assertTrue(None == p.isempty()) <NEW_LINE> test.assertTrue(None == p.isempty()) <NEW_LINE> test.assertTrue("not_a_real_dir" == p.name)
Basic Path usage.
625941c3a8ecb033257d3087
def _logger_setup(self) -> None: <NEW_LINE> <INDENT> self._verify_env() <NEW_LINE> log_dir = Path(environ['log_dir']) <NEW_LINE> if not log_dir.exists(): <NEW_LINE> <INDENT> log_dir.mkdir() <NEW_LINE> <DEDENT> filename = Path.joinpath(log_dir, 'namecheap-updater.log') <NEW_LINE> log = logging.getLogger('namecheap_updater') <NEW_LINE> log.setLevel(logging.DEBUG) <NEW_LINE> fmt = logging.Formatter("%(asctime)s [%(filename)s] func: [%(funcName)s] [%(levelname)s] " "line: [%(lineno)d] %(message)s") <NEW_LINE> file_handler = RotatingFileHandler(filename=filename, delay=True, backupCount=5, maxBytes=2000000) <NEW_LINE> file_handler.setLevel(logging.DEBUG) <NEW_LINE> file_handler.setFormatter(fmt) <NEW_LINE> if not log.handlers: <NEW_LINE> <INDENT> log.addHandler(file_handler) <NEW_LINE> <DEDENT> self.logger = log
Initializes the logger.
625941c35166f23b2e1a5113
def sqlDelete(self,composentes=True): <NEW_LINE> <INDENT> if self.dbid:self._sqlDelete()
Deletes itself and its components if dbid is set.
625941c366673b3332b9204a
def TransformOperation(self, list_of_uuids, transform4x4): <NEW_LINE> <INDENT> self.send_TransformOperation(list_of_uuids, transform4x4) <NEW_LINE> self.recv_TransformOperation()
Parameters: - list_of_uuids - transform4x4
625941c34428ac0f6e5ba7ab
def _try_send(self, text: str) -> Optional[BaseException]: <NEW_LINE> <INDENT> result = None <NEW_LINE> try: <NEW_LINE> <INDENT> if self.mode == '': <NEW_LINE> <INDENT> self._generate(text) <NEW_LINE> <DEDENT> elif self.mode == 'c': <NEW_LINE> <INDENT> self.app.adventure.context = text <NEW_LINE> <DEDENT> elif self.mode == 'a': <NEW_LINE> <INDENT> self.app.adventure.actions[self.edit_index] = text <NEW_LINE> <DEDENT> elif self.mode == 'r': <NEW_LINE> <INDENT> self.app.adventure.results[self.edit_index] = text <NEW_LINE> <DEDENT> elif self.mode == 'm': <NEW_LINE> <INDENT> self.app.adventure.memory = text <NEW_LINE> <DEDENT> <DEDENT> except FunctionTimedOut as result: <NEW_LINE> <INDENT> popup = ErrorPopup() <NEW_LINE> popup.ids.error_text.text = 'The AI took too long to respond.\n' 'Please try something else.' <NEW_LINE> popup.open() <NEW_LINE> Logger.info(f"AI: AI timed out.") <NEW_LINE> <DEDENT> except Exception as result: <NEW_LINE> <INDENT> popup = ErrorPopup() <NEW_LINE> popup.ids.error_text.text = 'An unexpected error occurred.\n' 'Please try something else,\n' 'or adjust your settings.' <NEW_LINE> popup.open() <NEW_LINE> Logger.error(f"AI: {traceback.format_exc()}") <NEW_LINE> <DEDENT> return result
Determines and performs the send action depending on the current `mode`. :param text: The text to send.
625941c307d97122c4178841
def EventReceivedAddEncKey(builder, encKey): <NEW_LINE> <INDENT> return AddEncKey(builder, encKey)
This method is deprecated. Please switch to AddEncKey.
625941c3d486a94d0b98e0ff
def yuv2rgb(yuv): <NEW_LINE> <INDENT> return _convert(rgb_from_yuv, yuv)
YUV to RGB color space conversion. Parameters ---------- yuv : array_like The image in YUV format, in a 3- or 4-D array of shape ``(M, N, [P,] 3)``. Returns ------- out : ndarray The image in RGB format, in a 3- or 4-D array of shape ``(M, N, [P,] 3)``. Raises ------ ValueError If `yuv` is not a 3- or 4-D array of shape ``(M, N, [P,] 3)``.
625941c3e8904600ed9f1ee4
def write_to_notification_spool(self, message, is_error, is_verbose, center_id=None): <NEW_LINE> <INDENT> type_id = self.db.grep_id_from_lookup_table( id_field_name='NotificationTypeID', table_name='notification_types', where_field_name='Type', where_value=self.notification_type, insert_if_not_found=True ) <NEW_LINE> col_names = ( 'NotificationTypeID', 'TimeSpooled', 'Message', 'Origin', 'ProcessID', 'Error', 'Verbose' ) <NEW_LINE> values = ( type_id, datetime.datetime.now(), message, self.notification_origin, self.process_id, is_error, is_verbose ) <NEW_LINE> if center_id: <NEW_LINE> <INDENT> col_names = col_names + ('CenterID',) <NEW_LINE> values = values + (center_id,) <NEW_LINE> <DEDENT> self.db.insert( table_name='notification_spool', column_names=col_names, values=values )
Insert a row in the notification_spool table. :param message: message to be inserted in the notification_spool table :type message: str :param is_error: whether the notification is an error or not ('Y' or 'N') :type is_error: str :param is_verbose: whether the notification is verbose or not ('Y' or 'N') :type is_verbose: str :param center_id: the CenterID associated with the notification when applicable :type center_id: int :return:
625941c35fc7496912cc3938
def __Set_Header_Info(self): <NEW_LINE> <INDENT> self.headers = { 'Host': self.IIS_API_Host + ":" + self.IIS_API_Port, 'User-Agent': "Nagios IIS API Per Check", 'Accept': "application/hal+json", 'Accept-Language': "en-US,en;q=0.8,en-US;q=0.5,en;q=0.3", 'Accept-Encoding': "gzip, deflate, br", 'Access-Token': "Bearer " + self.IIS_API_Token, 'Connection': "keep-alive" }
Set the header information for the API request. :return:
625941c3dc8b845886cb54ee