code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def get_tasks_by_id(self, task_id): <NEW_LINE> <INDENT> return self.requests_(http="GET", url="/v2/tasks/%s" % task_id) | Get the task by task_id.
Endpoint: /tasks/{task_id}
Returns:
class:`Response <Response>` object | 625941c315fb5d323cde0acc |
def depthFirstSearch(problem): <NEW_LINE> <INDENT> from game import Directions <NEW_LINE> from util import Stack <NEW_LINE> frontier = Stack() <NEW_LINE> stateTuple = (problem.getStartState(),[]) <NEW_LINE> frontier.push(stateTuple) <NEW_LINE> explored = [] <NEW_LINE> while not frontier.isEmpty(): <NEW_LINE> <INDENT> current_node = frontier.pop() <NEW_LINE> if problem.isGoalState(current_node[0]): <NEW_LINE> <INDENT> print("current_node[1][", current_node[1]) <NEW_LINE> return current_node[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> explored.append(current_node[0]) <NEW_LINE> children = problem.getSuccessors(current_node[0]) <NEW_LINE> for child in children: <NEW_LINE> <INDENT> if child[0] not in explored: <NEW_LINE> <INDENT> frontier.push((child[0],current_node[1] + [child[1]])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> util.raiseNotDefined() <NEW_LINE> return 0 | Search the deepest nodes in the search tree first.
Your search algorithm needs to return a list of actions that reaches the
goal. Make sure to implement a graph search algorithm.
To get started, you might want to try some of these simple commands to
understand the search problem that is being passed in:
print("Start:", problem.getStartState())
print("Is the start a goal?", problem.isGoalState(problem.getStartState()))
print("Start's successors:", problem.getSuccessors(problem.getStartState())) | 625941c3498bea3a759b9a6e |
def generate_security_data(self): <NEW_LINE> <INDENT> timestamp = int(time.time()) <NEW_LINE> security_dict = { 'content_type' : str(self.target_object._meta), 'object_pk' : str(self.target_object._get_pk_val()), 'timestamp' : str(timestamp), 'security_hash' : self.initial_security_hash(timestamp), } <NEW_LINE> return security_dict | Generate a dict of security data for "initial" data. | 625941c3fff4ab517eb2f3fa |
def testUpdate2(self): <NEW_LINE> <INDENT> a = self.build_read() <NEW_LINE> a.query_sequence = a.query_sequence[5:10] <NEW_LINE> self.assertEqual(pysam.qualities_to_qualitystring(a.query_qualities), None) <NEW_LINE> a = self.build_read() <NEW_LINE> s = pysam.qualities_to_qualitystring(a.query_qualities) <NEW_LINE> a.query_sequence = a.query_sequence[5:10] <NEW_LINE> a.query_qualities = pysam.qualitystring_to_array(s[5:10]) <NEW_LINE> self.assertEqual(pysam.qualities_to_qualitystring(a.query_qualities), s[5:10]) | issue 135: inplace update of sequence and quality score.
This does not work as setting the sequence will erase
the quality scores. | 625941c3956e5f7376d70e2d |
def study_convergence(mdp, epsilon=0.01): <NEW_LINE> <INDENT> vi_values = Value_Iteration(mdp, epsilon) <NEW_LINE> pi_valies = Policy_Iteration(mdp) <NEW_LINE> Value_list_VI = [np.linalg.norm(vi_values[0] - V) for V in vi_values[2]] <NEW_LINE> Value_list_PI = [np.linalg.norm(pi_valies[0] - V) for V in pi_valies [2]] <NEW_LINE> plt.figure() <NEW_LINE> plt.title('Convergence of Value Iteration and Policy iteration') <NEW_LINE> plt.xlabel("Number of Iteration") <NEW_LINE> plt.ylabel('Norm of Vn - V') <NEW_LINE> plt.plot(range(len(Value_list_VI)), Value_list_VI, '-', label='Value Iteration') <NEW_LINE> plt.plot(range(len(Value_list_PI)), Value_list_PI, 'r-', label='Policy Iteration') <NEW_LINE> plt.legend() <NEW_LINE> plt.savefig("Convergence_of_Policy_Iteration_and_Value_Iteration.jpg") <NEW_LINE> plt.show() | :param mdp: Class of the MDP simulation
:param epsilon:
:return: | 625941c3b545ff76a8913dd5 |
def kwargs_row(func: Callable[..., T]) -> BaseRowFactory[T]: <NEW_LINE> <INDENT> def kwargs_row_(cur: "BaseCursor[Any, T]") -> "RowMaker[T]": <NEW_LINE> <INDENT> desc = cur.description <NEW_LINE> if desc is None: <NEW_LINE> <INDENT> return no_result <NEW_LINE> <DEDENT> names = [d.name for d in desc] <NEW_LINE> def kwargs_row__(values: Sequence[Any]) -> T: <NEW_LINE> <INDENT> return func(**dict(zip(names, values))) <NEW_LINE> <DEDENT> return kwargs_row__ <NEW_LINE> <DEDENT> return kwargs_row_ | Generate a row factory calling *func* with keyword parameters for every row.
:param func: The function to call for each row. It must support the fields
returned by the query as keyword arguments. | 625941c31f5feb6acb0c4b12 |
def __init__(self): <NEW_LINE> <INDENT> if self.NAME is None or self.SCOPE is None: <NEW_LINE> <INDENT> raise Exception('GClient class cannot be instantiated alone. ' + 'Instantiate a subclass for a specific API.') <NEW_LINE> <DEDENT> self._client_id = decrypt_token(settings['GCLIENT_ID']) <NEW_LINE> self._client_secret = decrypt_token(settings['GCLIENT_SECRET']) <NEW_LINE> self._access_token = None <NEW_LINE> self._expiry_from_epoch = None <NEW_LINE> tokens_json = self.load_tokens_file() <NEW_LINE> if tokens_json: <NEW_LINE> <INDENT> self._load_json_tokens(tokens_json) | Creates an object of the type GClient | 625941c3cb5e8a47e48b7a6b |
def add_event(_event=""): <NEW_LINE> <INDENT> if _event == "": <NEW_LINE> <INDENT> _event = [Text("", new_line=True)] <NEW_LINE> <DEDENT> update_textbox("events", _event) | Adds an event to the events box
:type _event: str | list[Text]
:param _event: "" for new line | 625941c37c178a314d6ef41b |
@mock.patch('foremast.utils.subnets.gate_request') <NEW_LINE> def test_utils_subnets_get_subnets(mock_gate_request): <NEW_LINE> <INDENT> mock_gate_request.return_value.json.return_value = SUBNET_DATA <NEW_LINE> result = get_subnets(env='dev', region='us-east-1') <NEW_LINE> assert result == { 'subnet_ids': { 'us-east-1': [SUBNET_DATA[0]['id']], }, 'us-east-1': [[]], } | Find one subnet. | 625941c3be383301e01b5448 |
def get_type(handler, **uri_args): <NEW_LINE> <INDENT> query = handler.query <NEW_LINE> query = handler.make_query(query, **uri_args) <NEW_LINE> model = handler.fetch_one(query) <NEW_LINE> if not model: <NEW_LINE> <INDENT> abort(404) <NEW_LINE> <DEDENT> schema = handler.schema <NEW_LINE> schema = schema(**handler.schema_dump_options()) <NEW_LINE> result = handler.serialize(schema, model) <NEW_LINE> return result, 200 | Return an instance endpoint response.
Steps:
1. Construct a query.
2. Fetch the requested resource.
3. Abort if a resource is not found.
4. Construct a serializer.
5. Return a serailized response. | 625941c3498bea3a759b9a6f |
def get( command_name ): <NEW_LINE> <INDENT> return imports.get('entities', command_name) | Return the command class for the given command-name. | 625941c3a8370b771705285f |
def test_backspace_and_delete(): <NEW_LINE> <INDENT> superConsole.SendKeys('outputRedirectStart{(}True{)}{ENTER}') <NEW_LINE> testRegex = "" <NEW_LINE> superConsole.SendKeys("print 'IQ{BACKSPACE}P'{ENTER}") <NEW_LINE> testRegex += "IP" <NEW_LINE> superConsole.SendKeys("print 'FW'{LEFT}{LEFT}{DELETE}X{ENTER}") <NEW_LINE> testRegex += "FX" <NEW_LINE> superConsole.SendKeys('outputRedirectStop{(}{)}{ENTER}') <NEW_LINE> verifyResults(getTestOutput()[0], testRegex) | Backspace and delete | 625941c345492302aab5e280 |
def main(argv): <NEW_LINE> <INDENT> args, credentials = parse_args(argv) <NEW_LINE> try: <NEW_LINE> <INDENT> if args.command == 'download': <NEW_LINE> <INDENT> download_candidate_files( credentials, args.release_number, args.path, args.br_number, args.pr_number, dry_run=args.dry_run, verbose=args.verbose) <NEW_LINE> <DEDENT> elif args.command == 'extract': <NEW_LINE> <INDENT> extract_candidates( args.path, dry_run=args.dry_run, verbose=args.verbose) <NEW_LINE> <DEDENT> elif args.command == 'publish': <NEW_LINE> <INDENT> publish_candidates( args.path, args.streams_path, juju_release_tools=args.juju_release_tools, dry_run=args.dry_run, verbose=args.verbose) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> if args.verbose: <NEW_LINE> <INDENT> traceback.print_tb(sys.exc_info()[2]) <NEW_LINE> <DEDENT> return 2 <NEW_LINE> <DEDENT> if args.verbose: <NEW_LINE> <INDENT> print("Done.") <NEW_LINE> <DEDENT> return 0 | Manage successful Juju CI candiates. | 625941c3dc8b845886cb54f3 |
def calculate_hand_len(hand): <NEW_LINE> <INDENT> hand_len = 0 <NEW_LINE> for value in hand.values(): <NEW_LINE> <INDENT> hand_len += value <NEW_LINE> <DEDENT> return hand_len | Return the length in the current hand.
hand: dictionary (string-> int)
return: integer | 625941c345492302aab5e281 |
def addChild(self, subNode): <NEW_LINE> <INDENT> self.children[subNode.subDomain] = subNode | Add DomainNode for more-specific subdomains of this one. | 625941c3b57a9660fec33841 |
def getname(exprresolver): <NEW_LINE> <INDENT> result = None <NEW_LINE> if not callable(exprresolver): <NEW_LINE> <INDENT> raise TypeError('Expression resolver must be a callable object.') <NEW_LINE> <DEDENT> result = getattr( exprresolver, __RESOLVER__, getattr( exprresolver, '__name__', getattr( exprresolver.__class__, '__name__' ) ) ) <NEW_LINE> return result | Get expression resolver name.
Expression resolver name is given by the attribute __resolver__.
If not exist, then the it is given by the attribute __name__.
Otherwise, given by the __class__.__name__ attribute.
:raises: TypeError if exprresolver is not callable. | 625941c396565a6dacc8f68b |
def sendDict(self, dict, playerNum): <NEW_LINE> <INDENT> pickledObj = pickle.dumps(dict, -1) <NEW_LINE> self.conn[playerNum].sendall(pickledObj) <NEW_LINE> return True | Sends a dictionary to the client associated with the playerNum
:param dict: dictionary to be sent
:param playerNum: id of player that dictionary will be sent to
:return: True | 625941c3f9cc0f698b1405bc |
def setUp( self ): <NEW_LINE> <INDENT> pass | Executed before each test in this class. | 625941c3cc40096d61595910 |
def numericalize(self, arr, device=None, train=True): <NEW_LINE> <INDENT> if self.include_lengths and not isinstance(arr, tuple): <NEW_LINE> <INDENT> raise ValueError("Field has include_lengths set to True, but " "input data is not a tuple of " "(data batch, batch lengths).") <NEW_LINE> <DEDENT> if isinstance(arr, tuple): <NEW_LINE> <INDENT> arr, lengths = arr <NEW_LINE> lengths = torch.LongTensor(lengths) <NEW_LINE> <DEDENT> def _dense_mask(seqs: List[List[str]], batch_first: bool): <NEW_LINE> <INDENT> batch_size = len(seqs) <NEW_LINE> assert batch_size > 0 <NEW_LINE> seq_size = len(seqs[0]) <NEW_LINE> masks = torch.ByteTensor(batch_size, seq_size, len(self.vocab)).zero_() if batch_first else torch.ByteTensor(seq_size, batch_size, len(self.vocab)).zero_() <NEW_LINE> for i, seq in enumerate(seqs): <NEW_LINE> <INDENT> for j, tok in enumerate(seq): <NEW_LINE> <INDENT> if tok != '*': <NEW_LINE> <INDENT> id = self.vocab.stoi[tok] <NEW_LINE> if batch_first: <NEW_LINE> <INDENT> masks[i, j] = 1 <NEW_LINE> masks[i, j, id] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> masks[j, i] = 1 <NEW_LINE> masks[j, i, id] = 0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return masks <NEW_LINE> <DEDENT> masks = self.vocab.dense_mask(arr, self.batch_first) <NEW_LINE> if device == -1: <NEW_LINE> <INDENT> masks = masks.contiguous() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> masks = masks.cuda(device) <NEW_LINE> if self.include_lengths: <NEW_LINE> <INDENT> lengths = lengths.cuda(device) <NEW_LINE> <DEDENT> <DEDENT> if self.include_lengths: <NEW_LINE> <INDENT> return masks, lengths, arr <NEW_LINE> <DEDENT> return masks, arr | Turn a batch of examples that use this field into a Variable.
If the field has include_lengths=True, a tensor of lengths will be
included in the return value.
Arguments:
arr (List[List[str]], or tuple of (List[List[str]], List[int])):
List of tokenized and padded examples, or tuple of List of
tokenized and padded examples and List of lengths of each
example if self.include_lengths is True.
device (-1 or None): Device to create the Variable's Tensor on.
Use -1 for CPU and None for the currently active GPU device.
Default: None.
train (boolean): Whether the batch is for a training set.
If False, the Variable will be created with volatile=True.
Default: True. | 625941c3c4546d3d9de729f1 |
def infix_to_binary_tree(infix: List, start, end) -> Node: <NEW_LINE> <INDENT> if end - start == 1: <NEW_LINE> <INDENT> return Node(float(infix[start])) <NEW_LINE> <DEDENT> p1 = -1 <NEW_LINE> p2 = -1 <NEW_LINE> flag = 0 <NEW_LINE> for i in range(start, end): <NEW_LINE> <INDENT> x = infix[i] <NEW_LINE> if x == '(': <NEW_LINE> <INDENT> flag += 1 <NEW_LINE> <DEDENT> elif x == ')': <NEW_LINE> <INDENT> flag -= 1 <NEW_LINE> <DEDENT> if flag == 0: <NEW_LINE> <INDENT> if x in ['+', '-']: <NEW_LINE> <INDENT> p1 = i <NEW_LINE> <DEDENT> elif x in ['*', '/']: <NEW_LINE> <INDENT> p2 = i <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> p = p1 if p1 != -1 else p2 <NEW_LINE> if p != -1: <NEW_LINE> <INDENT> node = Node(infix[p]) <NEW_LINE> print(infix[p]) <NEW_LINE> node.left = infix_to_binary_tree(infix, start, p) <NEW_LINE> node.right = infix_to_binary_tree(infix, p + 1, end) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node = infix_to_binary_tree(infix, start + 1, end - 1) <NEW_LINE> <DEDENT> return node | 中缀表达式转二叉树 | 625941c31f037a2d8b9461bd |
def parse(session, url): <NEW_LINE> <INDENT> print('[parse] url:', url) <NEW_LINE> soup = get_soup(session, url) <NEW_LINE> if not soup: <NEW_LINE> <INDENT> print('[parse] no soup:', url) <NEW_LINE> return <NEW_LINE> <DEDENT> num_reviews = soup.find('span', class_='_3jEYFo-z').text <NEW_LINE> num_reviews = num_reviews.replace(',', '') <NEW_LINE> num_reviews = re.sub(r'[a-z]+', '', num_reviews) <NEW_LINE> num_reviews = int(num_reviews) <NEW_LINE> print('[parse] num_reviews ALL:', num_reviews) <NEW_LINE> url_template = url.replace('.html', '-or{}.html') <NEW_LINE> print('[parse] url_template:', url_template) <NEW_LINE> items = [] <NEW_LINE> offset = 0 <NEW_LINE> while(True): <NEW_LINE> <INDENT> subpage_url = url_template.format(offset) <NEW_LINE> subpage_items = parse_reviews(session, subpage_url) <NEW_LINE> if not subpage_items: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> items += subpage_items <NEW_LINE> if len(subpage_items) < 5: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> offset += 5 <NEW_LINE> <DEDENT> return items | Get number of reviews and start getting subpages with reviews | 625941c35166f23b2e1a5118 |
def get_utxo(self): <NEW_LINE> <INDENT> return self._utxos.pop() | Return the last utxo. Can be used to get the change output immediately after a send_self_transfer | 625941c360cbc95b062c6502 |
def getStates(self): <NEW_LINE> <INDENT> return self._states | Returns list of non-default states
| 625941c35fc7496912cc393d |
def test_rest_collections_put(api_client): <NEW_LINE> <INDENT> user = User.objects.get(username="test") <NEW_LINE> api_client.force_authenticate(user=user) <NEW_LINE> resp = api_client.put("/covmanager/rest/collections/") <NEW_LINE> LOG.debug(resp) <NEW_LINE> assert resp.status_code == requests.codes["method_not_allowed"] | put should not be allowed | 625941c3507cdc57c6306c96 |
def _heapify_up(self): <NEW_LINE> <INDENT> index = len(self.main_array) - 1 <NEW_LINE> while ( self._has_parent(index) & self._get_parent(index) > self.main_array[index] ): <NEW_LINE> <INDENT> self._swap(_get_parent_index(index), index) <NEW_LINE> index = self._get_parent_index(index) | 'Bubble up' last element in array to maintain heap ordering,
swapping it with parent elements until it is in the right place. | 625941c38e05c05ec3eea332 |
def update_effect_map(self, light_effect_map: dict): <NEW_LINE> <INDENT> self._light_effect_map = light_effect_map <NEW_LINE> if light_effect_map: <NEW_LINE> <INDENT> self._attr_supported_features = self._attr_supported_features | SUPPORT_EFFECT <NEW_LINE> self._attr_effect_list = list(light_effect_map.values()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._attr_supported_features = self._attr_supported_features & ~SUPPORT_EFFECT <NEW_LINE> self._attr_effect_list = None <NEW_LINE> <DEDENT> if self.hass and self.enabled: <NEW_LINE> <INDENT> self.async_write_ha_state() | the list of available effects was changed (context at device level)
so we'll just tell HA to update the state | 625941c3baa26c4b54cb10e0 |
def rank_till_index(node: Node, num: int, index: int) -> int: <NEW_LINE> <INDENT> if index < 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if node.minn == node.maxx: <NEW_LINE> <INDENT> return index + 1 if node.minn == num else 0 <NEW_LINE> <DEDENT> pivot = (node.minn + node.maxx) // 2 <NEW_LINE> if num <= pivot: <NEW_LINE> <INDENT> return rank_till_index(node.left, num, node.map_left[index] - 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return rank_till_index(node.right, num, index - node.map_left[index]) | Returns the number of occurrences of num in interval [0, index] in the list
>>> root = build_tree(test_array)
>>> rank_till_index(root, 6, 6)
1
>>> rank_till_index(root, 2, 0)
1
>>> rank_till_index(root, 1, 10)
2
>>> rank_till_index(root, 17, 7)
0
>>> rank_till_index(root, 0, 9)
1 | 625941c3be7bc26dc91cd5c2 |
def validate_email(self, field): <NEW_LINE> <INDENT> if Users.query.filter_by(email=field.data).first(): <NEW_LINE> <INDENT> raise ValidationError('Email is already in use.') | Function to validate email | 625941c338b623060ff0adad |
def load(self, path): <NEW_LINE> <INDENT> with open(path, 'rb') as file: <NEW_LINE> <INDENT> self._dict = pickle.load(file) | Loads the database from a specific file. | 625941c3377c676e91272168 |
def numJewelsInStones(self, J, S): <NEW_LINE> <INDENT> n = 0 <NEW_LINE> for i in S: <NEW_LINE> <INDENT> if i in J: <NEW_LINE> <INDENT> n = n+1 <NEW_LINE> <DEDENT> <DEDENT> return(n) | :type J: str
:type S: str
:rtype: int | 625941c3cc0a2c11143dce50 |
def _get_detections(generator, model, score_threshold=0.05, max_detections=100, save_path=None): <NEW_LINE> <INDENT> all_detections = [[None for i in range(generator.num_classes()) if generator.has_label(i)] for j in range(generator.size())] <NEW_LINE> all_inferences = [None for i in range(generator.size())] <NEW_LINE> for i in progressbar.progressbar(range(generator.size()), prefix='Running network: '): <NEW_LINE> <INDENT> raw_image = generator.load_image(i) <NEW_LINE> image, scale = generator.resize_image(raw_image.copy()) <NEW_LINE> image = generator.preprocess_image(image) <NEW_LINE> if keras.backend.image_data_format() == 'channels_first': <NEW_LINE> <INDENT> image = image.transpose((2, 0, 1)) <NEW_LINE> <DEDENT> start = time.time() <NEW_LINE> boxes, scores, labels = model.predict_on_batch(np.expand_dims(image, axis=0))[:3] <NEW_LINE> inference_time = time.time() - start <NEW_LINE> boxes /= scale <NEW_LINE> indices = np.where(scores[0, :] > score_threshold)[0] <NEW_LINE> scores = scores[0][indices] <NEW_LINE> scores_sort = np.argsort(-scores)[:max_detections] <NEW_LINE> image_boxes = boxes[0, indices[scores_sort], :] <NEW_LINE> image_scores = scores[scores_sort] <NEW_LINE> image_labels = labels[0, indices[scores_sort]] <NEW_LINE> image_detections = np.concatenate([image_boxes, np.expand_dims(image_scores, axis=1), np.expand_dims(image_labels, axis=1)], axis=1) <NEW_LINE> if save_path is not None: <NEW_LINE> <INDENT> draw_annotations(raw_image, generator.load_annotations(i), label_to_name=generator.label_to_name) <NEW_LINE> draw_detections(raw_image, image_boxes, image_scores, image_labels, label_to_name=generator.label_to_name, score_threshold=score_threshold) <NEW_LINE> cv2.imwrite(os.path.join(save_path, '{}.png'.format(i)), raw_image) <NEW_LINE> <DEDENT> for label in range(generator.num_classes()): <NEW_LINE> <INDENT> if not generator.has_label(label): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> all_detections[i][label] = 
image_detections[image_detections[:, -1] == label, :-1] <NEW_LINE> <DEDENT> all_inferences[i] = inference_time <NEW_LINE> <DEDENT> return all_detections, all_inferences | Get the detections from the model using the generator.
The result is a list of lists such that the size is:
all_detections[num_images][num_classes] = detections[num_detections, 4 + num_classes]
# Arguments
generator : The generator used to run images through the model.
model : The model to run on the images.
score_threshold : The score confidence threshold to use.
max_detections : The maximum number of detections to use per image.
save_path : The path to save the images with visualized detections to.
# Returns
A list of lists containing the detections for each image in the generator. | 625941c36fb2d068a760f05b |
def _validate_customer_service(self): <NEW_LINE> <INDENT> direc = self.customer_service_dir <NEW_LINE> if not direc.exists: <NEW_LINE> <INDENT> mode = 0o750 <NEW_LINE> print("Creating customer_service_dir %s with mode %s" % (direc, mode)) <NEW_LINE> direc.makedirs() <NEW_LINE> os.chmod(direc.path, mode) <NEW_LINE> <DEDENT> if self.mailto is None: <NEW_LINE> <INDENT> raise RuntimeError("customer_service_dir requires mailto option in scheduler.yml") | Validate input parameters if customer service is on then
create directory for tarball files with correct premissions for user and group. | 625941c3e1aae11d1e749c75 |
def grow_child(self, uid, objectives, constraints): <NEW_LINE> <INDENT> child = self.children.pop(uid) <NEW_LINE> adolescent = child.transform_to_member(objectives, constraints) <NEW_LINE> if len(self.adults) < self.capacity: <NEW_LINE> <INDENT> self.adults[uid] = adolescent <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.adolescents[uid] = adolescent <NEW_LINE> <DEDENT> return adolescent | Transition a child to an adolescent | 625941c37c178a314d6ef41c |
def versions(): <NEW_LINE> <INDENT> client = salt.client.get_local_client(__opts__['conf_file']) <NEW_LINE> minions = client.cmd('*', 'test.version', timeout=__opts__['timeout']) <NEW_LINE> labels = { -1: 'Minion requires update', 0: 'Up to date', 1: 'Minion newer than master', } <NEW_LINE> version_status = {} <NEW_LINE> comps = salt.__version__.split('-') <NEW_LINE> if len(comps) == 3: <NEW_LINE> <INDENT> master_version = '-'.join(comps[0:2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> master_version = salt.__version__ <NEW_LINE> <DEDENT> for minion in minions: <NEW_LINE> <INDENT> comps = minions[minion].split('-') <NEW_LINE> if len(comps) == 3: <NEW_LINE> <INDENT> minion_version = '-'.join(comps[0:2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> minion_version = minions[minion] <NEW_LINE> <DEDENT> ver_diff = cmp(minion_version, master_version) <NEW_LINE> if ver_diff not in version_status: <NEW_LINE> <INDENT> version_status[ver_diff] = {} <NEW_LINE> <DEDENT> version_status[ver_diff][minion] = minion_version <NEW_LINE> <DEDENT> ret = {} <NEW_LINE> for key in version_status: <NEW_LINE> <INDENT> for minion in sorted(version_status[key]): <NEW_LINE> <INDENT> ret.setdefault(labels[key], {})[minion] = version_status[key][minion] <NEW_LINE> <DEDENT> <DEDENT> salt.output.display_output(ret, '', __opts__) <NEW_LINE> return ret | Check the version of active minions
CLI Example:
.. code-block:: bash
salt-run manage.versions | 625941c30c0af96317bb81a8 |
def _initialize_constraints(self, kwargs): <NEW_LINE> <INDENT> for constraint in self.parameter_constraints: <NEW_LINE> <INDENT> values = kwargs.pop(constraint, {}) <NEW_LINE> for ckey, cvalue in values.items(): <NEW_LINE> <INDENT> param = getattr(self, ckey) <NEW_LINE> setattr(param, constraint, cvalue) <NEW_LINE> <DEDENT> <DEDENT> self._mconstraints = {} <NEW_LINE> for constraint in self.model_constraints: <NEW_LINE> <INDENT> values = kwargs.pop(constraint, []) <NEW_LINE> self._mconstraints[constraint] = values | Pop parameter constraint values off the keyword arguments passed to
`Model.__init__` and store them in private instance attributes. | 625941c34d74a7450ccd4183 |
def least_squares(matrix,vector): <NEW_LINE> <INDENT> if len(matrix) != len(vector): <NEW_LINE> <INDENT> return("Incompatible matrix-vector in least_squares function.") <NEW_LINE> <DEDENT> for i in range(len(matrix)): <NEW_LINE> <INDENT> for j in range(len(matrix[0])): <NEW_LINE> <INDENT> if type(matrix[i][j]) != int and type(matrix[i][j]) != float and type(matrix[i][j]) != complex: <NEW_LINE> <INDENT> return("Invalid matrix element in least_squares function.") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for i in vector: <NEW_LINE> <INDENT> if type(i) != int and type(i) != float and type(i) != complex: <NEW_LINE> <INDENT> return("Invalid vector element in least_squares function.") <NEW_LINE> <DEDENT> <DEDENT> p = [] <NEW_LINE> p = q_y(matrix,vector) <NEW_LINE> q = [] <NEW_LINE> q = gramSchmitt_mod(matrix)[0] <NEW_LINE> r = [] <NEW_LINE> r = gramSchmitt_mod(matrix)[1] <NEW_LINE> c = [] <NEW_LINE> c = back_sub(r,p) <NEW_LINE> p = q_y(matrix,vector) <NEW_LINE> print("A = ") <NEW_LINE> for i in range(len(matrix)): <NEW_LINE> <INDENT> print(matrix[i]) <NEW_LINE> <DEDENT> print("Q = ") <NEW_LINE> for i in range(len(q)): <NEW_LINE> <INDENT> print(gramSchmitt_mod(matrix)[0][i]) <NEW_LINE> <DEDENT> print("R = ") <NEW_LINE> for i in range(len(r)): <NEW_LINE> <INDENT> print(r[i]) <NEW_LINE> <DEDENT> print() <NEW_LINE> print("c =",c) <NEW_LINE> print() <NEW_LINE> print("f(x)=",c[0],"+",c[1],"*x","+", c[2],"*x**2 +",c[3],"*x**3") <NEW_LINE> return None | This function takes a matrix and a vector and validates them using an if statements and for loops.
It then computes the variables Q,R,c and product Q*y (of the equation Ac=y where A=QR) from the
input matrix and vector. It uses the gramSchmitt_mod function to compute the Q,R; the q_y function
to compute Q*y and assign it to p; and the back_sub function to compute c. It then prints the input
matrix A, matrix Q, matrix R and vector c. Finally it prints the approximate polynomial f(x)
that maps A to y. It implements for loops to print the matrices in readable format. | 625941c3c4546d3d9de729f2 |
def addWriter(writer): <NEW_LINE> <INDENT> pass | I add writer to the set of file descriptors to get write events for.
@param writer: An L{IWriteDescriptor} provider that will be checked for
write events until it is removed from the reactor with
L{removeWriter}.
@return: C{None}. | 625941c3a8370b7717052860 |
def compile_parameter_list(self): <NEW_LINE> <INDENT> while self.tokenizer.current_token != ')': <NEW_LINE> <INDENT> type = self.get_token() <NEW_LINE> name = self.get_token() <NEW_LINE> self.symbol_table.define(name, type, 'arg') <NEW_LINE> if self.tokenizer.current_token == ',': <NEW_LINE> <INDENT> self.process() | Compiles a (possibly empty) parameter list. | 625941c37047854f462a13cb |
def get_plaintext_menu(lounas_dict): <NEW_LINE> <INDENT> string = "" <NEW_LINE> for name, meals in sorted(lounas_dict.items()): <NEW_LINE> <INDENT> if meals: <NEW_LINE> <INDENT> string += u"%s\n" % name.upper() <NEW_LINE> for meal in meals: <NEW_LINE> <INDENT> paren_index = meal.find('(') <NEW_LINE> if paren_index > -1: <NEW_LINE> <INDENT> meal = meal[:(paren_index - 1)] <NEW_LINE> <DEDENT> string += "- %s\n" % meal <NEW_LINE> <DEDENT> string += '\n' <NEW_LINE> <DEDENT> <DEDENT> return string | Returns a plaintext aggregation of filtered meals for each restaurant | 625941c34428ac0f6e5ba7b1 |
def get_class(self): <NEW_LINE> <INDENT> return self.classdesc | Returns the class of this instance | 625941c3ac7a0e7691ed408f |
def same_tech(self, other: Union[UnitTypeId, Set[UnitTypeId], List[UnitTypeId], Dict[UnitTypeId, Any]]) -> "Units": <NEW_LINE> <INDENT> if isinstance(other, UnitTypeId): <NEW_LINE> <INDENT> other = {other} <NEW_LINE> <DEDENT> tech_alias_types = set(other) <NEW_LINE> for unitType in other: <NEW_LINE> <INDENT> tech_alias = self.game_data.units[unitType.value].tech_alias <NEW_LINE> if tech_alias: <NEW_LINE> <INDENT> for same in tech_alias: <NEW_LINE> <INDENT> tech_alias_types.add(same) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.filter(lambda unit: unit.type_id in tech_alias_types or unit._type_data.tech_alias is not None and any(same in tech_alias_types for same in unit._type_data.tech_alias)) | Usage:
'self.units.same_tech(UnitTypeId.COMMANDCENTER)' or 'self.units.same_tech(UnitTypeId.ORBITALCOMMAND)'
returns all CommandCenter, CommandCenterFlying, OrbitalCommand, OrbitalCommandFlying, PlanetaryFortress
This also works with a set/list/dict parameter, e.g. 'self.units.same_tech({UnitTypeId.COMMANDCENTER, UnitTypeId.SUPPLYDEPOT})'
Untested: This should return the equivalents for Hatchery, WarpPrism, Observer, Overseer, SupplyDepot and others | 625941c34f88993c3716c028 |
def test_display_name_none(self): <NEW_LINE> <INDENT> user = UserFactory.create() <NEW_LINE> user.userprofile.display_name = '' <NEW_LINE> with patch('affiliates.users.models._') as ugettext: <NEW_LINE> <INDENT> ugettext.return_value = 'Affiliate' <NEW_LINE> eq_(user.display_name, 'Affiliate') <NEW_LINE> ugettext.assert_called_with(u'Affiliate') | If a user's profile has no display name set, return a localized
default. | 625941c366673b3332b92051 |
def setGrade(self, grade, course="6.01x"): <NEW_LINE> <INDENT> pass | grade: integer greater than or equal to 0 and less than or equal to 100
course: string
This method sets the grade in the courseInfo object named by `course`.
If `course` was not part of the initialization, then no grade is set, and no
error is thrown.
The method does not return a value. | 625941c3e8904600ed9f1eea |
def __exec(self, cmd): <NEW_LINE> <INDENT> p = Popen(cmd, stdout=PIPE, stderr=PIPE) <NEW_LINE> output, error = p.communicate() <NEW_LINE> if p.returncode == -11: <NEW_LINE> <INDENT> raise OutOfMemoryException() <NEW_LINE> <DEDENT> elif p.returncode == -24: <NEW_LINE> <INDENT> raise TimeoutException() <NEW_LINE> <DEDENT> elif p.returncode != 0: <NEW_LINE> <INDENT> error = '' if error is None else error <NEW_LINE> raise ProcessErrorException( 'Process ended with an error code: ' + str(p.returncode) + '\nERROR:\n' + error) <NEW_LINE> <DEDENT> return output, error | :param cmd: Path to the executable
:return: output of the executable | 625941c356ac1b37e6264192 |
def talk(s, commands, dry_run=False): <NEW_LINE> <INDENT> waitingForReply = False <NEW_LINE> for teststr in commands: <NEW_LINE> <INDENT> if not cmd_valid(teststr): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if waitingForReply == False: <NEW_LINE> <INDENT> write_to_serial(s, teststr, dry_run=dry_run) <NEW_LINE> print("Sent from PC -- " + teststr) <NEW_LINE> waitingForReply = True <NEW_LINE> <DEDENT> if waitingForReply == True: <NEW_LINE> <INDENT> while not dry_run and s.inWaiting() == 0: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> dataRecvd = listen(s, dry_run=dry_run) <NEW_LINE> print("Reply Received -- " + dataRecvd) <NEW_LINE> waitingForReply = False <NEW_LINE> <DEDENT> time.sleep(0.1) <NEW_LINE> print("Send and receive complete") | Send a list of commands to the Arduino connected at the Serial port.
Parameters
----------
s : Serial
The Serial object instance that your Arduino is interfacing.
commands : list
A list of properly formatted string commands to send to the Arduino.
Command is structured as "<mode, motorID, arg_m1, arg_m2, arg_m3>",
where mode is one of [RUN, STOP, RESUME, PAUSE, SET_SPEED, SET_ACCEL],
and motorID is [1, 1, 1] (can be combo of numbers i.e. 100 or 101 or
001 (binary indicator), and arg_m* is any floating number.
Returns
------- | 625941c363d6d428bbe444af |
def help(self, update, context): <NEW_LINE> <INDENT> update.message.reply_text( "Use /start to initiate server monitoring.\n" + "Use /player_list to view players currently on the server." ) | Send a message when the command /help is issued. | 625941c356b00c62f0f14618 |
def _on_relation_created(self, event): <NEW_LINE> <INDENT> if not self._charm.is_slurm_installed(): <NEW_LINE> <INDENT> event.defer() <NEW_LINE> return <NEW_LINE> <DEDENT> app_relation_data = event.relation.data[self.model.app] <NEW_LINE> app_relation_data["munge_key"] = self._charm.get_munge_key() <NEW_LINE> app_relation_data["slurmctld_host"] = self._charm.hostname <NEW_LINE> app_relation_data["slurmctld_port"] = self._charm.port <NEW_LINE> app_relation_data["etcd_port"] = "2379" <NEW_LINE> app_relation_data["cluster_name"] = self._charm.config.get("cluster-name") <NEW_LINE> app_relation_data["nhc_params"] = self._charm.config.get("health-check-params", "#") | Set our data on the relation. | 625941c326238365f5f0ee2c |
def get(self, modulename, classname=""): <NEW_LINE> <INDENT> _logger_adaptor = self._get_logger(modulename, classname) <NEW_LINE> return _logger_adaptor | Return logerAdapter instance for module level loging.
| 625941c39f2886367277a84e |
def compile(self, space): <NEW_LINE> <INDENT> self.fragrance = np.zeros(space.n_agents) | Compiles additional information that is used by this optimizer.
Args:
space (Space): A Space object containing meta-information. | 625941c3956e5f7376d70e2e |
def _summaries(self): <NEW_LINE> <INDENT> tf.summary.scalar("XEntropy_Loss", self.cost) | Define summaries for Tensorboard | 625941c3be8e80087fb20c05 |
def validate_token(func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def wrap(self, *args, **kwargs): <NEW_LINE> <INDENT> access_token = request.httprequest.headers.get("access_token") <NEW_LINE> if not access_token: <NEW_LINE> <INDENT> return invalid_response("access_token_not_found", "missing access token in request header", 401) <NEW_LINE> <DEDENT> access_token_data = ( request.env["api.access_token"].sudo().search([("token", "=", access_token)], order="id DESC", limit=1) ) <NEW_LINE> if access_token_data.find_one_or_create_token(user_id=access_token_data.user_id.id) != access_token: <NEW_LINE> <INDENT> return invalid_response("access_token", "token seems to have expired or invalid", 401) <NEW_LINE> <DEDENT> request.session.uid = access_token_data.user_id.id <NEW_LINE> request.uid = access_token_data.user_id.id <NEW_LINE> return func(self, *args, **kwargs) <NEW_LINE> <DEDENT> return wrap | . | 625941c330c21e258bdfa45c |
def standard_extract(): <NEW_LINE> <INDENT> target = os.path.dirname(build_dir) <NEW_LINE> for url in extract_plan: <NEW_LINE> <INDENT> out.write(" %s %s\n" % (out.color(">", "green"), os.path.join(cfg.LPMSConfig().src_cache, os.path.basename(url)))) <NEW_LINE> archive_path = os.path.join(cfg.LPMSConfig().src_cache, os.path.basename(url)) <NEW_LINE> try: <NEW_LINE> <INDENT> partial = [atom.strip() for atom in partial.split(" ") if atom != "#"] <NEW_LINE> archive.extract(str(archive_path), str(target), partial) <NEW_LINE> <DEDENT> except NameError: <NEW_LINE> <INDENT> archive.extract(str(archive_path), str(target)) | Runs standard extract procedure | 625941c34f6381625f1149fc |
def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'management': <NEW_LINE> <INDENT> return 'witmetadata' <NEW_LINE> <DEDENT> return None | Attempts to read auth models go to auth_db. | 625941c38e71fb1e9831d76a |
def __init__(self, username, nedid, type, debug=True): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.nedid = nedid <NEW_LINE> self.type = type <NEW_LINE> self.template = '' <NEW_LINE> self.debug = debug <NEW_LINE> self.cmd_list = [] <NEW_LINE> self.last_error = '' | Initialization function | 625941c3fb3f5b602dac3651 |
def help_doomsday(self): <NEW_LINE> <INDENT> self.write("That is when the world will end. Don't ask why.", curses.color_pair(2) | curses.A_BOLD) | If such a help method is defined, 'help command_name' calls this method
instead of printing the Python doc. | 625941c3097d151d1a222e1b |
def train_test_split(ratings, frac_test=0.2, impute=True, seed=1234): <NEW_LINE> <INDENT> np.random.seed(seed) <NEW_LINE> test = np.zeros(ratings.shape) <NEW_LINE> train = ratings.copy() <NEW_LINE> nonzero_ind = np.where(train > 0) <NEW_LINE> N = nonzero_ind[0].size <NEW_LINE> N_test = np.int(np.round(frac_test * N)) <NEW_LINE> ind = np.random.permutation(N)[:N_test] <NEW_LINE> test_users = nonzero_ind[0][ind] <NEW_LINE> test_items = nonzero_ind[1][ind] <NEW_LINE> test[test_users, test_items] = train[test_users, test_items].copy() <NEW_LINE> train[test_users, test_items] = 0. <NEW_LINE> if impute: <NEW_LINE> <INDENT> pos_ind = train > 0 <NEW_LINE> means = np.zeros(train.shape[1]) + np.mean(train[pos_ind]) <NEW_LINE> for i in range(means.size): <NEW_LINE> <INDENT> item = train[:, i] <NEW_LINE> ind = item > 0. <NEW_LINE> if any(ind): <NEW_LINE> <INDENT> means[i] = np.mean(item[ind]) <NEW_LINE> <DEDENT> <DEDENT> mean_train = np.tile(means, (train.shape[0], 1)) <NEW_LINE> mean_train[pos_ind] = train[pos_ind].copy() <NEW_LINE> train = mean_train <NEW_LINE> train[test_users, test_items] = 0. <NEW_LINE> <DEDENT> assert(np.all((train * test) == 0)) <NEW_LINE> return train, test | Split the MovieLens data into train/test, mostly follows version found here,
http://bit.ly/1YvzkE2, but also allows imputation of zeros in training data
with mean values of item ratings.
Args:
ratings [numpy.ndarray]: user-ratings matrix
frac_test [Optional(float)]: Fractional amount of entries to test.
impute [Optional(boolean)]: Flag to impute zeros with means of items.
seed [Optional(int)]: Integer to fix random number generator.
Returns:
train [numpy.ndarray]: training user-ratings matrix
test [numpy.ndarray]: test user-ratings matrix | 625941c3b7558d58953c4ed7 |
def get_kalman_gain(self, m_error): <NEW_LINE> <INDENT> return self.estimate_error / (self.estimate_error + m_error) | Calculate the Kalman gain.
The higher the Kalman gain, the more important is the measurement
compared to the current estimate
Returns
-------
float
In [0, 1] | 625941c391f36d47f21ac4b1 |
def __init__(self, file_path, protoLabel): <NEW_LINE> <INDENT> self.logger = logging.getLogger(__name__) <NEW_LINE> self.logger.setLevel(logging.WARNING) <NEW_LINE> self.pcapFilePath = file_path <NEW_LINE> self.pcapFileName = '' <NEW_LINE> try: <NEW_LINE> <INDENT> if len(file_path) > 0: <NEW_LINE> <INDENT> self.cap = rdpcap(self.pcapFilePath) <NEW_LINE> self.pcapFileName = str(self.pcapFilePath).rsplit('/',1)[1] <NEW_LINE> self.logger.debug("Pcap File Name: %s" % self.pcapFileName) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.warning("Pcap File MISSING at : [%s] or Filtered PCAP" % self.pcapFilePath) <NEW_LINE> <DEDENT> self.protocolLabel = protoLabel <NEW_LINE> self.pktCharFreqDict = {} <NEW_LINE> self.pktCharEntropySeq = [] <NEW_LINE> self.specificPktLens = [] <NEW_LINE> self.fig = None <NEW_LINE> self.ax = None <NEW_LINE> self.logger.info("Finished initializing and reading pcap file ...") <NEW_LINE> self.logger.debug("Type : %s" % str(type(self.cap))) | :param file_path: Set file path with 'None' if file_path given (indicates that it's a filtered pcap)
else, set the actual file_path.
:param protoLabel: Used to label the base file where the pcap file-path will be stored
:return: | 625941c34e696a04525c940c |
def __init__(self, data, weights=None): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> self.data = data <NEW_LINE> self.rel_a = data / np.expand_dims(data.sum(axis=1), 1) <NEW_LINE> if weights is None: <NEW_LINE> <INDENT> weights = data.sum(axis=1) / data.sum() <NEW_LINE> <DEDENT> self.weights = weights / weights.sum() <NEW_LINE> self.total_list = data.sum(axis=0) <NEW_LINE> self.total_rel = self.total_list / self.total_list.sum() | Calculates common diversity metrics
Args:
data: the data set to initialize. should be a 2-d np array
where nrows = nsites to assess, and ncol = nspecies in community.
the values in the array should be the observations
weights: weights for each site. default is sum of
Returns:
an object with properties obj.shannon() and obj.simpson() to calculate
those diversity metrics. | 625941c355399d3f05588673 |
def julian2num(j): <NEW_LINE> <INDENT> ep = np.datetime64(get_epoch(), 'h').astype(float) / 24. <NEW_LINE> ep0 = np.datetime64('0000-12-31T00:00:00', 'h').astype(float) / 24. <NEW_LINE> dt = JULIAN_OFFSET - ep0 + ep <NEW_LINE> return np.subtract(j, dt) | Convert a Julian date (or sequence) to a Matplotlib date (or sequence).
Parameters
----------
j : float or sequence of floats
Julian dates (days relative to 4713 BC Jan 1, 12:00:00 Julian
calendar or 4714 BC Nov 24, 12:00:00, proleptic Gregorian calendar).
Returns
-------
float or sequence of floats
Matplotlib dates (days relative to `.get_epoch`). | 625941c321a7993f00bc7cac |
def test_create_recipe_with_ingredients(self): <NEW_LINE> <INDENT> ingredient1 = Ingredient.objects.create(user=self.user, name='Prawns') <NEW_LINE> ingredient2 = Ingredient.objects.create(user=self.user, name='Ginger') <NEW_LINE> payload = { 'title': 'That Prawn Red Curry', 'ingredient': [ingredient1.id, ingredient2.id], 'time_minutes': 20, 'price': 7.00 } <NEW_LINE> res = self.client.post(RECIPES_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_201_CREATED) <NEW_LINE> recipe = Recipe.objects.get(id=res.data['id']) <NEW_LINE> ingredients = recipe.ingredient.all() <NEW_LINE> self.assertEqual(ingredients.count(), 2) <NEW_LINE> self.assertIn(ingredient1, ingredients) <NEW_LINE> self.assertIn(ingredient2, ingredients) | Test creating recipe with ingredients | 625941c34f88993c3716c029 |
def computeAAfromRM(rotationMatrix): <NEW_LINE> <INDENT> trace = rotationMatrix.trace() <NEW_LINE> if (trace == -1.0): <NEW_LINE> <INDENT> angle = math.pi <NEW_LINE> axis = (0.5 * (rotationMatrix + np.identity(3)).diagonal().reshape(3, 1)) ** 0.5 <NEW_LINE> <DEDENT> elif (trace < 3): <NEW_LINE> <INDENT> angle = math.acos((trace - 1) / 2) <NEW_LINE> axis = inverseSkewMatrix(rotationMatrix - rotationMatrix.transpose()) / (2 * math.sin(angle)) <NEW_LINE> <DEDENT> elif (trace == 3.0): <NEW_LINE> <INDENT> angle = 0 <NEW_LINE> axis = np.array([[1,0,0]]).transpose() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> angle = 0 <NEW_LINE> axis = np.array([[0,0,0]]) <NEW_LINE> print("computeAAfromRN: rotationMatrix must be a special orthoganal matrix") <NEW_LINE> <DEDENT> return (angle, axis) | Computes the axis and angle of rotation from a rotation matrix using the
inverse Rodrigues formula, as given in Theorem 7.12 in Bullo and Smith. It
checks through the cases of the rotation matrix having trace of -1, (-1, 3),
and 3, with the procedure of calculations given in the theorem. In the case
of trace = 3, the rotation is arbitrarily set to (1, 0, 0), in the case of
trace == -1, the positive axis is selected, and the case where the trace falls
outside of the tested range, the zero vector is returned. | 625941c3d164cc6175782d0e |
def load_vgg(sess, vgg_path): <NEW_LINE> <INDENT> vgg_tag = 'vgg16' <NEW_LINE> vgg_input_tensor_name = 'image_input:0' <NEW_LINE> vgg_keep_prob_tensor_name = 'keep_prob:0' <NEW_LINE> vgg_layer3_out_tensor_name = 'layer3_out:0' <NEW_LINE> vgg_layer4_out_tensor_name = 'layer4_out:0' <NEW_LINE> vgg_layer7_out_tensor_name = 'layer7_out:0' <NEW_LINE> tf.saved_model.loader.load(sess, [vgg_tag], vgg_path) <NEW_LINE> vgg_graph = tf.get_default_graph() <NEW_LINE> vgg_input_tensor = vgg_graph.get_tensor_by_name(vgg_input_tensor_name) <NEW_LINE> vgg_keep_prob_tensor = vgg_graph.get_tensor_by_name(vgg_keep_prob_tensor_name) <NEW_LINE> vgg_layer3_out_tensor = vgg_graph.get_tensor_by_name(vgg_layer3_out_tensor_name) <NEW_LINE> vgg_layer4_out_tensor = vgg_graph.get_tensor_by_name(vgg_layer4_out_tensor_name) <NEW_LINE> vgg_layer7_out_tensor = vgg_graph.get_tensor_by_name(vgg_layer7_out_tensor_name) <NEW_LINE> return vgg_input_tensor, vgg_keep_prob_tensor, vgg_layer3_out_tensor, vgg_layer4_out_tensor, vgg_layer7_out_tensor | Load Pretrained VGG Model into TensorFlow.
:param sess: TensorFlow Session
:param vgg_path: Path to vgg folder, containing "variables/" and "saved_model.pb"
:return: Tuple of Tensors from VGG model (image_input, keep_prob, layer3_out, layer4_out, layer7_out) | 625941c3293b9510aa2c3258 |
def test_remove_file_custom_path_job_array(self): <NEW_LINE> <INDENT> script = "#!/bin/sh\n" "%s 3;\n" "if [ $PBS_ARRAY_INDEX -eq 2 ]; then\n" "exit 1; fi; exit 0;" % (self.mom.sleep_cmd) <NEW_LINE> tmp_dir = self.du.create_temp_dir(asuser=TEST_USER) <NEW_LINE> j = Job(TEST_USER, attrs={ATTR_e: tmp_dir, ATTR_o: tmp_dir, ATTR_R: 'oe', ATTR_J: '1-3'}) <NEW_LINE> j.create_script(script) <NEW_LINE> sub_dir = self.du.create_temp_dir(asuser=TEST_USER) <NEW_LINE> jid = self.server.submit(j, submit_dir=sub_dir) <NEW_LINE> self.server.expect(JOB, {ATTR_state: 'B'}, id=jid) <NEW_LINE> self.server.expect(JOB, ATTR_state, op=UNSET, id=jid) <NEW_LINE> file_list = [name for name in os.listdir( tmp_dir) if os.path.isfile(os.path.join(tmp_dir, name))] <NEW_LINE> self.assertEqual(2, len(file_list), "expected 2 std files") <NEW_LINE> subj2_id = j.create_subjob_id(jid, 2) <NEW_LINE> std_files = [subj2_id + '.OU', subj2_id + '.ER'] <NEW_LINE> for f_name in std_files: <NEW_LINE> <INDENT> if f_name not in file_list: <NEW_LINE> <INDENT> raise self.failureException("std file " + f_name + " not found") | submit job array script that makes subjobs to exit with 0 except for
subjob[2] and make sure that the std_files for only subjob[2] are
available in custom directory when remove_files option is used with
-o and -e options. | 625941c3d18da76e23532494 |
def date_convert(self, string_date): <NEW_LINE> <INDENT> date_complex = string_date.split(' ') <NEW_LINE> date = date_complex[0].split('-') <NEW_LINE> year = int(date[0]) <NEW_LINE> month = int(date[1]) <NEW_LINE> day = int(date[2]) <NEW_LINE> time = date_complex[1].split(':') <NEW_LINE> hours = int(time[0]) <NEW_LINE> minutes = int(time[1]) <NEW_LINE> mseconds = 0 <NEW_LINE> if '.' in time[2]: <NEW_LINE> <INDENT> tsecs = time[2].split('.') <NEW_LINE> seconds = int(tsecs[0]) <NEW_LINE> mseconds = int(tsecs[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seconds = int(time[2]) <NEW_LINE> <DEDENT> return datetime.datetime(year, month, day, hours, minutes, seconds, mseconds) | Конвертация строки в datetime (возможно, есть стандартная реализация)
:param string_date:
:return: | 625941c323e79379d52ee526 |
def testUsernameChange(self): <NEW_LINE> <INDENT> url = reverse('api:user_detail', args=(self.user.pk,)) <NEW_LINE> data = {'username': 'a_new_username', 'email': self.user.email} <NEW_LINE> resp = self.client.put(url, json.dumps(data), 'application/json') <NEW_LINE> self.assertEqual(resp.status_code, 200) <NEW_LINE> newuser = User.objects.get(pk=self.user.pk) <NEW_LINE> self.assertEqual(self.user.username, newuser.username) | You should not be able to change your own username. | 625941c34a966d76dd550fce |
def write_header(file_like: BinaryIO, prefix: bytes, data: Mapping, version: Tuple[int, int]): <NEW_LINE> <INDENT> file_like.write(make_header(prefix, data, version)) | Write a header constructed by :func:`.make_header` to a file-like. | 625941c37b180e01f3dc47c1 |
def write_to_db(self): <NEW_LINE> <INDENT> if self.title_entry.get() == '': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> data = [] <NEW_LINE> self.db_length += 1 <NEW_LINE> data.append(self.db_length) <NEW_LINE> if self.date.get() == 'Today': <NEW_LINE> <INDENT> data.append(datetime.date.today()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data.append(self.date.get()) <NEW_LINE> <DEDENT> data.append(self.title_entry.get()) <NEW_LINE> data.append(self.notes_text.get('1.0', END)) <NEW_LINE> data.append(self.recur_var.get()) <NEW_LINE> data[2] = self.format_string(data[2]) <NEW_LINE> data[3] = self.format_string(data[3]) <NEW_LINE> self.curs.execute('INSERT INTO to_do VALUES(?, ?, ?, ?, ? )', (data[0], data[1], data[2], data[3], data[4],)) <NEW_LINE> self.db.commit() <NEW_LINE> self.title_entry.delete(0, 'end') <NEW_LINE> self.date_entry.delete(0, 'end') <NEW_LINE> self.notes_text.delete('1.0', END) <NEW_LINE> self.refresh_list() | writes the users information to the selected database
@param self: the Window instance (member function of class Window)
def _get_tenant_id(self): <NEW_LINE> <INDENT> body = self.client.show_network(self.network['id']) <NEW_LINE> return body['network']['tenant_id'] | Returns the tenant_id of the client current user | 625941c39c8ee82313fbb734 |
def test_view_virtual_machines(self): <NEW_LINE> <INDENT> url = "/cluster/%s/virtual_machines/" <NEW_LINE> args = self.cluster.slug <NEW_LINE> self.validate_get(url, args, 'ganeti/virtual_machine/list.html') | Tests view for cluster users:
Verifies:
* lack of permissions returns 403
* nonexistent cluster returns 404 | 625941c3f548e778e58cd53d |
def extend(self, q_new, prec_list, succ_list, label_new): <NEW_LINE> <INDENT> added = 0 <NEW_LINE> cost = np.inf <NEW_LINE> q_min = () <NEW_LINE> for pre in prec_list: <NEW_LINE> <INDENT> if pre in succ_list: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> c = self.tree.nodes[pre]['cost'] + np.abs(q_new[0][0]-pre[0][0]) + np.abs(q_new[0][1]-pre[0][1]) <NEW_LINE> if c < cost: <NEW_LINE> <INDENT> added = 1 <NEW_LINE> q_min = pre <NEW_LINE> cost = c <NEW_LINE> <DEDENT> <DEDENT> if added == 1: <NEW_LINE> <INDENT> self.tree.add_node(q_new, cost=cost, label=label_new) <NEW_LINE> self.tree.nodes[q_new]['acc'] = set(self.acpt_check(q_min, q_new)[0]) <NEW_LINE> self.tree.add_edge(q_min, q_new) <NEW_LINE> <DEDENT> return added | :param: q_new: new state form: tuple (mulp, buchi)
:param: near_v: near state form: tuple (mulp, buchi)
:param: obs_check: check obstacle free form: dict { (mulp, mulp): True }
:param: succ: list of successor of the root
:return: extending the tree | 625941c33539df3088e2e30b |
def push_front(self, item): <NEW_LINE> <INDENT> self.insert_before(self.front(), item) | insert object at the front of the list (before the current first node).
@param item: inserted object
@type item: LinkNode | 625941c38e7ae83300e4af8c |
def run_one_epoch(model, tqdm_iterator, mode, get_locals=False, optimizer=None, loss_update_interval=1000): <NEW_LINE> <INDENT> if mode == "train": <NEW_LINE> <INDENT> assert optimizer is not None <NEW_LINE> model.train() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> model.eval() <NEW_LINE> param_grads = [] <NEW_LINE> for param in model.parameters(): <NEW_LINE> <INDENT> param_grads += [param.requires_grad] <NEW_LINE> param.requires_grad = False <NEW_LINE> <DEDENT> <DEDENT> summary = {"losses": [], "logits": [], "labels": []} <NEW_LINE> device = next(model.parameters()).device <NEW_LINE> loss_fcn = model.module.get_loss if isinstance(model, torch.nn.DataParallel) else model.get_loss <NEW_LINE> for i, batch_cpu in enumerate(tqdm_iterator): <NEW_LINE> <INDENT> batch = misc.move_to(batch_cpu, device) <NEW_LINE> X, y = batch <NEW_LINE> X_cpu, y_cpu = batch_cpu <NEW_LINE> if optimizer: <NEW_LINE> <INDENT> optimizer.zero_grad() <NEW_LINE> <DEDENT> logits = model(X) <NEW_LINE> loss = loss_fcn(logits, y) <NEW_LINE> summary["losses"] += [loss.item()] <NEW_LINE> if mode == "train": <NEW_LINE> <INDENT> loss.backward() <NEW_LINE> optimizer.step() <NEW_LINE> if loss_update_interval > 0 and i%loss_update_interval == 0: <NEW_LINE> <INDENT> tqdm_iterator.set_description("Loss: %.3f" % (np.mean(summary["losses"]))) <NEW_LINE> <DEDENT> <DEDENT> if get_locals: <NEW_LINE> <INDENT> summary["logits"] += [logits.cpu().detach().numpy()] <NEW_LINE> summary["labels"] += [y_cpu.numpy()] <NEW_LINE> <DEDENT> <DEDENT> if mode != "train": <NEW_LINE> <INDENT> for param, value in zip(model.parameters(), param_grads): <NEW_LINE> <INDENT> param.requires_grad = value <NEW_LINE> <DEDENT> <DEDENT> return summary | Definition of one epoch procedure.
| 625941c3a219f33f3462892c |
def common_point(self): <NEW_LINE> <INDENT> if len(self.canvas)>1: <NEW_LINE> <INDENT> x= self.canvas[:] <NEW_LINE> for i in self.canvas: <NEW_LINE> <INDENT> x.remove(i) <NEW_LINE> for j in x: <NEW_LINE> <INDENT> if not (j.intersects(i)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "Not enought rectangles to compare." | (Canvas)-> bool/str (str if not enought rectangles were given to compare)
Returns True if there exists a point that intersects all rectangles in the calling canvas. False otherwise. | 625941c31f5feb6acb0c4b13 |
def process_file_via_stream(self, idd_file_stream): <NEW_LINE> <INDENT> self.idd_file_stream = idd_file_stream <NEW_LINE> self.file_path = "/streamed/idd" <NEW_LINE> return self.process_file() | This worker allows processing of an IDD snippet via stream. Most useful for unit testing, but possibly for
other situations.
:param file-like-object idd_file_stream: An IDD snippet that responds to typical file-like commands such as
read(). A common object would be the StringIO object.
:return: An IDDStructure instance created from processing the IDD snippet | 625941c3091ae35668666f22 |
def seed_db(): <NEW_LINE> <INDENT> db = get_db() <NEW_LINE> add_game({ 'game_title': 'Caverna: The Cave Farmers', 'game_url': 'https://boardgamegeek.com/boardgame/102794/caverna-cave-farmers', 'play_time_min': 30, 'play_time_max': 210, 'players_min': 1, 'players_max': 7 }) <NEW_LINE> add_game({ 'game_title': 'Exploding Kittens', 'game_url': 'https://boardgamegeek.com/boardgame/172225/exploding-kittens', 'play_time_min': 15, 'play_time_max': None, 'players_min': 2, 'players_max': 5 }) <NEW_LINE> add_game({ 'game_title': 'Scythe', 'game_url': 'https://boardgamegeek.com/boardgame/169786/scythe', 'play_time_min': 90, 'play_time_max': 115, 'players_min': 1, 'players_max': 5 }) | Seeds the database. | 625941c38a349b6b435e8134 |
def isPalindrome(self, head): <NEW_LINE> <INDENT> slow = fast = head <NEW_LINE> while fast and fast.next: <NEW_LINE> <INDENT> slow = slow.next <NEW_LINE> fast = fast.next.next <NEW_LINE> <DEDENT> if fast: <NEW_LINE> <INDENT> slow = slow.next <NEW_LINE> <DEDENT> slow = self.reverseList0(slow) <NEW_LINE> while slow: <NEW_LINE> <INDENT> if head.val != slow.val: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> head = head.next <NEW_LINE> slow = slow.next <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True | :type head: ListNode
:rtype: bool | 625941c315baa723493c3f34 |
@multibox_detection.register(["cuda", "gpu"]) <NEW_LINE> def multibox_detection_gpu(cls_prob, loc_pred, anchor, clip=True, threshold=0.01, nms_threshold=0.5, force_suppress=False, variances=(0.1, 0.1, 0.2, 0.2), nms_topk=-1): <NEW_LINE> <INDENT> inter_out = multibox_transform_loc(cls_prob, loc_pred, anchor, clip, threshold, variances) <NEW_LINE> out = nms( inter_out[0], inter_out[1], nms_threshold, force_suppress, nms_topk) <NEW_LINE> return out | Convert multibox detection predictions.
Parameters
----------
cls_prob : tvm.Tensor
Class probabilities.
loc_pred : tvm.Tensor
Location regression predictions.
anchor : tvm.Tensor
Prior anchor boxes.
clip : boolean
Whether to clip out-of-boundary boxes.
nms_threshold : float
Non-maximum suppression threshold.
force_suppress : boolean
Whether to suppress all detections regardless of class_id.
threshold : float
Threshold to be a positive prediction.
variances : tuple of float
Variances to be decoded from box regression output.
nms_topk : int
Keep maximum top k detections before nms, -1 for no limit.
Returns
-------
out : tvm.Tensor
3-D tensor with shape (batch_size, num_anchors, 6) | 625941c315baa723493c3f35 |
def __init__(self, width, height, caption, fps=60): <NEW_LINE> <INDENT> pygame.init() <NEW_LINE> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.caption = caption <NEW_LINE> self.fps = fps <NEW_LINE> self.running = False <NEW_LINE> self.clock = pygame.time.Clock() <NEW_LINE> self.option_val = 1 <NEW_LINE> self.game_progress = 0 <NEW_LINE> self.game_info_dict = {} <NEW_LINE> self.game_start_text = {} <NEW_LINE> self.game_start_img = {} <NEW_LINE> self.game_option_index = GAME_OPTION_TEXT_OBJECT_ARR_1 <NEW_LINE> self.bg_logo_list = {} <NEW_LINE> self.person = {} <NEW_LINE> os.environ['SDL_VIDEO_WINDOW_POS'] = "%d, %d" % (0, 30) <NEW_LINE> self.screen = pygame.display.set_mode((self.width, self.height), 0, 32) <NEW_LINE> pygame.display.set_caption(self.caption) <NEW_LINE> self.bearing_surface_group = pygame.sprite.Group() <NEW_LINE> self.left_surface_group = pygame.sprite.Group() <NEW_LINE> self.right_surface_group = pygame.sprite.Group() <NEW_LINE> self.top_surface_group = pygame.sprite.Group() <NEW_LINE> self.display_back_group = pygame.sprite.Group() <NEW_LINE> self.display_font_group = pygame.sprite.Group() <NEW_LINE> self.joysticks = [] <NEW_LINE> self.joysticks1 = None <NEW_LINE> self.game_mario = None <NEW_LINE> self.bgm = None | :param width:游戏窗体宽度
:param height: 高度
:param caption: 窗体标题
:param fps: 游戏帧率 | 625941c39b70327d1c4e0d95 |
def proc_output(command): <NEW_LINE> <INDENT> proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) <NEW_LINE> (output, err) = proc.communicate() <NEW_LINE> return output | returns output of command in the linux shell | 625941c326068e7796caec9d |
def run_spp(self, input_bam, output, plot, cpus): <NEW_LINE> <INDENT> base = "{} {} -rf -savp".format(self.tools.Rscript, self.tools.spp) <NEW_LINE> cmd = base + " -savp={} -s=0:5:500 -c={} -out={} -p={}".format( plot, input_bam, output, cpus) <NEW_LINE> return cmd | Run the SPP read peak analysis tool.
:param str input_bam: Path to reads file
:param str output: Path to output file
:param str plot: Path to plot file
:param int cpus: Number of processors to use
:return str: Command with which to run SPP | 625941c396565a6dacc8f68c |
def invoke_javascript(self, javascript): <NEW_LINE> <INDENT> self._impl.invoke_javascript(javascript) | Invoke a JavaScript expression.
The result (if any) of the javascript is ignored.
**No guarantee is provided that the javascript has completed
execution when `invoke_javascript()` returns**
Args:
javascript (str): The javascript expression to evaluate. | 625941c38da39b475bd64f32 |
def log(message): <NEW_LINE> <INDENT> print(message, flush=True) | Log message to console | 625941c3097d151d1a222e1c |
def array2tiff(data, fname, pxsize=1, dim="yxz", transpose3=True): <NEW_LINE> <INDENT> fname = checkfname(fname, "tiff") <NEW_LINE> ndim = data.ndim <NEW_LINE> if ndim >= 3 and transpose3: <NEW_LINE> <INDENT> data = np.transpose(data, (2, 0, 1)) <NEW_LINE> <DEDENT> dimAll = "tzcyxs" <NEW_LINE> N = [1, 1, 1, 1, 1, 1] <NEW_LINE> d = 0 <NEW_LINE> Ishape = np.shape(data) <NEW_LINE> for i in range(6): <NEW_LINE> <INDENT> if dimAll[i] in dim: <NEW_LINE> <INDENT> N[i] = Ishape[d] <NEW_LINE> d += 1 <NEW_LINE> <DEDENT> <DEDENT> data.shape = N <NEW_LINE> data = data.astype('int16') <NEW_LINE> imwrite(fname, data, imagej=True, resolution=(1./pxsize, 1./pxsize), metadata={'unit': 'um'}) <NEW_LINE> print("Done.") | Write 2D or 3D array to tiff image file
===========================================================================
Input Meaning
---------------------------------------------------------------------------
data 2D or 3D array with data (integer numbers int16)
order: TZCYXS
with t time
c channel
fname Name of the file to write to
pxsize Pixel size [µm]
dim String with dimensions in image
e.g. z stack of planar images: dim = "yxz"
The order must be "tzcyxs". The same order must be used
for data
E.g. for a xy time series: dim="tyx" and 'data' is a 3D
array with time, y, and x as 1st, 2nd, and 3r dimension
The only exception is that for a 3D array also "yxz" is ok
in combination with transpose3=True
(which moves the 3rd dimension to the first to correct the
order)
===========================================================================
Output Meaning
---------------------------------------------------------------------------
tiff image
=========================================================================== | 625941c3656771135c3eb82d |
def create_for_rectangle(self, x, y, width, height): <NEW_LINE> <INDENT> return Surface._from_pointer( cairo.cairo_surface_create_for_rectangle( self._pointer, x, y, width, height), incref=False) | Create a new surface that is a rectangle within this surface.
All operations drawn to this surface are then clipped and translated
onto the target surface.
Nothing drawn via this sub-surface outside of its bounds
is drawn onto the target surface,
making this a useful method for passing constrained child surfaces
to library routines that draw directly onto the parent surface,
i.e. with no further backend allocations,
double buffering or copies.
.. note::
As of cairo 1.12,
the semantics of subsurfaces have not been finalized yet
unless the rectangle is in full device units,
is contained within the extents of the target surface,
and the target or subsurface's device transforms are not changed.
:param x:
The x-origin of the sub-surface
from the top-left of the target surface (in device-space units)
:param y:
The y-origin of the sub-surface
from the top-left of the target surface (in device-space units)
:param width:
Width of the sub-surface (in device-space units)
:param height:
Height of the sub-surface (in device-space units)
:type x: float
:type y: float
:type width: float
:type height: float
:returns:
A new :class:`Surface` object.
*New in cairo 1.10.* | 625941c382261d6c526ab45d |
def reorderList(self, head): <NEW_LINE> <INDENT> slist = [] <NEW_LINE> temp = head <NEW_LINE> while temp is not None: <NEW_LINE> <INDENT> slist.append(temp) <NEW_LINE> temp = temp.next <NEW_LINE> <DEDENT> length = len(slist) <NEW_LINE> count = (length - 1) // 2 <NEW_LINE> temp = head <NEW_LINE> while count > 0: <NEW_LINE> <INDENT> point = slist.pop() <NEW_LINE> point.next = temp.next <NEW_LINE> temp.next = point <NEW_LINE> temp = temp.next.next <NEW_LINE> count -= 1 <NEW_LINE> <DEDENT> if length > 0: <NEW_LINE> <INDENT> if length % 2 == 1: <NEW_LINE> <INDENT> temp.next = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> temp.next.next = None | :type head: ListNode
:rtype: void Do not return anything, modify head in-place instead. | 625941c3d99f1b3c44c67552 |
def feed(self, handle, consumer): <NEW_LINE> <INDENT> self._parser.setContentHandler(EventGenerator(consumer, self.interest_tags )) <NEW_LINE> self._parser.setErrorHandler(handler.ErrorHandler()) <NEW_LINE> self._parser.parseFile(handle) | Feed a set of data into the scanner.
Arguments:
o handle - A handle with the information to parse.
o consumer - The consumer that should be informed of events. | 625941c371ff763f4b549649 |
def result(): <NEW_LINE> <INDENT> file_list = UL() <NEW_LINE> static_path = os.path.join(request.folder, "static", "test") <NEW_LINE> for filename in os.listdir(static_path): <NEW_LINE> <INDENT> link = A(filename, _href = URL(c = "static", f = "test", args = [filename] ) ) <NEW_LINE> file_list.append(link) <NEW_LINE> <DEDENT> return dict(file_list=file_list) | Selenium Test Result Reports list | 625941c316aa5153ce36243a |
def validate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resp = self.request().get(self.validate_url, verify=self.verifySSL).json() <NEW_LINE> <DEDENT> except TokenExpiredError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if 'error' in resp: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | Confirms the current token is still valid.
Returns True if it is valid, False otherwise. | 625941c3be383301e01b544a |
def dumps(obj): <NEW_LINE> <INDENT> buff = six.StringIO() <NEW_LINE> yaml.dump_all([obj], buff, explicit_start=True, indent=2, default_flow_style=False, line_break="\n", Dumper=PrettySafeDumper, allow_unicode=True) <NEW_LINE> return buff.getvalue() | Dump a python object -> blob and apply our pretty styling. | 625941c3187af65679ca50df |
def bind(self, func): <NEW_LINE> <INDENT> cnew = CallableObject(func) <NEW_LINE> for c in self._handlers: <NEW_LINE> <INDENT> if cnew.compare(c): <NEW_LINE> <INDENT> print("Warning: handler %s already present for %s" %(func, self)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self._handlers.append(cnew) | bind(func)
Add an eventhandler to this event.
The callback/handler (func) must be a callable. It is called
with one argument: the event instance, which can contain
additional information about the event. | 625941c350812a4eaa59c2e4 |
def test_clear_cached_attrs_resets_cached_attr_class_attributes(self): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> for attr, value in self.datasource.cached_attr_defaults: <NEW_LINE> <INDENT> setattr(self.datasource, attr, count) <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> self.datasource._dirty_cache = True <NEW_LINE> self.datasource.clear_cached_attrs() <NEW_LINE> for attr, value in self.datasource.cached_attr_defaults: <NEW_LINE> <INDENT> self.assertEqual(value, getattr(self.datasource, attr)) | Class attributes listed in cached_attr_defaults are reset. | 625941c396565a6dacc8f68d |
def test_hex_to_rgb_percent(self): <NEW_LINE> <INDENT> test_pairs = ((u'#fff', (u'100%', u'100%', u'100%')), (u'#ffffff', (u'100%', u'100%', u'100%')), (u'#000080', (u'0%', u'0%', u'50%'))) <NEW_LINE> for pair in test_pairs: <NEW_LINE> <INDENT> self.assertEqual(pair[1], webcolors.hex_to_rgb_percent(pair[0])) | Test conversion from hex to percent RGB triplet. | 625941c3c4546d3d9de729f3 |
def AffineForward(A, W, b): <NEW_LINE> <INDENT> return A.dot(W) + b, (A, W, b) | Computes affine transformation Z = AW + b.
Args:
A (n x d array): Batch of data
W (d x d' array): Layer Weight
b (size d' array): Bias
Returns:
n x d' array: Z
tuple: Current (A, W, b) | 625941c33c8af77a43ae3760 |
def pairwise(iterable): <NEW_LINE> <INDENT> a = iter(iterable) <NEW_LINE> return itertools.izip(a, a) | s -> (s0,s1), (s2,s3), (s4, s5), ... | 625941c31f037a2d8b9461bf |
def test_get_literals_none(self): <NEW_LINE> <INDENT> self.assertEqual(bbc.get_literals(BitArray()), (BitArray(), BitArray())) <NEW_LINE> for i in range(8, 100): <NEW_LINE> <INDENT> bits = BitArray(bin='0' * i) <NEW_LINE> self.assertEqual(bbc.get_literals(bits), (bits, BitArray())) | Test ``bbc.get_literals()`` with non-literal inputs. | 625941c3099cdd3c635f0c1d |
def GenerateWeaterTime(currenttime): <NEW_LINE> <INDENT> time_local = time.localtime(currenttime) <NEW_LINE> timestamp = time.strftime('%Y-%m-%d %H:%M:%S',time_local) <NEW_LINE> return timestamp | from UNIX time to YYYY-MM-DD HH:MM:SS | 625941c310dbd63aa1bd2b65
def ai_acc_xed2x(y_true,y_pred,ky0=5,fgDebug=False): <NEW_LINE> <INDENT> df,dacc=pd.DataFrame(),-1 <NEW_LINE> if (len(y_true)==0) or (len(y_pred)==0): <NEW_LINE> <INDENT> return dacc,df <NEW_LINE> <DEDENT> y_num=len(y_true) <NEW_LINE> df['y_true'],df['y_pred']=pd.Series(y_true),pd.Series(y_pred) <NEW_LINE> df['y_diff']=np.abs(df.y_true-df.y_pred) <NEW_LINE> df['y_true2']=df['y_true'] <NEW_LINE> df.loc[df['y_true'] == 0, 'y_true2'] =0.00001 <NEW_LINE> df['y_kdif']=df.y_diff/df.y_true2*100 <NEW_LINE> dfk=df[df.y_kdif<ky0] <NEW_LINE> knum=len(dfk['y_pred']) <NEW_LINE> dacc=knum/y_num*100 <NEW_LINE> dacc=round(dacc,3) <NEW_LINE> return dacc,df | 效果评估函数,用于评估机器学习算法函数的效果。
输入:
y_true,y_pred,pandas的Series数据列格式。
ky0,结果数据误差k值,默认是5,表示百分之五。
fgDebug,调试模式变量,默认为False。
返回:
dacc,准确率,float格式
df,结果数据,pandas列表格式DataFrame | 625941c310dbd63aa1bd2b65 |
def get_changelists_changelist_diff_region(self, changelists_changelist_diff_region_id, changelist, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.get_changelists_changelist_diff_region_with_http_info(changelists_changelist_diff_region_id, changelist, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.get_changelists_changelist_diff_region_with_http_info(changelists_changelist_diff_region_id, changelist, **kwargs) <NEW_LINE> return data | get_changelists_changelist_diff_region # noqa: E501
Get snap diff regions of a file. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_changelists_changelist_diff_region(changelists_changelist_diff_region_id, changelist, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int changelists_changelist_diff_region_id: Get snap diff regions of a file. (required)
:param str changelist: (required)
:param str resume: Continue returning results from previous call using this token (token should come from the previous call, resume cannot be used with other options).
:param int limit: Return no more than this many results at once (see resume).
:param int offset:
:return: ChangelistsChangelistDiffRegions
If the method is called asynchronously,
returns the request thread. | 625941c3283ffb24f3c558c4 |
def systems(): <NEW_LINE> <INDENT> return 'Sinclair ZX Spectrum/ZX-81' | the related system/s
:return: string (i.e. 'Commodore 64') | 625941c3507cdc57c6306c98 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.