text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def create_kernel_spec(self, is_cython=False, is_pylab=False, is_sympy=False):
    """Create a kernel spec for our own kernels."""
    # The PYTHONPATH value must be written to spyder.ini before the spec
    # is created, because the spec reads the setting from there.
    CONF.set('main', 'spyder_pythonpath',
             self.main.get_spyder_pythonpath())
    return SpyderKernelSpec(is_cython=is_cython,
                            is_pylab=is_pylab,
                            is_sympy=is_sympy)
[ "def", "create_kernel_spec", "(", "self", ",", "is_cython", "=", "False", ",", "is_pylab", "=", "False", ",", "is_sympy", "=", "False", ")", ":", "# Before creating our kernel spec, we always need to\r", "# set this value in spyder.ini\r", "CONF", ".", "set", "(", "'m...
51.3
10
def pairwise_cos_distance(A, B):
    """Pairwise cosine distance between two matrices.

    :param A: a matrix.
    :param B: a matrix.
    :returns: A tensor for the pairwise cosine between A and B.
    """
    # Normalize rows so that the inner product equals the cosine similarity.
    a_unit = tf.nn.l2_normalize(A, dim=1)
    b_unit = tf.nn.l2_normalize(B, dim=1)
    cosine_sim = tf.matmul(a_unit, b_unit, adjoint_b=True)
    return 1 - cosine_sim
[ "def", "pairwise_cos_distance", "(", "A", ",", "B", ")", ":", "normalized_A", "=", "tf", ".", "nn", ".", "l2_normalize", "(", "A", ",", "dim", "=", "1", ")", "normalized_B", "=", "tf", ".", "nn", ".", "l2_normalize", "(", "B", ",", "dim", "=", "1",...
34.363636
14.909091
def editprojecthook(self, project_id, hook_id, url, push=False, issues=False,
                    merge_requests=False, tag_push=False):
    """
    edit an existing hook from a project

    :param id_: project id
    :param hook_id: hook id
    :param url: the new url
    :return: True if success
    """
    payload = {
        "id": project_id,
        "hook_id": hook_id,
        "url": url,
        'push_events': int(bool(push)),
        'issues_events': int(bool(issues)),
        'merge_requests_events': int(bool(merge_requests)),
        'tag_push_events': int(bool(tag_push)),
    }
    response = requests.put(
        '{0}/{1}/hooks/{2}'.format(self.projects_url, project_id, hook_id),
        headers=self.headers, data=payload, verify=self.verify_ssl,
        auth=self.auth, timeout=self.timeout)
    # GitLab answers 200 on a successful hook update.
    return response.status_code == 200
[ "def", "editprojecthook", "(", "self", ",", "project_id", ",", "hook_id", ",", "url", ",", "push", "=", "False", ",", "issues", "=", "False", ",", "merge_requests", "=", "False", ",", "tag_push", "=", "False", ")", ":", "data", "=", "{", "\"id\"", ":",...
34.518519
20.296296
def get_language(self, language_id):
    """
    Retrieves information about the language of the given id.

    :param language_id: The TheTVDB Id of the language.
    :return: a python dictionary with either the result of the search or
        an error from TheTVDB.
    """
    url = self.API_BASE_URL + '/languages/%d' % language_id
    raw_response = requests_util.run_request(
        'get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
[ "def", "get_language", "(", "self", ",", "language_id", ")", ":", "raw_response", "=", "requests_util", ".", "run_request", "(", "'get'", ",", "self", ".", "API_BASE_URL", "+", "'/languages/%d'", "%", "language_id", ",", "headers", "=", "self", ".", "__get_hea...
43.833333
29.333333
def selectlanguage(self, event):
    """Store client's selection of a new translation."""
    self.log('Language selection event:', event.client, pretty=True)

    # Accept the requested language only if a translation exists for it.
    if event.data in all_languages():
        language = event.data
    else:
        self.log('Unavailable language selected:', event.data, lvl=warn)
        language = None

    # Fall back to English when nothing usable was selected.
    if language is None:
        language = 'en'

    event.client.language = language

    # Persist the choice if the client carries a config object.
    if event.client.config is not None:
        event.client.config.language = language
        event.client.config.save()
[ "def", "selectlanguage", "(", "self", ",", "event", ")", ":", "self", ".", "log", "(", "'Language selection event:'", ",", "event", ".", "client", ",", "pretty", "=", "True", ")", "if", "event", ".", "data", "not", "in", "all_languages", "(", ")", ":", ...
30.736842
19.421053
def _handle_template(self, token):
    """Handle a case where a template is at the head of the tokens."""
    params = []
    auto_key = 1  # next positional parameter index for unkeyed params
    self._push()
    while self._tokens:
        token = self._tokens.pop()
        if isinstance(token, tokens.TemplateParamSeparator):
            # First separator terminates the template name.
            if not params:
                name = self._pop()
            param = self._handle_parameter(auto_key)
            params.append(param)
            if not param.showkey:
                auto_key += 1
        elif isinstance(token, tokens.TemplateClose):
            if not params:
                name = self._pop()
            return Template(name, params)
        else:
            self._write(self._handle_token(token))
    raise ParserError("_handle_template() missed a close token")
[ "def", "_handle_template", "(", "self", ",", "token", ")", ":", "params", "=", "[", "]", "default", "=", "1", "self", ".", "_push", "(", ")", "while", "self", ".", "_tokens", ":", "token", "=", "self", ".", "_tokens", ".", "pop", "(", ")", "if", ...
39.619048
11.809524
def types(self):
    """Tuple containing types transformed by this transformer."""
    # Pair each candidate type with its enable flag; keep the enabled ones
    # in a fixed (bytes, str) order.
    candidates = ((bytes, self._transform_bytes), (str, self._transform_str))
    return tuple(t for t, enabled in candidates if enabled)
[ "def", "types", "(", "self", ")", ":", "out", "=", "[", "]", "if", "self", ".", "_transform_bytes", ":", "out", ".", "append", "(", "bytes", ")", "if", "self", ".", "_transform_str", ":", "out", ".", "append", "(", "str", ")", "return", "tuple", "(...
26.2
12.6
def write_mnefiff(data, filename):
    """Export data to MNE using FIFF format.

    Parameters
    ----------
    data : instance of ChanTime
        data with only one trial
    filename : path to file
        file to export to (include '.mat')

    Notes
    -----
    It cannot store data larger than 2 GB.
    The data is assumed to have only EEG electrodes.
    It overwrites a file if it exists.
    """
    from mne import create_info, set_log_level
    from mne.io import RawArray

    set_log_level(WARNING)

    TRIAL = 0
    info = create_info(list(data.axis['chan'][TRIAL]), data.s_freq,
                       ['eeg', ] * data.number_of('chan')[TRIAL])

    UNITS = 1e-6  # mne wants data in uV
    fiff = RawArray(data.data[0] * UNITS, info)

    # Attach sensor positions when channel metadata is available.
    if data.attr['chan']:
        fiff.set_channel_positions(data.attr['chan'].return_xyz(),
                                   data.attr['chan'].return_label())

    fiff.save(filename, overwrite=True)
[ "def", "write_mnefiff", "(", "data", ",", "filename", ")", ":", "from", "mne", "import", "create_info", ",", "set_log_level", "from", "mne", ".", "io", "import", "RawArray", "set_log_level", "(", "WARNING", ")", "TRIAL", "=", "0", "info", "=", "create_info",...
28.121212
19.181818
def colored(msg, color=None, background=None, style=None, force=False):
    """
    Return the colored version of a string *msg*.

    For *color*, *background* and *style* options, see
    https://misc.flogisoft.com/bash/tip_colors_and_formatting. Unless
    *force* is *True*, the *msg* string is returned unchanged in case the
    output is not a tty.
    """
    try:
        # fileno() can fail on replaced stdout (e.g. io.StringIO raises
        # io.UnsupportedOperation); treat that as "not a tty".
        if not force and not os.isatty(sys.stdout.fileno()):
            return msg
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        return msg
    color = colors.get(color, colors["default"])
    background = backgrounds.get(background, backgrounds["default"])
    if not isinstance(style, (tuple, list, set)):
        style = (style,)
    style = ";".join(str(styles.get(s, styles["default"])) for s in style)
    return "\033[{};{};{}m{}\033[0m".format(style, background, color, msg)
[ "def", "colored", "(", "msg", ",", "color", "=", "None", ",", "background", "=", "None", ",", "style", "=", "None", ",", "force", "=", "False", ")", ":", "try", ":", "if", "not", "force", "and", "not", "os", ".", "isatty", "(", "sys", ".", "stdou...
40.2
27.2
def main():
    """
    The "main" entry that controls the flow of the script based on the
    provided arguments.
    """
    setup_logging(logging.INFO)

    # Parse arguments
    parser = argparse.ArgumentParser(
        description="A utility to interact with AWS using Cloudera Manager.")
    parser.add_argument('-H', '--hostname', action='store', dest='hostname',
                        required=True,
                        help='The hostname of the Cloudera Manager server.')
    parser.add_argument('-p', action='store', dest='port', type=int,
                        help='The port of the Cloudera Manager server. Defaults '
                             'to 7180 (http) or 7183 (https).')
    parser.add_argument('-u', '--username', action='store', dest='username',
                        help='Login name.')
    parser.add_argument('--password', action='store', dest='password',
                        help='Login password.')
    parser.add_argument('--api-version', action='store', dest='api_version',
                        type=int, default=MINIMUM_SUPPORTED_API_VERSION,
                        help='API version to be used. Defaults to {0}.'.format(
                            MINIMUM_SUPPORTED_API_VERSION))
    parser.add_argument('--tls', action='store_const', dest='use_tls',
                        const=True, default=False,
                        help='Whether to use tls (https).')
    parser.add_argument('-c', '--show-categories', action='store_true',
                        default=False, dest='show_categories',
                        help='Prints a list of supported external account '
                             'category names. For example, "AWS" is a supported '
                             'external account category name.')
    parser.add_argument('-t', '--show-types', action='store',
                        dest='category_name',
                        help='Prints a list of supported external account type '
                             'names for the given CATEGORY_NAME. For example, '
                             '"AWS_ACCESS_KEY_AUTH" is a supported external '
                             'account type name for external account category '
                             '"AWS".')
    parser.add_argument('-n', '--show-credentials', action='store',
                        dest='type_name',
                        help='Prints a list of available credential names for '
                             'the given TYPE_NAME.')
    parser.add_argument('--prune', action='store', dest='credential_name',
                        help='Runs S3Guard prune command on external account '
                             'associated with the given CREDENTIAL_NAME.')
    parser.add_argument('--version', action='version', version='%(prog)s 1.0')
    args = parser.parse_args()

    # Use the default port if required.
    if not args.port:
        if args.use_tls:
            args.port = DEFAULT_HTTPS_PORT
        else:
            args.port = DEFAULT_HTTP_PORT

    validate_api_compatibility(args)
    get_login_credentials(args)
    initialize_api(args)

    # Perform the AWS operation based on the input arguments.
    if args.show_categories:
        list_supported_categories()
    elif args.category_name:
        list_supported_types(args.category_name)
    elif args.type_name:
        list_credentials_by_name(args.type_name)
    elif args.credential_name:
        call_s3guard_prune(args.credential_name)
    else:
        print("ERROR: No arguments given to perform any AWS operation.")
        parser.print_help()
        sys.exit(1)
[ "def", "main", "(", ")", ":", "setup_logging", "(", "logging", ".", "INFO", ")", "# Parse arguments", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"A utility to interact with AWS using Cloudera Manager.\"", ")", "parser", ".", "add_argu...
45.608108
20.851351
def save(self):
    """Create this index in the collection if it hasn't been created yet."""
    api = Client.instance().api

    # Base payload is the index type; merge in any type-specific attributes.
    index_details = {'type': self.index_type_obj.type_name}
    index_details.update(self.index_type_obj.get_extra_attributes())

    result = api.index.post(data=index_details,
                            collection=self.collection.name)

    self.index_type_obj.is_new = result['isNewlyCreated']
    self.index_type_obj.id = result['id']
[ "def", "save", "(", "self", ")", ":", "api", "=", "Client", ".", "instance", "(", ")", ".", "api", "index_details", "=", "{", "'type'", ":", "self", ".", "index_type_obj", ".", "type_name", "}", "extra_index_attributes", "=", "self", ".", "index_type_obj",...
31.2
25.44
def _label_setter(self, new_label, current_label, attr_label,
                  default=np.NaN, use_names_default=False):
    """Generalized setter of default meta attributes

    Parameters
    ----------
    new_label : str
        New label to use in the Meta object
    current_label : str
        The hidden attribute to be updated that actually stores metadata
    default :
        Default setting to use for label if there is no attribute value
    use_names_default : bool
        if True, MetaData variable names are used as the default value
        for the specified Meta attributes settings

    Examples
    --------
    ::

        @name_label.setter
        def name_label(self, new_label):
            self._label_setter(new_label, self._name_label,
                               use_names_default=True)

    Notes
    -----
    Not intended for end user
    """
    if new_label not in self.attrs():
        # new label not in metadata, including case
        if current_label in self.attrs():
            # old label exists and has expected case; migrate the column.
            self.data.loc[:, new_label] = self.data.loc[:, current_label]
            self.data.drop(current_label, axis=1, inplace=True)
        elif self.has_attr(current_label):
            # there is something like the label, wrong case though;
            # resolve the stored case before migrating.
            current_label = self.attr_case_name(current_label)
            self.data.loc[:, new_label] = self.data.loc[:, current_label]
            self.data.drop(current_label, axis=1, inplace=True)
        else:
            # no existing label: setting for the first time
            if use_names_default:
                self.data[new_label] = self.data.index
            else:
                self.data[new_label] = default
        # check higher order structures as well; recursively change labels
        for key in self.keys_nD():
            setattr(self.ho_data[key], attr_label, new_label)
    # now update 'hidden' attribute value
    setattr(self, ''.join(('_', attr_label)), new_label)
[ "def", "_label_setter", "(", "self", ",", "new_label", ",", "current_label", ",", "attr_label", ",", "default", "=", "np", ".", "NaN", ",", "use_names_default", "=", "False", ")", ":", "if", "new_label", "not", "in", "self", ".", "attrs", "(", ")", ":", ...
41.586207
19.189655
def path(self, root_dir):
    """Manually establishes the build root for the current workspace."""
    resolved = os.path.realpath(root_dir)
    if not os.path.exists(resolved):
        raise ValueError('Build root does not exist: {}'.format(root_dir))
    self._root_dir = resolved
[ "def", "path", "(", "self", ",", "root_dir", ")", ":", "path", "=", "os", ".", "path", ".", "realpath", "(", "root_dir", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "ValueError", "(", "'Build root does not exist:...
43.833333
12.166667
async def client_event_handler(self, client_id, event_tuple, user_data):
    """Method called to actually send an event to a client.

    Users of this class should override this method to actually forward
    device events to their clients. It is called with the client_id passed
    to (or returned from) :meth:`setup_client` as well as the user_data
    object that was included there.

    The event tuple is a 3-tuple of: connection string, event name and
    event object. If you override this to be a coroutine, it will be
    awaited. The default implementation just logs the event.

    Args:
        client_id (str): The client_id that this event should be
            forwarded to.
        event_tuple (tuple): The connection_string, event_name and
            event_object that should be forwarded.
        user_data (object): Any user data that was passed to setup_client.
    """
    conn_string, event_name, _event = event_tuple
    self._logger.debug("Ignoring event %s from device %s forwarded for client %s",
                       event_name, conn_string, client_id)
    return None
[ "async", "def", "client_event_handler", "(", "self", ",", "client_id", ",", "event_tuple", ",", "user_data", ")", ":", "conn_string", ",", "event_name", ",", "_event", "=", "event_tuple", "self", ".", "_logger", ".", "debug", "(", "\"Ignoring event %s from device ...
39.233333
26.1
def sync(self, command, arguments, tags=None, id=None):
    """
    Same as self.raw except it do a response.get() waiting for the command
    execution to finish and reads the result

    :param command: Command name to execute supported by the node
        (ex: core.system, info.cpu, etc...) check documentation for list
        of built in commands
    :param arguments: A dict of required command arguments depends on
        the command name.
    :param tags: job tags
    :param id: job id. Generated if not supplied
    :return: Result object
    """
    result = self.raw(command, arguments, tags=tags, id=id).get()

    # Any non-success state surfaces as a ResultError with the job's data.
    if result.state != 'SUCCESS':
        raise ResultError(msg='%s' % result.data, code=result.code)

    return result
[ "def", "sync", "(", "self", ",", "command", ",", "arguments", ",", "tags", "=", "None", ",", "id", "=", "None", ")", ":", "response", "=", "self", ".", "raw", "(", "command", ",", "arguments", ",", "tags", "=", "tags", ",", "id", "=", "id", ")", ...
44.5
26.833333
def search_dimensions(self, *args, **kwargs):
    """
    Args:
        query (string): elasticsearch string query
        order_by (optional[string]): property by which to order results
        offset (optional[int]): number of results to skip for pagination
            (default=0)
        limit (optional[int]): how many results to return (default=50)
        timeout (optional[int]): how long to wait for response
            (in seconds)

    Returns:
        result of query search on dimensions
    """
    # Delegate to the shared search helper, targeting the dimension endpoint.
    return self._search_metrics_and_metadata(
        self._DIMENSION_ENDPOINT_SUFFIX, *args, **kwargs)
[ "def", "search_dimensions", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_search_metrics_and_metadata", "(", "self", ".", "_DIMENSION_ENDPOINT_SUFFIX", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
42.6
20.333333
def graph(self):
    """Returns MultiDiGraph from kihs. Nodes are helices and edges are kihs."""
    g = networkx.MultiDiGraph()
    g.add_edges_from(
        (kih.knob_helix, kih.hole_helix, kih.id, {'kih': kih})
        for kih in self.get_monomers())
    return g
[ "def", "graph", "(", "self", ")", ":", "g", "=", "networkx", ".", "MultiDiGraph", "(", ")", "edge_list", "=", "[", "(", "x", ".", "knob_helix", ",", "x", ".", "hole_helix", ",", "x", ".", "id", ",", "{", "'kih'", ":", "x", "}", ")", "for", "x",...
47.333333
19.166667
def dnd_endDnd(self, **kwargs) -> SlackResponse:
    """Ends the current user's Do Not Disturb session immediately."""
    # Endpoint requires a user token (xoxp); reject bot tokens up front.
    self._validate_xoxp_token()
    return self.api_call("dnd.endDnd", json=kwargs)
[ "def", "dnd_endDnd", "(", "self", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "self", ".", "_validate_xoxp_token", "(", ")", "return", "self", ".", "api_call", "(", "\"dnd.endDnd\"", ",", "json", "=", "kwargs", ")" ]
52.75
7
def _delete(self, pos, idx): """Delete the item at the given (pos, idx). Combines lists that are less than half the load level. Updates the index when the sublist length is more than half the load level. This requires decrementing the nodes in a traversal from the leaf node to the root. For an example traversal see self._loc. """ _maxes, _lists, _index = self._maxes, self._lists, self._index lists_pos = _lists[pos] del lists_pos[idx] self._len -= 1 len_lists_pos = len(lists_pos) if len_lists_pos > self._half: _maxes[pos] = lists_pos[-1] if _index: child = self._offset + pos while child > 0: _index[child] -= 1 child = (child - 1) >> 1 _index[0] -= 1 elif len(_lists) > 1: if not pos: pos += 1 prev = pos - 1 _lists[prev].extend(_lists[pos]) _maxes[prev] = _lists[prev][-1] del _maxes[pos] del _lists[pos] del _index[:] self._expand(prev) elif len_lists_pos: _maxes[pos] = lists_pos[-1] else: del _maxes[pos] del _lists[pos] del _index[:]
[ "def", "_delete", "(", "self", ",", "pos", ",", "idx", ")", ":", "_maxes", ",", "_lists", ",", "_index", "=", "self", ".", "_maxes", ",", "self", ".", "_lists", ",", "self", ".", "_index", "lists_pos", "=", "_lists", "[", "pos", "]", "del", "lists_...
24.54717
21.377358
def get_agent_pools(self, pool_name=None, properties=None, pool_type=None,
                    action_filter=None):
    """GetAgentPools.
    [Preview API] Get a list of agent pools.
    :param str pool_name: Filter by name
    :param [str] properties: Filter by agent pool properties (comma-separated)
    :param str pool_type: Filter by pool type
    :param str action_filter: Filter by whether the calling user has use or manage permissions
    :rtype: [TaskAgentPool]
    """
    query_params = {}
    if pool_name is not None:
        query_params['poolName'] = self._serialize.query(
            'pool_name', pool_name, 'str')
    if properties is not None:
        # The REST API expects a single comma-separated string.
        properties = ",".join(properties)
        query_params['properties'] = self._serialize.query(
            'properties', properties, 'str')
    if pool_type is not None:
        query_params['poolType'] = self._serialize.query(
            'pool_type', pool_type, 'str')
    if action_filter is not None:
        query_params['actionFilter'] = self._serialize.query(
            'action_filter', action_filter, 'str')
    response = self._send(http_method='GET',
                          location_id='a8c47e17-4d56-4a56-92bb-de7ea7dc65be',
                          version='5.1-preview.1',
                          query_parameters=query_params)
    return self._deserialize('[TaskAgentPool]',
                             self._unwrap_collection(response))
[ "def", "get_agent_pools", "(", "self", ",", "pool_name", "=", "None", ",", "properties", "=", "None", ",", "pool_type", "=", "None", ",", "action_filter", "=", "None", ")", ":", "query_parameters", "=", "{", "}", "if", "pool_name", "is", "not", "None", "...
59.291667
24.708333
def from_payload(self, payload):
    """Init frame from binary data."""
    # First two bytes form a big-endian 16-bit session id.
    self.session_id = payload[0] * 256 + payload[1]
    self.status = CommandSendConfirmationStatus(payload[2])
[ "def", "from_payload", "(", "self", ",", "payload", ")", ":", "self", ".", "session_id", "=", "payload", "[", "0", "]", "*", "256", "+", "payload", "[", "1", "]", "self", ".", "status", "=", "CommandSendConfirmationStatus", "(", "payload", "[", "2", "]...
47.5
11
def on_failure(self, exc, task_id, args, kwargs, einfo):
    """If the task fails, persist a record of the task."""
    # Only record a failure if no unresolved record already exists for
    # this task id.
    unresolved = FailedTask.objects.filter(task_id=task_id,
                                           datetime_resolved=None)
    if not unresolved.exists():
        FailedTask.objects.create(
            task_name=_truncate_to_field(FailedTask, 'task_name', self.name),
            task_id=task_id,  # Fixed length UUID: No need to truncate
            args=args,
            kwargs=kwargs,
            exc=_truncate_to_field(FailedTask, 'exc', repr(exc)),
        )
    super(PersistOnFailureTask, self).on_failure(
        exc, task_id, args, kwargs, einfo)
[ "def", "on_failure", "(", "self", ",", "exc", ",", "task_id", ",", "args", ",", "kwargs", ",", "einfo", ")", ":", "if", "not", "FailedTask", ".", "objects", ".", "filter", "(", "task_id", "=", "task_id", ",", "datetime_resolved", "=", "None", ")", ".",...
49.461538
22.076923
def tan_rand(q, seed=9):
    """Find a random vector in the tangent space of the n sphere

    This function will find a random orthogonal vector to q.

    Parameters
    ----------
    q : (n+1,) array which is in the n-sphere

    Returns
    -------
    qd : (n+1,) array which is orthogonal to n-sphere and also random
    """
    # probably need a check in case we get a parallel vector
    rng = np.random.RandomState(seed)
    # Cross a random draw with q and normalize; retry until the result is
    # numerically orthogonal (guards against near-parallel draws).
    while True:
        rvec = rng.rand(q.shape[0])
        qd = np.cross(rvec, q)
        qd = qd / np.linalg.norm(qd)
        if np.dot(q, qd) <= 1e-6:
            return qd
[ "def", "tan_rand", "(", "q", ",", "seed", "=", "9", ")", ":", "# probably need a check in case we get a parallel vector", "rs", "=", "np", ".", "random", ".", "RandomState", "(", "seed", ")", "rvec", "=", "rs", ".", "rand", "(", "q", ".", "shape", "[", "...
22.655172
22.068966
def step(self, y, u, t, h):
    """Advance the state one step with classical 4th-order Runge-Kutta.

    This is called by solve, but can be called by the user who wants to
    run through an integration with a control force.

    y - state at t
    u - control inputs at t
    t - time
    h - step size
    """
    # Each k already includes the factor h (k = h * f), so the classical
    # RK4 stage states are y + k/2 and y + k3 — NOT y + h*k/2. The
    # original multiplied by h twice, degrading the integrator's order.
    k1 = h * self.func(t, y, u)
    k2 = h * self.func(t + .5*h, y + .5*k1, u)
    k3 = h * self.func(t + .5*h, y + .5*k2, u)
    k4 = h * self.func(t + h, y + k3, u)
    return y + (k1 + 2*k2 + 2*k3 + k4) / 6.0
[ "def", "step", "(", "self", ",", "y", ",", "u", ",", "t", ",", "h", ")", ":", "k1", "=", "h", "*", "self", ".", "func", "(", "t", ",", "y", ",", "u", ")", "k2", "=", "h", "*", "self", ".", "func", "(", "t", "+", ".5", "*", "h", ",", ...
28.333333
14.066667
def delete_files():
    """Delete one or more files from the server."""
    session_token = request.headers['session_token']
    repository = request.headers['repository']

    #===
    current_user = have_authenticated_user(
        request.environ['REMOTE_ADDR'], repository, session_token)
    if current_user is False:
        return fail(user_auth_fail_msg)

    #===
    repository_path = config['repositories'][repository]['path']
    body_data = request.get_json()

    def with_exclusive_lock():
        # Caller must hold the user lock for this repository.
        if not varify_user_lock(repository_path, session_token):
            return fail(lock_fail_msg)

        try:
            data_store = versioned_storage(repository_path)
            if not data_store.have_active_commit():
                return fail(no_active_commit_msg)

            #-------------
            for fle in json.loads(body_data['files']):
                data_store.fs_delete(fle)

            # updates the user lock expiry
            update_user_lock(repository_path, session_token)
            return success()
        except Exception:  # pylint: disable=broad-except
            return fail()

    return lock_access(repository_path, with_exclusive_lock)
[ "def", "delete_files", "(", ")", ":", "session_token", "=", "request", ".", "headers", "[", "'session_token'", "]", "repository", "=", "request", ".", "headers", "[", "'repository'", "]", "#===", "current_user", "=", "have_authenticated_user", "(", "request", "....
36.866667
25.466667
def lon180to360(lon):
    """Convert longitude from (-180, 180) to (0, 360)"""
    # Out-of-range input is first folded back into (-180, 180).
    if np.any(lon > 180.0) or np.any(lon < -180.0):
        print("Warning: lon outside expected range")
        lon = lon360to180(lon)
    return (lon + 360.0) % 360.0
[ "def", "lon180to360", "(", "lon", ")", ":", "if", "np", ".", "any", "(", "lon", ">", "180.0", ")", "or", "np", ".", "any", "(", "lon", "<", "-", "180.0", ")", ":", "print", "(", "\"Warning: lon outside expected range\"", ")", "lon", "=", "lon360to180",...
31.888889
11
def check(self, instance):
    """
    Process both the istio_mesh instance and process_mixer instance
    associated with this instance
    """
    # Look up and process the istio_mesh scraper config.
    mesh_config = self.config_map[instance.get('istio_mesh_endpoint')]
    self.process(mesh_config)

    # Look up and process the mixer scraper config.
    mixer_config = self.config_map[instance.get('mixer_endpoint')]
    self.process(mixer_config)
[ "def", "check", "(", "self", ",", "instance", ")", ":", "# Get the config for the istio_mesh instance", "istio_mesh_endpoint", "=", "instance", ".", "get", "(", "'istio_mesh_endpoint'", ")", "istio_mesh_config", "=", "self", ".", "config_map", "[", "istio_mesh_endpoint"...
36.555556
21.555556
def route(self, method, pattern):
    """Decorator to add route for a request with any HTTP method.

    Arguments:
        method (str): HTTP method name, e.g. GET, POST, etc.
        pattern (str): Routing pattern the path must match.

    Returns:
        function: Decorator function to add route.
    """
    def register(callback):
        # Register the callback and hand it back untouched.
        self._router.add(method, pattern, callback)
        return callback
    return register
[ "def", "route", "(", "self", ",", "method", ",", "pattern", ")", ":", "def", "decorator", "(", "callback", ")", ":", "self", ".", "_router", ".", "add", "(", "method", ",", "pattern", ",", "callback", ")", "return", "callback", "return", "decorator" ]
32.857143
17.142857
def create_closure_model(cls):
    """Creates a <Model>Closure model in the same module as the model."""
    meta_vals = {'unique_together': (("parent", "child"),)}
    # Mirror an explicit db_table with a "<table>closure" companion table.
    table = getattr(cls._meta, 'db_table', None)
    if table:
        meta_vals['db_table'] = '%sclosure' % table

    model = type('%sClosure' % cls.__name__, (models.Model,), {
        'parent': models.ForeignKey(
            cls.__name__,
            related_name=cls.closure_parentref()
        ),
        'child': models.ForeignKey(
            cls.__name__,
            related_name=cls.closure_childref()
        ),
        'depth': models.IntegerField(),
        '__module__': cls.__module__,
        '__unicode__': _closure_model_unicode,
        'Meta': type('Meta', (object,), meta_vals),
    })
    setattr(cls, "_closure_model", model)
    return model
[ "def", "create_closure_model", "(", "cls", ")", ":", "meta_vals", "=", "{", "'unique_together'", ":", "(", "(", "\"parent\"", ",", "\"child\"", ")", ",", ")", "}", "if", "getattr", "(", "cls", ".", "_meta", ",", "'db_table'", ",", "None", ")", ":", "me...
36.173913
14.478261
def inv_diagonal(S):
    """
    Computes the inverse of a diagonal NxN np.array S. In general this will
    be much faster than calling np.linalg.inv().

    However, does NOT check if the off diagonal elements are non-zero. So
    long as S is truly diagonal, the output is identical to np.linalg.inv().

    Parameters
    ----------
    S : np.array
        diagonal NxN array to take inverse of

    Returns
    -------
    S_inv : np.array
        inverse of S

    Examples
    --------

    This is meant to be used as a replacement inverse function for the
    KalmanFilter class when you know the system covariance S is diagonal.
    It just makes the filter run faster, there is

    >>> kf = KalmanFilter(dim_x=3, dim_z=1)
    >>> kf.inv = inv_diagonal  # S is 1x1, so safely diagonal
    """
    S = np.asarray(S)

    if S.ndim != 2 or S.shape[0] != S.shape[1]:
        raise ValueError('S must be a square Matrix')

    # Fill only the diagonal with reciprocals; off-diagonals stay zero.
    si = np.zeros(S.shape)
    rng = np.arange(len(S))
    si[rng, rng] = 1.0 / S[rng, rng]
    return si
[ "def", "inv_diagonal", "(", "S", ")", ":", "S", "=", "np", ".", "asarray", "(", "S", ")", "if", "S", ".", "ndim", "!=", "2", "or", "S", ".", "shape", "[", "0", "]", "!=", "S", ".", "shape", "[", "1", "]", ":", "raise", "ValueError", "(", "'...
25.512821
24.74359
def center(self):
    '''
    Point whose coordinates are (midX, midY, origin.z), Point.
    '''
    return Point(self.midX, self.midY, self.origin.z)
[ "def", "center", "(", "self", ")", ":", "return", "Point", "(", "self", ".", "midX", ",", "self", ".", "midY", ",", "self", ".", "origin", ".", "z", ")" ]
32
24.4
def AgregarTributo(self, codigo_tributo, descripcion, base_imponible,
                   alicuota, importe):
    "Agrega la información referente a las retenciones de la liquidación"
    # Build the tax entry expected by the web service and queue it on the
    # pending request.
    entry = {
        'codigoTributo': codigo_tributo,
        'descripcion': descripcion,
        'baseImponible': base_imponible,
        'alicuota': alicuota,
        'importe': importe,
    }
    self.solicitud['tributo'].append(entry)
    return True
[ "def", "AgregarTributo", "(", "self", ",", "codigo_tributo", ",", "descripcion", ",", "base_imponible", ",", "alicuota", ",", "importe", ")", ":", "trib", "=", "dict", "(", "codigoTributo", "=", "codigo_tributo", ",", "descripcion", "=", "descripcion", ",", "b...
74.2
42.6
def generate_seviri_file(seviri, platform_name):
    """Generate the pyspectral internal common format relative response
    function file for one SEVIRI
    """
    import h5py

    filename = os.path.join(seviri.output_dir,
                            "rsr_seviri_{0}.h5".format(platform_name))
    sat_name = platform_name
    with h5py.File(filename, "w") as h5f:
        h5f.attrs['description'] = 'Relative Spectral Responses for SEVIRI'
        h5f.attrs['platform_name'] = platform_name
        h5f.attrs['band_names'] = [str(key) for key in seviri.rsr.keys()]

        for key in seviri.rsr.keys():
            grp = h5f.create_group(key)

            # Central wavelength may be stored per-detector (dict keyed by
            # '95') or as a plain value.
            cwl = seviri.central_wavelength[key][sat_name]
            if isinstance(cwl, dict):
                grp.attrs['central_wavelength'] = cwl['95']
            else:
                grp.attrs['central_wavelength'] = cwl

            arr = seviri.rsr[key]['wavelength']
            dset = grp.create_dataset('wavelength', arr.shape, dtype='f')
            dset.attrs['unit'] = 'm'
            dset.attrs['scale'] = 1e-06
            dset[...] = arr

            # Response may likewise be nested under the '95' detector key.
            try:
                arr = seviri.rsr[key][sat_name]['95']
            except ValueError:
                arr = seviri.rsr[key][sat_name]
            dset = grp.create_dataset('response', arr.shape, dtype='f')
            dset[...] = arr
    return
[ "def", "generate_seviri_file", "(", "seviri", ",", "platform_name", ")", ":", "import", "h5py", "filename", "=", "os", ".", "path", ".", "join", "(", "seviri", ".", "output_dir", ",", "\"rsr_seviri_{0}.h5\"", ".", "format", "(", "platform_name", ")", ")", "s...
38.263158
17.631579
async def deserialize(data: dict):
    """
    :param data: Data provided by the serialize method

    Example:
        msg_id = '1'
        phone_number = '8019119191'
        connection = await Connection.create(source_id)
        await connection.connect(phone_number)
        disclosed_proof = await DisclosedProof.create_with_msgid(source_id, connection, msg_id)
        data = await disclosed_proof.serialize()
        disclosed_proof2 = await DisclosedProof.deserialize(data)
    :return: DisclosedProof
    """
    return await DisclosedProof._deserialize(
        "vcx_disclosed_proof_deserialize",
        json.dumps(data),
        data.get('data').get('source_id'))
[ "async", "def", "deserialize", "(", "data", ":", "dict", ")", ":", "disclosed_proof", "=", "await", "DisclosedProof", ".", "_deserialize", "(", "\"vcx_disclosed_proof_deserialize\"", ",", "json", ".", "dumps", "(", "data", ")", ",", "data", ".", "get", "(", ...
47.588235
19.588235
def replace_tax_class_by_id(cls, tax_class_id, tax_class, **kwargs): """Replace TaxClass Replace all attributes of TaxClass This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_tax_class_by_id(tax_class_id, tax_class, async=True) >>> result = thread.get() :param async bool :param str tax_class_id: ID of taxClass to replace (required) :param TaxClass tax_class: Attributes of taxClass to replace (required) :return: TaxClass If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._replace_tax_class_by_id_with_http_info(tax_class_id, tax_class, **kwargs) else: (data) = cls._replace_tax_class_by_id_with_http_info(tax_class_id, tax_class, **kwargs) return data
[ "def", "replace_tax_class_by_id", "(", "cls", ",", "tax_class_id", ",", "tax_class", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async'", ")", ":", "return", "cls", ".",...
45.727273
23.090909
def add(self, name, nestable, **kw): """ Adds a level to the nesting and creates a checkpoint that can be reverted to later for aggregation by calling :meth:`SConsWrap.pop`. :param name: Identifier for the nest level :param nestable: A nestable object - see :meth:`Nest.add() <nestly.core.Nest.add>`. :param kw: Additional parameters to pass to :meth:`Nest.add() <nestly.core.Nest.add>`. """ self.checkpoints[name] = self.nest self.nest = copy.copy(self.nest) return self.nest.add(name, nestable, **kw)
[ "def", "add", "(", "self", ",", "name", ",", "nestable", ",", "*", "*", "kw", ")", ":", "self", ".", "checkpoints", "[", "name", "]", "=", "self", ".", "nest", "self", ".", "nest", "=", "copy", ".", "copy", "(", "self", ".", "nest", ")", "retur...
42.428571
12.857143
def with_connection(func): """Decorate a function to open a new datafind connection if required This method will inspect the ``connection`` keyword, and if `None` (or missing), will use the ``host`` and ``port`` keywords to open a new connection and pass it as ``connection=<new>`` to ``func``. """ @wraps(func) def wrapped(*args, **kwargs): if kwargs.get('connection') is None: kwargs['connection'] = _choose_connection(host=kwargs.get('host'), port=kwargs.get('port')) try: return func(*args, **kwargs) except HTTPException: kwargs['connection'] = reconnect(kwargs['connection']) return func(*args, **kwargs) return wrapped
[ "def", "with_connection", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ".", "get", "(", "'connection'", ")", "is", "None", ":", "kwargs", "[", "'connec...
42.611111
18.888889
def _place_tables_section(skeleton_section, sheet, keys_section): """ Place data into skeleton for either a paleo or chron section. :param dict skeleton_section: Empty or current progress of skeleton w/ data :param dict sheet: Sheet metadata :param list keys_section: Paleo or Chron specific keys :return dict: Skeleton section full of data """ logger_excel.info("enter place_tables_section") try: logger_excel.info("excel: place_tables_section: placing table: {}".format(sheet["new_name"])) new_name = sheet["new_name"] logger_excel.info("placing_tables_section: {}".format(new_name)) # get all the sheet metadata needed for this function idx_pc = sheet["idx_pc"] - 1 idx_model = sheet["idx_model"] idx_table = sheet["idx_table"] table_type = sheet["table_type"] data = sheet["data"] # paleoMeas or chronMeas key key_1 = keys_section[0] # paleoModel or chronModel key key_2 = keys_section[1] # Is this a measurement, or distribution table? if idx_table: # Yes, a table idx exists, so decrement it. idx_table = sheet["idx_table"] - 1 # Is this a ensemble, dist, or summary table? if idx_model: # Yes, a model idx exists, so decrement it. idx_model -= 1 except Exception as e: logger_excel.debug("excel: place_tables_section: error during setup, {}".format(e)) # If it's measurement table, it goes in first. try: if table_type == "measurement": skeleton_section[idx_pc][key_1][idx_table] = data # Other types of tables go one step below elif table_type in ["ensemble", "distribution", "summary"]: if table_type == "summary": skeleton_section[idx_pc][key_2][idx_model]["summaryTable"] = data elif table_type == "ensemble": skeleton_section[idx_pc][key_2][idx_model]["ensembleTable"] = data elif table_type == "distribution": skeleton_section[idx_pc][key_2][idx_model]["distributionTable"][idx_table] = data except Exception as e: logger_excel.warn("excel: place_tables_section: Unable to place table {}, {}".format(new_name, e)) logger_excel.info("exit place_tables_section") return skeleton_section
[ "def", "_place_tables_section", "(", "skeleton_section", ",", "sheet", ",", "keys_section", ")", ":", "logger_excel", ".", "info", "(", "\"enter place_tables_section\"", ")", "try", ":", "logger_excel", ".", "info", "(", "\"excel: place_tables_section: placing table: {}\"...
46.58
17.9
def class_args(cls): """ Decorates a class to handle the arguments parser. """ # get the Singleton ap_ = ArgParseInator(skip_init=True) # collect special vars (really need?) utils.collect_appendvars(ap_, cls) # set class reference cls.__cls__ = cls cmds = {} # get eventual class arguments cls.__arguments__ = getattr(cls, '__arguments__', []) # cycle through class functions for func in [f for f in cls.__dict__.values() if hasattr(f, '__cmd_name__') and not inspect.isclass(f)]: # clear subcommands func.__subcommands__ = None # set the parent class func.__cls__ = cls # assign to commands dict cmds[func.__cmd_name__] = func if hasattr(cls, '__cmd_name__') and cls.__cmd_name__ not in ap_.commands: # if che class has the __cmd_name__ attribute and is not already present # in the ArgParseInator commands # set the class subcommands cls.__subcommands__ = cmds # add the class as ArgParseInator command ap_.commands[cls.__cmd_name__] = cls else: # else if we don't have a __cmd_name__ # we will add all the functions directly to the ArgParseInator commands # if it don't already exists. for name, func in cmds.items(): if name not in ap_.commands: ap_.commands[name] = func return cls
[ "def", "class_args", "(", "cls", ")", ":", "# get the Singleton", "ap_", "=", "ArgParseInator", "(", "skip_init", "=", "True", ")", "# collect special vars (really need?)", "utils", ".", "collect_appendvars", "(", "ap_", ",", "cls", ")", "# set class reference", "cl...
37.594595
12.027027
def handle_route_spec_request(): """ Process request for route spec. Either a new one is posted or the current one is to be retrieved. """ try: if bottle.request.method == 'GET': # Just return what we currenty have cached as the route spec data = CURRENT_STATE.route_spec if not data: bottle.response.status = 404 msg = "Route spec not found!" else: bottle.response.status = 200 msg = json.dumps(data) else: # A new route spec is posted raw_data = bottle.request.body.read() new_route_spec = json.loads(raw_data) logging.info("New route spec posted") common.parse_route_spec_config(new_route_spec) _Q_ROUTE_SPEC.put(new_route_spec) bottle.response.status = 200 msg = "Ok" except ValueError as e: logging.error("Config ignored: %s" % str(e)) bottle.response.status = 400 msg = "Config ignored: %s" % str(e) except Exception as e: logging.error("Exception while processing HTTP request: %s" % str(e)) bottle.response.status = 500 msg = "Internal server error" bottle.response.content_type = 'application/json' return msg
[ "def", "handle_route_spec_request", "(", ")", ":", "try", ":", "if", "bottle", ".", "request", ".", "method", "==", "'GET'", ":", "# Just return what we currenty have cached as the route spec", "data", "=", "CURRENT_STATE", ".", "route_spec", "if", "not", "data", ":...
32.325
15.725
def cpustats(): ''' Return the CPU stats for this minion .. versionchanged:: 2016.11.4 Added support for AIX .. versionchanged:: 2018.3.0 Added support for OpenBSD CLI Example: .. code-block:: bash salt '*' status.cpustats ''' def linux_cpustats(): ''' linux specific implementation of cpustats ''' ret = {} try: with salt.utils.files.fopen('/proc/stat', 'r') as fp_: stats = salt.utils.stringutils.to_unicode(fp_.read()) except IOError: pass else: for line in stats.splitlines(): if not line: continue comps = line.split() if comps[0] == 'cpu': ret[comps[0]] = {'idle': _number(comps[4]), 'iowait': _number(comps[5]), 'irq': _number(comps[6]), 'nice': _number(comps[2]), 'softirq': _number(comps[7]), 'steal': _number(comps[8]), 'system': _number(comps[3]), 'user': _number(comps[1])} elif comps[0] == 'intr': ret[comps[0]] = {'total': _number(comps[1]), 'irqs': [_number(x) for x in comps[2:]]} elif comps[0] == 'softirq': ret[comps[0]] = {'total': _number(comps[1]), 'softirqs': [_number(x) for x in comps[2:]]} else: ret[comps[0]] = _number(comps[1]) return ret def freebsd_cpustats(): ''' freebsd specific implementation of cpustats ''' vmstat = __salt__['cmd.run']('vmstat -P').splitlines() vm0 = vmstat[0].split() cpu0loc = vm0.index('cpu0') vm1 = vmstat[1].split() usloc = vm1.index('us') vm2 = vmstat[2].split() cpuctr = 0 ret = {} for cpu in vm0[cpu0loc:]: ret[cpu] = {'us': _number(vm2[usloc + 3 * cpuctr]), 'sy': _number(vm2[usloc + 1 + 3 * cpuctr]), 'id': _number(vm2[usloc + 2 + 3 * cpuctr]), } cpuctr += 1 return ret def sunos_cpustats(): ''' sunos specific implementation of cpustats ''' mpstat = __salt__['cmd.run']('mpstat 1 2').splitlines() fields = mpstat[0].split() ret = {} for cpu in mpstat: if cpu.startswith('CPU'): continue cpu = cpu.split() ret[_number(cpu[0])] = {} for i in range(1, len(fields)-1): ret[_number(cpu[0])][fields[i]] = _number(cpu[i]) return ret def aix_cpustats(): ''' AIX specific implementation of cpustats ''' ret = {} ret['mpstat'] = [] procn = None 
fields = [] for line in __salt__['cmd.run']('mpstat -a').splitlines(): if not line: continue procn = len(ret['mpstat']) if line.startswith('System'): comps = line.split(':') ret['mpstat'].append({}) ret['mpstat'][procn]['system'] = {} cpu_comps = comps[1].split() for i in range(0, len(cpu_comps)): cpu_vals = cpu_comps[i].split('=') ret['mpstat'][procn]['system'][cpu_vals[0]] = cpu_vals[1] if line.startswith('cpu'): fields = line.split() continue if fields: cpustat = line.split() ret[_number(cpustat[0])] = {} for i in range(1, len(fields)-1): ret[_number(cpustat[0])][fields[i]] = _number(cpustat[i]) return ret def openbsd_cpustats(): ''' openbsd specific implementation of cpustats ''' systat = __salt__['cmd.run']('systat -s 2 -B cpu').splitlines() fields = systat[3].split() ret = {} for cpu in systat[4:]: cpu_line = cpu.split() cpu_idx = cpu_line[0] ret[cpu_idx] = {} for idx, field in enumerate(fields[1:]): ret[cpu_idx][field] = cpu_line[idx+1] return ret # dict that return a function that does the right thing per platform get_version = { 'Linux': linux_cpustats, 'FreeBSD': freebsd_cpustats, 'OpenBSD': openbsd_cpustats, 'SunOS': sunos_cpustats, 'AIX': aix_cpustats, } errmsg = 'This method is unsupported on the current operating system!' return get_version.get(__grains__['kernel'], lambda: errmsg)()
[ "def", "cpustats", "(", ")", ":", "def", "linux_cpustats", "(", ")", ":", "'''\n linux specific implementation of cpustats\n '''", "ret", "=", "{", "}", "try", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "'/proc/stat'", ",",...
32.739726
19.506849
def register(self, event, callable, priority=10): """Register interest in an event. event: name of the event (str) callable: the callable to be used as a callback function Returns an EventReceiver object. To unregister interest, simply delete the object.""" logger.debug('registered: ' + event + ': ' + repr(callable) + ' [' + repr(self) + ']') return EventReceiver(event, callable, manager=self, priority=priority)
[ "def", "register", "(", "self", ",", "event", ",", "callable", ",", "priority", "=", "10", ")", ":", "logger", ".", "debug", "(", "'registered: '", "+", "event", "+", "': '", "+", "repr", "(", "callable", ")", "+", "' ['", "+", "repr", "(", "self", ...
60.625
21.5
def console_getfd(self, ttynum=-1): """ Attach to console of running container. """ if not self.running: return False return _lxc.Container.console_getfd(self, ttynum)
[ "def", "console_getfd", "(", "self", ",", "ttynum", "=", "-", "1", ")", ":", "if", "not", "self", ".", "running", ":", "return", "False", "return", "_lxc", ".", "Container", ".", "console_getfd", "(", "self", ",", "ttynum", ")" ]
24.111111
15.666667
def _parseExceptionDirectory(self, rva, size, magic = consts.PE32): """ Parses the C{IMAGE_EXCEPTION_DIRECTORY} directory. @type rva: int @param rva: The RVA where the C{IMAGE_EXCEPTION_DIRECTORY} starts. @type size: int @param size: The size of the C{IMAGE_EXCEPTION_DIRECTORY} directory. @type magic: int @param magic: (Optional) The type of PE. This value could be L{consts.PE32} or L{consts.PE64}. @rtype: str @return: The C{IMAGE_EXCEPTION_DIRECTORY} data. """ return self.getDataAtRva(rva, size)
[ "def", "_parseExceptionDirectory", "(", "self", ",", "rva", ",", "size", ",", "magic", "=", "consts", ".", "PE32", ")", ":", "return", "self", ".", "getDataAtRva", "(", "rva", ",", "size", ")" ]
36.352941
23.176471
def create_tag(self, tag_name=None, **properties): """Creates a tag and adds it to the tag table of the TextBuffer. :param str tag_name: Name of the new tag, or None :param **properties: Keyword list of properties and their values :returns: A new tag. This is equivalent to creating a Gtk.TextTag and then adding the tag to the buffer's tag table. The returned tag is owned by the buffer's tag table. If ``tag_name`` is None, the tag is anonymous. If ``tag_name`` is not None, a tag called ``tag_name`` must not already exist in the tag table for this buffer. Properties are passed as a keyword list of names and values (e.g. foreground='DodgerBlue', weight=Pango.Weight.BOLD) """ tag = Gtk.TextTag(name=tag_name, **properties) self._get_or_create_tag_table().add(tag) return tag
[ "def", "create_tag", "(", "self", ",", "tag_name", "=", "None", ",", "*", "*", "properties", ")", ":", "tag", "=", "Gtk", ".", "TextTag", "(", "name", "=", "tag_name", ",", "*", "*", "properties", ")", "self", ".", "_get_or_create_tag_table", "(", ")",...
35.538462
21.269231
def starts_within(self, start=None, end=None): """ :return: normal occurrences that start within the given start and end datetimes, inclusive, and drop-in occurrences that """ qs = self if start: dt_start=coerce_dt_awareness(start) qs = qs.filter( Q(is_all_day=False, start__gte=dt_start) | Q(is_all_day=True, start__gte=zero_datetime(dt_start)) ) if end: dt_end=coerce_dt_awareness(end, t=time.max) qs = qs.filter( # Exclusive for datetime, inclusive for date. Q(is_all_day=False, start__lt=dt_end) | Q(is_all_day=True, start__lte=zero_datetime(dt_end)) ) return qs
[ "def", "starts_within", "(", "self", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "qs", "=", "self", "if", "start", ":", "dt_start", "=", "coerce_dt_awareness", "(", "start", ")", "qs", "=", "qs", ".", "filter", "(", "Q", "(", "is_...
33.26087
21.347826
def generate_search_space(code_dir): """Generate search space from Python source code. Return a serializable search space object. code_dir: directory path of source files (str) """ search_space = {} if code_dir.endswith(slash): code_dir = code_dir[:-1] for subdir, _, files in os.walk(code_dir): # generate module name from path if subdir == code_dir: package = '' else: assert subdir.startswith(code_dir + slash), subdir prefix_len = len(code_dir) + 1 package = subdir[prefix_len:].replace(slash, '.') + '.' for file_name in files: if file_name.endswith('.py'): path = os.path.join(subdir, file_name) module = package + file_name[:-3] search_space.update(_generate_file_search_space(path, module)) return search_space
[ "def", "generate_search_space", "(", "code_dir", ")", ":", "search_space", "=", "{", "}", "if", "code_dir", ".", "endswith", "(", "slash", ")", ":", "code_dir", "=", "code_dir", "[", ":", "-", "1", "]", "for", "subdir", ",", "_", ",", "files", "in", ...
33.884615
15.730769
def FileHacks(self): """Hacks to make the filesystem look normal.""" if sys.platform == "win32": import win32api # pylint: disable=g-import-not-at-top # Make the filesystem look like the topmost level are the drive letters. if self.path == "/": self.files = win32api.GetLogicalDriveStrings().split("\x00") # Remove empty strings and strip trailing backslashes. self.files = [drive.rstrip("\\") for drive in self.files if drive] # This regex will match the various windows devices. Raw hard disk devices # must be considered files, however in windows, if we try to list them as # directories this also works. Since the code above distinguished between # files and directories using the file listing property, we must force # treating raw devices as files. elif re.match(r"/*\\\\.\\[^\\]+\\?$", self.path) is not None: # Special case windows devices cant seek to the end so just lie about # the size self.size = 0x7fffffffffffffff # Windows raw devices can be opened in two incompatible modes. With a # trailing \ they look like a directory, but without they are the raw # device. In GRR we only support opening devices in raw mode so ensure # that we never append a \ to raw device name. self.path = self.path.rstrip("\\") # In windows raw devices must be accessed using sector alignment. self.alignment = 512 elif sys.platform == "darwin": # On Mac, raw disk devices are also not seekable to the end and have no # size so we use the same approach as on Windows. if re.match("/dev/r?disk.*", self.path): self.size = 0x7fffffffffffffff self.alignment = 512
[ "def", "FileHacks", "(", "self", ")", ":", "if", "sys", ".", "platform", "==", "\"win32\"", ":", "import", "win32api", "# pylint: disable=g-import-not-at-top", "# Make the filesystem look like the topmost level are the drive letters.", "if", "self", ".", "path", "==", "\"...
49.4
24
def _serve_individual_image(self, request): """Serves an individual image.""" run = request.args.get('run') tag = request.args.get('tag') index = int(request.args.get('index')) sample = int(request.args.get('sample', 0)) data = self._get_individual_image(run, tag, index, sample) image_type = imghdr.what(None, data) content_type = _IMGHDR_TO_MIMETYPE.get(image_type, _DEFAULT_IMAGE_MIMETYPE) return http_util.Respond(request, data, content_type)
[ "def", "_serve_individual_image", "(", "self", ",", "request", ")", ":", "run", "=", "request", ".", "args", ".", "get", "(", "'run'", ")", "tag", "=", "request", ".", "args", ".", "get", "(", "'tag'", ")", "index", "=", "int", "(", "request", ".", ...
47.3
10.4
def get_graph(graph=None, *, _limit=(), _print=()): """ Extracts a list of cafes with on euro in Paris, renames the name, address and zipcode fields, reorders the fields and formats to json and csv files. """ graph = graph or bonobo.Graph() producer = ( graph.get_cursor() >> ODSReader(dataset="liste-des-cafes-a-un-euro", netloc="opendata.paris.fr") >> PartialGraph(*_limit) >> bonobo.UnpackItems(0) >> bonobo.Rename(name="nom_du_cafe", address="adresse", zipcode="arrondissement") >> bonobo.Format(city="Paris", country="France") >> bonobo.OrderFields(["name", "address", "zipcode", "city", "country", "geometry", "geoloc"]) >> PartialGraph(*_print) ) # Comma separated values. graph.get_cursor(producer.output) >> bonobo.CsvWriter( "coffeeshops.csv", fields=["name", "address", "zipcode", "city"], delimiter="," ) # Standard JSON graph.get_cursor(producer.output) >> bonobo.JsonWriter(path="coffeeshops.json") # Line-delimited JSON graph.get_cursor(producer.output) >> bonobo.LdjsonWriter(path="coffeeshops.ldjson") return graph
[ "def", "get_graph", "(", "graph", "=", "None", ",", "*", ",", "_limit", "=", "(", ")", ",", "_print", "=", "(", ")", ")", ":", "graph", "=", "graph", "or", "bonobo", ".", "Graph", "(", ")", "producer", "=", "(", "graph", ".", "get_cursor", "(", ...
36.741935
27.774194
def _log_vector_matrix(vs, ms): """Multiply tensor of vectors by matrices assuming values stored are logs.""" return tf.reduce_logsumexp(input_tensor=vs[..., tf.newaxis] + ms, axis=-2)
[ "def", "_log_vector_matrix", "(", "vs", ",", "ms", ")", ":", "return", "tf", ".", "reduce_logsumexp", "(", "input_tensor", "=", "vs", "[", "...", ",", "tf", ".", "newaxis", "]", "+", "ms", ",", "axis", "=", "-", "2", ")" ]
46.5
21.25
def resources(ctx, gpu): """Get build job resources. Uses [Caching](/references/polyaxon-cli/#caching) Examples: \b ```bash $ polyaxon build -b 2 resources ``` For GPU resources \b ```bash $ polyaxon build -b 2 resources --gpu ``` """ user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build')) try: message_handler = Printer.gpu_resources if gpu else Printer.resources PolyaxonClient().build_job.resources(user, project_name, _build, message_handler=message_handler) except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e: Printer.print_error('Could not get resources for build job `{}`.'.format(_build)) Printer.print_error('Error message `{}`.'.format(e)) sys.exit(1)
[ "def", "resources", "(", "ctx", ",", "gpu", ")", ":", "user", ",", "project_name", ",", "_build", "=", "get_build_or_local", "(", "ctx", ".", "obj", ".", "get", "(", "'project'", ")", ",", "ctx", ".", "obj", ".", "get", "(", "'build'", ")", ")", "t...
31.533333
27.066667
def find_replace(obj, find, replace): """ Searches an object and performs a find and replace. Args: obj (object): The object to iterate and find/replace. find (str): The string to search for. replace (str): The string to replace with. Returns: object: The object with replaced strings. """ try: if isinstance(obj, dict): return {find_replace(key,find,replace): find_replace(value,find,replace) for key, value in obj.items()} elif isinstance(obj, list): return [find_replace(element,find,replace) for element in obj] elif obj == find: return unicode_convert(replace) else: try: return unicode_convert(find_replace_string(obj, find, replace)) #obj = unicode_convert(json.loads(obj)) #return find_replace(obj,find,replace) except: return unicode_convert(obj) except: line, filename, synerror = trace() raise ArcRestHelperError({ "function": "find_replace", "line": line, "filename": filename, "synerror": synerror, } ) finally: pass
[ "def", "find_replace", "(", "obj", ",", "find", ",", "replace", ")", ":", "try", ":", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "{", "find_replace", "(", "key", ",", "find", ",", "replace", ")", ":", "find_replace", "(", "value...
35.805556
17.138889
def conformPadding(cls, chars): """ Ensure alternate input padding formats are conformed to formats defined in PAD_MAP If chars is already a format defined in PAD_MAP, then it is returned unmodified. Example:: '#' -> '#' '@@@@' -> '@@@@' '%04d' -> '#' Args: chars (str): input padding chars Returns: str: conformed padding chars Raises: ValueError: If chars contains invalid padding characters """ pad = chars if pad and pad[0] not in PAD_MAP: pad = cls.getPaddingChars(cls.getPaddingNum(pad)) return pad
[ "def", "conformPadding", "(", "cls", ",", "chars", ")", ":", "pad", "=", "chars", "if", "pad", "and", "pad", "[", "0", "]", "not", "in", "PAD_MAP", ":", "pad", "=", "cls", ".", "getPaddingChars", "(", "cls", ".", "getPaddingNum", "(", "pad", ")", "...
25.961538
19.115385
def connect(self): """ Creates the connection with the redis server. Return ``True`` if the connection works, else returns ``False``. It does not take any arguments. :return: ``Boolean`` value .. note:: After creating the ``Queue`` object the user should call the ``connect`` method to create the connection. .. doctest:: >>> from retask import Queue >>> q = Queue('test') >>> q.connect() True """ config = self.config self.rdb = redis.Redis(config['host'], config['port'], config['db'],\ config['password']) try: info = self.rdb.info() self.connected = True except redis.ConnectionError: return False return True
[ "def", "connect", "(", "self", ")", ":", "config", "=", "self", ".", "config", "self", ".", "rdb", "=", "redis", ".", "Redis", "(", "config", "[", "'host'", "]", ",", "config", "[", "'port'", "]", ",", "config", "[", "'db'", "]", ",", "config", "...
26.645161
20.258065
def _set_exit_timeout(self, timeout, reason): """Set a timeout for the remainder of the session, along with an exception to raise. which is implemented by NailgunProtocol. This method may be called by a signal handler to set a timeout for the remainder of the session. If the session completes before the timeout does, the exception in `reason` is raised. Otherwise, `NailgunProtocol.ProcessStreamTimeout` is raised. :param float timeout: The length of time to time out, in seconds. :param Exception reason: The exception to raise if the session completes before the timeout occurs. """ self._exit_timeout_start_time = time.time() self._exit_timeout = timeout self._exit_reason = reason
[ "def", "_set_exit_timeout", "(", "self", ",", "timeout", ",", "reason", ")", ":", "self", ".", "_exit_timeout_start_time", "=", "time", ".", "time", "(", ")", "self", ".", "_exit_timeout", "=", "timeout", "self", ".", "_exit_reason", "=", "reason" ]
49.8
22.4
def _set_interface_type(self, v, load=False): """ Setter method for interface_type, mapped from YANG variable /brocade_interface_ext_rpc/get_interface_detail/input/interface_type (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_interface_type is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_interface_type() directly. YANG Description: The type of the interface. An 'unknown' type represents error scenario and should not be used. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'port-channel': {'value': 5}, u'loopback': {'value': 7}, u'fortygigabitethernet': {'value': 4}, u'unknown': {'value': 1}, u'gigabitethernet': {'value': 2}, u'tengigabitethernet': {'value': 3}, u'hundredgigabitethernet': {'value': 9}, u'fibrechannel': {'value': 8}, u'l2vlan': {'value': 6}},), is_leaf=True, yang_name="interface-type", rest_name="interface-type", parent=self, choice=(u'request-type', u'get-request'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u"The type of the interface. 
An 'unknown' type \nrepresents error scenario and should not be used."}}, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='enumeration', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """interface_type must be of a type compatible with enumeration""", 'defined-type': "brocade-interface-ext:enumeration", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'port-channel': {'value': 5}, u'loopback': {'value': 7}, u'fortygigabitethernet': {'value': 4}, u'unknown': {'value': 1}, u'gigabitethernet': {'value': 2}, u'tengigabitethernet': {'value': 3}, u'hundredgigabitethernet': {'value': 9}, u'fibrechannel': {'value': 8}, u'l2vlan': {'value': 6}},), is_leaf=True, yang_name="interface-type", rest_name="interface-type", parent=self, choice=(u'request-type', u'get-request'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u"The type of the interface. An 'unknown' type \nrepresents error scenario and should not be used."}}, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='enumeration', is_config=True)""", }) self.__interface_type = t if hasattr(self, '_set'): self._set()
[ "def", "_set_interface_type", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", ...
115.4
57.04
def _heuristic_bin_width(obs): """Optimal histogram bin width based on the Freedman-Diaconis rule""" IQR = sp.percentile(obs, 75) - sp.percentile(obs, 25) N = len(obs) return 2*IQR*N**(-1/3)
[ "def", "_heuristic_bin_width", "(", "obs", ")", ":", "IQR", "=", "sp", ".", "percentile", "(", "obs", ",", "75", ")", "-", "sp", ".", "percentile", "(", "obs", ",", "25", ")", "N", "=", "len", "(", "obs", ")", "return", "2", "*", "IQR", "*", "N...
40.4
13
def _index(*args, **kwargs): """Implementation of list searching. :param of: Element to search for :param where: Predicate to search for :param in_: List to search in :param start: Start index for the lookup :param step: Counter step (i.e. in/decrement) for each iteration :return: Pair of ``(list, index)``, where ``list`` is the list we searched in and ``index`` is the index of the first element found, or -1 """ start = kwargs.pop('start', 0) step = kwargs.pop('step', 1) if len(args) == 2: elem, list_ = args ensure_sequence(list_) predicate = lambda item: item == elem else: ensure_keyword_args(kwargs, mandatory=('in_',), optional=('of', 'where')) if 'of' in kwargs and 'where' in kwargs: raise TypeError( "either an item or predicate must be supplied, not both") if not ('of' in kwargs or 'where' in kwargs): raise TypeError("an item or predicate must be supplied") list_ = ensure_sequence(kwargs['in_']) if 'where' in kwargs: predicate = ensure_callable(kwargs['where']) else: elem = kwargs['of'] predicate = lambda item: item == elem len_ = len(list_) start = max(0, min(len_ - 1, start)) i = start while 0 <= i < len_: if predicate(list_[i]): return list_, i i += step else: return list_, -1
[ "def", "_index", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "start", "=", "kwargs", ".", "pop", "(", "'start'", ",", "0", ")", "step", "=", "kwargs", ".", "pop", "(", "'step'", ",", "1", ")", "if", "len", "(", "args", ")", "==", "2"...
31.934783
17.391304
def to_xml(self): """ Serialize all properties as XML """ ret = '<exif>' for k in self.__dict__: ret += '<%s>%s</%s>' % (k, self.__dict__[k], k) ret += '</exif>' return ret
[ "def", "to_xml", "(", "self", ")", ":", "ret", "=", "'<exif>'", "for", "k", "in", "self", ".", "__dict__", ":", "ret", "+=", "'<%s>%s</%s>'", "%", "(", "k", ",", "self", ".", "__dict__", "[", "k", "]", ",", "k", ")", "ret", "+=", "'</exif>'", "re...
25.777778
12
def send(self, target, topic, content): """ Fires a message """ event = threading.Event() results = [] def got_message(sender, content): results.append(content) event.set() self.post(target, topic, content, got_message) event.wait() return results
[ "def", "send", "(", "self", ",", "target", ",", "topic", ",", "content", ")", ":", "event", "=", "threading", ".", "Event", "(", ")", "results", "=", "[", "]", "def", "got_message", "(", "sender", ",", "content", ")", ":", "results", ".", "append", ...
22.133333
16
def delete_all_volumes(self): """Remove all the volumes. Only the manager nodes can delete a volume """ # Raise an exception if we are not a manager if not self._manager: raise RuntimeError('Volumes can only be deleted ' 'on swarm manager nodes') volume_list = self.get_volume_list() for volumes in volume_list: # Remove all the services self._api_client.remove_volume(volumes, force=True)
[ "def", "delete_all_volumes", "(", "self", ")", ":", "# Raise an exception if we are not a manager", "if", "not", "self", ".", "_manager", ":", "raise", "RuntimeError", "(", "'Volumes can only be deleted '", "'on swarm manager nodes'", ")", "volume_list", "=", "self", ".",...
35.785714
14
def kwargs_helper(kwargs):
    """This function preprocesses the kwargs dictionary to sanitize it.

    Parameter names are rewritten through the module-level ``kw_subst``
    substitution table; returns a list of ``(param, value)`` pairs.
    """
    return [(kw_subst.get(name, name), value)
            for name, value in kwargs.items()]
[ "def", "kwargs_helper", "(", "kwargs", ")", ":", "args", "=", "[", "]", "for", "param", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "param", "=", "kw_subst", ".", "get", "(", "param", ",", "param", ")", "args", ".", "append", "(", ...
30.5
14.25
def processors(self, processor_name=None):
    """Return a list of Processor objects.

    :param processor_name: if given (truthy), restrict the listing to
        processors with this name; otherwise list all processors
    :rtype: list of Processor objects
    """
    endpoint = self.api.processor
    if processor_name:
        response = endpoint.get(name=processor_name)
    else:
        response = endpoint.get()
    return response['objects']
[ "def", "processors", "(", "self", ",", "processor_name", "=", "None", ")", ":", "if", "processor_name", ":", "return", "self", ".", "api", ".", "processor", ".", "get", "(", "name", "=", "processor_name", ")", "[", "'objects'", "]", "else", ":", "return"...
32.666667
16.083333
def sync_agg_metric(self, unique_identifier, metric, start_date, end_date):
    """Recompute the weekly and monthly counters for ``metric`` of
    ``unique_identifier`` from the per-day counts in the date range.

    Useful after ``set_metric_by_day`` to bring the week/month
    aggregates back in sync.  The redis backend supports lists for both
    ``unique_identifier`` and ``metric``, allowing several metrics for
    several identifiers to be synced efficiently; not all backends may
    support this.

    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param start_date: Date syncing starts
    :param end_date: Date syncing end
    """
    # Week aggregates first, then month aggregates, as before.
    for sync in (self.sync_week_metric, self.sync_month_metric):
        sync(unique_identifier, metric, start_date, end_date)
[ "def", "sync_agg_metric", "(", "self", ",", "unique_identifier", ",", "metric", ",", "start_date", ",", "end_date", ")", ":", "self", ".", "sync_week_metric", "(", "unique_identifier", ",", "metric", ",", "start_date", ",", "end_date", ")", "self", ".", "sync_...
64.733333
39.266667
def categorical__int(self, column_name, output_column_prefix):
    """
    Interprets an integer column as a categorical variable by casting
    its values to strings.
    """
    transform = _ColumnFunctionTransformation(
        features=[column_name],
        output_column_prefix=output_column_prefix,
        transform_function=lambda col: col.astype(str),
        transform_function_name="astype(str)")
    return [transform]
[ "def", "categorical__int", "(", "self", ",", "column_name", ",", "output_column_prefix", ")", ":", "return", "[", "_ColumnFunctionTransformation", "(", "features", "=", "[", "column_name", "]", ",", "output_column_prefix", "=", "output_column_prefix", ",", "transform_...
40
14.4
def stop(self):
    '''Stop the rpc server.

    First asks the running uiautomator server process to stop itself via
    its HTTP stop URI (killing it outright if that fails), then
    best-effort kills any stray uiautomator processes still visible in
    ``ps`` on the device.
    '''
    # Only act if we spawned a server process and it is still alive.
    if self.uiautomator_process and self.uiautomator_process.poll() is None:
        res = None
        try:
            # Ask the server to shut down over HTTP, then wait for exit.
            res = urllib2.urlopen(self.stop_uri)
            self.uiautomator_process.wait()
        except:
            # Graceful shutdown failed -- kill the process outright.
            self.uiautomator_process.kill()
        finally:
            if res is not None:
                res.close()
        self.uiautomator_process = None
    # Best effort: kill leftover uiautomator processes on the device.
    # Any failure here (parsing, adb errors) is deliberately ignored.
    try:
        out = self.adb.cmd("shell", "ps", "-C", "uiautomator").communicate()[0].decode("utf-8").strip().splitlines()
        if out:
            # First line of `ps` output is the header; locate the PID column.
            index = out[0].split().index("PID")
            for line in out[1:]:
                if len(line.split()) > index:
                    self.adb.cmd("shell", "kill", "-9", line.split()[index]).wait()
    except:
        pass
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "uiautomator_process", "and", "self", ".", "uiautomator_process", ".", "poll", "(", ")", "is", "None", ":", "res", "=", "None", "try", ":", "res", "=", "urllib2", ".", "urlopen", "(", "self", "...
39.545455
20.454545
def compute_center(
    feed: "Feed", num_busiest_stops: Optional[int] = None
) -> Tuple:
    """
    Return the centroid (WGS84 longitude-latitude pair) of the convex
    hull of the stops of the given Feed.

    If ``num_busiest_stops`` (integer) is given, compute that many
    busiest stops in the feed on the first Monday of the feed and return
    the mean of the longitudes and the mean of the latitudes of those
    stops instead.
    """
    stops = feed.stops.copy()

    def hull_centroid():
        # Centroid of the convex hull over all stops: (lon, lat).
        hull = compute_convex_hull(feed)
        return list(hull.centroid.coords)[0]

    if num_busiest_stops is None:
        return hull_centroid()

    date = feed.get_first_week()[0]
    stats = feed.compute_stop_stats([date]).sort_values(
        "num_trips", ascending=False
    )
    if stats.stop_id.isnull().all():
        # No stats, which could happen with a crappy feed.
        # Fall back to all stops.
        return hull_centroid()

    busiest = stops.merge(stats.head(num_busiest_stops))
    return busiest["stop_lon"].mean(), busiest["stop_lat"].mean()
[ "def", "compute_center", "(", "feed", ":", "\"Feed\"", ",", "num_busiest_stops", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Tuple", ":", "s", "=", "feed", ".", "stops", ".", "copy", "(", ")", "if", "num_busiest_stops", "is", "None", ":"...
35.545455
13.909091
def query(*args, **kwargs):
    '''
    Query the node for specific information.

    Parameters:

    * **scope**: Specify scope of the query.

       * **System**: Return system data.
       * **Software**: Return software information.
       * **Services**: Return known services.
       * **Identity**: Return user accounts information for this system.

          accounts
            Can be either 'local', 'remote' or 'all' (equal to "local,remote").
            Remote accounts cannot be resolved on all systems, but only
            those, which supports 'passwd -S -a'.

          disabled
            True (or False, default) to return only disabled accounts.

       * **payload**: Payload scope parameters:

          filter
            Include only results which path starts from the filter string.

          time
            Display time in Unix ticks or format according to the configured TZ (default)
            Values: ticks, tz (default)

          size
            Format size. Values: B, KB, MB, GB

          type
            Include payload type.
            Values (comma-separated): directory (or dir), link, file (default)
            Example (returns everything): type=directory,link,file

          owners
            Resolve UID/GID to an actual names or leave them numeric (default).
            Values: name (default), id

          brief
            Return just a list of payload elements, if True. Default: False.

    * **all**: Return all information (default).

    CLI Example:

    .. code-block:: bash

        salt '*' inspector.query scope=system
        salt '*' inspector.query scope=payload type=file,link filter=/etc size=Kb brief=False
    '''
    inspector = _("query")
    try:
        handler = inspector.Query(kwargs.get('scope'),
                                  cachedir=__opts__['cachedir'])
        return handler(*args, **kwargs)
    except InspectorQueryException as ex:
        raise CommandExecutionError(ex)
    except Exception as ex:
        # Unexpected failure: log the root cause, then re-raise wrapped.
        log.error(_get_error_message(ex))
        raise Exception(ex)
[ "def", "query", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "query", "=", "_", "(", "\"query\"", ")", "try", ":", "return", "query", ".", "Query", "(", "kwargs", ".", "get", "(", "'scope'", ")", ",", "cachedir", "=", "__opts__", "[", "'c...
30.793651
25.904762
def maybe_convert_to_index_date_type(index, date):
    """Convert a datetime-like object to the index's date type.

    Datetime indexing in xarray can be done with either a pandas
    DatetimeIndex or a CFTimeIndex.  Both support partial-datetime
    string indexing regardless of the calendar of the underlying data,
    so strings are returned unchanged.  Any other datetime-like object
    is converted to the underlying date type of the index: np.datetime64
    for a DatetimeIndex; a calendar-specific cftime.datetime subclass
    for a CFTimeIndex.

    Parameters
    ----------
    index : pd.Index
        Input time index
    date : datetime-like object or str
        Input datetime

    Returns
    -------
    date of the type appropriate for the time index of the Dataset
    """
    # Partial-datetime strings work with both index types; pass through.
    if isinstance(date, str):
        return date

    if isinstance(index, pd.DatetimeIndex):
        if isinstance(date, np.datetime64):
            return date
        return np.datetime64(str(date))

    # CFTimeIndex case: coerce to the calendar-specific date type.
    date_type = index.date_type
    if isinstance(date, date_type):
        return date
    if isinstance(date, np.datetime64):
        # Convert to a datetime.date or datetime.datetime object.
        date = date.item()
    if isinstance(date, datetime.date):
        # Convert to a datetime.datetime object (midnight for plain dates).
        date = datetime.datetime.combine(
            date, datetime.datetime.min.time())
    return date_type(date.year, date.month, date.day, date.hour,
                     date.minute, date.second, date.microsecond)
[ "def", "maybe_convert_to_index_date_type", "(", "index", ",", "date", ")", ":", "if", "isinstance", "(", "date", ",", "str", ")", ":", "return", "date", "if", "isinstance", "(", "index", ",", "pd", ".", "DatetimeIndex", ")", ":", "if", "isinstance", "(", ...
36.148936
21.510638
def get_keywords(lexer):
    """Get the keywords for a given lexer.

    Returns a flat list of keyword strings, either taken directly from
    the lexer's ``keywords`` token group (when present) or scraped out
    of its regex token definitions.  Returns an empty list when the
    lexer exposes no ``tokens`` attribute.
    """
    if not hasattr(lexer, 'tokens'):
        return []
    if 'keywords' in lexer.tokens:
        # Fast path: keyword words are listed explicitly.
        # NOTE: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        try:
            return lexer.tokens['keywords'][0][0].words
        except Exception:
            pass
    keywords = []
    for vals in lexer.tokens.values():
        for val in vals:
            try:
                if isinstance(val[0], words):
                    keywords.extend(val[0].words)
                else:
                    # Fall back to extracting keywords out of the raw
                    # regex pattern text.
                    ini_val = val[0]
                    if ')\\b' in val[0] or ')(\\s+)' in val[0]:
                        val = re.sub(r'\\.', '', val[0])
                        val = re.sub(r'[^0-9a-zA-Z|]+', '', val)
                        if '|' in ini_val:
                            keywords.extend(val.split('|'))
                        else:
                            keywords.append(val)
            except Exception:
                continue
    return keywords
[ "def", "get_keywords", "(", "lexer", ")", ":", "if", "not", "hasattr", "(", "lexer", ",", "'tokens'", ")", ":", "return", "[", "]", "if", "'keywords'", "in", "lexer", ".", "tokens", ":", "try", ":", "return", "lexer", ".", "tokens", "[", "'keywords'", ...
33.464286
14.071429
def set_code_exprs(self, codes):
    """Convenience: sets all the code expressions at once."""
    # Reset any previously compiled expressions before re-adding.
    self.code_objs = {}
    self._codes = []
    for expression in codes:
        self.append_code_expr(expression)
[ "def", "set_code_exprs", "(", "self", ",", "codes", ")", ":", "self", ".", "code_objs", "=", "dict", "(", ")", "self", ".", "_codes", "=", "[", "]", "for", "code", "in", "codes", ":", "self", ".", "append_code_expr", "(", "code", ")" ]
36.166667
8
def data_mod(self, *args, **kwargs):
    """Register a function to modify data of member Instruments.

    The call is forwarded unchanged to every member instrument and
    queued in its ``pysat.Custom`` queue (wraps ``pysat.Custom.add``).

    Parameters
    ----------
    function : string or function object
        name of function or function object to be added to queue
    kind : {'add', 'modify', 'pass'}
        'add' attaches the function's return data to the instrument,
        'modify' hands the instrument object to the function and keeps
        any changes, 'pass' gives the function a copy and ignores its
        return value.
    at_pos : string or int
        insert at position (default: insert at end).
    args : extra arguments

    Note
    ----
    Allowed `add` function returns:

    - {'data' : pandas Series/DataFrame/array_like,
       'units' : string/array_like of strings,
       'long_name' : string/array_like of strings,
       'name' : string/array_like of strings (iff data array_like)}
    - pandas DataFrame, names of columns are used
    - pandas Series, .name required
    - (string/list of strings, numpy array/list of arrays)
    """
    for member in self.instruments:
        member.custom.add(*args, **kwargs)
[ "def", "data_mod", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "instrument", "in", "self", ".", "instruments", ":", "instrument", ".", "custom", ".", "add", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
35.265306
23.877551
def run(self):
    '''Run until there are no events to be processed.'''
    # Left-append rather than emit (right-append): other messages may
    # already be queued for execution before the director runs, and the
    # INITIATE event must be handled first.
    queue = global_event_queue
    queue.appendleft((INITIATE, self, (), {}))
    while queue:
        self.process_event(queue.popleft())
[ "def", "run", "(", "self", ")", ":", "# We left-append rather than emit (right-append) because some message", "# may have been already queued for execution before the director runs.", "global_event_queue", ".", "appendleft", "(", "(", "INITIATE", ",", "self", ",", "(", ")", ","...
55
24.428571
def generate_from_yaml(pseudo_ast, language):
    '''
    generate output code in `language`

    Converts the yaml input into pseudo's internal Node-based tree and
    passes it to `generate`.
    '''
    tree = pseudo.loader.as_tree(pseudo_ast)
    return pseudo.generate(tree, language)
[ "def", "generate_from_yaml", "(", "pseudo_ast", ",", "language", ")", ":", "return", "pseudo", ".", "generate", "(", "pseudo", ".", "loader", ".", "as_tree", "(", "pseudo_ast", ")", ",", "language", ")" ]
28.666667
24.666667
def logical_chassis_fwdl_sanity_input_host(self, **kwargs):
    """Auto Generated Code

    Builds <logical_chassis_fwdl_sanity><input><host>...</host></input>
    and hands the sanity element to the callback (``callback`` kwarg,
    defaulting to ``self._callback``).
    """
    root = ET.Element("logical_chassis_fwdl_sanity")
    input_el = ET.SubElement(root, "input")
    host_el = ET.SubElement(input_el, "host")
    host_el.text = kwargs.pop('host')

    callback = kwargs.pop('callback', self._callback)
    return callback(root)
[ "def", "logical_chassis_fwdl_sanity_input_host", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "logical_chassis_fwdl_sanity", "=", "ET", ".", "Element", "(", "\"logical_chassis_fwdl_sanity\"", ")", "co...
41.333333
13.583333
def _fetch_dataframe(self): """Return a pandas dataframe with all the training jobs, along with their hyperparameters, results, and metadata. This also includes a column to indicate if a training job was the best seen so far. """ def reshape(training_summary): # Helper method to reshape a single training job summary into a dataframe record out = {} for k, v in training_summary['TunedHyperParameters'].items(): # Something (bokeh?) gets confused with ints so convert to float try: v = float(v) except (TypeError, ValueError): pass out[k] = v out['TrainingJobName'] = training_summary['TrainingJobName'] out['TrainingJobStatus'] = training_summary['TrainingJobStatus'] out['FinalObjectiveValue'] = training_summary.get('FinalHyperParameterTuningJobObjectiveMetric', {}).get('Value') start_time = training_summary.get('TrainingStartTime', None) end_time = training_summary.get('TrainingEndTime', None) out['TrainingStartTime'] = start_time out['TrainingEndTime'] = end_time if start_time and end_time: out['TrainingElapsedTimeSeconds'] = (end_time - start_time).total_seconds() return out # Run that helper over all the summaries. df = pd.DataFrame([reshape(tjs) for tjs in self.training_job_summaries()]) return df
[ "def", "_fetch_dataframe", "(", "self", ")", ":", "def", "reshape", "(", "training_summary", ")", ":", "# Helper method to reshape a single training job summary into a dataframe record", "out", "=", "{", "}", "for", "k", ",", "v", "in", "training_summary", "[", "'Tune...
52.566667
23.833333
def ConsultarCTGExcel(self, numero_carta_de_porte=None, numero_ctg=None,
                      patente=None, cuit_solicitante=None, cuit_destino=None,
                      fecha_emision_desde=None, fecha_emision_hasta=None,
                      archivo="planilla.xls"):
    "Operation that queries CTGs and saves the result as an XLS spreadsheet"
    # Issue the consultarCTGExcel SOAP call with the authentication block
    # plus the (optional) filter criteria, and keep the 'response' payload.
    ret = self.client.consultarCTGExcel(request=dict(
                        auth={
                            'token': self.Token, 'sign': self.Sign,
                            'cuitRepresentado': self.Cuit, },
                        consultarCTGDatos=dict(
                            cartaPorte=numero_carta_de_porte,
                            ctg=numero_ctg,
                            patente=patente,
                            cuitSolicitante=cuit_solicitante,
                            cuitDestino=cuit_destino,
                            fechaEmisionDesde=fecha_emision_desde,
                            fechaEmisionHasta=fecha_emision_hasta,
                        )))['response']
    # Raise/record any service-level errors reported in the response.
    self.__analizar_errores(ret)
    # The spreadsheet comes back base64-encoded; decode and write it out.
    datos = base64.b64decode(ret.get('archivo') or "")
    f = open(archivo, "wb")
    f.write(datos)
    f.close()
    return True
[ "def", "ConsultarCTGExcel", "(", "self", ",", "numero_carta_de_porte", "=", "None", ",", "numero_ctg", "=", "None", ",", "patente", "=", "None", ",", "cuit_solicitante", "=", "None", ",", "cuit_destino", "=", "None", ",", "fecha_emision_desde", "=", "None", ",...
50.875
17.958333
def _send_to_address(self, address, data, timeout=10): """send data to *address* and *port* without verification of response. """ # Socket to talk to server socket = get_context().socket(REQ) try: socket.setsockopt(LINGER, timeout * 1000) if address.find(":") == -1: socket.connect("tcp://%s:%d" % (address, self.default_port)) else: socket.connect("tcp://%s" % address) socket.send_string(data) message = socket.recv_string() if message != "ok": LOGGER.warn("invalid acknowledge received: %s" % message) finally: socket.close()
[ "def", "_send_to_address", "(", "self", ",", "address", ",", "data", ",", "timeout", "=", "10", ")", ":", "# Socket to talk to server", "socket", "=", "get_context", "(", ")", ".", "socket", "(", "REQ", ")", "try", ":", "socket", ".", "setsockopt", "(", ...
38.444444
14.5
def login(self, **params):
    """ **login**

    Use the current credentials to get a valid Gett access token.

    Input:
        * A dict of parameters to use for the login attempt (optional);
          defaults to the apikey/email/password stored on this object.

    Output:
        * ``True`` on success (HTTP 200), ``None`` otherwise.

    Example::

        if client.user.login():
            print "You have %s bytes of storage remaining." % (
                client.user.storage_limit - client_user.storage_used )
    """
    if not params:
        params = {
            "apikey": self.apikey,
            "email": self.email,
            "password": self.password
        }

    response = GettRequest().post("/users/login", params)
    if response.http_status != 200:
        return None

    body = response.response
    self._access_token = body['accesstoken']
    self.refresh_token = body['refreshtoken']
    self.access_token_expires = int(time()) + body['expires']
    user = body['user']
    self.userid = user['userid']
    self.fullname = user['fullname']
    self.storage_used = user['storage']['used']
    self.storage_limit = user['storage']['limit']
    return True
[ "def", "login", "(", "self", ",", "*", "*", "params", ")", ":", "if", "not", "params", ":", "params", "=", "{", "\"apikey\"", ":", "self", ".", "apikey", ",", "\"email\"", ":", "self", ".", "email", ",", "\"password\"", ":", "self", ".", "password", ...
33
25.972973
def accounts(self):
    """Ask the bank for the known :py:class:`ofxclient.Account` list.

    :rtype: list of :py:class:`ofxclient.Account` objects
    """
    from ofxclient.account import Account
    client = self.client()
    raw = client.post(client.account_list_query())
    handle = StringIO(raw)
    # ofxparse wants text on py2 but bytes on py3.
    if IS_PYTHON_2:
        parsed = OfxParser.parse(handle)
    else:
        parsed = OfxParser.parse(BytesIO(handle.read().encode()))
    return [Account.from_ofxparse(a, institution=self)
            for a in parsed.accounts]
[ "def", "accounts", "(", "self", ")", ":", "from", "ofxclient", ".", "account", "import", "Account", "client", "=", "self", ".", "client", "(", ")", "query", "=", "client", ".", "account_list_query", "(", ")", "resp", "=", "client", ".", "post", "(", "q...
33.833333
16.5
def get(self, k):
    """Return the stored entry for *k* as a tuple, or None if absent.

    Per the original note the stored entry appears to hold the key's
    contents and modify time -- TODO confirm against the writer side.
    Re-reads the backing store first when it has changed.
    """
    if self._changed():
        self._read()
    if k not in self.store:
        return None
    return tuple(self.store[k])
[ "def", "get", "(", "self", ",", "k", ")", ":", "if", "self", ".", "_changed", "(", ")", ":", "self", ".", "_read", "(", ")", "if", "k", "in", "self", ".", "store", ":", "return", "tuple", "(", "self", ".", "store", "[", "k", "]", ")", "else",...
24.555556
16.666667
def load_cufflinks(self, filter_ok=True):
    """
    Load Cufflinks gene expression data for a cohort.

    Parameters
    ----------
    filter_ok : bool, optional
        If true, filter Cufflinks data to rows with FPKM_status == "OK"

    Returns
    -------
    cufflinks_data : Pandas dataframe
        Cufflinks data for all patients; columns include patient_id,
        gene_id, gene_short_name, FPKM, FPKM_conf_lo, FPKM_conf_hi
    """
    per_patient = [self._load_single_patient_cufflinks(patient, filter_ok)
                   for patient in self]
    return pd.concat(per_patient, copy=False)
[ "def", "load_cufflinks", "(", "self", ",", "filter_ok", "=", "True", ")", ":", "return", "pd", ".", "concat", "(", "[", "self", ".", "_load_single_patient_cufflinks", "(", "patient", ",", "filter_ok", ")", "for", "patient", "in", "self", "]", ",", "copy", ...
33.3
22.9
def polygons_full(self):
    """
    A list of shapely.geometry.Polygon objects with interiors created
    by checking which closed polygons enclose which other polygons.

    Returns
    ---------
    full : (len(self.root),) shapely.geometry.Polygon
      Polygons containing interiors
    """
    # Cache attribute lookups once to avoid cache thrashing.
    enclosure = self.enclosure_directed
    closed = self.polygons_closed

    # Pre-allocate the result to avoid indexing problems.
    result = [None] * len(self.root)
    for i, root in enumerate(self.root):
        # Closed polygons fully contained by this root curve.
        children = [closed[child] for child in enclosure[root].keys()]
        # All polygons_closed are CCW, so reverse them for interiors.
        holes = [np.array(c.exterior.coords)[::-1] for c in children]
        # Build the polygon with its interiors and repair if invalid.
        result[i] = polygons.repair_invalid(
            Polygon(shell=closed[root].exterior, holes=holes))
    # Return as an array so advanced indexing works.
    return np.array(result)
[ "def", "polygons_full", "(", "self", ")", ":", "# pre- allocate the list to avoid indexing problems", "full", "=", "[", "None", "]", "*", "len", "(", "self", ".", "root", ")", "# store the graph to avoid cache thrashing", "enclosure", "=", "self", ".", "enclosure_dire...
39.628571
14.657143
def _get_site(self, url, headers, cookies, timeout, driver_args, driver_kwargs):
    """ Try and return page content in the requested format using selenium

    Navigates the driver to *url* with the given page-load *timeout*,
    records the resulting status code and final URL on ``self``, and
    returns the page source on success (status < 400).  Timeouts are
    logged (and reported to scraper_monitor when available); any other
    driver error is re-raised with its original traceback.  Raises
    SeleniumHTTPError for HTTP status codes >= 400.
    NOTE(review): headers/cookies/driver_args/driver_kwargs are accepted
    but unused here -- presumably consumed elsewhere; confirm.
    """
    try:
        # **TODO**: Find what exception this will throw and catch it and call
        #   self.driver.execute_script("window.stop()")
        #   Then still try and get the source from the page
        self.driver.set_page_load_timeout(timeout)
        self.driver.get(url)
        header_data = self.get_selenium_header()
        status_code = header_data['status-code']
        # Set data to access from script
        self.status_code = status_code
        self.url = self.driver.current_url
    except TimeoutException:
        logger.warning("Page timeout: {}".format(url))
        # Best-effort report to the (optional) scraper monitor.
        try:
            scraper_monitor.failed_url(url, 'Timeout')
        except (NameError, AttributeError):
            # Happens when scraper_monitor is not being used/setup
            pass
        except Exception:
            logger.exception("Unknown problem with scraper_monitor sending a failed url")
    except Exception as e:
        # Preserve the original traceback when re-raising.
        raise e.with_traceback(sys.exc_info()[2])
    else:
        # If an exception was not thrown then check the http status code
        if status_code < 400:
            # If the http status code is not an error
            return self.driver.page_source
        else:
            # If http status code is 400 or greater
            raise SeleniumHTTPError("Status code >= 400", status_code=status_code)
[ "def", "_get_site", "(", "self", ",", "url", ",", "headers", ",", "cookies", ",", "timeout", ",", "driver_args", ",", "driver_kwargs", ")", ":", "try", ":", "# **TODO**: Find what exception this will throw and catch it and call", "# self.driver.execute_script(\"window.sto...
41.025641
21.282051
def remove_post_process(self, name):
    """remove a post-process

    Parameters
    ----------
    name : str
        name of the post-process to remove.
    """
    kept = []
    for process in self._pprocesses:
        if process.name != name:
            kept.append(process)
    self._pprocesses = kept
[ "def", "remove_post_process", "(", "self", ",", "name", ")", ":", "self", ".", "_pprocesses", "=", "[", "post_process", "for", "post_process", "in", "self", ".", "_pprocesses", "if", "post_process", ".", "name", "!=", "name", "]" ]
31
14.363636
def _global_step(hparams):
    """Adjust global step if a multi-step optimizer is used."""
    step = tf.to_float(tf.train.get_or_create_global_step())
    divisor = hparams.optimizer_multistep_accumulate_steps
    # No accumulation configured: report the raw global step.
    if not divisor:
        return step
    tf.logging.info("Dividing global step by %d for multi-step optimizer."
                    % divisor)
    return step / tf.to_float(divisor)
[ "def", "_global_step", "(", "hparams", ")", ":", "step", "=", "tf", ".", "to_float", "(", "tf", ".", "train", ".", "get_or_create_global_step", "(", ")", ")", "multiplier", "=", "hparams", ".", "optimizer_multistep_accumulate_steps", "if", "not", "multiplier", ...
38.1
17.8
def parse_header(data, verbose=False, *args, **kwargs): """Parse the data using the grammar specified in this module :param str data: delimited data to be parsed for metadata :return list parsed_data: structured metadata """ # the parser if verbose: print >> sys.stderr, "Creating parser object..." parser = Parser(amira_header_grammar) # the processor if verbose: print >> sys.stderr, "Defining dispatch processor..." amira_processor = AmiraDispatchProcessor() # parsing if verbose: print >> sys.stderr, "Parsing data..." success, parsed_data, next_item = parser.parse(data, production='amira', processor=amira_processor) if success: if verbose: print >> sys.stderr, "Successfully parsed data..." return parsed_data else: raise TypeError("Parse: {}\nNext: {}\n".format(parsed_data, next_item))
[ "def", "parse_header", "(", "data", ",", "verbose", "=", "False", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# the parser", "if", "verbose", ":", "print", ">>", "sys", ".", "stderr", ",", "\"Creating parser object...\"", "parser", "=", "Parser",...
33.666667
22.296296
def _active_mounts(ret):
    '''
    List active mounts on Linux systems
    '''
    _list = _list_mounts()
    filename = '/proc/self/mounts'
    if not os.access(filename, os.R_OK):
        msg = 'File not readable {0}'
        raise CommandExecutionError(msg.format(filename))

    with salt.utils.files.fopen(filename) as ifile:
        for line in ifile:
            fields = salt.utils.stringutils.to_unicode(line).split()
            mount_point = fields[1]
            ret[mount_point] = {
                'device': fields[0],
                'alt_device': _list.get(mount_point, None),
                'fstype': fields[2],
                'opts': _resolve_user_group_names(fields[3].split(',')),
            }
    return ret
[ "def", "_active_mounts", "(", "ret", ")", ":", "_list", "=", "_list_mounts", "(", ")", "filename", "=", "'/proc/self/mounts'", "if", "not", "os", ".", "access", "(", "filename", ",", "os", ".", "R_OK", ")", ":", "msg", "=", "'File not readable {0}'", "rais...
37.666667
18.333333
def list(self, request):
    """Search the doctypes for this model.

    Filters the registered doctypes by a case-insensitive substring
    match against the ``search`` query parameter and returns them
    sorted by display name.
    """
    query = get_query_params(request).get("search", "").lower()
    base = self.model.get_base_class()
    matches = []
    for doctype, klass in indexable_registry.families[base].items():
        title = klass._meta.verbose_name.title()
        if query in title.lower():
            matches.append({"name": title, "doctype": doctype})
    matches.sort(key=lambda entry: entry["name"])
    return Response(dict(results=matches))
[ "def", "list", "(", "self", ",", "request", ")", ":", "query", "=", "get_query_params", "(", "request", ")", ".", "get", "(", "\"search\"", ",", "\"\"", ")", "results", "=", "[", "]", "base", "=", "self", ".", "model", ".", "get_base_class", "(", ")"...
40.466667
10.133333
def data(self, root):
    '''Convert etree.Element into a dictionary

    Recursively maps the element to ``{root.tag: value}`` where value is
    built from the element's attributes (optionally prefixed with
    ``self.attr_prefix``), its text (under ``self.text_content`` unless
    ``simple_text`` collapses a leaf to a bare value), and its children
    (repeated tags are collected into a list).
    NOTE: uses the Python 2 ``basestring`` builtin.
    '''
    value = self.dict()

    # Element children only; comments/PIs have non-string tags.
    children = [node for node in root if isinstance(node.tag, basestring)]
    for attr, attrval in root.attrib.items():
        attr = attr if self.attr_prefix is None else self.attr_prefix + attr
        value[attr] = self._fromstring(attrval)

    if root.text and self.text_content is not None:
        text = root.text.strip()
        if text:
            if self.simple_text and len(children) == len(root.attrib) == 0:
                # Leaf with no attrs/children: collapse to the bare value.
                value = self._fromstring(text)
            else:
                value[self.text_content] = self._fromstring(text)

    # Unique child tags merge in place; repeated tags accumulate in a list.
    count = Counter(child.tag for child in children)
    for child in children:
        if count[child.tag] == 1:
            value.update(self.data(child))
        else:
            result = value.setdefault(child.tag, self.list())
            result += self.data(child).values()

    # if simple_text, elements with no children nor attrs become '', not {}
    if isinstance(value, dict) and not value and self.simple_text:
        value = ''
    return self.dict([(root.tag, value)])
[ "def", "data", "(", "self", ",", "root", ")", ":", "value", "=", "self", ".", "dict", "(", ")", "children", "=", "[", "node", "for", "node", "in", "root", "if", "isinstance", "(", "node", ".", "tag", ",", "basestring", ")", "]", "for", "attr", ",...
48.28
18.6
async def get_random(self) -> Word:
    """Gets a random word.

    Returns:
        A random :class:`Word`\\.

    Raises:
        UrbanConnectionError: If the response status isn't ``200``.
    """
    payload = await self._get(random=True)
    return Word(payload['list'][0])
[ "async", "def", "get_random", "(", "self", ")", "->", "Word", ":", "resp", "=", "await", "self", ".", "_get", "(", "random", "=", "True", ")", "return", "Word", "(", "resp", "[", "'list'", "]", "[", "0", "]", ")" ]
28.090909
14.545455
def cherry_pick(self, branch, **kwargs):
    """Cherry-pick a commit into a branch.

    Args:
        branch (str): Name of target branch
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabCherryPickError: If the cherry-pick could not be performed
    """
    url = '%s/%s/cherry_pick' % (self.manager.path, self.get_id())
    self.manager.gitlab.http_post(url, post_data={'branch': branch}, **kwargs)
[ "def", "cherry_pick", "(", "self", ",", "branch", ",", "*", "*", "kwargs", ")", ":", "path", "=", "'%s/%s/cherry_pick'", "%", "(", "self", ".", "manager", ".", "path", ",", "self", ".", "get_id", "(", ")", ")", "post_data", "=", "{", "'branch'", ":",...
40.785714
21.571429
def show_plot(t_array, th_array):
    """ Display theta vs t plot with a one-sigma uncertainty band. """
    center = gv.mean(th_array)
    spread = gv.sdev(th_array)
    # Shaded band between mean +/- one standard deviation.
    plt.fill_between(t_array, center + spread, center - spread, color='0.8')
    plt.plot(t_array, center, linewidth=0.5)
    plt.xlabel('$t$')
    plt.ylabel(r'$\theta(t)$')
    plt.savefig('pendulum.pdf', bbox_inches='tight')
    plt.show()
[ "def", "show_plot", "(", "t_array", ",", "th_array", ")", ":", "th_mean", "=", "gv", ".", "mean", "(", "th_array", ")", "th_sdev", "=", "gv", ".", "sdev", "(", "th_array", ")", "thp", "=", "th_mean", "+", "th_sdev", "thm", "=", "th_mean", "-", "th_sd...
33.25
11.25
def set_iter_mesh(self, mesh, shift=None, is_time_reversal=True,
                  is_mesh_symmetry=True, is_eigenvectors=False,
                  is_gamma_center=False):
    """Create an IterMesh instance (deprecated).

    Attributes
    ----------
    See set_mesh method.
    """
    warnings.warn("Phonopy.set_iter_mesh is deprecated. "
                  "Use Phonopy.run_mesh with use_iter_mesh=True.",
                  DeprecationWarning)
    # Forward to run_mesh, mapping is_eigenvectors -> with_eigenvectors.
    params = dict(mesh=mesh,
                  shift=shift,
                  is_time_reversal=is_time_reversal,
                  is_mesh_symmetry=is_mesh_symmetry,
                  with_eigenvectors=is_eigenvectors,
                  is_gamma_center=is_gamma_center,
                  use_iter_mesh=True)
    self.run_mesh(**params)
[ "def", "set_iter_mesh", "(", "self", ",", "mesh", ",", "shift", "=", "None", ",", "is_time_reversal", "=", "True", ",", "is_mesh_symmetry", "=", "True", ",", "is_eigenvectors", "=", "False", ",", "is_gamma_center", "=", "False", ")", ":", "warnings", ".", ...
33.653846
15.346154
def get_wiki(self, section):
    """
    Returns a section of the wiki.
    Only for Album/Track.
    section can be "content", "summary" or "published" (for published date)
    """
    doc = self._request(self.ws_prefix + ".getInfo", True)
    wiki_nodes = doc.getElementsByTagName("wiki")
    # No wiki element present: nothing to extract.
    if not wiki_nodes:
        return None
    return _extract(wiki_nodes[0], section)
[ "def", "get_wiki", "(", "self", ",", "section", ")", ":", "doc", "=", "self", ".", "_request", "(", "self", ".", "ws_prefix", "+", "\".getInfo\"", ",", "True", ")", "if", "len", "(", "doc", ".", "getElementsByTagName", "(", "\"wiki\"", ")", ")", "==", ...
26.8125
16.5625
def ask_float(msg="Enter a float", dft=None, vld=None, hlp=None):
    """Prompts the user for a float."""
    # Default validator accepts anything castable to float.
    if not vld:
        vld = [float]
    return ask(msg, dft=dft, vld=vld, fmt=partial(cast, typ=float), hlp=hlp)
[ "def", "ask_float", "(", "msg", "=", "\"Enter a float\"", ",", "dft", "=", "None", ",", "vld", "=", "None", ",", "hlp", "=", "None", ")", ":", "vld", "=", "vld", "or", "[", "float", "]", "return", "ask", "(", "msg", ",", "dft", "=", "dft", ",", ...
51
19.25
def update_case_task(self, task):
    """Update an existing TheHive task.

    :param task: The task to update. The task's `id` determines which
        Task is patched on the server.
    :return: The response of the PATCH request.
    :raises CaseTaskException: when the HTTP request fails.
    """
    endpoint = self.url + "/api/case/task/{}".format(task.id)

    # Only this subset of the task's attributes is sent to the server.
    allowed = {
        'title', 'description', 'status', 'order',
        'user', 'owner', 'flag', 'endDate',
    }
    payload = {key: value
               for key, value in task.__dict__.items()
               if key in allowed}

    try:
        return requests.patch(
            endpoint,
            headers={'Content-Type': 'application/json'},
            json=payload,
            proxies=self.proxies,
            auth=self.auth,
            verify=self.cert)
    except requests.exceptions.RequestException as e:
        raise CaseTaskException("Case task update error: {}".format(e))
[ "def", "update_case_task", "(", "self", ",", "task", ")", ":", "req", "=", "self", ".", "url", "+", "\"/api/case/task/{}\"", ".", "format", "(", "task", ".", "id", ")", "# Choose which attributes to send", "update_keys", "=", "[", "'title'", ",", "'description...
40.7
27.4
def begin(self): """Initialize communication with the PN532. Must be called before any other calls are made against the PN532. """ # Assert CS pin low for a second for PN532 to be ready. self._gpio.set_low(self._cs) time.sleep(1.0) # Call GetFirmwareVersion to sync up with the PN532. This might not be # required but is done in the Arduino library and kept for consistency. self.get_firmware_version() self._gpio.set_high(self._cs)
[ "def", "begin", "(", "self", ")", ":", "# Assert CS pin low for a second for PN532 to be ready.", "self", ".", "_gpio", ".", "set_low", "(", "self", ".", "_cs", ")", "time", ".", "sleep", "(", "1.0", ")", "# Call GetFirmwareVersion to sync up with the PN532. This might...
45.818182
14.636364
def ParseArguments(self):
  """Parses the command line arguments.

  Returns:
    bool: True if the arguments were successfully parsed.
  """
  # Bootstrap logging with defaults; reconfigured below once the parsed
  # options (debug mode, log file, quiet mode) are known.
  loggers.ConfigureLogging()

  argument_parser = argparse.ArgumentParser(
      description=self.DESCRIPTION, epilog=self.EPILOG, add_help=False,
      formatter_class=argparse.RawDescriptionHelpFormatter)

  self.AddBasicOptions(argument_parser)

  # Most option groups are registered through the shared argument-helper
  # manager by name rather than defined inline.
  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      argument_parser, names=['storage_file'])

  data_location_group = argument_parser.add_argument_group(
      'data location arguments')

  argument_helper_names = ['artifact_definitions', 'data_location']
  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      data_location_group, names=argument_helper_names)

  extraction_group = argument_parser.add_argument_group(
      'extraction arguments')

  argument_helper_names = [
      'artifact_filters', 'extraction', 'filter_file', 'hashers',
      'parsers', 'yara_rules']
  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      extraction_group, names=argument_helper_names)

  self.AddStorageMediaImageOptions(extraction_group)
  self.AddTimeZoneOption(extraction_group)
  self.AddVSSProcessingOptions(extraction_group)
  self.AddCredentialOptions(extraction_group)

  info_group = argument_parser.add_argument_group('informational arguments')

  self.AddInformationalOptions(info_group)

  info_group.add_argument(
      '--info', dest='show_info', action='store_true', default=False,
      help='Print out information about supported plugins and parsers.')

  info_group.add_argument(
      '--use_markdown', '--use-markdown', dest='use_markdown',
      action='store_true', default=False, help=(
          'Output lists in Markdown format use in combination with '
          '"--hashers list", "--parsers list" or "--timezone list"'))

  info_group.add_argument(
      '--no_dependencies_check', '--no-dependencies-check',
      dest='dependencies_check', action='store_false', default=True,
      help='Disable the dependencies check.')

  self.AddLogFileOptions(info_group)

  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      info_group, names=['status_view'])

  output_group = argument_parser.add_argument_group('output arguments')

  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      output_group, names=['text_prepend'])

  processing_group = argument_parser.add_argument_group(
      'processing arguments')

  self.AddPerformanceOptions(processing_group)
  self.AddProcessingOptions(processing_group)

  processing_group.add_argument(
      '--sigsegv_handler', '--sigsegv-handler', dest='sigsegv_handler',
      action='store_true', default=False, help=(
          'Enables the SIGSEGV handler. WARNING this functionality is '
          'experimental and will a deadlock worker process if a real '
          'segfault is caught, but not signal SIGSEGV. This functionality '
          'is therefore primarily intended for debugging purposes'))

  profiling_group = argument_parser.add_argument_group('profiling arguments')

  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      profiling_group, names=['profiling'])

  storage_group = argument_parser.add_argument_group('storage arguments')

  helpers_manager.ArgumentHelperManager.AddCommandLineArguments(
      storage_group, names=['storage_format'])

  # The positional source argument: a device, image, archive or directory.
  argument_parser.add_argument(
      self._SOURCE_OPTION, action='store', metavar='SOURCE', nargs='?',
      default=None, type=str, help=(
          'Path to a source device, file or directory. If the source is '
          'a supported storage media device or image file, archive file '
          'or a directory, the files within are processed recursively.'))

  try:
    options = argument_parser.parse_args()
  except UnicodeEncodeError:
    # If we get here we are attempting to print help in a non-Unicode
    # terminal.
    self._output_writer.Write('\n')
    self._output_writer.Write(argument_parser.format_help())
    return False

  # Properly prepare the attributes according to local encoding.
  if self.preferred_encoding == 'ascii':
    # Warn (rather than abort) and give the user five seconds to cancel.
    logger.warning(
        'The preferred encoding of your system is ASCII, which is not '
        'optimal for the typically non-ASCII characters that need to be '
        'parsed and processed. The tool will most likely crash and die, '
        'perhaps in a way that may not be recoverable. A five second delay '
        'is introduced to give you time to cancel the runtime and '
        'reconfigure your preferred encoding, otherwise continue at own '
        'risk.')
    time.sleep(5)

  if self._process_archives:
    logger.warning(
        'Scanning archive files currently can cause deadlock. Continue at '
        'your own risk.')
    time.sleep(5)

  try:
    self.ParseOptions(options)
  except errors.BadConfigOption as exception:
    # Report the configuration problem and the usage line, then signal
    # failure to the caller instead of raising.
    self._output_writer.Write('ERROR: {0!s}\n'.format(exception))
    self._output_writer.Write('\n')
    self._output_writer.Write(argument_parser.format_usage())
    return False

  self._command_line_arguments = self.GetCommandLineArguments()

  # Reconfigure logging with the now-parsed debug/log-file/quiet settings.
  loggers.ConfigureLogging(
      debug_output=self._debug_mode, filename=self._log_file,
      quiet_mode=self._quiet_mode)

  return True
[ "def", "ParseArguments", "(", "self", ")", ":", "loggers", ".", "ConfigureLogging", "(", ")", "argument_parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "self", ".", "DESCRIPTION", ",", "epilog", "=", "self", ".", "EPILOG", ",", "add...
38.392857
24.178571