docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Takes an excerpt of the adb logcat log from a certain time point to current time. Args: tag: An identifier of the time period, usually the name of a test. begin_time: Logline format timestamp of the beginning of the time period.
def cat_adb_log(self, tag, begin_time):
    """Takes an excerpt of the adb logcat log from a certain time point to
    current time.

    Args:
        tag: An identifier of the time period, usually the name of a test.
        begin_time: Logline format timestamp of the beginning of the time
            period.

    Raises:
        Error: If adb logcat collection was never started on this device.
    """
    if not self.adb_logcat_file_path:
        raise Error(
            self._ad,
            'Attempting to cat adb log when none has been collected.')
    end_time = mobly_logger.get_log_line_timestamp()
    self._ad.log.debug('Extracting adb log from logcat.')
    adb_excerpt_path = os.path.join(self._ad.log_path, 'AdbLogExcerpts')
    utils.create_dir(adb_excerpt_path)
    f_name = os.path.basename(self.adb_logcat_file_path)
    out_name = f_name.replace('adblog,', '').replace('.txt', '')
    out_name = ',%s,%s.txt' % (begin_time, out_name)
    out_name = out_name.replace(':', '-')
    # Trim the tag so the final excerpt file name fits the OS limit.
    tag_len = utils.MAX_FILENAME_LEN - len(out_name)
    tag = tag[:tag_len]
    out_name = tag + out_name
    full_adblog_path = os.path.join(adb_excerpt_path, out_name)
    with io.open(full_adblog_path, 'w', encoding='utf-8') as out:
        in_file = self.adb_logcat_file_path
        with io.open(
                in_file, 'r', encoding='utf-8', errors='replace') as f:
            in_range = False
            while True:
                line = None
                try:
                    line = f.readline()
                    if not line:
                        break
                except UnicodeError:
                    # Bug fix: this was a bare `except: continue`, which
                    # could loop forever on a persistent read error and
                    # also swallowed KeyboardInterrupt. Only skip lines
                    # that fail to decode.
                    continue
                line_time = line[:mobly_logger.log_line_timestamp_len]
                if not mobly_logger.is_valid_logline_timestamp(line_time):
                    continue
                if self._is_timestamp_in_range(line_time, begin_time,
                                               end_time):
                    in_range = True
                    if not line.endswith('\n'):
                        line += '\n'
                    out.write(line)
                else:
                    if in_range:
                        break
377,952
Sends an Rpc message through the connection. Args: msg: string, the message to send. Raises: Error: a socket error occurred during the send.
def _client_send(self, msg):
    """Sends an Rpc message through the connection.

    Args:
        msg: string, the message to send.

    Raises:
        Error: a socket error occurred during the send.
    """
    payload = msg.encode('utf8') + b'\n'
    try:
        self._client.write(payload)
        self._client.flush()
        self.log.debug('Snippet sent %s.', msg)
    except socket.error as e:
        raise Error(
            self._ad,
            'Encountered socket error "%s" sending RPC message "%s"' %
            (e, msg))
377,959
Send a command to the server. Args: command: str, The name of the command to execute. uid: int, the uid of the session to send the command to. Returns: The line that was written back.
def _cmd(self, command, uid=None):
    """Send a command to the server.

    Args:
        command: str, The name of the command to execute.
        uid: int, the uid of the session to send the command to.

    Returns:
        The line that was written back.
    """
    if not uid:
        uid = self.uid
    request = json.dumps({'cmd': command, 'uid': uid})
    self._client_send(request)
    return self._client_receive()
377,961
Sends an rpc to the app. Args: method: str, The name of the method to execute. args: any, The args of the method. Returns: The result of the rpc. Raises: ProtocolError: Something went wrong with the protocol. ApiError: The rpc went through, however executed with errors.
def _rpc(self, method, *args):
    """Sends an rpc to the app.

    Args:
        method: str, The name of the method to execute.
        args: any, The args of the method.

    Returns:
        The result of the rpc.

    Raises:
        ProtocolError: Something went wrong with the protocol.
        ApiError: The rpc went through, however executed with errors.
    """
    # Serialize access to the socket so request/response pairs can't
    # interleave across threads.
    with self._lock:
        rpc_id = next(self._counter)
        request = json.dumps(
            {'id': rpc_id, 'method': method, 'params': args})
        self._client_send(request)
        response = self._client_receive()
    if not response:
        raise ProtocolError(
            self._ad, ProtocolError.NO_RESPONSE_FROM_SERVER)
    result = json.loads(str(response, encoding='utf8'))
    if result['error']:
        raise ApiError(self._ad, result['error'])
    if result['id'] != rpc_id:
        raise ProtocolError(self._ad, ProtocolError.MISMATCHED_API_ID)
    if result.get('callback') is not None:
        # Lazily start the event client on the first rpc that returns a
        # callback.
        if self._event_client is None:
            self._event_client = self._start_event_client()
        return callback_handler.CallbackHandler(
            callback_id=result['callback'],
            event_client=self._event_client,
            ret_value=result['result'],
            method_name=method,
            ad=self._ad)
    return result['result']
377,962
Starts iperf server on specified port. Args: extra_args: A string representing extra arguments to start iperf server with. tag: Appended to log file name to identify logs from different iperf runs.
def start(self, extra_args="", tag=""):
    """Starts iperf server on specified port.

    Args:
        extra_args: A string representing extra arguments to start iperf
            server with.
        tag: Appended to log file name to identify logs from different
            iperf runs.
    """
    if self.started:
        return
    utils.create_dir(self.log_path)
    tag_prefix = tag + ',' if tag else tag
    log_name = 'IPerfServer,%s,%s%s.log' % (
        self.port, tag_prefix, len(self.log_files))
    full_out_path = os.path.join(self.log_path, log_name)
    cmd = '%s %s > %s' % (self.iperf_str, extra_args, full_out_path)
    self.iperf_process = utils.start_standing_subprocess(cmd, shell=True)
    self.log_files.append(full_out_path)
    self.started = True
377,972
Get a property of the device. This is a convenience wrapper for "adb shell getprop xxx". Args: prop_name: A string that is the name of the property to get. Returns: A string that is the value of the property, or None if the property doesn't exist.
def getprop(self, prop_name):
    """Get a property of the device.

    This is a convenience wrapper for "adb shell getprop xxx".

    Args:
        prop_name: A string that is the name of the property to get.

    Returns:
        A string that is the value of the property, or None if the
        property doesn't exist.
    """
    raw_output = self.shell(
        ['getprop', prop_name], timeout=DEFAULT_GETPROP_TIMEOUT_SEC)
    return raw_output.decode('utf-8').strip()
377,992
Checks to see if a given check command exists on the device. Args: command: A string that is the name of the command to check. Returns: A boolean that is True if the command exists and False otherwise.
def has_shell_command(self, command):
    """Checks to see if a given check command exists on the device.

    Args:
        command: A string that is the name of the command to check.

    Returns:
        A boolean that is True if the command exists and False otherwise.
    """
    try:
        raw = self.shell(['command', '-v', command])
    except AdbError:
        # If the command doesn't exist, then 'command -v' can return
        # an exit code > 1.
        return False
    return command in raw.decode('utf-8').strip()
377,993
Assert the equality of objects, otherwise fail the test. Error message is "first != second" by default. Additional explanation can be supplied in the message. Args: first: The first object to compare. second: The second object to compare. msg: A string that adds additional info about the failure. extras: An optional field for extra information to be included in test result.
def assert_equal(first, second, msg=None, extras=None):
    """Assert the equality of objects, otherwise fail the test.

    Error message is "first != second" by default. Additional explanation
    can be supplied in the message.

    Args:
        first: The first object to compare.
        second: The second object to compare.
        msg: A string that adds additional info about the failure.
        extras: An optional field for extra information to be included in
            test result.
    """
    failure_msg = None
    try:
        _pyunit_proxy.assertEqual(first, second)
    except AssertionError as e:
        failure_msg = str(e)
        if msg:
            failure_msg = '%s %s' % (failure_msg, msg)
    # Raise outside the except clause to prevent Python3's exception
    # message from having two tracebacks.
    if failure_msg is not None:
        raise signals.TestFailure(failure_msg, extras=extras)
377,997
Gets all the events of a certain name that have been received so far. This is a non-blocking call. Args: callback_id: The id of the callback. event_name: string, the name of the event to get. Returns: A list of SnippetEvent, each representing an event from the Java side.
def getAll(self, event_name):
    """Gets all the events of a certain name that have been received so
    far. This is a non-blocking call.

    Args:
        event_name: string, the name of the event to get.

    Returns:
        A list of SnippetEvent, each representing an event from the Java
        side.
    """
    raw_events = self._event_client.eventGetAll(self._id, event_name)
    return list(map(snippet_event.from_dict, raw_events))
378,005
Verifies that a config dict for an attenuator device is valid. Args: config: A dict that is the configuration for an attenuator device. Raises: attenuator.Error: A config is not valid.
def _validate_config(config):
    """Verifies that a config dict for an attenuator device is valid.

    Args:
        config: A dict that is the configuration for an attenuator device.

    Raises:
        Error: A config is not valid.
    """
    required_keys = [KEY_ADDRESS, KEY_MODEL, KEY_PORT, KEY_PATHS]
    for key in required_keys:
        if key not in config:
            # Bug fix: the original passed the format string and the value
            # tuple as two positional arguments to Error, so the message
            # was never interpolated. Format it before raising.
            raise Error('Required key %s missing from config %s' %
                        (key, config))
378,009
Create Monsoon instances from a list of serials. Args: serials: A list of Monsoon (integer) serials. Returns: A list of Monsoon objects.
def get_instances(serials):
    """Create Monsoon instances from a list of serials.

    Args:
        serials: A list of Monsoon (integer) serials.

    Returns:
        A list of Monsoon objects.
    """
    return [Monsoon(serial=s) for s in serials]
378,012
Instantiates a MonsoonData object. Args: data_points: A list of current values in Amp (float). timestamps: A list of epoch timestamps (int). hz: The hertz at which the data points are measured. voltage: The voltage at which the data points are measured. offset: The number of initial data points to discard in calculations.
def __init__(self, data_points, timestamps, hz, voltage, offset=0):
    """Instantiates a MonsoonData object.

    Args:
        data_points: A list of current values in Amp (float).
        timestamps: A list of epoch timestamps (int).
        hz: The hertz at which the data points are measured.
        voltage: The voltage at which the data points are measured.
        offset: The number of initial data points to discard in
            calculations.
    """
    self._data_points = data_points
    self._timestamps = timestamps
    self.offset = offset
    total = len(self._data_points)
    if self.offset >= total:
        raise MonsoonError(
            ("Offset number (%d) must be smaller than the "
             "number of data points (%d).") % (offset, total))
    # Visible data with the initial `offset` samples discarded.
    self.data_points = self._data_points[self.offset:]
    self.timestamps = self._timestamps[self.offset:]
    self.hz = hz
    self.voltage = voltage
    self.tag = None
    self._validate_data()
378,023
Creates a MonsoonData object from a string representation generated by __str__. Args: str: The string representation of a MonsoonData. Returns: A MonsoonData object.
def from_string(data_str):
    """Creates a MonsoonData object from a string representation generated
    by __str__.

    Args:
        data_str: The string representation of a MonsoonData.

    Returns:
        A MonsoonData object.

    Raises:
        MonsoonError: If the input string is not in the expected format.
    """
    lines = data_str.strip().split('\n')
    err_msg = ("Invalid input string format. Is this string generated by "
               "MonsoonData class?")
    # Bug fix: the original built a list of conditions that indexed
    # lines[1..5] unconditionally, so inputs with fewer than 6 lines
    # raised IndexError instead of MonsoonError. Short-circuit on the
    # length check first.
    if (len(lines) < 6
            or "Average Current:" not in lines[1]
            or "Voltage: " not in lines[2]
            or "Total Power: " not in lines[3]
            or "samples taken at " not in lines[4]
            or lines[5] != "Time" + ' ' * 7 + "Amp"):
        raise MonsoonError(err_msg)
    hz_str = lines[4].split()[2]
    hz = int(hz_str[:-2])
    voltage_str = lines[2].split()[1]
    voltage = int(voltage_str[:-1])
    t = []
    v = []
    for line in lines[6:]:
        try:
            timestamp, value = line.split(' ')
            t.append(int(timestamp))
            v.append(float(value))
        except ValueError:
            raise MonsoonError(err_msg)
    return MonsoonData(v, t, hz, voltage)
378,027
Save multiple MonsoonData objects to a text file. Args: monsoon_data: A list of MonsoonData objects to write to a text file. file_path: The full path of the file to save to, including the file name.
def save_to_text_file(monsoon_data, file_path):
    """Save multiple MonsoonData objects to a text file.

    Args:
        monsoon_data: A list of MonsoonData objects to write to a text
            file.
        file_path: The full path of the file to save to, including the
            file name.
    """
    if not monsoon_data:
        raise MonsoonError("Attempting to write empty Monsoon data to "
                           "file, abort")
    utils.create_dir(os.path.dirname(file_path))
    with io.open(file_path, 'w', encoding='utf-8') as f:
        for record in monsoon_data:
            # Each record is separated by the class delimiter so it can
            # be split back apart by from_text_file.
            f.write(str(record) + MonsoonData.delimiter)
378,028
Load MonsoonData objects from a text file generated by MonsoonData.save_to_text_file. Args: file_path: The full path of the file to load from, including the file name. Returns: A list of MonsoonData objects.
def from_text_file(file_path):
    """Load MonsoonData objects from a text file generated by
    MonsoonData.save_to_text_file.

    Args:
        file_path: The full path of the file to load from, including the
            file name.

    Returns:
        A list of MonsoonData objects.
    """
    with io.open(file_path, 'r', encoding='utf-8') as f:
        data_strs = f.read().split(MonsoonData.delimiter)
    return [MonsoonData.from_string(chunk) for chunk in data_strs]
378,029
Updates how many data points to skip in calculations. Always use this function to update offset instead of directly setting self.offset. Args: new_offset: The new offset.
def update_offset(self, new_offset):
    """Updates how many data points to skip in calculations.

    Always use this function to update offset instead of directly setting
    self.offset.

    Args:
        new_offset: The new offset.
    """
    self.offset = new_offset
    # Re-derive the visible slices from the raw, unsliced data.
    self.data_points = self._data_points[new_offset:]
    self.timestamps = self._timestamps[new_offset:]
378,031
Returns a list of average current numbers, each representing the average over the last n data points. Args: n: Number of data points to average over. Returns: A list of average current values.
def get_average_record(self, n):
    """Returns a list of average current numbers, each representing the
    average over the last n data points.

    Args:
        n: Number of data points to average over.

    Returns:
        A list of average current values.
    """
    # deque(maxlen=n) automatically discards the oldest value once the
    # window holds n entries.
    window = collections.deque(maxlen=n)
    averages = []
    for value in self.data_points:
        window.append(value)
        averages.append(round(sum(window) / len(window), self.lr))
    return averages
378,033
Sets the output voltage of monsoon. Args: volt: Voltage to set the output to. ramp: If true, the output voltage will be increased gradually to prevent tripping Monsoon overvoltage.
def set_voltage(self, volt, ramp=False):
    """Sets the output voltage of monsoon.

    Args:
        volt: Voltage to set the output to.
        ramp: If true, the output voltage will be increased gradually to
            prevent tripping Monsoon overvoltage.
    """
    if not ramp:
        self.mon.SetVoltage(volt)
    else:
        self.mon.RampVoltage(self.mon.start_voltage, volt)
378,037
Decorator specifying the unique identifier (UID) of a test case. The UID will be recorded in the test's record when executed by Mobly. If you use any other decorator for the test method, you may want to use this as the outer-most one. Note a common UID system is the Universal Unique Identifier (UUID), but we are not limiting people to use UUID, hence the more generic name `UID`. Args: uid: string, the uid for the decorated test function.
def uid(uid):
    """Decorator specifying the unique identifier (UID) of a test case.

    The UID will be recorded in the test's record when executed by Mobly.

    If you use any other decorator for the test method, you may want to
    use this as the outer-most one.

    Note a common UID system is the Universal Unique Identifier (UUID),
    but we are not limiting people to use UUID, hence the more generic
    name `UID`.

    Args:
        uid: string, the uid for the decorated test function.
    """
    if uid is None:
        raise ValueError('UID cannot be None.')

    def decorate(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            return test_func(*args, **kwargs)

        wrapper.uid = uid
        return wrapper

    return decorate
378,045
Sets the `details` field. Args: content: the content to extract details from.
def _set_details(self, content):
    """Sets the `details` field.

    Args:
        content: the content to extract details from.
    """
    try:
        self.details = str(content)
    except UnicodeEncodeError:
        if sys.version_info < (3, 0):
            # Py2 `str()` failed on non-ascii content; keep the unicode
            # object instead.
            self.details = unicode(content)
        else:
            # We should never hit this in Py3; if it happens, record an
            # encoded version of the content for users to handle.
            logging.error(
                'Unable to decode "%s" in Py3, encoding in utf-8.',
                content)
            self.details = content.encode('utf-8')
378,051
Marks the end of the test logic. Args: result: One of the TEST_RESULT enums in TestResultEnums. e: A test termination signal (usually an exception object). It can be any exception instance or of any subclass of mobly.signals.TestSignal.
def _test_end(self, result, e):
    """Marks the end of the test logic.

    Args:
        result: One of the TEST_RESULT enums in TestResultEnums.
        e: A test termination signal (usually an exception object). It can
            be any exception instance or of any subclass of
            mobly.signals.TestSignal.
    """
    self.result = result
    # Only stamp an end time if the test actually began.
    if self.begin_time is not None:
        self.end_time = utils.get_current_epoch_time()
    if e:
        self.termination_signal = ExceptionRecord(e)
378,055
Overrides '+' operator for TestResult class. The add operator merges two TestResult objects by concatenating all of their lists together. Args: r: another instance of TestResult to be added Returns: A TestResult instance that's the sum of two TestResult instances.
def __add__(self, r):
    """Overrides '+' operator for TestResult class.

    The add operator merges two TestResult objects by concatenating all of
    their lists together.

    Args:
        r: another instance of TestResult to be added

    Returns:
        A TestResult instance that's the sum of two TestResult instances.
    """
    if not isinstance(r, TestResult):
        raise TypeError('Operand %s of type %s is not a TestResult.' %
                        (r, type(r)))
    sum_result = TestResult()
    # Concatenate every list attribute; non-list attributes keep the
    # fresh TestResult's defaults.
    for name in sum_result.__dict__:
        r_value = getattr(r, name)
        if isinstance(r_value, list):
            setattr(sum_result, name, getattr(self, name) + r_value)
    return sum_result
378,062
Adds a test record to test result. A record is considered executed once it's added to the test result. Adding the record finalizes the content of a record, so no change should be made to the record afterwards. Args: record: A test record object to add.
def add_record(self, record):
    """Adds a test record to test result.

    A record is considered executed once it's added to the test result.

    Adding the record finalizes the content of a record, so no change
    should be made to the record afterwards.

    Args:
        record: A test record object to add.
    """
    record.update_record()
    # Skipped records are tracked separately and not counted as executed.
    if record.result == TestResultEnums.TEST_RESULT_SKIP:
        self.skipped.append(record)
        return
    self.executed.append(record)
    if record.result == TestResultEnums.TEST_RESULT_PASS:
        self.passed.append(record)
    elif record.result == TestResultEnums.TEST_RESULT_FAIL:
        self.failed.append(record)
    else:
        self.error.append(record)
378,063
Checks if a specific test has been executed. Args: test_name: string, the name of the test to check. Returns: True if the test has been executed according to the test result, False otherwise.
def is_test_executed(self, test_name):
    """Checks if a specific test has been executed.

    Args:
        test_name: string, the name of the test to check.

    Returns:
        True if the test has been executed according to the test result,
        False otherwise.
    """
    return any(r.test_name == test_name for r in self.executed)
378,064
Validates the name of a test bed. Since test bed names are used as part of the test run id, it needs to meet certain requirements. Args: name: The test bed's name specified in config file. Raises: MoblyConfigError: The name does not meet any criteria.
def _validate_testbed_name(name):
    """Validates the name of a test bed.

    Since test bed names are used as part of the test run id, it needs to
    meet certain requirements.

    Args:
        name: The test bed's name specified in config file.

    Raises:
        MoblyConfigError: The name does not meet any criteria.
    """
    if not name:
        raise MoblyConfigError("Test bed names can't be empty.")
    for char in str(name):
        if char not in utils.valid_filename_chars:
            raise MoblyConfigError(
                'Char "%s" is not allowed in test bed names.' % char)
378,070
Validates the testbed configurations. Args: testbed_configs: A list of testbed configuration dicts. Raises: MoblyConfigError: Some parts of the configuration is invalid.
def _validate_testbed_configs(testbed_configs):
    """Validates the testbed configurations.

    Args:
        testbed_configs: A list of testbed configuration dicts.

    Raises:
        MoblyConfigError: Some parts of the configuration is invalid.
    """
    seen_names = set()
    # Cross checks testbed configs for resource conflicts.
    for config in testbed_configs:
        name = config[keys.Config.key_testbed_name.value]
        _validate_testbed_name(name)
        # Test bed names should be unique.
        if name in seen_names:
            raise MoblyConfigError('Duplicate testbed name %s found.' %
                                   name)
        seen_names.add(name)
378,071
Loads a test config file. The test config file has to be in YAML format. Args: path: A string that is the full path to the config file, including the file name. Returns: A dict that represents info in the config file.
def _load_config_file(path):
    """Loads a test config file.

    The test config file has to be in YAML format.

    Args:
        path: A string that is the full path to the config file, including
            the file name.

    Returns:
        A dict that represents info in the config file.
    """
    with io.open(utils.abs_path(path), 'r', encoding='utf-8') as f:
        # safe_load refuses the arbitrary Python object construction that
        # bare yaml.load allows, and bare yaml.load without a Loader is
        # deprecated in PyYAML 5.1+.
        conf = yaml.safe_load(f)
    return conf
378,073
Adds a snippet client to the management. Args: name: string, the attribute name to which to attach the snippet client. E.g. `name='maps'` attaches the snippet client to `ad.maps`. package: string, the package name of the snippet apk to connect to. Raises: Error, if a duplicated name or package is passed in.
def add_snippet_client(self, name, package):
    """Adds a snippet client to the management.

    Args:
        name: string, the attribute name to which to attach the snippet
            client. E.g. `name='maps'` attaches the snippet client to
            `ad.maps`.
        package: string, the package name of the snippet apk to connect
            to.

    Raises:
        Error, if a duplicated name or package is passed in.
    """
    # Should not load snippet with the same name more than once.
    if name in self._snippet_clients:
        raise Error(
            self,
            'Name "%s" is already registered with package "%s", it cannot '
            'be used again.' %
            (name, self._snippet_clients[name].client.package))
    # Should not load the same snippet package more than once.
    for snippet_name, snippet in self._snippet_clients.items():
        if package == snippet.package:
            raise Error(
                self,
                'Snippet package "%s" has already been loaded under name'
                ' "%s".' % (package, snippet_name))
    new_client = snippet_client.SnippetClient(
        package=package, ad=self._device)
    new_client.start_app_and_connect()
    self._snippet_clients[name] = new_client
378,077
Removes a snippet client from management. Args: name: string, the name of the snippet client to remove. Raises: Error: if no snippet client is managed under the specified name.
def remove_snippet_client(self, name):
    """Removes a snippet client from management.

    Args:
        name: string, the name of the snippet client to remove.

    Raises:
        Error: if no snippet client is managed under the specified name.
    """
    if name not in self._snippet_clients:
        raise Error(self._device, MISSING_SNIPPET_CLIENT_MSG % name)
    # Detach the client before stopping its app.
    self._snippet_clients.pop(name).stop_app()
378,078
Execute the registered handler of an event. Retrieve the handler and its arguments, and execute the handler in a new thread. Args: event_obj: Json object of the event. event_name: Name of the event to call handler for.
def handle_subscribed_event(self, event_obj, event_name):
    """Execute the registered handler of an event.

    Retrieve the handler and its arguments, and execute the handler in a
    new thread.

    Args:
        event_obj: Json object of the event.
        event_name: Name of the event to call handler for.
    """
    registration = self.handlers[event_name]
    # registration is a (handler, args) pair.
    self.executor.submit(registration[0], event_obj, *registration[1])
378,094
Return and remove all stored events of a specified name. Pops all events from their queue. May miss the latest ones. If no event is available, return immediately. Args: event_name: Name of the events to be popped. Returns: List of the desired events. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling.
def pop_all(self, event_name):
    """Return and remove all stored events of a specified name.

    Pops all events from their queue. May miss the latest ones.
    If no event is available, return immediately.

    Args:
        event_name: Name of the events to be popped.

    Returns:
        List of the desired events.

    Raises:
        IllegalStateError: Raised if pop is called before the dispatcher
            starts polling.
    """
    if not self.started:
        raise IllegalStateError(("Dispatcher needs to be started before "
                                 "popping."))
    results = []
    self.lock.acquire()
    try:
        # Drain until the queue raises Empty; KeyError means the event
        # name was never registered, which also yields an empty result.
        while True:
            results.append(self.event_dict[event_name].get(block=False))
    except (queue.Empty, KeyError):
        pass
    finally:
        self.lock.release()
    return results
378,097
Clear all events of a particular name. Args: event_name: Name of the events to be popped.
def clear_events(self, event_name):
    """Clear all events of a particular name.

    Args:
        event_name: Name of the events to be popped.
    """
    with self.lock:
        try:
            self.get_event_q(event_name).queue.clear()
        except queue.Empty:
            return
378,098
Create a SnippetEvent object from a dictionary. Args: event_dict: a dictionary representing an event. Returns: A SnippetEvent object.
def from_dict(event_dict):
    """Create a SnippetEvent object from a dictionary.

    Args:
        event_dict: a dictionary representing an event.

    Returns:
        A SnippetEvent object.
    """
    kwargs = {
        'callback_id': event_dict['callbackId'],
        'name': event_dict['name'],
        'creation_time': event_dict['time'],
        'data': event_dict['data'],
    }
    return SnippetEvent(**kwargs)
378,102
Creates a directory if it does not exist already. Args: path: The path of the directory to create.
def create_dir(path):
    """Creates a directory if it does not exist already.

    Args:
        path: The path of the directory to create.
    """
    # Imported locally to keep the module import surface unchanged.
    import errno
    full_path = abs_path(path)
    if not os.path.exists(full_path):
        try:
            os.makedirs(full_path)
        except OSError as e:
            # Ignore the error for dir already exist. Bug fix: compare
            # against errno.EEXIST — `os.errno` was never a documented
            # attribute and was removed in Python 3.7.
            if e.errno != errno.EEXIST:
                raise
378,105
Creates an alias at 'alias_path' pointing to the file 'target_path'. On Unix, this is implemented via symlink. On Windows, this is done by creating a Windows shortcut file. Args: target_path: Destination path that the alias should point to. alias_path: Path at which to create the new alias.
def create_alias(target_path, alias_path):
    """Creates an alias at 'alias_path' pointing to the file 'target_path'.

    On Unix, this is implemented via symlink. On Windows, this is done by
    creating a Windows shortcut file.

    Args:
        target_path: Destination path that the alias should point to.
        alias_path: Path at which to create the new alias.
    """
    on_windows = platform.system() == 'Windows'
    if on_windows and not alias_path.endswith('.lnk'):
        alias_path += '.lnk'
    # Replace any existing alias at the destination.
    if os.path.lexists(alias_path):
        os.remove(alias_path)
    if not on_windows:
        os.symlink(target_path, alias_path)
        return
    # Windows: create a .lnk shortcut via the WScript COM interface.
    from win32com import client
    shell = client.Dispatch('WScript.Shell')
    shortcut = shell.CreateShortCut(alias_path)
    shortcut.Targetpath = target_path
    shortcut.save()
378,106
Converts an epoch timestamp to human readable time. This essentially converts an output of get_current_epoch_time to an output of get_current_human_time Args: epoch_time: An integer representing an epoch timestamp in milliseconds. Returns: A time string representing the input time. None if input param is invalid.
def epoch_to_human_time(epoch_time): if isinstance(epoch_time, int): try: d = datetime.datetime.fromtimestamp(epoch_time / 1000) return d.strftime("%m-%d-%Y %H:%M:%S ") except ValueError: return None
378,107
Locate files whose names and extensions match the given predicate in the specified directories. Args: paths: A list of directory paths where to find the files. file_predicate: A function that returns True if the file name and extension are desired. Returns: A list of files that match the predicate.
def find_files(paths, file_predicate):
    """Locate files whose names and extensions match the given predicate
    in the specified directories.

    Args:
        paths: A list of directory paths where to find the files.
        file_predicate: A function that returns True if the file name and
            extension are desired.

    Returns:
        A list of files that match the predicate.
    """
    matches = []
    for path in paths:
        for dir_path, _, file_names in os.walk(abs_path(path)):
            for file_name in file_names:
                name, ext = os.path.splitext(file_name)
                if file_predicate(name, ext):
                    matches.append((dir_path, name, ext))
    return matches
378,109
Loads the content of a file into a base64 string. Args: f_path: full path to the file including the file name. Returns: A base64 string representing the content of the file in utf-8 encoding.
def load_file_to_base64_str(f_path):
    """Loads the content of a file into a base64 string.

    Args:
        f_path: full path to the file including the file name.

    Returns:
        A base64 string representing the content of the file in utf-8
        encoding.
    """
    with io.open(abs_path(f_path), 'rb') as f:
        return base64.b64encode(f.read()).decode('utf-8')
378,110
Finds the value of a field in a dict object that satisfies certain conditions. Args: item_list: A list of dict objects. cond: A param that defines the condition. comparator: A function that checks if an dict satisfies the condition. target_field: Name of the field whose value to be returned if an item satisfies the condition. Returns: Target value or None if no item satisfies the condition.
def find_field(item_list, cond, comparator, target_field):
    """Finds the value of a field in a dict object that satisfies certain
    conditions.

    Args:
        item_list: A list of dict objects.
        cond: A param that defines the condition.
        comparator: A function that checks if an dict satisfies the
            condition.
        target_field: Name of the field whose value to be returned if an
            item satisfies the condition.

    Returns:
        Target value or None if no item satisfies the condition.
    """
    return next((item[target_field] for item in item_list
                 if comparator(item, cond) and target_field in item),
                None)
378,111
Generates a random string of specified length, composed of ascii letters and digits. Args: length: The number of characters in the string. Returns: The random string generated.
def rand_ascii_str(length):
    """Generates a random string of specified length, composed of ascii
    letters and digits.

    Args:
        length: The number of characters in the string.

    Returns:
        The random string generated.
    """
    return ''.join(
        random.choice(ascii_letters_and_digits) for _ in range(length))
378,112
Stops a subprocess started by start_standing_subprocess. Before killing the process, we check if the process is running, if it has terminated, Error is raised. Catches and ignores the PermissionError which only happens on Macs. Args: proc: Subprocess to terminate. Raises: Error: if the subprocess could not be stopped.
def stop_standing_subprocess(proc):
    """Stops a subprocess started by start_standing_subprocess.

    Before killing the process, we check if the process is running, if it
    has terminated, Error is raised.

    Catches and ignores the PermissionError which only happens on Macs.

    Args:
        proc: Subprocess to terminate.

    Raises:
        Error: if the subprocess could not be stopped.
    """
    # Only import psutil when actually needed.
    # psutil may cause import error in certain env. This way the utils
    # module doesn't crash upon import.
    import psutil
    pid = proc.pid
    logging.debug('Stopping standing subprocess %d', pid)
    process = psutil.Process(pid)
    failed = []
    try:
        children = process.children(recursive=True)
    except AttributeError:
        # Handle versions <3.0.0 of psutil.
        children = process.get_children(recursive=True)
    for child in children:
        try:
            child.kill()
            child.wait(timeout=10)
        except psutil.NoSuchProcess:
            # Ignore if the child process has already terminated.
            pass
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            failed.append(child.pid)
            logging.exception('Failed to kill standing subprocess %d',
                              child.pid)
    try:
        process.kill()
        process.wait(timeout=10)
    except psutil.NoSuchProcess:
        # Ignore if the process has already terminated.
        pass
    except Exception:
        failed.append(pid)
        logging.exception('Failed to kill standing subprocess %d', pid)
    if failed:
        raise Error('Failed to kill standing subprocesses: %s' % failed)
    # Call wait and close pipes on the original Python object so we don't
    # get runtime warnings.
    if proc.stdout:
        proc.stdout.close()
    if proc.stderr:
        proc.stderr.close()
    proc.wait()
    logging.debug('Stopped standing subprocess %d', pid)
378,116
Similar to linux's `grep`, this returns the line in an output stream that matches a given regex pattern. It does not rely on the `grep` binary and is not sensitive to line endings, so it can be used cross-platform. Args: regex: string, a regex that matches the expected pattern. output: byte string, the raw output of the adb cmd. Returns: A list of strings, all of which are output lines that matches the regex pattern.
def grep(regex, output):
    """Similar to linux's `grep`, this returns the line in an output
    stream that matches a given regex pattern.

    It does not rely on the `grep` binary and is not sensitive to line
    endings, so it can be used cross-platform.

    Args:
        regex: string, a regex that matches the expected pattern.
        output: byte string, the raw output of the adb cmd.

    Returns:
        A list of strings, all of which are output lines that matches the
        regex pattern.
    """
    pattern = re.compile(regex)
    return [
        line.strip()
        for line in output.decode('utf-8').strip().splitlines()
        if pattern.search(line)
    ]
378,118
Converts a cmd arg list to string. Args: args: list of strings, the arguments of a command. Returns: String representation of the command.
def cli_cmd_to_string(args):
    """Converts a cmd arg list to string.

    Args:
        args: list of strings, the arguments of a command.

    Returns:
        String representation of the command.
    """
    # NOTE(review): `basestring` is Py2-only; this module appears to
    # target the Py2/Py3 transition era — confirm before modernizing.
    if isinstance(args, basestring):
        # Return directly if it's already a string.
        return args
    return ' '.join(pipes.quote(arg) for arg in args)
378,119
Executes commands in a new shell. Directing stderr to PIPE. This is fastboot's own exe_cmd because of its peculiar way of writing non-error info to stderr. Args: cmds: A sequence of commands and arguments. Returns: The output of the command run. Raises: Exception: An error occurred during the command execution.
def exe_cmd(*cmds):
    """Executes commands in a new shell. Directing stderr to PIPE.

    This is fastboot's own exe_cmd because of its peculiar way of writing
    non-error info to stderr.

    Args:
        cmds: A sequence of commands and arguments.

    Returns:
        The output of the command run.
    """
    cmd = ' '.join(cmds)
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    out, err = proc.communicate()
    # fastboot writes progress/info to stderr, so stderr content is
    # returned when present instead of being treated as a failure.
    return err if err else out
378,120
Verifies a module object follows the required interface for controllers. The interface is explained in the docstring of `base_test.BaseTestClass.register_controller`. Args: module: An object that is a controller module. This is usually imported with import statements or loaded by importlib. Raises: ControllerError: if the module does not match the Mobly controller interface, or one of the required members is null.
def verify_controller_module(module):
    """Verifies a module object follows the required interface for
    controllers.

    The interface is explained in the docstring of
    `base_test.BaseTestClass.register_controller`.

    Args:
        module: An object that is a controller module. This is usually
            imported with import statements or loaded by importlib.

    Raises:
        ControllerError: if the module does not match the Mobly
            controller interface, or one of the required members is null.
    """
    for attr in ('create', 'destroy', 'MOBLY_CONTROLLER_CONFIG_NAME'):
        if not hasattr(module, attr):
            raise signals.ControllerError(
                'Module %s missing required controller module attribute'
                ' %s.' % (module.__name__, attr))
        if not getattr(module, attr):
            raise signals.ControllerError(
                'Controller interface %s in %s cannot be null.' %
                (attr, module.__name__))
378,124
Creates controller info record for a particular controller type. Info is retrieved from all the controller objects spawned from the specified module, using the controller module's `get_info` function. Args: controller_module_name: string, the name of the controller module to retrieve info from. Returns: A records.ControllerInfoRecord object.
def _create_controller_info_record(self, controller_module_name):
    """Creates controller info record for a particular controller type.

    Info is retrieved from all the controller objects spawned from the
    specified module, using the controller module's `get_info` function.

    Args:
        controller_module_name: string, the name of the controller module
            to retrieve info from.

    Returns:
        A records.ControllerInfoRecord object.
    """
    mod = self._controller_modules[controller_module_name]
    info = None
    try:
        # Pass a shallow copy so get_info can't mutate our bookkeeping.
        info = mod.get_info(
            copy.copy(self._controller_objects[controller_module_name]))
    except AttributeError:
        logging.warning('No optional debug info found for controller '
                        '%s. To provide it, implement `get_info`.',
                        controller_module_name)
    try:
        yaml.dump(info)
    except TypeError:
        logging.warning('The info of controller %s in class "%s" is not '
                        'YAML serializable! Coercing it to string.',
                        controller_module_name, self._class_name)
        info = str(info)
    return records.ControllerInfoRecord(
        self._class_name, mod.MOBLY_CONTROLLER_CONFIG_NAME, info)
378,128
Decorator/decorator factory that logs inputs and the return result. If used with inputs (i.e. as a decorator factory), it accepts the following parameters: @param action_type: The action type to use. If not given the function name will be used. @param include_args: If given, should be a list of strings, the arguments to log. @param include_result: True by default. If False, the return result isn't logged.
def log_call(
    wrapped_function=None, action_type=None, include_args=None,
    include_result=True
):
    """Decorator/decorator factory that logs inputs and the return result.

    If used with inputs (i.e. as a decorator factory), it accepts the
    following parameters:

    @param action_type: The action type to use. If not given the function
        name will be used.
    @param include_args: If given, should be a list of strings, the
        arguments to log.
    @param include_result: True by default. If False, the return result
        isn't logged.
    """
    if wrapped_function is None:
        # Factory usage: return a partial that receives the function on
        # the second call.
        return partial(
            log_call, action_type=action_type,
            include_args=include_args, include_result=include_result)

    if action_type is None:
        if PY3:
            action_type = "{}.{}".format(
                wrapped_function.__module__,
                wrapped_function.__qualname__)
        else:
            action_type = wrapped_function.__name__

    if PY3 and include_args is not None:
        from inspect import signature
        sig = signature(wrapped_function)
        if set(include_args) - set(sig.parameters):
            raise ValueError(
                ("include_args ({}) lists arguments not in the "
                 "wrapped function").format(include_args)
            )

    @wraps(wrapped_function)
    def logging_wrapper(*args, **kwargs):
        callargs = getcallargs(wrapped_function, *args, **kwargs)
        # Remove self if it's included:
        callargs.pop("self", None)
        # Filter arguments to log, if necessary:
        if include_args is not None:
            callargs = {k: callargs[k] for k in include_args}
        with start_action(action_type=action_type, **callargs) as ctx:
            result = wrapped_function(*args, **kwargs)
            if include_result:
                ctx.add_success_fields(result=result)
            return result

    return logging_wrapper
378,985
Initialize WSGI Gateway instance with request. Args: req (HTTPRequest): current HTTP request
def __init__(self, req):
    """Initialize WSGI Gateway instance with request.

    Args:
        req (HTTPRequest): current HTTP request
    """
    super(Gateway, self).__init__(req)
    self.remaining_bytes_out = None
    self.started_response = False
    self.env = self.get_environ()
379,042
Initialize path info WSGI app dispatcher. Args: apps (dict[str,object]|list[tuple[str,object]]): URI prefix and WSGI app pairs
def __init__(self, apps):
    """Initialize path info WSGI app dispatcher.

    Args:
        apps (dict[str,object]|list[tuple[str,object]]): URI prefix
            and WSGI app pairs
    """
    try:
        apps = list(apps.items())
    except AttributeError:
        # Already a list of (prefix, app) pairs.
        pass

    # Sort the apps by len(path), descending, so the most specific
    # prefix wins during dispatch.
    apps.sort(key=lambda pair: len(pair[0]), reverse=True)

    # The path_prefix strings must start, but not end, with a slash.
    # Use "" instead of "/".
    self.apps = [(prefix.rstrip('/'), app) for prefix, app in apps]
379,051
Process incoming WSGI request. Ref: :pep:`3333` Args: environ (Mapping): a dict containing WSGI environment variables start_response (callable): function, which sets response status and headers Returns: list[bytes]: iterable containing bytes to be returned in HTTP response body
def __call__(self, environ, start_response):
    """Process incoming WSGI request.

    Ref: :pep:`3333`

    Args:
        environ (Mapping): a dict containing WSGI environment variables
        start_response (callable): function, which sets response status
            and headers

    Returns:
        list[bytes]: iterable containing bytes to be returned in HTTP
            response body
    """
    path = environ['PATH_INFO'] or '/'
    # The apps list should be sorted by length, descending.
    for prefix, app in self.apps:
        if path == prefix or path.startswith(prefix + '/'):
            environ = environ.copy()
            environ['SCRIPT_NAME'] = (
                environ.get('SCRIPT_NAME', '') + prefix)
            environ['PATH_INFO'] = path[len(prefix):]
            return app(environ, start_response)

    # No registered prefix matched; return an empty 404.
    start_response(
        '404 Not Found',
        [
            ('Content-Type', 'text/plain'),
            ('Content-Length', '0'),
        ],
    )
    return ['']
379,052
Initialize SizeCheckWrapper instance. Args: rfile (file): file of a limited size maxlen (int): maximum length of the file being read
def __init__(self, rfile, maxlen):
    """Initialize SizeCheckWrapper instance.

    Args:
        rfile (file): file of a limited size
        maxlen (int): maximum length of the file being read
    """
    self.rfile = rfile
    self.maxlen = maxlen
    # Running total of bytes consumed so far, checked against maxlen.
    self.bytes_read = 0
379,087
Read a chunk from rfile buffer and return it. Args: size (int): amount of data to read Returns: bytes: Chunk from rfile, limited by size if specified.
def read(self, size=None):
    """Read a chunk from rfile buffer and return it.

    Args:
        size (int): amount of data to read

    Returns:
        bytes: Chunk from rfile, limited by size if specified.
    """
    chunk = self.rfile.read(size)
    self.bytes_read += len(chunk)
    self._check_length()
    return chunk
379,089
Read a single line from rfile buffer and return it. Args: size (int): minimum amount of data to read Returns: bytes: One line from rfile.
def readline(self, size=None):
    """Read a single line from rfile buffer and return it.

    Args:
        size (int): minimum amount of data to read

    Returns:
        bytes: One line from rfile.
    """
    if size is not None:
        chunk = self.rfile.readline(size)
        self.bytes_read += len(chunk)
        self._check_length()
        return chunk

    # User didn't specify a size ...
    # We read the line in chunks to make sure it's not a 100MB line !
    chunks = []
    while True:
        chunk = self.rfile.readline(256)
        self.bytes_read += len(chunk)
        self._check_length()
        chunks.append(chunk)
        # See https://github.com/cherrypy/cherrypy/issues/421
        if chunk[-1:] == LF or len(chunk) < 256:
            return EMPTY.join(chunks)
379,090
Initialize KnownLengthRFile instance. Args: rfile (file): file of a known size content_length (int): length of the file being read
def __init__(self, rfile, content_length):
    """Initialize KnownLengthRFile instance.

    Args:
        rfile (file): file of a known size
        content_length (int): length of the file being read
    """
    self.rfile = rfile
    # Bytes left to read before reaching Content-Length.
    self.remaining = content_length
379,091
Initialize ChunkedRFile instance. Args: rfile (file): file encoded with the 'chunked' transfer encoding maxlen (int): maximum length of the file being read bufsize (int): size of the buffer used to read the file
def __init__(self, rfile, maxlen, bufsize=8192):
    """Initialize ChunkedRFile instance.

    Args:
        rfile (file): file encoded with the 'chunked' transfer encoding
        maxlen (int): maximum length of the file being read
        bufsize (int): size of the buffer used to read the file
    """
    self.rfile = rfile
    self.maxlen = maxlen
    self.bufsize = bufsize
    self.bytes_read = 0
    self.buffer = EMPTY
    self.closed = False
379,093
Read a chunk from rfile buffer and return it. Args: size (int): amount of data to read Returns: bytes: Chunk from rfile, limited by size if specified.
def read(self, size=None):
    """Read de-chunked data, limited by *size* if specified.

    Args:
        size (int): amount of data to read

    Returns:
        bytes: Chunk from rfile, limited by size if specified.
    """
    collected = EMPTY
    if size == 0:
        return collected

    while True:
        # Stop once the caller's limit has been satisfied.
        if size and len(collected) >= size:
            return collected

        if not self.buffer:
            self._fetch()
            if not self.buffer:
                # Nothing left in the chunked stream: EOF.
                return collected

        if size:
            wanted = size - len(collected)
            collected += self.buffer[:wanted]
            self.buffer = self.buffer[wanted:]
        else:
            collected += self.buffer
            self.buffer = EMPTY
379,095
Read a single line from rfile buffer and return it. Args: size (int): minimum amount of data to read Returns: bytes: One line from rfile.
def readline(self, size=None):
    """Read and return one line from the de-chunked stream.

    The returned line keeps its trailing LF (standard file ``readline``
    semantics). When *size* is given, at most *size* bytes are returned
    even if no newline has been seen yet.

    Args:
        size (int): maximum amount of data to read

    Returns:
        bytes: One line from rfile.
    """
    data = EMPTY

    if size == 0:
        return data

    while True:
        if size and len(data) >= size:
            return data

        if not self.buffer:
            self._fetch()
            if not self.buffer:
                # EOF
                return data

        newline_pos = self.buffer.find(LF)
        # Consume up to and including the newline (or the whole buffer
        # when no newline is present), respecting the size limit.
        if newline_pos == -1:
            take = len(self.buffer)
        else:
            take = newline_pos + 1
        if size:
            take = min(take, size - len(data))
        data += self.buffer[:take]
        self.buffer = self.buffer[take:]
        if newline_pos != -1 and take == newline_pos + 1:
            # The newline was consumed: the line is complete. The
            # previous implementation left the LF at buffer[0] and
            # never returned the found line, looping forever.
            return data
379,096
Initialize HTTPConnection instance. Args: server (HTTPServer): web server object receiving this request sock (socket.socket): the raw socket object (usually TCP) for this connection makefile (file): a fileobject class used to wrap the socket for reading and writing
def __init__(self, server, sock, makefile=MakeFile):
    """Initialize HTTPConnection instance.

    Args:
        server (HTTPServer): web server object receiving this request
        sock (socket.socket): the raw socket object (usually TCP) for
            this connection
        makefile (file): a fileobject class used to wrap the socket
    """
    self.server = server
    self.socket = sock
    # Buffered reader/writer wrappers around the raw socket.
    self.rfile = makefile(sock, 'rb', self.rbufsize)
    self.wfile = makefile(sock, 'wb', self.wbufsize)
    self.requests_seen = 0

    self.peercreds_enabled = self.server.peercreds_enabled
    self.peercreds_resolve_enabled = self.server.peercreds_resolve_enabled

    # Memoize the per-connection peer-credential lookups (each
    # application of the decorator gets its own one-entry cache).
    # Ref: https://stackoverflow.com/a/14946506/595220
    cache_one = lru_cache(maxsize=1)
    self.resolve_peer_creds = cache_one(self.resolve_peer_creds)
    self.get_peer_creds = cache_one(self.get_peer_creds)
379,105
Initialize HTTPServer instance. Args: bind_addr (tuple): network interface to listen to gateway (Gateway): gateway for processing HTTP requests minthreads (int): minimum number of threads for HTTP thread pool maxthreads (int): maximum number of threads for HTTP thread pool server_name (str): web server name to be advertised via Server HTTP header
def __init__(
    self, bind_addr, gateway,
    minthreads=10, maxthreads=-1, server_name=None,
    peercreds_enabled=False, peercreds_resolve_enabled=False,
):
    """Initialize HTTPServer instance.

    Args:
        bind_addr (tuple): network interface to listen to
        gateway (Gateway): gateway for processing HTTP requests
        minthreads (int): minimum number of threads for HTTP thread pool
        maxthreads (int): maximum number of threads for HTTP thread pool
        server_name (str): web server name to be advertised via Server
            HTTP header
        peercreds_enabled (bool): collect peer credentials per connection
        peercreds_resolve_enabled (bool): resolve collected credentials
            to names (only effective when peercreds_enabled is set)
    """
    self.bind_addr = bind_addr
    self.gateway = gateway

    # Guard against a zero/None minimum: always keep at least one worker.
    self.requests = threadpool.ThreadPool(
        self, min=minthreads or 1, max=maxthreads,
    )

    # Advertise the library's own version string unless overridden.
    self.server_name = server_name if server_name else self.version

    self.peercreds_enabled = peercreds_enabled
    # Resolution only makes sense when collection is enabled.
    self.peercreds_resolve_enabled = (
        peercreds_resolve_enabled and peercreds_enabled
    )
    self.clear_stats()
379,113
Write error message to log. Args: msg (str): error message level (int): logging level traceback (bool): add traceback to output or not
def error_log(self, msg='', level=20, traceback=False):
    """Write *msg* (and optionally the active traceback) to stderr.

    Args:
        msg (str): error message
        level (int): logging level (accepted for interface compatibility)
        traceback (bool): add traceback to output or not
    """
    # Override this in subclasses as desired
    sys.stderr.write(msg + '\n')
    sys.stderr.flush()
    if traceback:
        # NOTE: the ``traceback`` parameter shadows the module, which
        # the file imports under the ``traceback_`` alias.
        sys.stderr.write(traceback_.format_exc())
        sys.stderr.flush()
379,120
Initialize WorkerThread instance. Args: server (cheroot.server.HTTPServer): web server object receiving this request
def __init__(self, server):
    """Create a worker thread serving *server*.

    Args:
        server (cheroot.server.HTTPServer): web server object
            receiving this request
    """
    self.ready = False
    self.server = server

    # Totals accumulated from connections this worker has completed.
    self.requests_seen = 0
    self.bytes_read = 0
    self.bytes_written = 0
    self.start_time = None
    self.work_time = 0
    # Each stat adds the live connection's numbers to the accumulated
    # totals; while idle (start_time is None) the ``and trueyzero or``
    # trick short-circuits to zero instead of touching self.conn.
    self.stats = {
        'Requests': lambda s: self.requests_seen + (
            self.start_time is None
            and trueyzero
            or self.conn.requests_seen
        ),
        'Bytes Read': lambda s: self.bytes_read + (
            self.start_time is None
            and trueyzero
            or self.conn.rfile.bytes_read
        ),
        'Bytes Written': lambda s: self.bytes_written + (
            self.start_time is None
            and trueyzero
            or self.conn.wfile.bytes_written
        ),
        'Work Time': lambda s: self.work_time + (
            self.start_time is None
            and trueyzero
            or time.time() - self.start_time
        ),
        'Read Throughput': lambda s: s['Bytes Read'](s) / (
            s['Work Time'](s) or 1e-6
        ),
        'Write Throughput': lambda s: s['Bytes Written'](s) / (
            s['Work Time'](s) or 1e-6
        ),
    }
    threading.Thread.__init__(self)
379,128
Initialize HTTP requests queue instance. Args: server (cheroot.server.HTTPServer): web server object receiving this request min (int): minimum number of worker threads max (int): maximum number of worker threads accepted_queue_size (int): maximum number of active requests in queue accepted_queue_timeout (int): timeout for putting request into queue
def __init__(
    self, server, min=10, max=-1,
    accepted_queue_size=-1, accepted_queue_timeout=10,
):
    """Initialize the HTTP requests queue.

    Args:
        server (cheroot.server.HTTPServer): web server object
            receiving this request
        min (int): minimum number of worker threads
        max (int): maximum number of worker threads
        accepted_queue_size (int): maximum number of active requests
            in queue
        accepted_queue_timeout (int): timeout for putting request
            into queue
    """
    self.server = server
    self.min = min
    self.max = max
    self._threads = []
    self._queue = queue.Queue(maxsize=accepted_queue_size)
    self._queue_put_timeout = accepted_queue_timeout
    # Expose the queue's get() directly as the pool's get().
    self.get = self._queue.get
379,130
Put request into queue. Args: obj (cheroot.server.HTTPConnection): HTTP connection waiting to be processed
def put(self, obj):
    """Put a request into the queue.

    Args:
        obj (cheroot.server.HTTPConnection): HTTP connection waiting
            to be processed
    """
    self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
    # The shutdown sentinel is queued like any other item; there is
    # simply nothing further to do for it here.
    if obj is _SHUTDOWNREQUEST:
        return
379,131
Terminate all worker threads. Args: timeout (int): time to wait for threads to stop gracefully
def stop(self, timeout=5):
    """Terminate all worker threads.

    Args:
        timeout (int): time to wait for threads to stop gracefully;
            None or a negative value waits indefinitely
    """
    # Must shut down threads here so the code that calls
    # this method can know when all threads are stopped.
    for worker in self._threads:
        self._queue.put(_SHUTDOWNREQUEST)

    # Don't join the current thread (when stop is called inside a
    # request). Thread.isAlive()/currentThread() were removed or
    # deprecated in Python 3.9+; use the PEP 8 names, available since
    # Python 2.6.
    current = threading.current_thread()
    if timeout is not None and timeout >= 0:
        endtime = time.time() + timeout
    while self._threads:
        worker = self._threads.pop()
        if worker is not current and worker.is_alive():
            try:
                if timeout is None or timeout < 0:
                    worker.join()
                else:
                    remaining_time = endtime - time.time()
                    if remaining_time > 0:
                        worker.join(remaining_time)
                    if worker.is_alive():
                        # We exhausted the timeout.
                        # Forcibly shut down the socket.
                        c = worker.conn
                        if c and not c.rfile.closed:
                            try:
                                c.socket.shutdown(socket.SHUT_RD)
                            except TypeError:
                                # pyOpenSSL sockets don't take an arg
                                c.socket.shutdown()
                        worker.join()
            except (
                AssertionError,
                # Ignore repeated Ctrl-C.
                # See
                # https://github.com/cherrypy/cherrypy/issues/691.
                KeyboardInterrupt,
            ):
                pass
379,135
Return the changes needed for this refactoring Parameters: - `dest_attr`: the name of the destination attribute - `new_name`: the name of the new method; if `None` uses the old name - `resources` can be a list of `rope.base.resources.File`\s to apply this refactoring on. If `None`, the restructuring will be applied to all python files.
def get_changes(self, dest_attr, new_name=None, resources=None,
                task_handle=taskhandle.NullTaskHandle()):
    """Return the changes needed for moving this method to `dest_attr`.

    Parameters:

    - `dest_attr`: the name of the destination attribute
    - `new_name`: the name of the new method; if `None` uses the old name
    - `resources` can be a list of `rope.base.resources.File` objects to
      apply this refactoring on.  If `None`, the restructuring will be
      applied to all python files.

    Note: the default `task_handle` instance is created once at
    definition time and shared across calls (a stateless null object).
    """
    changes = ChangeSet('Moving method <%s>' % self.method_name)
    if resources is None:
        resources = self.project.get_python_files()
    if new_name is None:
        new_name = self.get_method_name()
    # Edits removing/redirecting the method in the source (old) class.
    resource1, start1, end1, new_content1 = \
        self._get_changes_made_by_old_class(dest_attr, new_name)
    collector1 = codeanalyze.ChangeCollector(resource1.read())
    collector1.add_change(start1, end1, new_content1)
    # Edits inserting the method into the destination (new) class.
    resource2, start2, end2, new_content2 = \
        self._get_changes_made_by_new_class(dest_attr, new_name)
    if resource1 == resource2:
        # Both classes live in the same file: merge into one collector.
        collector1.add_change(start2, end2, new_content2)
    else:
        collector2 = codeanalyze.ChangeCollector(resource2.read())
        collector2.add_change(start2, end2, new_content2)
        result = collector2.get_changed()
        # The moved method may rely on imports from its old module;
        # copy the ones it actually uses into the destination module.
        import_tools = importutils.ImportTools(self.project)
        new_imports = self._get_used_imports(import_tools)
        if new_imports:
            goal_pymodule = libutils.get_string_module(
                self.project, result, resource2)
            result = _add_imports_to_module(
                import_tools, goal_pymodule, new_imports)
        # Only record changes for files the caller asked to touch.
        if resource2 in resources:
            changes.add_change(ChangeContents(resource2, result))
    if resource1 in resources:
        changes.add_change(ChangeContents(resource1,
                                          collector1.get_changed()))
    return changes
379,354
Construct an ObtainLease request payload struct. Args: unique_identifier (string): The ID of the managed object (e.g., a public key) to obtain a lease for. Optional, defaults to None.
def __init__(self, unique_identifier=None):
    """Construct an ObtainLease request payload struct.

    Args:
        unique_identifier (string): The ID of the managed object (e.g.,
            a public key) to obtain a lease for. Optional, defaults to
            None.
    """
    super(ObtainLeaseRequestPayload, self).__init__(
        enums.Tags.REQUEST_PAYLOAD
    )
    # Initialize the backing field, then assign through the public name
    # so the class's property setter (defined elsewhere) runs.
    self._unique_identifier = None
    self.unique_identifier = unique_identifier
380,473
Construct a Cancel response payload struct. Args: asynchronous_correlation_value (bytes): The ID of a pending operation that was cancelled, in bytes. Optional, defaults to None. cancellation_result (enum): A CancellationResult enumeration specifying the result of canceling the operation. Optional, defaults to None.
def __init__(self,
             asynchronous_correlation_value=None,
             cancellation_result=None):
    """Construct a Cancel response payload struct.

    Args:
        asynchronous_correlation_value (bytes): The ID of a pending
            operation that was cancelled, in bytes. Optional, defaults
            to None.
        cancellation_result (enum): A CancellationResult enumeration
            specifying the result of canceling the operation. Optional,
            defaults to None.
    """
    super(CancelResponsePayload, self).__init__(
        enums.Tags.RESPONSE_PAYLOAD
    )
    # Backing fields first; the public assignments below go through the
    # class's property setters (defined elsewhere).
    self._asynchronous_correlation_value = None
    self._cancellation_result = None
    self.asynchronous_correlation_value = asynchronous_correlation_value
    self.cancellation_result = cancellation_result
380,484
Construct a HashingAlgorithm object. Args: value (HashingAlgorithm): A HashingAlgorithm enumeration value, (e.g., HashingAlgorithm.MD5). Optional, defaults to HashingAlgorithm.SHA_256.
def __init__(self, value=HashingAlgorithmEnum.SHA_256):
    """Construct a HashingAlgorithm attribute.

    Args:
        value (HashingAlgorithm): A HashingAlgorithm enumeration value,
            (e.g., HashingAlgorithm.MD5). Optional, defaults to
            HashingAlgorithm.SHA_256.
    """
    super(HashingAlgorithm, self).__init__(
        enums.HashingAlgorithm, value, Tags.HASHING_ALGORITHM)
380,502
Construct a CertificateType object. Args: value (CertificateType): A CertificateType enumeration value, (e.g., CertificateType.PGP). Optional, defaults to CertificateType.X_509.
def __init__(self, value=enums.CertificateType.X_509):
    """Construct a CertificateType attribute.

    Args:
        value (CertificateType): A CertificateType enumeration value,
            (e.g., CertificateType.PGP). Optional, defaults to
            CertificateType.X_509.
    """
    super(CertificateType, self).__init__(
        enums.CertificateType, value, Tags.CERTIFICATE_TYPE)
380,520
Construct a DigestValue object. Args: value (bytes): The bytes of the hash. Optional, defaults to the empty byte string.
def __init__(self, value=b''):
    """Construct a DigestValue byte string.

    Args:
        value (bytes): The bytes of the hash. Optional, defaults to
            the empty byte string.
    """
    super(DigestValue, self).__init__(value, Tags.DIGEST_VALUE)
380,521
Read the data encoding the Digest object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0.
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Read the data encoding the Digest object and decode it into its
    constituent parts.

    Args:
        istream (Stream): A data stream containing encoded object data,
            supporting a read method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(Digest, self).read(istream, kmip_version=kmip_version)
    # Decode from a sub-stream limited to this struct's encoded length.
    tstream = BytearrayStream(istream.read(self.length))
    # Sub-fields are decoded in the same fixed order used by write().
    self.hashing_algorithm.read(tstream, kmip_version=kmip_version)
    self.digest_value.read(tstream, kmip_version=kmip_version)
    self.key_format_type.read(tstream, kmip_version=kmip_version)
    self.is_oversized(tstream)
    self.validate()
380,523
Write the data encoding the Digest object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the data encoding the Digest object to a stream.

    Args:
        ostream (Stream): A data stream in which to encode object data,
            supporting a write method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    # Encode the fields into a temporary buffer so the total length is
    # known before the struct header is written.
    tstream = BytearrayStream()
    self.hashing_algorithm.write(tstream, kmip_version=kmip_version)
    self.digest_value.write(tstream, kmip_version=kmip_version)
    self.key_format_type.write(tstream, kmip_version=kmip_version)
    self.length = tstream.length()
    super(Digest, self).write(ostream, kmip_version=kmip_version)
    ostream.write(tstream.buffer)
380,524
Construct an ApplicationNamespace object. Args: value (str): A string representing a namespace. Optional, defaults to None.
def __init__(self, value=None):
    """Construct an ApplicationNamespace text string.

    Args:
        value (str): A string representing a namespace. Optional,
            defaults to None.
    """
    super(ApplicationNamespace, self).__init__(
        value, Tags.APPLICATION_NAMESPACE)
380,533
Construct an ApplicationData object. Args: value (str): A string representing data for a particular namespace. Optional, defaults to None.
def __init__(self, value=None):
    """Construct an ApplicationData text string.

    Args:
        value (str): A string representing data for a particular
            namespace. Optional, defaults to None.
    """
    super(ApplicationData, self).__init__(value, Tags.APPLICATION_DATA)
380,534
Construct an ApplicationSpecificInformation object. Args: application_namespace (ApplicationNamespace): The name of a namespace supported by the server. Optional, defaults to None. application_data (ApplicationData): String data relevant to the specified namespace. Optional, defaults to None.
def __init__(self, application_namespace=None, application_data=None):
    """Construct an ApplicationSpecificInformation attribute.

    Args:
        application_namespace (ApplicationNamespace): The name of a
            namespace supported by the server. Optional, defaults to
            None.
        application_data (ApplicationData): String data relevant to the
            specified namespace. Optional, defaults to None.
    """
    super(ApplicationSpecificInformation, self).__init__(
        Tags.APPLICATION_SPECIFIC_INFORMATION)
    # Substitute empty sub-structures for omitted arguments.
    self.application_namespace = (
        ApplicationNamespace()
        if application_namespace is None
        else application_namespace
    )
    self.application_data = (
        ApplicationData()
        if application_data is None
        else application_data
    )
    self.validate()
380,535
Read the data encoding the ApplicationSpecificInformation object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0.
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Read the data encoding the ApplicationSpecificInformation object
    and decode it into its constituent parts.

    Args:
        istream (Stream): A data stream containing encoded object data,
            supporting a read method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(ApplicationSpecificInformation, self).read(
        istream,
        kmip_version=kmip_version
    )
    # Decode from a sub-stream limited to this struct's encoded length.
    tstream = BytearrayStream(istream.read(self.length))
    # Sub-fields are decoded in the same fixed order used by write().
    self.application_namespace.read(tstream, kmip_version=kmip_version)
    self.application_data.read(tstream, kmip_version=kmip_version)
    self.is_oversized(tstream)
    self.validate()
380,536
Write the data encoding the ApplicationSpecificInformation object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the data encoding the ApplicationSpecificInformation object
    to a stream.

    Args:
        ostream (Stream): A data stream in which to encode object data,
            supporting a write method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    # Encode the fields into a temporary buffer so the total length is
    # known before the struct header is written.
    tstream = BytearrayStream()
    self.application_namespace.write(tstream, kmip_version=kmip_version)
    self.application_data.write(tstream, kmip_version=kmip_version)
    self.length = tstream.length()
    super(ApplicationSpecificInformation, self).write(
        ostream,
        kmip_version=kmip_version
    )
    ostream.write(tstream.buffer)
380,537
Read the data encoding the DerivationParameters struct and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0.
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Read the data encoding the DerivationParameters struct and decode
    it into its constituent parts.

    Args:
        input_stream (stream): A data stream containing encoded object
            data, supporting a read method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(DerivationParameters, self).read(
        input_stream,
        kmip_version=kmip_version
    )
    # Decode from a sub-stream limited to this struct's encoded length.
    local_stream = BytearrayStream(input_stream.read(self.length))

    # Every sub-field is optional; each is decoded only when its tag
    # appears next in the stream.
    if self.is_tag_next(
            enums.Tags.CRYPTOGRAPHIC_PARAMETERS,
            local_stream
    ):
        self._cryptographic_parameters = CryptographicParameters()
        self._cryptographic_parameters.read(
            local_stream,
            kmip_version=kmip_version
        )

    if self.is_tag_next(enums.Tags.INITIALIZATION_VECTOR, local_stream):
        self._initialization_vector = ByteString(
            tag=enums.Tags.INITIALIZATION_VECTOR
        )
        self._initialization_vector.read(
            local_stream,
            kmip_version=kmip_version
        )

    if self.is_tag_next(enums.Tags.DERIVATION_DATA, local_stream):
        self._derivation_data = ByteString(tag=enums.Tags.DERIVATION_DATA)
        self._derivation_data.read(local_stream, kmip_version=kmip_version)

    if self.is_tag_next(enums.Tags.SALT, local_stream):
        self._salt = ByteString(tag=enums.Tags.SALT)
        self._salt.read(local_stream, kmip_version=kmip_version)

    if self.is_tag_next(Tags.ITERATION_COUNT, local_stream):
        self._iteration_count = Integer(tag=Tags.ITERATION_COUNT)
        self._iteration_count.read(local_stream, kmip_version=kmip_version)

    self.is_oversized(local_stream)
380,550
Write the data encoding the DerivationParameters struct to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the data encoding the DerivationParameters struct to a
    stream.

    Args:
        output_stream (stream): A data stream in which to encode object
            data, supporting a write method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    local_buffer = BytearrayStream()
    # Every field is optional; encode only the populated ones, in the
    # order mandated for this struct.
    optional_fields = (
        self._cryptographic_parameters,
        self._initialization_vector,
        self._derivation_data,
        self._salt,
        self._iteration_count,
    )
    for field in optional_fields:
        if field:
            field.write(local_buffer, kmip_version=kmip_version)
    self.length = local_buffer.length()
    super(DerivationParameters, self).write(
        output_stream,
        kmip_version=kmip_version
    )
    output_stream.write(local_buffer.buffer)
380,551
Read the data encoding the Get request payload and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0.
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Read the data encoding the Get request payload and decode it into
    its constituent parts.

    Args:
        input_stream (stream): A data stream containing encoded object
            data, supporting a read method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(GetRequestPayload, self).read(
        input_stream,
        kmip_version=kmip_version
    )
    # Decode from a sub-stream limited to this payload's encoded length.
    local_stream = utils.BytearrayStream(input_stream.read(self.length))

    # Every sub-field is optional; each is decoded only when its tag
    # appears next in the stream.
    if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
        self._unique_identifier = primitives.TextString(
            tag=enums.Tags.UNIQUE_IDENTIFIER
        )
        self._unique_identifier.read(
            local_stream,
            kmip_version=kmip_version
        )

    if self.is_tag_next(enums.Tags.KEY_FORMAT_TYPE, local_stream):
        self._key_format_type = primitives.Enumeration(
            enum=enums.KeyFormatType,
            tag=enums.Tags.KEY_FORMAT_TYPE
        )
        self._key_format_type.read(
            local_stream,
            kmip_version=kmip_version
        )

    if self.is_tag_next(enums.Tags.KEY_COMPRESSION_TYPE, local_stream):
        self._key_compression_type = primitives.Enumeration(
            enum=enums.KeyCompressionType,
            tag=enums.Tags.KEY_COMPRESSION_TYPE
        )
        self._key_compression_type.read(
            local_stream,
            kmip_version=kmip_version
        )

    if self.is_tag_next(
            enums.Tags.KEY_WRAPPING_SPECIFICATION,
            local_stream
    ):
        self._key_wrapping_specification = \
            objects.KeyWrappingSpecification()
        self._key_wrapping_specification.read(
            local_stream,
            kmip_version=kmip_version
        )

    self.is_oversized(local_stream)
380,559
Write the data encoding the Get request payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the data encoding the Get request payload to a stream.

    Args:
        output_stream (stream): A data stream in which to encode object
            data, supporting a write method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    local_buffer = utils.BytearrayStream()
    # Every field is optional; encode only the ones that were supplied,
    # in the order mandated for this payload.
    for field in (
        self._unique_identifier,
        self._key_format_type,
        self._key_compression_type,
        self._key_wrapping_specification,
    ):
        if field is not None:
            field.write(local_buffer, kmip_version=kmip_version)
    self.length = local_buffer.length()
    super(GetRequestPayload, self).write(
        output_stream,
        kmip_version=kmip_version
    )
    output_stream.write(local_buffer.buffer)
380,560
Create a KmipEngine. Args: policies: The operation policies to enforce on requests. Optional, defaults to None. database_path (string): The path to the SQLite database file used to store all server data. Optional, defaults to None. If None, the database path defaults to '/tmp/pykmip.database'.
def __init__(self, policies=None, database_path=None):
    """Create a KmipEngine.

    Args:
        policies: The operation policies to enforce on requests.
            Optional, defaults to None.
        database_path (string): The path to the SQLite database file
            used to store all server data. Optional, defaults to None.
            If None, the database path defaults to
            '/tmp/pykmip.database'.
    """
    self._logger = logging.getLogger('kmip.server.engine')
    self._cryptography_engine = engine.CryptographyEngine()
    self.database_path = 'sqlite:///{}'.format(database_path)
    if not database_path:
        # No (or empty) path given: fall back to the default database.
        self.database_path = 'sqlite:////tmp/pykmip.database'
    # check_same_thread=False allows the SQLite connection to be used
    # from threads other than the one that created it.
    self._data_store = sqlalchemy.create_engine(
        self.database_path,
        echo=False,
        connect_args={'check_same_thread': False}
    )
    sqltypes.Base.metadata.create_all(self._data_store)
    self._data_store_session_factory = sqlalchemy.orm.sessionmaker(
        bind=self._data_store
    )
    self._lock = threading.RLock()
    # ID placeholder managed by operations elsewhere in the engine.
    self._id_placeholder = None
    # Protocol versions supported by this engine, newest first.
    self._protocol_versions = [
        contents.ProtocolVersion(1, 4),
        contents.ProtocolVersion(1, 3),
        contents.ProtocolVersion(1, 2),
        contents.ProtocolVersion(1, 1),
        contents.ProtocolVersion(1, 0)
    ]
    # Index 2 selects KMIP 1.2 as the default working version.
    self.default_protocol_version = self._protocol_versions[2]
    self._protocol_version = self._protocol_versions[2]
    # Maps KMIP object types to their storage classes; None marks
    # object types this engine does not support.
    self._object_map = {
        enums.ObjectType.CERTIFICATE: objects.X509Certificate,
        enums.ObjectType.SYMMETRIC_KEY: objects.SymmetricKey,
        enums.ObjectType.PUBLIC_KEY: objects.PublicKey,
        enums.ObjectType.PRIVATE_KEY: objects.PrivateKey,
        enums.ObjectType.SPLIT_KEY: None,
        enums.ObjectType.TEMPLATE: None,
        enums.ObjectType.SECRET_DATA: objects.SecretData,
        enums.ObjectType.OPAQUE_DATA: objects.OpaqueObject
    }
    self._attribute_policy = policy.AttributePolicy(self._protocol_version)
    self._operation_policies = policies
    # Per-request client identity; populated during request processing
    # elsewhere — confirm the exact two-element structure with callers.
    self._client_identity = [None, None]
380,592
Construct a Decrypt response payload struct. Args: unique_identifier (string): The ID of the managed object (e.g., a symmetric key) used for decryption. Required for encoding and decoding. data (bytes): The decrypted data in binary form. Required for encoding and decoding.
def __init__(self, unique_identifier=None, data=None):
    """Construct a Decrypt response payload struct.

    Args:
        unique_identifier (string): The ID of the managed object (e.g.,
            a symmetric key) used for decryption. Required for encoding
            and decoding.
        data (bytes): The decrypted data in binary form. Required for
            encoding and decoding.
    """
    super(DecryptResponsePayload, self).__init__(
        enums.Tags.RESPONSE_PAYLOAD
    )
    # Backing fields first; the public assignments below go through the
    # class's property setters (defined elsewhere).
    self._unique_identifier = None
    self._data = None
    self.unique_identifier = unique_identifier
    self.data = data
380,657
Construct a RevokeRequestPayload object. Args: unique_identifier (UniqueIdentifier): The UUID of a managed cryptographic object. revocation_reason (RevocationReason): The reason why the object was revoked. compromise_occurrence_date (DateTime): the datetime when the object was first believed to be compromised.
def __init__(self, unique_identifier=None, revocation_reason=None,
             compromise_occurrence_date=None):
    """Construct a RevokeRequestPayload object.

    Args:
        unique_identifier (UniqueIdentifier): The UUID of a managed
            cryptographic object.
        revocation_reason (RevocationReason): The reason why the object
            was revoked.
        compromise_occurrence_date (DateTime): The datetime when the
            object was first believed to be compromised.
    """
    super(RevokeRequestPayload, self).__init__(
        tag=enums.Tags.REQUEST_PAYLOAD)
    self.unique_identifier = unique_identifier
    self.compromise_occurrence_date = compromise_occurrence_date
    self.revocation_reason = revocation_reason
    if self.revocation_reason is None:
        # The revocation reason is written unconditionally by write(),
        # so default to an empty RevocationReason structure.
        self.revocation_reason = objects.RevocationReason()
    self.validate()
380,659
Read the data encoding the RevokeRequestPayload object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0.
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Read the data encoding the RevokeRequestPayload object and decode
    it into its constituent parts.

    Args:
        istream (Stream): A data stream containing encoded object data,
            supporting a read method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(RevokeRequestPayload, self).read(
        istream,
        kmip_version=kmip_version
    )
    # Decode from a sub-stream limited to this payload's encoded length.
    tstream = BytearrayStream(istream.read(self.length))
    self.unique_identifier = attributes.UniqueIdentifier()
    self.unique_identifier.read(tstream, kmip_version=kmip_version)
    self.revocation_reason = objects.RevocationReason()
    self.revocation_reason.read(tstream, kmip_version=kmip_version)
    # The compromise date is optional; read it only when its tag is next.
    if self.is_tag_next(enums.Tags.COMPROMISE_OCCURRENCE_DATE, tstream):
        self.compromise_occurrence_date = primitives.DateTime(
            tag=enums.Tags.COMPROMISE_OCCURRENCE_DATE)
        self.compromise_occurrence_date.read(
            tstream,
            kmip_version=kmip_version
        )
    self.is_oversized(tstream)
    self.validate()
380,660
Write the data encoding the RevokeRequestPayload object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the data encoding the RevokeRequestPayload object to a
    stream.

    Args:
        ostream (Stream): A data stream in which to encode object data,
            supporting a write method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    payload_buffer = BytearrayStream()
    # Write the payload contents: identifier and compromise date are
    # optional, the revocation reason is mandatory.
    if self.unique_identifier is not None:
        self.unique_identifier.write(
            payload_buffer, kmip_version=kmip_version)
    self.revocation_reason.write(payload_buffer, kmip_version=kmip_version)
    if self.compromise_occurrence_date is not None:
        self.compromise_occurrence_date.write(
            payload_buffer,
            kmip_version=kmip_version
        )
    # Write the header (with the computed length) and then the value.
    self.length = payload_buffer.length()
    super(RevokeRequestPayload, self).write(
        ostream,
        kmip_version=kmip_version
    )
    ostream.write(payload_buffer.buffer)
380,661
Construct a RevokeResponsePayload object. Args: unique_identifier (UniqueIdentifier): The UUID of a managed cryptographic object.
def __init__(self, unique_identifier=None):
    """Construct a RevokeResponsePayload object.

    Args:
        unique_identifier (UniqueIdentifier): The UUID of a managed
            cryptographic object.
    """
    super(RevokeResponsePayload, self).__init__(
        tag=enums.Tags.RESPONSE_PAYLOAD)
    # Fall back to an empty identifier when none is supplied.
    self.unique_identifier = (
        attributes.UniqueIdentifier()
        if unique_identifier is None
        else unique_identifier
    )
    self.validate()
380,663
Read the data encoding the RevokeResponsePayload object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0.
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Read the data encoding the RevokeResponsePayload object and
    decode it into its constituent parts.

    Args:
        istream (Stream): A data stream containing encoded object data,
            supporting a read method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(RevokeResponsePayload, self).read(
        istream,
        kmip_version=kmip_version
    )
    # Decode from a sub-stream limited to this payload's encoded length.
    tstream = BytearrayStream(istream.read(self.length))
    self.unique_identifier = attributes.UniqueIdentifier()
    self.unique_identifier.read(tstream, kmip_version=kmip_version)
    self.is_oversized(tstream)
    self.validate()
380,664
Create a BasicAuthenticationSuite object. Args: cipher_suites (list): A list of strings representing the names of cipher suites to use. Overrides the default set of cipher suites. Optional, defaults to None.
def __init__(self, cipher_suites=None):
    """Create a BasicAuthenticationSuite object.

    Args:
        cipher_suites (list): A list of strings representing the names
            of cipher suites to use. Overrides the default set of
            cipher suites. Optional, defaults to None.
    """
    super(BasicAuthenticationSuite, self).__init__(cipher_suites)
    # The basic suite pins the connection protocol to TLS 1.0.
    # NOTE(review): ssl.PROTOCOL_TLSv1 is deprecated in modern Python.
    self._protocol = ssl.PROTOCOL_TLSv1
380,675
Create a TLS12AuthenticationSuite object. Args: cipher_suites (list): A list of strings representing the names of cipher suites to use. Overrides the default set of cipher suites. Optional, defaults to None.
def __init__(self, cipher_suites=None):
    """Create a TLS12AuthenticationSuite object.

    Args:
        cipher_suites (list): A list of strings representing the names
            of cipher suites to use. Overrides the default set of
            cipher suites. Optional, defaults to None.
    """
    super(TLS12AuthenticationSuite, self).__init__(cipher_suites)
    # This suite pins the connection protocol to TLS 1.2.
    self._protocol = ssl.PROTOCOL_TLSv1_2
380,676
Load configuration settings from the file pointed to by path. This will overwrite all current setting values. Args: path (string): The path to the configuration file containing the settings to load. Required. Raises: ConfigurationError: Raised if the path does not point to an existing file or if a setting value is invalid.
def load_settings(self, path):
    """Load configuration settings from the file pointed to by *path*.

    This will overwrite all current setting values.

    Args:
        path (string): The path to the configuration file containing
            the settings to load. Required.

    Raises:
        ConfigurationError: Raised if the path does not point to an
            existing file or if a setting value is invalid.
    """
    # Fail fast when the file is missing instead of parsing nothing.
    if not os.path.exists(path):
        raise exceptions.ConfigurationError(
            "The server configuration file ('{0}') could not be "
            "located.".format(path)
        )

    self._logger.info(
        "Loading server configuration settings from: {0}".format(path)
    )

    config = configparser.ConfigParser()
    config.read(path)
    self._parse_settings(config)
    self.parse_auth_settings(config)
380,706
Returns the integer value of the usage mask bitmask. This value is stored in the database. Args: value(list<enums.CryptographicUsageMask>): list of enums in the usage mask dialect(string): SQL dialect
def process_bind_param(self, value, dialect):
    """Return the integer bitmask for the given usage-mask enums.

    This value is stored in the database.

    Args:
        value (list<enums.CryptographicUsageMask>): list of enums in
            the usage mask
        dialect (string): SQL dialect
    """
    combined = 0x00
    # OR every flag's bit into the stored integer.
    for mask in value:
        combined |= mask.value
    return combined
380,718
Returns a new list of enums.CryptographicUsageMask Enums. This converts the integer value into the list of enums. Args: value(int): The integer value stored in the database that is used to create the list of enums.CryptographicUsageMask Enums. dialect(string): SQL dialect
def process_result_value(self, value, dialect):
    """Return the list of usage-mask enums encoded in *value*.

    This converts the stored integer bitmask back into the list of
    enums.CryptographicUsageMask members.

    Args:
        value (int): The integer value stored in the database that is
            used to create the list of enums.CryptographicUsageMask
            members.
        dialect (string): SQL dialect
    """
    if not value:
        return list()
    # Keep every flag whose bit is set in the stored integer.
    return [
        mask for mask in enums.CryptographicUsageMask
        if mask.value & value
    ]
380,719
Create a new EnumType. This new EnumType requires a class object in the constructor. The class is used to construct new instances of the Enum when the integer value is retrieved from the database. Args: cls(class): An Enum class used to create new instances from integer values.
def __init__(self, cls):
    """Create a new EnumType.

    This new EnumType requires a class object in the constructor. The
    class is used to construct new instances of the Enum when the
    integer value is retrieved from the database.

    Args:
        cls (class): An Enum class used to create new instances from
            integer values.
    """
    super(EnumType, self).__init__()
    # Enum class used to rebuild members from stored integer values.
    self._cls = cls
380,720
Create a LongInteger. Args: value (int): The value of the LongInteger. Optional, defaults to 0. tag (Tags): An enumeration defining the tag of the LongInteger. Optional, defaults to Tags.DEFAULT.
def __init__(self, value=0, tag=enums.Tags.DEFAULT):
    """Create a LongInteger.

    Args:
        value (int): The value of the LongInteger. Optional, defaults
            to 0.
        tag (Tags): An enumeration defining the tag of the LongInteger.
            Optional, defaults to Tags.DEFAULT.
    """
    super(LongInteger, self).__init__(tag, type=enums.Types.LONG_INTEGER)
    self.value = value
    # Long integers have a fixed encoded length (8 bytes; see write()).
    self.length = LongInteger.LENGTH
    self.validate()
380,744
Write the encoding of the LongInteger to the output stream. Args: ostream (stream): A buffer to contain the encoded bytes of a LongInteger. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the encoding of the LongInteger to the output stream.

    Args:
        ostream (stream): A buffer to contain the encoded bytes of a
            LongInteger. Usually a BytearrayStream object. Required.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    # Write the item header first, then the value as a signed 64-bit
    # big-endian integer.
    super(LongInteger, self).write(ostream, kmip_version=kmip_version)
    ostream.write(pack('!q', self.value))
380,746
Write the encoding of the BigInteger to the output stream. Args: ostream (Stream): A buffer to contain the encoded bytes of a BigInteger object. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the encoding of the BigInteger to the output stream.

    Args:
        ostream (Stream): A buffer to contain the encoded bytes of a
            BigInteger object. Usually a BytearrayStream object.
            Required.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    # Convert the value to binary and pad it to a multiple of 64 bits.
    # When the length is already a multiple of 64, a full extra 64-bit
    # block of zeros is prepended (the magnitude's leading bit is 1 in
    # that case, so the extra block keeps the sign bit 0).
    binary = "{0:b}".format(abs(self.value))
    binary = ("0" * (64 - (len(binary) % 64))) + binary
    # If the value is negative, convert via two's complement: invert
    # every bit (via the 'i' placeholder swap), then add one, which is
    # implemented by flipping the lowest 0 bit to 1 and zeroing all
    # bits below it.
    if self.value < 0:
        binary = binary.replace('1', 'i')
        binary = binary.replace('0', '1')
        binary = binary.replace('i', '0')
        pivot = binary.rfind('0')
        binary = binary[0:pivot] + '1' + ('0' * len(binary[pivot + 1:]))
    # Pack each group of 8 bits into one byte of the encoded value.
    hexadecimal = b''
    for i in range(0, len(binary), 8):
        byte = binary[i:i + 8]
        byte = int(byte, 2)
        hexadecimal += struct.pack('!B', byte)
    self.length = len(hexadecimal)
    super(BigInteger, self).write(ostream, kmip_version=kmip_version)
    ostream.write(hexadecimal)
380,751
Create an Enumeration. Args: enum (class): The enumeration class of which value is a member (e.g., Tags). Required. value (int): The value of the Enumeration, must be an integer (e.g., Tags.DEFAULT). Optional, defaults to None. tag (Tags): An enumeration defining the tag of the Enumeration. Optional, defaults to Tags.DEFAULT.
def __init__(self, enum, value=None, tag=enums.Tags.DEFAULT):
    """Create an Enumeration.

    Args:
        enum (class): The enumeration class of which value is a member
            (e.g., Tags). Required.
        value (int): The value of the Enumeration, must be an integer
            (e.g., Tags.DEFAULT). Optional, defaults to None.
        tag (Tags): An enumeration defining the tag of the Enumeration.
            Optional, defaults to Tags.DEFAULT.
    """
    super(Enumeration, self).__init__(tag, enums.Types.ENUMERATION)
    self.value = value
    self.enum = enum
    # Enumerations have a fixed encoded length.
    self.length = Enumeration.LENGTH
    self.validate()
380,755
Create a Boolean object. Args: value (bool): The value of the Boolean. Optional, defaults to True. tag (Tags): An enumeration defining the tag of the Boolean object. Optional, defaults to Tags.DEFAULT.
def __init__(self, value=True, tag=enums.Tags.DEFAULT):
    """Create a Boolean object.

    Args:
        value (bool): The value of the Boolean. Optional, defaults
            to True.
        tag (Tags): An enumeration defining the tag of the Boolean
            object. Optional, defaults to Tags.DEFAULT.
    """
    super(Boolean, self).__init__(tag, type=enums.Types.BOOLEAN)
    self.logger = logging.getLogger(__name__)
    self.value = value
    # Booleans have a fixed encoded length.
    self.length = self.LENGTH
    self.validate()
380,760
Write the value of the Boolean object to the output stream. Args: ostream (Stream): A buffer to contain the encoded bytes of the value of a Boolean object. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0.
def write_value(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the value of the Boolean object to the output stream.

    Args:
        ostream (Stream): A buffer to contain the encoded bytes of the
            value of a Boolean object. Usually a BytearrayStream object.
            Required.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    try:
        # Encode the boolean as an unsigned 64-bit big-endian integer.
        encoded = pack('!Q', self.value)
        ostream.write(encoded)
    except Exception:
        self.logger.error("Error writing boolean value to buffer")
        raise
380,762