docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Search for the specified filesystem object, resolving all links. Args: file_path: Specifies the target FakeFile object to retrieve. follow_symlinks: If `False`, the link itself is resolved, otherwise the object linked to. allow_fd: If `True`, `file_path` may be an open file descriptor Returns: The FakeFile object corresponding to `file_path`. Raises: IOError: if the object is not found.
def resolve(self, file_path, follow_symlinks=True, allow_fd=False): if isinstance(file_path, int): if allow_fd and sys.version_info >= (3, 3): return self.get_open_file(file_path).get_object() raise TypeError('path should be string, bytes or ' 'os.PathLike (if supported), not int') if follow_symlinks: file_path = make_string_path(file_path) return self.get_object_from_normpath(self.resolve_path(file_path)) return self.lresolve(file_path)
370,291
Search for the specified object, resolving only parent links. This is analogous to the stat/lstat difference. This resolves links *to* the object but not of the final object itself. Args: path: Specifies target FakeFile object to retrieve. Returns: The FakeFile object corresponding to path. Raises: IOError: if the object is not found.
def lresolve(self, path): path = make_string_path(path) if path == self.root.name: # The root directory will never be a link return self.root # remove trailing separator path = self._path_without_trailing_separators(path) path = self._original_path(path) parent_directory, child_name = self.splitpath(path) if not parent_directory: parent_directory = self.cwd try: parent_obj = self.resolve(parent_directory) assert parent_obj if not isinstance(parent_obj, FakeDirectory): if not self.is_windows_fs and isinstance(parent_obj, FakeFile): self.raise_io_error(errno.ENOTDIR, path) self.raise_io_error(errno.ENOENT, path) return parent_obj.get_entry(child_name) except KeyError: self.raise_io_error(errno.ENOENT, path)
370,292
Add a fake file or directory into the filesystem at file_path. Args: file_path: The path to the file to be added relative to self. file_object: File or directory to add. error_class: The error class to be thrown if file_path does not correspond to a directory (used internally( Raises: IOError or OSError: if file_path does not correspond to a directory.
def add_object(self, file_path, file_object, error_fct=None): error_fct = error_fct or self.raise_os_error if not file_path: target_directory = self.root else: target_directory = self.resolve(file_path) if not S_ISDIR(target_directory.st_mode): error = errno.ENOENT if self.is_windows_fs else errno.ENOTDIR error_fct(error, file_path) target_directory.add_entry(file_object)
370,293
Remove an existing file or directory. Args: file_path: The path to the file relative to self. Raises: IOError: if file_path does not correspond to an existing file, or if part of the path refers to something other than a directory. OSError: if the directory is in use (eg, if it is '/').
def remove_object(self, file_path): file_path = self.absnormpath(self._original_path(file_path)) if self._is_root_path(file_path): self.raise_os_error(errno.EBUSY, file_path) try: dirname, basename = self.splitpath(file_path) target_directory = self.resolve(dirname) target_directory.remove_entry(basename) except KeyError: self.raise_io_error(errno.ENOENT, file_path) except AttributeError: self.raise_io_error(errno.ENOTDIR, file_path)
370,300
Create `directory_path`, and all the parent directories. Helper method to set up your test faster. Args: directory_path: The full directory path to create. perm_bits: The permission bits as set by `chmod`. Returns: The newly created FakeDirectory object. Raises: OSError: if the directory already exists.
def create_dir(self, directory_path, perm_bits=PERM_DEF): directory_path = self.make_string_path(directory_path) directory_path = self.absnormpath(directory_path) self._auto_mount_drive_if_needed(directory_path) if self.exists(directory_path, check_link=True): self.raise_os_error(errno.EEXIST, directory_path) path_components = self._path_components(directory_path) current_dir = self.root new_dirs = [] for component in path_components: directory = self._directory_content(current_dir, component)[1] if not directory: new_dir = FakeDirectory(component, filesystem=self) new_dirs.append(new_dir) current_dir.add_entry(new_dir) current_dir = new_dir else: if S_ISLNK(directory.st_mode): directory = self.resolve(directory.contents) current_dir = directory if directory.st_mode & S_IFDIR != S_IFDIR: self.raise_os_error(errno.ENOTDIR, current_dir.path) # set the permission after creating the directories # to allow directory creation inside a read-only directory for new_dir in new_dirs: new_dir.st_mode = S_IFDIR | perm_bits self._last_ino += 1 current_dir.st_ino = self._last_ino return current_dir
370,302
Read the target of a symlink. Args: path: symlink to read the target of. Returns: the string representing the path to which the symbolic link points. Raises: TypeError: if path is None OSError: (with errno=ENOENT) if path is not a valid path, or (with errno=EINVAL) if path is valid, but is not a symlink, or if the path ends with a path separator (Posix only)
def readlink(self, path): if path is None: raise TypeError try: link_obj = self.lresolve(path) except IOError as exc: self.raise_os_error(exc.errno, path) if S_IFMT(link_obj.st_mode) != S_IFLNK: self.raise_os_error(errno.EINVAL, path) if self.ends_with_path_separator(path): if not self.is_windows_fs and self.exists(path): self.raise_os_error(errno.EINVAL, path) if not self.exists(link_obj.path): if self.is_windows_fs: error = errno.EINVAL elif self._is_circular_link(link_obj): if self.is_macos: return link_obj.path error = errno.ELOOP else: error = errno.ENOENT self.raise_os_error(error, link_obj.path) return link_obj.contents
370,311
Create a leaf Fake directory. Args: dir_name: (str) Name of directory to create. Relative paths are assumed to be relative to '/'. mode: (int) Mode to create directory with. This argument defaults to 0o777. The umask is applied to this mode. Raises: OSError: if the directory name is invalid or parent directory is read only or as per :py:meth:`add_object`.
def makedir(self, dir_name, mode=PERM_DEF): dir_name = make_string_path(dir_name) ends_with_sep = self.ends_with_path_separator(dir_name) dir_name = self._path_without_trailing_separators(dir_name) if not dir_name: self.raise_os_error(errno.ENOENT, '') if self.is_windows_fs: dir_name = self.absnormpath(dir_name) parent_dir, _ = self.splitpath(dir_name) if parent_dir: base_dir = self.normpath(parent_dir) ellipsis = self._matching_string( parent_dir, self.path_separator + '..') if parent_dir.endswith(ellipsis) and not self.is_windows_fs: base_dir, dummy_dotdot, _ = parent_dir.partition(ellipsis) if not self.exists(base_dir): self.raise_os_error(errno.ENOENT, base_dir) dir_name = self.absnormpath(dir_name) if self.exists(dir_name, check_link=True): if self.is_windows_fs and dir_name == self.path_separator: error_nr = errno.EACCES else: error_nr = errno.EEXIST if ends_with_sep and self.is_macos and not self.exists(dir_name): # to avoid EEXIST exception, remove the link self.remove_object(dir_name) else: self.raise_os_error(error_nr, dir_name) head, tail = self.splitpath(dir_name) self.add_object( head, FakeDirectory(tail, mode & ~self.umask, filesystem=self))
370,312
Helper function to implement isdir(), islink(), etc. See the stat(2) man page for valid stat.S_I* flag values Args: path: Path to file to stat and test st_flag: The stat.S_I* flag checked for the file's st_mode Returns: (boolean) `True` if the st_flag is set in path's st_mode. Raises: TypeError: if path is None
def _is_of_type(self, path, st_flag, follow_symlinks=True): path = make_string_path(path) if path is None: raise TypeError try: obj = self.resolve(path, follow_symlinks) if obj: self.raise_for_filepath_ending_with_separator( path, obj, macos_handling=not follow_symlinks) return S_IFMT(obj.st_mode) == st_flag except (IOError, OSError): return False return False
370,314
Determine if path identifies a directory. Args: path: Path to filesystem object. Returns: `True` if path points to a directory (following symlinks). Raises: TypeError: if path is None.
def isdir(self, path, follow_symlinks=True): return self._is_of_type(path, S_IFDIR, follow_symlinks)
370,315
Determine if path identifies a regular file. Args: path: Path to filesystem object. Returns: `True` if path points to a regular file (following symlinks). Raises: TypeError: if path is None.
def isfile(self, path, follow_symlinks=True): return self._is_of_type(path, S_IFREG, follow_symlinks)
370,316
Test that the target is actually a directory, raising OSError if not. Args: target_directory: Path to the target directory within the fake filesystem. Returns: The FakeDirectory object corresponding to target_directory. Raises: OSError: if the target is not a directory.
def confirmdir(self, target_directory): try: directory = self.resolve(target_directory) except IOError as exc: self.raise_os_error(exc.errno, target_directory) if not directory.st_mode & S_IFDIR: if self.is_windows_fs and IS_PY2: error_nr = errno.EINVAL else: error_nr = errno.ENOTDIR self.raise_os_error(error_nr, target_directory, 267) return directory
370,317
Remove the FakeFile object at the specified file path. Args: path: Path to file to be removed. Raises: OSError: if path points to a directory. OSError: if path does not exist. OSError: if removal failed.
def remove(self, path): norm_path = self.absnormpath(path) if self.ends_with_path_separator(path): self._handle_broken_link_with_trailing_sep(norm_path) if self.exists(norm_path): obj = self.resolve(norm_path) if S_IFMT(obj.st_mode) == S_IFDIR: link_obj = self.lresolve(norm_path) if S_IFMT(link_obj.st_mode) != S_IFLNK: if self.is_windows_fs: error = errno.EACCES elif self.is_macos: error = errno.EPERM else: error = errno.EISDIR self.raise_os_error(error, norm_path) norm_path = make_string_path(norm_path) if path.endswith(self.path_separator): if self.is_windows_fs: error = errno.EACCES elif self.is_macos: error = errno.EPERM else: error = errno.ENOTDIR self.raise_os_error(error, norm_path) else: self.raise_for_filepath_ending_with_separator(path, obj) try: self.remove_object(norm_path) except IOError as exc: self.raise_os_error(exc.errno, exc.filename)
370,318
Return a list of file names in target_directory. Args: target_directory: Path to the target directory within the fake filesystem. Returns: A list of file names within the target directory in arbitrary order. Raises: OSError: if the target is not a directory.
def listdir(self, target_directory): target_directory = self.resolve_path(target_directory, allow_fd=True) directory = self.confirmdir(target_directory) directory_contents = directory.contents return list(directory_contents.keys())
370,320
Init. Args: filesystem: FakeFilesystem used to provide file system information os_module: (deprecated) FakeOsModule to assign to self.os
def __init__(self, filesystem, os_module=None): self.filesystem = filesystem self._os_path = self._OS_PATH_COPY if os_module is None: warnings.warn(FAKE_PATH_MODULE_DEPRECATION, DeprecationWarning, stacklevel=2) self._os_path.os = self.os = os_module self.sep = self.filesystem.path_separator self.altsep = self.filesystem.alternative_path_separator
370,323
Return the file object size in bytes. Args: path: path to the file object. Returns: file size in bytes.
def getsize(self, path): try: file_obj = self.filesystem.resolve(path) if (self.filesystem.ends_with_path_separator(path) and S_IFMT(file_obj.st_mode) != S_IFDIR): error_nr = (errno.EINVAL if self.filesystem.is_windows_fs else errno.ENOTDIR) self.filesystem.raise_os_error(error_nr, path) return file_obj.st_size except IOError as exc: raise os.error(exc.errno, exc.strerror)
370,324
Returns the modification time of the fake file. Args: path: the path to fake file. Returns: (int, float) the modification time of the fake file in number of seconds since the epoch. Raises: OSError: if the file does not exist.
def getmtime(self, path): try: file_obj = self.filesystem.resolve(path) return file_obj.st_mtime except IOError: self.filesystem.raise_os_error(errno.ENOENT, winerror=3)
370,326
Returns the last access time of the fake file. Note: Access time is not set automatically in fake filesystem on access. Args: path: the path to fake file. Returns: (int, float) the access time of the fake file in number of seconds since the epoch. Raises: OSError: if the file does not exist.
def getatime(self, path): try: file_obj = self.filesystem.resolve(path) except IOError: self.filesystem.raise_os_error(errno.ENOENT) return file_obj.st_atime
370,327
Returns the creation time of the fake file. Args: path: the path to fake file. Returns: (int, float) the creation time of the fake file in number of seconds since the epoch. Raises: OSError: if the file does not exist.
def getctime(self, path): try: file_obj = self.filesystem.resolve(path) except IOError: self.filesystem.raise_os_error(errno.ENOENT) return file_obj.st_ctime
370,328
Return true if the given path is a mount point. Args: path: Path to filesystem object to be checked Returns: `True` if path is a mount point added to the fake file system. Under Windows also returns True for drive and UNC roots (independent of their existence).
def ismount(self, path): path = make_string_path(path) if not path: return False normed_path = self.filesystem.absnormpath(path) sep = self.filesystem._path_separator(path) if self.filesystem.is_windows_fs: if self.filesystem.alternative_path_separator is not None: path_seps = ( sep, self.filesystem._alternative_path_separator(path) ) else: path_seps = (sep, ) drive, rest = self.filesystem.splitdrive(normed_path) if drive and drive[:1] in path_seps: return (not rest) or (rest in path_seps) if rest in path_seps: return True for mount_point in self.filesystem.mount_points: if normed_path.rstrip(sep) == mount_point.rstrip(sep): return True return False
370,335
Also exposes self.path (to fake os.path). Args: filesystem: FakeFilesystem used to provide file system information os_path_module: (deprecated) Optional FakePathModule instance
def __init__(self, filesystem, os_path_module=None): self.filesystem = filesystem self.sep = filesystem.path_separator self.altsep = filesystem.alternative_path_separator self.linesep = filesystem.line_separator() self._os_module = os if os_path_module is None: self.path = FakePathModule(self.filesystem, self) else: warnings.warn(FAKE_PATH_MODULE_DEPRECATION, DeprecationWarning, stacklevel=2) self.path = os_path_module if IS_PY2: self.fdopen = self._fdopen_ver2 else: self.fdopen = self._fdopen self.__class__.devnull = ('/dev/nul' if filesystem.is_windows_fs else '/dev/nul')
370,337
Redirector to open() builtin function. Args: *args: Pass through args. **kwargs: Pass through kwargs. Returns: File object corresponding to file_des. Raises: TypeError: if file descriptor is not an integer.
def _fdopen(self, *args, **kwargs): if not is_int_type(args[0]): raise TypeError('an integer is required') return FakeFileOpen(self.filesystem)(*args, **kwargs)
370,338
Close a file descriptor. Args: file_des: An integer file descriptor for the file object requested. Raises: OSError: bad file descriptor. TypeError: if file descriptor is not an integer.
def close(self, file_des): file_handle = self.filesystem.get_open_file(file_des) file_handle.close()
370,342
Read number of bytes from a file descriptor, returns bytes read. Args: file_des: An integer file descriptor for the file object requested. num_bytes: Number of bytes to read from file. Returns: Bytes read from file. Raises: OSError: bad file descriptor. TypeError: if file descriptor is not an integer.
def read(self, file_des, num_bytes): file_handle = self.filesystem.get_open_file(file_des) file_handle.raw_io = True return file_handle.read(num_bytes)
370,343
Write string to file descriptor, returns number of bytes written. Args: file_des: An integer file descriptor for the file object requested. contents: String of bytes to write to file. Returns: Number of bytes written. Raises: OSError: bad file descriptor. TypeError: if file descriptor is not an integer.
def write(self, file_des, contents): file_handle = self.filesystem.get_open_file(file_des) if isinstance(file_handle, FakeDirWrapper): self.filesystem.raise_os_error(errno.EBADF, file_handle.file_path) if isinstance(file_handle, FakePipeWrapper): return file_handle.write(contents) file_handle.raw_io = True file_handle._sync_io() file_handle.update_flush_pos() file_handle.write(contents) file_handle.flush() return len(contents)
370,344
Return the os.stat-like tuple for the FakeFile object of file_des. Args: file_des: The file descriptor of filesystem object to retrieve. Returns: The FakeStatResult object corresponding to entry_path. Raises: OSError: if the filesystem object doesn't exist.
def fstat(self, file_des): # stat should return the tuple representing return value of os.stat file_object = self.filesystem.get_open_file(file_des).get_object() return file_object.stat_result.copy()
370,346
Change the current umask. Args: new_mask: (int) The new umask value. Returns: The old umask. Raises: TypeError: if new_mask is of an invalid type.
def umask(self, new_mask): if not is_int_type(new_mask): raise TypeError('an integer is required') old_umask = self.filesystem.umask self.filesystem.umask = new_mask return old_umask
370,347
Change current working directory to target directory. Args: target_directory: The path to new current working directory. Raises: OSError: if user lacks permission to enter the argument directory or if the target is not a directory.
def chdir(self, target_directory): target_directory = self.filesystem.resolve_path( target_directory, allow_fd=True) self.filesystem.confirmdir(target_directory) directory = self.filesystem.resolve(target_directory) # A full implementation would check permissions all the way # up the tree. if not is_root() and not directory.st_mode | PERM_EXE: self.filesystem.raise_os_error(errno.EACCES, directory) self.filesystem.cwd = target_directory
370,348
Return the os.stat-like tuple for entry_path, not following symlinks. Args: entry_path: path to filesystem object to retrieve. dir_fd: If not `None`, the file descriptor of a directory, with `entry_path` being relative to this directory. New in Python 3.3. Returns: the FakeStatResult object corresponding to `entry_path`. Raises: OSError: if the filesystem object doesn't exist.
def lstat(self, entry_path, dir_fd=None): # stat should return the tuple representing return value of os.stat entry_path = self._path_with_dir_fd(entry_path, self.lstat, dir_fd) return self.filesystem.stat(entry_path, follow_symlinks=False)
370,351
Remove the FakeFile object at the specified file path. Args: path: Path to file to be removed. dir_fd: If not `None`, the file descriptor of a directory, with `path` being relative to this directory. New in Python 3.3. Raises: OSError: if path points to a directory. OSError: if path does not exist. OSError: if removal failed.
def remove(self, path, dir_fd=None): path = self._path_with_dir_fd(path, self.remove, dir_fd) self.filesystem.remove(path)
370,352
Remove a leaf Fake directory. Args: target_directory: (str) Name of directory to remove. dir_fd: If not `None`, the file descriptor of a directory, with `target_directory` being relative to this directory. New in Python 3.3. Raises: OSError: if target_directory does not exist or is not a directory, or as per FakeFilesystem.remove_object. Cannot remove '.'.
def rmdir(self, target_directory, dir_fd=None): target_directory = self._path_with_dir_fd( target_directory, self.rmdir, dir_fd) self.filesystem.rmdir(target_directory)
370,354
Remove a leaf fake directory and all empty intermediate ones. Args: target_directory: the directory to be removed. Raises: OSError: if target_directory does not exist or is not a directory. OSError: if target_directory is not empty.
def removedirs(self, target_directory): target_directory = self.filesystem.absnormpath(target_directory) directory = self.filesystem.confirmdir(target_directory) if directory.contents: self.filesystem.raise_os_error( errno.ENOTEMPTY, self.path.basename(target_directory)) else: self.rmdir(target_directory) head, tail = self.path.split(target_directory) if not tail: head, tail = self.path.split(head) while head and tail: head_dir = self.filesystem.confirmdir(head) if head_dir.contents: break # only the top-level dir may not be a symlink self.filesystem.rmdir(head, allow_symlink=True) head, tail = self.path.split(head)
370,355
Change the permissions of a file as encoded in integer mode. If the file is a link, the permissions of the link are changed. Args: path: (str) Path to the file. mode: (int) Permissions.
def lchmod(self, path, mode): if self.filesystem.is_windows_fs: raise (NameError, "name 'lchmod' is not defined") self.filesystem.chmod(path, mode, follow_symlinks=False)
370,361
Creates the specified symlink, pointed at the specified link target. Args: link_target: The target of the symlink. path: Path to the symlink to create. dir_fd: If not `None`, the file descriptor of a directory, with `link_target` being relative to this directory. New in Python 3.3. Raises: OSError: if the file already exists.
def symlink(self, link_target, path, dir_fd=None): link_target = self._path_with_dir_fd(link_target, self.symlink, dir_fd) self.filesystem.create_symlink( path, link_target, create_missing_dirs=False)
370,365
Perform fsync for a fake file (in other words, do nothing). Args: file_des: The file descriptor of the open file. Raises: OSError: file_des is an invalid file descriptor. TypeError: file_des is not an integer.
def fsync(self, file_des): # Throw an error if file_des isn't valid if 0 <= file_des < NR_STD_STREAMS: self.filesystem.raise_os_error(errno.EINVAL) file_object = self.filesystem.get_open_file(file_des) if self.filesystem.is_windows_fs: if (not hasattr(file_object, 'allow_update') or not file_object.allow_update): self.filesystem.raise_os_error( errno.EBADF, file_object.file_path)
370,367
Perform fdatasync for a fake file (in other words, do nothing). Args: file_des: The file descriptor of the open file. Raises: OSError: file_des is an invalid file descriptor. TypeError: file_des is not an integer.
def fdatasync(self, file_des): # Throw an error if file_des isn't valid if self.filesystem.is_windows_fs or self.filesystem.is_macos: raise AttributeError("module 'os' has no attribute 'fdatasync'") if 0 <= file_des < NR_STD_STREAMS: self.filesystem.raise_os_error(errno.EINVAL) self.filesystem.get_open_file(file_des)
370,368
Wrap a stream attribute in a read wrapper. Returns a read_wrapper which tracks our own read pointer since the stream object has no concept of a different read and write pointer. Args: name: The name of the attribute to wrap. Should be a read call. Returns: The read_wrapper function.
def _read_wrappers(self, name): io_attr = getattr(self._io, name) def read_wrapper(*args, **kwargs): self._io.seek(self._read_seek, self._read_whence) ret_value = io_attr(*args, **kwargs) self._read_seek = self._io.tell() self._read_whence = 0 self._io.seek(0, 2) return ret_value return read_wrapper
370,379
Wrap a stream attribute in an other_wrapper. Args: name: the name of the stream attribute to wrap. Returns: other_wrapper which is described below.
def _other_wrapper(self, name, writing): io_attr = getattr(self._io, name) def other_wrapper(*args, **kwargs): write_seek = self._io.tell() ret_value = io_attr(*args, **kwargs) if write_seek != self._io.tell(): self._read_seek = self._io.tell() self._read_whence = 0 if not writing or not IS_PY2: return ret_value return other_wrapper
370,380
Determine whether a file's time stamps are reported as floats or ints. Calling without arguments returns the current value. The value is shared by all instances of FakeOsModule. Args: newvalue: If `True`, mtime, ctime, atime are reported as floats. Otherwise, they are returned as ints (rounding down).
def stat_float_times(cls, newvalue=None): if newvalue is not None: cls._stat_float_times = bool(newvalue) return cls._stat_float_times
370,402
Add attachments using filenames as title Arguments: One or more file paths to add as attachments: An optional Item ID, which will create child attachments
def attachment_simple(self, files, parentid=None): orig = self._attachment_template("imported_file") to_add = [orig.copy() for fls in files] for idx, tmplt in enumerate(to_add): tmplt["title"] = os.path.basename(files[idx]) tmplt["filename"] = files[idx] if parentid: return self._attachment(to_add, parentid) else: return self._attachment(to_add)
370,565
Add child attachments using title, filename Arguments: One or more lists or tuples containing title, file path An optional Item ID, which will create child attachments
def attachment_both(self, files, parentid=None): orig = self._attachment_template("imported_file") to_add = [orig.copy() for f in files] for idx, tmplt in enumerate(to_add): tmplt["title"] = files[idx][0] tmplt["filename"] = files[idx][1] if parentid: return self._attachment(to_add, parentid) else: return self._attachment(to_add)
370,566
Open a DB in file `path` in mode `mode` as a context manager. On exiting the context the DB will be automatically closed. Args: path: The path to the DB file. mode: The mode in which to open the DB. See the `Mode` enum for details. Raises: FileNotFoundError: If `mode` is `Mode.open` and `path` does not exist.
def use_db(path, mode=WorkDB.Mode.create): database = WorkDB(path, mode) try: yield database finally: database.close()
370,618
Open a DB in file `path` in mode `mode`. Args: path: The path to the DB file. mode: The mode in which to open the DB. See the `Mode` enum for details. Raises: FileNotFoundError: If `mode` is `Mode.open` and `path` does not exist.
def __init__(self, path, mode): if (mode == WorkDB.Mode.open) and (not os.path.exists(path)): raise FileNotFoundError('Requested file {} not found'.format(path)) self._path = path self._conn = sqlite3.connect(path) self._init_db()
370,619
Set (replace) the configuration for the session. Args: config: Configuration object
def set_config(self, config): with self._conn: self._conn.execute("DELETE FROM config") self._conn.execute('INSERT INTO config VALUES(?)', (serialize_config(config),))
370,620
Add a WorkItems. Args: work_item: A WorkItem.
def add_work_item(self, work_item): with self._conn: self._conn.execute( , _work_item_to_row(work_item))
370,623
Set the result for a job. This will overwrite any existing results for the job. Args: job_id: The ID of the WorkItem to set the result for. result: A WorkResult indicating the result of the job. Raises: KeyError: If there is no work-item with a matching job-id.
def set_result(self, job_id, result): with self._conn: try: self._conn.execute( , _work_result_to_row(job_id, result)) except sqlite3.IntegrityError as exc: raise KeyError('Can not add result with job-id {}'.format( job_id)) from exc
370,626
Get the AST for the code in a file. Args: module_path: pathlib.Path to the file containing the code. python_version: Python version as a "MAJ.MIN" string. Returns: The parso parse tree for the code in `module_path`.
def get_ast(module_path, python_version): with module_path.open(mode='rt', encoding='utf-8') as handle: source = handle.read() return parso.parse(source, version=python_version)
370,640
Find all modules in the module (possibly package) represented by `module_path`. Args: module_path: A pathlib.Path to a Python package or module. Returns: An iterable of paths Python modules (i.e. *py files).
def find_modules(module_path): if module_path.is_file(): if module_path.suffix == '.py': yield module_path elif module_path.is_dir(): pyfiles = glob.glob('{}/**/*.py'.format(module_path), recursive=True) yield from (Path(pyfile) for pyfile in pyfiles)
370,651
Clear and initialize a work-db with work items. Any existing data in the work-db will be cleared and replaced with entirely new work orders. In particular, this means that any results in the db are removed. Args: module_paths: iterable of pathlib.Paths of modules to mutate. work_db: A `WorkDB` instance into which the work orders will be saved. config: The configuration for the new session.
def init(module_paths, work_db, config): operator_names = cosmic_ray.plugins.operator_names() work_db.set_config(config=config) work_db.clear() for module_path in module_paths: module_ast = get_ast( module_path, python_version=config.python_version) for op_name in operator_names: operator = get_operator(op_name)(config.python_version) visitor = WorkDBInitVisitor(module_path, op_name, work_db, operator) visitor.walk(module_ast) apply_interceptors(work_db, config.sub('interceptors').get('enabled', ()))
370,652
Report progress from any currently installed reporters. Args: stream: The text stream (default: sys.stderr) to which progress will be reported.
def report_progress(stream=None): if stream is None: stream = sys.stderr for reporter in _reporters: reporter(stream)
370,664
A decorator factory to mark functions which report progress. Args: reporter: A zero-argument callable to report progress. The callable provided should have the means to both retrieve and display current progress information.
def reports_progress(reporter): def decorator(func): # pylint: disable=missing-docstring @wraps(func) def wrapper(*args, **kwargs): # pylint: disable=missing-docstring with progress_reporter(reporter): return func(*args, **kwargs) return wrapper return decorator
370,665
The celery task which performs a single mutation and runs a test suite. This runs `cosmic-ray worker` in a subprocess and returns the results, passing `config` to it via stdin. Args: work_item: A dict describing a WorkItem. config: The configuration to use for the test execution. Returns: An updated WorkItem
def worker_task(work_item, config): global _workspace _ensure_workspace(config) result = worker( work_item.module_path, config.python_version, work_item.operator_name, work_item.occurrence, config.test_command, config.timeout) return work_item.job_id, result
370,670
Execute a suite of tests for a given set of work items. Args: work_items: An iterable of `work_db.WorkItem`s. config: The configuration to use for the test execution. Returns: An iterable of WorkItems.
def execute_work_items(work_items, config): return celery.group( worker_task.s(work_item, config) for work_item in work_items )
370,672
Create a cloned workspace and yield it. This creates a workspace for a with-block and cleans it up on exit. By default, this will also change to the workspace's `clone_dir` for the duration of the with-block. Args: clone_config: The execution engine configuration to use for the workspace. chdir: Whether to change to the workspace's `clone_dir` before entering the with-block. Yields: The `CloneWorkspace` instance created for the context.
def cloned_workspace(clone_config, chdir=True): workspace = ClonedWorkspace(clone_config) original_dir = os.getcwd() if chdir: os.chdir(workspace.clone_dir) try: yield workspace finally: os.chdir(original_dir) workspace.cleanup()
370,673
Create a clone by cloning a git repository. Args: repo_uri: The URI of the git repository to clone. dest_path: The location to clone to.
def clone_with_git(repo_uri, dest_path): log.info('Cloning git repo %s to %s', repo_uri, dest_path) git.Repo.clone_from(repo_uri, dest_path, depth=1)
370,674
Clone a directory try by copying it. Args: src_path: The directory to be copied. dest_path: The location to copy the directory to.
def clone_with_copy(src_path, dest_path): log.info('Cloning directory tree %s to %s', src_path, dest_path) shutil.copytree(src_path, dest_path)
370,675
Replace variable placeholders in `text` with values from the virtual env. The variables are: - {python-executable} Args: text: The text to do replacment int. Returns: The text after replacement.
def replace_variables(self, text):
    """Replace variable placeholders in `text` with virtualenv values.

    Supported placeholders:
        - {python-executable}: path to the virtualenv's python binary.

    Args:
        text: The text to do replacement in.

    Returns:
        The text after replacement.
    """
    python_executable = self._venv_path / 'bin' / 'python'
    return text.format(**{'python-executable': str(python_executable)})
370,678
Apply a specific mutation to a file on disk. Args: module_path: The path to the module to mutate. operator: The `operator` instance to use. occurrence: The occurrence of the operator to apply. Returns: A `(unmutated-code, mutated-code)` tuple to the with-block. If there was no mutation performed, the `mutated-code` is `None`.
def apply_mutation(module_path, operator, occurrence):
    """Apply a specific mutation to a file on disk.

    Args:
        module_path: The path to the module to mutate.
        operator: The `operator` instance to use.
        occurrence: The occurrence of the operator to apply.

    Returns:
        A `(unmutated-code, mutated-code)` tuple. If no mutation was
        performed, `mutated-code` is `None`.
    """
    module_ast = get_ast(module_path, python_version=operator.python_version)
    original_code = module_ast.get_code()
    visitor = MutationVisitor(occurrence, operator)
    mutated_ast = visitor.walk(module_ast)

    mutated_code = None
    if visitor.mutation_applied:
        mutated_code = mutated_ast.get_code()
        # Overwrite the module on disk with the mutated source.
        with module_path.open(mode='wt', encoding='utf-8') as handle:
            handle.write(mutated_code)
            handle.flush()

    return original_code, mutated_code
370,690
Locate image position with cv2.matchTemplate Use pixel match to find pictures. Args: im_source(string): 图像、素材 im_search(string): 需要查找的图片 threshold: 阈值,当相似度小于该阈值的时候,就忽略掉 Returns: A list of found [(point, score), ...] Raises: IOError: when file read error
def find_all_template(im_source, im_search, threshold=0.5, maxcnt=0, rgb=False, bgremove=False):
    """Locate all positions of a template image inside a source image.

    Uses OpenCV template matching (TM_CCOEFF_NORMED) and repeatedly masks out
    each found region with floodFill to locate multiple occurrences.

    Args:
        im_source: source image (BGR numpy array) to search in.
        im_search: template image (BGR numpy array) to search for.
        threshold: matches scoring below this similarity are ignored.
        maxcnt: maximum number of matches to return; 0 means unlimited.
        rgb: if True, match each BGR channel separately and combine scores.
        bgremove: if True, apply Canny edge detection first to suppress
            background differences.

    Returns:
        A list of dicts: {'result': (x, y) center point, 'rectangle': four
        corner points, 'confidence': match score}.
    """
    # method = cv2.TM_CCORR_NORMED
    # method = cv2.TM_SQDIFF_NORMED
    method = cv2.TM_CCOEFF_NORMED

    if rgb:
        s_bgr = cv2.split(im_search) # Blue Green Red
        i_bgr = cv2.split(im_source)
        # Per-channel weights used when combining the B, G, R match scores.
        weight = (0.3, 0.3, 0.4)
        resbgr = [0, 0, 0]
        for i in range(3): # bgr
            resbgr[i] = cv2.matchTemplate(i_bgr[i], s_bgr[i], method)
        res = resbgr[0]*weight[0] + resbgr[1]*weight[1] + resbgr[2]*weight[2]
    else:
        s_gray = cv2.cvtColor(im_search, cv2.COLOR_BGR2GRAY)
        i_gray = cv2.cvtColor(im_source, cv2.COLOR_BGR2GRAY)
        # Edge extraction (implements the background-removal feature).
        if bgremove:
            s_gray = cv2.Canny(s_gray, 100, 200)
            i_gray = cv2.Canny(i_gray, 100, 200)
        res = cv2.matchTemplate(i_gray, s_gray, method)

    w, h = im_search.shape[1], im_search.shape[0]

    result = []
    while True:
        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
        # SQDIFF methods score best matches *low*; all others score them high.
        if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
            top_left = min_loc
        else:
            top_left = max_loc
        if DEBUG:
            print('templmatch_value(thresh:%.1f) = %.3f' %(threshold, max_val)) # not show debug
        if max_val < threshold:
            break
        # Compute the middle point of the matched region.
        middle_point = (top_left[0]+w/2, top_left[1]+h/2)
        result.append(dict(
            result=middle_point,
            rectangle=(top_left, (top_left[0], top_left[1] + h), (top_left[0] + w, top_left[1]), (top_left[0] + w, top_left[1] + h)),
            confidence=max_val
        ))
        if maxcnt and len(result) >= maxcnt:
            break
        # floodfill the already found area so it is not matched again.
        cv2.floodFill(res, None, max_loc, (-1000,), max_val-threshold+0.1, 1, flags=cv2.FLOODFILL_FIXED_RANGE)
    return result
370,877
使用sift算法进行多个相同元素的查找 Args: im_source(string): 图像、素材 im_search(string): 需要查找的图片 threshold: 阈值,当相识度小于该阈值的时候,就忽略掉 maxcnt: 限制匹配的数量 Returns: A tuple of found [(point, rectangle), ...] A tuple of found [{"point": point, "rectangle": rectangle, "confidence": 0.76}, ...] rectangle is a 4 points list
def find_all_sift(im_source, im_search, min_match_count=4, maxcnt=0):
    """Find multiple occurrences of a template using SIFT features.

    Args:
        im_source: source image (numpy array) to search in.
        im_search: template image (numpy array) to search for.
        min_match_count: minimum number of good keypoint matches required.
        maxcnt: maximum number of results to return; 0 means unlimited.

    Returns:
        A list of dicts:
        [{"result": point, "rectangle": rectangle, "confidence": ...}, ...]
        where rectangle is a list of 4 corner points, or None when there are
        not enough keypoints to match.
    """
    sift = _sift_instance()
    flann = cv2.FlannBasedMatcher({'algorithm': FLANN_INDEX_KDTREE, 'trees': 5}, dict(checks=50))

    kp_sch, des_sch = sift.detectAndCompute(im_search, None)
    if len(kp_sch) < min_match_count:
        return None

    kp_src, des_src = sift.detectAndCompute(im_source, None)
    if len(kp_src) < min_match_count:
        return None

    # Bug fix: was `h, w = im_search.shape[1:]`, which is wrong for both
    # grayscale and color images; the values are recomputed below anyway.
    h, w = im_search.shape[:2]

    result = []
    while True:
        # Match keypoints between the two images; k=2 keeps the two best
        # candidates per query point for the ratio test below.
        matches = flann.knnMatch(des_sch, des_src, k=2)
        good = []
        for m, n in matches:
            # Lowe's ratio test: drop matches too close to the runner-up.
            if m.distance < 0.9 * n.distance:
                good.append(m)

        if len(good) < min_match_count:
            break

        sch_pts = np.float32([kp_sch[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
        img_pts = np.float32([kp_src[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)

        # M is the perspective transform from template to source.
        M, mask = cv2.findHomography(sch_pts, img_pts, cv2.RANSAC, 5.0)
        matches_mask = mask.ravel().tolist()
        # Project the template's four corners into source-image coordinates.
        h, w = im_search.shape[:2]
        pts = np.float32([[0, 0], [0, h-1], [w-1, h-1], [w-1, 0]]).reshape(-1, 1, 2)
        dst = cv2.perspectiveTransform(pts, M)

        # Convert the numpy array to a plain list of (x, y) tuples.
        pypts = []
        for npt in dst.astype(int).tolist():
            pypts.append(tuple(npt[0]))

        lt, br = pypts[0], pypts[2]
        middle_point = (lt[0] + br[0]) / 2, (lt[1] + br[1]) / 2

        result.append(dict(
            result=middle_point,
            rectangle=pypts,
            confidence=(matches_mask.count(1), len(good)) #min(1.0 * matches_mask.count(1) / 10, 1.0)
        ))

        if maxcnt and len(result) >= maxcnt:
            break

        # Remove the already-matched keypoints so further iterations can
        # find other occurrences of the template.
        qindexes, tindexes = [], []
        for m in good:
            qindexes.append(m.queryIdx) # need to remove from kp_sch
            tindexes.append(m.trainIdx) # need to remove from kp_img

        def filter_index(indexes, arr):
            # Bug fix: the original closure tested `i not in qindexes`,
            # ignoring its `indexes` parameter, so source keypoints were
            # filtered against the wrong index list.
            r = np.ndarray(0, np.float32)
            for i, item in enumerate(arr):
                if i not in indexes:
                    r = np.append(r, item)
            return r
        kp_src = filter_index(tindexes, kp_src)
        des_src = filter_index(tindexes, des_src)

    return result
370,881
Return the brightness of an image Args: im(numpy): image Returns: float, average brightness of an image
def brightness(im):
    """Return the average brightness of an image.

    Args:
        im: BGR image as a numpy array.

    Returns:
        float: the mean of the HSV value (V) channel over all pixels.
    """
    im_hsv = cv2.cvtColor(im, cv2.COLOR_BGR2HSV)
    # Only the V channel carries brightness information.
    v = cv2.split(im_hsv)[2]
    # Vectorized mean replaces the original row-by-row Python summing loop;
    # mean == total / (height * width), so the result is unchanged.
    return float(v.mean())
370,884
Creates an instance of *MibCompiler* class. Args: parser: ASN.1 MIB parser object codegen: MIB transformation object writer: transformed MIB storing object
def __init__(self, parser, codegen, writer):
    """Creates an instance of *MibCompiler* class.

    Args:
        parser: ASN.1 MIB parser object.
        codegen: MIB transformation object.
        writer: transformed MIB storing object.
    """
    self._parser = parser
    self._codegen = codegen
    # Symbol-table generator used alongside the user-supplied codegen.
    self._symbolgen = SymtableCodeGen()
    self._writer = writer
    # Populated later via addSources/addSearchers/addBorrowers-style calls.
    self._sources = []
    self._searchers = []
    self._borrowers = []
371,922
Add more ASN.1 MIB source repositories. MibCompiler.compile will invoke each of configured source objects in order of their addition asking each to fetch MIB module specified by name. Args: sources: reader object(s) Returns: reference to itself (can be used for call chaining)
def addSources(self, *sources):
    """Add more ASN.1 MIB source repositories.

    MibCompiler.compile will invoke each of the configured source objects
    in order of their addition, asking each to fetch the MIB module
    specified by name.

    Args:
        sources: reader object(s)

    Returns:
        reference to itself (can be used for call chaining)
    """
    self._sources.extend(sources)

    if debug.logger & debug.flagCompiler:
        debug.logger('current MIB source(s): %s'
                     % ', '.join(str(source) for source in self._sources))

    return self
371,923
Add more transformed MIBs repositories. MibCompiler.compile will invoke each of configured searcher objects in order of their addition asking each if already transformed MIB module already exists and is more recent than specified. Args: searchers: searcher object(s) Returns: reference to itself (can be used for call chaining)
def addSearchers(self, *searchers):
    """Add more transformed MIBs repositories.

    MibCompiler.compile will invoke each of the configured searcher objects
    in order of their addition, asking each whether an already transformed
    MIB module exists and is more recent than specified.

    Args:
        searchers: searcher object(s)

    Returns:
        reference to itself (can be used for call chaining)
    """
    self._searchers.extend(searchers)

    if debug.logger & debug.flagCompiler:
        debug.logger('current compiled MIBs location(s): %s'
                     % ', '.join(str(searcher) for searcher in self._searchers))

    return self
371,924
Creates an instance of *Borrower* class. Args: reader: a *reader* object Keyword Args: genText: indicates whether this borrower should be looking for transformed MIBs that include human-oriented texts
def __init__(self, reader, genTexts=False):
    """Creates an instance of *Borrower* class.

    Args:
        reader: a *reader* object.

    Keyword Args:
        genTexts: indicates whether this borrower should be looking for
            transformed MIBs that include human-oriented texts.
    """
    # Only override the class-level default when an explicit value is given.
    if genTexts is not None:
        self.genTexts = genTexts

    self._reader = reader
371,930
Create an instance of *CallbackReader* bound to specific URL. Args: cbFun (callable): user callable accepting *MIB name* and *cbCtx* objects Keyword Args: cbCtx (object): user object that can be used to communicate state information between user-scope code and the *cbFun* callable scope
def __init__(self, cbFun, cbCtx=None):
    """Create an instance of *CallbackReader* bound to a user callback.

    Args:
        cbFun (callable): user callable accepting *MIB name* and *cbCtx*
            objects.

    Keyword Args:
        cbCtx (object): user object that can be used to communicate state
            information between user-scope code and the *cbFun* callable
            scope.
    """
    self._cbFun = cbFun
    self._cbCtx = cbCtx
371,936
Create an instance of *FileReader* serving a directory. Args: path (str): directory to search MIB files Keyword Args: recursive (bool): whether to include subdirectories ignoreErrors (bool): ignore filesystem access errors
def __init__(self, path, recursive=True, ignoreErrors=True):
    """Create an instance of *FileReader* serving a directory.

    Args:
        path (str): directory to search MIB files.

    Keyword Args:
        recursive (bool): whether to include subdirectories.
        ignoreErrors (bool): ignore filesystem access errors.
    """
    self._path = os.path.normpath(path)
    self._recursive = recursive
    self._ignoreErrors = ignoreErrors
    # The MIB index is loaded lazily on first use.
    self._indexLoaded = False
    self._mibIndex = None
371,938
Create an instance of *ZipReader* serving a ZIP archive. Args: path (str): path to ZIP archive containing MIB files Keyword Args: ignoreErrors (bool): ignore ZIP archive access errors
def __init__(self, path, ignoreErrors=True):
    """Create an instance of *ZipReader* serving a ZIP archive.

    Args:
        path (str): path to ZIP archive containing MIB files.

    Keyword Args:
        ignoreErrors (bool): ignore ZIP archive access errors. When False,
            an access error is stored and presumably raised on later use.
    """
    self._name = path
    self._members = {}
    self._pendingError = None

    try:
        self._members = self._readZipDirectory(fileObj=open(path, 'rb'))
    except Exception:
        debug.logger & debug.flagReader and debug.logger(
            'ZIP file %s open failure: %s' % (self._name, sys.exc_info()[1]))
        if not ignoreErrors:
            # Defer the failure rather than raising from the constructor.
            self._pendingError = error.PySmiError('file %s access error: %s' % (self._name, sys.exc_info()[1]))
371,985
Create an instance of *PyFileSearcher* bound to specific directory. Args: path (str): path to local directory
def __init__(self, path):
    """Create an instance of *PyFileSearcher* bound to specific directory.

    Args:
        path (str): path to local directory.
    """
    self._path = os.path.normpath(decode(path))
372,041
Sets up the _paths attribute. Args: paths: Comma-separated list of strings representing the paths to collect.
def setup(self, paths=None):  # pylint: disable=arguments-differ
    """Sets up the _paths attribute.

    Args:
        paths: Comma-separated list of strings representing the paths to
            collect.
    """
    if paths:
        self._paths = [path.strip() for path in paths.strip().split(',')]
    else:
        self.state.add_error(
            'No `paths` argument provided in recipe, bailing', critical=True)
372,049
Create specified hunt. Args: name: string containing hunt name. args: proto (*FlowArgs) for type of hunt, as defined in GRR flow proto. Returns: The newly created GRR hunt object. Raises: ValueError: if approval is needed and approvers were not specified.
def _create_hunt(self, name, args):
    """Create specified hunt.

    Args:
        name: string containing hunt name.
        args: proto (*FlowArgs) for type of hunt, as defined in GRR flow
            proto.

    Returns:
        The newly created GRR hunt object.

    Raises:
        ValueError: if approval is needed and approvers were not specified.
    """
    runner_args = self.grr_api.types.CreateHuntRunnerArgs()
    runner_args.description = self.reason
    hunt = self.grr_api.CreateHunt(
        flow_name=name, flow_args=args, hunt_runner_args=runner_args)
    print('{0!s}: Hunt created'.format(hunt.hunt_id))
    # Starting the hunt requires approval; the wrapper requests it if needed.
    self._check_approval_wrapper(hunt, hunt.Start)
    return hunt
372,051
Initializes a GRR Hunt artifact collector. Args: artifacts: str, comma-separated list of GRR-defined artifacts. use_tsk: toggle for use_tsk flag. reason: justification for GRR access. grr_server_url: GRR server URL. grr_username: GRR username. grr_password: GRR password. approvers: str, comma-separated list of GRR approval recipients. verify: boolean, whether to verify the GRR server's x509 certificate.
def setup(self, artifacts, use_tsk, reason, grr_server_url, grr_username,
          grr_password, approvers=None, verify=True):
    """Initializes a GRR Hunt artifact collector.

    Args:
        artifacts: str, comma-separated list of GRR-defined artifacts.
        use_tsk: toggle for use_tsk flag.
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: str, comma-separated list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    super(GRRHuntArtifactCollector, self).setup(
        reason, grr_server_url, grr_username, grr_password,
        approvers=approvers, verify=verify)

    self.artifacts = [artifact.strip() for artifact
                      in artifacts.strip().split(',')]
    if not artifacts:
        self.state.add_error('No artifacts were specified.', critical=True)
    self.use_tsk = use_tsk
372,053
Initializes a GRR Hunt file collector. Args: file_path_list: comma-separated list of file paths. reason: justification for GRR access. grr_server_url: GRR server URL. grr_username: GRR username. grr_password: GRR password. approvers: comma-separated list of GRR approval recipients. verify: boolean, whether to verify the GRR server's x509 certificate.
def setup(self, file_path_list, reason, grr_server_url, grr_username,
          grr_password, approvers=None, verify=True):
    """Initializes a GRR Hunt file collector.

    Args:
        file_path_list: comma-separated list of file paths.
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: comma-separated list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    super(GRRHuntFileCollector, self).setup(
        reason, grr_server_url, grr_username, grr_password,
        approvers=approvers, verify=verify)

    self.file_path_list = [path.strip() for path
                           in file_path_list.strip().split(',')]
    if not file_path_list:
        self.state.add_error('Files must be specified for hunts', critical=True)
372,056
Initializes a GRR Hunt file collector. Args: hunt_id: Hunt ID to download results from. reason: justification for GRR access. grr_server_url: GRR server URL. grr_username: GRR username. grr_password: GRR password. approvers: comma-separated list of GRR approval recipients. verify: boolean, whether to verify the GRR server's x509 certificate.
def setup(self, hunt_id, reason, grr_server_url, grr_username, grr_password,
          approvers=None, verify=True):
    """Initializes a GRR Hunt downloader.

    Args:
        hunt_id: Hunt ID to download results from.
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: comma-separated list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    super(GRRHuntDownloader, self).setup(
        reason, grr_server_url, grr_username, grr_password,
        approvers=approvers, verify=verify)
    self.hunt_id = hunt_id
    # Hunt archives are downloaded and extracted under a fresh temp dir.
    self.output_path = tempfile.mkdtemp()
372,059
Download current set of files in results. Args: hunt: The GRR hunt object to download files from. Returns: list: tuples containing: str: human-readable description of the source of the collection. For example, the name of the source host. str: path to the collected data. Raises: ValueError: if approval is needed and approvers were not specified.
def collect_hunt_results(self, hunt):
    """Download the current set of files in the hunt results.

    Args:
        hunt: The GRR hunt object to download files from.

    Returns:
        list: tuples containing:
            str: human-readable description of the source of the collection,
                e.g. the name of the source host.
            str: path to the collected data.
        None if the archive was already downloaded.

    Raises:
        ValueError: if approval is needed and approvers were not specified.
    """
    if not os.path.isdir(self.output_path):
        os.makedirs(self.output_path)

    output_file_path = os.path.join(
        self.output_path, '.'.join((self.hunt_id, 'zip')))

    # Skip re-downloading an archive that already exists on disk.
    if os.path.exists(output_file_path):
        print('{0:s} already exists: Skipping'.format(output_file_path))
        return None

    # Downloading the archive may require (and will request) approval.
    self._check_approval_wrapper(
        hunt, self._get_and_write_archive, hunt, output_file_path)

    results = self._extract_hunt_results(output_file_path)
    print('Wrote results of {0:s} to {1:s}'.format(
        hunt.hunt_id, output_file_path))
    return results
372,060
Gets and writes a hunt archive. Function is necessary for the _check_approval_wrapper to work. Args: hunt: The GRR hunt object. output_file_path: The output path where to write the Hunt Archive.
def _get_and_write_archive(self, hunt, output_file_path):
    """Gets and writes a hunt archive.

    Function is necessary for the _check_approval_wrapper to work.

    Args:
        hunt: The GRR hunt object.
        output_file_path: The output path where to write the Hunt Archive.
    """
    hunt_archive = hunt.GetFilesArchive()
    hunt_archive.WriteToFile(output_file_path)
372,061
Extracts a GRR client's FQDN from its client_info.yaml file. Args: client_info_contents: The contents of the client_info.yaml file. Returns: A (str, str) tuple representing client ID and client FQDN.
def _get_client_fqdn(self, client_info_contents):
    """Extracts a GRR client's FQDN from its client_info.yaml file.

    Args:
        client_info_contents: The contents of the client_info.yaml file.

    Returns:
        A (str, str) tuple representing client ID and client FQDN.
    """
    client_info = yaml.safe_load(client_info_contents)
    # client_id has the form 'aff4:/C.xxxx...'; keep only the ID part.
    client_id = client_info['client_id'].split('/')[1]
    fqdn = client_info['system_info']['fqdn']
    return client_id, fqdn
372,062
Open a hunt output archive and extract files. Args: output_file_path: The path where the hunt archive is downloaded to. Returns: list: tuples containing: str: The name of the client from where the files were downloaded. str: The directory where the files were downloaded to.
def _extract_hunt_results(self, output_file_path):
    """Open a hunt output archive and extract files.

    Args:
        output_file_path: The path where the hunt archive is downloaded to.

    Returns:
        list: tuples containing:
            str: The name of the client from where the files were downloaded.
            str: The directory where the files were downloaded to.
        An empty list is returned on extraction errors.
    """
    # Extract items from archive by host for processing
    collection_paths = []
    client_ids = set()
    client_id_to_fqdn = {}
    hunt_dir = None
    try:
        with zipfile.ZipFile(output_file_path) as archive:
            items = archive.infolist()
            for f in items:

                if not hunt_dir:
                    hunt_dir = f.filename.split('/')[0]

                # If we're dealing with client_info.yaml, use it to build a
                # client ID to FQDN correspondence table & skip extraction.
                if f.filename.split('/')[-1] == 'client_info.yaml':
                    client_id, fqdn = self._get_client_fqdn(archive.read(f))
                    client_id_to_fqdn[client_id] = fqdn
                    continue

                client_id = f.filename.split('/')[1]
                if client_id.startswith('C.'):
                    if client_id not in client_ids:
                        client_directory = os.path.join(self.output_path,
                                                        hunt_dir, client_id)
                        collection_paths.append((client_id, client_directory))
                        client_ids.add(client_id)
                    try:
                        archive.extract(f, self.output_path)
                    except KeyError as exception:
                        print('Extraction error: {0:s}'.format(exception))
                        return []

    except OSError as exception:
        msg = 'Error manipulating file {0:s}: {1!s}'.format(
            output_file_path, exception)
        self.state.add_error(msg, critical=True)
        return []
    except zipfile.BadZipfile as exception:
        msg = 'Bad zipfile {0:s}: {1!s}'.format(
            output_file_path, exception)
        self.state.add_error(msg, critical=True)
        return []

    # The archive is no longer needed once its contents are extracted.
    try:
        os.remove(output_file_path)
    except OSError as exception:
        print('Output path {0:s} could not be removed: {1:s}'.format(
            output_file_path, exception))

    # Translate GRR client IDs to FQDNs with the information retrieved
    # earlier
    fqdn_collection_paths = []
    for client_id, path in collection_paths:
        # Fall back to the raw client ID if no FQDN mapping was found.
        fqdn = client_id_to_fqdn.get(client_id, client_id)
        fqdn_collection_paths.append((fqdn, path))

    if not fqdn_collection_paths:
        self.state.add_error('Nothing was extracted from the hunt archive',
                             critical=True)
        return []

    return fqdn_collection_paths
372,063
Gets extra configuration parameters. These parameters should be loaded through load_extra or load_extra_data. Args: name: str, the name of the configuration data to load. Returns: A dictionary containing the requested configuration data. None if data was never loaded under that name.
def get_extra(cls, name=None):
    """Gets extra configuration parameters.

    These parameters should be loaded through load_extra or load_extra_data.

    Args:
        name: str, the name of the configuration data to load.

    Returns:
        A dictionary containing the requested configuration data (the whole
        extra configuration when no name is given). None if data was never
        loaded under that name.
    """
    if name:
        return cls._extra_config.get(name, None)
    return cls._extra_config
372,065
Loads extra JSON configuration parameters from a file on the filesystem. Args: filename: str, the filename to open. Returns: bool: True if the extra configuration parameters were read.
def load_extra(cls, filename):
    """Loads extra JSON configuration parameters from a file.

    Args:
        filename: str, the filename to open.

    Returns:
        bool: True if the extra configuration parameters were read.
    """
    try:
        with open(filename, 'rb') as config_file:
            cls.load_extra_data(config_file.read())
        sys.stderr.write(
            'Config successfully loaded from {0:s}\n'.format(filename))
        return True
    except IOError:
        return False
372,066
Loads extra JSON configuration parameters from a data buffer. The data buffer must represent a JSON object. Args: data: str, the buffer to load the JSON data from.
def load_extra_data(cls, data):
    """Loads extra JSON configuration parameters from a data buffer.

    The data buffer must represent a JSON object. Exits the process on
    invalid JSON.

    Args:
        data: str, the buffer to load the JSON data from.
    """
    try:
        cls._extra_config.update(json.loads(data))
    except ValueError as exception:
        # Bug fixes: the message said 'Could convert' (missing "not"), and
        # '{0:s}' raises TypeError when formatting an exception object in
        # Python 3 — '!s' converts it to str first.
        sys.stderr.write(
            'Could not convert to JSON. {0!s}'.format(exception))
        exit(-1)
372,067
Registers a dftimewolf recipe. Args: recipe: imported python module representing the recipe.
def register_recipe(cls, recipe):
    """Registers a dftimewolf recipe.

    Args:
        recipe: imported python module representing the recipe.
    """
    recipe_name = recipe.contents['name']
    # Store the recipe contents, its argument spec and its docstring.
    cls._recipe_classes[recipe_name] = (
        recipe.contents, recipe.args, recipe.__doc__)
372,068
Initializes the analysis report. Args: module_name (str): name of the analysis plugin that generated the report. text (str): report text.
def __init__(self, module_name, text):
    """Initializes the analysis report.

    Args:
        module_name (str): name of the analysis plugin that generated the
            report.
        text (str): report text.
    """
    super(Report, self).__init__()
    self.module_name = module_name
    self.text = text
372,069
Search GRR by hostname and get the latest active client. Args: hostname: hostname to search for. Returns: GRR API Client object Raises: DFTimewolfError: if no client ID found for hostname.
def _get_client_by_hostname(self, hostname):
    """Search GRR by hostname and get the latest active client.

    Args:
        hostname: hostname to search for.

    Returns:
        GRR API Client object, or None if no client was found (an error is
        recorded on the module state in that case).
    """
    # Search for the hostname in GRR
    print('Searching for client: {0:s}'.format(hostname))
    try:
        search_result = self.grr_api.SearchClients(hostname)
    except grr_errors.UnknownError as exception:
        self.state.add_error('Could not search for host {0:s}: {1!s}'.format(
            hostname, exception
        ), critical=True)
        return None

    # Keep only clients whose FQDN actually contains the requested hostname.
    result = []
    for client in search_result:
        if hostname.lower() in client.data.os_info.fqdn.lower():
            result.append((client.data.last_seen_at, client))

    if not result:
        self.state.add_error(
            'Could not get client_id for {0:s}'.format(hostname),
            critical=True)
        return None

    # Pick the most recently seen client.
    last_seen, client = sorted(result, key=lambda x: x[0], reverse=True)[0]
    # last_seen_at is in microseconds since epoch; drop sub-second precision.
    last_seen_datetime = datetime.datetime.utcfromtimestamp(
        last_seen / 1000000)
    # Timedelta between now and when the client was last seen, in minutes.
    # First, count total seconds. This will return a float.
    last_seen_seconds = (
        datetime.datetime.utcnow() - last_seen_datetime).total_seconds()
    last_seen_minutes = int(round(last_seen_seconds / 60))

    # Bug fix: the original printed the same "found active client" message
    # twice in two slightly different formats; keep a single message.
    print('Found active client: {0:s}'.format(client.client_id))
    print('Client last seen: {0:s} ({1:d} minutes ago)'.format(
        last_seen_datetime.strftime('%Y-%m-%dT%H:%M:%S+0000'),
        last_seen_minutes))

    return client
372,071
Finds GRR clients given a list of hosts. Args: hosts: List of hostname FQDNs Returns: List of GRR client objects.
def find_clients(self, hosts):
    """Finds GRR clients given a list of hosts.

    Args:
        hosts: List of hostname FQDNs.

    Returns:
        List of GRR client objects (hosts that could not be resolved are
        omitted).
    """
    # TODO(tomchop): Thread this
    lookups = (self._get_client_by_hostname(host) for host in hosts)
    return [client for client in lookups if client is not None]
372,072
Get GRR client dictionary and make sure valid approvals exist. Args: client_id: GRR client ID. Returns: GRR API Client object
def _get_client_by_id(self, client_id):
    """Get GRR client dictionary and make sure valid approvals exist.

    Args:
        client_id: GRR client ID.

    Returns:
        GRR API Client object.
    """
    client = self.grr_api.Client(client_id)
    print('Checking for client approval')
    # ListFlows requires approval, so this verifies (or requests) it.
    self._check_approval_wrapper(client, client.ListFlows)
    print('{0:s}: Client approval is valid'.format(client_id))
    return client.Get()
372,073
Create specified flow, setting KeepAlive if requested. Args: client: GRR Client object on which to launch the flow. name: string containing flow name. args: proto (*FlowArgs) for type of flow, as defined in GRR flow proto. Returns: string containing ID of launched flow
def _launch_flow(self, client, name, args):
    """Create specified flow, setting KeepAlive if requested.

    Args:
        client: GRR Client object on which to launch the flow.
        name: string containing flow name.
        args: proto (*FlowArgs) for type of flow, as defined in GRR flow
            proto.

    Returns:
        string containing ID of launched flow.
    """
    # Start the flow and get the flow ID
    flow = self._check_approval_wrapper(
        client, client.CreateFlow, name=name, args=args)
    flow_id = flow.flow_id
    print('{0:s}: Scheduled'.format(flow_id))

    if self.keepalive:
        # Schedule a companion KeepAlive flow alongside the requested one.
        keepalive_flow = client.CreateFlow(
            name='KeepAlive', args=flows_pb2.KeepAliveArgs())
        print('KeepAlive Flow:{0:s} scheduled'.format(keepalive_flow.flow_id))

    return flow_id
372,074
Awaits flow completion. Args: client: GRR Client object in which to await the flow. flow_id: string containing ID of flow to await. Raises: DFTimewolfError: if flow error encountered.
def _await_flow(self, client, flow_id):
    """Awaits flow completion.

    Polls the flow state until it terminates or errors out.

    Args:
        client: GRR Client object in which to await the flow.
        flow_id: string containing ID of flow to await.

    Raises:
        DFTimewolfError: if flow error encountered.
    """
    # Wait for the flow to finish
    print('{0:s}: Waiting to finish'.format(flow_id))
    while True:
        try:
            status = client.Flow(flow_id).Get().data
        except grr_errors.UnknownError:
            msg = 'Unable to stat flow {0:s} for host {1:s}'.format(
                flow_id, client.data.os_info.fqdn.lower())
            self.state.add_error(msg)
            raise DFTimewolfError(
                'Unable to stat flow {0:s} for host {1:s}'.format(
                    flow_id, client.data.os_info.fqdn.lower()))

        if status.state == flows_pb2.FlowContext.ERROR:
            # TODO(jbn): If one artifact fails, what happens? Test.
            message = status.context.backtrace
            # For unregistered artifacts, surface only the meaningful last
            # line of the backtrace instead of the full trace.
            if 'ArtifactNotRegisteredError' in status.context.backtrace:
                message = status.context.backtrace.split('\n')[-2]
            raise DFTimewolfError(
                '{0:s}: FAILED! Message from GRR:\n{1:s}'.format(
                    flow_id, message))

        if status.state == flows_pb2.FlowContext.TERMINATED:
            print('{0:s}: Complete'.format(flow_id))
            break
        time.sleep(self._CHECK_FLOW_INTERVAL_SEC)
372,075
Download files from the specified flow. Args: client: GRR Client object to which to download flow data from. flow_id: GRR flow ID. Returns: str: path of downloaded files.
def _download_files(self, client, flow_id):
    """Download files from the specified flow.

    Args:
        client: GRR Client object to which to download flow data from.
        flow_id: GRR flow ID.

    Returns:
        str: path of downloaded and extracted files, or None if the archive
            was already downloaded.
    """
    output_file_path = os.path.join(
        self.output_path, '.'.join((flow_id, 'zip')))

    if os.path.exists(output_file_path):
        print('{0:s} already exists: Skipping'.format(output_file_path))
        return None

    flow = client.Flow(flow_id)
    file_archive = flow.GetFilesArchive()
    file_archive.WriteToFile(output_file_path)

    # Unzip archive for processing and remove redundant zip
    fqdn = client.data.os_info.fqdn.lower()
    client_output_file = os.path.join(self.output_path, fqdn)
    if not os.path.isdir(client_output_file):
        os.makedirs(client_output_file)

    with zipfile.ZipFile(output_file_path) as archive:
        archive.extractall(path=client_output_file)
    os.remove(output_file_path)

    return client_output_file
372,076
Process a single GRR client. Args: client: a GRR client object.
def _process_thread(self, client):
    """Process a single GRR client: collect artifacts and download results.

    Args:
        client: a GRR client object.
    """
    system_type = client.data.os_info.system
    print('System type: {0:s}'.format(system_type))

    # If the list is supplied by the user via a flag, honor that.
    artifact_list = []
    if self.artifacts:
        print('Artifacts to be collected: {0!s}'.format(self.artifacts))
        artifact_list = self.artifacts
    else:
        # Otherwise fall back to the per-OS defaults in artifact_registry.
        default_artifacts = self.artifact_registry.get(system_type, None)
        if default_artifacts:
            print('Collecting default artifacts for {0:s}: {1:s}'.format(
                system_type, ', '.join(default_artifacts)))
            artifact_list.extend(default_artifacts)

    if self.extra_artifacts:
        print('Throwing in an extra {0!s}'.format(self.extra_artifacts))
        artifact_list.extend(self.extra_artifacts)
        # De-duplicate while discarding order.
        artifact_list = list(set(artifact_list))

    if not artifact_list:
        return

    flow_args = flows_pb2.ArtifactCollectorFlowArgs(
        artifact_list=artifact_list,
        use_tsk=self.use_tsk,
        ignore_interpolation_errors=True,
        apply_parsers=False)
    flow_id = self._launch_flow(client, 'ArtifactCollectorFlow', flow_args)
    self._await_flow(client, flow_id)
    collected_flow_data = self._download_files(client, flow_id)
    if collected_flow_data:
        print('{0!s}: Downloaded: {1:s}'.format(flow_id, collected_flow_data))
        fqdn = client.data.os_info.fqdn.lower()
        self.state.output.append((fqdn, collected_flow_data))
372,079
Initializes a GRR file collector. Args: hosts: Comma-separated list of hostnames to launch the flow on. files: list of file paths. use_tsk: toggle for use_tsk flag on GRR flow. reason: justification for GRR access. grr_server_url: GRR server URL. grr_username: GRR username. grr_password: GRR password. approvers: list of GRR approval recipients. verify: boolean, whether to verify the GRR server's x509 certificate.
def setup(self, hosts, files, use_tsk, reason, grr_server_url, grr_username,
          grr_password, approvers=None, verify=True):
    """Initializes a GRR file collector.

    Args:
        hosts: Comma-separated list of hostnames to launch the flow on.
        files: list of file paths.
        use_tsk: toggle for use_tsk flag on GRR flow.
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    super(GRRFileCollector, self).setup(
        reason, grr_server_url, grr_username, grr_password,
        approvers=approvers, verify=verify)

    if files is not None:
        self.files = [path.strip() for path in files.strip().split(',')]

    self.hostnames = [hostname.strip() for hostname
                      in hosts.strip().split(',')]
    self.use_tsk = use_tsk
372,082
Process a single client. Args: client: GRR client object to act on.
def _process_thread(self, client):
    """Process a single client: run a FileFinder flow and download results.

    Args:
        client: GRR client object to act on.
    """
    file_list = self.files
    if not file_list:
        return
    print('Filefinder to collect {0:d} items'.format(len(file_list)))

    # DOWNLOAD transfers the matched files back to the GRR server.
    flow_action = flows_pb2.FileFinderAction(
        action_type=flows_pb2.FileFinderAction.DOWNLOAD)
    flow_args = flows_pb2.FileFinderArgs(
        paths=file_list,
        action=flow_action,)
    flow_id = self._launch_flow(client, 'FileFinder', flow_args)
    self._await_flow(client, flow_id)
    collected_flow_data = self._download_files(client, flow_id)
    if collected_flow_data:
        print('{0!s}: Downloaded: {1:s}'.format(flow_id, collected_flow_data))
        fqdn = client.data.os_info.fqdn.lower()
        self.state.output.append((fqdn, collected_flow_data))
372,083
Initializes a GRR flow collector. Args: host: hostname of machine. flow_id: ID of GRR flow to retrieve. reason: justification for GRR access. grr_server_url: GRR server URL. grr_username: GRR username. grr_password: GRR password. approvers: list of GRR approval recipients. verify: boolean, whether to verify the GRR server's x509 certificate.
def setup(self, host, flow_id, reason, grr_server_url, grr_username,
          grr_password, approvers=None, verify=True):
    """Initializes a GRR flow collector.

    Args:
        host: hostname of machine.
        flow_id: ID of GRR flow to retrieve.
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    super(GRRFlowCollector, self).setup(
        reason, grr_server_url, grr_username, grr_password,
        approvers=approvers, verify=verify)
    self.flow_id = flow_id
    self.host = host
372,085
Sets up the _timezone attribute. Args: timezone: Timezone name (optional)
def setup(self, timezone=None):  # pylint: disable=arguments-differ
    """Sets up the _timezone attribute.

    Args:
        timezone: Timezone name (optional).
    """
    self._timezone = timezone
    # Processing results are written under a fresh temporary directory.
    self._output_path = tempfile.mkdtemp()
372,091
Initializes a GRR hunt result collector. Args: reason: justification for GRR access. grr_server_url: GRR server URL. grr_username: GRR username. grr_password: GRR password. approvers: list of GRR approval recipients. verify: boolean, whether to verify the GRR server's x509 certificate.
def setup(self, reason, grr_server_url, grr_username, grr_password,
          approvers=None, verify=True):
    """Initializes a GRR module.

    Args:
        reason: justification for GRR access.
        grr_server_url: GRR server URL.
        grr_username: GRR username.
        grr_password: GRR password.
        approvers: list of GRR approval recipients.
        verify: boolean, whether to verify the GRR server's x509 certificate.
    """
    if approvers:
        self.approvers = [email.strip() for email
                          in approvers.strip().split(',')]
    else:
        self.approvers = []

    self.grr_api = grr_api.InitHttp(api_endpoint=grr_server_url,
                                    auth=(grr_username, grr_password),
                                    verify=verify)
    self.output_path = tempfile.mkdtemp()
    self.reason = reason
372,094
Wraps a call to GRR functions checking for approval. Args: grr_object: the GRR object to create the eventual approval on. grr_function: The GRR function requiring approval. *args: Positional arguments that are to be passed to `grr_function`. **kwargs: Keyword arguments that are to be passed to `grr_function`. Returns: The return value of the execution of grr_function(*args, **kwargs).
def _check_approval_wrapper(self, grr_object, grr_function, *args, **kwargs):
    """Wraps a call to GRR functions checking for approval.

    Retries `grr_function` until approval is granted, sending a single
    approval request if none exists.

    Args:
        grr_object: the GRR object to create the eventual approval on.
        grr_function: The GRR function requiring approval.
        *args: Positional arguments that are to be passed to `grr_function`.
        **kwargs: Keyword arguments that are to be passed to `grr_function`.

    Returns:
        The return value of the execution of grr_function(*args, **kwargs),
        or None if no approvers are configured and approval is required.
    """
    approval_sent = False

    while True:
        try:
            return grr_function(*args, **kwargs)
        except grr_errors.AccessForbiddenError as exception:
            print('No valid approval found: {0!s}'.format(exception))
            # If approval was already sent, just wait a bit more.
            if approval_sent:
                print('Approval not yet granted, waiting {0:d}s'.format(
                    self._CHECK_APPROVAL_INTERVAL_SEC))
                time.sleep(self._CHECK_APPROVAL_INTERVAL_SEC)
                continue

            # If no approvers were specified, abort.
            if not self.approvers:
                message = ('GRR needs approval but no approvers specified '
                           '(hint: use --approvers)')
                self.state.add_error(message, critical=True)
                return None

            # Otherwise, send a request for approval
            grr_object.CreateApproval(
                reason=self.reason, notified_users=self.approvers)
            approval_sent = True
            print('{0!s}: approval request sent to: {1!s} (reason: {2:s})'.format(
                grr_object, self.approvers, self.reason))
372,095
Initialize the Timesketch API client object. Args: host_url (str): URL of Timesketch instance username (str): Timesketch username password (str): Timesketch password
def __init__(self, host_url, username, password):
    """Initialize the Timesketch API client object.

    Args:
        host_url (str): URL of Timesketch instance.
        username (str): Timesketch username.
        password (str): Timesketch password.
    """
    self.host_url = host_url
    self.api_base_url = '{0:s}/api/v1'.format(self.host_url)
    self.username = username
    # Authenticated HTTP session reused by all API calls.
    self.session = self._create_session(username, password)
372,098
Create HTTP session. Args: username (str): Timesketch username password (str): Timesketch password Returns: requests.Session: Session object.
def _create_session(self, username, password):
    """Create an authenticated HTTP session.

    Args:
        username (str): Timesketch username.
        password (str): Timesketch password.

    Returns:
        requests.Session: Session object. NOTE(review): returns False (not a
            Session) when the server is unreachable — callers must check;
            confirm this is intended.
    """
    session = requests.Session()
    session.verify = False  # Depending on SSL cert is verifiable
    try:
        response = session.get(self.host_url)
    except requests.exceptions.ConnectionError:
        return False
    # Get the CSRF token from the response
    soup = BeautifulSoup(response.text, 'html.parser')
    csrf_token = soup.find('input', dict(name='csrf_token'))['value']
    login_data = dict(username=username, password=password)
    # Timesketch requires the CSRF token and referer on login.
    session.headers.update({
        'x-csrftoken': csrf_token,
        'referer': self.host_url
    })
    _ = session.post('{0:s}/login/'.format(self.host_url), data=login_data)
    return session
372,099
Create a new sketch with the specified name and description. Args: name (str): Title of sketch description (str): Description of sketch Returns: int: ID of created sketch
def create_sketch(self, name, description):
    """Create a new sketch with the specified name and description.

    Args:
        name (str): Title of sketch.
        description (str): Description of sketch.

    Returns:
        int: ID of created sketch.
    """
    response = self.session.post(
        '{0:s}/sketches/'.format(self.api_base_url),
        json={'name': name, 'description': description})
    # The created sketch is the first (and only) object in the response.
    return response.json()['objects'][0]['id']
372,100
Create a timeline with the specified name from the given plaso file. Args: timeline_name (str): Name of timeline plaso_storage_path (str): Local path of plaso file to be uploaded Returns: int: ID of uploaded timeline Raises: RuntimeError: When the JSON response from Timesketch cannot be decoded.
def upload_timeline(self, timeline_name, plaso_storage_path):
    """Create a timeline with the specified name from the given plaso file.

    Args:
        timeline_name (str): Name of timeline.
        plaso_storage_path (str): Local path of plaso file to be uploaded.

    Returns:
        int: ID of uploaded timeline.

    Raises:
        RuntimeError: When the JSON response from Timesketch cannot be
            decoded.
    """
    resource_url = '{0:s}/upload/'.format(self.api_base_url)
    data = {'name': timeline_name}
    # Bug fix: open the plaso file in a context manager so the handle is
    # always closed (the original leaked the open file object).
    with open(plaso_storage_path, 'rb') as storage_file:
        response = self.session.post(
            resource_url, files={'file': storage_file}, data=data)

    try:
        response_dict = response.json()
    except ValueError:
        # NOTE(review): '{1:s}' on response.content (bytes) would raise a
        # TypeError under Python 3 — confirm and switch to '!s' if needed.
        raise RuntimeError(
            'Could not decode JSON response from Timesketch'
            ' (Status {0:d}):\n{1:s}'.format(
                response.status_code, response.content))
    index_id = response_dict['objects'][0]['id']
    return index_id
372,101
Upload provided artifacts to specified, or new if non-existent, sketch. Args: processed_artifacts: List of (timeline_name, artifact_path) tuples sketch_id: ID of sketch to append the timeline to Returns: int: ID of sketch.
def export_artifacts(self, processed_artifacts, sketch_id):
    """Upload provided artifacts to the specified sketch.

    Args:
        processed_artifacts: List of (timeline_name, artifact_path) tuples.
        sketch_id: ID of sketch to append the timeline to.

    Returns:
        int: ID of sketch.
    """
    # Export processed timeline(s)
    for timeline_name, artifact_path in processed_artifacts:
        print('Uploading {0:s} to timeline {1:s}'.format(
            artifact_path, timeline_name))
        new_timeline_id = self.upload_timeline(timeline_name, artifact_path)
        self.add_timeline_to_sketch(sketch_id, new_timeline_id)

    return sketch_id
372,102