_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q262900
BasePlugin.get_user
validation
def get_user(self, username):
    """Look up a Slack user by username.

    Consults the bot's ``user_manager`` cache first (when the bot has one);
    on a cache miss the user is fetched from Slack and stored in the cache.

    :param username: The username of the user to lookup
    :return: SlackUser object or None
    """
    bot = self._bot
    if not hasattr(bot, 'user_manager'):
        # No cache available: always query Slack directly.
        return SlackUser.get_user(bot.sc, username)
    cached = bot.user_manager.get_by_username(username)
    if cached:
        return cached
    fetched = SlackUser.get_user(bot.sc, username)
    bot.user_manager.set(fetched)
    return fetched
python
{ "resource": "" }
q262901
webhook
validation
def webhook(*args, **kwargs):
    """
    Decorator to mark plugin functions as entry points for web calls

    * route - web route to register, uses Flask syntax
    * method - GET/POST, defaults to POST
    """
    def decorate(func):
        # Tag the function so the plugin loader can discover it.
        func.route = args[0]
        func.method = kwargs.get('method', 'POST')
        func.form_params = kwargs.get('form_params', [])
        func.is_webhook = True
        return func
    return decorate
python
{ "resource": "" }
q262902
freeze
validation
def freeze(value):
    """
    Cast value to its frozen counterpart.

    Lists become FrozenList, dicts become FrozenDict; any other value is
    returned unchanged.
    """
    # A value cannot be both a dict and a list, so check order is irrelevant.
    if isinstance(value, dict):
        return FrozenDict(**value)
    if isinstance(value, list):
        return FrozenList(*value)
    return value
python
{ "resource": "" }
q262903
Core.help
validation
def help(self, msg, args):
    """Displays help for each command"""
    if args:
        # Detailed help for one named command.
        return '\n'.join([self._get_help_for_command('!' + args[0])])
    entries = sorted(self._bot.dispatcher.commands.items(), key=itemgetter(0))
    entries = [(n, c) for n, c in entries if c.is_subcmd is False]
    # Filter commands if auth is enabled, hide_admin_commands is enabled,
    # and user is not admin.
    if self._should_filter_help_commands(msg.user):
        entries = [(n, c) for n, c in entries if c.admin_only is False]
    return '\n'.join(self._get_short_help_for_command(n) for n, c in entries)
python
{ "resource": "" }
q262904
Core.save
validation
def save(self, msg, args):
    """Causes the bot to write its current state to backend."""
    channel = msg.channel
    self.send_message(channel, "Saving current state...")
    self._bot.plugins.save_state()
    self.send_message(channel, "Done.")
python
{ "resource": "" }
q262905
Core.shutdown
validation
def shutdown(self, msg, args):
    """Causes the bot to gracefully shutdown."""
    requester = msg.user.username
    self.log.info("Received shutdown from %s", requester)
    # Clearing the runnable flag makes the main loop exit.
    self._bot.runnable = False
    return "Shutting down..."
python
{ "resource": "" }
q262906
Core.whoami
validation
def whoami(self, msg, args):
    """Prints information about the user and bot version."""
    lines = ["Hello %s" % msg.user]
    # Admin note only appears when auth is enabled AND the user is an admin.
    is_admin = hasattr(self._bot.dispatcher, 'auth_manager') and msg.user.is_admin is True
    if is_admin:
        lines.append("You are a *bot admin*.")
    lines.append("Bot version: %s-%s" % (self._bot.version, self._bot.commit))
    return '\n'.join(lines)
python
{ "resource": "" }
q262907
Core.sleep
validation
def sleep(self, channel):
    """Causes the bot to ignore all messages from the channel.

    Usage:
    !sleep [channel name] - ignore the specified channel (or current if none specified)
    """
    target = channel
    self.log.info('Sleeping in %s', target)
    self._bot.dispatcher.ignore(target)
    self.send_message(target, 'Good night')
python
{ "resource": "" }
q262908
Core.wake
validation
def wake(self, channel):
    """Causes the bot to resume operation in the channel.

    Usage:
    !wake [channel name] - unignore the specified channel (or current if none specified)
    """
    target = channel
    self.log.info('Waking up in %s', target)
    self._bot.dispatcher.unignore(target)
    self.send_message(target, 'Hello, how may I be of service?')
python
{ "resource": "" }
q262909
_sort_by
validation
def _sort_by(key): """ High order function for sort methods. """ @staticmethod def sort_by(p_list, reverse=False): return sorted( p_list, key=lambda p: getattr(p, key), reverse=reverse, ) return sort_by
python
{ "resource": "" }
q262910
PathFilters.select
validation
def select(self, filters=all_true, recursive=True):
    """Select path by criterion.

    :param filters: a callable taking a `pathlib.Path` as input and
        returning a boolean.
    :param recursive: include paths in subfolders or not.

    Yields each path under this directory that satisfies *filters*.
    """
    self.assert_is_dir_and_exists()
    candidates = self.glob("**/*") if recursive else self.iterdir()
    for candidate in candidates:
        if filters(candidate):
            yield candidate
python
{ "resource": "" }
q262911
PathFilters.select_file
validation
def select_file(self, filters=all_true, recursive=True):
    """Select file path by criterion.

    Yields only paths that are regular files (directories are skipped).
    """
    return (p for p in self.select(filters, recursive) if p.is_file())
python
{ "resource": "" }
q262912
PathFilters.select_dir
validation
def select_dir(self, filters=all_true, recursive=True):
    """Select dir path by criterion.

    Yields only paths that are directories (files are skipped).
    """
    return (p for p in self.select(filters, recursive) if p.is_dir())
python
{ "resource": "" }
q262913
PathFilters.n_file
validation
def n_file(self):
    """
    Count how many files are in this directory, including files in
    sub-folders.
    """
    self.assert_is_dir_and_exists()
    return sum(1 for _ in self.select_file(recursive=True))
python
{ "resource": "" }
q262914
PathFilters.n_dir
validation
def n_dir(self):
    """
    Count how many folders are in this directory, including folders in
    sub-folders.
    """
    self.assert_is_dir_and_exists()
    return sum(1 for _ in self.select_dir(recursive=True))
python
{ "resource": "" }
q262915
PathFilters.select_by_ext
validation
def select_by_ext(self, ext, recursive=True):
    """
    Select file path by extension.

    :param ext: a single extension (e.g. ".txt") or a list of extensions.

    Matching is case-insensitive and ignores surrounding whitespace.
    """
    # Avoid shadowing the parameter inside the comprehension.
    allowed = [e.strip().lower() for e in ensure_list(ext)]

    def filters(p):
        return p.suffix.lower() in allowed

    return self.select_file(filters, recursive)
python
{ "resource": "" }
q262916
PathFilters.select_by_pattern_in_fname
validation
def select_by_pattern_in_fname(self, pattern, recursive=True, case_sensitive=False):
    """
    Select file paths whose file name contains the given text *pattern*.

    :param pattern: substring to look for in the file name.
    :param recursive: include files in subfolders or not.
    :param case_sensitive: when False (default), comparison is lowercased.
    """
    if not case_sensitive:
        pattern = pattern.lower()

    def filters(p):
        fname = p.fname if case_sensitive else p.fname.lower()
        return pattern in fname

    return self.select_file(filters, recursive)
python
{ "resource": "" }
q262917
PathFilters.select_by_pattern_in_abspath
validation
def select_by_pattern_in_abspath(self, pattern, recursive=True, case_sensitive=False):
    """
    Select file paths whose absolute path contains the given text *pattern*.

    :param pattern: substring to look for in the absolute path.
    :param recursive: include files in subfolders or not.
    :param case_sensitive: when False (default), comparison is lowercased.
    """
    if not case_sensitive:
        pattern = pattern.lower()

    def filters(p):
        abspath = p.abspath if case_sensitive else p.abspath.lower()
        return pattern in abspath

    return self.select_file(filters, recursive)
python
{ "resource": "" }
q262918
PathFilters.select_by_size
validation
def select_by_size(self, min_size=0, max_size=1 << 40, recursive=True):
    """
    Select files whose size in bytes lies in ``[min_size, max_size]``.

    :param min_size: inclusive lower bound (bytes).
    :param max_size: inclusive upper bound (bytes), defaults to 1 TiB.
    """
    return self.select_file(
        lambda p: min_size <= p.size <= max_size, recursive)
python
{ "resource": "" }
q262919
PathFilters.select_by_mtime
validation
def select_by_mtime(self, min_time=0, max_time=ts_2100, recursive=True):
    """
    Select files whose modify time lies in ``[min_time, max_time]``.

    :param min_time: lower bound timestamp
    :param max_time: upper bound timestamp
    """
    return self.select_file(
        lambda p: min_time <= p.mtime <= max_time, recursive)
python
{ "resource": "" }
q262920
PathFilters.select_by_atime
validation
def select_by_atime(self, min_time=0, max_time=ts_2100, recursive=True):
    """
    Select files whose access time lies in ``[min_time, max_time]``.

    :param min_time: lower bound timestamp
    :param max_time: upper bound timestamp
    """
    return self.select_file(
        lambda p: min_time <= p.atime <= max_time, recursive)
python
{ "resource": "" }
q262921
PathFilters.select_by_ctime
validation
def select_by_ctime(self, min_time=0, max_time=ts_2100, recursive=True):
    """
    Select files whose create time lies in ``[min_time, max_time]``.

    :param min_time: lower bound timestamp
    :param max_time: upper bound timestamp
    """
    return self.select_file(
        lambda p: min_time <= p.ctime <= max_time, recursive)
python
{ "resource": "" }
q262922
ToolBoxZip.make_zip_archive
validation
def make_zip_archive(self, dst=None, filters=all_true, compress=True,
                     overwrite=False, makedirs=False, verbose=False):  # pragma: no cover
    """
    Make a zip archive of this path (a single file or a whole directory).

    :param dst: output file path. if not given, will be automatically assigned.
    :param filters: custom path filter. By default it allows any file.
    :param compress: compress or not.
    :param overwrite: overwrite exists or not.
    :param makedirs: create missing parent directories of ``dst`` when True.
    :param verbose: display log or not.
    :return:
    """
    self.assert_exists()
    # Resolve the destination: auto-name it next to this path, or normalize
    # the caller-supplied path into a Path of the same flavor.
    if dst is None:
        dst = self._auto_zip_archive_dst()
    else:
        dst = self.change(new_abspath=dst)
    if not dst.basename.lower().endswith(".zip"):
        raise ValueError("zip archive name has to be endswith '.zip'!")
    if dst.exists():
        if not overwrite:
            raise IOError("'%s' already exists!" % dst)
    if compress:
        compression = ZIP_DEFLATED
    else:
        compression = ZIP_STORED
    # Optionally create the destination's parent directory.
    # NOTE(review): when makedirs is False and the parent is missing,
    # ZipFile() below will fail with the raw OS error.
    if not dst.parent.exists():
        if makedirs:
            os.makedirs(dst.parent.abspath)
    if verbose:
        msg = "Making zip archive for '%s' ..." % self
        print(msg)
    # Remember cwd: archiving chdir's into the source so arcnames are relative.
    # NOTE(review): if an exception is raised while zipping, the cwd is NOT
    # restored (no try/finally around the chdir) — confirm this is acceptable.
    current_dir = os.getcwd()
    if self.is_dir():
        total_size = 0
        selected = list()
        for p in self.glob("**/*"):
            if filters(p):
                selected.append(p)
                total_size += p.size
        if verbose:
            msg = "Got {} files, total size is {}, compressing ...".format(
                len(selected), repr_data_size(total_size),
            )
            print(msg)
        with ZipFile(dst.abspath, "w", compression) as f:
            os.chdir(self.abspath)
            for p in selected:
                # Store entries relative to the archived directory.
                relpath = p.relative_to(self).__str__()
                f.write(relpath)
    elif self.is_file():
        with ZipFile(dst.abspath, "w", compression) as f:
            os.chdir(self.parent.abspath)
            f.write(self.basename)
    os.chdir(current_dir)
    if verbose:
        msg = "Complete! Archive size is {}.".format(dst.size_in_text)
        print(msg)
python
{ "resource": "" }
q262923
ToolBoxZip.backup
validation
def backup(self, dst=None, ignore=None, ignore_ext=None, ignore_pattern=None,
           ignore_size_smaller_than=None, ignore_size_larger_than=None,
           case_sensitive=False):  # pragma: no cover
    """
    Create a compressed zip archive backup for a directory.

    :param dst: the output file path.
    :param ignore: file or directory defined in this list will be ignored.
    :param ignore_ext: file with extensions defined in this list will be ignored.
    :param ignore_pattern: any file or directory that contains this pattern
      will be ignored.
    :param ignore_size_smaller_than: any file size smaller than this
      will be ignored.
    :param ignore_size_larger_than: any file size larger than this
      will be ignored.
    :param case_sensitive: when False (default), all ignore rules are
      compared in lower case.

    Creates a backup zip archive for a directory; the filter arguments
    choose which files are included.
    """
    # Normalize an ignore argument to a list (None -> [], scalar -> [scalar]).
    def preprocess_arg(arg):  # pragma: no cover
        if arg is None:
            return []
        if isinstance(arg, (tuple, list)):
            return list(arg)
        else:
            return [arg, ]
    self.assert_is_dir_and_exists()
    ignore = preprocess_arg(ignore)
    # Ignore entries must be relative (no leading path separator).
    for i in ignore:
        if i.startswith("/") or i.startswith("\\"):
            raise ValueError
    ignore_ext = preprocess_arg(ignore_ext)
    # Extensions must include the leading dot, e.g. ".txt".
    for ext in ignore_ext:
        if not ext.startswith("."):
            raise ValueError
    ignore_pattern = preprocess_arg(ignore_pattern)
    # Lowercase every rule once up front for case-insensitive matching.
    if case_sensitive:
        pass
    else:
        ignore = [i.lower() for i in ignore]
        ignore_ext = [i.lower() for i in ignore_ext]
        ignore_pattern = [i.lower() for i in ignore_pattern]

    # Combined predicate handed to make_zip_archive; True means "include".
    def filters(p):
        relpath = p.relative_to(self).abspath
        if not case_sensitive:
            relpath = relpath.lower()
        # ignore: prefix match against the relative path
        for i in ignore:
            if relpath.startswith(i):
                return False
        # ignore_ext
        if case_sensitive:
            ext = p.ext
        else:
            ext = p.ext.lower()
        if ext in ignore_ext:
            return False
        # ignore_pattern: substring match anywhere in the relative path
        for pattern in ignore_pattern:
            if pattern in relpath:
                return False
        # ignore_size_smaller_than
        if ignore_size_smaller_than:
            if p.size < ignore_size_smaller_than:
                return False
        # ignore_size_larger_than
        if ignore_size_larger_than:
            if p.size > ignore_size_larger_than:
                return False
        return True

    self.make_zip_archive(
        dst=dst, filters=filters, compress=True, overwrite=False,
        verbose=True, )
python
{ "resource": "" }
q262924
acquire_lock
validation
def acquire_lock(func):
    """Decorate methods when locking repository is required.

    The wrapped method only runs when the repository lock is acquired; any
    exception it raises is captured as a string and re-raised AFTER the lock
    has been released, so the lock is never held across the raise.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        with self.locker as r:
            # get the result: (acquired flag, status code, _)
            acquired, code, _ = r
            if acquired:
                try:
                    r = func(self, *args, **kwargs)
                except Exception as err:
                    # Defer the raise: only record the message here.
                    e = str(err)
                else:
                    e = None
            else:
                # Lock not acquired: warn and fall through with no result.
                warnings.warn("code %s. Unable to aquire the lock when calling '%s'. You may try again!"%(code,func.__name__) )
                e = None
                r = None
        # raise error after exiting with statement and releasing the lock!
        # NOTE(review): the original exception type is lost — a plain
        # Exception wrapping the message string is raised instead.
        if e is not None:
            traceback.print_stack()
            raise Exception(e)
        return r
    return wrapper
python
{ "resource": "" }
q262925
sync_required
validation
def sync_required(func):
    """Decorate methods when synchronizing repository is required.

    The wrapped method runs only when the on-disk state matches this
    instance's state (or checking is disabled); otherwise a warning is
    emitted and None is returned.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        # Fast path: synchronization checking disabled.
        if not self._keepSynchronized:
            return func(self, *args, **kwargs)
        state = self._load_state()
        if state is None or state == self.state:
            return func(self, *args, **kwargs)
        warnings.warn("Repository at '%s' is out of date. Need to load it again to avoid conflict."%self.path)
        return None
    return wrapper
python
{ "resource": "" }
q262926
get_pickling_errors
validation
def get_pickling_errors(obj, seen=None):
    """Investigate pickling errors.

    Walks the object's ``__getstate__`` and tries to pickle each entry,
    recursing into values that fail so the offending member can be located.

    :param obj: the object to inspect.
    :param seen: internal list of already-visited values (cycle guard).
    :return: dict mapping state keys to nested error reports; ``None`` when
        the object has no ``__getstate__``; the string
        ``'object state is None'`` when the state itself is None.
    """
    if seen is None:  # fixed: was `seen == None`
        seen = []
    if not hasattr(obj, "__getstate__"):
        return None
    state = obj.__getstate__()
    if state is None:
        return 'object state is None'
    if isinstance(state, tuple):
        if not isinstance(state[0], dict):
            state = state[1]
        else:
            # BUG FIX: the original did `state = state[0].update(state[1])`,
            # but dict.update() returns None, so iterating `state` below
            # raised TypeError. Merge into a fresh dict instead.
            merged = dict(state[0])
            merged.update(state[1])
            state = merged
    result = {}
    for key in state:
        try:
            pickle.dumps(state[key], protocol=2)
        except pickle.PicklingError:
            # Recurse into the failing member, guarding against cycles.
            if state[key] not in seen:
                seen.append(state[key])
                result[key] = get_pickling_errors(state[key], seen)
    return result
python
{ "resource": "" }
q262927
Repository.walk_files_relative_path
validation
def walk_files_relative_path(self, relativePath=""):
    """
    Walk the repository and yield all found files relative path joined with file name.

    :parameters:
        #. relativePath (str): The relative path from which start the walk.
    """
    # Recursive generator over the repository's nested info dicts.
    # dict.__getitem__ is called explicitly to bypass any override the
    # Repository dict-subclass may define.
    def walk_files(directory, relativePath):
        directories = dict.__getitem__(directory, 'directories')
        files = dict.__getitem__(directory, 'files')
        # Files of the current level first, in sorted order.
        for f in sorted(files):
            yield os.path.join(relativePath, f)
        # Then recurse into each sub-directory, in sorted order.
        for k in sorted(dict.keys(directories)):
            path = os.path.join(relativePath, k)
            dir = directories.__getitem__(k)
            for e in walk_files(dir, path):
                yield e
    dir, errorMessage = self.get_directory_info(relativePath)
    assert dir is not None, errorMessage
    # NOTE(review): the walk starts with relativePath='' so yielded paths are
    # relative to the given start directory, not prefixed with relativePath —
    # confirm this is the intended contract.
    return walk_files(dir, relativePath='')
python
{ "resource": "" }
q262928
Repository.walk_directories_relative_path
validation
def walk_directories_relative_path(self, relativePath=""):
    """
    Walk repository and yield all found directories relative path

    :parameters:
        #. relativePath (str): The relative path from which start the walk.
    """
    # Recursive generator over nested info dicts; dict.__getitem__/dict.keys
    # are called explicitly to bypass subclass overrides.
    def walk_directories(directory, relativePath):
        directories = dict.__getitem__(directory, 'directories')
        dirNames = dict.keys(directories)
        # Yield the directories of this level first, sorted.
        for d in sorted(dirNames):
            yield os.path.join(relativePath, d)
        # Then recurse into each of them, sorted.
        for k in sorted(dict.keys(directories)):
            path = os.path.join(relativePath, k)
            dir = dict.__getitem__(directories, k)
            for e in walk_directories(dir, path):
                yield e
    dir, errorMessage = self.get_directory_info(relativePath)
    assert dir is not None, errorMessage
    # NOTE(review): paths are yielded relative to the start directory
    # (walk begins with relativePath='').
    return walk_directories(dir, relativePath='')
python
{ "resource": "" }
q262929
Repository.walk_directories_info
validation
def walk_directories_info(self, relativePath=""):
    """
    Walk repository and yield all found directories relative path
    together with their info dict.

    :parameters:
        #. relativePath (str): The relative path from which start the walk.
    """
    # Recursive generator yielding (relative path, info dict) pairs.
    # dict.__getitem__ bypasses subclass overrides on the info dicts.
    def walk_directories(directory, relativePath):
        directories = dict.__getitem__(directory, 'directories')
        for fname in sorted(directories):
            info = dict.__getitem__(directories,fname)
            yield os.path.join(relativePath, fname), info
        # Recurse into each sub-directory, sorted.
        for k in sorted(dict.keys(directories)):
            path = os.path.join(relativePath, k)
            dir = dict.__getitem__(directories, k)
            for e in walk_directories(dir, path):
                yield e
    dir, errorMessage = self.get_directory_info(relativePath)
    assert dir is not None, errorMessage
    # NOTE(review): paths are yielded relative to the start directory.
    return walk_directories(dir, relativePath='')
python
{ "resource": "" }
q262930
Repository.walk_directory_directories_relative_path
validation
def walk_directory_directories_relative_path(self, relativePath=""):
    """
    Walk a certain directory in repository and yield the relative path of
    each of its direct sub-directories.

    :parameters:
        #. relativePath (str): The relative path of the directory.
    """
    normalized = os.path.normpath(relativePath)
    infoDict, error = self.get_directory_info(normalized)
    assert infoDict is not None, error
    # dict.__getitem__ bypasses any subclass override on the info dict.
    for name in dict.__getitem__(infoDict, "directories"):
        yield os.path.join(normalized, name)
python
{ "resource": "" }
q262931
Repository.synchronize
validation
def synchronize(self, verbose=False):
    """
    Synchronizes the Repository information with the directory.
    All registered but missing files and directories in the directory,
    will be automatically removed from the Repository.

    :parameters:
        #. verbose (boolean): Whether to be warn and inform about any abnormalities.
    """
    # Nothing to do for an uninitialized repository.
    if self.__path is None:
        return
    # walk directories: drop registry entries whose directory vanished on disk
    for dirPath in sorted(list(self.walk_directories_relative_path())):
        realPath = os.path.join(self.__path, dirPath)
        # if directory exist
        if os.path.isdir(realPath):
            continue
        if verbose:
            warnings.warn("%s directory is missing"%realPath)
        # loop to get dirInfoDict: descend to the PARENT of the missing entry
        keys = dirPath.split(os.sep)
        dirInfoDict = self
        for idx in range(len(keys)-1):
            dirs = dict.get(dirInfoDict, 'directories', None)
            if dirs is None:
                break
            dirInfoDict = dict.get(dirs, keys[idx], None)
            if dirInfoDict is None:
                break
        # remove dirInfoDict directory if existing
        if dirInfoDict is not None:
            dirs = dict.get(dirInfoDict, 'directories', None)
            if dirs is not None:
                dict.pop( dirs, keys[-1], None )
    # walk files: same procedure for registered files missing on disk
    for filePath in sorted(list(self.walk_files_relative_path())):
        realPath = os.path.join(self.__path, filePath)
        # if file exists
        if os.path.isfile( realPath ):
            continue
        if verbose:
            warnings.warn("%s file is missing"%realPath)
        # loop to get dirInfoDict: descend to the file's parent directory info
        keys = filePath.split(os.sep)
        dirInfoDict = self
        for idx in range(len(keys)-1):
            dirs = dict.get(dirInfoDict, 'directories', None)
            if dirs is None:
                break
            dirInfoDict = dict.get(dirs, keys[idx], None)
            if dirInfoDict is None:
                break
        # remove dirInfoDict file if existing
        if dirInfoDict is not None:
            files = dict.get(dirInfoDict, 'files', None)
            if files is not None:
                dict.pop( files, keys[-1], None )
python
{ "resource": "" }
q262932
Repository.load_repository
validation
def load_repository(self, path):
    """
    Load repository from a directory path and update the current instance.

    :Parameters:
        #. path (string): The path of the directory from where to load the repository.
           If '.' or an empty string is passed, the current working directory will be used.

    :Returns:
        #. repository (pyrep.Repository): returns self repository with loaded data.
    """
    # try to open
    if path.strip() in ('','.'):
        path = os.getcwd()
    repoPath = os.path.realpath( os.path.expanduser(path) )
    if not self.is_repository(repoPath):
        raise Exception("no repository found in '%s'"%str(repoPath))
    # get pyrepinfo path
    repoInfoPath = os.path.join(repoPath, ".pyrepinfo")
    try:
        fd = open(repoInfoPath, 'rb')
    except Exception as e:
        raise Exception("unable to open repository file(%s)"%e)
    # before doing anything try to lock repository
    # can't decorate with @acquire_lock because this will point to old repository
    # path or to current working directory which might not be the path anyways
    L = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(repoPath, ".pyreplock"))
    acquired, code = L.acquire_lock()
    # check if acquired.
    if not acquired:
        warnings.warn("code %s. Unable to aquire the lock when calling 'load_repository'. You may try again!"%(code,) )
        return
    try:
        # unpickle file
        try:
            repo = pickle.load( fd )
        except Exception as e:
            raise Exception("unable to pickle load repository (%s)"%e)
        finally:
            fd.close()
        # check if it's a PyrepInfo instance
        if not isinstance(repo, Repository):
            # BUG FIX: the original formatted this message with the undefined
            # name `s`, raising NameError instead of the intended Exception.
            raise Exception(".pyrepinfo in '%s' is not a repository instance."%str(repoPath))
        else:
            # update info path
            self.__reset_repository()
            self.__update_repository(repo)
            self.__path = repoPath
            # set timestamp
            self.__state = self._get_or_create_state()
    except Exception as e:
        # NOTE(review): release also happens in finally; Locker.release_lock
        # is assumed to tolerate a second call — confirm.
        L.release_lock()
        raise Exception(e)
    finally:
        L.release_lock()
    # set loaded repo locker path to L because repository have been moved to another directory
    self.__locker = L
    # return
    return self
python
{ "resource": "" }
q262933
Repository.get_repository
validation
def get_repository(self, path, info=None, verbose=True):
    """
    Create a repository at given real path or load any existing one.
    This method insures the creation of the directory in the system if
    it is missing.\n
    Unlike create_repository, this method doesn't erase any existing
    repository in the path but loads it instead.

    **N.B. On some systems and some paths, creating a directory may
    requires root permissions.**

    :Parameters:
        #. path (string): The real absolute path where to create the Repository.
           If '.' or an empty string is passed, the current working directory will be used.
        #. info (None, object): Any information that can identify the repository.
        #. verbose (boolean): Whether to be warn and informed about any abnormalities.
    """
    if path.strip() in ('', '.'):
        path = os.getcwd()
    realPath = os.path.realpath(os.path.expanduser(path))
    # Make sure the target directory exists on disk.
    if not os.path.isdir(realPath):
        os.makedirs(realPath)
    # Load when a repository already lives there, otherwise create one.
    if self.is_repository(realPath):
        self.load_repository(realPath)
    else:
        self.create_repository(realPath, info=info, verbose=verbose)
python
{ "resource": "" }
q262934
Repository.remove_repository
validation
def remove_repository(self, path=None, relatedFiles=False, relatedFolders=False, verbose=True):
    """
    Remove .pyrepinfo file from path if exists and related files and
    directories when respective flags are set to True.

    :Parameters:
        #. path (None, string): The path of the directory where to remove an existing repository.
           If None, current repository is removed if initialized.
        #. relatedFiles (boolean): Whether to also remove all related files from system as well.
        #. relatedFolders (boolean): Whether to also remove all related directories from system as well.
           Directories will be removed only if they are left empty after removing the files.
        #. verbose (boolean): Whether to be warn and informed about any abnormalities.
    """
    # Resolve which repository path to operate on.
    if path is not None:
        realPath = os.path.realpath( os.path.expanduser(path) )
    else:
        realPath = self.__path
    if realPath is None:
        if verbose:
            warnings.warn('path is None and current Repository is not initialized!')
        return
    if not self.is_repository(realPath):
        if verbose:
            warnings.warn("No repository found in '%s'!"%realPath)
        return
    # check for security: refuse to operate on the filesystem root.
    # NOTE(review): the exact whitespace of this warning string was lost in
    # transcription — confirm against the original source.
    if realPath == os.path.realpath('/..') :
        if verbose:
            warnings.warn('You are about to wipe out your system !!! action aboarded')
        return
    # get repo: load a fresh instance when an explicit path was given.
    if path is not None:
        repo = Repository()
        repo.load_repository(realPath)
    else:
        repo = self
    # delete files registered in the repository
    if relatedFiles:
        for relativePath in repo.walk_files_relative_path():
            realPath = os.path.join(repo.path, relativePath)
            if not os.path.isfile(realPath):
                continue
            if not os.path.exists(realPath):
                continue
            os.remove( realPath )
    # delete directories, deepest first, and only when left empty
    if relatedFolders:
        for relativePath in reversed(list(repo.walk_directories_relative_path())):
            realPath = os.path.join(repo.path, relativePath)
            # protect from wiping out the system
            if not os.path.isdir(realPath):
                continue
            if not os.path.exists(realPath):
                continue
            if not len(os.listdir(realPath)):
                os.rmdir( realPath )
    # delete repository bookkeeping files
    os.remove( os.path.join(repo.path, ".pyrepinfo" ) )
    for fname in (".pyrepstate", ".pyreplock"):
        p = os.path.join(repo.path, fname )
        if os.path.exists( p ):
            os.remove( p )
    # remove main directory if empty
    if os.path.isdir(repo.path):
        if not len(os.listdir(repo.path)):
            os.rmdir( repo.path )
    # reset repository in-memory structure
    repo.__reset_repository()
python
{ "resource": "" }
q262935
Repository.save
validation
def save(self):
    """
    Save repository .pyrepinfo to disk and refresh the .pyrepstate
    timestamp file.

    :raises Exception: when the info file cannot be opened, the repository
        cannot be pickled, or the state file cannot be written.
    """
    # open file
    repoInfoPath = os.path.join(self.__path, ".pyrepinfo")
    try:
        fdinfo = open(repoInfoPath, 'wb')
    except Exception as e:
        raise Exception("unable to open repository info for saving (%s)"%e)
    # save repository
    try:
        pickle.dump( self, fdinfo, protocol=2 )
    except Exception as e:
        # BUG FIX: the original also flushed/fsynced/closed the file here,
        # after which the finally clause flushed the already-closed handle
        # and raised ValueError, masking the real pickling error. Cleanup
        # is now done once, in finally.
        raise Exception( "Unable to save repository info (%s)"%e )
    finally:
        fdinfo.flush()
        os.fsync(fdinfo.fileno())
        fdinfo.close()
    # save timestamp (also cached on the instance as bytes)
    repoTimePath = os.path.join(self.__path, ".pyrepstate")
    try:
        self.__state = ("%.6f"%time.time()).encode()
        with open(repoTimePath, 'wb') as fdtime:
            fdtime.write( self.__state )
            fdtime.flush()
            os.fsync(fdtime.fileno())
    except Exception as e:
        raise Exception("unable to open repository time stamp for saving (%s)"%e)
python
{ "resource": "" }
q262936
Repository.create_package
validation
def create_package(self, path=None, name=None, mode=None):
    """
    Create a tar file package of all the repository files and directories.
    Only files and directories that are stored in the repository info
    are stored in the package tar file.

    **N.B. On some systems packaging requires root permissions.**

    :Parameters:
        #. path (None, string): The real absolute path where to create the package.
           If None, it will be created in the same directory as the repository
           If '.' or an empty string is passed, the current working directory will be used.
        #. name (None, string): The name to give to the package file
           If None, the package directory name will be used with the appropriate extension added.
        #. mode (None, string): The writing mode of the tarfile.
           If None, automatically the best compression mode will be chose.
           Available modes are ('w', 'w:', 'w:gz', 'w:bz2')
    """
    # check mode
    assert mode in (None, 'w', 'w:', 'w:gz', 'w:bz2'), 'unkown archive mode %s'%str(mode)
    if mode is None:
        mode = 'w:bz2'
    # NOTE(review): this unconditional override forces uncompressed 'w:' and
    # defeats both the caller's mode and the 'w:bz2' default above — it looks
    # like a debugging leftover; confirm before removing.
    mode = 'w:'
    # get root: directory in which the package file will be created
    if path is None:
        root = os.path.split(self.__path)[0]
    elif path.strip() in ('','.'):
        root = os.getcwd()
    else:
        root = os.path.realpath( os.path.expanduser(path) )
    assert os.path.isdir(root), 'absolute path %s is not a valid directory'%path
    # get name: derive the archive extension from the tar mode
    if name is None:
        ext = mode.split(":")
        if len(ext) == 2:
            if len(ext[1]):
                ext = "."+ext[1]
            else:
                ext = '.tar'
        else:
            ext = '.tar'
        name = os.path.split(self.__path)[1]+ext
    # save repository so the packaged .pyrepinfo is up to date
    self.save()
    # create tar file
    tarfilePath = os.path.join(root, name)
    try:
        tarHandler = tarfile.TarFile.open(tarfilePath, mode=mode)
    except Exception as e:
        raise Exception("Unable to create package (%s)"%e)
    # walk directory and create empty directories
    for directory in sorted(list(self.walk_directories_relative_path())):
        t = tarfile.TarInfo( directory )
        t.type = tarfile.DIRTYPE
        tarHandler.addfile(t)
    # walk files and add to tar
    for file in self.walk_files_relative_path():
        tarHandler.add(os.path.join(self.__path,file), arcname=file)
    # save repository .pyrepinfo
    tarHandler.add(os.path.join(self.__path,".pyrepinfo"), arcname=".pyrepinfo")
    # close tar file
    tarHandler.close()
python
{ "resource": "" }
q262937
Repository.get_directory_info
validation
def get_directory_info(self, relativePath):
    """
    get directory info from the Repository.

    :Parameters:
        #. relativePath (string): The relative to the repository path of the directory.

    :Returns:
        #. info (None, dictionary): The directory information dictionary.
           If None, it means an error has occurred.
        #. error (string): The error message if any error occurred.
    """
    relativePath = os.path.normpath(relativePath)
    # if root directory: the repository itself is the root info dict
    if relativePath in ('','.'):
        return self, ""
    currentDir = self.__path
    dirInfoDict = self
    # Descend one path component at a time, verifying BOTH that the
    # directory exists on disk and that it is registered in the repository.
    for dir in relativePath.split(os.sep):
        dirInfoDict = dict.__getitem__(dirInfoDict, "directories")
        currentDir = os.path.join(currentDir, dir)
        # check if path exists
        if not os.path.exists(currentDir):
            return None, "directory '%s' is not found"%currentDir
        val = dirInfoDict.get(dir, None)
        # check if directory is registered in repository
        if val is None:
            return None, "directory '%s' is not registered in PyrepInfo"%currentDir
        dirInfoDict = val
    return dirInfoDict, ""
python
{ "resource": "" }
q262938
Repository.get_parent_directory_info
validation
def get_parent_directory_info(self, relativePath):
    """
    get parent directory info of a file or directory from the Repository.

    :Parameters:
        #. relativePath (string): The relative to the repository path of
           the file or directory of which the parent directory info is requested.

    :Returns:
        #. info (None, dictionary): The directory information dictionary.
           If None, it means an error has occurred.
        #. error (string): The error message if any error occurred.
    """
    normalized = os.path.normpath(relativePath)
    if normalized in ('', '.'):
        # The repository root has no parent; return the repository itself.
        return self, "relativePath is empty pointing to the repostitory itself."
    parent = os.path.split(normalized)[0]
    return self.get_directory_info(parent)
python
{ "resource": "" }
q262939
Repository.get_file_info
validation
def get_file_info(self, relativePath, name=None):
    """
    get file information dict from the repository given its relative path and name.

    :Parameters:
        #. relativePath (string): The relative to the repository path of
           the directory where the file is.
        #. name (string): The file name. If None is given, name will be
           split from relativePath.

    :Returns:
        #. info (None, dictionary): The file information dictionary.
           If None, it means an error has occurred.
        #. errorMessage (string): The error message if any error occurred.
    """
    relativePath = os.path.normpath(relativePath)
    if relativePath == '.':
        relativePath = ''
        assert name != '.pyrepinfo', "'.pyrepinfo' can't be a file name."
    if name is None:
        assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
        relativePath, name = os.path.split(relativePath)
    dirInfoDict, errorMessage = self.get_directory_info(relativePath)
    if dirInfoDict is None:
        return None, errorMessage
    # dict.__getitem__ bypasses any subclass override on the info dict.
    fileInfo = dict.__getitem__(dirInfoDict, "files").get(name, None)
    if fileInfo is None:
        return None, "file %s does not exist in relative path '%s'"%(name, relativePath)
    return fileInfo, errorMessage
python
{ "resource": "" }
q262940
Repository.get_file_relative_path_by_id
validation
def get_file_relative_path_by_id(self, id):
    """
    Given an id, get the corresponding file info relative path joined
    with file name.

    Parameters:
        #. id (string): The file unique id string.

    :Returns:
        #. relativePath (string): The file relative path joined with file
           name. If None, it means file was not found.
    """
    # Linear scan over all registered files; first match wins.
    for relpath, info in self.walk_files_info():
        if info['id'] == id:
            return relpath
    return None
python
{ "resource": "" }
q262941
Repository.get_file_relative_path_by_name
validation
def get_file_relative_path_by_name(self, name, skip=0):
    """Get file relative path(s) given the file name.

    File names can be identical across directories; *skip* selects which
    match to return.

    :Parameters:
        #. name (string): The file name.
        #. skip (None, integer): Number of matching files to skip before
           returning one. If None, a list of all matching relative paths
           is returned.

    :Returns:
        #. relativePath (string, list): The file relative path, or the list
           of all matches when skip is None. None means no file was found.
    """
    if skip is None:
        # collect every file whose basename matches
        return [p for p, _ in self.walk_files_info() if os.path.basename(p) == name]
    remaining = skip
    for p, _ in self.walk_files_info():
        if os.path.basename(p) == name:
            if remaining > 0:
                remaining -= 1
            else:
                return p
    return None
python
{ "resource": "" }
q262942
Repository.add_directory
validation
def add_directory(self, relativePath, info=None):
    """Add a directory to the repository, creating its entry with a utc timestamp.

    All missing intermediate directories in the path are created on disk and
    registered as well.

    :Parameters:
        #. relativePath (string): Repository-relative path of the directory to add.
        #. info (None, string, pickable object): Any random info about the folder.

    :Returns:
        #. info (dict): The directory info dict of the deepest directory in the path.
    """
    path = os.path.normpath(relativePath)
    # walk down the path, creating directories and registry entries as needed
    currentDir = self.path
    currentDict = self
    if path in ("","."):
        # empty path points to the repository itself
        return currentDict
    save = False
    for dir in path.split(os.sep):
        dirPath = os.path.join(currentDir, dir)
        # create the directory on the file system if missing
        if not os.path.exists(dirPath):
            os.mkdir(dirPath)
        # create the registry entry if missing (dict.__getitem__ bypasses
        # any overridden __getitem__ on the repository object)
        currentDict = dict.__getitem__(currentDict, "directories")
        if currentDict.get(dir, None) is None:
            save = True
            # NOTE(review): the original comment said "INFO MUST BE SET ONLY
            # FOR THE LAST DIRECTORY", yet every newly created intermediate
            # entry receives the same 'info' here — confirm intent.
            currentDict[dir] = {"directories":{}, "files":{}, "timestamp":datetime.utcnow(), "id":str(uuid.uuid1()), "info": info}
        currentDict = currentDict[dir]
        currentDir = dirPath
    # persist the repository only if something new was registered
    if save:
        self.save()
    return currentDict
python
{ "resource": "" }
q262943
Repository.remove_directory
validation
def remove_directory(self, relativePath, removeFromSystem=False):
    """Remove a directory from the repository.

    :Parameters:
        #. relativePath (string): Repository-relative path of the directory
           to remove from the repository.
        #. removeFromSystem (boolean): Whether to also remove the directory
           and all tracked files from the file system. Only files saved in
           the repository are removed, along with directories left empty.

    :Raises:
        Exception: if the directory is not registered in the repository.
    """
    # locate the parent directory info and make sure the directory is registered
    relativePath = os.path.normpath(relativePath)
    parentDirInfoDict, errorMessage = self.get_parent_directory_info(relativePath)
    assert parentDirInfoDict is not None, errorMessage
    path, name = os.path.split(relativePath)
    if dict.__getitem__(parentDirInfoDict, 'directories').get(name, None) is None:
        raise Exception("'%s' is not a registered directory in repository relative path '%s'"%(name, path))
    # remove tracked files and resulting empty sub-directories from the system
    if removeFromSystem:
        for rp in self.walk_files_relative_path(relativePath=relativePath):
            ap = os.path.join(self.__path, relativePath, rp)
            # single isfile check replaces the redundant isfile/exists/isfile chain
            if os.path.isfile(ap):
                os.remove(ap)
        for rp in self.walk_directories_relative_path(relativePath=relativePath):
            ap = os.path.join(self.__path, relativePath, rp)
            # only prune directories that are now empty
            if os.path.isdir(ap) and not len(os.listdir(ap)):
                os.rmdir(ap)
    # unregister the directory from the repository
    dict.__getitem__(parentDirInfoDict, 'directories').pop(name, None)
    ap = os.path.join(self.__path, relativePath)
    # BUGFIX: condition was inverted ('if not os.path.isdir(ap)'), which made
    # os.listdir raise on missing paths and never pruned empty existing dirs
    if os.path.isdir(ap):
        if not len(os.listdir(ap)):
            os.rmdir(ap)
    # persist the repository
    self.save()
python
{ "resource": "" }
q262944
Repository.move_directory
validation
def move_directory(self, relativePath, relativeDestination, replace=False, verbose=True): """ Move a directory in the repository from one place to another. It insures moving all the files and subdirectories in the system. :Parameters: #. relativePath (string): The relative to the repository path of the directory to be moved. #. relativeDestination (string): The new relative to the repository path of the directory. #. replace (boolean): Whether to replace existing files with the same name in the new created directory. #. verbose (boolean): Whether to be warn and informed about any abnormalities. """ # normalize path relativePath = os.path.normpath(relativePath) relativeDestination = os.path.normpath(relativeDestination) # get files and directories filesInfo = list( self.walk_files_info(relativePath=relativePath) ) dirsPath = list( self.walk_directories_relative_path(relativePath=relativePath) ) dirInfoDict, errorMessage = self.get_directory_info(relativePath) assert dirInfoDict is not None, errorMessage # remove directory info only self.remove_directory(relativePath=relativePath, removeFromSystem=False) # create new relative path self.add_directory(relativeDestination) # move files for RP, info in filesInfo: source = os.path.join(self.__path, relativePath, RP) destination = os.path.join(self.__path, relativeDestination, RP) # add directory newDirRP, fileName = os.path.split(os.path.join(relativeDestination, RP)) dirInfoDict = self.add_directory( newDirRP ) # move file if os.path.isfile(destination): if replace: os.remove(destination) if verbose: warnings.warn("file '%s' is copied replacing existing one in destination '%s'."%(fileName, newDirRP)) else: if verbose: warnings.warn("file '%s' is not copied because the same file exists in destination '%s'."%(fileName,destination)) continue os.rename(source, destination) # set file information dict.__getitem__(dirInfoDict, "files")[fileName] = info # save repository self.save()
python
{ "resource": "" }
q262945
Repository.rename_file
validation
def rename_file(self, relativePath, name, newName, replace=False, verbose=True): """ Rename a directory in the repository. It insures renaming the file in the system. :Parameters: #. relativePath (string): The relative to the repository path of the directory where the file is located. #. name (string): The file name. #. newName (string): The file new name. #. replace (boolean): Whether to force renaming when new folder name exists in the system. It fails when new folder name is registered in repository. #. verbose (boolean): Whether to be warn and informed about any abnormalities. """ # normalize path relativePath = os.path.normpath(relativePath) if relativePath == '.': relativePath = '' dirInfoDict, errorMessage = self.get_directory_info(relativePath) assert dirInfoDict is not None, errorMessage # check directory in repository assert name in dict.__getitem__(dirInfoDict, "files"), "file '%s' is not found in repository relative path '%s'"%(name, relativePath) # get real path realPath = os.path.join(self.__path, relativePath, name) assert os.path.isfile(realPath), "file '%s' is not found in system"%realPath # assert directory new name doesn't exist in repository assert newName not in dict.__getitem__(dirInfoDict, "files"), "file '%s' already exists in repository relative path '%s'"%(newName, relativePath) # check new directory in system newRealPath = os.path.join(self.__path, relativePath, newName) if os.path.isfile( newRealPath ): if replace: os.remove(newRealPath) if verbose: warnings.warn( "file '%s' already exists found in system, it is now replaced by '%s' because 'replace' flag is True."%(newRealPath,realPath) ) else: raise Exception( "file '%s' already exists in system but not registered in repository."%newRealPath ) # rename file os.rename(realPath, newRealPath) dict.__setitem__( dict.__getitem__(dirInfoDict, "files"), newName, dict.__getitem__(dirInfoDict, "files").pop(name) ) # save repository self.save()
python
{ "resource": "" }
q262946
Repository.dump_copy
validation
def dump_copy(self, path, relativePath, name=None, description=None, replace=False, verbose=False):
    """Copy an existing system file into the repository, registering it with a utc timestamp.

    The copied file gets placeholder dump/pull methods that raise when used,
    since dumping/pulling is ambiguous for raw copied files.

    :Parameters:
        #. path (str): The full path of the file to copy into the repository.
        #. relativePath (str): Repository-relative path of the directory where the
           file should be dumped. Created automatically if it does not exist.
        #. name (string): The file name. If None, it is split from path.
        #. description (None, string, pickable object): Any random description about the file.
        #. replace (boolean): Whether to replace any existing file with the same name.
        #. verbose (boolean): Whether to warn and inform about any abnormalities.
    """
    relativePath = os.path.normpath(relativePath)
    if relativePath == '.':
        relativePath = ''
    if name is None:
        _,name = os.path.split(path)
    # ensure the destination directory exists and is registered
    self.add_directory(relativePath)
    # get real path
    realPath = os.path.join(self.__path, relativePath)
    # get directory info dict
    dirInfoDict, errorMessage = self.get_directory_info(relativePath)
    assert dirInfoDict is not None, errorMessage
    # refuse to overwrite a registered file unless replace is set
    if name in dict.__getitem__(dirInfoDict, "files"):
        if not replace:
            if verbose:
                warnings.warn("a file with the name '%s' is already defined in repository dictionary info. Set replace flag to True if you want to replace the existing file"%(name))
            return
    # placeholder dump and pull methods: raw copies cannot be re-dumped/pulled
    dump = "raise Exception(\"dump is ambiguous for copied file '$FILE_PATH' \")"
    pull = "raise Exception(\"pull is ambiguous for copied file '$FILE_PATH' \")"
    # copy the file into the repository directory
    try:
        shutil.copyfile(path, os.path.join(realPath,name))
    except Exception as e:
        if verbose:
            warnings.warn(e)
        return
    # copied files carry no class information
    klass = None
    # register the new file in the repository
    dict.__getitem__(dirInfoDict, "files")[name] = {"dump":dump, "pull":pull, "timestamp":datetime.utcnow(), "id":str(uuid.uuid1()), "class": klass, "description":description}
    # save repository
    self.save()
python
{ "resource": "" }
q262947
Repository.update_file
validation
def update_file(self, value, relativePath, name=None, description=False, klass=False, dump=False, pull=False, ACID=None, verbose=False):
    """Update the value and the utc timestamp of a file already in the repository.

    If the file is not registered in the repository an error is thrown.
    If the file is missing on the file system it is regenerated when the
    dump method is executed.

    :Parameters:
        #. value (object): The new value of the file. Any python object or a file.
        #. relativePath (str): Repository-relative path of the directory where the file is dumped.
        #. name (None, string): The file name. If None, it is split off relativePath.
        #. description (False, string, pickable object): Any random description about the file.
           If False, the description info is left unchanged.
        #. klass (False, class): The dumped object class. If False, the class info is left unchanged.
        #. dump (False, string): The new dump method. If False, the registered one is used.
        #. pull (False, string): The new pull method. If False, the registered one is used.
        #. ACID (boolean): Whether to ensure the ACID (Atomicity, Consistency, Isolation,
           Durability) properties upon dumping, by dumping to a temporary path first and
           then moving to the final path. If None, the repository ACID property is used.
        #. verbose (boolean): Whether to warn and inform about any abnormalities.
    """
    # check ACID
    if ACID is None:
        ACID = self.__ACID
    assert isinstance(ACID, bool), "ACID must be boolean"
    # get relative path normalized
    relativePath = os.path.normpath(relativePath)
    if relativePath == '.':
        relativePath = ''
    # reserved internal file names are forbidden
    assert name != '.pyrepinfo', "'.pyrepinfo' is not allowed as file name in main repository directory"
    assert name != '.pyrepstate', "'.pyrepstate' is not allowed as file name in main repository directory"
    assert name != '.pyreplock', "'.pyreplock' is not allowed as file name in main repository directory"
    if name is None:
        assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
        relativePath,name = os.path.split(relativePath)
    # get file info dict; fails if the file is not registered
    fileInfoDict, errorMessage = self.get_file_info(relativePath, name)
    assert fileInfoDict is not None, errorMessage
    # get real path
    realPath = os.path.join(self.__path, relativePath)
    # warn when the registered file is missing on disk (it is re-created below)
    if verbose:
        if not os.path.isfile( os.path.join(realPath, name) ):
            warnings.warn("file '%s' is in repository but does not exist in the system. It is therefore being recreated."%os.path.join(realPath, name))
    # fall back to the registered dump and pull methods
    if not dump:
        dump = fileInfoDict["dump"]
    if not pull:
        pull = fileInfoDict["pull"]
    # savePath: a temporary location when ACID, the final location otherwise
    if ACID:
        savePath = os.path.join(tempfile.gettempdir(), name)
    else:
        savePath = os.path.join(realPath,name)
    # dump the file by executing the stored dump code with $FILE_PATH substituted
    try:
        exec( dump.replace("$FILE_PATH", str(savePath)) )
    except Exception as e:
        message = "unable to dump the file (%s)"%e
        if 'pickle.dump(' in dump:
            message += '\nmore info: %s'%str(get_pickling_errors(value))
        raise Exception( message )
    # when ACID, publish the temporary dump to the final location, then clean up
    if ACID:
        try:
            shutil.copyfile(savePath, os.path.join(realPath,name))
        except Exception as e:
            os.remove(savePath)
            if verbose:
                warnings.warn(e)
            return
        os.remove(savePath)
    # update timestamp and optional metadata
    fileInfoDict["timestamp"] = datetime.utcnow()
    if description is not False:
        fileInfoDict["description"] = description
    if klass is not False:
        assert inspect.isclass(klass), "klass must be a class definition"
        fileInfoDict["class"] = klass
    # save repository
    self.save()
python
{ "resource": "" }
q262948
ensure_str
validation
def ensure_str(value):
    """Coerce *value* to a string.

    String values pass through untouched; anything else is converted with
    ``six.text_type``.
    """
    return value if isinstance(value, six.string_types) else six.text_type(value)
python
{ "resource": "" }
q262949
TraceCollector.stats
validation
def stats(cls, traces):
    """Build per-minute stats (count/max/min/avg of total_time) for each trace key.

    Each consumed trace is also removed from ``cls._traces`` by its id.
    """
    # group recorded times by key, removing consumed traces from the collector
    grouped = {}
    for trace in traces:
        grouped.setdefault(trace['key'], []).append(trace['total_time'])
        cls._traces.pop(trace['id'])
    # summarise each group
    return {
        key: dict(count=len(times), max=max(times), min=min(times), avg=sum(times) / len(times))
        for key, times in grouped.items()
    }
python
{ "resource": "" }
q262950
SynchronisedFilesDataSource.start
validation
def start(self):
    """Start monitoring data files in the predefined directory on a background thread.

    Note: due to the underlying watchdog library, it may take a few
    milliseconds after this method returns before changes start being
    noticed.

    :raises RuntimeError: if monitoring has already been started.
    """
    with self._status_lock:
        if self._running:
            raise RuntimeError("Already running")
        self._running = True
    # Cannot re-use an Observer after it has been stopped, hence a fresh instance
    self._observer = Observer()
    self._observer.schedule(self._event_handler, self._directory_location, recursive=True)
    self._observer.start()
    # Load all in directory afterwards to ensure no undetected changes between loading all and observing
    self._origin_mapped_data = self._load_all_in_directory()
python
{ "resource": "" }
q262951
SynchronisedFilesDataSource.stop
validation
def stop(self):
    """Stop monitoring the predefined directory and discard the cached data.

    Safe to call when not running (it is then a no-op).
    """
    with self._status_lock:
        if self._running:
            assert self._observer is not None
            self._observer.stop()
            self._running = False
            # clear the cache: data is only valid while monitoring is live
            self._origin_mapped_data = dict()
python
{ "resource": "" }
q262952
SynchronisedFilesDataSource._on_file_moved
validation
def _on_file_moved(self, event: FileSystemMovedEvent):
    """Handle a file move inside the monitored directory.

    A move is decomposed into a delete of the source path followed by a
    create of the destination path (which is how it is sometimes detected
    anyway!).

    NOTE(review): only ``event.src_path`` is checked with ``is_data_file``;
    a non-data file moved onto a data-file name would be missed — confirm.

    :param event: the file system move event
    """
    if not event.is_directory and self.is_data_file(event.src_path):
        # synthesise a delete event for the old location
        delete_event = FileSystemEvent(event.src_path)
        delete_event.event_type = EVENT_TYPE_DELETED
        self._on_file_deleted(delete_event)
        # synthesise a create event for the new location
        create_event = FileSystemEvent(event.dest_path)
        create_event.event_type = EVENT_TYPE_CREATED
        self._on_file_created(create_event)
python
{ "resource": "" }
q262953
TempManager.tear_down
validation
def tear_down(self):
    """Remove every registered temp directory and temp file.

    Directory removal failures are ignored (``ignore_errors``); file
    removal ``OSError`` failures are swallowed so teardown always
    completes.
    """
    while self._temp_directories:
        shutil.rmtree(self._temp_directories.pop(), ignore_errors=True)
    while self._temp_files:
        path = self._temp_files.pop()
        try:
            os.remove(path)
        except OSError:
            pass
python
{ "resource": "" }
q262954
MutateMethods.is_not_exist_or_allow_overwrite
validation
def is_not_exist_or_allow_overwrite(self, overwrite=False):
    """Return True when the target does not exist, or exists but overwriting is allowed."""
    blocked = self.exists() and overwrite is False
    return not blocked
python
{ "resource": "" }
q262955
MutateMethods.copyto
validation
def copyto(self, new_abspath=None, new_dirpath=None, new_dirname=None, new_basename=None, new_fname=None, new_ext=None, overwrite=False, makedirs=False):
    """Copy this file to another location derived from the given path components.

    :param overwrite: allow overwriting an existing destination file.
    :param makedirs: create the destination's parent directory and retry once
        when the first copy attempt fails with ``IOError``.
    :return: the destination path object.
    """
    self.assert_exists()
    # build the destination path from the requested component overrides
    p = self.change(
        new_abspath=new_abspath,
        new_dirpath=new_dirpath,
        new_dirname=new_dirname,
        new_basename=new_basename,
        new_fname=new_fname,
        new_ext=new_ext,
    )
    if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):
        # only copy when source and destination paths actually differ
        if self.abspath != p.abspath:
            try:
                shutil.copy(self.abspath, p.abspath)
            except IOError as e:
                if makedirs:
                    # destination directory missing: create it, then retry once
                    os.makedirs(p.parent.abspath)
                    shutil.copy(self.abspath, p.abspath)
                else:
                    raise e
    return p
python
{ "resource": "" }
q262956
create_client
validation
def create_client() -> APIClient:
    """Create (or re-use) a Docker client.

    A weak reference to the last created client is cached in the module-level
    ``_client``; a live cached client is returned directly.

    :raises ConnectionError: if the Docker daemon is not accessible via the
        ``DOCKER_*`` environment variables or the standard UNIX socket.
    :return: the Docker client
    """
    global _client
    # _client holds a weakref; calling it yields None if collected or never set
    client = _client()
    if client is None:
        # First try looking at the environment variables for specification of the daemon's location
        docker_environment = kwargs_from_env(assert_hostname=False)
        if "base_url" in docker_environment:
            client = _create_client(docker_environment.get("base_url"), docker_environment.get("tls"))
            if client is None:
                raise ConnectionError(
                    "Could not connect to the Docker daemon specified by the `DOCKER_X` environment variables: %s"
                    % docker_environment)
            else:
                logging.info("Connected to Docker daemon specified by the environment variables")
        else:
            # Let's see if the Docker daemon is accessible via the UNIX socket
            client = _create_client("unix://var/run/docker.sock")
            if client is not None:
                logging.info("Connected to Docker daemon running on UNIX socket")
            else:
                raise ConnectionError(
                    "Cannot connect to Docker - is the Docker daemon running? `$DOCKER_HOST` should be set or the "
                    "daemon should be accessible via the standard UNIX socket.")
    # refresh the weak cache so subsequent calls can re-use this client
    _client = weakref.ref(client)
    assert isinstance(client, APIClient)
    return client
python
{ "resource": "" }
q262957
path_required
validation
def path_required(func):
    """Decorator that short-circuits repository methods when no repository is loaded.

    When ``self.path`` is None a warning is emitted and the wrapped method
    returns None without executing.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.path is not None:
            return func(self, *args, **kwargs)
        warnings.warn('Must load (Repository.load_repository) or initialize (Repository.create_repository) the repository first !')
        return None
    return wrapper
python
{ "resource": "" }
q262958
Repository.__clean_before_after
validation
def __clean_before_after(self, stateBefore, stateAfter, keepNoneEmptyDirectory=True):
    """Clean the repository on disk given before and after state snapshots.

    Entries present in *stateBefore* but absent from *stateAfter* (same
    relative path and type) are removed from the file system along with
    their pyrep side-car files.

    :Parameters:
        #. stateBefore (list): repository state snapshot taken before the change.
        #. stateAfter (list): repository state snapshot taken after the change.
        #. keepNoneEmptyDirectory (boolean): directory pruning flag.
           NOTE(review): when True directories are removed regardless of
           content, which contradicts the parameter name — confirm intent.

    :Returns:
        #. success (boolean): whether cleaning completed without errors.
        #. errors (list): list of error message strings.
    """
    # index the after-state by relative path for O(1) lookups
    errors = []
    afterDict = {}
    for aitem in stateAfter:
        afterDict.setdefault(list(aitem)[0], []).append(aitem)
    # loop the before-state deepest-first so children are handled before parents
    for bitem in reversed(stateBefore):
        relaPath = list(bitem)[0]
        basename = os.path.basename(relaPath)
        btype = bitem[relaPath]['type']
        alist = afterDict.get(relaPath, [])
        aitem = [a for a in alist if a[relaPath]['type']==btype]
        if len(aitem)>1:
            errors.append("Multiple '%s' of type '%s' where found in '%s', this should never had happened. Please report issue"%(basename,btype,relaPath))
            continue
        if not len(aitem):
            # entry disappeared: collect the paths to remove
            removeDirs = []
            removeFiles = []
            if btype == 'dir':
                if not len(relaPath):
                    errors.append("Removing main repository directory is not allowed")
                    continue
                removeDirs.append(os.path.join(self.__path,relaPath))
                removeFiles.append(os.path.join(self.__path,relaPath,self.__dirInfo))
                removeFiles.append(os.path.join(self.__path,relaPath,self.__dirLock))
            elif btype == 'file':
                removeFiles.append(os.path.join(self.__path,relaPath))
                removeFiles.append(os.path.join(self.__path,relaPath,self.__fileInfo%basename))
                removeFiles.append(os.path.join(self.__path,relaPath,self.__fileLock%basename))
            else:
                ### MUST VERIFY THAT ONCE pyrepobjectdir IS IMPLEMENTED
                removeDirs.append(os.path.join(self.__path,relaPath))
                removeFiles.append(os.path.join(self.__path,relaPath,self.__fileInfo%basename))
            # remove files
            for fpath in removeFiles:
                if os.path.isfile(fpath):
                    try:
                        os.remove(fpath)
                    except Exception as err:
                        errors.append("Unable to clean file '%s' (%s)"%(fpath, str(err)))
            # remove directories
            for dpath in removeDirs:
                if os.path.isdir(dpath):
                    if keepNoneEmptyDirectory or not len(os.listdir(dpath)):
                        try:
                            shutil.rmtree(dpath)
                        except Exception as err:
                            # BUGFIX: the error message previously interpolated
                            # 'fpath' (last removed file) instead of 'dpath'
                            errors.append("Unable to clean directory '%s' (%s)"%(dpath, str(err)))
    # return result and errors list
    return len(errors)==0, errors
python
{ "resource": "" }
q262959
Repository.get_stats
validation
def get_stats(self):
    """Get repository descriptive stats.

    :Returns:
        #. numberOfDirectories (integer): Number of directories in repository
        #. numberOfFiles (integer): Number of files in repository
    """
    if self.__path is None:
        # repository not initialized yet
        return 0, 0
    nDirs = 0
    nFiles = 0
    for entry in self.get_repository_state():
        entryName = list(entry)[0]
        if entryName == '':
            # skip the repository root itself
            continue
        meta = entry[entryName]
        if meta.get('pyrepfileinfo', False):
            nFiles += 1
        elif meta.get('pyrepdirinfo', False):
            nDirs += 1
        else:
            raise Exception('Not sure what to do next. Please report issue')
    return nDirs, nFiles
python
{ "resource": "" }
q262960
Repository.reset
validation
def reset(self):
    """Re-initialise this repository instance to a pristine, unsaved state."""
    fresh = {
        'repository_unique_name': str(uuid.uuid1()),
        'create_utctime': time.time(),
        'last_update_utctime': None,
        'pyrep_version': str(__version__),
        'repository_information': '',
        'walk_repo': [],
    }
    self.__path = None
    self.__repo = fresh
python
{ "resource": "" }
q262961
Repository.load_repository
validation
def load_repository(self, path, verbose=True, ntrials=3):
    """Load repository from a directory path and update the current instance.

    The new-style repository format is tried first; on failure the
    old-style loader (``OldRepository``) is used as a fallback.

    :Parameters:
        #. path (string): Directory path to load the repository from. If '.'
           or an empty string is passed, the current working directory is used.
        #. verbose (boolean): Whether to be verbose about abnormalities.
        #. ntrials (int): After acquiring all locks, the maximum number of
           trials allowed before failing. Multiple processes accessing the
           same repository can alter components between successive lock
           releases; more trials lowers the likelihood of failure.

    :Returns:
        #. repository (pyrep.Repository): self (new style) or the loaded
           old-style repository instance.
    """
    assert isinstance(ntrials, int), "ntrials must be integer"
    assert ntrials>0, "ntrials must be >0"
    repo = None
    for _trial in range(ntrials):
        try:
            # BUGFIX: honour the caller's 'verbose' flag (was hard-coded True)
            self.__load_repository(path=path, verbose=verbose)
        except Exception as err1:
            # new-style load failed: fall back to the old-style repository
            try:
                from .OldRepository import Repository
                REP = Repository(path)
            except Exception as err2:
                #traceback.print_exc()
                error = "Unable to load repository using neither new style (%s) nor old style (%s)"%(err1, err2)
                if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
            else:
                error = None
                repo = REP
                break
        else:
            error = None
            repo = self
            break
    # check and return
    assert error is None, error
    return repo
python
{ "resource": "" }
q262962
Repository.remove_repository
validation
def remove_repository(self, path=None, removeEmptyDirs=True):
    """Remove a whole repository from a path along with all its tracked files.

    :Parameters:
        #. path (None, string): The path of the repository to remove. If None,
           this (already loaded) repository instance is removed.
        #. removeEmptyDirs (boolean): Whether to remove directories left empty.
    """
    assert isinstance(removeEmptyDirs, bool), "removeEmptyDirs must be boolean"
    if path is not None:
        if path != self.__path:
            # target is a different repository: load it into a fresh instance
            repo = Repository()
            repo.load_repository(path)
        else:
            repo = self
    else:
        repo = self
    assert repo.path is not None, "path is not given and repository is not initialized"
    # remove repo files and directories, deepest entries first
    for fdict in reversed(repo.get_repository_state()):
        relaPath = list(fdict)[0]
        realPath = os.path.join(repo.path, relaPath)
        # NOTE: 'path' is rebound here and shadows the 'path' argument
        path, name = os.path.split(realPath)
        if fdict[relaPath]['type'] == 'file':
            # remove the file itself plus its pyrep info/lock/class side-car files
            if os.path.isfile(realPath):
                os.remove(realPath)
            if os.path.isfile(os.path.join(repo.path,path,self.__fileInfo%name)):
                os.remove(os.path.join(repo.path,path,self.__fileInfo%name))
            if os.path.isfile(os.path.join(repo.path,path,self.__fileLock%name)):
                os.remove(os.path.join(repo.path,path,self.__fileLock%name))
            if os.path.isfile(os.path.join(repo.path,path,self.__fileClass%name)):
                os.remove(os.path.join(repo.path,path,self.__fileClass%name))
        elif fdict[relaPath]['type'] == 'dir':
            # remove the directory's pyrep info/lock files, then the directory if empty
            if os.path.isfile(os.path.join(realPath,self.__dirInfo)):
                os.remove(os.path.join(realPath,self.__dirInfo))
            if os.path.isfile(os.path.join(realPath,self.__dirLock)):
                os.remove(os.path.join(realPath,self.__dirLock))
            if not len(os.listdir(realPath)) and removeEmptyDirs:
                shutil.rmtree( realPath )
    # remove the repository information and lock files themselves
    if os.path.isfile(os.path.join(repo.path,self.__repoFile)):
        os.remove(os.path.join(repo.path,self.__repoFile))
    if os.path.isfile(os.path.join(repo.path,self.__repoLock)):
        os.remove(os.path.join(repo.path,self.__repoLock))
python
{ "resource": "" }
q262963
Repository.is_name_allowed
validation
def is_name_allowed(self, path):
    """Check whether the basename of the given path may be used as a file or directory name.

    :Parameters:
        #. path (str): Absolute or relative path, or simply a file/directory name.

    :Returns:
        #. allowed (bool): Whether the name is allowed.
        #. message (None, str): Reason for the name to be forbidden, if it is.
    """
    assert isinstance(path, basestring), "given path must be a string"
    name = os.path.basename(path)
    if not len(name):
        return False, "empty name is not allowed"
    # exact match against reserved pyrep internal file names
    for em in [self.__repoLock,self.__repoFile,self.__dirInfo,self.__dirLock]:
        if name == em:
            return False, "name '%s' is reserved for pyrep internal usage"%em
    # pattern match against reserved pyrep side-car name templates
    # (names starting with '.' and ending like the template's tail)
    for pm in [self.__fileInfo,self.__fileLock]:#,self.__objectDir]:
        if name == pm or (name.endswith(pm[3:]) and name.startswith('.')):
            return False, "name pattern '%s' is not allowed as result may be reserved for pyrep internal usage"%pm
    # name is ok
    return True, None
python
{ "resource": "" }
q262964
Repository.to_repo_relative_path
validation
def to_repo_relative_path(self, path, split=False):
    """Convert a path to a repository-relative path.

    :Parameters:
        #. path (str): Path as a string
        #. split (boolean): Whether to split the result into its components

    :Returns:
        #. relativePath (str, list): Relative path as a string, or as a list
           of components when split is True
    """
    normalized = os.path.normpath(path)
    if normalized == '.':
        normalized = ''
    # strip the repository root prefix and surrounding separators
    relative = normalized.split(self.__path)[-1].strip(os.sep)
    return relative.split(os.sep) if split else relative
python
{ "resource": "" }
q262965
Repository.get_repository_state
validation
def get_repository_state(self, relaPath=None):
    """Get a list representation of the repository state with useful information.

    The list is ordered relatively to directory depth.

    :Parameters:
        #. relaPath (None, str): Relative directory path from where to start.
           If None, the whole repository representation is returned.

    :Returns:
        #. state (list): List representation of the repository. Items are
           single-key dictionaries mapping a relative path to an info dict:

           * 'type': 'file', 'dir' or 'objectdir'
           * 'exists': whether the entry actually exists on disk
           * 'pyrepfileinfo': for files, whether the side-car info file exists
           * 'pyrepdirinfo': for directories, whether .pyrepdirinfo exists
    """
    state = []
    def _walk_dir(relaPath, dirList):
        # record the directory entry itself
        dirDict = {'type':'dir',
                   'exists':os.path.isdir(os.path.join(self.__path,relaPath)),
                   'pyrepdirinfo':os.path.isfile(os.path.join(self.__path,relaPath,self.__dirInfo)),
                   }
        state.append({relaPath:dirDict})
        # loop files (plain string entries) in deterministic sorted order
        for fname in sorted([f for f in dirList if isinstance(f, basestring)]):
            relaFilePath = os.path.join(relaPath,fname)
            realFilePath = os.path.join(self.__path,relaFilePath)
            fileDict = {'type':'file',
                        'exists':os.path.isfile(realFilePath),
                        'pyrepfileinfo':os.path.isfile(os.path.join(self.__path,relaPath,self.__fileInfo%fname)),
                        }
            state.append({relaFilePath:fileDict})
        # recurse into sub-directories (dict entries) in sorted order
        for ddict in sorted([d for d in dirList if isinstance(d, dict)], key=lambda k: list(k)[0]):
            dirname = list(ddict)[0]
            _walk_dir(relaPath=os.path.join(relaPath,dirname), dirList=ddict[dirname])
    # call recursive _walk_dir
    if relaPath is None:
        _walk_dir(relaPath='', dirList=self.__repo['walk_repo'])
    else:
        assert isinstance(relaPath, basestring), "relaPath must be None or a str"
        relaPath = self.to_repo_relative_path(path=relaPath, split=False)
        # descend the in-memory walk structure to locate the start directory
        spath = relaPath.split(os.sep)
        dirList = self.__repo['walk_repo']
        while len(spath):
            dirname = spath.pop(0)
            dList = [d for d in dirList if isinstance(d, dict)]
            if not len(dList):
                dirList = None
                break
            cDict = [d for d in dList if dirname in d]
            if not len(cDict):
                dirList = None
                break
            dirList = cDict[0][dirname]
        # start the walk only when the start directory was found
        if dirList is not None:
            _walk_dir(relaPath=relaPath, dirList=dirList)
    # return state list
    return state
python
{ "resource": "" }
q262966
Repository.get_file_info
validation
def get_file_info(self, relativePath):
    """Get a file's pickled information dictionary from the repository.

    :Parameters:
        #. relativePath (string): Repository-relative path of the file.

    :Returns:
        #. info (None, dictionary): The file information dictionary, or None
           on error.
        #. errorMessage (string): The error message if any error occurred.
    """
    relativePath = self.to_repo_relative_path(path=relativePath, split=False)
    fileName = os.path.basename(relativePath)
    isRepoFile,fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
    if not isRepoFile:
        return None, "file is not a registered repository file."
    if not infoOnDisk:
        return None, "file is a registered repository file but info file missing"
    # read the pickled side-car info file from disk
    fileInfoPath = os.path.join(self.__path,os.path.dirname(relativePath),self.__fileInfo%fileName)
    try:
        with open(fileInfoPath, 'rb') as fd:
            info = pickle.load(fd)
    except Exception as err:
        return None, "Unable to read file info from disk (%s)"%str(err)
    return info, ''
python
{ "resource": "" }
q262967
Repository.is_repository_file
validation
def is_repository_file(self, relativePath):
    """
    Check whether a given relative path is a repository file path

    :Parameters:
        #. relativePath (string): File relative path

    :Returns:
        #. isRepoFile (boolean): Whether file is a repository file.
        #. isFileOnDisk (boolean): Whether file is found on disk.
        #. isFileInfoOnDisk (boolean): Whether file info is found on disk.
        #. isFileClassOnDisk (boolean): Whether file class is found on disk.
    """
    relativePath = self.to_repo_relative_path(path=relativePath, split=False)
    # An empty relative path denotes the repository root, never a file.
    if relativePath == '':
        return False, False, False, False
    relaDir, name = os.path.split(relativePath)
    # Presence of the file itself plus its sibling info/class metadata files.
    fileOnDisk  = os.path.isfile(os.path.join(self.__path, relativePath))
    infoOnDisk  = os.path.isfile(os.path.join(self.__path,os.path.dirname(relativePath),self.__fileInfo%name))
    classOnDisk = os.path.isfile(os.path.join(self.__path,os.path.dirname(relativePath),self.__fileClass%name))
    # Walk the in-memory repository tree ('walk_repo') one directory level at
    # a time. Each level is a list mixing file entries and {dirname: sublist}
    # dicts; cDir becomes None as soon as a path component is not found.
    cDir = self.__repo['walk_repo']
    if len(relaDir):
        for dirname in relaDir.split(os.sep):
            dList = [d for d in cDir if isinstance(d, dict)]
            if not len(dList):
                cDir = None
                break
            cDict = [d for d in dList if dirname in d]
            if not len(cDict):
                cDir = None
                break
            cDir = cDict[0][dirname]
    # Parent directory is not registered -> not a repository file.
    if cDir is None:
        return False, fileOnDisk, infoOnDisk, classOnDisk
    #if name not in cDir:
    # Compare by str() so non-string entries in the listing still match.
    if str(name) not in [str(i) for i in cDir]:
        return False, fileOnDisk, infoOnDisk, classOnDisk
    # this is a repository registered file. check whether all is on disk
    return True, fileOnDisk, infoOnDisk, classOnDisk
python
{ "resource": "" }
q262968
Repository.create_package
validation
def create_package(self, path=None, name=None, mode=None):
    """
    Create a tar file package of all the repository files and directories.
    Only files and directories that are tracked in the repository
    are stored in the package tar file.

    **N.B. On some systems packaging requires root permissions.**

    :Parameters:
        #. path (None, string): The real absolute path where to create the
           package. If None, it will be created in the same directory as
           the repository. If '.' or an empty string is passed, the current
           working directory will be used.
        #. name (None, string): The name to give to the package file.
           If None, the package directory name will be used with the
           appropriate extension added.
        #. mode (None, string): The writing mode of the tarfile.
           If None, automatically the best compression mode will be chosen.
           Available modes are ('w', 'w:', 'w:gz', 'w:bz2')
    """
    # check mode
    assert mode in (None, 'w', 'w:', 'w:gz', 'w:bz2'), 'unkown archive mode %s'%str(mode)
    if mode is None:
        #mode = 'w:bz2'
        mode = 'w:'
    # get root
    if path is None:
        # default: sibling of the repository directory
        root = os.path.split(self.__path)[0]
    elif path.strip() in ('','.'):
        root = os.getcwd()
    else:
        root = os.path.realpath( os.path.expanduser(path) )
    assert os.path.isdir(root), 'absolute path %s is not a valid directory'%path
    # get name: derive the extension from the tar mode ('w:gz' -> '.gz').
    if name is None:
        ext = mode.split(":")
        if len(ext) == 2:
            if len(ext[1]):
                ext = "."+ext[1]
            else:
                ext = '.tar'
        else:
            ext = '.tar'
        name = os.path.split(self.__path)[1]+ext
    # create tar file
    tarfilePath = os.path.join(root, name)
    try:
        tarHandler = tarfile.TarFile.open(tarfilePath, mode=mode)
    except Exception as e:
        raise Exception("Unable to create package (%s)"%e)
    # walk directory and create empty directories
    for dpath in sorted(list(self.walk_directories_path(recursive=True))):
        t = tarfile.TarInfo( dpath )
        t.type = tarfile.DIRTYPE
        tarHandler.addfile(t)
        # NOTE(review): every directory's info file is added under the same
        # arcname (self.__dirInfo), so later entries shadow earlier ones on
        # extraction — confirm this is intended.
        tarHandler.add(os.path.join(self.__path,dpath,self.__dirInfo), arcname=self.__dirInfo)
    # walk files and add to tar
    for fpath in self.walk_files_path(recursive=True):
        relaPath, fname = os.path.split(fpath)
        # NOTE(review): files are archived under their basename only
        # (arcname=fname), so same-named files in different directories
        # collide in the archive — confirm against the extraction code.
        tarHandler.add(os.path.join(self.__path,fpath), arcname=fname)
        tarHandler.add(os.path.join(self.__path,relaPath,self.__fileInfo%fname), arcname=self.__fileInfo%fname)
        tarHandler.add(os.path.join(self.__path,relaPath,self.__fileClass%fname), arcname=self.__fileClass%fname)
    # save repository .pyrepinfo
    tarHandler.add(os.path.join(self.__path,self.__repoFile), arcname=".pyrepinfo")
    # close tar file
    tarHandler.close()
python
{ "resource": "" }
q262969
Metadata.rename
validation
def rename(self, key: Any, new_key: Any):
    """
    Renames an item in this collection as a transaction.

    Will override if new key name already exists.

    :param key: the current name of the item
    :param new_key: the new name that the item should have
    :raises KeyError: if no item named ``key`` exists
    """
    # Renaming to the same name is a no-op.
    if new_key == key:
        return
    # Acquire both per-key locks, always in the same (id-sorted) order so two
    # concurrent renames over the same pair of keys cannot deadlock.
    # assumes self._key_locks hands out a lock for any key (presumably a
    # defaultdict of locks) — TODO confirm.
    required_locks = [self._key_locks[key], self._key_locks[new_key]]
    ordered_required_locks = sorted(required_locks, key=lambda x: id(x))
    for lock in ordered_required_locks:
        lock.acquire()
    try:
        if key not in self._data:
            raise KeyError("Attribute to rename \"%s\" does not exist" % key)
        # Read via self[key] (__getitem__) rather than self._data[key];
        # NOTE(review): assumes __getitem__ does not try to re-acquire the
        # same non-reentrant lock — confirm.
        self._data[new_key] = self[key]
        del self._data[key]
    finally:
        # Release both locks regardless of acquisition order.
        for lock in required_locks:
            lock.release()
python
{ "resource": "" }
q262970
get_text_fingerprint
validation
def get_text_fingerprint(text, hash_meth, encoding="utf-8"):  # pragma: no cover
    """
    Return the hexadecimal fingerprint of ``text`` hashed with ``hash_meth``.

    :param text: the string to hash.
    :param hash_meth: a hash constructor, e.g. ``hashlib.md5``.
    :param encoding: encoding used to turn ``text`` into bytes (default utf-8).
    :return: hex digest string.
    """
    encoded = text.encode(encoding)
    hasher = hash_meth()
    hasher.update(encoded)
    return hasher.hexdigest()
python
{ "resource": "" }
q262971
md5file
validation
def md5file(abspath, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
    """
    Return the md5 fingerprint of a file, or of its first ``nbytes`` bytes.

    :param abspath: the absolute path to the file.
    :param nbytes: hash only the first N bytes; 0 or None hashes the whole file.
    :param chunk_size: number of bytes read per iteration while hashing.

    Rough throughput (i7-4600U 2.10GHz-2.70GHz, 8.00 GB RAM): about
    0.25 GB of data per second:

    - 0.59G - 2.43 sec
    - 1.3G - 5.68 sec
    - 1.9G - 7.72 sec
    - 2.5G - 10.32 sec
    - 3.9G - 16.0 sec
    """
    return get_file_fingerprint(
        abspath, hashlib.md5, nbytes=nbytes, chunk_size=chunk_size)
python
{ "resource": "" }
q262972
sha256file
validation
def sha256file(abspath, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
    """
    Return the sha256 fingerprint of a file, or of its first ``nbytes`` bytes.

    :param abspath: the absolute path to the file.
    :param nbytes: hash only the first N bytes; 0 or None hashes the whole file.
    :param chunk_size: number of bytes read per iteration while hashing.
    """
    return get_file_fingerprint(
        abspath, hashlib.sha256, nbytes=nbytes, chunk_size=chunk_size)
python
{ "resource": "" }
q262973
sha512file
validation
def sha512file(abspath, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
    """
    Return the sha512 fingerprint of a file, or of its first ``nbytes`` bytes.

    :param abspath: the absolute path to the file.
    :param nbytes: hash only the first N bytes; 0 or None hashes the whole file.
    :param chunk_size: number of bytes read per iteration while hashing.
    """
    return get_file_fingerprint(
        abspath, hashlib.sha512, nbytes=nbytes, chunk_size=chunk_size)
python
{ "resource": "" }
q262974
ToolBox.auto_complete_choices
validation
def auto_complete_choices(self, case_sensitive=False):
    """
    Shell-tab-completion-like lookup: collect every path whose basename
    starts with this path's basename.

    :param case_sensitive: whether prefix matching is case sensitive.
    :return: list of :class:`pathlib_mate.pathlib2.Path`.
    """
    if case_sensitive:  # pragma: no cover
        prefix = self.basename

        def is_match(name):
            return name.startswith(prefix)
    else:
        prefix = self.basename.lower()

        def is_match(name):
            return name.lower().startswith(prefix)

    matches = list()
    if self.is_dir():
        # An existing directory completes to itself plus its direct children.
        matches.append(self)
        matches.extend(self.sort_by_abspath(self.select(recursive=False)))
    else:
        p_parent = self.parent
        if not p_parent.is_dir():  # pragma: no cover
            raise ValueError("'%s' directory does not exist!" % p_parent)
        # Otherwise complete among the siblings sharing the basename prefix.
        for candidate in self.sort_by_abspath(p_parent.select(recursive=False)):
            if is_match(candidate.basename):
                matches.append(candidate)
    return matches
python
{ "resource": "" }
q262975
ToolBox.print_big_dir
validation
def print_big_dir(self, top_n=5):
    """
    Print the ``top_n`` largest direct sub-directories of this directory.
    """
    self.assert_is_dir_and_exists()
    pairs = [(p, p.dirsize) for p in self.select_dir(recursive=False)]
    pairs.sort(key=lambda pair: pair[1], reverse=True)
    for path, size in pairs[:top_n]:
        print("{:<9} {:<9}".format(repr_data_size(size), path.abspath))
python
{ "resource": "" }
q262976
ToolBox.print_big_file
validation
def print_big_file(self, top_n=5):
    """
    Print the ``top_n`` largest files anywhere under this directory.
    """
    self.assert_is_dir_and_exists()
    pairs = [(p, p.size) for p in self.select_file(recursive=True)]
    pairs.sort(key=lambda pair: pair[1], reverse=True)
    for path, size in pairs[:top_n]:
        print("{:<9} {:<9}".format(repr_data_size(size), path.abspath))
python
{ "resource": "" }
q262977
ToolBox.print_big_dir_and_big_file
validation
def print_big_dir_and_big_file(self, top_n=5):
    """
    Print the ``top_n`` largest sub-directories and, under each,
    its ``top_n`` largest files.
    """
    self.assert_is_dir_and_exists()
    dir_pairs = [(p, p.dirsize) for p in self.select_dir(recursive=False)]
    dir_pairs.sort(key=lambda pair: pair[1], reverse=True)
    for dir_path, dir_size in dir_pairs[:top_n]:
        print("{:<9} {:<9}".format(repr_data_size(dir_size), dir_path.abspath))
        file_pairs = [(p, p.size) for p in dir_path.select_file(recursive=True)]
        file_pairs.sort(key=lambda pair: pair[1], reverse=True)
        for file_path, file_size in file_pairs[:top_n]:
            print(" {:<9} {:<9}".format(
                repr_data_size(file_size), file_path.abspath))
python
{ "resource": "" }
q262978
ToolBox.mirror_to
validation
def mirror_to(self, dst):  # pragma: no cover
    """
    Create a new folder having exactly the same structure as this
    directory, where every file is an empty placeholder with the same
    name (size 0).

    :param dst: destination directory; must not exist before the call.

    :raises Exception: if ``dst`` already exists.

    (original Chinese doc: create a mirror copy of a directory where each
    file copy shares only the name with the original — the copies are
    empty, zero-byte files.)
    """
    self.assert_is_dir_and_exists()
    src = self.abspath
    dst = os.path.abspath(dst)
    if os.path.exists(dst):  # pragma: no cover
        # fixed typo in the original message ("distination")
        raise Exception("destination already exist!")

    for current_folder, _, file_list in os.walk(self.abspath):
        # Map the source directory onto the destination tree by swapping the
        # leading prefix only; str.replace() would also rewrite any later
        # occurrence of the src substring inside the path.
        mirror_folder = dst + current_folder[len(src):]
        try:
            os.mkdir(mirror_folder)
        except OSError:  # pragma: no cover
            # directory may already exist; keep the original best-effort style
            pass
        for basename in file_list:
            abspath = os.path.join(mirror_folder, basename)
            # create an empty placeholder file
            with open(abspath, "wb") as _f:
                pass
python
{ "resource": "" }
q262979
ToolBox.execute_pyfile
validation
def execute_pyfile(self, py_exe=None):  # pragma: no cover
    """
    Execute every ``.py`` file under this directory as a main script.

    :param py_exe: str, python command or python executable path; defaults
        to ``python2``/``python3`` matching the running interpreter.

    (original Chinese doc: run every Python file in the directory as a main
    script with the current interpreter.)
    """
    import subprocess

    self.assert_is_dir_and_exists()
    if py_exe is None:
        py_exe = "python2" if six.PY2 else "python3"
    for p in self.select_by_ext(".py"):
        # Pass an argument list rather than a quoted command string: with
        # shell=False (the default) Popen treats a plain string as the name
        # of the executable on POSIX, so '%s "%s"' would fail to launch.
        subprocess.Popen([py_exe, p.abspath])
python
{ "resource": "" }
q262980
ToolBox.trail_space
validation
def trail_space(self, filters=lambda p: p.ext == ".py"):  # pragma: no cover
    """
    Strip trailing whitespace from every line of each selected file
    (by default, every ``.py`` file).

    (original Chinese doc: remove trailing spaces at the end of each line
    in all selected files under the directory.)
    """
    self.assert_is_dir_and_exists()
    for p in self.select_file(filters):
        try:
            with open(p.abspath, "rb") as f:
                stripped = [line.decode("utf-8").rstrip() for line in f]
            with open(p.abspath, "wb") as f:
                f.write("\n".join(stripped).encode("utf-8"))
        except Exception as e:  # pragma: no cover
            raise e
python
{ "resource": "" }
q262981
ToolBox.autopep8
validation
def autopep8(self, **kwargs):  # pragma: no cover
    """
    Reformat every ``.py`` file under this directory to pep8 style,
    in place, improving readability and consistency.

    :param kwargs: keyword arguments forwarded to ``autopep8.fix_code``.
    """
    self.assert_is_dir_and_exists()
    for p in self.select_by_ext(".py"):
        with open(p.abspath, "rb") as f:
            source = f.read().decode("utf-8")
        fixed = autopep8.fix_code(source, **kwargs)
        with open(p.abspath, "wb") as f:
            f.write(fixed.encode("utf-8"))
python
{ "resource": "" }
q262982
AttrAccessor.size
validation
def size(self):
    """
    File size in bytes, read from the cached ``stat`` result.

    On the first access (``self._stat`` not yet populated) the cache is
    refreshed once via ``self.stat()`` and the value re-read directly —
    replacing the original bare ``except`` + re-entrant retry, which could
    mask unrelated errors.
    """
    try:
        return self._stat.st_size
    except AttributeError:  # pragma: no cover
        self._stat = self.stat()
        return self._stat.st_size
python
{ "resource": "" }
q262983
AttrAccessor.mtime
validation
def mtime(self):
    """
    Most recent modify time as a timestamp, from the cached ``stat`` result.

    Refreshes the cache once via ``self.stat()`` on first access instead of
    the original bare ``except`` + re-entrant retry.
    """
    try:
        return self._stat.st_mtime
    except AttributeError:  # pragma: no cover
        self._stat = self.stat()
        return self._stat.st_mtime
python
{ "resource": "" }
q262984
AttrAccessor.atime
validation
def atime(self):
    """
    Most recent access time as a timestamp, from the cached ``stat`` result.

    Refreshes the cache once via ``self.stat()`` on first access instead of
    the original bare ``except`` + re-entrant retry.
    """
    try:
        return self._stat.st_atime
    except AttributeError:  # pragma: no cover
        self._stat = self.stat()
        return self._stat.st_atime
python
{ "resource": "" }
q262985
AttrAccessor.ctime
validation
def ctime(self):
    """
    Most recent create time as a timestamp, from the cached ``stat`` result.

    Refreshes the cache once via ``self.stat()`` on first access instead of
    the original bare ``except`` + re-entrant retry.
    """
    try:
        return self._stat.st_ctime
    except AttributeError:  # pragma: no cover
        self._stat = self.stat()
        return self._stat.st_ctime
python
{ "resource": "" }
q262986
StrictConfigParser.unusedoptions
validation
def unusedoptions(self, sections):
    """Lists options that have not been used to format other values in
    their sections.

    Good for finding out if the user has misspelled any of the options.
    """
    unused = set()
    for section in _list(sections):
        if not self.has_section(section):
            continue
        options = self.options(section)
        raw_values = [self.get(section, option, raw=True)
                      for option in options]
        for option in options:
            # an option is "used" when its interpolation token appears in
            # any raw value of the same section
            token = "%(" + option + ")s"
            if not any(token in raw for raw in raw_values):
                unused.add(option)
    return list(unused)
python
{ "resource": "" }
q262987
tui.keys
validation
def keys(self):
    """List names of options and positional arguments.

    Returns a plain list: option names first, then positional-argument
    names. (Built via list()/extend so it also works on Python 3, where
    ``dict.keys() + list`` raises TypeError.)
    """
    names = list(self.options.keys())
    names.extend(arg.name for arg in self.positional_args)
    return names
python
{ "resource": "" }
q262988
tui._add_option
validation
def _add_option(self, option):
    """Add an Option object to the user interface.

    Raises ValueError when the option's name or abbreviation collides
    with an existing option or positional argument.
    """
    positional_names = [arg.name for arg in self.positional_args]
    if option.name in self.options:
        raise ValueError('name already in use')
    if option.abbreviation in self.abbreviations:
        raise ValueError('abbreviation already in use')
    if option.name in positional_names:
        raise ValueError('name already in use by a positional argument')
    self.options[option.name] = option
    if option.abbreviation:
        self.abbreviations[option.abbreviation] = option
    self.option_order.append(option.name)
python
{ "resource": "" }
q262989
tui._add_positional_argument
validation
def _add_positional_argument(self, posarg):
    """Append a positional argument to the user interface.

    Optional positional arguments must be added after the required ones.
    The user interface can have at most one recurring positional argument,
    and if present, that argument must be the last one.
    """
    if self.positional_args:
        last = self.positional_args[-1]
        if last.recurring:
            raise ValueError("recurring positional arguments must be last")
        if last.optional and not posarg.optional:
            raise ValueError("required positional arguments must precede optional ones")
    self.positional_args.append(posarg)
python
{ "resource": "" }
q262990
tui.read_docs
validation
def read_docs(self, docsfiles):
    """Read program documentation from DocParser compatible files.

    docsfiles is a list of paths to potential docsfiles (a single string
    is treated as a one-item list); files that do not exist are skipped.
    """
    updates = DocParser()
    for candidate in _list(docsfiles):
        if os.path.isfile(candidate):
            updates.parse(candidate)
    # refresh only those doc entries for which the parsed files provided
    # non-empty blocks
    self.docs.update(
        (key, _docs(updates[key], self.docvars))
        for key in self.docs if updates.blocks[key])
    for name, text in updates['parameters'].items():
        if name in self:
            self.getparam(name).docs = text[0] % self.docvars
        elif name not in self.ignore:
            raise ValueError("parameter %r does not exist" % name)
python
{ "resource": "" }
q262991
tui.optionhelp
validation
def optionhelp(self, indent=0, maxindent=25, width=79):
    """Return user friendly help on program options."""
    def label_for(option):
        text = '%*s--%s' % (indent, ' ', option.name)
        if option.abbreviation:
            text += ', -' + option.abbreviation
        return text + ': '

    # align all help texts on a common column derived from the labels
    helpindent = _autoindent(
        [label_for(o) for o in self.options.values()], indent, maxindent)
    lines = []
    for name in self.option_order:
        option = self.options[name]
        helpstring = "%s(%s). %s" % (
            option.formatname, option.strvalue, option.docs)
        lines.extend(self._wrap_labelled(
            label_for(option), helpstring, helpindent, width))
    return '\n'.join(lines)
python
{ "resource": "" }
q262992
tui.posarghelp
validation
def posarghelp(self, indent=0, maxindent=25, width=79):
    """Return user friendly help on positional arguments in the program."""
    def label_for(posarg):
        return ' ' * indent + posarg.displayname + ': '

    # align all help texts on a common column derived from the labels
    helpindent = _autoindent(
        [label_for(p) for p in self.positional_args], indent, maxindent)
    lines = []
    for posarg in self.positional_args:
        text = posarg.formatname + '. ' + posarg.docs
        lines.extend(self._wrap_labelled(
            label_for(posarg), text, helpindent, width))
    return '\n'.join(lines)
python
{ "resource": "" }
q262993
tui.strsettings
validation
def strsettings(self, indent=0, maxindent=25, width=0):
    """Return a user friendly listing of option values and their origins.

    indent is the number of spaces preceding the text on each line.
    The indent of the documentation is dependent on the length of the
    longest label that is shorter than maxindent; a longer label is
    printed on its own line.
    width is maximum allowed page width, use self.width if 0.
    """
    def label_for(name):
        return ' ' * indent + name + ': '

    settingsindent = _autoindent(
        [label_for(n) for n in self.options], indent, maxindent)
    lines = []
    for name in self.option_order:
        option = self.options[name]
        info = "%s(%s): %s" % (
            option.formatname, option.strvalue, option.location)
        lines.extend(self._wrap_labelled(
            label_for(name), info, settingsindent, width))
    return '\n'.join(lines)
python
{ "resource": "" }
q262994
tui.settingshelp
validation
def settingshelp(self, width=0):
    """Return a summary of program options, their values and origins.

    width is maximum allowed page width, use self.width if 0.
    """
    parts = [self._wrap(self.docs['title'], width=width)]
    if self.docs['description']:
        parts.append(self._wrap(self.docs['description'], indent=2, width=width))
    parts.append('')
    parts.append('SETTINGS:')
    parts.append(self.strsettings(indent=2, width=width))
    parts.append('')
    return '\n'.join(parts)
python
{ "resource": "" }
q262995
TextBlockParser.parse
validation
def parse(self, file):
    """Parse text blocks from a file.

    ``file`` may be an open file object or a path string (opened here).
    Lines are fed to the current block until a line whose tag (text
    before the first ':') matches a registered block name, which opens a
    new block.
    """
    if isinstance(file, basestring):
        file = open(file)
    line_number = 0
    label = None
    # self.untagged receives lines seen before any tagged block; when it is
    # None, such lines are only tolerated if blank.
    block = self.untagged
    for line in file:
        line_number += 1
        line = line.rstrip('\n')
        # normalize tabs to spaces when a tabsize is configured
        if self.tabsize > 0:
            line = line.replace('\t', ' ' * self.tabsize)
        if self.decommenter:
            line = self.decommenter.decomment(line)
            # decommenter returns None for lines that are entirely comment
            if line is None:
                continue
        tag = line.split(':', 1)[0].strip()
        # Still in the same block?
        if tag not in self.names:
            if block is None:
                if line and not line.isspace():
                    # NOTE(review): second ParseError argument is the line
                    # text here but line_number below — confirm which the
                    # ParseError signature expects.
                    raise ParseError(file.name, line, "garbage before first block: %r" % line)
                continue
            block.addline(line)
            continue
        # Open a new block.
        name = self.names[tag]
        label = line.split(':',1)[1].strip()
        if name in self.labelled_classes:
            # labelled blocks are stored per-label; create on first sight
            if not label:
                raise ParseError(file.name, line, "missing label for %r block" % name)
            block = self.blocks[name].setdefault(label, self.labelled_classes[name]())
        else:
            if label:
                msg = "label %r present for unlabelled block %r" % (label, name)
                raise ParseError(file.name, line_number, msg)
            block = self.blocks[name]
        block.startblock()
python
{ "resource": "" }
q262996
Format.parse
validation
def parse(self, argv):
    """Pop, parse and return the first self.nargs items from argv.

    When self.nargs > 1 a list of parsed values is returned, otherwise
    the single parsed value. Raises BadNumberOfArguments or BadArgument
    on errors.

    NOTE: argv may be modified in place by this method.
    """
    if len(argv) < self.nargs:
        raise BadNumberOfArguments(self.nargs, len(argv))
    parsed = [self.parse_argument(argv.pop(0)) for _ in range(self.nargs)]
    if self.nargs == 1:
        return parsed[0]
    return parsed
python
{ "resource": "" }
q262997
Flag.parsestr
validation
def parsestr(self, argstr):
    """Parse a flag value found in settings files.

    Values listed in self.true mean True and values in self.false mean
    False, compared case insensitively; anything else raises BadArgument.
    """
    argv = shlex.split(argstr, comments=True)
    if len(argv) != 1:
        raise BadNumberOfArguments(1, len(argv))
    word = argv[0]
    folded = word.lower()
    if folded in self.true:
        return True
    if folded in self.false:
        return False
    raise BadArgument(word, "Allowed values are " + self.allowed + '.')
python
{ "resource": "" }
q262998
Tuple.get_separator
validation
def get_separator(self, i):
    """Return the separator preceding format i, or '' for i == 0.

    Indices past the end of self.separator reuse the last separator.
    """
    if not i:
        return ''
    sep = self.separator[min(i - 1, len(self.separator) - 1)]
    # mirror the original and/or chain: a falsy separator yields ''
    return sep or ''
python
{ "resource": "" }
q262999
MixcloudOauth.authorize_url
validation
def authorize_url(self):
    """
    Build the URL to redirect the user to for OAuth authentication.
    """
    query = urlencode({
        'client_id': self.client_id,
        'redirect_uri': self.redirect_uri,
    })
    return OAUTH_ROOT + '/authorize' + '?' + query
python
{ "resource": "" }