_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q271200
AsyncSatel.close
test
def close(self): """Stop monitoring and close connection.""" _LOGGER.debug("Closing...") self.closed = True if self.connected: self._writer.close()
python
{ "resource": "" }
q271201
PostgresContentsManager.purge_db
test
def purge_db(self): """ Clear all matching our user_id. """ with self.engine.begin() as db: purge_user(db, self.user_id)
python
{ "resource": "" }
q271202
PostgresContentsManager.guess_type
test
def guess_type(self, path, allow_directory=True): """ Guess the type of a file. If allow_directory is False, don't consider the possibility that the file is a directory. """ if path.endswith('.ipynb'): return 'notebook' elif allow_directory and self.dir_exists(path): return 'directory' else: return 'file'
python
{ "resource": "" }
q271203
PostgresContentsManager.get_file_id
test
def get_file_id(self, path): """ Get the id of a file in the database. This function is specific to this implementation of ContentsManager and is not in the base class. """ with self.engine.begin() as db: try: file_id = get_file_id(db, self.user_id, path) except NoSuchFile: self.no_such_entity(path) return file_id
python
{ "resource": "" }
q271204
PostgresContentsManager._get_notebook
test
def _get_notebook(self, path, content, format): """ Get a notebook from the database. """ with self.engine.begin() as db: try: record = get_file( db, self.user_id, path, content, self.crypto.decrypt, ) except NoSuchFile: self.no_such_entity(path) return self._notebook_model_from_db(record, content)
python
{ "resource": "" }
q271205
PostgresContentsManager._notebook_model_from_db
test
def _notebook_model_from_db(self, record, content): """ Build a notebook model from database record. """ path = to_api_path(record['parent_name'] + record['name']) model = base_model(path) model['type'] = 'notebook' model['last_modified'] = model['created'] = record['created_at'] if content: content = reads_base64(record['content']) self.mark_trusted_cells(content, path) model['content'] = content model['format'] = 'json' self.validate_notebook_model(model) return model
python
{ "resource": "" }
q271206
PostgresContentsManager._get_directory
test
def _get_directory(self, path, content, format): """ Get a directory from the database. """ with self.engine.begin() as db: try: record = get_directory( db, self.user_id, path, content ) except NoSuchDirectory: if self.file_exists(path): # TODO: It's awkward/expensive to have to check this to # return a 400 instead of 404. Consider just 404ing. self.do_400("Wrong type: %s" % path) else: self.no_such_entity(path) return self._directory_model_from_db(record, content)
python
{ "resource": "" }
q271207
PostgresContentsManager._convert_file_records
test
def _convert_file_records(self, file_records): """ Apply _notebook_model_from_db or _file_model_from_db to each entry in file_records, depending on the result of `guess_type`. """ for record in file_records: type_ = self.guess_type(record['name'], allow_directory=False) if type_ == 'notebook': yield self._notebook_model_from_db(record, False) elif type_ == 'file': yield self._file_model_from_db(record, False, None) else: self.do_500("Unknown file type %s" % type_)
python
{ "resource": "" }
q271208
PostgresContentsManager._directory_model_from_db
test
def _directory_model_from_db(self, record, content): """ Build a directory model from database directory record. """ model = base_directory_model(to_api_path(record['name'])) if content: model['format'] = 'json' model['content'] = list( chain( self._convert_file_records(record['files']), ( self._directory_model_from_db(subdir, False) for subdir in record['subdirs'] ), ) ) return model
python
{ "resource": "" }
q271209
PostgresContentsManager._file_model_from_db
test
def _file_model_from_db(self, record, content, format): """ Build a file model from database record. """ # TODO: Most of this is shared with _notebook_model_from_db. path = to_api_path(record['parent_name'] + record['name']) model = base_model(path) model['type'] = 'file' model['last_modified'] = model['created'] = record['created_at'] if content: bcontent = record['content'] model['content'], model['format'], model['mimetype'] = from_b64( path, bcontent, format, ) return model
python
{ "resource": "" }
q271210
PostgresContentsManager._save_notebook
test
def _save_notebook(self, db, model, path): """ Save a notebook. Returns a validation message. """ nb_contents = from_dict(model['content']) self.check_and_sign(nb_contents, path) save_file( db, self.user_id, path, writes_base64(nb_contents), self.crypto.encrypt, self.max_file_size_bytes, ) # It's awkward that this writes to the model instead of returning. self.validate_notebook_model(model) return model.get('message')
python
{ "resource": "" }
q271211
PostgresContentsManager._save_file
test
def _save_file(self, db, model, path): """ Save a non-notebook file. """ save_file( db, self.user_id, path, to_b64(model['content'], model.get('format', None)), self.crypto.encrypt, self.max_file_size_bytes, ) return None
python
{ "resource": "" }
q271212
PostgresContentsManager.rename_file
test
def rename_file(self, old_path, path): """ Rename object from old_path to path. NOTE: This method is unfortunately named on the base class. It actually moves a file or a directory. """ with self.engine.begin() as db: try: if self.file_exists(old_path): rename_file(db, self.user_id, old_path, path) elif self.dir_exists(old_path): rename_directory(db, self.user_id, old_path, path) else: self.no_such_entity(path) except (FileExists, DirectoryExists): self.already_exists(path) except RenameRoot as e: self.do_409(str(e))
python
{ "resource": "" }
q271213
PostgresContentsManager.delete_file
test
def delete_file(self, path): """ Delete object corresponding to path. """ if self.file_exists(path): self._delete_non_directory(path) elif self.dir_exists(path): self._delete_directory(path) else: self.no_such_entity(path)
python
{ "resource": "" }
q271214
ensure_db_user
test
def ensure_db_user(db, user_id): """ Add a new user if they don't already exist. """ with ignore_unique_violation(): db.execute( users.insert().values(id=user_id), )
python
{ "resource": "" }
q271215
purge_user
test
def purge_user(db, user_id): """ Delete a user and all of their resources. """ db.execute(files.delete().where( files.c.user_id == user_id )) db.execute(directories.delete().where( directories.c.user_id == user_id )) db.execute(users.delete().where( users.c.id == user_id ))
python
{ "resource": "" }
q271216
create_directory
test
def create_directory(db, user_id, api_path): """ Create a directory. """ name = from_api_dirname(api_path) if name == '/': parent_name = null() parent_user_id = null() else: # Convert '/foo/bar/buzz/' -> '/foo/bar/' parent_name = name[:name.rindex('/', 0, -1) + 1] parent_user_id = user_id db.execute( directories.insert().values( name=name, user_id=user_id, parent_name=parent_name, parent_user_id=parent_user_id, ) )
python
{ "resource": "" }
q271217
_is_in_directory
test
def _is_in_directory(table, user_id, db_dirname): """ Return a WHERE clause that matches entries in a directory. Parameterized on table because this clause is re-used between files and directories. """ return and_( table.c.parent_name == db_dirname, table.c.user_id == user_id, )
python
{ "resource": "" }
q271218
delete_directory
test
def delete_directory(db, user_id, api_path): """ Delete a directory. """ db_dirname = from_api_dirname(api_path) try: result = db.execute( directories.delete().where( and_( directories.c.user_id == user_id, directories.c.name == db_dirname, ) ) ) except IntegrityError as error: if is_foreign_key_violation(error): raise DirectoryNotEmpty(api_path) else: raise rowcount = result.rowcount if not rowcount: raise NoSuchDirectory(api_path) return rowcount
python
{ "resource": "" }
q271219
_dir_exists
test
def _dir_exists(db, user_id, db_dirname): """ Internal implementation of dir_exists. Expects a db-style path name. """ return db.execute( select( [func.count(directories.c.name)], ).where( and_( directories.c.user_id == user_id, directories.c.name == db_dirname, ), ) ).scalar() != 0
python
{ "resource": "" }
q271220
files_in_directory
test
def files_in_directory(db, user_id, db_dirname): """ Return files in a directory. """ fields = _file_default_fields() rows = db.execute( select( fields, ).where( _is_in_directory(files, user_id, db_dirname), ).order_by( files.c.user_id, files.c.parent_name, files.c.name, files.c.created_at, ).distinct( files.c.user_id, files.c.parent_name, files.c.name, ) ) return [to_dict_no_content(fields, row) for row in rows]
python
{ "resource": "" }
q271221
directories_in_directory
test
def directories_in_directory(db, user_id, db_dirname): """ Return subdirectories of a directory. """ fields = _directory_default_fields() rows = db.execute( select( fields, ).where( _is_in_directory(directories, user_id, db_dirname), ) ) return [to_dict_no_content(fields, row) for row in rows]
python
{ "resource": "" }
q271222
_file_where
test
def _file_where(user_id, api_path): """ Return a WHERE clause matching the given API path and user_id. """ directory, name = split_api_filepath(api_path) return and_( files.c.name == name, files.c.user_id == user_id, files.c.parent_name == directory, )
python
{ "resource": "" }
q271223
_select_file
test
def _select_file(user_id, api_path, fields, limit): """ Return a SELECT statement that returns the latest N versions of a file. """ query = select(fields).where( _file_where(user_id, api_path), ).order_by( _file_creation_order(), ) if limit is not None: query = query.limit(limit) return query
python
{ "resource": "" }
q271224
_file_default_fields
test
def _file_default_fields(): """ Default fields returned by a file query. """ return [ files.c.name, files.c.created_at, files.c.parent_name, ]
python
{ "resource": "" }
q271225
_get_file
test
def _get_file(db, user_id, api_path, query_fields, decrypt_func): """ Get file data for the given user_id, path, and query_fields. The query_fields parameter specifies which database fields should be included in the returned file data. """ result = db.execute( _select_file(user_id, api_path, query_fields, limit=1), ).first() if result is None: raise NoSuchFile(api_path) if files.c.content in query_fields: return to_dict_with_content(query_fields, result, decrypt_func) else: return to_dict_no_content(query_fields, result)
python
{ "resource": "" }
q271226
get_file
test
def get_file(db, user_id, api_path, include_content, decrypt_func): """ Get file data for the given user_id and path. Include content only if include_content=True. """ query_fields = _file_default_fields() if include_content: query_fields.append(files.c.content) return _get_file(db, user_id, api_path, query_fields, decrypt_func)
python
{ "resource": "" }
q271227
get_file_id
test
def get_file_id(db, user_id, api_path): """ Get the value in the 'id' column for the file with the given user_id and path. """ return _get_file( db, user_id, api_path, [files.c.id], unused_decrypt_func, )['id']
python
{ "resource": "" }
q271228
file_exists
test
def file_exists(db, user_id, path): """ Check if a file exists. """ try: get_file( db, user_id, path, include_content=False, decrypt_func=unused_decrypt_func, ) return True except NoSuchFile: return False
python
{ "resource": "" }
q271229
rename_directory
test
def rename_directory(db, user_id, old_api_path, new_api_path): """ Rename a directory. """ old_db_path = from_api_dirname(old_api_path) new_db_path = from_api_dirname(new_api_path) if old_db_path == '/': raise RenameRoot('Renaming the root directory is not permitted.') # Overwriting existing directories is disallowed. if _dir_exists(db, user_id, new_db_path): raise DirectoryExists(new_api_path) # Set this foreign key constraint to deferred so it's not violated # when we run the first statement to update the name of the directory. db.execute('SET CONSTRAINTS ' 'pgcontents.directories_parent_user_id_fkey DEFERRED') # Update name column for the directory that's being renamed db.execute( directories.update().where( and_( directories.c.user_id == user_id, directories.c.name == old_db_path, ) ).values( name=new_db_path, ) ) # Update the name and parent_name of any descendant directories. Do # this in a single statement so the non-deferrable check constraint # is satisfied. db.execute( directories.update().where( and_( directories.c.user_id == user_id, directories.c.name.startswith(old_db_path), directories.c.parent_name.startswith(old_db_path), ) ).values( name=func.concat( new_db_path, func.right(directories.c.name, -func.length(old_db_path)) ), parent_name=func.concat( new_db_path, func.right( directories.c.parent_name, -func.length(old_db_path) ) ), ) )
python
{ "resource": "" }
q271230
save_file
test
def save_file(db, user_id, path, content, encrypt_func, max_size_bytes): """ Save a file. TODO: Update-then-insert is probably cheaper than insert-then-update. """ content = preprocess_incoming_content( content, encrypt_func, max_size_bytes, ) directory, name = split_api_filepath(path) with db.begin_nested() as savepoint: try: res = db.execute( files.insert().values( name=name, user_id=user_id, parent_name=directory, content=content, ) ) except IntegrityError as error: # The file already exists, so overwrite its content with the newer # version. if is_unique_violation(error): savepoint.rollback() res = db.execute( files.update().where( _file_where(user_id, path), ).values( content=content, created_at=func.now(), ) ) else: # Unknown error. Reraise raise return res
python
{ "resource": "" }
q271231
generate_files
test
def generate_files(engine, crypto_factory, min_dt=None, max_dt=None, logger=None): """ Create a generator of decrypted files. Files are yielded in ascending order of their timestamp. This function selects all current notebooks (optionally, falling within a datetime range), decrypts them, and returns a generator yielding dicts, each containing a decoded notebook and metadata including the user, filepath, and timestamp. Parameters ---------- engine : SQLAlchemy.engine Engine encapsulating database connections. crypto_factory : function[str -> Any] A function from user_id to an object providing the interface required by PostgresContentsManager.crypto. Results of this will be used for decryption of the selected notebooks. min_dt : datetime.datetime, optional Minimum last modified datetime at which a file will be included. max_dt : datetime.datetime, optional Last modified datetime at and after which a file will be excluded. logger : Logger, optional """ return _generate_notebooks(files, files.c.created_at, engine, crypto_factory, min_dt, max_dt, logger)
python
{ "resource": "" }
q271232
purge_remote_checkpoints
test
def purge_remote_checkpoints(db, user_id): """ Delete all database records for the given user_id. """ db.execute( remote_checkpoints.delete().where( remote_checkpoints.c.user_id == user_id, ) )
python
{ "resource": "" }
q271233
generate_checkpoints
test
def generate_checkpoints(engine, crypto_factory, min_dt=None, max_dt=None, logger=None): """ Create a generator of decrypted remote checkpoints. Checkpoints are yielded in ascending order of their timestamp. This function selects all notebook checkpoints (optionally, falling within a datetime range), decrypts them, and returns a generator yielding dicts, each containing a decoded notebook and metadata including the user, filepath, and timestamp. Parameters ---------- engine : SQLAlchemy.engine Engine encapsulating database connections. crypto_factory : function[str -> Any] A function from user_id to an object providing the interface required by PostgresContentsManager.crypto. Results of this will be used for decryption of the selected notebooks. min_dt : datetime.datetime, optional Minimum last modified datetime at which a file will be included. max_dt : datetime.datetime, optional Last modified datetime at and after which a file will be excluded. logger : Logger, optional """ return _generate_notebooks(remote_checkpoints, remote_checkpoints.c.last_modified, engine, crypto_factory, min_dt, max_dt, logger)
python
{ "resource": "" }
q271234
_generate_notebooks
test
def _generate_notebooks(table, timestamp_column, engine, crypto_factory, min_dt, max_dt, logger): """ See docstrings for `generate_files` and `generate_checkpoints`. Parameters ---------- table : SQLAlchemy.Table Table to fetch notebooks from, `files` or `remote_checkpoints. timestamp_column : SQLAlchemy.Column `table`'s column storing timestamps, `created_at` or `last_modified`. engine : SQLAlchemy.engine Engine encapsulating database connections. crypto_factory : function[str -> Any] A function from user_id to an object providing the interface required by PostgresContentsManager.crypto. Results of this will be used for decryption of the selected notebooks. min_dt : datetime.datetime Minimum last modified datetime at which a file will be included. max_dt : datetime.datetime Last modified datetime at and after which a file will be excluded. logger : Logger """ where_conds = [] if min_dt is not None: where_conds.append(timestamp_column >= min_dt) if max_dt is not None: where_conds.append(timestamp_column < max_dt) if table is files: # Only select files that are notebooks where_conds.append(files.c.name.like(u'%.ipynb')) # Query for notebooks satisfying the conditions. query = select([table]).order_by(timestamp_column) for cond in where_conds: query = query.where(cond) result = engine.execute(query) # Decrypt each notebook and yield the result. for nb_row in result: try: # The decrypt function depends on the user user_id = nb_row['user_id'] decrypt_func = crypto_factory(user_id).decrypt nb_dict = to_dict_with_content(table.c, nb_row, decrypt_func) if table is files: # Correct for files schema differing somewhat from checkpoints. nb_dict['path'] = nb_dict['parent_name'] + nb_dict['name'] nb_dict['last_modified'] = nb_dict['created_at'] # For 'content', we use `reads_base64` directly. If the db content # format is changed from base64, the decoding should be changed # here as well. 
yield { 'id': nb_dict['id'], 'user_id': user_id, 'path': to_api_path(nb_dict['path']), 'last_modified': nb_dict['last_modified'], 'content': reads_base64(nb_dict['content']), } except CorruptedFile: if logger is not None: logger.warning( 'Corrupted file with id %d in table %s.' % (nb_row['id'], table.name) )
python
{ "resource": "" }
q271235
reencrypt_row_content
test
def reencrypt_row_content(db, table, row_id, decrypt_func, encrypt_func, logger): """ Re-encrypt a row from ``table`` with ``id`` of ``row_id``. """ q = (select([table.c.content]) .with_for_update() .where(table.c.id == row_id)) [(content,)] = db.execute(q) logger.info("Begin encrypting %s row %s.", table.name, row_id) db.execute( table .update() .where(table.c.id == row_id) .values(content=encrypt_func(decrypt_func(content))) ) logger.info("Done encrypting %s row %s.", table.name, row_id)
python
{ "resource": "" }
q271236
reencrypt_user_content
test
def reencrypt_user_content(engine, user_id, old_decrypt_func, new_encrypt_func, logger): """ Re-encrypt all of the files and checkpoints for a single user. """ logger.info("Begin re-encryption for user %s", user_id) with engine.begin() as db: # NOTE: Doing both of these operations in one transaction depends for # correctness on the fact that the creation of new checkpoints always # involves writing new data into the database from Python, rather than # simply copying data inside the DB. # If we change checkpoint creation so that it does an in-database copy, # then we need to split this transaction to ensure that # file-reencryption is complete before checkpoint-reencryption starts. # If that doesn't happen, it will be possible for a user to create a # new checkpoint in a transaction that hasn't seen the completed # file-reencryption process, but we might not see that checkpoint here, # which means that we would never update the content of that checkpoint # to the new encryption key. logger.info("Re-encrypting files for %s", user_id) for (file_id,) in select_file_ids(db, user_id): reencrypt_row_content( db, files, file_id, old_decrypt_func, new_encrypt_func, logger, ) logger.info("Re-encrypting checkpoints for %s", user_id) for (cp_id,) in select_remote_checkpoint_ids(db, user_id): reencrypt_row_content( db, remote_checkpoints, cp_id, old_decrypt_func, new_encrypt_func, logger, ) logger.info("Finished re-encryption for user %s", user_id)
python
{ "resource": "" }
q271237
derive_single_fernet_key
test
def derive_single_fernet_key(password, user_id): """ Convert a secret key and a user ID into an encryption key to use with a ``cryptography.fernet.Fernet``. Taken from https://cryptography.io/en/latest/fernet/#using-passwords-with-fernet Parameters ---------- password : unicode ascii-encodable key to derive user_id : unicode ascii-encodable user_id to use as salt """ password = ascii_unicode_to_bytes(password) user_id = ascii_unicode_to_bytes(user_id) kdf = PBKDF2HMAC( algorithm=hashes.SHA256(), length=32, salt=user_id, iterations=100000, backend=default_backend(), ) return base64.urlsafe_b64encode(kdf.derive(password))
python
{ "resource": "" }
q271238
derive_fallback_fernet_keys
test
def derive_fallback_fernet_keys(passwords, user_id): """ Derive a list of per-user Fernet keys from a list of master keys and a username. If a None is encountered in ``passwords``, it is forwarded. Parameters ---------- passwords : list[unicode] List of ascii-encodable keys to derive. user_id : unicode or None ascii-encodable user_id to use as salt """ # Normally I wouldn't advocate for these kinds of assertions, but we really # really really don't want to mess up deriving encryption keys. assert isinstance(passwords, (list, tuple)), \ "Expected list or tuple of keys, got %s." % type(passwords) def derive_single_allow_none(k): if k is None: return None return derive_single_fernet_key(k, user_id).decode('ascii') return list(map(derive_single_allow_none, passwords))
python
{ "resource": "" }
q271239
single_password_crypto_factory
test
def single_password_crypto_factory(password): """ Create and return a function suitable for passing as a crypto_factory to ``pgcontents.utils.sync.reencrypt_all_users`` The factory here returns a ``FernetEncryption`` that uses a key derived from ``password`` and salted with the supplied user_id. """ @memoize_single_arg def factory(user_id): return FernetEncryption( Fernet(derive_single_fernet_key(password, user_id)) ) return factory
python
{ "resource": "" }
q271240
memoize_single_arg
test
def memoize_single_arg(f): """ Decorator memoizing a single-argument function """ memo = {} @wraps(f) def memoized_f(arg): try: return memo[arg] except KeyError: result = memo[arg] = f(arg) return result return memoized_f
python
{ "resource": "" }
q271241
_get_name
test
def _get_name(column_like): """ Get the name from a column-like SQLAlchemy expression. Works for Columns and Cast expressions. """ if isinstance(column_like, Column): return column_like.name elif isinstance(column_like, Cast): return column_like.clause.name
python
{ "resource": "" }
q271242
to_dict_no_content
test
def to_dict_no_content(fields, row): """ Convert a SQLAlchemy row that does not contain a 'content' field to a dict. If row is None, return None. Raises AssertionError if there is a field named 'content' in ``fields``. """ assert(len(fields) == len(row)) field_names = list(map(_get_name, fields)) assert 'content' not in field_names, "Unexpected content field." return dict(zip(field_names, row))
python
{ "resource": "" }
q271243
to_dict_with_content
test
def to_dict_with_content(fields, row, decrypt_func): """ Convert a SQLAlchemy row that contains a 'content' field to a dict. ``decrypt_func`` will be applied to the ``content`` field of the row. If row is None, return None. Raises AssertionError if there is no field named 'content' in ``fields``. """ assert(len(fields) == len(row)) field_names = list(map(_get_name, fields)) assert 'content' in field_names, "Missing content field." result = dict(zip(field_names, row)) result['content'] = decrypt_func(result['content']) return result
python
{ "resource": "" }
q271244
PostgresCheckpoints.create_notebook_checkpoint
test
def create_notebook_checkpoint(self, nb, path): """Create a checkpoint of the current state of a notebook Returns a checkpoint_id for the new checkpoint. """ b64_content = writes_base64(nb) with self.engine.begin() as db: return save_remote_checkpoint( db, self.user_id, path, b64_content, self.crypto.encrypt, self.max_file_size_bytes, )
python
{ "resource": "" }
q271245
PostgresCheckpoints.create_file_checkpoint
test
def create_file_checkpoint(self, content, format, path): """Create a checkpoint of the current state of a file Returns a checkpoint_id for the new checkpoint. """ try: b64_content = to_b64(content, format) except ValueError as e: self.do_400(str(e)) with self.engine.begin() as db: return save_remote_checkpoint( db, self.user_id, path, b64_content, self.crypto.encrypt, self.max_file_size_bytes, )
python
{ "resource": "" }
q271246
PostgresCheckpoints.delete_checkpoint
test
def delete_checkpoint(self, checkpoint_id, path): """delete a checkpoint for a file""" with self.engine.begin() as db: return delete_single_remote_checkpoint( db, self.user_id, path, checkpoint_id, )
python
{ "resource": "" }
q271247
PostgresCheckpoints.get_checkpoint_content
test
def get_checkpoint_content(self, checkpoint_id, path): """Get the content of a checkpoint.""" with self.engine.begin() as db: return get_remote_checkpoint( db, self.user_id, path, checkpoint_id, self.crypto.decrypt, )['content']
python
{ "resource": "" }
q271248
PostgresCheckpoints.list_checkpoints
test
def list_checkpoints(self, path): """Return a list of checkpoints for a given file""" with self.engine.begin() as db: return list_remote_checkpoints(db, self.user_id, path)
python
{ "resource": "" }
q271249
PostgresCheckpoints.rename_all_checkpoints
test
def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" with self.engine.begin() as db: return move_remote_checkpoints( db, self.user_id, old_path, new_path, )
python
{ "resource": "" }
q271250
PostgresCheckpoints.delete_all_checkpoints
test
def delete_all_checkpoints(self, path): """Delete all checkpoints for the given path.""" with self.engine.begin() as db: delete_remote_checkpoints(db, self.user_id, path)
python
{ "resource": "" }
q271251
PostgresCheckpoints.purge_db
test
def purge_db(self): """ Purge all database records for the current user. """ with self.engine.begin() as db: purge_remote_checkpoints(db, self.user_id)
python
{ "resource": "" }
q271252
_resolve_path
test
def _resolve_path(path, manager_dict): """ Resolve a path based on a dictionary of manager prefixes. Returns a triple of (prefix, manager, manager_relative_path). """ path = normalize_api_path(path) parts = path.split('/') # Try to find a sub-manager for the first subdirectory. mgr = manager_dict.get(parts[0]) if mgr is not None: return parts[0], mgr, '/'.join(parts[1:]) # Try to find use the root manager, if one was supplied. mgr = manager_dict.get('') if mgr is not None: return '', mgr, path raise HTTPError( 404, "Couldn't resolve path [{path}] and " "no root manager supplied!".format(path=path) )
python
{ "resource": "" }
q271253
_apply_prefix
test
def _apply_prefix(prefix, model): """ Prefix all path entries in model with the given prefix. """ if not isinstance(model, dict): raise TypeError("Expected dict for model, got %s" % type(model)) # We get unwanted leading/trailing slashes if prefix or model['path'] are # '', both of which are legal values. model['path'] = '/'.join((prefix, model['path'])).strip('/') if model['type'] in ('notebook', 'file'): return model if model['type'] != 'directory': raise ValueError("Unknown model type %s." % type(model)) content = model.get('content', None) if content is not None: for sub_model in content: _apply_prefix(prefix, sub_model) return model
python
{ "resource": "" }
q271254
path_dispatch1
test
def path_dispatch1(mname, returns_model): """ Decorator for methods that accept path as a first argument. """ def _wrapper(self, *args, **kwargs): path, args = _get_arg('path', args, kwargs) prefix, mgr, mgr_path = _resolve_path(path, self.managers) result = getattr(mgr, mname)(mgr_path, *args, **kwargs) if returns_model and prefix: return _apply_prefix(prefix, result) else: return result return _wrapper
python
{ "resource": "" }
q271255
path_dispatch_kwarg
test
def path_dispatch_kwarg(mname, path_default, returns_model): """ Parameterized decorator for methods that accept path as a second argument. """ def _wrapper(self, path=path_default, **kwargs): prefix, mgr, mgr_path = _resolve_path(path, self.managers) result = getattr(mgr, mname)(path=mgr_path, **kwargs) if returns_model and prefix: return _apply_prefix(prefix, result) else: return result return _wrapper
python
{ "resource": "" }
q271256
path_dispatch_old_new
test
def path_dispatch_old_new(mname, returns_model): """ Decorator for methods accepting old_path and new_path. """ def _wrapper(self, old_path, new_path, *args, **kwargs): old_prefix, old_mgr, old_mgr_path = _resolve_path( old_path, self.managers ) new_prefix, new_mgr, new_mgr_path = _resolve_path( new_path, self.managers, ) if old_mgr is not new_mgr: # TODO: Consider supporting this via get+delete+save. raise HTTPError( 400, "Can't move files between backends ({old} -> {new})".format( old=old_path, new=new_path, ) ) assert new_prefix == old_prefix result = getattr(new_mgr, mname)( old_mgr_path, new_mgr_path, *args, **kwargs ) if returns_model and new_prefix: return _apply_prefix(new_prefix, result) else: return result return _wrapper
python
{ "resource": "" }
q271257
HybridContentsManager._managers_changed
test
def _managers_changed(self, name, old, new): """ Strip slashes from directories before updating. """ for key in new: if '/' in key: raise ValueError( "Expected directory names w/o slashes. Got [%s]" % key ) self.managers = {k.strip('/'): v for k, v in new.items()}
python
{ "resource": "" }
q271258
HybridContentsManager.get
test
def get(self, path, content=True, type=None, format=None): """ Special case handling for listing root dir. """ path = normalize_api_path(path) if path: return self.__get(path, content=content, type=type, format=format) if not content: return base_directory_model('') extra_content = self._extra_root_dirs() rm = self.root_manager if rm is None: root_model = base_directory_model('') root_model.update( format='json', content=extra_content, ) else: root_model = rm.get( path, content=content, type=type, format=format, ) # Append the extra directories. root_model['content'].extend(extra_content) return root_model
python
{ "resource": "" }
q271259
normalize_api_path
test
def normalize_api_path(api_path): """ Resolve paths with '..' to normalized paths, raising an error if the final result is outside root. """ normalized = posixpath.normpath(api_path.strip('/')) if normalized == '.': normalized = '' elif normalized.startswith('..'): raise PathOutsideRoot(normalized) return normalized
python
{ "resource": "" }
q271260
split_api_filepath
test
def split_api_filepath(path): """ Split an API file path into directory and name. """ parts = path.rsplit('/', 1) if len(parts) == 1: name = parts[0] dirname = '/' else: name = parts[1] dirname = parts[0] + '/' return from_api_dirname(dirname), name
python
{ "resource": "" }
q271261
writes_base64
test
def writes_base64(nb, version=NBFORMAT_VERSION): """ Write a notebook as base64. """ return b64encode(writes(nb, version=version).encode('utf-8'))
python
{ "resource": "" }
q271262
reads_base64
test
def reads_base64(nb, as_version=NBFORMAT_VERSION):
    """
    Read a notebook from base64.
    """
    try:
        raw = b64decode(nb)
        text = raw.decode('utf-8')
        return reads(text, as_version=as_version)
    except Exception as e:
        # Any failure (bad base64, bad utf-8, bad notebook JSON) indicates
        # corrupted or wrongly-decrypted content.
        raise CorruptedFile(e)
python
{ "resource": "" }
q271263
_decode_unknown_from_base64
test
def _decode_unknown_from_base64(path, bcontent): """ Decode base64 data of unknown format. Attempts to interpret data as utf-8, falling back to ascii on failure. """ content = b64decode(bcontent) try: return (content.decode('utf-8'), 'text') except UnicodeError: pass return bcontent.decode('ascii'), 'base64'
python
{ "resource": "" }
q271264
from_b64
test
def from_b64(path, bcontent, format):
    """
    Decode base64 content for a file.

    format:
      If 'text', the contents will be decoded as UTF-8.
      If 'base64', do nothing.
      If not specified, try to decode as UTF-8, and fall back to base64

    Returns a triple of decoded_content, format, and mimetype.
    """
    decoders = {
        'base64': lambda path, bcontent: (bcontent.decode('ascii'), 'base64'),
        'text': _decode_text_from_base64,
        None: _decode_unknown_from_base64,
    }
    try:
        content, real_format = decoders[format](path, bcontent)
    except HTTPError:
        # Pass through HTTPErrors, since we intend for them to bubble all the
        # way back to the API layer.
        raise
    except Exception as e:
        # Anything else should be wrapped in a CorruptedFile, since it likely
        # indicates misconfiguration of encryption.
        raise CorruptedFile(e)

    # Fall back to a sensible default when the path has no known extension.
    default_mimes = {
        'text': 'text/plain',
        'base64': 'application/octet-stream',
    }
    guessed = mimetypes.guess_type(path)[0]
    mimetype = guessed if guessed is not None else default_mimes[real_format]

    return content, real_format, mimetype
python
{ "resource": "" }
q271265
prefix_dirs
test
def prefix_dirs(path):
    """
    Return an iterable of all prefix directories of path, descending from
    root.
    """
    remaining = path.strip('/')
    prefixes = []
    while remaining:
        remaining = posixpath.dirname(remaining)
        prefixes.append(remaining)
    # Collected deepest-first; reverse to descend from root.
    return reversed(prefixes)
python
{ "resource": "" }
q271266
outside_root_to_404
test
def outside_root_to_404(fn):
    """
    Decorator for converting PathOutsideRoot errors to 404s.
    """
    @wraps(fn)
    def wrapped(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except PathOutsideRoot as e:
            # Surface path traversal attempts as a plain "not found".
            raise HTTPError(404, "Path outside root: [%s]" % e.args[0])
    return wrapped
python
{ "resource": "" }
q271267
create_user
test
def create_user(db_url, user):
    """
    Create a user.
    """
    # Instantiating with create_user_on_startup=True performs the creation
    # as a side effect; the instance itself is discarded.
    kwargs = {
        'db_url': db_url,
        'user_id': user,
        'create_user_on_startup': True,
    }
    PostgresCheckpoints(**kwargs)
python
{ "resource": "" }
q271268
_separate_dirs_files
test
def _separate_dirs_files(models): """ Split an iterable of models into a list of file paths and a list of directory paths. """ dirs = [] files = [] for model in models: if model['type'] == 'directory': dirs.append(model['path']) else: files.append(model['path']) return dirs, files
python
{ "resource": "" }
q271269
walk_dirs
test
def walk_dirs(mgr, dirs):
    """
    Recursive helper for walk.

    Yields (directory, subdirectories, files) triples, depth-first.
    """
    for directory in dirs:
        children = mgr.get(
            directory,
            content=True,
            type='directory',
        )['content']
        subdirs, files = map(sorted, _separate_dirs_files(children))
        yield directory, subdirs, files
        if subdirs:
            # Recurse into subdirectories before moving to the next sibling.
            for entry in walk_dirs(mgr, subdirs):
                yield entry
python
{ "resource": "" }
q271270
walk_files
test
def walk_files(mgr):
    """
    Iterate over all files visible to ``mgr``.

    Yields each file path produced by ``walk``.
    """
    # BUG FIX: the original iterated over walk_files(mgr) itself, which
    # recursed unboundedly and could never yield the (dir, subdirs, files)
    # triples it tried to unpack.  It must iterate ``walk(mgr)`` instead
    # (as walk_files_with_content does).
    for dir_, subdirs, files in walk(mgr):
        for file_ in files:
            yield file_
python
{ "resource": "" }
q271271
walk_files_with_content
test
def walk_files_with_content(mgr):
    """
    Iterate over the contents of all files visible to ``mgr``.
    """
    for _, _, files in walk(mgr):
        for file_path in files:
            yield mgr.get(file_path, content=True)
python
{ "resource": "" }
q271272
reencrypt_all_users
test
def reencrypt_all_users(engine, old_crypto_factory, new_crypto_factory, logger):
    """
    Re-encrypt data for all users.

    This function is idempotent: applying the same re-encryption process
    multiple times has no further effect on the database.  Idempotency is
    achieved by first attempting to decrypt with the old crypto and falling
    back to the new crypto on failure.

    An important consequence of this strategy is that **decrypting** a
    database is not supported with this function, because
    ``NoEncryption.decrypt`` always succeeds.  To decrypt an
    already-encrypted database, use ``unencrypt_all_users`` instead.

    It is, however, possible to perform an initial encryption of a database
    by passing a function returning a ``NoEncryption`` as
    ``old_crypto_factory``.

    Parameters
    ----------
    engine : SQLAlchemy.engine
        Engine encapsulating database connections.
    old_crypto_factory : function[str -> Any]
        A function from user_id to an object providing the interface
        required by PostgresContentsManager.crypto.  Results of this will be
        used for decryption of existing database content.
    new_crypto_factory : function[str -> Any]
        A function from user_id to an object providing the interface
        required by PostgresContentsManager.crypto.  Results of this will be
        used for re-encryption of database content.

        This **must not** return instances of ``NoEncryption``.  Use
        ``unencrypt_all_users`` if you want to unencrypt a database.
    logger : logging.Logger, optional
        A logger to user during re-encryption.

    See Also
    --------
    reencrypt_user
    unencrypt_all_users
    """
    logger.info("Beginning re-encryption for all users.")
    for uid in all_user_ids(engine):
        old_crypto = old_crypto_factory(uid)
        new_crypto = new_crypto_factory(uid)
        reencrypt_single_user(
            engine,
            uid,
            old_crypto=old_crypto,
            new_crypto=new_crypto,
            logger=logger,
        )
    logger.info("Finished re-encryption for all users.")
python
{ "resource": "" }
q271273
reencrypt_single_user
test
def reencrypt_single_user(engine, user_id, old_crypto, new_crypto, logger):
    """
    Re-encrypt all files and checkpoints for a single user.
    """
    # Use FallbackCrypto so that we're re-entrant if we halt partway through:
    # rows already re-encrypted decrypt with new_crypto, the rest with old.
    crypto = FallbackCrypto([new_crypto, old_crypto])
    reencrypt_user_content(
        engine=engine,
        user_id=user_id,
        old_decrypt_func=crypto.decrypt,
        new_encrypt_func=crypto.encrypt,
        logger=logger,
    )
python
{ "resource": "" }
q271274
unencrypt_all_users
test
def unencrypt_all_users(engine, old_crypto_factory, logger):
    """
    Unencrypt data for all users.

    Parameters
    ----------
    engine : SQLAlchemy.engine
        Engine encapsulating database connections.
    old_crypto_factory : function[str -> Any]
        A function from user_id to an object providing the interface
        required by PostgresContentsManager.crypto.  Results of this will be
        used for decryption of existing database content.
    logger : logging.Logger, optional
        A logger to use during unencryption.
    """
    # FIX: the original log messages said "re-encryption", which is
    # misleading in the unencrypt routine.
    logger.info("Beginning unencryption for all users.")
    for user_id in all_user_ids(engine):
        unencrypt_single_user(
            engine=engine,
            user_id=user_id,
            old_crypto=old_crypto_factory(user_id),
            logger=logger,
        )
    logger.info("Finished unencryption for all users.")
python
{ "resource": "" }
q271275
unencrypt_single_user
test
def unencrypt_single_user(engine, user_id, old_crypto, logger):
    """
    Unencrypt all files and checkpoints for a single user.
    """
    # "Re-encrypt" with the identity function so content ends up plaintext.
    identity = lambda s: s
    reencrypt_user_content(
        engine=engine,
        user_id=user_id,
        old_decrypt_func=old_crypto.decrypt,
        new_encrypt_func=identity,
        logger=logger,
    )
python
{ "resource": "" }
q271276
temp_alembic_ini
test
def temp_alembic_ini(alembic_dir_location, sqlalchemy_url):
    """
    Temporarily write an alembic.ini file for use with alembic migration
    scripts.

    Yields the path to the generated ini file; the file and its directory
    are removed when the context exits.
    """
    with TemporaryDirectory() as tempdir:
        ini_path = join(tempdir, 'temp_alembic.ini')
        contents = ALEMBIC_INI_TEMPLATE.format(
            alembic_dir_location=alembic_dir_location,
            sqlalchemy_url=sqlalchemy_url,
        )
        with open(ini_path, 'w') as f:
            f.write(contents)
        yield ini_path
python
{ "resource": "" }
q271277
upgrade
test
def upgrade(db_url, revision):
    """
    Upgrade the given database to revision.
    """
    with temp_alembic_ini(ALEMBIC_DIR_LOCATION, db_url) as alembic_ini:
        # Argument-list form (shell=False) avoids shell injection via inputs.
        command = ['alembic', '-c', alembic_ini, 'upgrade', revision]
        subprocess.check_call(command)
python
{ "resource": "" }
q271278
ContentSerializer.sanitize_block
test
def sanitize_block(self, block):
    """Sanitize the data for the given block.

    If block has a matching embed serializer, run the data through its
    `to_internal_value` method; otherwise return the block untouched."""
    serializer = self.serializers.get(block.get('type', None), None)
    if serializer is None:
        return block
    block['data'] = serializer.to_internal_value(block.get('data', {}))
    return block
python
{ "resource": "" }
q271279
ContentSerializer.queue_instance
test
def queue_instance(self, embed_type, data):
    """Queue an instance to be fetched from the database."""
    serializer = self.serializers.get(embed_type, None)
    if serializer is None:
        # Unknown embed type: nothing to queue.
        return
    # setdefault creates the per-type bucket on first use.
    self.ids.setdefault(embed_type, []).append(serializer.get_id(data))
python
{ "resource": "" }
q271280
ContentSerializer.load_instances
test
def load_instances(self, embed_type, ids):
    """Fetch all queued instances of type `embed_type`, saving results to
    `self.instances`."""
    serializer = self.serializers.get(embed_type, None)
    if serializer is not None:
        self.instances[embed_type] = serializer.fetch(ids)
python
{ "resource": "" }
q271281
ContentSerializer.insert_instance
test
def insert_instance(self, block):
    """Insert a fetched instance into an embed block.

    On any lookup/serialization failure, embed ``None`` rather than
    aborting the whole content render."""
    embed_type = block.get('type', None)
    data = block.get('data', {})

    serializer = self.serializers.get(embed_type, None)
    if serializer is None:
        return block

    try:
        instance_id = serializer.get_id(data)
        instance = self.instances[embed_type][instance_id]
        data[embed_type] = serializer.serialize(instance)
    # FIX: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit; Exception preserves the intended best-effort
    # behavior without masking interpreter-level signals.
    except Exception:
        data[embed_type] = None

    block['data'] = data
    return block
python
{ "resource": "" }
q271282
ContentSerializer.load_data
test
def load_data(self):
    """Load data in bulk for each embed type that has queued ids."""
    for embed_type, queued_ids in self.ids.items():
        self.load_instances(embed_type, queued_ids)
python
{ "resource": "" }
q271283
ZoneSerializer.validate
test
def validate(self, data):
    """Perform validation of the widget data"""
    from dispatch.theme import ThemeManager

    errors = {}

    widget_id = data.get('widget')
    if widget_id is not None:
        try:
            widget = ThemeManager.Widgets.get(widget_id)
        except WidgetNotFound as e:
            errors['widget'] = str(e)
        else:
            # Validate each widget field against the submitted data.
            for field in widget.fields:
                field_data = data['data'].get(field.name)
                if field_data is None:
                    if field.required:
                        errors[field.name] = '%s is required' % field.label
                    continue
                try:
                    field.validate(field_data)
                except InvalidField as e:
                    errors[field.name] = str(e)

    if errors:
        raise ValidationError(errors)

    return data
python
{ "resource": "" }
q271284
admin
test
def admin(request):
    """Render HTML entry point for manager app."""
    version = dispatch.__version__
    context = {
        'api_url': settings.API_URL,
        # Versioned bundle names bust browser caches across releases.
        'app_js_bundle': 'manager-%s.js' % version,
        'app_css_bundle': 'manager-%s.css' % version,
    }
    return render_to_response('manager/index.html', context)
python
{ "resource": "" }
q271285
Template.to_json
test
def to_json(self):
    """Return JSON representation for this template"""
    return {
        field.name: field.to_json(self.data.get(field.name))
        for field in self.fields
    }
python
{ "resource": "" }
q271286
DispatchModelSerializer.hide_authenticated_fields
test
def hide_authenticated_fields(self):
    """Hides authenticated_fields if request context is missing or user is
    not authenticated"""
    if self.is_authenticated():
        return
    # Meta may not declare authenticated_fields; default to nothing hidden.
    for field in getattr(self.Meta, 'authenticated_fields', []):
        self.fields.pop(field)
python
{ "resource": "" }
q271287
DispatchModelSerializer.exclude_fields
test
def exclude_fields(self):
    """Excludes fields that are included in the query parameters.

    Reads the comma-separated ``exclude`` query parameter and removes each
    named field from the serializer."""
    request = self.context.get('request')
    if not request:
        return

    exclude = request.query_params.get('exclude', None)
    if exclude is None:
        return

    for field in exclude.split(','):
        # FIX: use pop(field, None) so an unknown or duplicated field name
        # supplied by the client doesn't raise KeyError (a 500).
        self.fields.pop(field, None)
python
{ "resource": "" }
q271288
PublishableManager.get
test
def get(self, *args, **kwargs):
    """Get the latest article with the given primary key.

    Rewrites lookup kwargs before delegating to the base manager:
    - 'pk' is translated to (parent=pk, head=True), selecting the latest
      revision of the revision family identified by pk.
    - If a 'request' kwarg is present and its querystring carries both
      'version' and 'preview_id', the lookup targets that exact revision
      preview and the 'is_published' filter is dropped.
    """
    if 'pk' in kwargs:
        # 'pk' identifies the revision family; head=True picks its latest
        # revision.
        kwargs['parent'] = kwargs['pk']
        kwargs['head'] = True
        del kwargs['pk']
    """If the url requested includes the querystring parameters 'version' and 'preview_id', get the article with the specified version and preview_id. Otherwise, get the published version of the article. """
    if 'request' in kwargs:
        request = kwargs['request']
        version = request.GET.get('version', None)
        preview_id = request.GET.get('preview_id', None)
        if (version is not None) and (preview_id is not None):
            kwargs['revision_id'] = version
            kwargs['preview_id'] = preview_id
            # Previews may target unpublished revisions, so drop the
            # published-only filter.
            # NOTE(review): assumes the caller always passes 'is_published'
            # alongside 'request' -- otherwise this raises KeyError; confirm
            # against callers.
            del kwargs['is_published']
        # 'request' is not a model field; remove it before the ORM lookup.
        del kwargs['request']
    return super(PublishableManager, self).get(*args, **kwargs)
python
{ "resource": "" }
q271289
ArticleViewSet.get_queryset
test
def get_queryset(self):
    """Optionally restricts the returned articles by filtering against
    query parameters in the URL."""
    # Base queryset from DispatchPublishableMixin.
    queryset = self.get_publishable_queryset()

    # Prefetch related rows up front to avoid N+1 queries when serializing.
    queryset = queryset.select_related(
        'featured_image', 'featured_video', 'topic', 'section', 'subsection'
    ).prefetch_related(
        'tags',
        'featured_image__image__authors',
        'authors'
    ).order_by('-updated_at')

    params = self.request.query_params

    q = params.get('q', None)
    if q is not None:
        queryset = queryset.filter(headline__icontains=q)

    section = params.get('section', None)
    if section is not None:
        queryset = queryset.filter(section_id=section)

    tags = params.getlist('tags', None)
    if tags is not None:
        # Chained filters require the article to carry every listed tag.
        for tag in tags:
            queryset = queryset.filter(tags__id=tag)

    author = params.get('author', None)
    if author is not None:
        queryset = queryset.filter(authors__person_id=author)

    return queryset
python
{ "resource": "" }
q271290
PageViewSet.get_queryset
test
def get_queryset(self):
    """Only display unpublished content to authenticated users, filter by
    query parameter if present."""
    # Base queryset from DispatchPublishableMixin, newest first.
    queryset = self.get_publishable_queryset().order_by('-updated_at')

    q = self.request.query_params.get('q')
    if q:
        queryset = queryset.filter(title__icontains=q)

    return queryset
python
{ "resource": "" }
q271291
NullBooleanField.get_attribute
test
def get_attribute(self, instance):
    """Overrides the default get_attribute method to coerce falsy values
    (including None) to False."""
    value = super(NullBooleanField, self).get_attribute(instance)
    return bool(value)
python
{ "resource": "" }
q271292
validate_widget
test
def validate_widget(widget):
    """Checks that the given widget contains the required fields"""
    # Table of (predicate, error template) pairs, checked in order.
    checks = (
        (has_valid_id, "%s must contain a valid 'id' attribute"),
        (has_valid_name, "%s must contain a valid 'name' attribute"),
        (has_valid_template, "%s must contain a valid 'template' attribute"),
    )
    for check, message in checks:
        if not check(widget):
            raise InvalidWidget(message % widget.__name__)

    # Widget must declare at least one compatible zone.
    if not getattr(widget, 'zones', None):
        raise InvalidWidget("%s must be compatible with at least one zone" % widget.__name__)
python
{ "resource": "" }
q271293
validate_zone
test
def validate_zone(zone):
    """Checks that the given zone contains the required fields"""
    checks = (
        (has_valid_id, "%s must contain a valid 'id' attribute"),
        (has_valid_name, "%s must contain a valid 'name' attribute"),
    )
    for check, message in checks:
        if not check(zone):
            raise InvalidZone(message % zone.__name__)
python
{ "resource": "" }
q271294
is_valid_uuid
test
def is_valid_uuid(id):
    """Return True if id is a valid version-4 UUID string, False otherwise."""
    if not isinstance(id, basestring):
        return False
    try:
        # FIX: constructing the UUID is the validation; the result was
        # bound to an unused local in the original.
        UUID(id, version=4)
    except ValueError:
        return False
    return True
python
{ "resource": "" }
q271295
User.get_permissions
test
def get_permissions(self):
    """Returns the user's permissions."""
    # Admin if in the Admin group or a superuser; otherwise no permissions.
    is_admin = self.groups.filter(name='Admin').exists() or self.is_superuser
    return 'admin' if is_admin else ''
python
{ "resource": "" }
q271296
User.modify_permissions
test
def modify_permissions(self, permissions):
    """Modify the user's permissions."""
    admin_group = Group.objects.get(name='Admin')
    if permissions == 'admin':
        self.groups.add(admin_group)
    else:
        # Any non-admin value revokes Admin membership.
        self.groups.remove(admin_group)
python
{ "resource": "" }
q271297
AuthorValidator
test
def AuthorValidator(data):
    """Raise a ValidationError if data does not match the author format."""
    # Accept a single author dict by normalizing it into a list.
    authors = data if isinstance(data, list) else [data]

    for author in authors:
        if 'person' not in author:
            raise ValidationError('An author must contain a person.')
        # 'type' is optional, but when present it must be a string.
        if 'type' in author and not isinstance(author['type'], basestring):
            raise ValidationError('The author type must be a string.')
python
{ "resource": "" }
q271298
Zone.save
test
def save(self, validated_data):
    """Save widget data for this zone."""
    (zone, created) = ZoneModel.objects.get_or_create(zone_id=self.id)

    zone.widget_id = validated_data['widget']
    zone.data = validated_data['data']

    # Run the before-save hook on any nested widget payloads, i.e. dict
    # values that carry both an 'id' and a 'data' key.
    for key, value in list(zone.data.items()):
        if isinstance(value, dict) and 'id' in value and 'data' in value:
            value['data'] = self.before_save(value['id'], value['data'])

    # Run the before-save hook for the zone's own widget.
    zone.data = self.before_save(zone.widget_id, zone.data)

    return zone.save()
python
{ "resource": "" }
q271299
Widget.get_data
test
def get_data(self):
    """Returns data from each field."""
    return {field.name: self.data.get(field.name) for field in self.fields}
python
{ "resource": "" }