Search is not available for this dataset
text stringlengths 75 104k |
|---|
def get(self,
variable_path: str,
default: t.Optional[t.Any] = None,
coerce_type: t.Optional[t.Type] = None,
coercer: t.Optional[t.Callable] = None,
**kwargs):
"""
Reads a value of ``variable_path`` from consul kv storage.
:param varia... |
def add_format(mimetype, format, requires_context=False):
""" Registers a new format to be used in a graph's serialize call
If you've installed an rdflib serializer plugin, use this
to add it to the content negotiation system
Set requires_context=True if this format requires a context-aware graph
"""
... |
def add_format(self, mimetype, format, requires_context=False):
""" Registers a new format to be used in a graph's serialize call
If you've installed an rdflib serializer plugin, use this
to add it to the content negotiation system
Set requires_context=True if this format requires a context-aware gr... |
def get_default_mimetype(self):
    """Resolve the default mimetype through the fallback chain.

    Precedence: the instance/class setting, then the module-level
    ``DEFAULT_MIMETYPE``, and finally the hard-coded
    ``'application/rdf+xml'``.
    """
    chosen = self.default_mimetype
    if chosen is None:  # class setting absent; use module default
        chosen = DEFAULT_MIMETYPE
    if chosen is None:  # module default unset as well
        chosen = 'application/rdf+xml'
    return chosen
def get_wildcard_mimetype(self):
    """Resolve the mimetype to serve when the client sends ``*/*``.

    Precedence: the instance/class setting, then the module-level
    ``WILDCARD_MIMETYPE``, and finally the hard-coded
    ``'application/rdf+xml'``.
    """
    chosen = self.wildcard_mimetype
    if chosen is None:  # class setting absent; use module default
        chosen = WILDCARD_MIMETYPE
    if chosen is None:  # module default unset as well
        chosen = 'application/rdf+xml'
    return chosen
def decide_mimetype(self, accepts, context_aware = False):
""" Returns what mimetype the client wants to receive
Parses the given Accept header and returns the best one that
we know how to output
An empty Accept will default to application/rdf+xml
An Accept with */* use rdf+xml unless a better... |
def get_serialize_format(self, mimetype):
    """Look up the serialization format registered for ``mimetype``.

    Checks the instance-level registry first, then falls back to the
    module-level ``formats`` mapping. Returns None when unknown.
    """
    fmt = self.formats.get(mimetype)
    if fmt is None:  # not registered on this instance; try module registry
        fmt = formats.get(mimetype)
    return fmt
def decide(self, accepts, context_aware=False):
""" Returns what (mimetype,format) the client wants to receive
Parses the given Accept header and picks the best one that
we know how to output
Returns (mimetype, format)
An empty Accept will default to rdf+xml
An Accept with */* use rdf+xm... |
def wants_rdf(self, accepts):
    """Return whether the Accept header asks for an RDF mimetype.

    A bare wildcard match does not count as a genuine RDF request.
    """
    candidates = all_mimetypes + self.all_mimetypes + [WILDCARD]
    best = mimeparse.best_match(candidates, accepts)
    # Only a concrete (non-wildcard) match means the client wants RDF.
    return best and best != WILDCARD
async def send_http(session, method, url, *,
retries=1,
interval=1,
backoff=2,
http_status_codes_to_retry=HTTP_STATUS_CODES_TO_RETRY,
fn=lambda x:x,
**kwargs):
"""
Sends a HTTP request and imp... |
def generate_output(self, writer):
"""
Generates the sitemap file and the stylesheet file and puts them into the content dir.
:param writer: the writer instance
:type writer: pelican.writers.Writer
"""
# write xml stylesheet
with codecs_open(os.path.join(os.path.d... |
def __get_direct_template_url(self, name):
"""
Returns the URL for the given DIRECT_TEMPLATE name.
Favors ${DIRECT_TEMPLATE}_SAVE_AS over the default path.
:param name: name of the direct template
:return: str
"""
url = self.pelican_settings.get('{}_SAVE_AS'.forma... |
def __process_url_wrapper_elements(self, elements):
"""
Creates the url nodes for pelican.urlwrappers.Category and pelican.urlwrappers.Tag.
:param elements: list of wrapper elements
:type elements: list
:return: the processes urls as HTML
:rtype: str
"""
u... |
def __create_url_node_for_content(self, content, content_type, url=None, modification_time=None):
"""
Creates the required <url> node for the sitemap xml.
:param content: the content class to handle
:type content: pelican.contents.Content | None
:param content_type: the type of t... |
def get_missing_commands(_platform):
"""Check I can identify the necessary commands for managing users."""
missing = list()
if _platform in ('Linux', 'OpenBSD'):
if not LINUX_CMD_USERADD:
missing.append('useradd')
if not LINUX_CMD_USERMOD:
missing.append('usermod')
... |
def execute_command(command=None):
    """Execute a command and return its captured output and exit status.

    :param command: the command and its arguments as a list (shell-free)
    :return: tuple of ``((stdout, stderr), returncode)``
    """
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # communicate() returns (stdout, stderr); the original mislabeled the
    # second value as "stdin". communicate() also waits for the process to
    # terminate, so the separate wait() call was redundant.
    stdout, stderr = process.communicate()
    return (stdout, stderr), process.returncode
def base64encode(_input=None):
"""Return base64 encoded representation of a string."""
if PY2: # pragma: no cover
return base64.b64encode(_input)
elif PY3: # pragma: no cover
if isinstance(_input, bytes):
return base64.b64encode(_input).decode('UTF-8')
elif isinstance(_... |
def base64decode(_input=None):
"""Take a base64 encoded string and return the decoded string."""
missing_padding = 4 - len(_input) % 4
if missing_padding:
_input += '=' * missing_padding
if PY2: # pragma: no cover
return base64.decodestring(_input)
elif PY3: # pragma: no cover
... |
def read_sudoers():
""" Read the sudoers entry for the specified user.
args:
username (str): username.
returns:`r
str: sudoers entry for the specified user.
"""
sudoers_path = '/etc/sudoers'
rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
tmp_sudoers_path = '/tmp/s... |
def write_sudoers_entry(username=None, sudoers_entry=None):
"""Write sudoers entry.
args:
user (User): Instance of User containing sudoers entry.
returns:
str: sudoers entry for the specified user.
"""
sudoers_path = '/etc/sudoers'
rnd_chars = random_string(length=RANDOM_FILE_... |
def get_sudoers_entry(username=None, sudoers_entries=None):
""" Find the sudoers entry in the sudoers file for the specified user.
args:
username (str): username.
sudoers_entries (list): list of lines from the sudoers file.
returns:`r
str: sudoers entry for the specified user.
... |
def docstring(documentation, prepend=False, join=""):
r"""Prepend or append a string to the current documentation of the function.
This decorator should be robust even if ``func.__doc__`` is None
(for example, if -OO was passed to the interpreter).
Usage::
@docstring('Appended this line')
... |
def create_admin(user_config_path: str = 'CONFIG.superuser') -> bool:
"""
Creates a superuser from a specified dict/object bundle located at ``user_config_path``.
Skips if the specified object contains no email or no username.
If a user with the specified username already exists and has no usable passwo... |
def run_gunicorn(application: WSGIHandler, gunicorn_module_name: str = 'gunicorn_prod'):
"""
Runs gunicorn with a specified config.
:param application: Django uwsgi application
:param gunicorn_module_name: gunicorn settings module name
:return: ``Application().run()``
"""
from gunicorn.app.... |
def set_color(
fg=Color.normal,
bg=Color.normal,
fg_dark=False,
bg_dark=False,
underlined=False,
):
"""Set the console color.
>>> set_color(Color.red, Color.blue)
>>> set_color('red', 'blue')
>>> set_color() # returns back to normal
"""
_set_color(fg, bg, fg_dar... |
def cprint(
text,
fg=Color.normal,
bg=Color.normal,
fg_dark=False,
bg_dark=False,
underlined=False,
parse=False,
):
"""Print string in to stdout using colored font.
See L{set_color} for more details about colors.
Args:
text (str): Text that needs to be pri... |
def colorize_output(output, colors, indent=0):
r"""Print output to console using provided color mappings.
Color mapping is dict with regular expressions as key and tuple of two as
values. Key is used to match if line should be colorized and tuple contains
color to be used and boolean value that in... |
def _colorize_single_line(line, regexp, color_def):
"""Print single line to console with ability to colorize parts of it."""
match = regexp.match(line)
groupdict = match.groupdict()
groups = match.groups()
if not groupdict:
# no named groups, just colorize whole line
color = c... |
def height(self):
    """Return the terminal height, cached after the first lookup.

    Only queries the underlying terminal for interactive sessions;
    returns whatever is cached (possibly None) otherwise.
    """
    if self.interactive and self._height is None:
        # Query once and cache -- terminal lookups are not free.
        self._height = self.term.height
    return self._height
def clear_last_lines(self, n):
    """Erase the last ``n`` lines of terminal output.

    Emits cursor-up sequences followed by a clear-to-end-of-screen,
    then flushes the stream so the erase happens immediately.
    """
    sequence = self.term.move_up * n + self.term.clear_eos
    stream = self.term.stream
    stream.write(sequence)
    stream.flush()
def overwrite_line(self, n, text):
    """Overwrite the line ``n`` rows above the cursor with ``text``.

    Relies on ``self._moveback(n)`` (a context manager) to reposition
    the cursor before the write -- presumably restoring it afterwards.
    """
    with self._moveback(n):
        stream = self.term.stream
        stream.write(text)
def move_to(self, n):
    """Move the cursor up by ``n`` lines in the terminal."""
    escape = self.term.move_up * n
    self.term.stream.write(escape)
def get(self,
variable_path: str,
default: t.Optional[t.Any] = None,
coerce_type: t.Optional[t.Type] = None,
coercer: t.Optional[t.Callable] = None,
required: bool = False,
**kwargs):
"""
Tries to read a ``variable_path`` from each ... |
def import_parsers(parser_modules: t.Iterable[str]) -> t.Generator[t.Type[BaseParser], None, None]:
"""
Resolves and imports all modules specified in ``parser_modules``. Short names from the local scope
are supported (the scope is ``django_docker_helpers.config.backends``).
:param parse... |
def load_parser_options_from_env(
parser_class: t.Type[BaseParser],
env: t.Optional[t.Dict[str, str]] = None) -> t.Dict[str, t.Any]:
"""
Extracts arguments from ``parser_class.__init__`` and populates them from environment variables.
Uses ``__init__`` argument type annot... |
def from_env(parser_modules: t.Optional[t.Union[t.List[str], t.Tuple[str]]] = DEFAULT_PARSER_MODULES,
env: t.Optional[t.Dict[str, str]] = None,
silent: bool = False,
suppress_logs: bool = False,
extra: t.Optional[dict] = None) -> 'ConfigLoader':
... |
def print_config_read_queue(
self,
use_color: bool = False,
max_col_width: int = 50):
"""
Prints all read (in call order) options.
:param max_col_width: limit column width, ``50`` by default
:param use_color: use terminal colors
:return: nothi... |
def format_config_read_queue(self,
use_color: bool = False,
max_col_width: int = 50) -> str:
"""
Prepares a string with pretty printed config read queue.
:param use_color: use terminal colors
:param max_col_width: limit c... |
def get_graph(cls, response):
    """Extract the rdflib Graph carried by a Flask response, if any.

    Accepts either a bare graph or an indexable container (e.g. a
    ``(graph, status)`` tuple) whose first element is a graph.
    Falls through (returning None) when no graph is found.
    """
    if cls.is_graph(response):
        # The response itself is the graph.
        return response
    indexable = hasattr(response, '__getitem__')
    if indexable and len(response) > 0 and cls.is_graph(response[0]):
        # Tuple-style response: the graph rides in the first slot.
        return response[0]
def replace_graph(cls, response, serialized):
""" Replace the rdflib Graph in a Flask response """
if cls.is_graph(response): # single graph object
return serialized
if hasattr(response, '__getitem__'): # indexable tuple
if len(response) > 0 and \
cls.is_graph(response[0]): # graph object
return ... |
def _from_hex_digest(digest):
"""Convert hex digest to sequence of bytes."""
return "".join(
[chr(int(digest[x : x + 2], 16)) for x in range(0, len(digest), 2)]
) |
def encrypt(data, digest=True):
    """Encrypt ``data`` using the best available algorithm.

    The result is prefixed with the algorithm name and ``$`` so the
    matching decrypt routine can pick the right implementation.
    :param digest: when truthy, hex-encode the ciphertext
    """
    algorithm = get_best_algorithm()
    key = implementations["get_key"]()
    ciphertext = implementations["encryption"][algorithm](data, key)
    payload = _to_hex_digest(ciphertext) if digest else ciphertext
    return "%s$%s" % (algorithm, payload)
def decrypt(data, digest=True):
"""Decrypt provided data."""
alg, _, data = data.rpartition("$")
if not alg:
return data
data = _from_hex_digest(data) if digest else data
try:
return implementations["decryption"][alg](
data, implementations["get_key"]()
)
exce... |
def one_greedy(self,dp,namax=None,nimax=None,nomax=None):
"""Reconstructs a directed acyclic graph according to prior information of edge significance.
This function first ranks all edges and introduce the most significant one by one, avoiding
those that would create a loop. Optional constraints on the maximum total... |
def gen_vocab(cli, args):
''' Generate vocabulary list from a tokenized file '''
if args.topk and args.topk <= 0:
topk = None
cli.logger.warning("Invalid k will be ignored (k should be greater than or equal to 1)")
else:
topk = args.topk
if args.stopwords:
with open(args.... |
def main():
''' ChirpText Tools main function '''
app = CLIApp(desc='ChirpText Tools', logger=__name__, show_version=show_version)
# add tasks
vocab_task = app.add_task('vocab', func=gen_vocab)
vocab_task.add_argument('input', help='Input file')
vocab_task.add_argument('--output', help='Output f... |
def add_attachment(message, attachment, rfc2231=True):
'''Attach an attachment to a message as a side effect.
Arguments:
message: MIMEMultipart instance.
attachment: Attachment instance.
'''
data = attachment.read()
part = MIMEBase('application', 'octet-stream')
part.set_payloa... |
def _login(self):
'''Login to the SMTP server specified at instantiation
Returns an authenticated SMTP instance.
'''
server, port, mode, debug = self.connection_details
if mode == 'SSL':
smtp_class = smtplib.SMTP_SSL
else:
smtp_class = smtplib.SM... |
def send(self, email, attachments=()):
'''Send an email. Connect/Disconnect if not already connected
Arguments:
email: Email instance to send.
attachments: iterable containing Attachment instances
'''
msg = email.as_mime(attachments)
if 'From' not in ms... |
def column_types(self):
    """Map every column name in this table to its SQLAlchemy type."""
    return {column.name: column.type for column in self.sqla_columns}
def _valid_table_name(self, table_name):
"""Check if the table name is obviously invalid.
"""
if table_name is None or not len(table_name.strip()):
raise ValueError("Invalid table name: %r" % table_name)
return table_name.strip() |
def add_primary_key(self, column="id"):
"""Add primary key constraint to specified column
"""
if not self.primary_key:
sql = """ALTER TABLE {s}.{t}
ADD PRIMARY KEY ({c})
""".format(
s=self.schema, t=self.name, c=column
... |
def drop(self):
    """Drop the backing table from the database.

    Idempotent: once the table has been dropped this becomes a no-op.
    """
    # NOTE: the original tests `is False` exactly, so any other falsy
    # value of _is_dropped also skips the drop; preserved here.
    if self._is_dropped is not False:
        return
    self.table.drop(self.engine)
    self._is_dropped = True
def create_column(self, name, type):
"""
Explicitely create a new column ``name`` of a specified type.
``type`` must be a `SQLAlchemy column type <http://docs.sqlalchemy.org/en/rel_0_8/core/types.html>`_.
::
table.create_column('created_at', sqlalchemy.DateTime)
"""
... |
def drop_column(self, name):
"""
Drop the column ``name``
::
table.drop_column('created_at')
"""
self._check_dropped()
if name in list(self.table.columns.keys()):
self.op.drop_column(self.table.name, name, schema=self.schema)
self.tabl... |
def create_index(self, columns, name=None, index_type="btree"):
"""
Create an index to speed up queries on a table.
If no ``name`` is given a random name is created.
::
table.create_index(['name', 'country'])
"""
self._check_dropped()
if not name:
... |
def distinct(self, *columns, **_filter):
"""
Returns all rows of a table, but removes rows in with duplicate values in ``columns``.
Interally this creates a `DISTINCT statement <http://www.w3schools.com/sql/sql_distinct.asp>`_.
::
# returns only one row per year, ignoring th... |
def insert(self, row):
"""
Add a row (type: dict) by inserting it into the table.
Columns must exist.
::
data = dict(title='I am a banana!')
table.insert(data)
Returns the inserted row's primary key.
"""
self._check_dropped()
res = ... |
def insert_many(self, rows, chunk_size=1000):
"""
Add many rows at a time, which is significantly faster than adding
them one by one. Per default the rows are processed in chunks of
1000 per commit, unless you specify a different ``chunk_size``.
See :py:meth:`insert() <dataset.Ta... |
def rename(self, name):
"""Rename the table
"""
sql = """ALTER TABLE {s}.{t} RENAME TO {name}
""".format(
s=self.schema, t=self.name, name=name
)
self.engine.execute(sql)
self.table = SQLATable(name, self.metadata, schema=self.schema, autoload=Tr... |
def find_one(self, **kwargs):
"""
Works just like :py:meth:`find() <dataset.Table.find>` but returns one result, or None.
::
row = table.find_one(country='United States')
"""
kwargs["_limit"] = 1
iterator = self.find(**kwargs)
try:
return n... |
def find(
self,
_limit=None,
_offset=0,
_step=5000,
order_by="id",
return_count=False,
**_filter
):
"""
Performs a simple search on the table. Simply pass keyword arguments as ``filter``.
::
results = table.find(country='Fra... |
def connect(url=None, schema=None, sql_path=None, multiprocessing=False):
    """Open a new postgres connection via psycopg2/sqlalchemy.

    Falls back to the ``DATABASE_URL`` environment variable when no
    explicit ``url`` is supplied.
    """
    target = url if url is not None else os.environ.get("DATABASE_URL")
    return Database(target, schema, sql_path=sql_path, multiprocessing=multiprocessing)
def create_db(url=None):
"""Create a new database
"""
if url is None:
url = os.environ.get("DATABASE_URL")
parsed_url = urlparse(url)
db_name = parsed_url.path
db_name = db_name.strip("/")
db = connect("postgresql://" + parsed_url.netloc)
# check that db does not exist
q = ""... |
def drop_db(url):
"""Drop specified database
"""
parsed_url = urlparse(url)
db_name = parsed_url.path
db_name = db_name.strip("/")
db = connect("postgresql://" + parsed_url.netloc)
# check that db exists
q = """SELECT 1 as exists
FROM pg_database
WHERE datname = '{d... |
def shred(key_name: str,
value: t.Any,
field_names: t.Iterable[str] = SHRED_DATA_FIELD_NAMES) -> t.Union[t.Any, str]:
"""
Replaces sensitive data in ``value`` with ``*`` if ``key_name`` contains something that looks like a secret.
:param field_names: a list of key names that can possibl... |
def dot_path(obj: t.Union[t.Dict, object],
path: str,
default: t.Any = None,
separator: str = '.'):
"""
Provides an access to elements of a mixed dict/object type by a delimiter-separated path.
::
class O1:
my_dict = {'a': {'b': 1}}
class ... |
def dotkey(obj: dict, path: str, default=None, separator='.'):
"""
Provides an interface to traverse nested dict values by dot-separated paths. Wrapper for ``dpath.util.get``.
:param obj: dict like ``{'some': {'value': 3}}``
:param path: ``'some.value'``
:param separator: ``'.'`` or ``'/'`` or what... |
def _materialize_dict(bundle: dict, separator: str = '.') -> t.Generator[t.Tuple[str, t.Any], None, None]:
"""
Traverses and transforms a given dict ``bundle`` into tuples of ``(key_path, value)``.
:param bundle: a dict to traverse
:param separator: build paths with a given separator
:return: a gen... |
def materialize_dict(bundle: dict, separator: str = '.') -> t.List[t.Tuple[str, t.Any]]:
"""
Transforms a given ``bundle`` into a *sorted* list of tuples with materialized value paths and values:
``('path.to.value', <value>)``. Output is ordered by depth: the deepest element first.
:param bundle: a dic... |
def mp_serialize_dict(
bundle: dict,
separator: str = '.',
serialize: t.Optional[t.Callable] = dump_yaml,
value_prefix: str = '::YAML::\n') -> t.List[t.Tuple[str, bytes]]:
"""
Transforms a given ``bundle`` into a *sorted* list of tuples with materialized value paths and values:
... |
def wf(raw_str: str,
flush: bool = True,
prevent_completion_polluting: bool = True,
stream: t.TextIO = sys.stdout):
"""
Writes a given ``raw_str`` into a ``stream``. Ignores output if ``prevent_completion_polluting`` is set and there's
no extra ``sys.argv`` arguments present (a bash com... |
def coerce_str_to_bool(val: t.Union[str, int, bool, None], strict: bool = False) -> bool:
"""
Converts a given string ``val`` into a boolean.
:param val: any string representation of boolean
:param strict: raise ``ValueError`` if ``val`` does not look like a boolean-like object
:return: ``True`` if... |
def env_bool_flag(flag_name: str, strict: bool = False, env: t.Optional[t.Dict[str, str]] = None) -> bool:
"""
Converts an environment variable into a boolean. Empty string (presence in env) is treated as ``True``.
:param flag_name: an environment variable name
:param strict: raise ``ValueError`` if a ... |
def run_env_once(f: t.Callable) -> t.Callable:
"""
A decorator to prevent ``manage.py`` from running code twice for everything.
(https://stackoverflow.com/questions/16546652/why-does-django-run-everything-twice)
:param f: function or method to decorate
:return: callable
"""
@wraps(f)
d... |
def is_dockerized(flag_name: str = 'DOCKERIZED', strict: bool = False):
"""
Reads env ``DOCKERIZED`` variable as a boolean.
:param flag_name: environment variable name
:param strict: raise a ``ValueError`` if variable does not look like a normal boolean
:return: ``True`` if has truthy ``DOCKERIZED`... |
def is_production(flag_name: str = 'PRODUCTION', strict: bool = False):
"""
Reads env ``PRODUCTION`` variable as a boolean.
:param flag_name: environment variable name
:param strict: raise a ``ValueError`` if variable does not look like a normal boolean
:return: ``True`` if has truthy ``PRODUCTION`... |
def load_geuvadis_data():
"""This function loads downsampled data files from the Geuvadis study (Lappalainen, T. et al. Transcriptome and genome sequencing uncovers functional variation in humans. Nature 501, 506-511 (2013)), including expression levels of 10 miRNAs and 3000 genes for 360 European individuals. Among t... |
def runcode(code):
    """Run the given code line by line with printing, as list of lines, and return variable 'ans'."""
    # Each line is echoed as a '#' comment before execution, so stdout
    # reads like an annotated transcript of the run.
    for line in code:
        print('# '+line)
        # Executed in the module's global namespace: lines may define or
        # update globals across iterations.
        exec(line,globals())
    print('# return ans')
    # 'ans' is expected to have been created as a global by one of the
    # executed lines; a NameError is raised here otherwise.
    return ans
def signal(*args, **kwargs):
from .core import Signal
"""A signal decorator designed to work both in the simpler way, like:
.. code:: python
@signal
def validation_function(arg1, ...):
'''Some doc'''
and also as a double-called decorator, like
.. code:: python
@signa... |
def exec_all_endpoints(self, *args, **kwargs):
"""Execute each passed endpoint and collect the results. If a result
is anoter `MultipleResults` it will extend the results with those
contained therein. If the result is `NoResult`, skip the addition."""
results = []
for handler in ... |
def run(self, *args, **kwargs):
"""Call all the registered handlers with the arguments passed.
If this signal is a class member, call also the handlers registered
at class-definition time. If an external publish function is
supplied, call it with the provided arguments.
:returns... |
def login_defs():
"""Discover the minimum and maximum UID number."""
uid_min = None
uid_max = None
login_defs_path = '/etc/login.defs'
if os.path.exists(login_defs_path):
with io.open(text_type(login_defs_path), encoding=text_type('utf-8')) as log_defs_file:
login_data = log_defs... |
def collect_static() -> bool:
"""
Runs Django ``collectstatic`` command in silent mode.
:return: always ``True``
"""
from django.core.management import execute_from_command_line
# from django.conf import settings
# if not os.listdir(settings.STATIC_ROOT):
wf('Collecting static files... ... |
def inner_parser(self) -> BaseParser:
"""
Prepares inner config parser for config stored at ``endpoint``.
:return: an instance of :class:`~django_docker_helpers.config.backends.base.BaseParser`
:raises config.exceptions.KVStorageValueIsEmpty: if specified ``endpoint`` does not contain ... |
def generate_add_user_command(proposed_user=None, manage_home=None):
"""Generate command to add a user.
args:
proposed_user (User): User
manage_home: bool
returns:
list: The command string split into shell-like syntax
"""
command = None
if get_platform() in ('Linux', 'O... |
def generate_modify_user_command(task=None, manage_home=None):
"""Generate command to modify existing user to become the proposed user.
args:
task (dict): A proposed user and the differences between it and the existing user
returns:
list: The command string split into shell-like syntax
... |
def generate_delete_user_command(username=None, manage_home=None):
"""Generate command to delete a user.
args:
username (str): user name
manage_home (bool): manage home directory
returns:
list: The user delete command string split into shell-like syntax
"""
command = None
... |
def compare_user(passed_user=None, user_list=None):
"""Check if supplied User instance exists in supplied Users list and, if so, return the differences.
args:
passed_user (User): the user instance to check for differences
user_list (Users): the Users instance containing a list of Users instance... |
def gecos(self):
"""Force double quoted gecos.
returns:
str: The double quoted gecos.
"""
if not self._gecos:
return None
if self._gecos.startswith(text_type('\'')) and self._gecos.endswith(text_type('\'')):
self._gecos = '\"{0}\"'.format(self... |
def to_dict(self):
""" Return the user as a dict. """
public_keys = [public_key.b64encoded for public_key in self.public_keys]
return dict(name=self.name, passwd=self.passwd, uid=self.uid, gid=self.gid, gecos=self.gecos,
home_dir=self.home_dir, shell=self.shell, public_keys=p... |
def insert(self, index, value):
    """Insert a User instance at ``index``, type-checking it first.

    Validation is delegated to ``self.check`` which raises when the
    value is not acceptable for this collection.
    """
    self.check(value)
    self._user_list.insert(index, value)
def remove(self, username=None):
    """Drop every User whose name equals ``username`` from the collection."""
    survivors = [entry for entry in self._user_list if entry.name != username]
    self._user_list = survivors
def describe_users(self, users_filter=None):
"""Return a list of users matching a filter (if provided)."""
user_list = Users(oktypes=User)
for user in self._user_list:
if users_filter and (users_filter.get('name') == user.name or users_filter.get('uid') == user.uid):
... |
def from_yaml(cls, file_path=None):
"""Create collection from a YAML file."""
try:
import yaml
except ImportError: # pragma: no cover
yaml = None
if not yaml:
import sys
sys.exit('PyYAML is not installed, but is required in order to parse ... |
def from_json(cls, file_path=None):
"""Create collection from a JSON file."""
with io.open(file_path, encoding=text_type('utf-8')) as stream:
try:
users_json = json.load(stream)
except ValueError:
raise ValueError('No JSON object could be decoded')... |
def from_passwd(uid_min=None, uid_max=None):
"""Create collection from locally discovered data, e.g. /etc/passwd."""
import pwd
users = Users(oktypes=User)
passwd_list = pwd.getpwall()
if not uid_min:
uid_min = UID_MIN
if not uid_max:
uid_max = UID... |
def construct_user_list(raw_users=None):
"""Construct a list of User objects from a list of dicts."""
users = Users(oktypes=User)
for user_dict in raw_users:
public_keys = None
if user_dict.get('public_keys'):
public_keys = [PublicKey(b64encoded=x, raw=Non... |
def to_dict(self):
    """Serialize the collection as ``{'users': [<user dict>, ...]}``."""
    return {'users': [member.to_dict() for member in self]}
def export(self, file_path=None, export_format=None):
""" Write the users to a file. """
with io.open(file_path, mode='w', encoding="utf-8") as export_file:
if export_format == 'yaml':
import yaml
yaml.safe_dump(self.to_dict(), export_file, default_flow_style=... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.