repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
jhermann/rudiments
|
src/rudiments/reamed/click.py
|
Configuration.from_context
|
python
|
def from_context(cls, ctx, config_paths=None, project=None):
    """Create a configuration object, and initialize the Click context with it.

    The new instance is stored on the context as ``ctx.obj.cfg`` (creating
    the ``Bunch`` container first if the context has no object yet).
    """
    if ctx.obj is None:
        ctx.obj = Bunch()
    cfg = cls(ctx.info_name, config_paths, project=project)
    ctx.obj.cfg = cfg
    return cfg
|
Create a configuration object, and initialize the Click context with it.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L97-L102
| null |
class Configuration(object):
    """ Configuration container that is initialized early in the main command.

    The default instance is available via the Click context as ``ctx.obj.cfg``.
    Configuration is lazily loaded, on first access.
    """

    # Sentinel distinguishing "no default supplied" from a legitimate ``None``.
    NO_DEFAULT = object()
    # Default lookup locations; ``{appname}``/``{appdir}``/``{appcfg}`` are
    # filled in per-instance in ``__init__``.
    DEFAULT_PATH = [
        '/etc/{appname}.conf',
        '/etc/{appname}.d/',
        '{appcfg}.conf',
    ]
    DEFAULT_CONFIG_OPTS = dict(
        encoding='utf-8',
        default_encoding='utf-8',
    )

    # BUG FIX: the original had a stray ``@classmethod`` decorator on
    # ``__init__``, which would make ``cls(...)`` pass the class object as
    # ``name`` and break instantiation.  Removed.
    def __init__(self, name, config_paths=None, project=None):
        """ Set up static knowledge about configuration.

        ``config_paths`` is a list of PATHs to config files or directories.
        If that is non-empty, exactly those paths will be considered; an empty
        path element represents the default locations (just like in MANPATH).
        If the environment variable ``<prefix>_CONFIG`` is set, its value will
        be appended to the default locations.
        """
        self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
        self.project = project
        self.name = name
        self.config_paths = []
        self.loaded = False
        # e.g. name "my-tool" -> environment variable "MY_TOOL_CONFIG"
        env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
        defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
                             appdir=get_app_dir(self.project or self.name),
                             appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
                    for i in self.DEFAULT_PATH
                   ] + [i for i in env_config.split(os.pathsep) if i]
        for path in config_paths or []:
            for name in path.split(os.pathsep):
                if name:
                    self.config_paths.append(name)
                else:
                    # Empty PATH element stands in for the default locations
                    self.config_paths.extend(defaults)
        if not self.config_paths:
            self.config_paths = defaults

    def locations(self, exists=True):
        """ Return the location of the config file(s).

        A given directory will be scanned for ``*.conf`` files, in alphabetical order.
        Any duplicates will be eliminated.
        If ``exists`` is True, only existing configuration locations are returned.
        """
        result = []
        for config_files in self.config_paths:
            if not config_files:
                continue
            if os.path.isdir(config_files):
                config_files = [os.path.join(config_files, i)
                                for i in sorted(os.listdir(config_files))
                                if i.endswith('.conf')]
            else:
                config_files = [config_files]
            for config_file in config_files:
                if not exists or os.path.exists(config_file):
                    config_file = os.path.abspath(config_file)
                    # De-duplicate, keeping the LAST occurrence (later paths win)
                    if config_file in result:
                        result.remove(config_file)
                    result.append(config_file)
        return result

    def load(self):
        """Load configuration from the defined locations (idempotent)."""
        if not self.loaded:
            self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
            for path in self.locations():
                try:
                    part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
                except configobj.ConfigObjError as cause:
                    raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
                # Later files override earlier ones
                self.values.merge(part)
            self.loaded = True
        return self.values

    def dump(self, to=None):  # pylint: disable=invalid-name
        """Dump the merged configuration to a stream or stdout."""
        self.load().write(to or sys.stdout)

    def section(self, ctx, optional=False):
        """
        Return section of the config for a specific context (sub-command).

        Parameters:
            ctx (Context): The Click context object.
            optional (bool): If ``True``, return an empty config object when section is missing.

        Returns:
            Section: The configuration section belonging to
                the active (sub-)command (based on ``ctx.info_name``).
        """
        values = self.load()
        try:
            return values[ctx.info_name]
        except KeyError:
            if optional:
                return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
            raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))

    def get(self, name, default=NO_DEFAULT):
        """
        Return the specified name from the root section.

        Parameters:
            name (str): The name of the requested value.
            default (optional): If set, the default value to use
                instead of raising :class:`LoggedFailure` for
                unknown names.

        Returns:
            The value for `name`.

        Raises:
            LoggedFailure: The requested `name` was not found.
        """
        values = self.load()
        try:
            return values[name]
        except KeyError:
            if default is self.NO_DEFAULT:
                raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
            return default
|
jhermann/rudiments
|
src/rudiments/reamed/click.py
|
Configuration.locations
|
python
|
def locations(self, exists=True):
result = []
for config_files in self.config_paths:
if not config_files:
continue
if os.path.isdir(config_files):
config_files = [os.path.join(config_files, i)
for i in sorted(os.listdir(config_files))
if i.endswith('.conf')]
else:
config_files = [config_files]
for config_file in config_files:
if not exists or os.path.exists(config_file):
config_file = os.path.abspath(config_file)
if config_file in result:
result.remove(config_file)
result.append(config_file)
return result
|
Return the location of the config file(s).
A given directory will be scanned for ``*.conf`` files, in alphabetical order.
Any duplicates will be eliminated.
If ``exists`` is True, only existing configuration locations are returned.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L137-L161
| null |
class Configuration(object):
""" Configuration container that is initialized early in the main command.
The default instance is available via the Click context as ``ctx.obj.cfg``.
Configuration is lazily loaded, on first access.
"""
NO_DEFAULT = object()
DEFAULT_PATH = [
'/etc/{appname}.conf',
'/etc/{appname}.d/',
'{appcfg}.conf',
]
DEFAULT_CONFIG_OPTS = dict(
encoding='utf-8',
default_encoding='utf-8',
)
@classmethod
def from_context(cls, ctx, config_paths=None, project=None):
"""Create a configuration object, and initialize the Click context with it."""
if ctx.obj is None:
ctx.obj = Bunch()
ctx.obj.cfg = cls(ctx.info_name, config_paths, project=project)
return ctx.obj.cfg
def __init__(self, name, config_paths=None, project=None):
""" Set up static knowledge about configuration.
``config_paths`` is a list of PATHs to config files or directories.
If that is non-empty, exactly those paths will be considered; an empty
path element represents the default locations (just like in MANPATH).
If the environment variable ``<prefix>_CONFIG`` is set, its value will
be appended to the default locations.
"""
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
self.project = project
self.name = name
self.config_paths = []
self.loaded = False
env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
appdir=get_app_dir(self.project or self.name),
appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
for i in self.DEFAULT_PATH
] + [i for i in env_config.split(os.pathsep) if i]
for path in config_paths or []:
for name in path.split(os.pathsep):
if name:
self.config_paths.append(name)
else:
self.config_paths.extend(defaults)
if not self.config_paths:
self.config_paths = defaults
def load(self):
"""Load configuration from the defined locations."""
if not self.loaded:
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
for path in self.locations():
try:
part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
except configobj.ConfigObjError as cause:
raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
self.values.merge(part)
self.loaded = True
return self.values
def dump(self, to=None): # pylint: disable=invalid-name
"""Dump the merged configuration to a stream or stdout."""
self.load().write(to or sys.stdout)
def section(self, ctx, optional=False):
"""
Return section of the config for a specific context (sub-command).
Parameters:
ctx (Context): The Click context object.
optional (bool): If ``True``, return an empty config object when section is missing.
Returns:
Section: The configuration section belonging to
the active (sub-)command (based on ``ctx.info_name``).
"""
values = self.load()
try:
return values[ctx.info_name]
except KeyError:
if optional:
return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))
def get(self, name, default=NO_DEFAULT):
"""
Return the specified name from the root section.
Parameters:
name (str): The name of the requested value.
default (optional): If set, the default value to use
instead of raising :class:`LoggedFailure` for
unknown names.
Returns:
The value for `name`.
Raises:
LoggedFailure: The requested `name` was not found.
"""
values = self.load()
try:
return values[name]
except KeyError:
if default is self.NO_DEFAULT:
raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
return default
|
jhermann/rudiments
|
src/rudiments/reamed/click.py
|
Configuration.load
|
python
|
def load(self):
if not self.loaded:
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
for path in self.locations():
try:
part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
except configobj.ConfigObjError as cause:
raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
self.values.merge(part)
self.loaded = True
return self.values
|
Load configuration from the defined locations.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L163-L174
|
[
"def pretty_path(path, _home_re=re.compile('^' + re.escape(os.path.expanduser('~') + os.sep))):\n \"\"\"Prettify path for humans, and make it Unicode.\"\"\"\n path = format_filename(path)\n path = _home_re.sub('~' + os.sep, path)\n return path\n"
] |
class Configuration(object):
""" Configuration container that is initialized early in the main command.
The default instance is available via the Click context as ``ctx.obj.cfg``.
Configuration is lazily loaded, on first access.
"""
NO_DEFAULT = object()
DEFAULT_PATH = [
'/etc/{appname}.conf',
'/etc/{appname}.d/',
'{appcfg}.conf',
]
DEFAULT_CONFIG_OPTS = dict(
encoding='utf-8',
default_encoding='utf-8',
)
@classmethod
def from_context(cls, ctx, config_paths=None, project=None):
"""Create a configuration object, and initialize the Click context with it."""
if ctx.obj is None:
ctx.obj = Bunch()
ctx.obj.cfg = cls(ctx.info_name, config_paths, project=project)
return ctx.obj.cfg
def __init__(self, name, config_paths=None, project=None):
""" Set up static knowledge about configuration.
``config_paths`` is a list of PATHs to config files or directories.
If that is non-empty, exactly those paths will be considered; an empty
path element represents the default locations (just like in MANPATH).
If the environment variable ``<prefix>_CONFIG`` is set, its value will
be appended to the default locations.
"""
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
self.project = project
self.name = name
self.config_paths = []
self.loaded = False
env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
appdir=get_app_dir(self.project or self.name),
appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
for i in self.DEFAULT_PATH
] + [i for i in env_config.split(os.pathsep) if i]
for path in config_paths or []:
for name in path.split(os.pathsep):
if name:
self.config_paths.append(name)
else:
self.config_paths.extend(defaults)
if not self.config_paths:
self.config_paths = defaults
def locations(self, exists=True):
""" Return the location of the config file(s).
A given directory will be scanned for ``*.conf`` files, in alphabetical order.
Any duplicates will be eliminated.
If ``exists`` is True, only existing configuration locations are returned.
"""
result = []
for config_files in self.config_paths:
if not config_files:
continue
if os.path.isdir(config_files):
config_files = [os.path.join(config_files, i)
for i in sorted(os.listdir(config_files))
if i.endswith('.conf')]
else:
config_files = [config_files]
for config_file in config_files:
if not exists or os.path.exists(config_file):
config_file = os.path.abspath(config_file)
if config_file in result:
result.remove(config_file)
result.append(config_file)
return result
def dump(self, to=None): # pylint: disable=invalid-name
"""Dump the merged configuration to a stream or stdout."""
self.load().write(to or sys.stdout)
def section(self, ctx, optional=False):
"""
Return section of the config for a specific context (sub-command).
Parameters:
ctx (Context): The Click context object.
optional (bool): If ``True``, return an empty config object when section is missing.
Returns:
Section: The configuration section belonging to
the active (sub-)command (based on ``ctx.info_name``).
"""
values = self.load()
try:
return values[ctx.info_name]
except KeyError:
if optional:
return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))
def get(self, name, default=NO_DEFAULT):
"""
Return the specified name from the root section.
Parameters:
name (str): The name of the requested value.
default (optional): If set, the default value to use
instead of raising :class:`LoggedFailure` for
unknown names.
Returns:
The value for `name`.
Raises:
LoggedFailure: The requested `name` was not found.
"""
values = self.load()
try:
return values[name]
except KeyError:
if default is self.NO_DEFAULT:
raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
return default
|
jhermann/rudiments
|
src/rudiments/reamed/click.py
|
Configuration.section
|
python
|
def section(self, ctx, optional=False):
values = self.load()
try:
return values[ctx.info_name]
except KeyError:
if optional:
return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))
|
Return section of the config for a specific context (sub-command).
Parameters:
ctx (Context): The Click context object.
optional (bool): If ``True``, return an empty config object when section is missing.
Returns:
Section: The configuration section belonging to
the active (sub-)command (based on ``ctx.info_name``).
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L180-L199
| null |
class Configuration(object):
""" Configuration container that is initialized early in the main command.
The default instance is available via the Click context as ``ctx.obj.cfg``.
Configuration is lazily loaded, on first access.
"""
NO_DEFAULT = object()
DEFAULT_PATH = [
'/etc/{appname}.conf',
'/etc/{appname}.d/',
'{appcfg}.conf',
]
DEFAULT_CONFIG_OPTS = dict(
encoding='utf-8',
default_encoding='utf-8',
)
@classmethod
def from_context(cls, ctx, config_paths=None, project=None):
"""Create a configuration object, and initialize the Click context with it."""
if ctx.obj is None:
ctx.obj = Bunch()
ctx.obj.cfg = cls(ctx.info_name, config_paths, project=project)
return ctx.obj.cfg
def __init__(self, name, config_paths=None, project=None):
""" Set up static knowledge about configuration.
``config_paths`` is a list of PATHs to config files or directories.
If that is non-empty, exactly those paths will be considered; an empty
path element represents the default locations (just like in MANPATH).
If the environment variable ``<prefix>_CONFIG`` is set, its value will
be appended to the default locations.
"""
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
self.project = project
self.name = name
self.config_paths = []
self.loaded = False
env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
appdir=get_app_dir(self.project or self.name),
appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
for i in self.DEFAULT_PATH
] + [i for i in env_config.split(os.pathsep) if i]
for path in config_paths or []:
for name in path.split(os.pathsep):
if name:
self.config_paths.append(name)
else:
self.config_paths.extend(defaults)
if not self.config_paths:
self.config_paths = defaults
def locations(self, exists=True):
""" Return the location of the config file(s).
A given directory will be scanned for ``*.conf`` files, in alphabetical order.
Any duplicates will be eliminated.
If ``exists`` is True, only existing configuration locations are returned.
"""
result = []
for config_files in self.config_paths:
if not config_files:
continue
if os.path.isdir(config_files):
config_files = [os.path.join(config_files, i)
for i in sorted(os.listdir(config_files))
if i.endswith('.conf')]
else:
config_files = [config_files]
for config_file in config_files:
if not exists or os.path.exists(config_file):
config_file = os.path.abspath(config_file)
if config_file in result:
result.remove(config_file)
result.append(config_file)
return result
def load(self):
"""Load configuration from the defined locations."""
if not self.loaded:
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
for path in self.locations():
try:
part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
except configobj.ConfigObjError as cause:
raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
self.values.merge(part)
self.loaded = True
return self.values
def dump(self, to=None): # pylint: disable=invalid-name
"""Dump the merged configuration to a stream or stdout."""
self.load().write(to or sys.stdout)
def get(self, name, default=NO_DEFAULT):
"""
Return the specified name from the root section.
Parameters:
name (str): The name of the requested value.
default (optional): If set, the default value to use
instead of raising :class:`LoggedFailure` for
unknown names.
Returns:
The value for `name`.
Raises:
LoggedFailure: The requested `name` was not found.
"""
values = self.load()
try:
return values[name]
except KeyError:
if default is self.NO_DEFAULT:
raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
return default
|
jhermann/rudiments
|
src/rudiments/reamed/click.py
|
Configuration.get
|
python
|
def get(self, name, default=NO_DEFAULT):
values = self.load()
try:
return values[name]
except KeyError:
if default is self.NO_DEFAULT:
raise LoggedFailure("Configuration value '{}' not found in root section!".format(name))
return default
|
Return the specified name from the root section.
Parameters:
name (str): The name of the requested value.
default (optional): If set, the default value to use
instead of raising :class:`LoggedFailure` for
unknown names.
Returns:
The value for `name`.
Raises:
LoggedFailure: The requested `name` was not found.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/reamed/click.py#L201-L223
| null |
class Configuration(object):
""" Configuration container that is initialized early in the main command.
The default instance is available via the Click context as ``ctx.obj.cfg``.
Configuration is lazily loaded, on first access.
"""
NO_DEFAULT = object()
DEFAULT_PATH = [
'/etc/{appname}.conf',
'/etc/{appname}.d/',
'{appcfg}.conf',
]
DEFAULT_CONFIG_OPTS = dict(
encoding='utf-8',
default_encoding='utf-8',
)
@classmethod
def from_context(cls, ctx, config_paths=None, project=None):
"""Create a configuration object, and initialize the Click context with it."""
if ctx.obj is None:
ctx.obj = Bunch()
ctx.obj.cfg = cls(ctx.info_name, config_paths, project=project)
return ctx.obj.cfg
def __init__(self, name, config_paths=None, project=None):
""" Set up static knowledge about configuration.
``config_paths`` is a list of PATHs to config files or directories.
If that is non-empty, exactly those paths will be considered; an empty
path element represents the default locations (just like in MANPATH).
If the environment variable ``<prefix>_CONFIG`` is set, its value will
be appended to the default locations.
"""
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
self.project = project
self.name = name
self.config_paths = []
self.loaded = False
env_config = os.environ.get((self.name + '-config').upper().replace('-', '_'), '')
defaults = [i.format(appname=os.sep.join([self.project, self.name]) if project else self.name,
appdir=get_app_dir(self.project or self.name),
appcfg=get_app_dir(self.project) + os.sep + self.name if self.project else get_app_dir(self.name))
for i in self.DEFAULT_PATH
] + [i for i in env_config.split(os.pathsep) if i]
for path in config_paths or []:
for name in path.split(os.pathsep):
if name:
self.config_paths.append(name)
else:
self.config_paths.extend(defaults)
if not self.config_paths:
self.config_paths = defaults
def locations(self, exists=True):
""" Return the location of the config file(s).
A given directory will be scanned for ``*.conf`` files, in alphabetical order.
Any duplicates will be eliminated.
If ``exists`` is True, only existing configuration locations are returned.
"""
result = []
for config_files in self.config_paths:
if not config_files:
continue
if os.path.isdir(config_files):
config_files = [os.path.join(config_files, i)
for i in sorted(os.listdir(config_files))
if i.endswith('.conf')]
else:
config_files = [config_files]
for config_file in config_files:
if not exists or os.path.exists(config_file):
config_file = os.path.abspath(config_file)
if config_file in result:
result.remove(config_file)
result.append(config_file)
return result
def load(self):
"""Load configuration from the defined locations."""
if not self.loaded:
self.values = configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
for path in self.locations():
try:
part = configobj.ConfigObj(infile=path, **self.DEFAULT_CONFIG_OPTS)
except configobj.ConfigObjError as cause:
raise LoggedFailure("Error in file '{path}': {cause}".format(path=pretty_path(path), cause=cause))
self.values.merge(part)
self.loaded = True
return self.values
def dump(self, to=None): # pylint: disable=invalid-name
"""Dump the merged configuration to a stream or stdout."""
self.load().write(to or sys.stdout)
def section(self, ctx, optional=False):
"""
Return section of the config for a specific context (sub-command).
Parameters:
ctx (Context): The Click context object.
optional (bool): If ``True``, return an empty config object when section is missing.
Returns:
Section: The configuration section belonging to
the active (sub-)command (based on ``ctx.info_name``).
"""
values = self.load()
try:
return values[ctx.info_name]
except KeyError:
if optional:
return configobj.ConfigObj({}, **self.DEFAULT_CONFIG_OPTS)
raise LoggedFailure("Configuration section '{}' not found!".format(ctx.info_name))
|
jhermann/rudiments
|
src/rudiments/pysupport.py
|
import_name
|
python
|
def import_name(modulename, name=None):
    """Import identifier ``name`` from module ``modulename``.

    When ``name`` is omitted, ``modulename`` must carry it after a colon,
    e.g. ``'pkg.mod:thing'``.
    """
    if name is None:
        modulename, name = modulename.rsplit(':', 1)
    return getattr(__import__(modulename, globals(), {}, [name]), name)
|
Import identifier ``name`` from module ``modulename``.
If ``name`` is omitted, ``modulename`` must contain the name after the
module path, delimited by a colon.
Parameters:
modulename (str): Fully qualified module name, e.g. ``x.y.z``.
name (str): Name to import from ``modulename``.
Returns:
object: Requested object.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/pysupport.py#L25-L41
| null |
# -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" Python helpers + magic.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import imp
import sys
def load_module(modulename, modulepath):
    """ Load a Python module from a path under a specified name.

    Parameters:
        modulename (str): Fully qualified module name, e.g. ``x.y.z``.
        modulepath (str): Filename of the module.

    Returns:
        Loaded module.
    """
    # NOTE(review): the ``imp`` module is deprecated and removed in
    # Python 3.12 — confirm the supported Python range before reuse.
    if '.' in modulename:
        # Remember the parent package so the module can be attached to it below.
        modulepackage, modulebase = modulename.rsplit('.', 1)
    else:
        modulepackage = ''
    # Serialize against other importers; released in the ``finally`` below.
    imp.acquire_lock()
    try:
        # Check if module is already loaded (already-loaded modules are reused as-is)
        if modulename not in sys.modules:
            # Find module on disk and try to load it
            path, name = os.path.split(modulepath)
            name = os.path.splitext(name)[0]
            handle, path, info = imp.find_module(name, [path])
            try:
                # Load the module and put into sys.modules
                module = imp.load_module(modulename, handle, path, info)
                if modulepackage:
                    # Attach as attribute of the parent package
                    # (parent is assumed to be imported already).
                    setattr(sys.modules[modulepackage], modulebase, module)
            finally:
                # Make sure handle is closed properly
                if handle:
                    handle.close()
    finally:
        imp.release_lock()
    return sys.modules[modulename]
|
jhermann/rudiments
|
src/rudiments/pysupport.py
|
load_module
|
python
|
def load_module(modulename, modulepath):
    """ Load a Python module from a path under a specified name.

    Parameters:
        modulename (str): Fully qualified module name, e.g. ``x.y.z``.
        modulepath (str): Filename of the module.

    Returns:
        Loaded module.
    """
    # NOTE(review): ``imp`` is deprecated (removed in Python 3.12) — confirm
    # the supported Python range before reuse.
    if '.' in modulename:
        # Remember the parent package so the module can be attached to it below.
        modulepackage, modulebase = modulename.rsplit('.', 1)
    else:
        modulepackage = ''
    # Serialize against other importers; released in the ``finally`` below.
    imp.acquire_lock()
    try:
        # Check if module is already loaded
        if modulename not in sys.modules:
            # Find module on disk and try to load it
            path, name = os.path.split(modulepath)
            name = os.path.splitext(name)[0]
            handle, path, info = imp.find_module(name, [path])
            try:
                # Load the module and put into sys.modules
                module = imp.load_module(modulename, handle, path, info)
                if modulepackage:
                    # Attach as attribute of the (assumed imported) parent package
                    setattr(sys.modules[modulepackage], modulebase, module)
            finally:
                # Make sure handle is closed properly
                if handle:
                    handle.close()
    finally:
        imp.release_lock()
    return sys.modules[modulename]
|
Load a Python module from a path under a specified name.
Parameters:
modulename (str): Fully qualified module name, e.g. ``x.y.z``.
modulepath (str): Filename of the module.
Returns:
Loaded module.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/pysupport.py#L44-L79
| null |
# -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" Python helpers + magic.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import imp
import sys
def import_name(modulename, name=None):
    """ Import identifier ``name`` from module ``modulename``.

    If ``name`` is omitted, ``modulename`` must contain the name after the
    module path, delimited by a colon (e.g. ``'pkg.mod:thing'``).

    Parameters:
        modulename (str): Fully qualified module name, e.g. ``x.y.z``.
        name (str): Name to import from ``modulename``.

    Returns:
        object: Requested object.
    """
    if name is None:
        modulename, name = modulename.rsplit(':', 1)
    target = __import__(modulename, globals(), {}, [name])
    return getattr(target, name)
|
jhermann/rudiments
|
src/rudiments/www.py
|
url_as_file
|
python
|
def url_as_file(url, ext=None):
    """GET the given ``url`` and provide its content as a local file.

    The temporary file is closed before being handed out, and removed when
    the consumer is done with it, if it still exists.  ``ext`` (with or
    without a leading dot) determines the file name's extension.

    NOTE(review): this is a generator meant to be wrapped by
    ``@contextlib.contextmanager`` at the definition site — confirm the
    decorator is present where this is defined.

    Parameters:
        url (str): URL to retrieve.
        ext (str, optional): Extension for the generated filename.

    Yields:
        str: The path to a temporary file with the content of the URL.
    """
    if ext:
        ext = '.' + ext.strip('.')  # normalize extension to one leading dot
    # Hostname hint makes the temp file recognizable in /tmp listings
    url_hint = 'www-{}-'.format(urlparse(url).hostname or 'any')
    # Whole response is buffered in memory before writing
    content = requests.get(url).content
    # delete=False: file must survive the ``with`` so the consumer can open it
    with tempfile.NamedTemporaryFile(suffix=ext or '', prefix=url_hint, delete=False) as handle:
        handle.write(content)
    try:
        yield handle.name
    finally:
        # Clean up unless the consumer already removed (or moved) the file
        if os.path.exists(handle.name):
            os.remove(handle.name)
|
Context manager that GETs a given `url` and provides it as a local file.
The file is in a closed state upon entering the context,
and removed when leaving it, if still there.
To give the file name a specific extension, use `ext`;
the extension can optionally include a separating dot,
otherwise it will be added.
Parameters:
url (str): URL to retrieve.
ext (str, optional): Extension for the generated filename.
Yields:
str: The path to a temporary file with the content of the URL.
Raises:
requests.RequestException: Base exception of ``requests``, see its
docs for more detailed ones.
Example:
>>> import io, re, json
>>> with url_as_file('https://api.github.com/meta', ext='json') as meta:
... meta, json.load(io.open(meta, encoding='ascii'))['hooks']
(u'/tmp/www-api.github.com-Ba5OhD.json', [u'192.30.252.0/22'])
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/www.py#L38-L78
| null |
# -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" WWW access helpers.
You need a dependency on
`requests <http://docs.python-requests.org/en/latest/api/>`_
in your project if you use this module.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import tempfile
from contextlib import contextmanager
import requests
from ._compat import urlparse, encode_filename # pylint: disable=unused-import
__all__ = ['url_as_file']
__all__ = [encode_filename(_) for _ in __all__]
@contextmanager
|
jhermann/rudiments
|
src/rudiments/humanize.py
|
bytes2iec
|
python
|
def bytes2iec(size, compact=False):
    """Convert a size value in bytes to its equivalent in IEC notation.

    See `<http://physics.nist.gov/cuu/Units/binary.html>`_.

    Parameters:
        size (int): Number of bytes.
        compact (bool): If ``True``, the result contains no spaces.

    Return:
        String representation of ``size``.

    Raises:
        ValueError: Negative or out of bounds value for ``size``.
    """
    def render(text):
        # Compact mode drops every space, including alignment padding.
        return text.replace(' ', '') if compact else text

    if size < 0:
        raise ValueError("Negative byte size value {}".format(size))
    if size < 1024:
        return render('{:4d} bytes'.format(size))
    scaled = size
    for unit in IEC_UNITS[1:]:
        scaled /= 1024.0
        if scaled < 1024:
            return render('{:6.1f} {}'.format(scaled, unit))
    raise ValueError("Byte size value {} out of bounds".format(size))
|
Convert a size value in bytes to its equivalent in IEC notation.
See `<http://physics.nist.gov/cuu/Units/binary.html>`_.
Parameters:
size (int): Number of bytes.
compact (bool): If ``True``, the result contains no spaces.
Return:
String representation of ``size``.
Raises:
ValueError: Negative or out of bounds value for ``size``.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/humanize.py#L24-L51
|
[
"postfn = lambda text: text.replace(' ', '') if compact else text\n"
] |
# -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" I/O of common values in forms understood by humans.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
IEC_UNITS = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
def iec2bytes(size_spec, only_positive=True):
    """Convert a size specification, optionally carrying a scaling unit
    in IEC notation, into a plain number of bytes.

    Parameters:
        size_spec (str): Number, optionally followed by a unit.
        only_positive (bool): Allow only positive values?

    Return:
        Numeric bytes size.

    Raises:
        ValueError: Unknown unit specifiers, or bad leading integer.
    """
    scale = 1
    try:
        # Plain numbers pass straight through; strings raise TypeError here
        # and fall into the parsing branch below.
        size = int(0 + size_spec)
    except (TypeError, ValueError):
        spec = size_spec.strip().lower()
        for power, unit in enumerate(IEC_UNITS[1:], 1):
            unit = unit.lower()
            if spec.endswith(unit):
                spec = spec[:-len(unit)]
            elif spec.endswith(unit[0]):
                # Single-letter shorthand, e.g. "2k" for "2KiB".
                spec = spec[:-1]
            else:
                continue
            scale = 2 ** (10 * power)
            break
        else:
            # No unit matched; tolerate a trailing 'b' for plain bytes.
            if spec.endswith('b'):
                spec = spec[:-1]
        try:
            spec = spec.strip()
            size = float(spec) if '.' in spec else int(spec, base=0)
        except (TypeError, ValueError) as cause:
            raise ValueError('Invalid bytes size specification {!r}: {}'.format(size_spec, cause))
    if only_positive and size < 0:
        raise ValueError('Invalid negative bytes size specification {!r}'.format(size_spec))
    return int(size * scale)
def merge_adjacent(numbers, indicator='..', base=0):
    """ Merge adjacent numbers in an iterable of numbers.

        Parameters:
            numbers (iterable): Integers or numeric strings; any iterable
                is accepted, including generators.
            indicator (str): Delimiter to indicate generated ranges.
            base (int): Passed to the `int()` conversion when comparing numbers.

        Return:
            list of str: Condensed sequence with either ranges or isolated numbers.
    """
    # Pair each item with its numeric value and sort numerically, keeping
    # the original representation for the output strings.
    integers = sorted((int("%s" % i, base), i) for i in numbers)
    # Fix: measure the materialized list, not ``numbers`` — the input may
    # be a generator that was consumed (and has no len()) by this point.
    total = len(integers)
    result = []
    idx = 0
    while idx < total:
        end = idx + 1
        # Extend the run while consecutive values differ by exactly one.
        while end < total and integers[end - 1][0] == integers[end][0] - 1:
            end += 1
        if end > idx + 1:
            result.append("%s%s%s" % (integers[idx][1], indicator, integers[end - 1][1]))
        else:
            result.append("%s" % integers[idx][1])
        idx = end
    return result
|
jhermann/rudiments
|
src/rudiments/humanize.py
|
iec2bytes
|
python
|
def iec2bytes(size_spec, only_positive=True):
scale = 1
try:
size = int(0 + size_spec) # return numeric values as-is
except (TypeError, ValueError):
spec = size_spec.strip().lower()
for exp, iec_unit in enumerate(IEC_UNITS[1:], 1):
iec_unit = iec_unit.lower()
if spec.endswith(iec_unit):
spec = spec[:-len(iec_unit)]
scale = 2 ** (10 * exp)
break
elif spec.endswith(iec_unit[0]):
spec = spec[:-1]
scale = 2 ** (10 * exp)
break
else:
if spec.endswith('b'):
spec = spec[:-1]
try:
if '.' in spec:
size = float(spec.strip())
else:
size = int(spec.strip(), base=0)
except (TypeError, ValueError) as cause:
raise ValueError('Invalid bytes size specification {!r}: {}'.format(size_spec, cause))
if only_positive and size < 0:
raise ValueError('Invalid negative bytes size specification {!r}'.format(size_spec))
return int(size * scale)
|
Convert a size specification, optionally containing a scaling
unit in IEC notation, to a number of bytes.
Parameters:
size_spec (str): Number, optionally followed by a unit.
only_positive (bool): Allow only positive values?
Return:
Numeric bytes size.
Raises:
ValueError: Unknown unit specifiers, or bad leading integer.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/humanize.py#L54-L99
| null |
# -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" I/O of common values in forms understood by humans.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
IEC_UNITS = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
def bytes2iec(size, compact=False):
""" Convert a size value in bytes to its equivalent in IEC notation.
See `<http://physics.nist.gov/cuu/Units/binary.html>`_.
Parameters:
size (int): Number of bytes.
compact (bool): If ``True``, the result contains no spaces.
Return:
String representation of ``size``.
Raises:
ValueError: Negative or out of bounds value for ``size``.
"""
postfn = lambda text: text.replace(' ', '') if compact else text
if size < 0:
raise ValueError("Negative byte size value {}".format(size))
if size < 1024:
return postfn('{:4d} bytes'.format(size))
scaled = size
for iec_unit in IEC_UNITS[1:]:
scaled /= 1024.0
if scaled < 1024:
return postfn('{:6.1f} {}'.format(scaled, iec_unit))
raise ValueError("Byte size value {} out of bounds".format(size))
def merge_adjacent(numbers, indicator='..', base=0):
""" Merge adjacent numbers in an iterable of numbers.
Parameters:
numbers (list): List of integers or numeric strings.
indicator (str): Delimiter to indicate generated ranges.
base (int): Passed to the `int()` conversion when comparing numbers.
Return:
list of str: Condensed sequence with either ranges or isolated numbers.
"""
integers = list(sorted([(int("%s" % i, base), i) for i in numbers]))
idx = 0
result = []
while idx < len(numbers):
end = idx + 1
while end < len(numbers) and integers[end-1][0] == integers[end][0] - 1:
end += 1
result.append("%s%s%s" % (integers[idx][1], indicator, integers[end-1][1])
if end > idx + 1
else "%s" % integers[idx][1])
idx = end
return result
|
jhermann/rudiments
|
src/rudiments/humanize.py
|
merge_adjacent
|
python
|
def merge_adjacent(numbers, indicator='..', base=0):
integers = list(sorted([(int("%s" % i, base), i) for i in numbers]))
idx = 0
result = []
while idx < len(numbers):
end = idx + 1
while end < len(numbers) and integers[end-1][0] == integers[end][0] - 1:
end += 1
result.append("%s%s%s" % (integers[idx][1], indicator, integers[end-1][1])
if end > idx + 1
else "%s" % integers[idx][1])
idx = end
return result
|
Merge adjacent numbers in an iterable of numbers.
Parameters:
numbers (list): List of integers or numeric strings.
indicator (str): Delimiter to indicate generated ranges.
base (int): Passed to the `int()` conversion when comparing numbers.
Return:
list of str: Condensed sequence with either ranges or isolated numbers.
|
train
|
https://github.com/jhermann/rudiments/blob/028ec7237946115c7b18e50557cbc5f6b824653e/src/rudiments/humanize.py#L102-L125
| null |
# -*- coding: utf-8 -*-
# pylint: disable=bad-continuation
""" I/O of common values in forms understood by humans.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
IEC_UNITS = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
def bytes2iec(size, compact=False):
""" Convert a size value in bytes to its equivalent in IEC notation.
See `<http://physics.nist.gov/cuu/Units/binary.html>`_.
Parameters:
size (int): Number of bytes.
compact (bool): If ``True``, the result contains no spaces.
Return:
String representation of ``size``.
Raises:
ValueError: Negative or out of bounds value for ``size``.
"""
postfn = lambda text: text.replace(' ', '') if compact else text
if size < 0:
raise ValueError("Negative byte size value {}".format(size))
if size < 1024:
return postfn('{:4d} bytes'.format(size))
scaled = size
for iec_unit in IEC_UNITS[1:]:
scaled /= 1024.0
if scaled < 1024:
return postfn('{:6.1f} {}'.format(scaled, iec_unit))
raise ValueError("Byte size value {} out of bounds".format(size))
def iec2bytes(size_spec, only_positive=True):
""" Convert a size specification, optionally containing a scaling
unit in IEC notation, to a number of bytes.
Parameters:
size_spec (str): Number, optionally followed by a unit.
only_positive (bool): Allow only positive values?
Return:
Numeric bytes size.
Raises:
ValueError: Unknown unit specifiers, or bad leading integer.
"""
scale = 1
try:
size = int(0 + size_spec) # return numeric values as-is
except (TypeError, ValueError):
spec = size_spec.strip().lower()
for exp, iec_unit in enumerate(IEC_UNITS[1:], 1):
iec_unit = iec_unit.lower()
if spec.endswith(iec_unit):
spec = spec[:-len(iec_unit)]
scale = 2 ** (10 * exp)
break
elif spec.endswith(iec_unit[0]):
spec = spec[:-1]
scale = 2 ** (10 * exp)
break
else:
if spec.endswith('b'):
spec = spec[:-1]
try:
if '.' in spec:
size = float(spec.strip())
else:
size = int(spec.strip(), base=0)
except (TypeError, ValueError) as cause:
raise ValueError('Invalid bytes size specification {!r}: {}'.format(size_spec, cause))
if only_positive and size < 0:
raise ValueError('Invalid negative bytes size specification {!r}'.format(size_spec))
return int(size * scale)
|
quantmind/pulsar-odm
|
odm/mapper.py
|
model_base
|
python
|
def model_base(bind_label=None, info=None):
    """Create a base declarative class.

    :param bind_label: optional engine bind label, stored in the table info
    :param info: optional table ``info`` dict to seed ``__table_args__`` with
    :return: an abstract ``Model`` base class
    """
    Model = type('Model', (BaseModel,), {'__odm_abstract__': True})
    # Fix: honor a caller-supplied info dict — previously ``info = {}``
    # unconditionally discarded the ``info`` argument.
    if info is None:
        info = {}
    Model.__table_args__ = table_args(info=info)
    if bind_label:
        # ``table_args`` keeps a reference to ``info``, so mutating it here
        # is reflected in ``Model.__table_args__``.
        info['bind_label'] = bind_label
    return Model
|
Create a base declarative class
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L114-L122
|
[
"def table_args(*args, **kwargs):\n targs = ()\n tkwargs = {}\n\n if args:\n if hasattr(args[0], '__table_args__'):\n targs = args[0].__table_args__\n targs, tkwargs = targs[:-1], targs[-1].copy()\n args = args[1:]\n\n targs += args\n\n for key, value in kwargs.items():\n if isinstance(value, dict) and key in tkwargs:\n new_value = tkwargs[key].copy()\n new_value.update(value)\n value = new_value\n tkwargs[key] = value\n\n return targs + (tkwargs,)\n"
] |
import logging
import sys
from copy import copy
from inspect import getmodule
from contextlib import contextmanager
from collections import OrderedDict
import sqlalchemy
from sqlalchemy.engine import url
from sqlalchemy.engine.strategies import PlainEngineStrategy
from sqlalchemy import MetaData, Table, event
from sqlalchemy.ext.declarative.api import (declarative_base, declared_attr,
_as_declarative, _add_attribute)
from sqlalchemy.orm.session import Session
from sqlalchemy.orm import object_session
from sqlalchemy.schema import DDL
from pulsar.api import ImproperlyConfigured
from .utils import database_operation
from . import dialects # noqa
logger = logging.getLogger('pulsar.odm')
def create_engine(*args, **kwargs):
kwargs.setdefault('strategy', 'odm')
return sqlalchemy.create_engine(*args, **kwargs)
class OdmEngineStrategy(PlainEngineStrategy):
name = 'odm'
def create(self, name_or_url, **kwargs):
# create url.URL object
u = url.make_url(name_or_url)
if 'pool_size' in u.query:
kwargs['pool_size'] = int(u.query.pop('pool_size'))
if 'pool_timeout' in u.query:
kwargs['pool_timeout'] = float(u.query.pop('pool_timeout'))
return super().create(name_or_url, **kwargs)
OdmEngineStrategy()
class OdmMeta(type):
def __new__(cls, name, bases, attrs):
abstract = attrs.pop('__odm_abstract__', False)
klass = super().__new__(cls, name, bases, attrs)
if not abstract and not isinstance(klass, DeclarativeMeta):
module = getmodule(klass)
models = getattr(module, '__odm_models__', None)
if models is None:
models = OrderedDict()
module.__odm_models__ = models
name = klass.__name__.lower()
models[name] = klass
return klass
class DeclarativeMeta(OdmMeta):
def __init__(cls, classname, bases, dict_):
if '_decl_class_registry' not in cls.__dict__:
_as_declarative(cls, classname, cls.__dict__)
type.__init__(cls, classname, bases, dict_)
def __setattr__(cls, key, value):
_add_attribute(cls, key, value)
class BaseModel(metaclass=OdmMeta):
__odm_abstract__ = True
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@classmethod
def create_table(cls, name, *columns, **kwargs):
targs = table_args(cls, **kwargs)
args, kwargs = targs[:-1], targs[-1]
table = Table(name, MetaData(), *columns, *args, **kwargs)
return table
def table_args(*args, **kwargs):
    """Merge ``__table_args__`` tuples with keyword overrides.

    When the first positional argument carries a ``__table_args__``
    attribute, its positional entries and trailing keyword dict seed the
    result; remaining positional args are appended, and ``kwargs`` are
    merged in (dict values are merged key-wise rather than replaced).

    :return: tuple of positional table args ending with a keyword dict
    """
    positional = ()
    keyword = {}

    if args and hasattr(args[0], '__table_args__'):
        inherited = args[0].__table_args__
        positional, keyword = inherited[:-1], inherited[-1].copy()
        args = args[1:]

    positional += args

    for key, value in kwargs.items():
        if isinstance(value, dict) and key in keyword:
            # Merge nested dicts instead of overwriting the inherited one.
            merged = keyword[key].copy()
            merged.update(value)
            value = merged
        keyword[key] = value

    return positional + (keyword,)
def module_tables(module):
for name, table in vars(module).items():
if isinstance(table, Table):
yield table
def copy_models(module_from, module_to):
    """Copy the model registry (and module-level tables) from one module
    to another.

    :param module_from: source module object or importable module name
    :param module_to: destination module object or importable module name
    :return: the copied registry, or ``None`` when the source has none
    """
    source = get_module(module_from)
    target = get_module(module_to)
    registry = get_models(source)
    if registry:
        registry = registry.copy()
        registry.update((table.key, table) for table in module_tables(source))
        target.__odm_models__ = registry
        return registry
def move_models(module_from, module_to):
module_from = get_module(module_from)
if copy_models(module_from, module_to):
del module_from.__odm_models__
def get_module(module_or_name):
    """Resolve *module_or_name* to a module object.

    Accepts either a module name (looked up in ``sys.modules``) or any
    object, whose defining module is located via ``inspect.getmodule``.
    """
    if not isinstance(module_or_name, str):
        return getmodule(module_or_name)
    return sys.modules[module_or_name]
def get_models(module):
    """Return the ``__odm_models__`` registry of *module*, or ``None``.

    :param module: a module object or importable module name
    :return: mapping of model name to class, or ``None`` when the module
        has no registered models
    """
    target = get_module(module)
    return getattr(target, '__odm_models__', None)
Model = model_base()
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Set up models and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
class OdmSession(Session):
def __init__(self, mapper, **options):
self.mapper = mapper
super().__init__(**options)
|
quantmind/pulsar-odm
|
odm/mapper.py
|
copy_models
|
python
|
def copy_models(module_from, module_to):
module_from = get_module(module_from)
module_to = get_module(module_to)
models = get_models(module_from)
if models:
models = models.copy()
models.update(((t.key, t) for t in module_tables(module_from)))
module_to.__odm_models__ = models
return models
|
Copy models from one module to another
:param module_from:
:param module_to:
:return:
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L131-L144
|
[
"def module_tables(module):\n for name, table in vars(module).items():\n if isinstance(table, Table):\n yield table\n",
"def get_module(module_or_name):\n if isinstance(module_or_name, str):\n return sys.modules[module_or_name]\n else:\n return getmodule(module_or_name)\n",
"def get_models(module):\n \"\"\"Get models from a module\n :param module:\n :return:\n \"\"\"\n return getattr(get_module(module), '__odm_models__', None)\n"
] |
import logging
import sys
from copy import copy
from inspect import getmodule
from contextlib import contextmanager
from collections import OrderedDict
import sqlalchemy
from sqlalchemy.engine import url
from sqlalchemy.engine.strategies import PlainEngineStrategy
from sqlalchemy import MetaData, Table, event
from sqlalchemy.ext.declarative.api import (declarative_base, declared_attr,
_as_declarative, _add_attribute)
from sqlalchemy.orm.session import Session
from sqlalchemy.orm import object_session
from sqlalchemy.schema import DDL
from pulsar.api import ImproperlyConfigured
from .utils import database_operation
from . import dialects # noqa
logger = logging.getLogger('pulsar.odm')
def create_engine(*args, **kwargs):
kwargs.setdefault('strategy', 'odm')
return sqlalchemy.create_engine(*args, **kwargs)
class OdmEngineStrategy(PlainEngineStrategy):
name = 'odm'
def create(self, name_or_url, **kwargs):
# create url.URL object
u = url.make_url(name_or_url)
if 'pool_size' in u.query:
kwargs['pool_size'] = int(u.query.pop('pool_size'))
if 'pool_timeout' in u.query:
kwargs['pool_timeout'] = float(u.query.pop('pool_timeout'))
return super().create(name_or_url, **kwargs)
OdmEngineStrategy()
class OdmMeta(type):
def __new__(cls, name, bases, attrs):
abstract = attrs.pop('__odm_abstract__', False)
klass = super().__new__(cls, name, bases, attrs)
if not abstract and not isinstance(klass, DeclarativeMeta):
module = getmodule(klass)
models = getattr(module, '__odm_models__', None)
if models is None:
models = OrderedDict()
module.__odm_models__ = models
name = klass.__name__.lower()
models[name] = klass
return klass
class DeclarativeMeta(OdmMeta):
def __init__(cls, classname, bases, dict_):
if '_decl_class_registry' not in cls.__dict__:
_as_declarative(cls, classname, cls.__dict__)
type.__init__(cls, classname, bases, dict_)
def __setattr__(cls, key, value):
_add_attribute(cls, key, value)
class BaseModel(metaclass=OdmMeta):
__odm_abstract__ = True
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@classmethod
def create_table(cls, name, *columns, **kwargs):
targs = table_args(cls, **kwargs)
args, kwargs = targs[:-1], targs[-1]
table = Table(name, MetaData(), *columns, *args, **kwargs)
return table
def table_args(*args, **kwargs):
targs = ()
tkwargs = {}
if args:
if hasattr(args[0], '__table_args__'):
targs = args[0].__table_args__
targs, tkwargs = targs[:-1], targs[-1].copy()
args = args[1:]
targs += args
for key, value in kwargs.items():
if isinstance(value, dict) and key in tkwargs:
new_value = tkwargs[key].copy()
new_value.update(value)
value = new_value
tkwargs[key] = value
return targs + (tkwargs,)
def model_base(bind_label=None, info=None):
"""Create a base declarative class
"""
Model = type('Model', (BaseModel,), {'__odm_abstract__': True})
info = {}
Model.__table_args__ = table_args(info=info)
if bind_label:
info['bind_label'] = bind_label
return Model
def module_tables(module):
for name, table in vars(module).items():
if isinstance(table, Table):
yield table
def move_models(module_from, module_to):
module_from = get_module(module_from)
if copy_models(module_from, module_to):
del module_from.__odm_models__
def get_module(module_or_name):
if isinstance(module_or_name, str):
return sys.modules[module_or_name]
else:
return getmodule(module_or_name)
def get_models(module):
"""Get models from a module
:param module:
:return:
"""
return getattr(get_module(module), '__odm_models__', None)
Model = model_base()
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Set up models and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
class OdmSession(Session):
def __init__(self, mapper, **options):
self.mapper = mapper
super().__init__(**options)
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.register
|
python
|
def register(self, model, **attr):
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
|
Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L223-L259
|
[
"def register(self, model, **attr):\n \"\"\"Register a model or a table with this mapper\n\n :param model: a table or a :class:`.BaseModel` class\n :return: a Model class or a table\n \"\"\"\n metadata = self.metadata\n if not isinstance(model, Table):\n model_name = self._create_model(model, **attr)\n if not model_name:\n return\n model, name = model_name\n table = model.__table__\n self._declarative_register[name] = model\n\n if name in self._bases:\n for model in self._bases.pop(name):\n self.register(model)\n else:\n table = model.tometadata(metadata)\n model = table\n\n # Register engine\n engine = None\n label = table.info.get('bind_label')\n keys = ('%s.%s' % (label, table.key),\n label, None) if label else (None,)\n #\n # Find the engine for this table\n for key in keys:\n engine = self.get_engine(key)\n if engine:\n break\n assert engine\n self.binds[table] = engine\n\n return model\n",
"def get_engine(self, key=None):\n \"\"\"Get an engine by key\n \"\"\"\n if key in self._engines:\n return self._engines[key]\n",
"def _create_model(self, model):\n model_name = model.__name__\n meta = type(self._base_declarative)\n if isinstance(model, meta):\n raise ImproperlyConfigured('Cannot register declarative classes '\n 'only mixins allowed')\n base = getattr(model, '__inherit_from__', None)\n if base:\n if base not in self._declarative_register:\n models = self._bases.get(base)\n if not models:\n self._bases[base] = models = []\n models.append(model)\n return\n else:\n base = self._declarative_register[base]\n else:\n base = self._base_declarative\n\n #\n # Create SqlAlchemy Model\n model = meta(model_name, (model, base), {})\n create = getattr(model, '__create_sql__', None)\n name = model_name.lower()\n if create:\n event.listen(self.metadata,\n 'after_create',\n DDL(create.format({'name': name})))\n drop = getattr(model, '__drop_sql__', None)\n if not drop:\n logger.warning('Model %s has create statement but not drop. '\n 'To mute this warning add a __drop_sql__ '\n 'statement in the model class', name)\n else:\n event.listen(self.metadata,\n 'before_drop',\n DDL(drop.format({'name': name})))\n\n return model, name\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.create_table
|
python
|
def create_table(self, name, *columns, **kwargs):
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
|
Create a new table with the same metadata and info
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L274-L279
|
[
"def table_args(*args, **kwargs):\n targs = ()\n tkwargs = {}\n\n if args:\n if hasattr(args[0], '__table_args__'):\n targs = args[0].__table_args__\n targs, tkwargs = targs[:-1], targs[-1].copy()\n args = args[1:]\n\n targs += args\n\n for key, value in kwargs.items():\n if isinstance(value, dict) and key in tkwargs:\n new_value = tkwargs[key].copy()\n new_value.update(value)\n value = new_value\n tkwargs[key] = value\n\n return targs + (tkwargs,)\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.database_create
|
python
|
def database_create(self, database, **params):
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
|
Create databases for each engine and return a new :class:`.Mapper`.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L281-L292
|
[
"def copy(self, binds):\n return self.__class__(binds)\n",
"def keys_engines(self):\n return self._engines.items()\n",
"def _database_create(self, engine, database):\n \"\"\"Create a new database and return a new url representing\n a connection to the new database\n \"\"\"\n logger.info('Creating database \"%s\" in \"%s\"', database, engine)\n database_operation(engine, 'create', database)\n url = copy(engine.url)\n url.database = database\n return str(url)\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.database_exist
|
python
|
def database_exist(self):
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
|
Create databases for each engine and return a new :class:`.Mapper`.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L294-L301
|
[
"def keys_engines(self):\n return self._engines.items()\n",
"def _database_exist(self, engine):\n return database_operation(engine, 'exists')\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_all(self):
    """Return a dictionary mapping each engine to its databases."""
    return {engine: self._database_all(engine) for engine in self.engines()}
def database_drop(self, database=None, **params):
    """Drop *database* (a name, or a callable ``engine -> name``) on every engine."""
    for engine in self.engines():
        name = database(engine) if callable(database) else database
        assert name, "Cannot drop database, no db name given"
        self._database_drop(engine, name)
def tables(self):
    """Return ``(url, table_names)`` pairs for every engine that has tables."""
    result = []
    for engine in self.engines():
        names = engine.table_names()
        if names:
            result.append((str(engine.url), names))
    return result
def table_create(self, remove_existing=False):
    """Create all tables on their bound engines.

    :param remove_existing: accepted for backward compatibility; currently unused.
    """
    for engine in self.engines():
        tables = self._get_tables(engine, create_drop=True)
        logger.info('Create all tables for %s', engine)
        # The previous ``try/except Exception as exc: raise`` was a no-op
        # (a bare re-raise with an unused ``exc``) and has been removed.
        self.metadata.create_all(engine, tables=tables)
def table_drop(self):
    """Drop all tables from their bound engines."""
    for engine in self.engines():
        tables = self._get_tables(engine, create_drop=True)
        logger.info('Drop all tables for %s', engine)
        self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
          commit=False, **options):
    """Provide a transactional scope around a series of operations.

    ``expire_on_commit`` defaults to ``False`` so that instances remain
    usable outside the session.  When no *session* is supplied a new one
    is created, committed on success and closed on exit; a caller-owned
    session is neither committed (unless *commit* is true) nor closed.
    """
    if not session:
        # We own the session: commit on success and close afterwards.
        commit = True
        session = self.session(expire_on_commit=expire_on_commit, **options)
    else:
        close = False
    try:
        yield session
        # Commit inside the try so a failing commit still rolls back.
        if commit:
            session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        if close:
            session.close()
def session(self, **options):
    """Create a new :class:`OdmSession` bound to this mapper's engines."""
    options['binds'] = self.binds
    return OdmSession(self, **options)
def session_from_object(self, *objs):
    """Return the session of the first object bound to one, or ``None``."""
    for obj in objs:
        found = object_session(obj)
        if found is not None:
            return found
def dialect(self, key):
    """Return the SQLAlchemy dialect used by the model/table named *key*."""
    table = self[key].__table__
    return self.binds[table].dialect
def get_engine(self, key=None):
    """Return the engine registered under *key* (``None`` = default),
    or ``None`` when no such engine exists."""
    return self._engines.get(key)
def engines(self):
    """Iterate over all configured engines."""
    return self._engines.values()
def keys_engines(self):
    """Iterate over ``(key, engine)`` pairs; the default engine has key ``None``."""
    return self._engines.items()
def close(self):
    """Dispose every engine's connection pool."""
    for engine in self.engines():
        engine.dispose()
# INTERNALS
def _create_model(self, model, **attr):
    """Build a declarative model class from the *model* mixin.

    :return: ``(model_class, name)`` or ``None`` when creation is deferred
        because the ``__inherit_from__`` base is not yet registered.
    ``__create_sql__`` / ``__drop_sql__`` attributes, when present, are
    hooked into the metadata create/drop DDL events.
    """
    model_name = model.__name__
    meta = type(self._base_declarative)
    if isinstance(model, meta):
        raise ImproperlyConfigured('Cannot register declarative classes '
                                   'only mixins allowed')
    base = getattr(model, '__inherit_from__', None)
    if base:
        if base not in self._declarative_register:
            # Base not registered yet: queue this mixin and bail out.
            self._bases.setdefault(base, []).append(model)
            return
        base = self._declarative_register[base]
    else:
        base = self._base_declarative
    #
    # Create the SQLAlchemy declarative model
    model = meta(model_name, (model, base), {})
    name = model_name.lower()
    create = getattr(model, '__create_sql__', None)
    if create:
        # BUG FIX: ``str.format`` substitutes named fields from keyword
        # arguments; the previous ``create.format({'name': name})`` passed
        # the dict positionally and never filled in ``{name}``.
        event.listen(self.metadata,
                     'after_create',
                     DDL(create.format(name=name)))
        drop = getattr(model, '__drop_sql__', None)
        if not drop:
            logger.warning('Model %s has create statement but not drop. '
                           'To mute this warning add a __drop_sql__ '
                           'statement in the model class', name)
        else:
            event.listen(self.metadata,
                         'before_drop',
                         DDL(drop.format(name=name)))
    return model, name
def _get_tables(self, engine, create_drop=False):
    """Return the tables bound to *engine*.

    With ``create_drop=True``, tables whose registered model defines a
    custom ``__create_sql__`` are excluded (they are handled by DDL events).
    """
    selected = []
    for table, bound in self.binds.items():
        if bound != engine:
            continue
        if (create_drop and table.key in self._declarative_register
                and hasattr(self[table.key], '__create_sql__')):
            continue
        selected.append(table)
    return selected
def _database_all(self, engine):
    """List all databases available on *engine*."""
    return database_operation(engine, 'all')
def _database_create(self, engine, database):
    """Create *database* on *engine* and return the url string of a
    connection to the new database."""
    logger.info('Creating database "%s" in "%s"', database, engine)
    database_operation(engine, 'create', database)
    new_url = copy(engine.url)
    new_url.database = database
    return str(new_url)
def _database_drop(self, engine, database):
    """Drop *database* from *engine*."""
    logger.info('dropping database "%s" from "%s"', database, engine)
    database_operation(engine, 'drop', database)
def _database_exist(self, engine):
    """Return whether the database of *engine* exists."""
    return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.database_all
|
python
|
def database_all(self):
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
|
Return a dictionary mapping engines with databases
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L303-L309
|
[
"def engines(self):\n \"\"\"Iterator over all engines\n \"\"\"\n return self._engines.values()\n",
"def _database_all(self, engine):\n return database_operation(engine, 'all')\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.table_create
|
python
|
def table_create(self, remove_existing=False):
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
|
Creates all tables.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L327-L336
|
[
"def engines(self):\n \"\"\"Iterator over all engines\n \"\"\"\n return self._engines.values()\n",
"def _get_tables(self, engine, create_drop=False):\n tables = []\n for table, eng in self.binds.items():\n if eng == engine:\n if table.key in self._declarative_register:\n model = self[table.key]\n if create_drop and hasattr(model, '__create_sql__'):\n continue\n tables.append(table)\n return tables\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.table_drop
|
python
|
def table_drop(self):
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
|
Drops all tables.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L338-L344
|
[
"def engines(self):\n \"\"\"Iterator over all engines\n \"\"\"\n return self._engines.values()\n",
"def _get_tables(self, engine, create_drop=False):\n tables = []\n for table, eng in self.binds.items():\n if eng == engine:\n if table.key in self._declarative_register:\n model = self[table.key]\n if create_drop and hasattr(model, '__create_sql__'):\n continue\n tables.append(table)\n return tables\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper.begin
|
python
|
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
|
Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L347-L369
|
[
"def session(self, **options):\n options['binds'] = self.binds\n return OdmSession(self, **options)\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_create(self, engine, database):
"""Create a new database and return a new url representing
a connection to the new database
"""
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
odm/mapper.py
|
Mapper._database_create
|
python
|
def _database_create(self, engine, database):
logger.info('Creating database "%s" in "%s"', database, engine)
database_operation(engine, 'create', database)
url = copy(engine.url)
url.database = database
return str(url)
|
Create a new database and return a new url representing
a connection to the new database
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/mapper.py#L459-L467
|
[
"def database_operation(engine, oper, *args):\n operation = _database_operation(engine, oper)\n return operation(engine, *args)\n"
] |
class Mapper:
"""SQLAlchemy wrapper
.. attribute:: binds
Dictionary of labels-engine pairs. The "default" label is always
present and it is used for tables without `bind_label` in their
`info` dictionary.
"""
def __init__(self, binds):
# Setup models and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self._engines = {}
self._declarative_register = {}
self._bases = {}
self._base_declarative = declarative_base(name='OdmBase',
metaclass=DeclarativeMeta)
self.binds = {}
self.is_green = False
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
engine = create_engine(bind)
dialect = engine.dialect
# Dialect requires Green Pool
if getattr(dialect, 'is_green', False):
self.is_green = True
self._engines[key] = engine
def __getitem__(self, model):
return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
@property
def metadata(self):
"""Returns the :class:`~sqlalchemy.Metadata` for this mapper
"""
return self._base_declarative.metadata
def copy(self, binds):
return self.__class__(binds)
def register(self, model, **attr):
"""Register a model or a table with this mapper
:param model: a table or a :class:`.BaseModel` class
:return: a Model class or a table
"""
metadata = self.metadata
if not isinstance(model, Table):
model_name = self._create_model(model, **attr)
if not model_name:
return
model, name = model_name
table = model.__table__
self._declarative_register[name] = model
if name in self._bases:
for model in self._bases.pop(name):
self.register(model)
else:
table = model.tometadata(metadata)
model = table
# Register engine
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
#
# Find the engine for this table
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
self.binds[table] = engine
return model
def register_module(self, module, exclude=None):
module = get_module(module)
models = get_models(module)
exclude = set(exclude or ())
if models:
for name, model in models.items():
if name in exclude:
continue
self.register(model)
for table in module_tables(module):
if table.key not in exclude:
self.register(table)
def create_table(self, name, *columns, **kwargs):
"""Create a new table with the same metadata and info
"""
targs = table_args(**kwargs)
args, kwargs = targs[:-1], targs[-1]
return Table(name, self.metadata, *columns, *args, **kwargs)
def database_create(self, database, **params):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
dbname = database
for key, engine in self.keys_engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot create a database, no db name given"
key = key if key else 'default'
binds[key] = self._database_create(engine, dbname)
return self.copy(binds)
def database_exist(self):
"""Create databases for each engine and return a new :class:`.Mapper`.
"""
binds = {}
for key, engine in self.keys_engines():
key = key if key else 'default'
binds[key] = self._database_exist(engine)
return binds
def database_all(self):
"""Return a dictionary mapping engines with databases
"""
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
def database_drop(self, database=None, **params):
dbname = database
for engine in self.engines():
if hasattr(database, '__call__'):
dbname = database(engine)
assert dbname, "Cannot drop database, no db name given"
self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
def table_create(self, remove_existing=False):
"""Creates all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Create all tables for %s', engine)
try:
self.metadata.create_all(engine, tables=tables)
except Exception as exc:
raise
def table_drop(self):
"""Drops all tables.
"""
for engine in self.engines():
tables = self._get_tables(engine, create_drop=True)
logger.info('Drop all tables for %s', engine)
self.metadata.drop_all(engine, tables=tables)
@contextmanager
def begin(self, close=True, expire_on_commit=False, session=None,
commit=False, **options):
"""Provide a transactional scope around a series of operations.
By default, ``expire_on_commit`` is set to False so that instances
can be used outside the session.
"""
if not session:
commit = True
session = self.session(expire_on_commit=expire_on_commit,
**options)
else:
close = False
try:
yield session
if commit:
session.commit()
except Exception:
session.rollback()
raise
finally:
if close:
session.close()
def session(self, **options):
options['binds'] = self.binds
return OdmSession(self, **options)
def session_from_object(self, *objs):
for obj in objs:
session = object_session(obj)
if session is not None:
return session
def dialect(self, key):
"""Dialect object for a model/table name
"""
return self.binds[self[key].__table__].dialect
def get_engine(self, key=None):
"""Get an engine by key
"""
if key in self._engines:
return self._engines[key]
def engines(self):
"""Iterator over all engines
"""
return self._engines.values()
def keys_engines(self):
return self._engines.items()
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _create_model(self, model):
model_name = model.__name__
meta = type(self._base_declarative)
if isinstance(model, meta):
raise ImproperlyConfigured('Cannot register declarative classes '
'only mixins allowed')
base = getattr(model, '__inherit_from__', None)
if base:
if base not in self._declarative_register:
models = self._bases.get(base)
if not models:
self._bases[base] = models = []
models.append(model)
return
else:
base = self._declarative_register[base]
else:
base = self._base_declarative
#
# Create SqlAlchemy Model
model = meta(model_name, (model, base), {})
create = getattr(model, '__create_sql__', None)
name = model_name.lower()
if create:
event.listen(self.metadata,
'after_create',
DDL(create.format({'name': name})))
drop = getattr(model, '__drop_sql__', None)
if not drop:
logger.warning('Model %s has create statement but not drop. '
'To mute this warning add a __drop_sql__ '
'statement in the model class', name)
else:
event.listen(self.metadata,
'before_drop',
DDL(drop.format({'name': name})))
return model, name
def _get_tables(self, engine, create_drop=False):
tables = []
for table, eng in self.binds.items():
if eng == engine:
if table.key in self._declarative_register:
model = self[table.key]
if create_drop and hasattr(model, '__create_sql__'):
continue
tables.append(table)
return tables
def _database_all(self, engine):
return database_operation(engine, 'all')
def _database_drop(self, engine, database):
logger.info('dropping database "%s" from "%s"', database, engine)
database_operation(engine, 'drop', database)
def _database_exist(self, engine):
return database_operation(engine, 'exists')
|
quantmind/pulsar-odm
|
benchmark/app.py
|
Router.get
|
python
|
def get(self, request):
'''Simply list test urls
'''
data = {}
for router in self.routes:
data[router.name] = request.absolute_uri(router.path())
return Json(data).http_response(request)
|
Simply list test urls
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/benchmark/app.py#L38-L44
| null |
class Router(wsgi.Router):
    '''WSGI Router for the benchmarking application
    '''
    @route()
    def json(self, request):
        # JSON-serialization test endpoint.
        return Json({'message': "Hello, World!"}).http_response(request)
    @route()
    def plaintext(self, request):
        # Plain-text test endpoint.
        return String('Hello, World!').http_response(request)
    @route()
    def db(self, request):
        '''Single Database Query'''
        with self.mapper.begin() as session:
            world = session.query(World).get(randint(1, 10000))
        return Json(self.get_json(world)).http_response(request)
    @route()
    def queries(self, request):
        '''Multiple Database Queries'''
        queries = self.get_queries(request)
        worlds = []
        with self.mapper.begin() as session:
            for _ in range(queries):
                world = session.query(World).get(randint(1, MAXINT))
                worlds.append(self.get_json(world))
        return Json(worlds).http_response(request)
    @route()
    def updates(self, request):
        '''Multiple updates'''
        queries = self.get_queries(request)
        worlds = []
        for _ in range(queries):
            # One transaction per row update.
            with self.mapper.begin() as session:
                world = session.query(World).get(randint(1, MAXINT))
                world.randomNumber = randint(1, MAXINT)
                session.add(world)
            worlds.append(self.get_json(world))
        return Json(worlds).http_response(request)
    def get_queries(self, request):
        # Parse the "queries" url parameter, clamped to [1, 500];
        # malformed values fall back to 1.
        queries = request.url_data.get("queries", "1")
        try:
            queries = int(queries.strip())
        except ValueError:
            queries = 1
        return min(max(1, queries), 500)
    def get_json(self, world):
        # JSON-serializable representation of a World row.
        return {'id': world.id, 'randomNumber': world.randomNumber}
|
quantmind/pulsar-odm
|
benchmark/app.py
|
Router.db
|
python
|
def db(self, request):
'''Single Database Query'''
with self.mapper.begin() as session:
world = session.query(World).get(randint(1, 10000))
return Json(self.get_json(world)).http_response(request)
|
Single Database Query
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/benchmark/app.py#L55-L59
|
[
"def get_json(self, world):\n return {'id': world.id, 'randomNumber': world.randomNumber}\n"
] |
class Router(wsgi.Router):
'''WSGI Router for the benchmarking application
'''
def get(self, request):
'''Simply list test urls
'''
data = {}
for router in self.routes:
data[router.name] = request.absolute_uri(router.path())
return Json(data).http_response(request)
@route()
def json(self, request):
return Json({'message': "Hello, World!"}).http_response(request)
@route()
def plaintext(self, request):
return String('Hello, World!').http_response(request)
@route()
@route()
def queries(self, request):
'''Multiple Database Queries'''
queries = self.get_queries(request)
worlds = []
with self.mapper.begin() as session:
for _ in range(queries):
world = session.query(World).get(randint(1, MAXINT))
worlds.append(self.get_json(world))
return Json(worlds).http_response(request)
@route()
def updates(self, request):
'''Multiple updates'''
queries = self.get_queries(request)
worlds = []
for _ in range(queries):
with self.mapper.begin() as session:
world = session.query(World).get(randint(1, MAXINT))
world.randomNumber = randint(1, MAXINT)
session.add(world)
worlds.append(self.get_json(world))
return Json(worlds).http_response(request)
def get_queries(self, request):
queries = request.url_data.get("queries", "1")
try:
queries = int(queries.strip())
except ValueError:
queries = 1
return min(max(1, queries), 500)
def get_json(self, world):
return {'id': world.id, 'randomNumber': world.randomNumber}
|
quantmind/pulsar-odm
|
benchmark/app.py
|
Router.queries
|
python
|
def queries(self, request):
'''Multiple Database Queries'''
queries = self.get_queries(request)
worlds = []
with self.mapper.begin() as session:
for _ in range(queries):
world = session.query(World).get(randint(1, MAXINT))
worlds.append(self.get_json(world))
return Json(worlds).http_response(request)
|
Multiple Database Queries
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/benchmark/app.py#L62-L70
|
[
"def get_queries(self, request):\n queries = request.url_data.get(\"queries\", \"1\")\n try:\n queries = int(queries.strip())\n except ValueError:\n queries = 1\n\n return min(max(1, queries), 500)\n",
"def get_json(self, world):\n return {'id': world.id, 'randomNumber': world.randomNumber}\n"
] |
class Router(wsgi.Router):
'''WSGI Router for the benchmarking application
'''
def get(self, request):
'''Simply list test urls
'''
data = {}
for router in self.routes:
data[router.name] = request.absolute_uri(router.path())
return Json(data).http_response(request)
@route()
def json(self, request):
return Json({'message': "Hello, World!"}).http_response(request)
@route()
def plaintext(self, request):
return String('Hello, World!').http_response(request)
@route()
def db(self, request):
'''Single Database Query'''
with self.mapper.begin() as session:
world = session.query(World).get(randint(1, 10000))
return Json(self.get_json(world)).http_response(request)
@route()
@route()
def updates(self, request):
'''Multiple updates'''
queries = self.get_queries(request)
worlds = []
for _ in range(queries):
with self.mapper.begin() as session:
world = session.query(World).get(randint(1, MAXINT))
world.randomNumber = randint(1, MAXINT)
session.add(world)
worlds.append(self.get_json(world))
return Json(worlds).http_response(request)
def get_queries(self, request):
queries = request.url_data.get("queries", "1")
try:
queries = int(queries.strip())
except ValueError:
queries = 1
return min(max(1, queries), 500)
def get_json(self, world):
return {'id': world.id, 'randomNumber': world.randomNumber}
|
quantmind/pulsar-odm
|
odm/utils.py
|
get_columns
|
python
|
def get_columns(mixed):
    """Return a collection of all Column objects for the given
    SQLAlchemy object.

    The type of the collection depends on the input: tables and aliases
    expose ``.c``, mappers and aliased classes expose mapper columns,
    and declarative classes or instances are inspected.

    :param mixed: SA Table object, SA Mapper, SA declarative class,
        SA declarative class instance or an alias of any of these
    """
    if isinstance(mixed, (sa.Table, sa.sql.selectable.Alias)):
        return mixed.c
    if isinstance(mixed, sa.orm.util.AliasedClass):
        return sa.inspect(mixed).mapper.columns
    if isinstance(mixed, sa.orm.Mapper):
        return mixed.columns
    # Instances fall back to their class before inspection.
    target = mixed if isclass(mixed) else mixed.__class__
    return sa.inspect(target).columns
|
Return a collection of all Column objects for given SQLAlchemy
object.
The type of the collection depends on the type of the object to return the
columns from.
::
get_columns(User)
get_columns(User())
get_columns(User.__table__)
get_columns(User.__mapper__)
get_columns(sa.orm.aliased(User))
get_columns(sa.orm.aliased(User.__table__))
:param mixed:
SA Table object, SA Mapper, SA declarative class, SA declarative class
instance or an alias of any of these objects
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/utils.py#L9-L37
| null |
import os
from inspect import isclass
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.exc import OperationalError, ProgrammingError
def database_operation(engine, oper, *args):
    """Execute the database operation named *oper* (``create``, ``drop``,
    ``exists`` or ``all``) against *engine*, forwarding *args* to the
    resolved implementation.
    """
    return _database_operation(engine, oper)(engine, *args)
def _database_operation(engine, oper):
dialect = engine.dialect
method_name = 'database_%s' % oper
if hasattr(dialect, method_name):
return getattr(dialect, method_name)
else:
scripts = engine_scripts[method_name]
if hasattr(scripts, dialect.name):
return getattr(scripts, dialect.name)
else:
return scripts.default
class CreateDatabase:
    """Dialect-specific scripts for creating a database."""

    def sqlite(self, engine, database):
        # A SQLite database is a plain file created lazily on first
        # connection, so there is nothing to do here.
        pass

    def default(self, engine, database):
        """Issue ``create database`` on a fresh connection."""
        connection = engine.connect()
        # The connection starts inside an implicit transaction, which
        # must be ended with a commit before running the DDL statement.
        connection.execute("commit")
        connection.execute('create database %s' % database)
        connection.close()
class DropDatabase:
    """Dialect-specific scripts for dropping a database."""

    def sqlite(self, engine, database):
        # A SQLite database is just a file; removing the file drops the
        # database, and a missing file means there is nothing to drop.
        try:
            os.remove(database)
        except FileNotFoundError:
            pass

    def default(self, engine, database):
        """Issue ``drop database`` on a fresh connection."""
        connection = engine.connect()
        # End the implicit transaction before executing DDL.
        connection.execute("commit")
        connection.execute('drop database %s' % database)
        connection.close()
class AllDatabase:
    """Dialect-specific scripts listing the available databases."""

    def sqlite(self, engine):
        # A SQLite "server" hosts at most one database: the file the
        # engine url points at, if it exists on disk.
        path = engine.url.database
        return [path] if os.path.isfile(path) else []

    def default(self, engine):
        """List schema names as reported by SQLAlchemy's inspector."""
        return inspect(engine).get_schema_names()
class ExistDatabase:
    """Check whether the database an engine points at exists."""

    def sqlite(self, engine):
        name = engine.url.database
        if not name:
            # SQLAlchemy defaults to an in-memory database when no
            # path is given (":memory:" is not required), so treat
            # "no name" as existing.
            return True
        return name == ':memory:' or os.path.exists(name)

    def postgresql(self, engine):
        # Ask the server catalogue for the database name.
        query = "SELECT 1 FROM pg_database WHERE datname='%s'" % engine.url.database
        return bool(engine.execute(query).scalar())

    def mysql(self, engine):
        query = ("SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA "
                 "WHERE SCHEMA_NAME = '%s'" % engine.url.database)
        return bool(engine.execute(query).scalar())

    def default(self, engine):
        """Fallback: the database exists iff a trivial query succeeds."""
        try:
            engine.execute('SELECT 1')
            return True
        except (ProgrammingError, OperationalError):
            return False
engine_scripts = {'database_exists': ExistDatabase(),
'database_create': CreateDatabase(),
'database_drop': DropDatabase(),
'database_all': AllDatabase()}
|
quantmind/pulsar-odm
|
benchmark/bench.py
|
run_benchmark
|
python
|
def run_benchmark(monitor):
    '''Run the benchmarks against the configured test url.

    For each concurrency level in ``POOL_SIZES`` every worker is first
    wormed up and then benchmarked; the per-worker results are summed
    and written as one CSV row per concurrency level.
    '''
    url = urlparse(monitor.cfg.test_url)
    name = slugify(url.path) or 'home'
    name = '%s_%d.csv' % (name, monitor.cfg.workers)
    monitor.logger.info('WRITING RESULTS ON "%s"', name)
    # Each worker performs an equal share of the total requests.
    total = REQUESTS//monitor.cfg.workers
    with open(name, 'w') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=FIELDNAMES)
        writer.writeheader()
        for pool_size in POOL_SIZES:
            size = pool_size//monitor.cfg.workers
            if size*monitor.cfg.workers != pool_size:
                # Fixed typo in log message ("workes" -> "workers").
                monitor.logger.error('Adjust workers so that pool sizes '
                                     'can be evenly shared across them')
                monitor._loop.stop()
            # WORMUP: let every worker resize its pool and warm caches.
            requests = [monitor.send(worker, 'run', wormup, size, total) for
                        worker in monitor.managed_actors]
            yield from wait(requests)
            # BENCHMARK
            requests = [monitor.send(worker, 'run', bench) for
                        worker in monitor.managed_actors]
            results, pending = yield from wait(requests)
            # Fixed typo in assertion message ("requets" -> "requests").
            assert not pending, 'Pending requests!'
            results = [r.result() for r in results]
            # Aggregate the per-worker dicts into one summary row.
            summary = {'concurrency': pool_size}
            for name in results[0]:
                summary[name] = reduce(add(name), results, 0)
            writer.writerow(summary)
            persec = summary['requests']/summary['time']
            monitor.logger.info('%d concurrency - %d requests - '
                                '%d errors - %.3f seconds - '
                                '%.2f requests/sec',
                                pool_size,
                                summary['requests'],
                                summary['errors'],
                                summary['time'],
                                persec)
|
Run the benchmarks
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/benchmark/bench.py#L99-L143
| null |
import csv
from asyncio import async, wait
from random import randint
from urllib.parse import urlparse
from functools import reduce
import pulsar
from pulsar.apps.http import HttpClient
from pulsar.apps.greenio import GreenPool
from pulsar.utils.slugify import slugify
POOL_SIZES = [8, 16, 32, 64, 128, 256, 512, 1024]
POOL_SIZES = [8, 16, 32, 64, 128]
TIMEOUT = 120
REQUESTS = 10000
FIRST_WORMUP = 1000
FIELDNAMES = ['concurrency', 'requests', 'errors', 'time']
class PostgreSql(pulsar.Setting):
app = 'bench'
meta = "CONNECTION_STRING"
default = 'postgresql+async://odm:odmtest@127.0.0.1:5432/odmtests'
desc = 'Default connection string for the PostgreSql server'
class TestUrl(pulsar.Setting):
app = 'bench'
name = "test_url"
default = "http://127.0.0.1:8060/json"
flags = ["--test-url"]
desc = "url to test"
class FillDB(pulsar.Setting):
app = 'bench'
name = 'filldb'
flags = ['--filldb']
default = False
action = 'store_true'
desc = "Fill database with random data"
def wormup(worker, pool_size, total=FIRST_WORMUP):
    """Warm up *worker*: build its HTTP client with the given pool size,
    perform *total* requests, then signal the monitor it is ready.

    :param worker: pulsar actor performing the requests
    :param pool_size: connection-pool size for the worker's HTTP client
    :param total: number of warm-up requests to perform
    """
    worker.http = HttpClient(pool_size=pool_size, timeout=TIMEOUT)
    worker.requests = total
    worker.logger.info('WORM UP')
    # log=False: warm-up requests are not timed or reported.
    yield from request(worker, False)
    # Notify the monitor that this worker finished warming up.
    yield from worker.send('monitor', 'run', ready)
def bench(worker):
    """Run the benchmark requests on *worker* and return the timing
    summary dict produced by :func:`request`.
    """
    worker.logger.info('BENCHMARKING')
    results = yield from request(worker)
    return results
def request(worker, log=True):
    """Fire ``worker.requests`` concurrent GET requests at the test url.

    :param worker: actor holding the HTTP client and configuration
    :param log: when True, log progress and return a summary dict with
        ``time``, ``requests`` and ``errors`` keys; when False (the
        warm-up path) nothing is returned.
    """
    url = worker.cfg.test_url
    loop = worker._loop
    number = worker.requests
    if log:
        worker.logger.info('Sending %d requests with %d concurrency to "%s"',
                           number, worker.http.pool_size, url)
    # Launch all requests at once and time their completion as a batch.
    requests = [worker.http.get(url) for _ in range(number)]
    start = loop.time()
    done, pending = yield from wait(requests, loop=loop)
    taken = loop.time() - start
    assert not pending
    # A request counts as an error on any exception or non-200 status.
    errors = 0
    for result in done:
        try:
            response = result.result()
            if response.status_code != 200:
                errors += 1
        except Exception:
            errors += 1
    if log:
        return {'time': taken,
                'requests': number,
                'errors': errors}
def add(name):
    """Return a reducer adding ``item[name]`` to a running total,
    suitable for :func:`functools.reduce` over result dicts.
    """
    def _accumulate(total, item):
        return total + item[name]
    return _accumulate
def ready(monitor):
    """Executed on the monitor each time a worker finishes warming up;
    once all workers are ready, run the benchmark and stop the loop.
    """
    monitor.ready += 1
    if monitor.ready == monitor.cfg.workers:
        try:
            yield from run_benchmark(monitor)
        finally:
            # Shut the application down whether or not the run succeeded.
            monitor._loop.stop()
class Bench(pulsar.Application):
    """Pulsar application driving the benchmark run.

    With ``--filldb`` it only populates the database and exits;
    otherwise it spawns workers that warm up and then benchmark the
    target url.
    """
    cfg = pulsar.Config(apps=['bench'])
    def monitor_start(self, monitor, exc=None):
        # Monitor hook: either fill the database (and stop), or start
        # counting workers as they report ready.
        if monitor.cfg.filldb:
            self.pool = GreenPool()
            try:
                yield from self.pool.submit(self.filldb)
            finally:
                monitor._loop.stop()
        else:
            monitor.ready = 0
    def worker_start(self, worker, exc=None):
        # Kick off the warm-up shortly after the worker boots.
        if not exc:
            worker._loop.call_later(1, async, wormup(worker, POOL_SIZES[0]))
    def filldb(self):
        '''Fill database
        '''
        from app import World, Fortune, odm, MAXINT
        mapper = odm.Mapper(self.cfg.postgresql)
        mapper.register(World)
        mapper.register(Fortune)
        mapper.table_create()
        with mapper.begin() as session:
            query = session.query(mapper.world)
            N = query.count()
            # Top up the table so it contains exactly MAXINT rows.
            todo = max(0, MAXINT - N)
            if todo:
                for _ in range(todo):
                    world = mapper.world(randomNumber=randint(1, MAXINT))
                    session.add(world)
        if todo:
            odm.logger.info('Created %d World models', todo)
        else:
            odm.logger.info('%d World models already available', N)
if __name__ == '__main__':
Bench().start()
|
quantmind/pulsar-odm
|
benchmark/bench.py
|
Bench.filldb
|
python
|
def filldb(self):
'''Fill database
'''
from app import World, Fortune, odm, MAXINT
mapper = odm.Mapper(self.cfg.postgresql)
mapper.register(World)
mapper.register(Fortune)
mapper.table_create()
with mapper.begin() as session:
query = session.query(mapper.world)
N = query.count()
todo = max(0, MAXINT - N)
if todo:
for _ in range(todo):
world = mapper.world(randomNumber=randint(1, MAXINT))
session.add(world)
if todo:
odm.logger.info('Created %d World models', todo)
else:
odm.logger.info('%d World models already available', N)
|
Fill database
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/benchmark/bench.py#L163-L185
| null |
class Bench(pulsar.Application):
cfg = pulsar.Config(apps=['bench'])
def monitor_start(self, monitor, exc=None):
if monitor.cfg.filldb:
self.pool = GreenPool()
try:
yield from self.pool.submit(self.filldb)
finally:
monitor._loop.stop()
else:
monitor.ready = 0
def worker_start(self, worker, exc=None):
if not exc:
worker._loop.call_later(1, async, wormup(worker, POOL_SIZES[0]))
|
quantmind/pulsar-odm
|
odm/dialects/postgresql/green.py
|
psycopg2_wait_callback
|
python
|
def psycopg2_wait_callback(conn):
while True:
state = conn.poll()
if state == extensions.POLL_OK:
# Done with waiting
break
elif state == extensions.POLL_READ:
_wait_fd(conn)
elif state == extensions.POLL_WRITE:
_wait_fd(conn, read=False)
else: # pragma nocover
raise OperationalError("Bad result from poll: %r" % state)
|
A wait callback to allow greenlet to work with Psycopg.
The caller must be from a greenlet other than the main one.
:param conn: psycopg2 connection or file number
This function must be invoked from a coroutine with parent, therefore
invoking it from the main greenlet will raise an exception.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/dialects/postgresql/green.py#L12-L31
|
[
"def _wait_fd(conn, read=True):\n '''Wait for an event on file descriptor ``fd``.\n\n :param conn: file descriptor\n :param read: wait for a read event if ``True``, otherwise a wait\n for write event.\n\n This function must be invoked from a coroutine with parent, therefore\n invoking it from the main greenlet will raise an exception.\n '''\n current = getcurrent()\n parent = current.parent\n assert parent, '\"_wait_fd\" must be called by greenlet with a parent'\n try:\n fileno = conn.fileno()\n except AttributeError:\n fileno = conn\n future = Future()\n # When the event on fd occurs switch back to the current greenlet\n if read:\n future._loop.add_reader(fileno, _done_wait_fd, fileno, future, read)\n else:\n future._loop.add_writer(fileno, _done_wait_fd, fileno, future, read)\n # switch back to parent greenlet\n parent.switch(future)\n # Back on the child greenlet. Raise error if there is one\n future.result()\n"
] |
from asyncio import Future
from greenlet import getcurrent
import psycopg2
from psycopg2 import * # noqa
from psycopg2 import extensions, OperationalError
__version__ = psycopg2.__version__
# INTERNALS
def _wait_fd(conn, read=True):
    '''Wait for an event on file descriptor ``fd``.

    :param conn: file descriptor, or an object exposing ``fileno()``
    :param read: wait for a read event if ``True``, otherwise a wait
        for write event.

    This function must be invoked from a coroutine with parent, therefore
    invoking it from the main greenlet will raise an exception.
    '''
    current = getcurrent()
    parent = current.parent
    assert parent, '"_wait_fd" must be called by greenlet with a parent'
    try:
        # ``conn`` may be a connection-like object or a raw descriptor.
        fileno = conn.fileno()
    except AttributeError:
        fileno = conn
    future = Future()
    # When the event on fd occurs switch back to the current greenlet
    if read:
        future._loop.add_reader(fileno, _done_wait_fd, fileno, future, read)
    else:
        future._loop.add_writer(fileno, _done_wait_fd, fileno, future, read)
    # switch back to parent greenlet
    parent.switch(future)
    # Back on the child greenlet. Raise error if there is one
    future.result()
def _done_wait_fd(fd, future, read):
try:
if read:
future._loop.remove_reader(fd)
else:
future._loop.remove_writer(fd)
except Exception as exc:
future.set_exception(exc)
else:
future.set_result(None)
try:
extensions.POLL_OK
except AttributeError: # pragma nocover
from pulsar import ImproperlyConfigured
raise ImproperlyConfigured(
'Psycopg2 does not have support for asynchronous connections. '
'You need at least version 2.2.0 of Psycopg2.')
extensions.set_wait_callback(psycopg2_wait_callback)
|
quantmind/pulsar-odm
|
odm/dialects/postgresql/green.py
|
_wait_fd
|
python
|
def _wait_fd(conn, read=True):
'''Wait for an event on file descriptor ``fd``.
:param conn: file descriptor
:param read: wait for a read event if ``True``, otherwise a wait
for write event.
This function must be invoked from a coroutine with parent, therefore
invoking it from the main greenlet will raise an exception.
'''
current = getcurrent()
parent = current.parent
assert parent, '"_wait_fd" must be called by greenlet with a parent'
try:
fileno = conn.fileno()
except AttributeError:
fileno = conn
future = Future()
# When the event on fd occurs switch back to the current greenlet
if read:
future._loop.add_reader(fileno, _done_wait_fd, fileno, future, read)
else:
future._loop.add_writer(fileno, _done_wait_fd, fileno, future, read)
# switch back to parent greenlet
parent.switch(future)
# Back on the child greenlet. Raise error if there is one
future.result()
|
Wait for an event on file descriptor ``fd``.
:param conn: file descriptor
:param read: wait for a read event if ``True``, otherwise a wait
for write event.
This function must be invoked from a coroutine with parent, therefore
invoking it from the main greenlet will raise an exception.
|
train
|
https://github.com/quantmind/pulsar-odm/blob/5955c20beca0a89270c2b390335838deb7d5915e/odm/dialects/postgresql/green.py#L36-L62
| null |
from asyncio import Future
from greenlet import getcurrent
import psycopg2
from psycopg2 import * # noqa
from psycopg2 import extensions, OperationalError
__version__ = psycopg2.__version__
def psycopg2_wait_callback(conn):
"""A wait callback to allow greenlet to work with Psycopg.
The caller must be from a greenlet other than the main one.
:param conn: psycopg2 connection or file number
This function must be invoked from a coroutine with parent, therefore
invoking it from the main greenlet will raise an exception.
"""
while True:
state = conn.poll()
if state == extensions.POLL_OK:
# Done with waiting
break
elif state == extensions.POLL_READ:
_wait_fd(conn)
elif state == extensions.POLL_WRITE:
_wait_fd(conn, read=False)
else: # pragma nocover
raise OperationalError("Bad result from poll: %r" % state)
# INTERNALS
def _done_wait_fd(fd, future, read):
try:
if read:
future._loop.remove_reader(fd)
else:
future._loop.remove_writer(fd)
except Exception as exc:
future.set_exception(exc)
else:
future.set_result(None)
try:
extensions.POLL_OK
except AttributeError: # pragma nocover
from pulsar import ImproperlyConfigured
raise ImproperlyConfigured(
'Psycopg2 does not have support for asynchronous connections. '
'You need at least version 2.2.0 of Psycopg2.')
extensions.set_wait_callback(psycopg2_wait_callback)
|
xguse/table_enforcer
|
setup.py
|
filter_req_paths
|
python
|
def filter_req_paths(paths, func):
if not isinstance(paths, list):
raise ValueError("Paths must be a list of paths.")
libs = set()
junk = set(['\n'])
for p in paths:
with p.open(mode='r') as reqs:
lines = set([line for line in reqs if func(line)])
libs.update(lines)
return list(libs - junk)
|
Return list of filtered libs.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/setup.py#L9-L21
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import setup, find_packages
from pathlib import Path
def is_pipable(line):
"""Filter for pipable reqs."""
if "# not_pipable" in line:
return False
elif line.startswith('#'):
return False
else:
return True
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = filter_req_paths(
paths=[
Path("requirements.txt"),
Path("requirements.pip.txt"),
],
func=is_pipable,)
test_requirements = filter_req_paths(
paths=[
Path("requirements.dev.txt"),
Path("requirements.dev.pip.txt"),
],
func=is_pipable,)
setup(
name='table_enforcer',
version='0.4.4',
description="ORM-like package for defining, loading, and validating table schemas in pandas.",
long_description=readme + '\n\n' + history,
author="Gus Dunn",
author_email='w.gus.dunn@gmail.com',
url='https://github.com/xguse/table_enforcer',
packages=find_packages(),
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='table_enforcer',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
test_suite='tests',
tests_require=test_requirements,)
|
xguse/table_enforcer
|
table_enforcer/utils/validate/decorators.py
|
minmax
|
python
|
def minmax(low, high):
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
lo_pass = low <= series
hi_pass = series <= high
return lo_pass & hi_pass
return wrapper
return decorator
|
Test that the data items fall within range: low <= x <= high.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/utils/validate/decorators.py#L5-L20
| null |
"""Provide decoration functions to augment the behavior of validator functions."""
import functools
def choice(choices):
"""Test that the data items are members of the set `choices`."""
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
return series.isin(set(choices))
return wrapper
return decorator
def bounded_length(low, high=None):
"""Test that the length of the data items fall within range: low <= x <= high.
If high is None, treat as exact length.
"""
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
if high is None:
return series.apply(lambda x: len(x) == low)
else:
lo_pass = series.apply(lambda x: low <= len(x))
hi_pass = series.apply(lambda x: len(x) <= high)
return lo_pass & hi_pass
return wrapper
return decorator
|
xguse/table_enforcer
|
table_enforcer/utils/validate/decorators.py
|
choice
|
python
|
def choice(choices):
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
return series.isin(set(choices))
return wrapper
return decorator
|
Test that the data items are members of the set `choices`.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/utils/validate/decorators.py#L23-L35
| null |
"""Provide decoration functions to augment the behavior of validator functions."""
import functools
def minmax(low, high):
"""Test that the data items fall within range: low <= x <= high."""
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
lo_pass = low <= series
hi_pass = series <= high
return lo_pass & hi_pass
return wrapper
return decorator
def bounded_length(low, high=None):
"""Test that the length of the data items fall within range: low <= x <= high.
If high is None, treat as exact length.
"""
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
if high is None:
return series.apply(lambda x: len(x) == low)
else:
lo_pass = series.apply(lambda x: low <= len(x))
hi_pass = series.apply(lambda x: len(x) <= high)
return lo_pass & hi_pass
return wrapper
return decorator
|
xguse/table_enforcer
|
table_enforcer/utils/validate/decorators.py
|
bounded_length
|
python
|
def bounded_length(low, high=None):
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
if high is None:
return series.apply(lambda x: len(x) == low)
else:
lo_pass = series.apply(lambda x: low <= len(x))
hi_pass = series.apply(lambda x: len(x) <= high)
return lo_pass & hi_pass
return wrapper
return decorator
|
Test that the length of the data items fall within range: low <= x <= high.
If high is None, treat as exact length.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/utils/validate/decorators.py#L38-L59
| null |
"""Provide decoration functions to augment the behavior of validator functions."""
import functools
def minmax(low, high):
"""Test that the data items fall within range: low <= x <= high."""
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
lo_pass = low <= series
hi_pass = series <= high
return lo_pass & hi_pass
return wrapper
return decorator
def choice(choices):
"""Test that the data items are members of the set `choices`."""
def decorator(function):
"""Decorate a function with args."""
@functools.wraps(function)
def wrapper(*args, **kwargs):
"""Wrap the function."""
series = function(*args, **kwargs)
return series.isin(set(choices))
return wrapper
return decorator
|
xguse/table_enforcer
|
table_enforcer/utils/validate/funcs.py
|
unique
|
python
|
def unique(series: pd.Series) -> pd.Series:
return ~series.duplicated(keep=False)
|
Test that the data items do not repeat.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/utils/validate/funcs.py#L30-L32
| null |
"""Provide builtin validator functions for common use cases.
In general, validators take a single `pandas.Series` object as
input and return a `pandas.Series` of the same shape and indexes
containing `True` or `False` relative to which items passed the
validation logic.
"""
import pandas as pd
# import numpy as np
# from table_enforcer import errors as e
# from table_enforcer.validate import decorators as dec
def not_null(series: pd.Series) -> pd.Series:
"""Return Series with True/False bools based on which items pass."""
return pd.notnull(series)
def positive(series: pd.Series) -> pd.Series:
"""Test that the data items are positive."""
return series > 0
def negative(series: pd.Series) -> pd.Series:
"""Test that the data items are negative."""
return series < 0
def upper(series):
"""Test that the data items are all uppercase."""
return series.str.isupper()
def lower(series):
"""Test that the data items are all lowercase."""
return series.str.islower()
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Enforcer._make_validations
|
python
|
def _make_validations(self, table: pd.DataFrame) -> Box:
results = []
for column in self.columns:
results.append(column.validate(table))
return results
|
Return a dict-like object containing dataframes of which tests passed/failed for each column.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L42-L49
| null |
class Enforcer(object):
"""Class to define table definitions."""
def __init__(self, columns):
"""Initialize an enforcer instance."""
self.columns = columns
def validate(self, table: pd.DataFrame) -> bool:
"""Return True if all validation tests pass: False otherwise."""
validations = self._make_validations(table=table)
results = [df.all().all() for df in validations]
return all(results)
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Return a fully recoded dataframe.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
df = pd.DataFrame(index=table.index)
for column in self.columns:
df = column.update_dataframe(df, table=table, validate=validate)
return df
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Enforcer.validate
|
python
|
def validate(self, table: pd.DataFrame) -> bool:
validations = self._make_validations(table=table)
results = [df.all().all() for df in validations]
return all(results)
|
Return True if all validation tests pass: False otherwise.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L51-L57
|
[
"def _make_validations(self, table: pd.DataFrame) -> Box:\n \"\"\"Return a dict-like object containing dataframes of which tests passed/failed for each column.\"\"\"\n results = []\n\n for column in self.columns:\n results.append(column.validate(table))\n\n return results\n"
] |
class Enforcer(object):
"""Class to define table definitions."""
def __init__(self, columns):
"""Initialize an enforcer instance."""
self.columns = columns
def _make_validations(self, table: pd.DataFrame) -> Box:
"""Return a dict-like object containing dataframes of which tests passed/failed for each column."""
results = []
for column in self.columns:
results.append(column.validate(table))
return results
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Return a fully recoded dataframe.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
df = pd.DataFrame(index=table.index)
for column in self.columns:
df = column.update_dataframe(df, table=table, validate=validate)
return df
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Enforcer.recode
|
python
|
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
df = pd.DataFrame(index=table.index)
for column in self.columns:
df = column.update_dataframe(df, table=table, validate=validate)
return df
|
Return a fully recoded dataframe.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L59-L71
| null |
class Enforcer(object):
"""Class to define table definitions."""
def __init__(self, columns):
"""Initialize an enforcer instance."""
self.columns = columns
def _make_validations(self, table: pd.DataFrame) -> Box:
"""Return a dict-like object containing dataframes of which tests passed/failed for each column."""
results = []
for column in self.columns:
results.append(column.validate(table))
return results
def validate(self, table: pd.DataFrame) -> bool:
"""Return True if all validation tests pass: False otherwise."""
validations = self._make_validations(table=table)
results = [df.all().all() for df in validations]
return all(results)
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
BaseColumn.update_dataframe
|
python
|
def update_dataframe(self, df, table, validate=False):
df = df.copy()
recoded_columns = self.recode(table=table, validate=validate)
return pd.concat([df, recoded_columns], axis=1)
|
Perform ``self.recode`` and add resulting column(s) to ``df`` and return ``df``.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L80-L84
|
[
"def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:\n \"\"\"Pass the appropriate columns through each recoder function sequentially and return the final result.\n\n Args:\n table (pd.DataFrame): A dataframe on which to apply recoding logic.\n validate (bool): If ``True``, recoded table must pass validation tests.\n \"\"\"\n raise NotImplementedError(\"This method must be defined for each subclass.\")\n"
] |
class BaseColumn(object):
"""Base Class for Columns.
Lays out essential methods api.
"""
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
raise NotImplementedError("This method must be defined for each subclass.")
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
raise NotImplementedError("This method must be defined for each subclass.")
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
BaseColumn.validate
|
python
|
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
raise NotImplementedError("This method must be defined for each subclass.")
|
Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L86-L93
| null |
class BaseColumn(object):
"""Base Class for Columns.
Lays out essential methods api.
"""
def update_dataframe(self, df, table, validate=False):
"""Perform ``self.recode`` and add resulting column(s) to ``df`` and return ``df``."""
df = df.copy()
recoded_columns = self.recode(table=table, validate=validate)
return pd.concat([df, recoded_columns], axis=1)
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
raise NotImplementedError("This method must be defined for each subclass.")
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
BaseColumn.recode
|
python
|
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
raise NotImplementedError("This method must be defined for each subclass.")
|
Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L95-L102
| null |
class BaseColumn(object):
"""Base Class for Columns.
Lays out essential methods api.
"""
def update_dataframe(self, df, table, validate=False):
"""Perform ``self.recode`` and add resulting column(s) to ``df`` and return ``df``."""
df = df.copy()
recoded_columns = self.recode(table=table, validate=validate)
return pd.concat([df, recoded_columns], axis=1)
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
raise NotImplementedError("This method must be defined for each subclass.")
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Column._dict_of_funcs
|
python
|
def _dict_of_funcs(self, funcs: list) -> pd.Series:
return {func.__name__: func for func in funcs}
|
Return a pd.Series of functions with index derived from the function name.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L135-L137
| null |
class Column(BaseColumn):
"""Class representing a single table column."""
def __init__(
self,
name: str,
dtype: type,
unique: bool,
validators: t.List[VALIDATOR_FUNCTION],
recoders: t.List[RECODER_FUNCTION],) -> None:
"""Construct a new `Column` object.
Args:
name (str): The exact name of the column in a ``pd.DataFrame``.
dtype (type): The type that each member of the recoded column must belong to.
unique (bool): Whether values are allowed to recur in this column.
validators (list): A list of validator functions.
recoders (list): A list of recoder functions.
"""
if validators is None:
validators = []
if recoders is None:
recoders = []
self.name = name
self.dtype = dtype
self.unique = unique
self.validators = self._dict_of_funcs(validators)
self.recoders = self._dict_of_funcs(recoders)
def _validate_series_dtype(self, series: pd.Series) -> pd.Series:
"""Validate that the series data is the correct dtype."""
return series.apply(lambda i: isinstance(i, self.dtype))
def _check_series_name(self, series, override_name=None):
if override_name is None:
name = self.name
else:
name = override_name
if series.name != name:
raise ValueError(f"The name of provided series '{series.name}' does not match this column's name '{name}'.")
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
series = table[self.name]
self._check_series_name(series)
validators = self.validators
results = pd.DataFrame({validator: series for validator in validators}, index=series.index)
for name, func in validators.items():
results[name] = func(results[name])
results['dtype'] = self._validate_series_dtype(series)
if self.unique:
results['unique'] = v.funcs.unique(series)
if failed_only:
results = find_failed_rows(results)
return results
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the provided series obj through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
series = table[self.name]
self._check_series_name(series)
col = self.name
data = series.copy()
for recoder in self.recoders.values():
try:
data = recoder(data)
except (BaseException) as err:
raise RecodingError(col, recoder, err)
if validate:
failed_rows = find_failed_rows(self.validate(data.to_frame()))
if failed_rows.shape[0] > 0:
raise ValidationError(f"Rows that failed to validate for column '{self.name}':\n{failed_rows}")
return data.to_frame()
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Column._validate_series_dtype
|
python
|
def _validate_series_dtype(self, series: pd.Series) -> pd.Series:
return series.apply(lambda i: isinstance(i, self.dtype))
|
Validate that the series data is the correct dtype.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L139-L141
| null |
class Column(BaseColumn):
"""Class representing a single table column."""
def __init__(
self,
name: str,
dtype: type,
unique: bool,
validators: t.List[VALIDATOR_FUNCTION],
recoders: t.List[RECODER_FUNCTION],) -> None:
"""Construct a new `Column` object.
Args:
name (str): The exact name of the column in a ``pd.DataFrame``.
dtype (type): The type that each member of the recoded column must belong to.
unique (bool): Whether values are allowed to recur in this column.
validators (list): A list of validator functions.
recoders (list): A list of recoder functions.
"""
if validators is None:
validators = []
if recoders is None:
recoders = []
self.name = name
self.dtype = dtype
self.unique = unique
self.validators = self._dict_of_funcs(validators)
self.recoders = self._dict_of_funcs(recoders)
def _dict_of_funcs(self, funcs: list) -> pd.Series:
"""Return a pd.Series of functions with index derived from the function name."""
return {func.__name__: func for func in funcs}
def _check_series_name(self, series, override_name=None):
if override_name is None:
name = self.name
else:
name = override_name
if series.name != name:
raise ValueError(f"The name of provided series '{series.name}' does not match this column's name '{name}'.")
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
series = table[self.name]
self._check_series_name(series)
validators = self.validators
results = pd.DataFrame({validator: series for validator in validators}, index=series.index)
for name, func in validators.items():
results[name] = func(results[name])
results['dtype'] = self._validate_series_dtype(series)
if self.unique:
results['unique'] = v.funcs.unique(series)
if failed_only:
results = find_failed_rows(results)
return results
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the provided series obj through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
series = table[self.name]
self._check_series_name(series)
col = self.name
data = series.copy()
for recoder in self.recoders.values():
try:
data = recoder(data)
except (BaseException) as err:
raise RecodingError(col, recoder, err)
if validate:
failed_rows = find_failed_rows(self.validate(data.to_frame()))
if failed_rows.shape[0] > 0:
raise ValidationError(f"Rows that failed to validate for column '{self.name}':\n{failed_rows}")
return data.to_frame()
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Column.validate
|
python
|
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
series = table[self.name]
self._check_series_name(series)
validators = self.validators
results = pd.DataFrame({validator: series for validator in validators}, index=series.index)
for name, func in validators.items():
results[name] = func(results[name])
results['dtype'] = self._validate_series_dtype(series)
if self.unique:
results['unique'] = v.funcs.unique(series)
if failed_only:
results = find_failed_rows(results)
return results
|
Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L152-L178
|
[
"def unique(series: pd.Series) -> pd.Series:\n \"\"\"Test that the data items do not repeat.\"\"\"\n return ~series.duplicated(keep=False)\n",
"def find_failed_rows(results):\n failed_rows = results.apply(lambda vec: ~vec.all(), axis=1)\n return results.loc[failed_rows]\n",
"def _validate_series_dtype(self, series: pd.Series) -> pd.Series:\n \"\"\"Validate that the series data is the correct dtype.\"\"\"\n return series.apply(lambda i: isinstance(i, self.dtype))\n",
"def _check_series_name(self, series, override_name=None):\n if override_name is None:\n name = self.name\n else:\n name = override_name\n\n if series.name != name:\n raise ValueError(f\"The name of provided series '{series.name}' does not match this column's name '{name}'.\")\n"
] |
class Column(BaseColumn):
"""Class representing a single table column."""
def __init__(
self,
name: str,
dtype: type,
unique: bool,
validators: t.List[VALIDATOR_FUNCTION],
recoders: t.List[RECODER_FUNCTION],) -> None:
"""Construct a new `Column` object.
Args:
name (str): The exact name of the column in a ``pd.DataFrame``.
dtype (type): The type that each member of the recoded column must belong to.
unique (bool): Whether values are allowed to recur in this column.
validators (list): A list of validator functions.
recoders (list): A list of recoder functions.
"""
if validators is None:
validators = []
if recoders is None:
recoders = []
self.name = name
self.dtype = dtype
self.unique = unique
self.validators = self._dict_of_funcs(validators)
self.recoders = self._dict_of_funcs(recoders)
def _dict_of_funcs(self, funcs: list) -> pd.Series:
"""Return a pd.Series of functions with index derived from the function name."""
return {func.__name__: func for func in funcs}
def _validate_series_dtype(self, series: pd.Series) -> pd.Series:
"""Validate that the series data is the correct dtype."""
return series.apply(lambda i: isinstance(i, self.dtype))
def _check_series_name(self, series, override_name=None):
if override_name is None:
name = self.name
else:
name = override_name
if series.name != name:
raise ValueError(f"The name of provided series '{series.name}' does not match this column's name '{name}'.")
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the provided series obj through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
series = table[self.name]
self._check_series_name(series)
col = self.name
data = series.copy()
for recoder in self.recoders.values():
try:
data = recoder(data)
except (BaseException) as err:
raise RecodingError(col, recoder, err)
if validate:
failed_rows = find_failed_rows(self.validate(data.to_frame()))
if failed_rows.shape[0] > 0:
raise ValidationError(f"Rows that failed to validate for column '{self.name}':\n{failed_rows}")
return data.to_frame()
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
Column.recode
|
python
|
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
series = table[self.name]
self._check_series_name(series)
col = self.name
data = series.copy()
for recoder in self.recoders.values():
try:
data = recoder(data)
except (BaseException) as err:
raise RecodingError(col, recoder, err)
if validate:
failed_rows = find_failed_rows(self.validate(data.to_frame()))
if failed_rows.shape[0] > 0:
raise ValidationError(f"Rows that failed to validate for column '{self.name}':\n{failed_rows}")
return data.to_frame()
|
Pass the provided series obj through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L180-L206
|
[
"def find_failed_rows(results):\n failed_rows = results.apply(lambda vec: ~vec.all(), axis=1)\n return results.loc[failed_rows]\n",
"def _check_series_name(self, series, override_name=None):\n if override_name is None:\n name = self.name\n else:\n name = override_name\n\n if series.name != name:\n raise ValueError(f\"The name of provided series '{series.name}' does not match this column's name '{name}'.\")\n",
"def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:\n \"\"\"Return a dataframe of validation results for the appropriate series vs the vector of validators.\n\n Args:\n table (pd.DataFrame): A dataframe on which to apply validation logic.\n failed_only (bool): If ``True``: return only the indexes that failed to validate.\n \"\"\"\n series = table[self.name]\n\n self._check_series_name(series)\n\n validators = self.validators\n\n results = pd.DataFrame({validator: series for validator in validators}, index=series.index)\n\n for name, func in validators.items():\n results[name] = func(results[name])\n\n results['dtype'] = self._validate_series_dtype(series)\n\n if self.unique:\n results['unique'] = v.funcs.unique(series)\n\n if failed_only:\n results = find_failed_rows(results)\n\n return results\n"
] |
class Column(BaseColumn):
"""Class representing a single table column."""
def __init__(
self,
name: str,
dtype: type,
unique: bool,
validators: t.List[VALIDATOR_FUNCTION],
recoders: t.List[RECODER_FUNCTION],) -> None:
"""Construct a new `Column` object.
Args:
name (str): The exact name of the column in a ``pd.DataFrame``.
dtype (type): The type that each member of the recoded column must belong to.
unique (bool): Whether values are allowed to recur in this column.
validators (list): A list of validator functions.
recoders (list): A list of recoder functions.
"""
if validators is None:
validators = []
if recoders is None:
recoders = []
self.name = name
self.dtype = dtype
self.unique = unique
self.validators = self._dict_of_funcs(validators)
self.recoders = self._dict_of_funcs(recoders)
def _dict_of_funcs(self, funcs: list) -> pd.Series:
"""Return a pd.Series of functions with index derived from the function name."""
return {func.__name__: func for func in funcs}
def _validate_series_dtype(self, series: pd.Series) -> pd.Series:
"""Validate that the series data is the correct dtype."""
return series.apply(lambda i: isinstance(i, self.dtype))
def _check_series_name(self, series, override_name=None):
if override_name is None:
name = self.name
else:
name = override_name
if series.name != name:
raise ValueError(f"The name of provided series '{series.name}' does not match this column's name '{name}'.")
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
series = table[self.name]
self._check_series_name(series)
validators = self.validators
results = pd.DataFrame({validator: series for validator in validators}, index=series.index)
for name, func in validators.items():
results[name] = func(results[name])
results['dtype'] = self._validate_series_dtype(series)
if self.unique:
results['unique'] = v.funcs.unique(series)
if failed_only:
results = find_failed_rows(results)
return results
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
CompoundColumn._do_validation_set
|
python
|
def _do_validation_set(self, table: pd.DataFrame, columns, validation_type, failed_only=False) -> pd.DataFrame:
validations = []
for column in columns:
validation = column.validate(table=table, failed_only=failed_only)
validation["column_name"] = column.name
validation["validation_type"] = validation_type
validations.append(validation)
validation_table = pd.concat(validations)
validation_table.index.name = 'row'
return validation_table.reset_index().set_index(["validation_type", "column_name", "row"])
|
Return a dataframe of validation results for the appropriate series vs the vector of validators.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L228-L241
| null |
class CompoundColumn(BaseColumn):
"""Class representing multiple columns and the logic governing their transformation from source table to recoded table."""
def __init__(
self,
input_columns: t.List[Column],
output_columns: t.List[Column],
column_transform,) -> None:
"""Construct a new ``CompoundColumn`` object.
Args:
input_columns (list, Column): A list of ``Column`` objects representing column(s) from the SOURCE table.
output_columns (list, Column): A list of ``Column`` objects representing column(s) from the FINAL table.
column_transform (Callable): Function accepting the table object, performing transformations to it and returning a DataFrame containing the NEW columns only.
"""
self.input_columns = input_columns
self.output_columns = output_columns
self.column_transform = column_transform
def _validate_input(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators."""
return self._do_validation_set(
table=table,
columns=self.input_columns,
validation_type="input",
failed_only=failed_only,)
def _recode_set(self, table: pd.DataFrame, columns, validate=False) -> pd.DataFrame:
recoded_columns = []
for column in columns:
recoded = column.recode(table=table, validate=validate)
recoded_columns.append(recoded)
return pd.concat(recoded_columns, axis=1)
def _recode_input(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
return self._recode_set(table=table, columns=self.input_columns, validate=validate)
def _validate_output(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._do_validation_set(
table=transformed_columns,
columns=self.output_columns,
validation_type="output",
failed_only=failed_only,)
def _recode_output(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._recode_set(table=transformed_columns, columns=self.output_columns, validate=validate)
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
return pd.concat([
self._validate_input(table, failed_only=failed_only),
self._validate_output(table, failed_only=failed_only),
]).fillna(True)
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
return self._recode_output(self._recode_input(table, validate=validate), validate=validate)
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
CompoundColumn._validate_input
|
python
|
def _validate_input(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
return self._do_validation_set(
table=table,
columns=self.input_columns,
validation_type="input",
failed_only=failed_only,)
|
Return a dataframe of validation results for the appropriate series vs the vector of validators.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L243-L249
|
[
"def _do_validation_set(self, table: pd.DataFrame, columns, validation_type, failed_only=False) -> pd.DataFrame:\n \"\"\"Return a dataframe of validation results for the appropriate series vs the vector of validators.\"\"\"\n validations = []\n\n for column in columns:\n validation = column.validate(table=table, failed_only=failed_only)\n validation[\"column_name\"] = column.name\n validation[\"validation_type\"] = validation_type\n validations.append(validation)\n\n validation_table = pd.concat(validations)\n validation_table.index.name = 'row'\n\n return validation_table.reset_index().set_index([\"validation_type\", \"column_name\", \"row\"])\n"
] |
class CompoundColumn(BaseColumn):
"""Class representing multiple columns and the logic governing their transformation from source table to recoded table."""
def __init__(
self,
input_columns: t.List[Column],
output_columns: t.List[Column],
column_transform,) -> None:
"""Construct a new ``CompoundColumn`` object.
Args:
input_columns (list, Column): A list of ``Column`` objects representing column(s) from the SOURCE table.
output_columns (list, Column): A list of ``Column`` objects representing column(s) from the FINAL table.
column_transform (Callable): Function accepting the table object, performing transformations to it and returning a DataFrame containing the NEW columns only.
"""
self.input_columns = input_columns
self.output_columns = output_columns
self.column_transform = column_transform
def _do_validation_set(self, table: pd.DataFrame, columns, validation_type, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators."""
validations = []
for column in columns:
validation = column.validate(table=table, failed_only=failed_only)
validation["column_name"] = column.name
validation["validation_type"] = validation_type
validations.append(validation)
validation_table = pd.concat(validations)
validation_table.index.name = 'row'
return validation_table.reset_index().set_index(["validation_type", "column_name", "row"])
def _recode_set(self, table: pd.DataFrame, columns, validate=False) -> pd.DataFrame:
recoded_columns = []
for column in columns:
recoded = column.recode(table=table, validate=validate)
recoded_columns.append(recoded)
return pd.concat(recoded_columns, axis=1)
def _recode_input(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
return self._recode_set(table=table, columns=self.input_columns, validate=validate)
def _validate_output(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._do_validation_set(
table=transformed_columns,
columns=self.output_columns,
validation_type="output",
failed_only=failed_only,)
def _recode_output(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._recode_set(table=transformed_columns, columns=self.output_columns, validate=validate)
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
return pd.concat([
self._validate_input(table, failed_only=failed_only),
self._validate_output(table, failed_only=failed_only),
]).fillna(True)
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
return self._recode_output(self._recode_input(table, validate=validate), validate=validate)
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
CompoundColumn.validate
|
python
|
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
return pd.concat([
self._validate_input(table, failed_only=failed_only),
self._validate_output(table, failed_only=failed_only),
]).fillna(True)
|
Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L275-L285
|
[
"def _validate_input(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:\n \"\"\"Return a dataframe of validation results for the appropriate series vs the vector of validators.\"\"\"\n return self._do_validation_set(\n table=table,\n columns=self.input_columns,\n validation_type=\"input\",\n failed_only=failed_only,)\n",
"def _validate_output(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:\n transformed_columns = self.column_transform(table)\n return self._do_validation_set(\n table=transformed_columns,\n columns=self.output_columns,\n validation_type=\"output\",\n failed_only=failed_only,)\n"
] |
class CompoundColumn(BaseColumn):
"""Class representing multiple columns and the logic governing their transformation from source table to recoded table."""
def __init__(
self,
input_columns: t.List[Column],
output_columns: t.List[Column],
column_transform,) -> None:
"""Construct a new ``CompoundColumn`` object.
Args:
input_columns (list, Column): A list of ``Column`` objects representing column(s) from the SOURCE table.
output_columns (list, Column): A list of ``Column`` objects representing column(s) from the FINAL table.
column_transform (Callable): Function accepting the table object, performing transformations to it and returning a DataFrame containing the NEW columns only.
"""
self.input_columns = input_columns
self.output_columns = output_columns
self.column_transform = column_transform
def _do_validation_set(self, table: pd.DataFrame, columns, validation_type, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators."""
validations = []
for column in columns:
validation = column.validate(table=table, failed_only=failed_only)
validation["column_name"] = column.name
validation["validation_type"] = validation_type
validations.append(validation)
validation_table = pd.concat(validations)
validation_table.index.name = 'row'
return validation_table.reset_index().set_index(["validation_type", "column_name", "row"])
def _validate_input(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators."""
return self._do_validation_set(
table=table,
columns=self.input_columns,
validation_type="input",
failed_only=failed_only,)
def _recode_set(self, table: pd.DataFrame, columns, validate=False) -> pd.DataFrame:
recoded_columns = []
for column in columns:
recoded = column.recode(table=table, validate=validate)
recoded_columns.append(recoded)
return pd.concat(recoded_columns, axis=1)
def _recode_input(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
return self._recode_set(table=table, columns=self.input_columns, validate=validate)
def _validate_output(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._do_validation_set(
table=transformed_columns,
columns=self.output_columns,
validation_type="output",
failed_only=failed_only,)
def _recode_output(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._recode_set(table=transformed_columns, columns=self.output_columns, validate=validate)
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
"""Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
"""
return self._recode_output(self._recode_input(table, validate=validate), validate=validate)
|
xguse/table_enforcer
|
table_enforcer/main_classes.py
|
CompoundColumn.recode
|
python
|
def recode(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
return self._recode_output(self._recode_input(table, validate=validate), validate=validate)
|
Pass the appropriate columns through each recoder function sequentially and return the final result.
Args:
table (pd.DataFrame): A dataframe on which to apply recoding logic.
validate (bool): If ``True``, recoded table must pass validation tests.
|
train
|
https://github.com/xguse/table_enforcer/blob/f3137839574bf8ea933a14ea16a8acba45e3e0c3/table_enforcer/main_classes.py#L287-L294
|
[
"def _recode_input(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:\n return self._recode_set(table=table, columns=self.input_columns, validate=validate)\n",
"def _recode_output(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:\n transformed_columns = self.column_transform(table)\n return self._recode_set(table=transformed_columns, columns=self.output_columns, validate=validate)\n"
] |
class CompoundColumn(BaseColumn):
"""Class representing multiple columns and the logic governing their transformation from source table to recoded table."""
def __init__(
self,
input_columns: t.List[Column],
output_columns: t.List[Column],
column_transform,) -> None:
"""Construct a new ``CompoundColumn`` object.
Args:
input_columns (list, Column): A list of ``Column`` objects representing column(s) from the SOURCE table.
output_columns (list, Column): A list of ``Column`` objects representing column(s) from the FINAL table.
column_transform (Callable): Function accepting the table object, performing transformations to it and returning a DataFrame containing the NEW columns only.
"""
self.input_columns = input_columns
self.output_columns = output_columns
self.column_transform = column_transform
def _do_validation_set(self, table: pd.DataFrame, columns, validation_type, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators."""
validations = []
for column in columns:
validation = column.validate(table=table, failed_only=failed_only)
validation["column_name"] = column.name
validation["validation_type"] = validation_type
validations.append(validation)
validation_table = pd.concat(validations)
validation_table.index.name = 'row'
return validation_table.reset_index().set_index(["validation_type", "column_name", "row"])
def _validate_input(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators."""
return self._do_validation_set(
table=table,
columns=self.input_columns,
validation_type="input",
failed_only=failed_only,)
def _recode_set(self, table: pd.DataFrame, columns, validate=False) -> pd.DataFrame:
recoded_columns = []
for column in columns:
recoded = column.recode(table=table, validate=validate)
recoded_columns.append(recoded)
return pd.concat(recoded_columns, axis=1)
def _recode_input(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
return self._recode_set(table=table, columns=self.input_columns, validate=validate)
def _validate_output(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._do_validation_set(
table=transformed_columns,
columns=self.output_columns,
validation_type="output",
failed_only=failed_only,)
def _recode_output(self, table: pd.DataFrame, validate=False) -> pd.DataFrame:
transformed_columns = self.column_transform(table)
return self._recode_set(table=transformed_columns, columns=self.output_columns, validate=validate)
def validate(self, table: pd.DataFrame, failed_only=False) -> pd.DataFrame:
"""Return a dataframe of validation results for the appropriate series vs the vector of validators.
Args:
table (pd.DataFrame): A dataframe on which to apply validation logic.
failed_only (bool): If ``True``: return only the indexes that failed to validate.
"""
return pd.concat([
self._validate_input(table, failed_only=failed_only),
self._validate_output(table, failed_only=failed_only),
]).fillna(True)
|
snbuback/django_services
|
django_services/api/utils.py
|
get_view_doc
|
python
|
def get_view_doc(view, html=True):
try:
description = view.__doc__ or ''
description = formatting.dedent(smart_text(description))
# include filters in description
filter_fields = get_filter_fields(view)
if filter_fields:
filter_doc = ['\n\n\n## Filters', '']
for f in filter_fields:
filter_doc.append('- `%s`' % f)
description += '\n'.join(filter_doc)
# replace {api_url} by current base url
api_url = "/api"
description = description.replace('{api_url}', api_url)
if html:
description = formatting.markup_description(description)
return description
except:
import traceback
traceback.print_exc()
raise
|
Build view documentation. Return in html format.
If you want in markdown format, use html=False
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/api/utils.py#L16-L42
|
[
"def get_filter_fields(view):\n filter_fields = list(getattr(view, 'filter_fields', []))\n for filter_backend in getattr(view, 'filter_backends', []):\n if hasattr(filter_backend, 'filter_field'):\n filter_fields.append(filter_backend.filter_field)\n return filter_fields\n"
] |
# -*- coding:utf-8 -*-
from django.utils.safestring import mark_safe
from rest_framework.utils import formatting
from rest_framework.compat import smart_text
def get_filter_fields(view):
filter_fields = list(getattr(view, 'filter_fields', []))
for filter_backend in getattr(view, 'filter_backends', []):
if hasattr(filter_backend, 'filter_field'):
filter_fields.append(filter_backend.filter_field)
return filter_fields
def wrap_accordion(text_body_list):
"""
Wrap text_body_list in twitter bootstrap accordion. text_body_list must be list with tuple with title and body
"""
html = ['<div class="accordion" id="accordion2">']
for i, item in enumerate(text_body_list):
params = {
'index': i,
'title': item[0],
'body': item[1]
}
html.append('''
<div class="accordion-group">
<div class="accordion-heading">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion2" href="#collapse%(index)d">
%(title)s
</a>
</div>
<div id="collapse%(index)d" class="accordion-body collapse">
<div class="accordion-inner">
%(body)s
</div>
</div>
</div>
<br/>
''' % params)
return mark_safe('\n'.join(html))
|
snbuback/django_services
|
django_services/api/utils.py
|
wrap_accordion
|
python
|
def wrap_accordion(text_body_list):
html = ['<div class="accordion" id="accordion2">']
for i, item in enumerate(text_body_list):
params = {
'index': i,
'title': item[0],
'body': item[1]
}
html.append('''
<div class="accordion-group">
<div class="accordion-heading">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion2" href="#collapse%(index)d">
%(title)s
</a>
</div>
<div id="collapse%(index)d" class="accordion-body collapse">
<div class="accordion-inner">
%(body)s
</div>
</div>
</div>
<br/>
''' % params)
return mark_safe('\n'.join(html))
|
Wrap text_body_list in twitter bootstrap accordion. text_body_list must be list with tuple with title and body
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/api/utils.py#L45-L73
| null |
# -*- coding:utf-8 -*-
from django.utils.safestring import mark_safe
from rest_framework.utils import formatting
from rest_framework.compat import smart_text
def get_filter_fields(view):
filter_fields = list(getattr(view, 'filter_fields', []))
for filter_backend in getattr(view, 'filter_backends', []):
if hasattr(filter_backend, 'filter_field'):
filter_fields.append(filter_backend.filter_field)
return filter_fields
def get_view_doc(view, html=True):
"""
Build view documentation. Return in html format.
If you want in markdown format, use html=False
"""
try:
description = view.__doc__ or ''
description = formatting.dedent(smart_text(description))
# include filters in description
filter_fields = get_filter_fields(view)
if filter_fields:
filter_doc = ['\n\n\n## Filters', '']
for f in filter_fields:
filter_doc.append('- `%s`' % f)
description += '\n'.join(filter_doc)
# replace {api_url} by current base url
api_url = "/api"
description = description.replace('{api_url}', api_url)
if html:
description = formatting.markup_description(description)
return description
except:
import traceback
traceback.print_exc()
raise
|
snbuback/django_services
|
django_services/service/base.py
|
ListServiceMixin.list
|
python
|
def list(self, **filters):
LOG.debug(u'Querying %s by filters=%s', self.model_class.__name__, filters)
query = self.__queryset__()
perm = build_permission_name(self.model_class, 'view')
LOG.debug(u"Checking if user %s has_perm %s" % (self.user, perm))
query_with_permission = filter(lambda o: self.user.has_perm(perm, obj=o), query)
ids = map(lambda o: o.pk, query_with_permission)
# FIXME: Return to query again without use database
queryset = self.__queryset__().filter(pk__in=ids)
related = getattr(self, 'select_related', None)
if related:
queryset = queryset.select_related(*related)
return queryset
|
Returns a queryset filtering object by user permission. If you want,
you can specify filter arguments.
See https://docs.djangoproject.com/en/dev/ref/models/querysets/#filter for more details
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/base.py#L51-L70
|
[
"def build_permission_name(model_class, prefix):\n \"\"\" Build permission name for model_class (like 'app.add_model'). \"\"\"\n model_name = model_class._meta.object_name.lower()\n app_label = model_class._meta.app_label\n action_name = prefix\n perm = '%s.%s_%s' % (app_label, action_name, model_name)\n return perm\n",
"def __queryset__(self):\n \"\"\" Returns basic queryset \"\"\"\n return self.model_class.objects.get_query_set()\n"
] |
class ListServiceMixin(object):
"""
Performe pre-filter in object list to avoid unauthorized access
"""
@nocheckpermission()
def __queryset__(self):
""" Returns basic queryset """
return self.model_class.objects.get_query_set()
@nocheckpermission()
def get(self, pk=None, **filters):
""" Retrieve an object instance. If a single argument is supplied, object is queried by
primary key, else filter queries will be applyed.
If more than one object was found raise MultipleObjectsReturned.
If no object found, raise DoesNotExist.
Raise PermissionDenied if user has no permission 'view' on object.
See https://docs.djangoproject.com/en/dev/ref/models/querysets/#get for more details
"""
LOG.debug(u'Querying (GET) %s by pk=%s and filters=%s', self.model_class.__name__, repr(pk), filters)
query = self.model_class.objects.filter(**filters)
if pk is None:
obj = query.get()
else:
if (isinstance(pk, basestring) and pk.isdigit()) or isinstance(pk, numbers.Number):
obj = query.get(pk=pk)
elif 'slug' in self.model_class._meta.get_all_field_names():
obj = query.get(slug=pk)
else:
# pk is not a number and model has no slug. So, object don't exists.
raise self.model_class.DoesNotExist()
perm = build_permission_name(self.model_class, 'view')
if not self.user.has_perm(perm, obj=obj):
raise PermissionDenied(u'User %s has no permission %s for object %s' % (self.user, perm, obj))
return obj
|
snbuback/django_services
|
django_services/service/base.py
|
ListServiceMixin.get
|
python
|
def get(self, pk=None, **filters):
LOG.debug(u'Querying (GET) %s by pk=%s and filters=%s', self.model_class.__name__, repr(pk), filters)
query = self.model_class.objects.filter(**filters)
if pk is None:
obj = query.get()
else:
if (isinstance(pk, basestring) and pk.isdigit()) or isinstance(pk, numbers.Number):
obj = query.get(pk=pk)
elif 'slug' in self.model_class._meta.get_all_field_names():
obj = query.get(slug=pk)
else:
# pk is not a number and model has no slug. So, object don't exists.
raise self.model_class.DoesNotExist()
perm = build_permission_name(self.model_class, 'view')
if not self.user.has_perm(perm, obj=obj):
raise PermissionDenied(u'User %s has no permission %s for object %s' % (self.user, perm, obj))
return obj
|
Retrieve an object instance. If a single argument is supplied, object is queried by
primary key, else filter queries will be applyed.
If more than one object was found raise MultipleObjectsReturned.
If no object found, raise DoesNotExist.
Raise PermissionDenied if user has no permission 'view' on object.
See https://docs.djangoproject.com/en/dev/ref/models/querysets/#get for more details
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/base.py#L77-L102
|
[
"def build_permission_name(model_class, prefix):\n \"\"\" Build permission name for model_class (like 'app.add_model'). \"\"\"\n model_name = model_class._meta.object_name.lower()\n app_label = model_class._meta.app_label\n action_name = prefix\n perm = '%s.%s_%s' % (app_label, action_name, model_name)\n return perm\n"
] |
class ListServiceMixin(object):
"""
Performe pre-filter in object list to avoid unauthorized access
"""
@nocheckpermission()
def list(self, **filters):
""" Returns a queryset filtering object by user permission. If you want,
you can specify filter arguments.
See https://docs.djangoproject.com/en/dev/ref/models/querysets/#filter for more details
"""
LOG.debug(u'Querying %s by filters=%s', self.model_class.__name__, filters)
query = self.__queryset__()
perm = build_permission_name(self.model_class, 'view')
LOG.debug(u"Checking if user %s has_perm %s" % (self.user, perm))
query_with_permission = filter(lambda o: self.user.has_perm(perm, obj=o), query)
ids = map(lambda o: o.pk, query_with_permission)
# FIXME: Return to query again without use database
queryset = self.__queryset__().filter(pk__in=ids)
related = getattr(self, 'select_related', None)
if related:
queryset = queryset.select_related(*related)
return queryset
def __queryset__(self):
""" Returns basic queryset """
return self.model_class.objects.get_query_set()
@nocheckpermission()
|
snbuback/django_services
|
django_services/admin/__init__.py
|
DjangoServicesAdmin.get_object
|
python
|
def get_object(self, request, object_id):
queryset = self.queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError):
return None
|
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/admin/__init__.py#L36-L48
|
[
"def queryset(self, request):\n service = self.get_service(request)\n qs = service.list()\n return qs\n"
] |
class DjangoServicesAdmin(admin.ModelAdmin):
actions = ['delete_selected']
def __init__(self, *args, **kwargs):
if getattr(self, 'service_class', None) is None:
raise RuntimeError("Missing service_class attribute on %s" % self.__class__.__name__)
super(DjangoServicesAdmin, self).__init__(*args, **kwargs)
def get_service(self, request):
return self.service_class(request)
def queryset(self, request):
service = self.get_service(request)
qs = service.list()
return qs
def delete_model(self, request, obj):
service = self.get_service(request)
service.delete(obj)
def save_model(self, request, obj, form, change):
service = self.get_service(request)
if change:
service.update(obj)
else:
service.create(obj)
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overriden by the user in subclasses.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
@csrf_protect_m
@transaction.commit_on_success
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
perms_needed = False # cheat! Only object permission is required
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_unicode(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
if not self.has_change_permission(request, None):
return HttpResponseRedirect(reverse('admin:index',
current_app=self.admin_site.name))
return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.module_name),
current_app=self.admin_site.name))
object_name = force_unicode(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def delete_selected(self, request, queryset):
"""
Overrides django's default delete_selected action do call _model_.delete() to
pass through services
Default action which deletes the selected objects.
This action first displays a confirmation page whichs shows all the
deleteable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
Next, it delets all selected objects and redirects back to the change list.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, opts, request.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if perms_needed:
raise PermissionDenied
n = queryset.count()
if n:
for obj in queryset:
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
#remove the object
self.delete_model(request, obj)
self.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
})
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}
# Display the confirmation page
return TemplateResponse(request, self.delete_selected_confirmation_template or [
"admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html"
], context, current_app=self.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
snbuback/django_services
|
django_services/admin/__init__.py
|
DjangoServicesAdmin.has_add_permission
|
python
|
def has_add_permission(self, request):
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
|
Returns True if the given request has permission to add an object.
Can be overriden by the user in subclasses.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/admin/__init__.py#L61-L67
| null |
class DjangoServicesAdmin(admin.ModelAdmin):
actions = ['delete_selected']
def __init__(self, *args, **kwargs):
if getattr(self, 'service_class', None) is None:
raise RuntimeError("Missing service_class attribute on %s" % self.__class__.__name__)
super(DjangoServicesAdmin, self).__init__(*args, **kwargs)
def get_service(self, request):
return self.service_class(request)
def queryset(self, request):
service = self.get_service(request)
qs = service.list()
return qs
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError):
return None
def delete_model(self, request, obj):
service = self.get_service(request)
service.delete(obj)
def save_model(self, request, obj, form, change):
service = self.get_service(request)
if change:
service.update(obj)
else:
service.create(obj)
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
@csrf_protect_m
@transaction.commit_on_success
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
perms_needed = False # cheat! Only object permission is required
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_unicode(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
if not self.has_change_permission(request, None):
return HttpResponseRedirect(reverse('admin:index',
current_app=self.admin_site.name))
return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.module_name),
current_app=self.admin_site.name))
object_name = force_unicode(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def delete_selected(self, request, queryset):
"""
Overrides django's default delete_selected action do call _model_.delete() to
pass through services
Default action which deletes the selected objects.
This action first displays a confirmation page whichs shows all the
deleteable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
Next, it delets all selected objects and redirects back to the change list.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, opts, request.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if perms_needed:
raise PermissionDenied
n = queryset.count()
if n:
for obj in queryset:
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
#remove the object
self.delete_model(request, obj)
self.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
})
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}
# Display the confirmation page
return TemplateResponse(request, self.delete_selected_confirmation_template or [
"admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html"
], context, current_app=self.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
snbuback/django_services
|
django_services/admin/__init__.py
|
DjangoServicesAdmin.has_change_permission
|
python
|
def has_change_permission(self, request, obj=None):
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
|
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/admin/__init__.py#L69-L81
| null |
class DjangoServicesAdmin(admin.ModelAdmin):
actions = ['delete_selected']
def __init__(self, *args, **kwargs):
if getattr(self, 'service_class', None) is None:
raise RuntimeError("Missing service_class attribute on %s" % self.__class__.__name__)
super(DjangoServicesAdmin, self).__init__(*args, **kwargs)
def get_service(self, request):
return self.service_class(request)
def queryset(self, request):
service = self.get_service(request)
qs = service.list()
return qs
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError):
return None
def delete_model(self, request, obj):
service = self.get_service(request)
service.delete(obj)
def save_model(self, request, obj, form, change):
service = self.get_service(request)
if change:
service.update(obj)
else:
service.create(obj)
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overriden by the user in subclasses.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
@csrf_protect_m
@transaction.commit_on_success
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
perms_needed = False # cheat! Only object permission is required
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_unicode(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
if not self.has_change_permission(request, None):
return HttpResponseRedirect(reverse('admin:index',
current_app=self.admin_site.name))
return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.module_name),
current_app=self.admin_site.name))
object_name = force_unicode(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def delete_selected(self, request, queryset):
"""
Overrides django's default delete_selected action do call _model_.delete() to
pass through services
Default action which deletes the selected objects.
This action first displays a confirmation page whichs shows all the
deleteable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
Next, it delets all selected objects and redirects back to the change list.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, opts, request.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if perms_needed:
raise PermissionDenied
n = queryset.count()
if n:
for obj in queryset:
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
#remove the object
self.delete_model(request, obj)
self.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
})
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}
# Display the confirmation page
return TemplateResponse(request, self.delete_selected_confirmation_template or [
"admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html"
], context, current_app=self.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
snbuback/django_services
|
django_services/admin/__init__.py
|
DjangoServicesAdmin.has_delete_permission
|
python
|
def has_delete_permission(self, request, obj=None):
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
|
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/admin/__init__.py#L83-L95
| null |
class DjangoServicesAdmin(admin.ModelAdmin):
actions = ['delete_selected']
def __init__(self, *args, **kwargs):
if getattr(self, 'service_class', None) is None:
raise RuntimeError("Missing service_class attribute on %s" % self.__class__.__name__)
super(DjangoServicesAdmin, self).__init__(*args, **kwargs)
def get_service(self, request):
return self.service_class(request)
def queryset(self, request):
service = self.get_service(request)
qs = service.list()
return qs
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError):
return None
def delete_model(self, request, obj):
service = self.get_service(request)
service.delete(obj)
def save_model(self, request, obj, form, change):
service = self.get_service(request)
if change:
service.update(obj)
else:
service.create(obj)
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overriden by the user in subclasses.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
@csrf_protect_m
@transaction.commit_on_success
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
perms_needed = False # cheat! Only object permission is required
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_unicode(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
if not self.has_change_permission(request, None):
return HttpResponseRedirect(reverse('admin:index',
current_app=self.admin_site.name))
return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.module_name),
current_app=self.admin_site.name))
object_name = force_unicode(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def delete_selected(self, request, queryset):
"""
Overrides django's default delete_selected action do call _model_.delete() to
pass through services
Default action which deletes the selected objects.
This action first displays a confirmation page whichs shows all the
deleteable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
Next, it delets all selected objects and redirects back to the change list.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, opts, request.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if perms_needed:
raise PermissionDenied
n = queryset.count()
if n:
for obj in queryset:
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
#remove the object
self.delete_model(request, obj)
self.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
})
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}
# Display the confirmation page
return TemplateResponse(request, self.delete_selected_confirmation_template or [
"admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html"
], context, current_app=self.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
snbuback/django_services
|
django_services/admin/__init__.py
|
DjangoServicesAdmin.delete_view
|
python
|
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
perms_needed = False # cheat! Only object permission is required
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_unicode(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
if not self.has_change_permission(request, None):
return HttpResponseRedirect(reverse('admin:index',
current_app=self.admin_site.name))
return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.module_name),
current_app=self.admin_site.name))
object_name = force_unicode(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
|
The 'delete' admin view for this model.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/admin/__init__.py#L99-L159
|
[
"def get_object(self, request, object_id):\n \"\"\"\n Returns an instance matching the primary key provided. ``None`` is\n returned if no match is found (or the object_id failed validation\n against the primary key field).\n \"\"\"\n queryset = self.queryset(request)\n model = queryset.model\n try:\n object_id = model._meta.pk.to_python(object_id)\n return queryset.get(pk=object_id)\n except (model.DoesNotExist, ValidationError):\n return None\n",
"def has_delete_permission(self, request, obj=None):\n \"\"\"\n Returns True if the given request has permission to change the given\n Django model instance, the default implementation doesn't examine the\n `obj` parameter.\n\n Can be overriden by the user in subclasses. In such case it should\n return True if the given request has permission to delete the `obj`\n model instance. If `obj` is None, this should return True if the given\n request has permission to delete *any* object of the given type.\n \"\"\"\n opts = self.opts\n return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)\n"
] |
class DjangoServicesAdmin(admin.ModelAdmin):
actions = ['delete_selected']
def __init__(self, *args, **kwargs):
if getattr(self, 'service_class', None) is None:
raise RuntimeError("Missing service_class attribute on %s" % self.__class__.__name__)
super(DjangoServicesAdmin, self).__init__(*args, **kwargs)
def get_service(self, request):
return self.service_class(request)
def queryset(self, request):
service = self.get_service(request)
qs = service.list()
return qs
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError):
return None
def delete_model(self, request, obj):
service = self.get_service(request)
service.delete(obj)
def save_model(self, request, obj, form, change):
service = self.get_service(request)
if change:
service.update(obj)
else:
service.create(obj)
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overriden by the user in subclasses.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
@csrf_protect_m
@transaction.commit_on_success
def delete_selected(self, request, queryset):
"""
Overrides django's default delete_selected action do call _model_.delete() to
pass through services
Default action which deletes the selected objects.
This action first displays a confirmation page whichs shows all the
deleteable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
Next, it delets all selected objects and redirects back to the change list.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, opts, request.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if perms_needed:
raise PermissionDenied
n = queryset.count()
if n:
for obj in queryset:
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
#remove the object
self.delete_model(request, obj)
self.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
})
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}
# Display the confirmation page
return TemplateResponse(request, self.delete_selected_confirmation_template or [
"admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html"
], context, current_app=self.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
snbuback/django_services
|
django_services/admin/__init__.py
|
DjangoServicesAdmin.delete_selected
|
python
|
def delete_selected(self, request, queryset):
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, opts, request.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if perms_needed:
raise PermissionDenied
n = queryset.count()
if n:
for obj in queryset:
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
#remove the object
self.delete_model(request, obj)
self.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
})
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
}
# Display the confirmation page
return TemplateResponse(request, self.delete_selected_confirmation_template or [
"admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html"
], context, current_app=self.admin_site.name)
|
Overrides django's default delete_selected action do call _model_.delete() to
pass through services
Default action which deletes the selected objects.
This action first displays a confirmation page whichs shows all the
deleteable objects, or, if the user has no permission one of the related
childs (foreignkeys), a "permission denied" message.
Next, it delets all selected objects and redirects back to the change list.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/admin/__init__.py#L161-L234
|
[
"def has_delete_permission(self, request, obj=None):\n \"\"\"\n Returns True if the given request has permission to change the given\n Django model instance, the default implementation doesn't examine the\n `obj` parameter.\n\n Can be overriden by the user in subclasses. In such case it should\n return True if the given request has permission to delete the `obj`\n model instance. If `obj` is None, this should return True if the given\n request has permission to delete *any* object of the given type.\n \"\"\"\n opts = self.opts\n return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)\n"
] |
class DjangoServicesAdmin(admin.ModelAdmin):
actions = ['delete_selected']
def __init__(self, *args, **kwargs):
if getattr(self, 'service_class', None) is None:
raise RuntimeError("Missing service_class attribute on %s" % self.__class__.__name__)
super(DjangoServicesAdmin, self).__init__(*args, **kwargs)
def get_service(self, request):
return self.service_class(request)
def queryset(self, request):
service = self.get_service(request)
qs = service.list()
return qs
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError):
return None
def delete_model(self, request, obj):
service = self.get_service(request)
service.delete(obj)
def save_model(self, request, obj, form, change):
service = self.get_service(request)
if change:
service.update(obj)
else:
service.create(obj)
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overriden by the user in subclasses.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overriden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
@csrf_protect_m
@transaction.commit_on_success
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
perms_needed = False # cheat! Only object permission is required
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_unicode(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
if not self.has_change_permission(request, None):
return HttpResponseRedirect(reverse('admin:index',
current_app=self.admin_site.name))
return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.module_name),
current_app=self.admin_site.name))
object_name = force_unicode(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
|
snbuback/django_services
|
django_services/service/core.py
|
build_permission_name
|
python
|
def build_permission_name(model_class, prefix):
model_name = model_class._meta.object_name.lower()
app_label = model_class._meta.app_label
action_name = prefix
perm = '%s.%s_%s' % (app_label, action_name, model_name)
return perm
|
Build permission name for model_class (like 'app.add_model').
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/core.py#L15-L21
| null |
# encoding: utf-8
import logging
from functools import wraps
from django.core.exceptions import ValidationError, PermissionDenied
LOG = logging.getLogger(__name__)
LOG_PERM = logging.getLogger('%s.perm' % __name__)
METHOD_PERMISSION_TRANSLATION = {
'create': 'add',
'update': 'change'
}
class checkpermission(object):
"""
Decorator only to BaseService methods, to protect it from unauthorized calls.
If no arguments given, permission will be build from method name.
For example, for method 'start' in VirtualMachineService
the default permission required will be 'virtualmachine.start_virtualmachine'
where the first virtualmachine is appname (in this situation same as class name)
If you like, you can customize permission, only with prefix. For method called
'start', you can specify:
@checkpermission(prefix="iniciar")
to use permission 'virtualmachine.iniciar_virtualmachine'
Or if you want total control, you can specify entire permission as:
@checkpermission(permission="virtualmachine.myops_vm")
"""
def __init__(self, prefix=None, permission=None):
self.prefix = prefix
self.permission = permission
def __call__(self, func):
@wraps(func)
def __check__(service, *args, **kwargs):
perm_name = self.get_permission(service, func)
obj = args[0] if len(args) else None
call_name = "%s.%s" % (type(service).__name__, func.__name__)
self.has_perm(service, perm_name, obj, call_name)
return func(service, *args, **kwargs)
__check__.checkpermission = self
return __check__
def get_permission(self, service, func):
"""
Build permission required to access function "func"
"""
if self.permission:
perm = self.permission
elif self.prefix is False:
# No permission will be checked
perm = False
elif self.prefix:
perm = build_permission_name(service.model_class, self.prefix)
else:
name = func.__name__
# check if there is a translation between default permission and method name
action_name = METHOD_PERMISSION_TRANSLATION.get(name, name)
perm = build_permission_name(service.model_class, action_name)
return perm
def has_perm(self, service, perm_name, obj, call_name):
"""
Raise PermissionDenied if user has no permission in object
"""
user = service.user
if not (perm_name is False):
if not user.has_perm(perm_name, obj=obj):
LOG_PERM.warn(
u'User %s has no permission %s. Access to %s with obj=%s',
user, perm_name, call_name, obj)
raise PermissionDenied(u'User %s has no permission %s for object %s' % (service.user, perm_name, obj))
LOG_PERM.debug(
u'User %s was authorized to access %s with permission %s with obj=%s',
user, call_name, perm_name, obj)
class nocheckpermission(checkpermission):
"""
Disable checkpermission for a method
"""
def __init__(self):
super(nocheckpermission, self).__init__(False)
class CheckMethodPermissions(type):
def __new__(cls, classname, bases, classdict):
no_perm = ('model_class',)
for attr, item in classdict.items():
if not (attr in no_perm) and not attr.startswith('_') and callable(item):
if not hasattr(item, 'checkpermission'):
# comment line bellow if you want disable checkpermission
classdict[attr] = checkpermission()(item)
return type.__new__(cls, classname, bases, classdict)
class BaseService(object):
__metaclass__ = CheckMethodPermissions
# no_perm = ('model_class',)
'''
Base service class. All bussiness logic must be in services class.
Methods, not starting with '_', will have check permission before call it.
If you want to change permission required to execute that method, you can
put an attribute "perm" in method. If perm is string, it is own permission name.
If perm is method (or lambda), it will executed before and need to raise PermissionError
if no permission
'''
def __init__(self, request=None, user=None):
assert request or user, 'Both request and user are None'
self.request = request
self.user = user or request.user
assert self.model_class is not None, 'model_class'
@nocheckpermission()
def validate(self, obj):
""" Raises django.core.exceptions.ValidationError if any validation error exists """
if not isinstance(obj, self.model_class):
raise ValidationError('Invalid object(%s) for service %s' % (type(obj), type(self)))
LOG.debug(u'Object %s state: %s', self.model_class, obj.__dict__)
obj.full_clean()
@nocheckpermission()
def filter_objects(self, objects, perm=None):
""" Return only objects with specified permission in objects list. If perm not specified, 'view' perm will be used. """
if perm is None:
perm = build_permission_name(self.model_class, 'view')
return filter(lambda o: self.user.has_perm(perm, obj=o), objects)
def __repr__(self):
return '%s(user=%s)' % (type(self).__name__, self.user)
def __str__(self):
return repr(self)
|
snbuback/django_services
|
django_services/service/core.py
|
checkpermission.get_permission
|
python
|
def get_permission(self, service, func):
if self.permission:
perm = self.permission
elif self.prefix is False:
# No permission will be checked
perm = False
elif self.prefix:
perm = build_permission_name(service.model_class, self.prefix)
else:
name = func.__name__
# check if there is a translation between default permission and method name
action_name = METHOD_PERMISSION_TRANSLATION.get(name, name)
perm = build_permission_name(service.model_class, action_name)
return perm
|
Build permission required to access function "func"
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/core.py#L58-L78
|
[
"def build_permission_name(model_class, prefix):\n \"\"\" Build permission name for model_class (like 'app.add_model'). \"\"\"\n model_name = model_class._meta.object_name.lower()\n app_label = model_class._meta.app_label\n action_name = prefix\n perm = '%s.%s_%s' % (app_label, action_name, model_name)\n return perm\n"
] |
class checkpermission(object):
"""
Decorator only to BaseService methods, to protect it from unauthorized calls.
If no arguments given, permission will be build from method name.
For example, for method 'start' in VirtualMachineService
the default permission required will be 'virtualmachine.start_virtualmachine'
where the first virtualmachine is appname (in this situation same as class name)
If you like, you can customize permission, only with prefix. For method called
'start', you can specify:
@checkpermission(prefix="iniciar")
to use permission 'virtualmachine.iniciar_virtualmachine'
Or if you want total control, you can specify entire permission as:
@checkpermission(permission="virtualmachine.myops_vm")
"""
def __init__(self, prefix=None, permission=None):
self.prefix = prefix
self.permission = permission
def __call__(self, func):
@wraps(func)
def __check__(service, *args, **kwargs):
perm_name = self.get_permission(service, func)
obj = args[0] if len(args) else None
call_name = "%s.%s" % (type(service).__name__, func.__name__)
self.has_perm(service, perm_name, obj, call_name)
return func(service, *args, **kwargs)
__check__.checkpermission = self
return __check__
def has_perm(self, service, perm_name, obj, call_name):
"""
Raise PermissionDenied if user has no permission in object
"""
user = service.user
if not (perm_name is False):
if not user.has_perm(perm_name, obj=obj):
LOG_PERM.warn(
u'User %s has no permission %s. Access to %s with obj=%s',
user, perm_name, call_name, obj)
raise PermissionDenied(u'User %s has no permission %s for object %s' % (service.user, perm_name, obj))
LOG_PERM.debug(
u'User %s was authorized to access %s with permission %s with obj=%s',
user, call_name, perm_name, obj)
|
snbuback/django_services
|
django_services/service/core.py
|
checkpermission.has_perm
|
python
|
def has_perm(self, service, perm_name, obj, call_name):
user = service.user
if not (perm_name is False):
if not user.has_perm(perm_name, obj=obj):
LOG_PERM.warn(
u'User %s has no permission %s. Access to %s with obj=%s',
user, perm_name, call_name, obj)
raise PermissionDenied(u'User %s has no permission %s for object %s' % (service.user, perm_name, obj))
LOG_PERM.debug(
u'User %s was authorized to access %s with permission %s with obj=%s',
user, call_name, perm_name, obj)
|
Raise PermissionDenied if user has no permission in object
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/core.py#L80-L94
| null |
class checkpermission(object):
"""
Decorator only to BaseService methods, to protect it from unauthorized calls.
If no arguments given, permission will be build from method name.
For example, for method 'start' in VirtualMachineService
the default permission required will be 'virtualmachine.start_virtualmachine'
where the first virtualmachine is appname (in this situation same as class name)
If you like, you can customize permission, only with prefix. For method called
'start', you can specify:
@checkpermission(prefix="iniciar")
to use permission 'virtualmachine.iniciar_virtualmachine'
Or if you want total control, you can specify entire permission as:
@checkpermission(permission="virtualmachine.myops_vm")
"""
def __init__(self, prefix=None, permission=None):
self.prefix = prefix
self.permission = permission
def __call__(self, func):
@wraps(func)
def __check__(service, *args, **kwargs):
perm_name = self.get_permission(service, func)
obj = args[0] if len(args) else None
call_name = "%s.%s" % (type(service).__name__, func.__name__)
self.has_perm(service, perm_name, obj, call_name)
return func(service, *args, **kwargs)
__check__.checkpermission = self
return __check__
def get_permission(self, service, func):
"""
Build permission required to access function "func"
"""
if self.permission:
perm = self.permission
elif self.prefix is False:
# No permission will be checked
perm = False
elif self.prefix:
perm = build_permission_name(service.model_class, self.prefix)
else:
name = func.__name__
# check if there is a translation between default permission and method name
action_name = METHOD_PERMISSION_TRANSLATION.get(name, name)
perm = build_permission_name(service.model_class, action_name)
return perm
|
snbuback/django_services
|
django_services/service/core.py
|
BaseService.validate
|
python
|
def validate(self, obj):
if not isinstance(obj, self.model_class):
raise ValidationError('Invalid object(%s) for service %s' % (type(obj), type(self)))
LOG.debug(u'Object %s state: %s', self.model_class, obj.__dict__)
obj.full_clean()
|
Raises django.core.exceptions.ValidationError if any validation error exists
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/core.py#L142-L148
| null |
class BaseService(object):
__metaclass__ = CheckMethodPermissions
# no_perm = ('model_class',)
'''
Base service class. All bussiness logic must be in services class.
Methods, not starting with '_', will have check permission before call it.
If you want to change permission required to execute that method, you can
put an attribute "perm" in method. If perm is string, it is own permission name.
If perm is method (or lambda), it will executed before and need to raise PermissionError
if no permission
'''
def __init__(self, request=None, user=None):
assert request or user, 'Both request and user are None'
self.request = request
self.user = user or request.user
assert self.model_class is not None, 'model_class'
@nocheckpermission()
@nocheckpermission()
def filter_objects(self, objects, perm=None):
""" Return only objects with specified permission in objects list. If perm not specified, 'view' perm will be used. """
if perm is None:
perm = build_permission_name(self.model_class, 'view')
return filter(lambda o: self.user.has_perm(perm, obj=o), objects)
def __repr__(self):
return '%s(user=%s)' % (type(self).__name__, self.user)
def __str__(self):
return repr(self)
|
snbuback/django_services
|
django_services/service/core.py
|
BaseService.filter_objects
|
python
|
def filter_objects(self, objects, perm=None):
if perm is None:
perm = build_permission_name(self.model_class, 'view')
return filter(lambda o: self.user.has_perm(perm, obj=o), objects)
|
Return only objects with specified permission in objects list. If perm not specified, 'view' perm will be used.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/service/core.py#L151-L155
|
[
"def build_permission_name(model_class, prefix):\n \"\"\" Build permission name for model_class (like 'app.add_model'). \"\"\"\n model_name = model_class._meta.object_name.lower()\n app_label = model_class._meta.app_label\n action_name = prefix\n perm = '%s.%s_%s' % (app_label, action_name, model_name)\n return perm\n"
] |
class BaseService(object):
__metaclass__ = CheckMethodPermissions
# no_perm = ('model_class',)
'''
Base service class. All bussiness logic must be in services class.
Methods, not starting with '_', will have check permission before call it.
If you want to change permission required to execute that method, you can
put an attribute "perm" in method. If perm is string, it is own permission name.
If perm is method (or lambda), it will executed before and need to raise PermissionError
if no permission
'''
def __init__(self, request=None, user=None):
assert request or user, 'Both request and user are None'
self.request = request
self.user = user or request.user
assert self.model_class is not None, 'model_class'
@nocheckpermission()
def validate(self, obj):
""" Raises django.core.exceptions.ValidationError if any validation error exists """
if not isinstance(obj, self.model_class):
raise ValidationError('Invalid object(%s) for service %s' % (type(obj), type(self)))
LOG.debug(u'Object %s state: %s', self.model_class, obj.__dict__)
obj.full_clean()
@nocheckpermission()
def __repr__(self):
return '%s(user=%s)' % (type(self).__name__, self.user)
def __str__(self):
return repr(self)
|
snbuback/django_services
|
django_services/api/api.py
|
exception_translation
|
python
|
def exception_translation(func):
@wraps(func)
def decorator(*arg, **kwargs):
try:
return func(*arg, **kwargs)
except InvalidOperationException, e:
return Response(status=status.HTTP_412_PRECONDITION_FAILED, data={'detail': e.message}, headers={'Content-Type': 'application/json'})
return decorator
|
Catch exception and build correct api response for it.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/api/api.py#L26-L36
| null |
# -*- coding:utf-8 -*-
import logging
import json
from functools import wraps
from django.http import Http404
from rest_framework import generics, mixins, status, viewsets
from rest_framework.response import Response
from django_services.service.exceptions import InvalidOperationException
LIST = 'list'
CREATE = 'create'
RETRIEVE = 'retrieve'
UPDATE = 'update'
DESTROY = 'destroy'
DEFAULT_OPERATIONS = frozenset([LIST, RETRIEVE])
ALL_OPERATIONS = frozenset([LIST, CREATE, RETRIEVE, UPDATE, DESTROY])
LOG = logging.getLogger(__name__)
def make_json(detail):
return json.dumps({'detail': detail})
def getattr_in_cls_list(cls_list, attr, default):
""" Search for an attribute (attr) in class list (cls_list). Returns
attribute value if exists or None if not. """
for cls in cls_list:
if hasattr(cls, attr):
return getattr(cls, attr)
return default
class create_api_class(type):
def __new__(mcs, name, bases, attrs):
if name == 'DjangoServiceAPI':
# use default class creator
return type.__new__(mcs, name, bases, attrs)
new_bases = list(bases)
# remove object class
if object in new_bases:
new_bases.remove(object)
if not 'model' in attrs:
attrs['model'] = attrs['service_class'].model_class
# extract operations attribute. Try to search subclasses
if 'operations' in attrs:
operations = attrs['operations']
else:
operations = getattr_in_cls_list(new_bases, 'operations', DEFAULT_OPERATIONS)
# verify if all operations are known.
if not ALL_OPERATIONS.issuperset(operations):
raise RuntimeError("Invalid operations: %s in %s" % (list(set(operations) - ALL_OPERATIONS), name))
if LIST in operations:
new_bases.append(mixins.ListModelMixin)
if CREATE in operations:
new_bases.append(CreateModelUsingService)
if RETRIEVE in operations:
new_bases.append(mixins.RetrieveModelMixin)
if UPDATE in operations:
new_bases.append(UpdateModelUsingService)
if DESTROY in operations:
new_bases.append(DestroyModelUsingService)
new_bases += (generics.SingleObjectAPIView, generics.MultipleObjectAPIView,)
return type.__new__(mcs, name, tuple(new_bases), attrs)
class DestroyModelUsingService(mixins.DestroyModelMixin):
"""
Use service model to call destroy
"""
def destroy(self, request, *args, **kwargs):
obj = self.get_object()
self.service.delete(obj)
return Response(status=status.HTTP_204_NO_CONTENT)
class CreateModelUsingService(mixins.CreateModelMixin):
"""
Use service model to call create
"""
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.DATA, files=request.FILES)
if serializer.is_valid():
self.pre_save(serializer.object)
self.object = self.service.create(serializer.object)
self.post_save(self.object, created=True)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateModelUsingService(mixins.UpdateModelMixin):
"""
Use service model to call update
"""
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
self.object = self.get_object()
serializer = self.get_serializer(self.object, data=request.DATA,
files=request.FILES, partial=partial)
if serializer.is_valid():
self.pre_save(serializer.object)
self.object = self.service.update(self.object)
self.post_save(self.object, created=False)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class DjangoServiceAPI(viewsets.GenericViewSet):
__metaclass__ = create_api_class
@property
def http_request(self):
return self.request._request
@property
def user(self):
return self.http_request and self.http_request.user
@property
def service(self):
'''
Instantiate service class with django http_request
'''
service_class = getattr(self, 'service_class')
service = service_class(self.http_request)
return service
def get_queryset(self):
# allow serializer without service
query = self.service.list()
return query
def get_object(self, queryset=None):
"""
Override default to add support for object-level permissions.
"""
try:
pk = self.kwargs.get('pk', None)
# allow serializer without service
obj = self.service.get(pk)
return obj
except self.model.DoesNotExist:
raise Http404()
def response_object(self, obj, status=status.HTTP_200_OK):
serializer = self.get_serializer(instance=obj)
return Response(serializer.data, status=status)
|
snbuback/django_services
|
django_services/api/api.py
|
getattr_in_cls_list
|
python
|
def getattr_in_cls_list(cls_list, attr, default):
for cls in cls_list:
if hasattr(cls, attr):
return getattr(cls, attr)
return default
|
Search for an attribute (attr) in class list (cls_list). Returns
attribute value if exists or None if not.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/api/api.py#L39-L45
| null |
# -*- coding:utf-8 -*-
import logging
import json
from functools import wraps
from django.http import Http404
from rest_framework import generics, mixins, status, viewsets
from rest_framework.response import Response
from django_services.service.exceptions import InvalidOperationException
LIST = 'list'
CREATE = 'create'
RETRIEVE = 'retrieve'
UPDATE = 'update'
DESTROY = 'destroy'
DEFAULT_OPERATIONS = frozenset([LIST, RETRIEVE])
ALL_OPERATIONS = frozenset([LIST, CREATE, RETRIEVE, UPDATE, DESTROY])
LOG = logging.getLogger(__name__)
def make_json(detail):
return json.dumps({'detail': detail})
def exception_translation(func):
"""
Catch exception and build correct api response for it.
"""
@wraps(func)
def decorator(*arg, **kwargs):
try:
return func(*arg, **kwargs)
except InvalidOperationException, e:
return Response(status=status.HTTP_412_PRECONDITION_FAILED, data={'detail': e.message}, headers={'Content-Type': 'application/json'})
return decorator
class create_api_class(type):
def __new__(mcs, name, bases, attrs):
if name == 'DjangoServiceAPI':
# use default class creator
return type.__new__(mcs, name, bases, attrs)
new_bases = list(bases)
# remove object class
if object in new_bases:
new_bases.remove(object)
if not 'model' in attrs:
attrs['model'] = attrs['service_class'].model_class
# extract operations attribute. Try to search subclasses
if 'operations' in attrs:
operations = attrs['operations']
else:
operations = getattr_in_cls_list(new_bases, 'operations', DEFAULT_OPERATIONS)
# verify if all operations are known.
if not ALL_OPERATIONS.issuperset(operations):
raise RuntimeError("Invalid operations: %s in %s" % (list(set(operations) - ALL_OPERATIONS), name))
if LIST in operations:
new_bases.append(mixins.ListModelMixin)
if CREATE in operations:
new_bases.append(CreateModelUsingService)
if RETRIEVE in operations:
new_bases.append(mixins.RetrieveModelMixin)
if UPDATE in operations:
new_bases.append(UpdateModelUsingService)
if DESTROY in operations:
new_bases.append(DestroyModelUsingService)
new_bases += (generics.SingleObjectAPIView, generics.MultipleObjectAPIView,)
return type.__new__(mcs, name, tuple(new_bases), attrs)
class DestroyModelUsingService(mixins.DestroyModelMixin):
"""
Use service model to call destroy
"""
def destroy(self, request, *args, **kwargs):
obj = self.get_object()
self.service.delete(obj)
return Response(status=status.HTTP_204_NO_CONTENT)
class CreateModelUsingService(mixins.CreateModelMixin):
"""
Use service model to call create
"""
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.DATA, files=request.FILES)
if serializer.is_valid():
self.pre_save(serializer.object)
self.object = self.service.create(serializer.object)
self.post_save(self.object, created=True)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateModelUsingService(mixins.UpdateModelMixin):
"""
Use service model to call update
"""
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
self.object = self.get_object()
serializer = self.get_serializer(self.object, data=request.DATA,
files=request.FILES, partial=partial)
if serializer.is_valid():
self.pre_save(serializer.object)
self.object = self.service.update(self.object)
self.post_save(self.object, created=False)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class DjangoServiceAPI(viewsets.GenericViewSet):
__metaclass__ = create_api_class
@property
def http_request(self):
return self.request._request
@property
def user(self):
return self.http_request and self.http_request.user
@property
def service(self):
'''
Instantiate service class with django http_request
'''
service_class = getattr(self, 'service_class')
service = service_class(self.http_request)
return service
def get_queryset(self):
# allow serializer without service
query = self.service.list()
return query
def get_object(self, queryset=None):
"""
Override default to add support for object-level permissions.
"""
try:
pk = self.kwargs.get('pk', None)
# allow serializer without service
obj = self.service.get(pk)
return obj
except self.model.DoesNotExist:
raise Http404()
def response_object(self, obj, status=status.HTTP_200_OK):
serializer = self.get_serializer(instance=obj)
return Response(serializer.data, status=status)
|
snbuback/django_services
|
django_services/api/api.py
|
DjangoServiceAPI.service
|
python
|
def service(self):
'''
Instantiate service class with django http_request
'''
service_class = getattr(self, 'service_class')
service = service_class(self.http_request)
return service
|
Instantiate service class with django http_request
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/api/api.py#L154-L160
| null |
class DjangoServiceAPI(viewsets.GenericViewSet):
__metaclass__ = create_api_class
@property
def http_request(self):
return self.request._request
@property
def user(self):
return self.http_request and self.http_request.user
@property
def get_queryset(self):
# allow serializer without service
query = self.service.list()
return query
def get_object(self, queryset=None):
"""
Override default to add support for object-level permissions.
"""
try:
pk = self.kwargs.get('pk', None)
# allow serializer without service
obj = self.service.get(pk)
return obj
except self.model.DoesNotExist:
raise Http404()
def response_object(self, obj, status=status.HTTP_200_OK):
serializer = self.get_serializer(instance=obj)
return Response(serializer.data, status=status)
|
snbuback/django_services
|
django_services/api/api.py
|
DjangoServiceAPI.get_object
|
python
|
def get_object(self, queryset=None):
try:
pk = self.kwargs.get('pk', None)
# allow serializer without service
obj = self.service.get(pk)
return obj
except self.model.DoesNotExist:
raise Http404()
|
Override default to add support for object-level permissions.
|
train
|
https://github.com/snbuback/django_services/blob/58cbdea878bb11197add0ed1008a9206e4d92671/django_services/api/api.py#L167-L178
| null |
class DjangoServiceAPI(viewsets.GenericViewSet):
__metaclass__ = create_api_class
@property
def http_request(self):
return self.request._request
@property
def user(self):
return self.http_request and self.http_request.user
@property
def service(self):
'''
Instantiate service class with django http_request
'''
service_class = getattr(self, 'service_class')
service = service_class(self.http_request)
return service
def get_queryset(self):
# allow serializer without service
query = self.service.list()
return query
def response_object(self, obj, status=status.HTTP_200_OK):
serializer = self.get_serializer(instance=obj)
return Response(serializer.data, status=status)
|
kmike/django-generic-images
|
generic_images/admin.py
|
attachedimage_form_factory
|
python
|
def attachedimage_form_factory(lang='en', debug=False):
''' Returns ModelForm class to be used in admin.
'lang' is the language for GearsUploader (can be 'en' and 'ru' at the
moment).
'''
yui = '' if debug else '.yui'
class _AttachedImageAdminForm(forms.ModelForm):
caption = forms.CharField(label=_('Caption'), required=False)
class Media:
js = [
'generic_images/js/mootools-1.2.4-core-yc.js',
'generic_images/js/GearsUploader.%s%s.js' % (lang, yui,),
'generic_images/js/AttachedImageInline.js',
]
class Meta:
model = AttachedImage
return _AttachedImageAdminForm
|
Returns ModelForm class to be used in admin.
'lang' is the language for GearsUploader (can be 'en' and 'ru' at the
moment).
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/admin.py#L10-L29
| null |
from django import forms
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from django.utils.translation import ugettext_lazy as _
from generic_images.models import AttachedImage
admin.site.register(AttachedImage)
AttachedImageAdminForm = attachedimage_form_factory()
''' Form for AttachedImage model to be used in inline admin '''
def attachedimages_inline_factory(lang='en', max_width='', debug=False):
''' Returns InlineModelAdmin for attached images.
'lang' is the language for GearsUploader (can be 'en' and 'ru' at the
moment). 'max_width' is default resize width parameter to be set in
widget.
'''
class _AttachedImagesInline(GenericTabularInline):
model = AttachedImage
form = attachedimage_form_factory(lang, debug)
template = 'generic_images/attached_images_inline.html'
max_w = max_width
return _AttachedImagesInline
AttachedImagesInline = attachedimages_inline_factory()
''' InlineModelAdmin for attached images.
Adds multi-image uploader with progress bar, before-upload image
previews and client-side resizing. Uploader is based
on GearsUploader (http://bitbucket.org/kmike/gearsuploader/) project.
To make this work copy ``generic_images`` folder from
``generic_images/media/`` to your ``MEDIA_ROOT``. Then use
``AttachedImagesInline`` class for you inlines::
#admin.py
from django.contrib import admin
from generic_images.admin import AttachedImagesInline
class MyModelAdmin(admin.ModelAdmin):
inlines = [AttachedImagesInline]
admin.site.register(MyModel, MyModelAdmin)
Just before standard formset the following uploader is displayed:
.. image:: img/admin-with-formset.png
Gears plugin is here
.. image:: img/admin-nogears.png
Message is displayed if Gears plugin is not available
.. image:: img/admin-previews.png
User can select several files at once using Ctrl or Shift keys
(Cmd on Mac) in standard OS file selection dialog. He can also remove
images from selection by clicking on thumbnails. Several files can also
be selected by opening file selection dialog several times.
.. image:: img/admin-uploading.png
User presses 'Upload' button and upload process begins
By default the 'Resize ..' checkbox is unchecked and the input field is
blank. If it is unchecked then images are not resized before uploading.
User can check it and set his max image width.
In order to set the default value and mark the checkbox as checked by
default create customized ``AttachedImagesInline`` class using
:func:`attachedimages_inline_factory` function. This function can also be
used to change uploader language (language auto-discovering is not
implemented)::
from generic_images.admin import attachedimages_inline_factory
class MyModelAdmin(admin.ModelAdmin):
inlines = [attachedimages_inline_factory(lang='ru', max_width=1024)]
admin.site.register(MyModel, MyModelAdmin)
'''
|
kmike/django-generic-images
|
generic_images/admin.py
|
attachedimages_inline_factory
|
python
|
def attachedimages_inline_factory(lang='en', max_width='', debug=False):
''' Returns InlineModelAdmin for attached images.
'lang' is the language for GearsUploader (can be 'en' and 'ru' at the
moment). 'max_width' is default resize width parameter to be set in
widget.
'''
class _AttachedImagesInline(GenericTabularInline):
model = AttachedImage
form = attachedimage_form_factory(lang, debug)
template = 'generic_images/attached_images_inline.html'
max_w = max_width
return _AttachedImagesInline
|
Returns InlineModelAdmin for attached images.
'lang' is the language for GearsUploader (can be 'en' and 'ru' at the
moment). 'max_width' is default resize width parameter to be set in
widget.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/admin.py#L35-L48
| null |
from django import forms
from django.contrib import admin
from django.contrib.contenttypes.generic import GenericTabularInline
from django.utils.translation import ugettext_lazy as _
from generic_images.models import AttachedImage
admin.site.register(AttachedImage)
def attachedimage_form_factory(lang='en', debug=False):
''' Returns ModelForm class to be used in admin.
'lang' is the language for GearsUploader (can be 'en' and 'ru' at the
moment).
'''
yui = '' if debug else '.yui'
class _AttachedImageAdminForm(forms.ModelForm):
caption = forms.CharField(label=_('Caption'), required=False)
class Media:
js = [
'generic_images/js/mootools-1.2.4-core-yc.js',
'generic_images/js/GearsUploader.%s%s.js' % (lang, yui,),
'generic_images/js/AttachedImageInline.js',
]
class Meta:
model = AttachedImage
return _AttachedImageAdminForm
AttachedImageAdminForm = attachedimage_form_factory()
''' Form for AttachedImage model to be used in inline admin '''
AttachedImagesInline = attachedimages_inline_factory()
''' InlineModelAdmin for attached images.
Adds multi-image uploader with progress bar, before-upload image
previews and client-side resizing. Uploader is based
on GearsUploader (http://bitbucket.org/kmike/gearsuploader/) project.
To make this work copy ``generic_images`` folder from
``generic_images/media/`` to your ``MEDIA_ROOT``. Then use
``AttachedImagesInline`` class for you inlines::
#admin.py
from django.contrib import admin
from generic_images.admin import AttachedImagesInline
class MyModelAdmin(admin.ModelAdmin):
inlines = [AttachedImagesInline]
admin.site.register(MyModel, MyModelAdmin)
Just before standard formset the following uploader is displayed:
.. image:: img/admin-with-formset.png
Gears plugin is here
.. image:: img/admin-nogears.png
Message is displayed if Gears plugin is not available
.. image:: img/admin-previews.png
User can select several files at once using Ctrl or Shift keys
(Cmd on Mac) in standard OS file selection dialog. He can also remove
images from selection by clicking on thumbnails. Several files can also
be selected by opening file selection dialog several times.
.. image:: img/admin-uploading.png
User presses 'Upload' button and upload process begins
By default the 'Resize ..' checkbox is unchecked and the input field is
blank. If it is unchecked then images are not resized before uploading.
User can check it and set his max image width.
In order to set the default value and mark the checkbox as checked by
default create customized ``AttachedImagesInline`` class using
:func:`attachedimages_inline_factory` function. This function can also be
used to change uploader language (language auto-discovering is not
implemented)::
from generic_images.admin import attachedimages_inline_factory
class MyModelAdmin(admin.ModelAdmin):
inlines = [attachedimages_inline_factory(lang='ru', max_width=1024)]
admin.site.register(MyModel, MyModelAdmin)
'''
|
kmike/django-generic-images
|
generic_images/managers.py
|
ImagesAndUserManager.select_with_main_images
|
python
|
def select_with_main_images(self, limit=None, **kwargs):
''' Select all objects with filters passed as kwargs.
For each object it's main image instance is accessible as ``object.main_image``.
Results can be limited using ``limit`` parameter.
Selection is performed using only 2 or 3 sql queries.
'''
objects = self.get_query_set().filter(**kwargs)[:limit]
self.image_model_class.injector.inject_to(objects,'main_image', is_main=True)
return objects
|
Select all objects with filters passed as kwargs.
For each object it's main image instance is accessible as ``object.main_image``.
Results can be limited using ``limit`` parameter.
Selection is performed using only 2 or 3 sql queries.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/managers.py#L26-L34
| null |
class ImagesAndUserManager(models.Manager):
""" Useful manager for models that have AttachedImage (or subclass) field
and 'injector=GenericIngector()' manager.
"""
def __init__(self, *args, **kwargs):
try:
image_model_class = kwargs.pop('image_model_class')
except KeyError:
image_model_class = 'generic_images.AttachedImage'
self.image_model_class = get_model_class_by_name(image_model_class)
super(ImagesAndUserManager, self).__init__(*args, **kwargs)
def for_user_with_main_images(self, user, limit=None):
return self.select_with_main_images(user=user, limit=limit)
def get_for_user(self, user):
objects = self.get_query_set().filter(user=user)
return objects
|
kmike/django-generic-images
|
generic_images/managers.py
|
AttachedImageManager.get_main_for
|
python
|
def get_main_for(self, model):
'''
Returns main image for given model
'''
try:
return self.for_model(model).get(is_main=True)
except models.ObjectDoesNotExist:
return None
|
Returns main image for given model
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/managers.py#L54-L61
|
[
"def for_model(self, model, content_type=None):\n ''' Returns all objects that are attached to given model '''\n content_type = content_type or ContentType.objects.get_for_model(model)\n kwargs = {\n self.ct_field: content_type,\n self.fk_field: model.pk\n }\n objects = self.get_query_set().filter(**kwargs)\n return objects\n"
] |
class AttachedImageManager(GenericModelManager):
''' Manager with helpful functions for attached images
'''
def get_for_model(self, model):
''' Returns all images that are attached to given model.
Deprecated. Use `for_model` instead.
'''
return self.for_model(model)
|
kmike/django-generic-images
|
generic_utils/managers.py
|
RelatedInjector.inject_to
|
python
|
def inject_to(self, objects, field_name, get_inject_object = lambda obj: obj,
select_related = None, **kwargs):
'''
``objects`` is an iterable. Related objects
will be attached to elements of this iterable.
``field_name`` is the attached object attribute name
``get_injector_object`` is a callable that takes object in `objects`
iterable. Related objects will be available as an attribute of the
result of ``get_inject_object(obj)``. It is assumed that ``fk_field``
points to ``get_inject_object(obj)``.
``select_related`` is a list to be passed to select_related method for
related objects.
All other kwargs will be passed as arguments to queryset filter function.
For example, we need to prefetch user profiles when we display a list of
comments::
# models.py
class UserProfile(models.Model):
user = models.ForeignKey(User, unique=True)
info = models.CharField(max_length=100)
objects = models.Manager()
injector = RelatedInjector(fk_field='user')
# views.py
def show_comments(request, obj_id):
...
comments = list(Comment.objects.for_model(obj).select_related('user'))
UserProfile.injector.inject_to(comments, '_profile_cache',
lambda comment: comment.user)
return direct_to_template('comment_list.html', {'comments': comments})
# in comment_list.html
{% for comment in comments %}
<h3>{{ comment.user }}</h3>
<h4>{{ comment.user.get_profile.info }}</h4>
{{ comment.comment|linebreaks }}
{% endfor %}
``comment.user`` attribute will be selected using ``select_related`` and
``comment.user._profile_cache`` (exposed by get_profile method) will be
selected by our injector. So there will be only 2 SQL queries for
selecting all comments with users and user profiles.
'''
#get related data
kwargs.update({self.fk_field+'__in': [ get_inject_object(obj).pk for obj in objects ]})
data = self.get_query_set().filter(**kwargs)
if select_related:
data = data.select_related(select_related)
data_dict = dict((getattr(item, self.fk_field), item) for item in list(data))
# add info to original data
for obj in objects:
injected_obj = get_inject_object(obj)
if data_dict.has_key(injected_obj):
# fk_field was ForeignKey so there are objects in lookup dict
get_inject_object(obj).__setattr__(field_name, data_dict[injected_obj])
elif data_dict.has_key(injected_obj.pk):
# fk_field was simple IntegerField so there are pk's in lookup dict
get_inject_object(obj).__setattr__(field_name, data_dict[injected_obj.pk])
|
``objects`` is an iterable. Related objects
will be attached to elements of this iterable.
``field_name`` is the attached object attribute name
``get_injector_object`` is a callable that takes object in `objects`
iterable. Related objects will be available as an attribute of the
result of ``get_inject_object(obj)``. It is assumed that ``fk_field``
points to ``get_inject_object(obj)``.
``select_related`` is a list to be passed to select_related method for
related objects.
All other kwargs will be passed as arguments to queryset filter function.
For example, we need to prefetch user profiles when we display a list of
comments::
# models.py
class UserProfile(models.Model):
user = models.ForeignKey(User, unique=True)
info = models.CharField(max_length=100)
objects = models.Manager()
injector = RelatedInjector(fk_field='user')
# views.py
def show_comments(request, obj_id):
...
comments = list(Comment.objects.for_model(obj).select_related('user'))
UserProfile.injector.inject_to(comments, '_profile_cache',
lambda comment: comment.user)
return direct_to_template('comment_list.html', {'comments': comments})
# in comment_list.html
{% for comment in comments %}
<h3>{{ comment.user }}</h3>
<h4>{{ comment.user.get_profile.info }}</h4>
{{ comment.comment|linebreaks }}
{% endfor %}
``comment.user`` attribute will be selected using ``select_related`` and
``comment.user._profile_cache`` (exposed by get_profile method) will be
selected by our injector. So there will be only 2 SQL queries for
selecting all comments with users and user profiles.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/managers.py#L20-L90
|
[
"def inject_to(self, objects, field_name, get_inject_object = lambda obj: obj,\n",
"def inject_to(self, objects, field_name, get_inject_object = lambda obj: obj, **kwargs):\n"
] |
class RelatedInjector(models.Manager):
""" Manager that can emulate ``select_related`` fetching
reverse relations using 1 additional SQL query.
"""
def __init__(self, fk_field='object_id', *args, **kwargs):
self.fk_field = fk_field
super(RelatedInjector, self).__init__(*args, **kwargs)
|
kmike/django-generic-images
|
generic_utils/managers.py
|
GenericInjector.inject_to
|
python
|
def inject_to(self, objects, field_name, get_inject_object = lambda obj: obj, **kwargs):
'''
``objects`` is an iterable. Images (or other generic-related model instances)
will be attached to elements of this iterable.
``field_name`` is the attached object attribute name
``get_inject_object`` is a callable that takes object in `objects` iterable.
Image will be available as an attribute of the result of
`get_injector_object(object)`. Images attached to `get_injector_object(object)`
will be selected.
All other kwargs will be passed as arguments to queryset filter function.
Example: you have a list of comments. Each comment has 'user' attribute.
You want to fetch 10 comments and their authors with avatars. Avatars should
be accessible as `user.avatar`::
comments = Comment.objects.all().select_related('user')[:10]
AttachedImage.injector.inject_to(comments, 'avatar', lambda obj: obj.user, is_main=True)
'''
try:
content_type = ContentType.objects.get_for_model(get_inject_object(objects[0]))
except IndexError:
return objects
kwargs.update({self.ct_field: content_type})
return super(GenericInjector, self).inject_to(objects, field_name, get_inject_object, **kwargs)
|
``objects`` is an iterable. Images (or other generic-related model instances)
will be attached to elements of this iterable.
``field_name`` is the attached object attribute name
``get_inject_object`` is a callable that takes object in `objects` iterable.
Image will be available as an attribute of the result of
`get_injector_object(object)`. Images attached to `get_injector_object(object)`
will be selected.
All other kwargs will be passed as arguments to queryset filter function.
Example: you have a list of comments. Each comment has 'user' attribute.
You want to fetch 10 comments and their authors with avatars. Avatars should
be accessible as `user.avatar`::
comments = Comment.objects.all().select_related('user')[:10]
AttachedImage.injector.inject_to(comments, 'avatar', lambda obj: obj.user, is_main=True)
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/managers.py#L130-L159
|
[
"def inject_to(self, objects, field_name, get_inject_object = lambda obj: obj,\n select_related = None, **kwargs):\n '''\n ``objects`` is an iterable. Related objects\n will be attached to elements of this iterable.\n\n ``field_name`` is the attached object attribute name\n\n ``get_injector_object`` is a callable that takes object in `objects`\n iterable. Related objects will be available as an attribute of the\n result of ``get_inject_object(obj)``. It is assumed that ``fk_field``\n points to ``get_inject_object(obj)``.\n\n ``select_related`` is a list to be passed to select_related method for\n related objects.\n\n All other kwargs will be passed as arguments to queryset filter function.\n\n For example, we need to prefetch user profiles when we display a list of\n comments::\n\n # models.py\n class UserProfile(models.Model):\n user = models.ForeignKey(User, unique=True)\n info = models.CharField(max_length=100)\n objects = models.Manager()\n injector = RelatedInjector(fk_field='user')\n\n # views.py\n def show_comments(request, obj_id):\n ...\n comments = list(Comment.objects.for_model(obj).select_related('user'))\n UserProfile.injector.inject_to(comments, '_profile_cache',\n lambda comment: comment.user)\n\n return direct_to_template('comment_list.html', {'comments': comments})\n\n # in comment_list.html\n {% for comment in comments %}\n <h3>{{ comment.user }}</h3>\n <h4>{{ comment.user.get_profile.info }}</h4>\n {{ comment.comment|linebreaks }}\n {% endfor %}\n\n ``comment.user`` attribute will be selected using ``select_related`` and\n ``comment.user._profile_cache`` (exposed by get_profile method) will be\n selected by our injector. 
So there will be only 2 SQL queries for\n selecting all comments with users and user profiles.\n\n '''\n\n #get related data\n kwargs.update({self.fk_field+'__in': [ get_inject_object(obj).pk for obj in objects ]})\n\n data = self.get_query_set().filter(**kwargs)\n if select_related:\n data = data.select_related(select_related)\n\n data_dict = dict((getattr(item, self.fk_field), item) for item in list(data))\n\n # add info to original data\n for obj in objects:\n injected_obj = get_inject_object(obj)\n\n if data_dict.has_key(injected_obj):\n # fk_field was ForeignKey so there are objects in lookup dict\n get_inject_object(obj).__setattr__(field_name, data_dict[injected_obj])\n\n elif data_dict.has_key(injected_obj.pk):\n # fk_field was simple IntegerField so there are pk's in lookup dict\n get_inject_object(obj).__setattr__(field_name, data_dict[injected_obj.pk])\n",
"def inject_to(self, objects, field_name, get_inject_object = lambda obj: obj, **kwargs):\n"
] |
class GenericInjector(RelatedInjector):
''' RelatedInjector but for GenericForeignKey's.
Manager for selecting all generic-related objects in one (two) SQL queries.
Selection is performed for a list of objects. Resulting data is aviable as attribute
of original model. Only one instance per object can be selected. Example usage:
select (and make acessible as user.avatar) all avatars for a list of user when
avatars are AttachedImage's attached to User model with is_main=True attributes.
Example::
from django.contrib.auth.models import User
from generic_images.models import AttachedImage
users = User.objects.all()[:10]
AttachedImage.injector.inject_to(users, 'avatar', is_main=True)
# i=0..9: users[i].avatar is AttachedImage objects with is_main=True.
# If there is no such AttachedImage (user doesn't have an avatar),
# users[i].avatar is None
For this example 2 or 3 sql queries will be executed:
1. one query for selecting 10 users,
2. one query for selecting all avatars (images with is_main=True) for selected users
3. and maybe one query for selecting content-type for User model
One can reuse GenericInjector manager for other models that are supposed to
be attached via generic relationship. It can be considered as an addition to
GFKmanager and GFKQuerySet from djangosnippets for different use cases.
'''
def __init__(self, fk_field='object_id', ct_field='content_type', *args, **kwargs):
self.ct_field = ct_field
super(GenericInjector, self).__init__(fk_field, *args, **kwargs)
|
kmike/django-generic-images
|
generic_utils/managers.py
|
GenericModelManager.for_model
|
python
|
def for_model(self, model, content_type=None):
''' Returns all objects that are attached to given model '''
content_type = content_type or ContentType.objects.get_for_model(model)
kwargs = {
self.ct_field: content_type,
self.fk_field: model.pk
}
objects = self.get_query_set().filter(**kwargs)
return objects
|
Returns all objects that are attached to given model
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/managers.py#L169-L177
| null |
class GenericModelManager(models.Manager):
""" Manager with for_model method. """
def __init__(self, *args, **kwargs):
self.ct_field, self.fk_field = _pop_data_from_kwargs(kwargs)
super(GenericModelManager, self).__init__(*args, **kwargs)
|
kmike/django-generic-images
|
generic_utils/__init__.py
|
get_template_search_list
|
python
|
def get_template_search_list(app_name, object, template_name):
ctype = ContentType.objects.get_for_model(object)
return [
u"%s/%s/%s/%s" % (app_name, ctype.app_label, ctype.model, template_name),
u"%s/%s/%s" % (app_name, ctype.app_label, template_name,),
u"%s/%s" % (app_name, template_name,)
]
|
Returns template search list.
Example::
>>> from django.contrib.auth.models import User
>>> user=User()
>>> get_template_search_list('my_app', user, 'list.html')
[u'my_app/auth/user/list.html', u'my_app/auth/list.html', 'my_app/list.html']
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/__init__.py#L3-L18
| null |
from django.contrib.contenttypes.models import ContentType
def get_template_search_list(app_name, object, template_name):
""" Returns template search list.
Example::
>>> from django.contrib.auth.models import User
>>> user=User()
>>> get_template_search_list('my_app', user, 'list.html')
[u'my_app/auth/user/list.html', u'my_app/auth/list.html', 'my_app/list.html']
"""
ctype = ContentType.objects.get_for_model(object)
return [
u"%s/%s/%s/%s" % (app_name, ctype.app_label, ctype.model, template_name),
u"%s/%s/%s" % (app_name, ctype.app_label, template_name,),
u"%s/%s" % (app_name, template_name,)
]
|
kmike/django-generic-images
|
generic_utils/templatetags.py
|
validate_params
|
python
|
def validate_params(bits, arguments_count, keyword_positions):
'''
Raises exception if passed params (`bits`) do not match signature.
Signature is defined by `arguments_count` (acceptible number of params) and
keyword_positions (dictionary with positions in keys and keywords in values,
for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
'''
if len(bits) != arguments_count+1:
raise InvalidParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,))
for pos in keyword_positions:
value = keyword_positions[pos]
if bits[pos] != value:
raise InvalidParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
|
Raises exception if passed params (`bits`) do not match signature.
Signature is defined by `arguments_count` (acceptible number of params) and
keyword_positions (dictionary with positions in keys and keywords in values,
for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/templatetags.py#L9-L23
| null |
from django import template
class InvalidParamsError(template.TemplateSyntaxError):
''' Custom exception class to distinguish usual TemplateSyntaxErrors
and validation errors for templatetags introduced by ``validate_params``
function'''
pass
def validate_params(bits, arguments_count, keyword_positions):
'''
Raises exception if passed params (`bits`) do not match signature.
Signature is defined by `arguments_count` (acceptible number of params) and
keyword_positions (dictionary with positions in keys and keywords in values,
for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
'''
if len(bits) != arguments_count+1:
raise InvalidParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,))
for pos in keyword_positions:
value = keyword_positions[pos]
if bits[pos] != value:
raise InvalidParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
|
kmike/django-generic-images
|
generic_images/fields.py
|
force_recalculate
|
python
|
def force_recalculate(obj):
''' Recalculate all ImageCountField and UserImageCountField fields
in object ``obj``.
This should be used if auto-updating of these fields was disabled for
some reason.
To disable auto-update when saving AttachedImage instance
(for example when you need to save a lot of images and want to
recalculate denormalised values only after all images are saved) use
this pattern::
image = AttachedImage(...)
image.send_signal = False
image.save()
'''
class Stub(object):
content_object = obj
img = Stub()
image_saved.send(sender = obj.__class__, instance = img)
|
Recalculate all ImageCountField and UserImageCountField fields
in object ``obj``.
This should be used if auto-updating of these fields was disabled for
some reason.
To disable auto-update when saving AttachedImage instance
(for example when you need to save a lot of images and want to
recalculate denormalised values only after all images are saved) use
this pattern::
image = AttachedImage(...)
image.send_signal = False
image.save()
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/fields.py#L18-L38
| null |
#coding: utf-8
'''
django-generic-images provides fields for storing information about
attached images count. Value is stored in model that images are
attached to. Value is updated automatically when image is saved or deleted.
Access to this value is much faster than additional "count()" queries.
'''
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from composition.base import CompositionField
from generic_images.models import AttachedImage
from generic_images.signals import image_saved, image_deleted
def force_recalculate(obj):
''' Recalculate all ImageCountField and UserImageCountField fields
in object ``obj``.
This should be used if auto-updating of these fields was disabled for
some reason.
To disable auto-update when saving AttachedImage instance
(for example when you need to save a lot of images and want to
recalculate denormalised values only after all images are saved) use
this pattern::
image = AttachedImage(...)
image.send_signal = False
image.save()
'''
class Stub(object):
content_object = obj
img = Stub()
image_saved.send(sender = obj.__class__, instance = img)
class ImageCountField(CompositionField):
''' Field with model's attached images count.
Value of this field is updated automatically when
image is added or removed. Access to this field
doesn't produce additional 'select count(*)' query,
data is stored in table.
Example 1::
from generic_images.fields import ImageCountField
class MyModel1(models.Model):
#... fields definitions
image_count = ImageCountField()
Example 2::
class MyModel2(models.Model):
#... fields definitions
image_count = ImageCountField(native=models.IntegerField(u'MyModel2 Images count', default=0))
'''
def __init__(self, native=None):
self.internal_init(
native = native or models.PositiveIntegerField(default=0, editable=False),
trigger = {
'on': (image_saved, image_deleted,),
'do': lambda model, image, signal: AttachedImage.objects.get_for_model(model).count(),
'field_holder_getter': lambda image: image.content_object
}
)
class UserImageCountField(CompositionField):
""" Field that should be put into user's profile (AUTH_PROFILE_MODULE).
It will contain number of images that are attached to corresponding User.
This field is useful when you want to use something like
:class:`~generic_images.fields.ImageCountField` for ``User`` model.
It is not possible to add a field to User model without
duck punching (monkey patching). ``UserImageCountField`` should be
put into user's profile (same model as defined in AUTH_PROFILE_MODULE).
It will contain number of images that are attached to corresponding User.
FK attribute to User model is considered ``'user'`` by default, but this
can be overrided using ``user_attr`` argument to ``UserImageCountField``
constructor. As with :class:`~generic_images.fields.ImageCountField`,
``UserImageCountField`` constructor accepts also ``native`` argument - an
underlying field.
"""
def __init__(self, native=None, user_attr='user'):
def get_field_value(model, image, signal):
return AttachedImage.objects.get_for_model(getattr(model, user_attr)).count()
self.internal_init(
native = native or models.PositiveIntegerField(default=0, editable=False),
trigger = {
'on': (image_saved, image_deleted,),
'do': get_field_value,
'field_holder_getter': lambda image: image.content_object.get_profile(),
'sender_model': User,
}
)
#class ImageCountField(CompositionField):
# def __init__(self, native=None, signal=None):
#
# def get_field_value(model, image, signal):
## we need to handle situation where the field with same name exists in model
## but it is not this ImageCountField
# if model is None:
# return
# ctype = ContentType.objects.get_for_model(self._composition_meta.model)
# model_ctype = ContentType.objects.get_for_model(model)
# if ctype==model_ctype:
# try:
# return AttachedImage.objects.get_for_model(model).count()
# except AttributeError:
# return None
# else:
# return 0
# return getattr(model, self._composition_meta.name)
#
# self.internal_init(
# native = native or models.PositiveIntegerField(default=0),
# trigger = dict(
# on = signal or (models.signals.post_save, models.signals.post_delete),
# sender_model = AttachedImage,
# do = get_field_value,
# field_holder_getter = lambda image: image.content_object
# )
# )
#
|
kmike/django-generic-images
|
generic_utils/app_utils.py
|
get_site_decorator
|
python
|
def get_site_decorator(site_param='site', obj_param='obj', context_param='context'):
''' It is a function that returns decorator factory useful for PluggableSite
views. This decorator factory returns decorator that do some
boilerplate work and make writing PluggableSite views easier.
It passes PluggableSite instance to decorated view,
retreives and passes object that site is attached to and passes
common context. It also passes and all the decorator factory's
keyword arguments.
For example usage please check photo_albums.views.
Btw, this decorator seems frightening for me. It feels that
"views as PluggableSite methods" approach can easily make this decorator
obsolete. But for now it just works.
'''
def site_method(**extra_params):
def decorator(fn):
@wraps(fn)
def wrapper(request, **kwargs):
try:
site = kwargs.pop(site_param)
except KeyError:
raise ValueError("'%s' parameter must be passed to "
"decorated view (%s)" % (site_param, fn))
# Pop parameters to be passed to actual view function.
params={}
for key in extra_params:
value = kwargs.pop(key, extra_params[key])
params.update({key:value})
# Now there are only site.object_getter lookup parameters in
# kwargs. Get the object and compute common request context.
try:
obj = site.object_getter(**kwargs)
except models.ObjectDoesNotExist:
raise Http404("Base object does not exist.")
context = site.get_common_context(obj)
context_instance = RequestContext(request, context,
processors=site.context_processors)
# pass site name, the object and common request to decorated view
params.update({
site_param:site,
obj_param: obj,
context_param: context_instance
})
return fn(request, **params)
return wrapper
return decorator
return site_method
|
It is a function that returns decorator factory useful for PluggableSite
views. This decorator factory returns decorator that do some
boilerplate work and make writing PluggableSite views easier.
It passes PluggableSite instance to decorated view,
retreives and passes object that site is attached to and passes
common context. It also passes and all the decorator factory's
keyword arguments.
For example usage please check photo_albums.views.
Btw, this decorator seems frightening for me. It feels that
"views as PluggableSite methods" approach can easily make this decorator
obsolete. But for now it just works.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/app_utils.py#L10-L61
| null |
#coding: utf-8
from django.conf.urls.defaults import *
from django.http import Http404
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db import models
from django.db.models.query import QuerySet
from django.utils.functional import wraps
def simple_getter(queryset, object_regex=None, lookup_field=None):
''' Returns simple object_getter function for use with PluggableSite.
It takes 'queryset' with QuerySet or Model instance, 'object_regex' with
url regex and 'lookup_field' with lookup field.
'''
object_regex = object_regex or r'\d+'
lookup_field = lookup_field or 'pk'
if isinstance(queryset, models.Model):
qs = queryset._default_manager.all()
elif isinstance(queryset, QuerySet) or isinstance(queryset, models.Manager):
qs = queryset
def object_getter(object_id):
return qs.get(**{lookup_field: object_id})
object_getter.regex = "(?P<object_id>%s)" % object_regex
return object_getter
class PluggableSite(object):
''' Base class for reusable apps.
The approach is similar to django AdminSite.
For usage case please check photo_albums app.
'''
def __init__(self,
instance_name,
app_name,
queryset = None,
object_regex = None,
lookup_field = None,
extra_context=None,
template_object_name = 'object',
has_edit_permission = lambda request, obj: True,
context_processors = None,
object_getter = None):
self.instance_name = instance_name
self.extra_context = extra_context or {}
self.app_name = app_name
self.has_edit_permission = has_edit_permission
self.template_object_name = template_object_name
self.context_processors = context_processors
if object_regex or lookup_field or (queryset is not None):
if object_getter is not None:
raise ValueError('It is ambiguos what lookup method should be '
'used: old (queryset+object_regex+lookup_field)'
' or new (object_getter).')
self.object_getter = simple_getter(queryset, object_regex, lookup_field)
elif object_getter is not None:
self.object_getter = object_getter
else:
raise ValueError('Please provide object_getter or queryset.')
def reverse(self, url, args=None, kwargs=None):
''' Reverse an url taking self.app_name in account '''
return reverse("%s:%s" % (self.instance_name, url,),
args=args,
kwargs=kwargs,
current_app = self.app_name)
def check_permissions(self, request, object):
if not self.has_edit_permission(request, object):
raise Http404('Not allowed')
def get_common_context(self, obj):
context = {self.template_object_name: obj, 'current_app': self.app_name}
if (self.extra_context):
context.update(self.extra_context)
return context
def make_regex(self, url):
'''
Make regex string for ``PluggableSite`` urlpatterns: prepend url
with parent object's url and app name.
See also: http://code.djangoproject.com/ticket/11559.
'''
return r"^%s/%s%s$" % (self.object_getter.regex, self.app_name, url)
def patterns(self):
''' This method should return url patterns (like urlpatterns variable in
:file:`urls.py`). It is helpful to construct regex with
:meth:`~generic_utils.app_utils.PluggableSite.make_regex` method.
Example::
return patterns('photo_albums.views',
url(
self.make_regex('/'),
'show_album',
{'album_site': self},
name = 'show_album',
),
)
'''
raise NotImplementedError
@property
def urls(self):
'''
Use it in :file:`urls.py`.
Example::
urlpatterns += patterns('', url(r'^my_site/', include(my_pluggable_site.urls)),)
'''
return self.patterns(), self.app_name, self.instance_name
|
kmike/django-generic-images
|
generic_utils/app_utils.py
|
simple_getter
|
python
|
def simple_getter(queryset, object_regex=None, lookup_field=None):
''' Returns simple object_getter function for use with PluggableSite.
It takes 'queryset' with QuerySet or Model instance, 'object_regex' with
url regex and 'lookup_field' with lookup field.
'''
object_regex = object_regex or r'\d+'
lookup_field = lookup_field or 'pk'
if isinstance(queryset, models.Model):
qs = queryset._default_manager.all()
elif isinstance(queryset, QuerySet) or isinstance(queryset, models.Manager):
qs = queryset
def object_getter(object_id):
return qs.get(**{lookup_field: object_id})
object_getter.regex = "(?P<object_id>%s)" % object_regex
return object_getter
|
Returns simple object_getter function for use with PluggableSite.
It takes 'queryset' with QuerySet or Model instance, 'object_regex' with
url regex and 'lookup_field' with lookup field.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/app_utils.py#L64-L81
| null |
#coding: utf-8
from django.conf.urls.defaults import *
from django.http import Http404
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db import models
from django.db.models.query import QuerySet
from django.utils.functional import wraps
def get_site_decorator(site_param='site', obj_param='obj', context_param='context'):
''' It is a function that returns decorator factory useful for PluggableSite
views. This decorator factory returns decorator that do some
boilerplate work and make writing PluggableSite views easier.
It passes PluggableSite instance to decorated view,
retreives and passes object that site is attached to and passes
common context. It also passes and all the decorator factory's
keyword arguments.
For example usage please check photo_albums.views.
Btw, this decorator seems frightening for me. It feels that
"views as PluggableSite methods" approach can easily make this decorator
obsolete. But for now it just works.
'''
def site_method(**extra_params):
def decorator(fn):
@wraps(fn)
def wrapper(request, **kwargs):
try:
site = kwargs.pop(site_param)
except KeyError:
raise ValueError("'%s' parameter must be passed to "
"decorated view (%s)" % (site_param, fn))
# Pop parameters to be passed to actual view function.
params={}
for key in extra_params:
value = kwargs.pop(key, extra_params[key])
params.update({key:value})
# Now there are only site.object_getter lookup parameters in
# kwargs. Get the object and compute common request context.
try:
obj = site.object_getter(**kwargs)
except models.ObjectDoesNotExist:
raise Http404("Base object does not exist.")
context = site.get_common_context(obj)
context_instance = RequestContext(request, context,
processors=site.context_processors)
# pass site name, the object and common request to decorated view
params.update({
site_param:site,
obj_param: obj,
context_param: context_instance
})
return fn(request, **params)
return wrapper
return decorator
return site_method
class PluggableSite(object):
''' Base class for reusable apps.
The approach is similar to django AdminSite.
For usage case please check photo_albums app.
'''
def __init__(self,
instance_name,
app_name,
queryset = None,
object_regex = None,
lookup_field = None,
extra_context=None,
template_object_name = 'object',
has_edit_permission = lambda request, obj: True,
context_processors = None,
object_getter = None):
self.instance_name = instance_name
self.extra_context = extra_context or {}
self.app_name = app_name
self.has_edit_permission = has_edit_permission
self.template_object_name = template_object_name
self.context_processors = context_processors
if object_regex or lookup_field or (queryset is not None):
if object_getter is not None:
raise ValueError('It is ambiguos what lookup method should be '
'used: old (queryset+object_regex+lookup_field)'
' or new (object_getter).')
self.object_getter = simple_getter(queryset, object_regex, lookup_field)
elif object_getter is not None:
self.object_getter = object_getter
else:
raise ValueError('Please provide object_getter or queryset.')
def reverse(self, url, args=None, kwargs=None):
''' Reverse an url taking self.app_name in account '''
return reverse("%s:%s" % (self.instance_name, url,),
args=args,
kwargs=kwargs,
current_app = self.app_name)
def check_permissions(self, request, object):
if not self.has_edit_permission(request, object):
raise Http404('Not allowed')
def get_common_context(self, obj):
context = {self.template_object_name: obj, 'current_app': self.app_name}
if (self.extra_context):
context.update(self.extra_context)
return context
def make_regex(self, url):
'''
Make regex string for ``PluggableSite`` urlpatterns: prepend url
with parent object's url and app name.
See also: http://code.djangoproject.com/ticket/11559.
'''
return r"^%s/%s%s$" % (self.object_getter.regex, self.app_name, url)
def patterns(self):
''' This method should return url patterns (like urlpatterns variable in
:file:`urls.py`). It is helpful to construct regex with
:meth:`~generic_utils.app_utils.PluggableSite.make_regex` method.
Example::
return patterns('photo_albums.views',
url(
self.make_regex('/'),
'show_album',
{'album_site': self},
name = 'show_album',
),
)
'''
raise NotImplementedError
@property
def urls(self):
'''
Use it in :file:`urls.py`.
Example::
urlpatterns += patterns('', url(r'^my_site/', include(my_pluggable_site.urls)),)
'''
return self.patterns(), self.app_name, self.instance_name
|
kmike/django-generic-images
|
generic_utils/app_utils.py
|
PluggableSite.reverse
|
python
|
def reverse(self, url, args=None, kwargs=None):
''' Reverse an url taking self.app_name in account '''
return reverse("%s:%s" % (self.instance_name, url,),
args=args,
kwargs=kwargs,
current_app = self.app_name)
|
Reverse an url taking self.app_name in account
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/app_utils.py#L121-L126
| null |
class PluggableSite(object):
''' Base class for reusable apps.
The approach is similar to django AdminSite.
For usage case please check photo_albums app.
'''
def __init__(self,
instance_name,
app_name,
queryset = None,
object_regex = None,
lookup_field = None,
extra_context=None,
template_object_name = 'object',
has_edit_permission = lambda request, obj: True,
context_processors = None,
object_getter = None):
self.instance_name = instance_name
self.extra_context = extra_context or {}
self.app_name = app_name
self.has_edit_permission = has_edit_permission
self.template_object_name = template_object_name
self.context_processors = context_processors
if object_regex or lookup_field or (queryset is not None):
if object_getter is not None:
raise ValueError('It is ambiguos what lookup method should be '
'used: old (queryset+object_regex+lookup_field)'
' or new (object_getter).')
self.object_getter = simple_getter(queryset, object_regex, lookup_field)
elif object_getter is not None:
self.object_getter = object_getter
else:
raise ValueError('Please provide object_getter or queryset.')
def check_permissions(self, request, object):
if not self.has_edit_permission(request, object):
raise Http404('Not allowed')
def get_common_context(self, obj):
context = {self.template_object_name: obj, 'current_app': self.app_name}
if (self.extra_context):
context.update(self.extra_context)
return context
def make_regex(self, url):
'''
Make regex string for ``PluggableSite`` urlpatterns: prepend url
with parent object's url and app name.
See also: http://code.djangoproject.com/ticket/11559.
'''
return r"^%s/%s%s$" % (self.object_getter.regex, self.app_name, url)
def patterns(self):
''' This method should return url patterns (like urlpatterns variable in
:file:`urls.py`). It is helpful to construct regex with
:meth:`~generic_utils.app_utils.PluggableSite.make_regex` method.
Example::
return patterns('photo_albums.views',
url(
self.make_regex('/'),
'show_album',
{'album_site': self},
name = 'show_album',
),
)
'''
raise NotImplementedError
@property
def urls(self):
'''
Use it in :file:`urls.py`.
Example::
urlpatterns += patterns('', url(r'^my_site/', include(my_pluggable_site.urls)),)
'''
return self.patterns(), self.app_name, self.instance_name
|
kmike/django-generic-images
|
generic_utils/app_utils.py
|
PluggableSite.make_regex
|
python
|
def make_regex(self, url):
'''
Make regex string for ``PluggableSite`` urlpatterns: prepend url
with parent object's url and app name.
See also: http://code.djangoproject.com/ticket/11559.
'''
return r"^%s/%s%s$" % (self.object_getter.regex, self.app_name, url)
|
Make regex string for ``PluggableSite`` urlpatterns: prepend url
with parent object's url and app name.
See also: http://code.djangoproject.com/ticket/11559.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_utils/app_utils.py#L141-L148
| null |
class PluggableSite(object):
''' Base class for reusable apps.
The approach is similar to django AdminSite.
For usage case please check photo_albums app.
'''
def __init__(self,
instance_name,
app_name,
queryset = None,
object_regex = None,
lookup_field = None,
extra_context=None,
template_object_name = 'object',
has_edit_permission = lambda request, obj: True,
context_processors = None,
object_getter = None):
self.instance_name = instance_name
self.extra_context = extra_context or {}
self.app_name = app_name
self.has_edit_permission = has_edit_permission
self.template_object_name = template_object_name
self.context_processors = context_processors
if object_regex or lookup_field or (queryset is not None):
if object_getter is not None:
raise ValueError('It is ambiguos what lookup method should be '
'used: old (queryset+object_regex+lookup_field)'
' or new (object_getter).')
self.object_getter = simple_getter(queryset, object_regex, lookup_field)
elif object_getter is not None:
self.object_getter = object_getter
else:
raise ValueError('Please provide object_getter or queryset.')
def reverse(self, url, args=None, kwargs=None):
''' Reverse an url taking self.app_name in account '''
return reverse("%s:%s" % (self.instance_name, url,),
args=args,
kwargs=kwargs,
current_app = self.app_name)
def check_permissions(self, request, object):
if not self.has_edit_permission(request, object):
raise Http404('Not allowed')
def get_common_context(self, obj):
context = {self.template_object_name: obj, 'current_app': self.app_name}
if (self.extra_context):
context.update(self.extra_context)
return context
def patterns(self):
''' This method should return url patterns (like urlpatterns variable in
:file:`urls.py`). It is helpful to construct regex with
:meth:`~generic_utils.app_utils.PluggableSite.make_regex` method.
Example::
return patterns('photo_albums.views',
url(
self.make_regex('/'),
'show_album',
{'album_site': self},
name = 'show_album',
),
)
'''
raise NotImplementedError
@property
def urls(self):
'''
Use it in :file:`urls.py`.
Example::
urlpatterns += patterns('', url(r'^my_site/', include(my_pluggable_site.urls)),)
'''
return self.patterns(), self.app_name, self.instance_name
|
kmike/django-generic-images
|
generic_images/models.py
|
ReplaceOldImageModel._replace_old_image
|
python
|
def _replace_old_image(self):
''' Override this in subclass if you don't want
image replacing or want to customize image replacing
'''
try:
old_obj = self.__class__.objects.get(pk=self.pk)
if old_obj.image.path != self.image.path:
path = old_obj.image.path
default_storage.delete(path)
except self.__class__.DoesNotExist:
pass
|
Override this in subclass if you don't want
image replacing or want to customize image replacing
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/models.py#L44-L54
| null |
class ReplaceOldImageModel(BaseImageModel):
'''
Abstract Model class with image field.
If the file for image is re-uploaded, old file is deleted.
'''
def save(self, *args, **kwargs):
if self.pk:
self._replace_old_image()
super(ReplaceOldImageModel, self).save(*args, **kwargs)
class Meta:
abstract = True
|
kmike/django-generic-images
|
generic_images/models.py
|
AbstractAttachedImage.next
|
python
|
def next(self):
''' Returns next image for same content_object and None if image is
the last. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__lt=self.order).order_by('-order')[0]
except IndexError:
return None
|
Returns next image for same content_object and None if image is
the last.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/models.py#L102-L110
| null |
class AbstractAttachedImage(ReplaceOldImageModel, GenericModelBase):
'''
Abstract Image model that can be attached to any other Django model
using generic relations.
.. attribute:: is_main
BooleanField. Whether the image is the main image for object.
This field is set to False automatically for all images attached to
same object if image with is_main=True is saved to ensure that there
is only 1 main image for object.
.. attribute:: order
IntegerField to support ordered image sets.
On creation it is set to max(id)+1.
'''
user = models.ForeignKey(User, blank=True, null=True,
verbose_name=_('User'))
'''A ForeignKey to associated user, for example user who uploaded image.
Can be empty.'''
caption = models.TextField(_('Caption'), null=True, blank=True)
'TextField caption for image'
is_main = models.BooleanField(_('Main image'), default=False)
order = models.IntegerField(_('Order'), default=0)
objects = AttachedImageManager()
'''Default manager of :class:`~generic_images.managers.AttachedImageManager`
type.'''
def previous(self):
''' Returns previous image for same content_object and None if image
is the first. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__gt=self.order).order_by('order')[0]
except IndexError:
return None
def get_order_in_album(self, reversed_ordering=True):
''' Returns image order number. It is calculated as (number+1) of images
attached to the same content_object whose order is greater
(if 'reverse_ordering' is True) or lesser (if 'reverse_ordering' is
False) than image's order.
'''
lookup = 'order__gt' if reversed_ordering else 'order__lt'
return self.__class__.objects.\
for_model(self.content_object, self.content_type).\
filter(**{lookup: self.order}).count() + 1
def _get_next_pk(self):
max_pk = self.__class__.objects.aggregate(m=Max('pk'))['m'] or 0
return max_pk+1
# def put_as_last(self):
# """ Sets order to max(order)+1 for self.content_object
# """
# last = self.__class__.objects.exclude(id=self.id).\
# filter(
# object_id = self.object_id,
# content_type = self.content_type,
# ).aggregate(max_order=Max('order'))['max_order'] or 0
# self.order = last+1
def get_file_name(self, filename):
''' Returns file name (without path and extenstion)
for uploaded image. Default is 'max(pk)+1'.
Override this in subclass or assign another functions per-instance
if you want different file names (ex: random string).
'''
# alphabet = "1234567890abcdefghijklmnopqrstuvwxyz"
# # 1e25 variants
# return ''.join([random.choice(alphabet) for i in xrange(16)])
# anyway _get_next_pk is needed for setting `order` field
return str(self._get_next_pk())
def get_upload_path(self, filename):
''' Override this in proxy subclass to customize upload path.
Default upload path is
:file:`/media/images/<user.id>/<filename>.<ext>`
or :file:`/media/images/common/<filename>.<ext>` if user is not set.
``<filename>`` is returned by
:meth:`~generic_images.models.AbstractAttachedImage.get_file_name`
method. By default it is probable id of new image (it is
predicted as it is unknown at this stage).
'''
user_folder = str(self.user.pk) if self.user else 'common'
root, ext = os.path.splitext(filename)
return os.path.join('media', 'images', user_folder,
self.get_file_name(filename) + ext)
def save(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
if self.is_main:
related_images = self.__class__.objects.filter(
content_type=self.content_type,
object_id=self.object_id
)
related_images.update(is_main=False)
if not self.pk: # object is created
if not self.order: # order is not set
self.order = self._get_next_pk() # let it be max(pk)+1
super(AbstractAttachedImage, self).save(*args, **kwargs)
if send_signal:
image_saved.send(sender = self.content_type.model_class(),
instance = self)
def delete(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
super(AbstractAttachedImage, self).delete(*args, **kwargs)
if send_signal:
image_deleted.send(sender = self.content_type.model_class(),
instance = self)
def __unicode__(self):
try:
if self.user:
return u"AttachedImage #%d for [%s] by [%s]" % (
self.pk, self.content_object, self.user)
else:
return u"AttachedImage #%d for [%s]" % (
self.pk, self.content_object,)
except:
try:
return u"AttachedImage #%d" % (self.pk)
except TypeError:
return u"new AttachedImage"
class Meta:
abstract=True
|
kmike/django-generic-images
|
generic_images/models.py
|
AbstractAttachedImage.previous
|
python
|
def previous(self):
''' Returns previous image for same content_object and None if image
is the first. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__gt=self.order).order_by('order')[0]
except IndexError:
return None
|
Returns previous image for same content_object and None if image
is the first.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/models.py#L112-L120
| null |
class AbstractAttachedImage(ReplaceOldImageModel, GenericModelBase):
'''
Abstract Image model that can be attached to any other Django model
using generic relations.
.. attribute:: is_main
BooleanField. Whether the image is the main image for object.
This field is set to False automatically for all images attached to
same object if image with is_main=True is saved to ensure that there
is only 1 main image for object.
.. attribute:: order
IntegerField to support ordered image sets.
On creation it is set to max(id)+1.
'''
user = models.ForeignKey(User, blank=True, null=True,
verbose_name=_('User'))
'''A ForeignKey to associated user, for example user who uploaded image.
Can be empty.'''
caption = models.TextField(_('Caption'), null=True, blank=True)
'TextField caption for image'
is_main = models.BooleanField(_('Main image'), default=False)
order = models.IntegerField(_('Order'), default=0)
objects = AttachedImageManager()
'''Default manager of :class:`~generic_images.managers.AttachedImageManager`
type.'''
def next(self):
''' Returns next image for same content_object and None if image is
the last. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__lt=self.order).order_by('-order')[0]
except IndexError:
return None
def get_order_in_album(self, reversed_ordering=True):
''' Returns image order number. It is calculated as (number+1) of images
attached to the same content_object whose order is greater
(if 'reverse_ordering' is True) or lesser (if 'reverse_ordering' is
False) than image's order.
'''
lookup = 'order__gt' if reversed_ordering else 'order__lt'
return self.__class__.objects.\
for_model(self.content_object, self.content_type).\
filter(**{lookup: self.order}).count() + 1
def _get_next_pk(self):
max_pk = self.__class__.objects.aggregate(m=Max('pk'))['m'] or 0
return max_pk+1
# def put_as_last(self):
# """ Sets order to max(order)+1 for self.content_object
# """
# last = self.__class__.objects.exclude(id=self.id).\
# filter(
# object_id = self.object_id,
# content_type = self.content_type,
# ).aggregate(max_order=Max('order'))['max_order'] or 0
# self.order = last+1
def get_file_name(self, filename):
''' Returns file name (without path and extenstion)
for uploaded image. Default is 'max(pk)+1'.
Override this in subclass or assign another functions per-instance
if you want different file names (ex: random string).
'''
# alphabet = "1234567890abcdefghijklmnopqrstuvwxyz"
# # 1e25 variants
# return ''.join([random.choice(alphabet) for i in xrange(16)])
# anyway _get_next_pk is needed for setting `order` field
return str(self._get_next_pk())
def get_upload_path(self, filename):
''' Override this in proxy subclass to customize upload path.
Default upload path is
:file:`/media/images/<user.id>/<filename>.<ext>`
or :file:`/media/images/common/<filename>.<ext>` if user is not set.
``<filename>`` is returned by
:meth:`~generic_images.models.AbstractAttachedImage.get_file_name`
method. By default it is probable id of new image (it is
predicted as it is unknown at this stage).
'''
user_folder = str(self.user.pk) if self.user else 'common'
root, ext = os.path.splitext(filename)
return os.path.join('media', 'images', user_folder,
self.get_file_name(filename) + ext)
def save(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
if self.is_main:
related_images = self.__class__.objects.filter(
content_type=self.content_type,
object_id=self.object_id
)
related_images.update(is_main=False)
if not self.pk: # object is created
if not self.order: # order is not set
self.order = self._get_next_pk() # let it be max(pk)+1
super(AbstractAttachedImage, self).save(*args, **kwargs)
if send_signal:
image_saved.send(sender = self.content_type.model_class(),
instance = self)
def delete(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
super(AbstractAttachedImage, self).delete(*args, **kwargs)
if send_signal:
image_deleted.send(sender = self.content_type.model_class(),
instance = self)
def __unicode__(self):
try:
if self.user:
return u"AttachedImage #%d for [%s] by [%s]" % (
self.pk, self.content_object, self.user)
else:
return u"AttachedImage #%d for [%s]" % (
self.pk, self.content_object,)
except:
try:
return u"AttachedImage #%d" % (self.pk)
except TypeError:
return u"new AttachedImage"
class Meta:
abstract=True
|
kmike/django-generic-images
|
generic_images/models.py
|
AbstractAttachedImage.get_order_in_album
|
python
|
def get_order_in_album(self, reversed_ordering=True):
''' Returns image order number. It is calculated as (number+1) of images
attached to the same content_object whose order is greater
(if 'reverse_ordering' is True) or lesser (if 'reverse_ordering' is
False) than image's order.
'''
lookup = 'order__gt' if reversed_ordering else 'order__lt'
return self.__class__.objects.\
for_model(self.content_object, self.content_type).\
filter(**{lookup: self.order}).count() + 1
|
Returns image order number. It is calculated as (number+1) of images
attached to the same content_object whose order is greater
(if 'reverse_ordering' is True) or lesser (if 'reverse_ordering' is
False) than image's order.
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/models.py#L122-L131
| null |
class AbstractAttachedImage(ReplaceOldImageModel, GenericModelBase):
'''
Abstract Image model that can be attached to any other Django model
using generic relations.
.. attribute:: is_main
BooleanField. Whether the image is the main image for object.
This field is set to False automatically for all images attached to
same object if image with is_main=True is saved to ensure that there
is only 1 main image for object.
.. attribute:: order
IntegerField to support ordered image sets.
On creation it is set to max(id)+1.
'''
user = models.ForeignKey(User, blank=True, null=True,
verbose_name=_('User'))
'''A ForeignKey to associated user, for example user who uploaded image.
Can be empty.'''
caption = models.TextField(_('Caption'), null=True, blank=True)
'TextField caption for image'
is_main = models.BooleanField(_('Main image'), default=False)
order = models.IntegerField(_('Order'), default=0)
objects = AttachedImageManager()
'''Default manager of :class:`~generic_images.managers.AttachedImageManager`
type.'''
def next(self):
''' Returns next image for same content_object and None if image is
the last. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__lt=self.order).order_by('-order')[0]
except IndexError:
return None
def previous(self):
''' Returns previous image for same content_object and None if image
is the first. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__gt=self.order).order_by('order')[0]
except IndexError:
return None
def _get_next_pk(self):
max_pk = self.__class__.objects.aggregate(m=Max('pk'))['m'] or 0
return max_pk+1
# def put_as_last(self):
# """ Sets order to max(order)+1 for self.content_object
# """
# last = self.__class__.objects.exclude(id=self.id).\
# filter(
# object_id = self.object_id,
# content_type = self.content_type,
# ).aggregate(max_order=Max('order'))['max_order'] or 0
# self.order = last+1
def get_file_name(self, filename):
''' Returns file name (without path and extenstion)
for uploaded image. Default is 'max(pk)+1'.
Override this in subclass or assign another functions per-instance
if you want different file names (ex: random string).
'''
# alphabet = "1234567890abcdefghijklmnopqrstuvwxyz"
# # 1e25 variants
# return ''.join([random.choice(alphabet) for i in xrange(16)])
# anyway _get_next_pk is needed for setting `order` field
return str(self._get_next_pk())
def get_upload_path(self, filename):
''' Override this in proxy subclass to customize upload path.
Default upload path is
:file:`/media/images/<user.id>/<filename>.<ext>`
or :file:`/media/images/common/<filename>.<ext>` if user is not set.
``<filename>`` is returned by
:meth:`~generic_images.models.AbstractAttachedImage.get_file_name`
method. By default it is probable id of new image (it is
predicted as it is unknown at this stage).
'''
user_folder = str(self.user.pk) if self.user else 'common'
root, ext = os.path.splitext(filename)
return os.path.join('media', 'images', user_folder,
self.get_file_name(filename) + ext)
def save(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
if self.is_main:
related_images = self.__class__.objects.filter(
content_type=self.content_type,
object_id=self.object_id
)
related_images.update(is_main=False)
if not self.pk: # object is created
if not self.order: # order is not set
self.order = self._get_next_pk() # let it be max(pk)+1
super(AbstractAttachedImage, self).save(*args, **kwargs)
if send_signal:
image_saved.send(sender = self.content_type.model_class(),
instance = self)
def delete(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
super(AbstractAttachedImage, self).delete(*args, **kwargs)
if send_signal:
image_deleted.send(sender = self.content_type.model_class(),
instance = self)
def __unicode__(self):
try:
if self.user:
return u"AttachedImage #%d for [%s] by [%s]" % (
self.pk, self.content_object, self.user)
else:
return u"AttachedImage #%d for [%s]" % (
self.pk, self.content_object,)
except:
try:
return u"AttachedImage #%d" % (self.pk)
except TypeError:
return u"new AttachedImage"
class Meta:
abstract=True
|
kmike/django-generic-images
|
generic_images/models.py
|
AbstractAttachedImage.get_upload_path
|
python
|
def get_upload_path(self, filename):
''' Override this in proxy subclass to customize upload path.
Default upload path is
:file:`/media/images/<user.id>/<filename>.<ext>`
or :file:`/media/images/common/<filename>.<ext>` if user is not set.
``<filename>`` is returned by
:meth:`~generic_images.models.AbstractAttachedImage.get_file_name`
method. By default it is probable id of new image (it is
predicted as it is unknown at this stage).
'''
user_folder = str(self.user.pk) if self.user else 'common'
root, ext = os.path.splitext(filename)
return os.path.join('media', 'images', user_folder,
self.get_file_name(filename) + ext)
|
Override this in proxy subclass to customize upload path.
Default upload path is
:file:`/media/images/<user.id>/<filename>.<ext>`
or :file:`/media/images/common/<filename>.<ext>` if user is not set.
``<filename>`` is returned by
:meth:`~generic_images.models.AbstractAttachedImage.get_file_name`
method. By default it is probable id of new image (it is
predicted as it is unknown at this stage).
|
train
|
https://github.com/kmike/django-generic-images/blob/4e45068ed219ac35396758eb6b6e1fe5306147df/generic_images/models.py#L164-L179
|
[
" def get_file_name(self, filename):\n ''' Returns file name (without path and extenstion)\n for uploaded image. Default is 'max(pk)+1'.\n Override this in subclass or assign another functions per-instance\n if you want different file names (ex: random string).\n '''\n# alphabet = \"1234567890abcdefghijklmnopqrstuvwxyz\"\n# # 1e25 variants\n# return ''.join([random.choice(alphabet) for i in xrange(16)])\n\n # anyway _get_next_pk is needed for setting `order` field\n return str(self._get_next_pk())\n"
] |
class AbstractAttachedImage(ReplaceOldImageModel, GenericModelBase):
'''
Abstract Image model that can be attached to any other Django model
using generic relations.
.. attribute:: is_main
BooleanField. Whether the image is the main image for object.
This field is set to False automatically for all images attached to
same object if image with is_main=True is saved to ensure that there
is only 1 main image for object.
.. attribute:: order
IntegerField to support ordered image sets.
On creation it is set to max(id)+1.
'''
user = models.ForeignKey(User, blank=True, null=True,
verbose_name=_('User'))
'''A ForeignKey to associated user, for example user who uploaded image.
Can be empty.'''
caption = models.TextField(_('Caption'), null=True, blank=True)
'TextField caption for image'
is_main = models.BooleanField(_('Main image'), default=False)
order = models.IntegerField(_('Order'), default=0)
objects = AttachedImageManager()
'''Default manager of :class:`~generic_images.managers.AttachedImageManager`
type.'''
def next(self):
''' Returns next image for same content_object and None if image is
the last. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__lt=self.order).order_by('-order')[0]
except IndexError:
return None
def previous(self):
''' Returns previous image for same content_object and None if image
is the first. '''
try:
return self.__class__.objects.for_model(self.content_object,
self.content_type).\
filter(order__gt=self.order).order_by('order')[0]
except IndexError:
return None
def get_order_in_album(self, reversed_ordering=True):
''' Returns image order number. It is calculated as (number+1) of images
attached to the same content_object whose order is greater
(if 'reverse_ordering' is True) or lesser (if 'reverse_ordering' is
False) than image's order.
'''
lookup = 'order__gt' if reversed_ordering else 'order__lt'
return self.__class__.objects.\
for_model(self.content_object, self.content_type).\
filter(**{lookup: self.order}).count() + 1
def _get_next_pk(self):
max_pk = self.__class__.objects.aggregate(m=Max('pk'))['m'] or 0
return max_pk+1
# def put_as_last(self):
# """ Sets order to max(order)+1 for self.content_object
# """
# last = self.__class__.objects.exclude(id=self.id).\
# filter(
# object_id = self.object_id,
# content_type = self.content_type,
# ).aggregate(max_order=Max('order'))['max_order'] or 0
# self.order = last+1
def get_file_name(self, filename):
''' Returns file name (without path and extenstion)
for uploaded image. Default is 'max(pk)+1'.
Override this in subclass or assign another functions per-instance
if you want different file names (ex: random string).
'''
# alphabet = "1234567890abcdefghijklmnopqrstuvwxyz"
# # 1e25 variants
# return ''.join([random.choice(alphabet) for i in xrange(16)])
# anyway _get_next_pk is needed for setting `order` field
return str(self._get_next_pk())
def save(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
if self.is_main:
related_images = self.__class__.objects.filter(
content_type=self.content_type,
object_id=self.object_id
)
related_images.update(is_main=False)
if not self.pk: # object is created
if not self.order: # order is not set
self.order = self._get_next_pk() # let it be max(pk)+1
super(AbstractAttachedImage, self).save(*args, **kwargs)
if send_signal:
image_saved.send(sender = self.content_type.model_class(),
instance = self)
def delete(self, *args, **kwargs):
send_signal = getattr(self, 'send_signal', True)
super(AbstractAttachedImage, self).delete(*args, **kwargs)
if send_signal:
image_deleted.send(sender = self.content_type.model_class(),
instance = self)
def __unicode__(self):
try:
if self.user:
return u"AttachedImage #%d for [%s] by [%s]" % (
self.pk, self.content_object, self.user)
else:
return u"AttachedImage #%d for [%s]" % (
self.pk, self.content_object,)
except:
try:
return u"AttachedImage #%d" % (self.pk)
except TypeError:
return u"new AttachedImage"
class Meta:
abstract=True
|
zulily/pudl
|
pudl/ad_group.py
|
ADGroup.group
|
python
|
def group(self, base_dn, samaccountname, attributes=(), explicit_membership_only=False):
groups = self.groups(base_dn, samaccountnames=[samaccountname], attributes=attributes,
explicit_membership_only=explicit_membership_only)
try:
# Usually we will find a match, but perhaps not always
return groups[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
|
Produces a single, populated ADGroup object through the object factory.
Does not populate attributes for the caller instance.
sAMAccountName may not be present in group objects in modern AD schemas.
Searching by common name and object class (group) may be an alternative
approach if required in the future.
:param str base_dn: The base DN to search within
:param str samaccountname: The group's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADGroup object
:rtype: ADGroup
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_group.py#L34-L57
|
[
"def groups(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):\n \"\"\"Gathers a list of ADGroup objects\n\n sAMAccountName may not be present in group objects in modern AD schemas.\n Searching by common name and object class (group) may be an alternative\n approach if required in the future.\n\n :param str base_dn: The base DN to search within\n :param list samaccountnames: A list of group names for which objects will be\n created, defaults to all groups if unspecified\n :param list attributes: Object attributes to populate, defaults to all\n\n :return: A list of populated ADGroup objects\n :rtype: list\n \"\"\"\n ad_groups = []\n\n search_filter = '(&(objectClass=group)(!(objectClass=user))(!(objectClass=computer)){0})'\n # If no samaccountnames specified, filter will pull all group objects under\n # base_dn\n if not samaccountnames:\n search_filter = search_filter.format('(sAMAccountName=*)')\n else:\n if len(samaccountnames) == 1:\n group_names = '(sAMAccountName={0})'.format(samaccountnames[0])\n else:\n group_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\\\n format(group) for group\n in samaccountnames]))\n\n search_filter = search_filter.format(group_names)\n\n\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n for search_result in results:\n adg = self._object_factory(search_result)\n if not explicit_membership_only and 'member' in dir(adg):\n member = [u[0] for u in\n self.adq.search(base_dn, '(memberOf:1.2.840.113556.1.4.1941:={0})'.\\\n format(search_result[0]), attributes=['member'])]\n adg.member = member\n ad_groups.append(adg)\n\n return ad_groups\n"
] |
class ADGroup(ADObject):
"""A class to represent AD group objects. Includes a number of
helper methods, particularly object-factory related.
ADGroup objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
In its current implementation, the memberOf attribute
is not expanded. The member attribute is however flattened out.
"""
def groups(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):
"""Gathers a list of ADGroup objects
sAMAccountName may not be present in group objects in modern AD schemas.
Searching by common name and object class (group) may be an alternative
approach if required in the future.
:param str base_dn: The base DN to search within
:param list samaccountnames: A list of group names for which objects will be
created, defaults to all groups if unspecified
:param list attributes: Object attributes to populate, defaults to all
:return: A list of populated ADGroup objects
:rtype: list
"""
ad_groups = []
search_filter = '(&(objectClass=group)(!(objectClass=user))(!(objectClass=computer)){0})'
# If no samaccountnames specified, filter will pull all group objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
if len(samaccountnames) == 1:
group_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
group_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\
format(group) for group
in samaccountnames]))
search_filter = search_filter.format(group_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adg = self._object_factory(search_result)
if not explicit_membership_only and 'member' in dir(adg):
member = [u[0] for u in
self.adq.search(base_dn, '(memberOf:1.2.840.113556.1.4.1941:={0})'.\
format(search_result[0]), attributes=['member'])]
adg.member = member
ad_groups.append(adg)
return ad_groups
|
zulily/pudl
|
pudl/ad_group.py
|
ADGroup.groups
|
python
|
def groups(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):
ad_groups = []
search_filter = '(&(objectClass=group)(!(objectClass=user))(!(objectClass=computer)){0})'
# If no samaccountnames specified, filter will pull all group objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
if len(samaccountnames) == 1:
group_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
group_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\
format(group) for group
in samaccountnames]))
search_filter = search_filter.format(group_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adg = self._object_factory(search_result)
if not explicit_membership_only and 'member' in dir(adg):
member = [u[0] for u in
self.adq.search(base_dn, '(memberOf:1.2.840.113556.1.4.1941:={0})'.\
format(search_result[0]), attributes=['member'])]
adg.member = member
ad_groups.append(adg)
return ad_groups
|
Gathers a list of ADGroup objects
sAMAccountName may not be present in group objects in modern AD schemas.
Searching by common name and object class (group) may be an alternative
approach if required in the future.
:param str base_dn: The base DN to search within
:param list samaccountnames: A list of group names for which objects will be
created, defaults to all groups if unspecified
:param list attributes: Object attributes to populate, defaults to all
:return: A list of populated ADGroup objects
:rtype: list
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_group.py#L60-L106
|
[
"def _object_factory(self, search_result):\n \"\"\"Given a single search result, create and return an object\n\n :param tuple search_result: a single search result returned by an LDAP query,\n position 0 is the DN and position 1 is a dictionary of key/value pairs\n\n :return: A single AD object instance\n :rtype: Object (ADUser, ADGroup, etc.)\n\n \"\"\"\n class_name = self.__class__.__name__\n module = self.__module__\n logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])\n module = importlib.import_module('{0}'.format(module))\n class_ = getattr(module, class_name)\n ado = class_(self.adq)\n\n # A unique set of all attribute names found\n attribute_names = set()\n # A unique set\n multiples = set()\n for k in search_result[1].keys():\n if k not in attribute_names:\n attribute_names.add(k)\n else:\n multiples.add(k)\n for k, val in search_result[1].iteritems():\n if k in multiples and not hasattr(ado, k):\n setattr(ado, k.lower(), list())\n if hasattr(ado, k):\n value = getattr(ado, k)\n if len(val) == 1:\n value.append(val[0])\n else:\n value.append(val)\n else:\n if len(val) == 1:\n setattr(ado, k.lower(), val[0])\n else:\n setattr(ado, k.lower(), val)\n\n\n logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,\n search_result[0], ado.__dict__)\n\n return ado\n",
"def search(self, base_dn, search_filter, attributes=()):\n \"\"\"Perform an AD search\n\n :param str base_dn: The base DN to search within\n :param str search_filter: The search filter to apply, such as:\n *objectClass=person*\n :param list attributes: Object attributes to populate, defaults to all\n \"\"\"\n results = []\n page = 0\n while page == 0 or self.sprc.cookie:\n page += 1\n #pylint: disable=no-member\n message_id = self.ldap.search_ext(base_dn, ldap.SCOPE_SUBTREE,\n search_filter, attributes,\n serverctrls=[self.sprc])\n #pylint: enable=no-member\n data, server_controls = self.ldap.result3(message_id)[1::2]\n self.sprc.cookie = server_controls[0].cookie\n logging.debug('%s - Page %s results: %s', \\\n self.__class__.__name__, page, ', '.join(k[0] for k in data))\n results += [u for u in data]\n\n return results\n"
] |
class ADGroup(ADObject):
"""A class to represent AD group objects. Includes a number of
helper methods, particularly object-factory related.
ADGroup objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
In its current implementation, the memberOf attribute
is not expanded. The member attribute is however flattened out.
"""
def group(self, base_dn, samaccountname, attributes=(), explicit_membership_only=False):
"""Produces a single, populated ADGroup object through the object factory.
Does not populate attributes for the caller instance.
sAMAccountName may not be present in group objects in modern AD schemas.
Searching by common name and object class (group) may be an alternative
approach if required in the future.
:param str base_dn: The base DN to search within
:param str samaccountname: The group's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADGroup object
:rtype: ADGroup
"""
groups = self.groups(base_dn, samaccountnames=[samaccountname], attributes=attributes,
explicit_membership_only=explicit_membership_only)
try:
# Usually we will find a match, but perhaps not always
return groups[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
|
zulily/pudl
|
pudl/scripts/cli.py
|
main
|
python
|
def main():
# Parse all command line argument
args = parse_arguments().parse_args()
# Setup logging
configure_logging(args)
logging.debug(args)
# Prompt for a password if necessary
if not args.password:
password = getpass.getpass(prompt='Password ({0}): '.format(args.user))
else:
password = args.password
# Create an instance of ADQuery which sets up a single
# connection used for querying, for all AD object types
ldap_url = 'ldap://{0}:{1}'.format(args.host, args.port)
adq = ADQuery(user=args.user, password=password, page_size=args.page_size,
ldap_url=ldap_url, tls_no_verify=args.tls_no_verify)
if args.subcommand == 'user':
adu = ADUser(adq)
users = adu.users(base_dn=args.base_dn, attributes=args.attributes,
samaccountnames=args.samaccountnames,
explicit_membership_only=args.explicit_membership_only)
users = object_filter(users, args.grep)
print(serialize([u.to_dict() for u in users],
output_format=args.output_format, attributes_only=args.attributes_only))
elif args.subcommand == 'group':
adg = ADGroup(adq)
groups = adg.groups(base_dn=args.base_dn, attributes=args.attributes,
samaccountnames=args.samaccountnames,
explicit_membership_only=args.explicit_membership_only)
groups = object_filter(groups, args.grep)
print(serialize([g.to_dict() for g in groups],
output_format=args.output_format, attributes_only=args.attributes_only))
elif args.subcommand == 'computer':
adg = ADComputer(adq)
computers = adg.computers(base_dn=args.base_dn, attributes=args.attributes,
samaccountnames=args.samaccountnames)
computers = object_filter(computers, args.grep)
print(serialize([c.to_dict() for c in computers],
output_format=args.output_format, attributes_only=args.attributes_only))
|
Do some stuff
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/scripts/cli.py#L34-L76
|
[
"def serialize(ad_objects, output_format='json', indent=2, attributes_only=False):\n \"\"\"Serialize the object to the specified format\n\n :param ad_objects list: A list of ADObjects to serialize\n :param output_format str: The output format, json or yaml. Defaults to json\n :param indent int: The number of spaces to indent, defaults to 2\n :param attributes only: Only serialize the attributes found in the first record of the list\n of ADObjects\n\n :return: A serialized, formatted representation of the list of ADObjects\n :rtype: str\n \"\"\"\n\n # If the request is to only show attributes for objects returned\n # in the query, overwrite ad_objects with only those attributes present in\n # the first object in the list\n if attributes_only:\n ad_objects = [key for key in sorted(ad_objects[0].keys())]\n\n if output_format == 'json':\n return json.dumps(ad_objects, indent=indent, ensure_ascii=False, sort_keys=True)\n elif output_format == 'yaml':\n return yaml.dump(sorted(ad_objects), indent=indent)\n",
"def object_filter(objects, grep):\n \"\"\"Filter out any objects that do not have attributes with values matching\n *all* regular expressions present in grep (AND, essentially)\n\n :param objects ADObject: A list of ADObjects\n :param grep list: A list of regular expressions that must match for filtering\n\n :return: A list of filtered ADObjects\n :rtype: list\n \"\"\"\n filtered = []\n if grep:\n for ad_object in objects:\n o_string = ' '.join([value for value in ad_object.to_dict().values()\n if isinstance(value, str)])\n skip = False\n for regex in grep:\n if not re.search(regex, o_string, re.M|re.S|re.I):\n skip = True\n break\n if not skip:\n filtered.append(ad_object)\n\n return filtered\n else:\n return objects\n",
"def parse_arguments():\n \"\"\"Collect command-line arguments. Let the caller run parse_args(), as\n sphinx-argparse requires a function that returns an instance of\n argparse.ArgumentParser\n \"\"\"\n # Pull a few settings from the environment, should they exist\n base_dn = os.environ['PUDL_BASE_DN'] if 'PUDL_BASE_DN' in os.environ \\\n else 'OU=Departments,DC=example,DC=com'\n domain = os.environ['PUDL_DOMAIN'].upper() if 'PUDL_DOMAIN' in os.environ else 'EXAMPLE'\n page_size = os.environ['PUDL_PAGE_SIZE'].upper() if 'PUDL_PAGE_SIZE' in os.environ else 300\n tls_no_verify = bool(os.environ['PUDL_TLS_NO_VERIFY'].lower().capitalize()) \\\n if 'PUDL_TLS_NO_VERIFY' in os.environ else False\n\n parser = argparse.ArgumentParser(prog='pudl',\n description='A script for interacting with Active ' + \\\n 'Directory, leveraging python-ldap')\n parser.add_argument('-V', '--version', action='version', version='pudl v' + pudl_version,\n help=\"Print the version number and exit\")\n subparsers = parser.add_subparsers(dest='subcommand', help='Sub-command help')\n parser_common = subparsers.add_parser('common', add_help=False)\n parser_common.add_argument('--user', '-u', action='store', dest='user',\n help='The ldap user (bind dn) to connect as. ' + \\\n 'The full DN will work, or often, just the CN may be ' + \\\n 'sufficient, such as \"John Smith\", or more commonly, ' + \\\n 'specify the domain and sAMAccountName. Defaults to ' + \\\n '{0}\\\\username. The domain '.format(domain) + \\\n 'portion may be overridden with PUDL_DOMAIN',\n default='{0}\\\\{1}'.format(domain, getpass.getuser()))\n parser_common.add_argument('--password', '-p', action='store',\n dest='password', help=\"The connecting user's password\")\n parser_common.add_argument('--host', '-H', action='store',\n dest='host', help='The AD/LDAP host, defaults to ldap',\n default='ldap')\n parser_common.add_argument('--port', '-P', action='store', dest='port',\n help='The ldap port, defaults to 389. 
389 is ' + \\\n 'is the standard port', type=int, default=389)\n parser_common.add_argument('--page-size', '-s', action='store', dest='page_size',\n help='The ldap results are paged, specify the ' + \\\n 'number of results per page, defaults to ' + \\\n '{0}. May be overridden with PUDL_PAGE_SIZE'.format(page_size),\n type=int, default=page_size)\n parser_common.add_argument('--base-dn', '-b', action='store',\n dest='base_dn', default=base_dn,\n help=\"The Base DN to use, defaults to {0}. \".format(base_dn) + \\\n \"May be overridden with PUDL_BASE_DN\")\n parser_common.add_argument('--attribute', '-a', action='append',\n dest='attributes', metavar='ATTRIBUTE',\n help=\"Attributes to include in results objects. Note that \" + \\\n \"any nested objects return all attributes. Maybe be used \" + \\\n \"multiple times, and if not specified, all \" + \\\n \"attributes are included in top-level objects\")\n parser_common.add_argument('--grep', '-g', action='append', dest='grep',\n help='Filter results to only those matching the specified ' + \\\n 'regular expression (compares against all attributes). 
' + \\\n 'May be used multiple times')\n parser_common.add_argument('--attributes-only', '-A', action='store_true',\n dest='attributes_only', help=\"Only display a list of attributes \" + \\\n \"that are present for the object type returned by the LDAP query\")\n parser_common.add_argument('--output-format', '-f', action='store', dest='output_format',\n choices=['json', 'yaml'], default='json',\n help=\"Output format, defaults to json.\")\n parser_common.add_argument('--verbose', '-v', action='store_true', dest='verbose',\n help='Turn on verbose output', default=False)\n parser_common.add_argument('--debug', '-d', action='store_true', dest='debug', default=False,\n help=\"Print out debugging information, very chatty\")\n parser_common.add_argument('--tls-no-verify', '-V', action='store_true',\n dest='tls_no_verify',\n default=tls_no_verify, help=\"Don't verify the authenticity \" + \\\n \"of the server's certificate, defaults to \" + \\\n \"{0} and may be overridden with \".format(tls_no_verify) + \\\n \"PUDL_TLS_NO_VERIFY\")\n parser_user = subparsers.add_parser('user', parents=[parser_common], conflict_handler='resolve',\n help='Pull user objects from AD')\n parser_user.add_argument(nargs=\"*\", dest='samaccountnames',\n help='sAMAccountNames for any user objects that are to be ' + \\\n 'looked up. If unspecified, returns all users under the base ' + \\\n 'DN provided')\n parser_user.add_argument('--explicit-membership-only', '-e', action='store_true',\n dest='explicit_membership_only', default=False,\n help=\"Only show membership for users that is explicit, \" + \\\n \"not taking into account group nesting. Defaults to False\")\n parser_group = subparsers.add_parser('group', parents=[parser_common],\n conflict_handler='resolve',\n help='Pull group objects from AD')\n parser_group.add_argument(nargs=\"*\", dest='samaccountnames',\n help=\"sAMAccountNames for any group objects that are to be \" + \\\n 'looked up. 
If unspecified, returns all groups under the base ' + \\\n 'DN provided. sAMAccountName may not be present in group ' + \\\n 'objects in modern AD schemas')\n parser_group.add_argument('--explicit-membership-only', '-e', action='store_true',\n dest='explicit_membership_only', default=False,\n help=\"Only show membership for users that is explicit, \" + \\\n \"not taking into account group nesting. Defaults to False\")\n parser_computer = subparsers.add_parser('computer', parents=[parser_common],\n conflict_handler='resolve',\n help='Pull computer objects from AD')\n parser_computer.add_argument(nargs=\"*\", dest='samaccountnames',\n help=\"sAMAccountNames for any computer objects that are to be \" + \\\n 'looked up. If unspecified, returns all computers under ' + \\\n 'the base DN provided.')\n\n # sphinx is not add_help=False aware...\n del subparsers.choices['common']\n\n return parser\n",
"def configure_logging(args):\n \"\"\"Logging to console\"\"\"\n log_format = logging.Formatter('%(levelname)s:%(name)s:line %(lineno)s:%(message)s')\n log_level = logging.INFO if args.verbose else logging.WARN\n log_level = logging.DEBUG if args.debug else log_level\n console = logging.StreamHandler()\n console.setFormatter(log_format)\n console.setLevel(log_level)\n root_logger = logging.getLogger()\n if len(root_logger.handlers) == 0:\n root_logger.addHandler(console)\n root_logger.setLevel(log_level)\n root_logger.handlers[0].setFormatter(log_format)\n logging.getLogger(__name__)\n",
"def computers(self, base_dn, samaccountnames=(), attributes=()):\n \"\"\"Gathers a list of ADComputer objects\n\n :param str base_dn: The base DN to search within\n :param list samaccountnames: A list of computer names for which objects will be\n created, defaults to all computers if unspecified\n :param list attributes: Object attributes to populate, defaults to all\n\n :return: A list of populated ADComputer objects\n :rtype: list\n \"\"\"\n ad_computers = []\n\n search_filter = '(&(objectClass=computer){0})'\n # If no samaccountnames specified, filter will pull all computer objects under\n # base_dn\n if not samaccountnames:\n search_filter = search_filter.format('(sAMAccountName=*)')\n else:\n if len(samaccountnames) == 1:\n computer_names = '(sAMAccountName={0})'.format(samaccountnames[0])\n else:\n computer_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\\\n format(computer) for computer\n in samaccountnames]))\n\n search_filter = search_filter.format(computer_names)\n\n\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n for search_result in results:\n adc = self._object_factory(search_result)\n ad_computers.append(adc)\n\n return ad_computers\n",
"def groups(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):\n \"\"\"Gathers a list of ADGroup objects\n\n sAMAccountName may not be present in group objects in modern AD schemas.\n Searching by common name and object class (group) may be an alternative\n approach if required in the future.\n\n :param str base_dn: The base DN to search within\n :param list samaccountnames: A list of group names for which objects will be\n created, defaults to all groups if unspecified\n :param list attributes: Object attributes to populate, defaults to all\n\n :return: A list of populated ADGroup objects\n :rtype: list\n \"\"\"\n ad_groups = []\n\n search_filter = '(&(objectClass=group)(!(objectClass=user))(!(objectClass=computer)){0})'\n # If no samaccountnames specified, filter will pull all group objects under\n # base_dn\n if not samaccountnames:\n search_filter = search_filter.format('(sAMAccountName=*)')\n else:\n if len(samaccountnames) == 1:\n group_names = '(sAMAccountName={0})'.format(samaccountnames[0])\n else:\n group_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\\\n format(group) for group\n in samaccountnames]))\n\n search_filter = search_filter.format(group_names)\n\n\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n for search_result in results:\n adg = self._object_factory(search_result)\n if not explicit_membership_only and 'member' in dir(adg):\n member = [u[0] for u in\n self.adq.search(base_dn, '(memberOf:1.2.840.113556.1.4.1941:={0})'.\\\n format(search_result[0]), attributes=['member'])]\n adg.member = member\n ad_groups.append(adg)\n\n return ad_groups\n",
"def users(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):\n \"\"\"Gathers a list of ADUser objects\n\n :param str base_dn: The base DN to search within\n :param list attributes: Object attributes to populate, defaults to all\n :param list samaccountnames: A list of usernames for which objects will be\n created, defaults to all users if unspecified\n :param bool explicit_membership_only: If set True, memberof will only\n list groups for which users are directly referenced members\n\n :return: A list of populated ADUser objects\n :rtype: list\n \"\"\"\n ad_users = []\n\n search_filter = '(&(objectClass=user)(!(objectClass=group))(!(objectClass=computer)){0})'\n # If no samaccountnames specified, filter will pull all user objects under\n # base_dn\n if not samaccountnames:\n search_filter = search_filter.format('(sAMAccountName=*)')\n else:\n # Extensible filter: http://bit.ly/1Qh4eyV\n if len(samaccountnames) == 1:\n account_names = '(sAMAccountName={0})'.format(samaccountnames[0])\n else:\n account_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(username) \\\n for username in samaccountnames]))\n\n search_filter = search_filter.format(account_names)\n\n\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n for search_result in results:\n adu = self._object_factory(search_result)\n # Each results index 0 of the tuple is the DN\n if not explicit_membership_only and 'memberof' in dir(adu):\n memberof = [g[0] for g in self.adq.search(base_dn,\n '(member:1.2.840.113556.1.4.1941:={0})'.\\\n format(search_result[0]),\n attributes=['memberof'])]\n adu.memberof = memberof\n ad_users.append(adu)\n\n\n return ad_users\n"
] |
#! /usr/bin/env python
#
# Copyright (C) 2015 zulily, llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pudl cli"""
from __future__ import print_function
import argparse
import getpass
import logging
import os
import sys
from pudl import __version__ as pudl_version
from pudl.ad_computer import ADComputer
from pudl.ad_group import ADGroup
from pudl.ad_query import ADQuery
from pudl.ad_user import ADUser
from pudl.helper import object_filter, serialize
def configure_logging(args):
    """Attach a console handler to the root logger.

    Level selection: WARN by default, INFO with ``--verbose``, DEBUG with
    ``--debug`` (debug wins when both flags are set).

    :param args: parsed options exposing boolean ``verbose`` and ``debug``
    """
    fmt = logging.Formatter('%(levelname)s:%(name)s:line %(lineno)s:%(message)s')
    if args.debug:
        level = logging.DEBUG
    elif args.verbose:
        level = logging.INFO
    else:
        level = logging.WARN
    handler = logging.StreamHandler()
    handler.setFormatter(fmt)
    handler.setLevel(level)
    root = logging.getLogger()
    if not root.handlers:
        root.addHandler(handler)
        root.setLevel(level)
    # Keep the first handler's format in sync even when a handler was
    # already installed by an earlier call.
    root.handlers[0].setFormatter(fmt)
    logging.getLogger(__name__)
def parse_arguments():
    """Build the pudl command-line argument parser.

    Let the caller run parse_args(), as sphinx-argparse requires a
    function that returns an instance of argparse.ArgumentParser.

    Defaults for several options may be overridden through the
    environment: PUDL_BASE_DN, PUDL_DOMAIN, PUDL_PAGE_SIZE and
    PUDL_TLS_NO_VERIFY.

    :return: The configured parser with user/group/computer subcommands
    :rtype: argparse.ArgumentParser
    """
    # Pull a few settings from the environment, should they exist
    base_dn = os.environ.get('PUDL_BASE_DN', 'OU=Departments,DC=example,DC=com')
    domain = os.environ.get('PUDL_DOMAIN', 'EXAMPLE').upper()
    # Convert once here so a malformed value fails fast, not at parse time
    page_size = int(os.environ.get('PUDL_PAGE_SIZE', 300))
    # BUGFIX: bool() of any non-empty string is True, so the previous
    # bool(value.lower().capitalize()) treated PUDL_TLS_NO_VERIFY=false
    # as True.  Only explicit truthy spellings enable the option now.
    tls_no_verify = os.environ.get('PUDL_TLS_NO_VERIFY',
                                   '').strip().lower() in ('1', 'true', 'yes')
    parser = argparse.ArgumentParser(prog='pudl',
                                     description='A script for interacting with Active ' + \
                                     'Directory, leveraging python-ldap')
    parser.add_argument('-V', '--version', action='version', version='pudl v' + pudl_version,
                        help="Print the version number and exit")
    subparsers = parser.add_subparsers(dest='subcommand', help='Sub-command help')
    # "common" is a hidden parent parser holding the options shared by the
    # real subcommands; it is deleted from the visible choices at the end.
    parser_common = subparsers.add_parser('common', add_help=False)
    parser_common.add_argument('--user', '-u', action='store', dest='user',
                               help='The ldap user (bind dn) to connect as. ' + \
                               'The full DN will work, or often, just the CN may be ' + \
                               'sufficient, such as "John Smith", or more commonly, ' + \
                               'specify the domain and sAMAccountName. Defaults to ' + \
                               '{0}\\username. The domain '.format(domain) + \
                               'portion may be overridden with PUDL_DOMAIN',
                               default='{0}\\{1}'.format(domain, getpass.getuser()))
    parser_common.add_argument('--password', '-p', action='store',
                               dest='password', help="The connecting user's password")
    parser_common.add_argument('--host', '-H', action='store',
                               dest='host', help='The AD/LDAP host, defaults to ldap',
                               default='ldap')
    # TYPO FIX: help text previously read "389 is is the standard port"
    parser_common.add_argument('--port', '-P', action='store', dest='port',
                               help='The ldap port, defaults to 389. 389 is ' + \
                               'the standard port', type=int, default=389)
    parser_common.add_argument('--page-size', '-s', action='store', dest='page_size',
                               help='The ldap results are paged, specify the ' + \
                               'number of results per page, defaults to ' + \
                               '{0}. May be overridden with PUDL_PAGE_SIZE'.format(page_size),
                               type=int, default=page_size)
    parser_common.add_argument('--base-dn', '-b', action='store',
                               dest='base_dn', default=base_dn,
                               help="The Base DN to use, defaults to {0}. ".format(base_dn) + \
                               "May be overridden with PUDL_BASE_DN")
    # TYPO FIX: "Maybe be used" -> "May be used"
    parser_common.add_argument('--attribute', '-a', action='append',
                               dest='attributes', metavar='ATTRIBUTE',
                               help="Attributes to include in results objects. Note that " + \
                               "any nested objects return all attributes. May be used " + \
                               "multiple times, and if not specified, all " + \
                               "attributes are included in top-level objects")
    parser_common.add_argument('--grep', '-g', action='append', dest='grep',
                               help='Filter results to only those matching the specified ' + \
                               'regular expression (compares against all attributes). ' + \
                               'May be used multiple times')
    parser_common.add_argument('--attributes-only', '-A', action='store_true',
                               dest='attributes_only', help="Only display a list of attributes " + \
                               "that are present for the object type returned by the LDAP query")
    parser_common.add_argument('--output-format', '-f', action='store', dest='output_format',
                               choices=['json', 'yaml'], default='json',
                               help="Output format, defaults to json.")
    parser_common.add_argument('--verbose', '-v', action='store_true', dest='verbose',
                               help='Turn on verbose output', default=False)
    parser_common.add_argument('--debug', '-d', action='store_true', dest='debug', default=False,
                               help="Print out debugging information, very chatty")
    parser_common.add_argument('--tls-no-verify', '-V', action='store_true',
                               dest='tls_no_verify',
                               default=tls_no_verify, help="Don't verify the authenticity " + \
                               "of the server's certificate, defaults to " + \
                               "{0} and may be overridden with ".format(tls_no_verify) + \
                               "PUDL_TLS_NO_VERIFY")
    parser_user = subparsers.add_parser('user', parents=[parser_common], conflict_handler='resolve',
                                        help='Pull user objects from AD')
    # Positional: zero or more account names
    parser_user.add_argument(nargs="*", dest='samaccountnames',
                             help='sAMAccountNames for any user objects that are to be ' + \
                             'looked up. If unspecified, returns all users under the base ' + \
                             'DN provided')
    parser_user.add_argument('--explicit-membership-only', '-e', action='store_true',
                             dest='explicit_membership_only', default=False,
                             help="Only show membership for users that is explicit, " + \
                             "not taking into account group nesting. Defaults to False")
    parser_group = subparsers.add_parser('group', parents=[parser_common],
                                         conflict_handler='resolve',
                                         help='Pull group objects from AD')
    parser_group.add_argument(nargs="*", dest='samaccountnames',
                              help="sAMAccountNames for any group objects that are to be " + \
                              'looked up. If unspecified, returns all groups under the base ' + \
                              'DN provided. sAMAccountName may not be present in group ' + \
                              'objects in modern AD schemas')
    parser_group.add_argument('--explicit-membership-only', '-e', action='store_true',
                              dest='explicit_membership_only', default=False,
                              help="Only show membership for users that is explicit, " + \
                              "not taking into account group nesting. Defaults to False")
    parser_computer = subparsers.add_parser('computer', parents=[parser_common],
                                            conflict_handler='resolve',
                                            help='Pull computer objects from AD')
    parser_computer.add_argument(nargs="*", dest='samaccountnames',
                                 help="sAMAccountNames for any computer objects that are to be " + \
                                 'looked up. If unspecified, returns all computers under ' + \
                                 'the base DN provided.')
    # sphinx is not add_help=False aware...
    del subparsers.choices['common']
    return parser
# Script entry point; main() returns None, so the process exit status is 0.
if __name__ == '__main__':
    sys.exit(main())
|
zulily/pudl
|
pudl/scripts/cli.py
|
configure_logging
|
python
|
def configure_logging(args):
    """Install a console (stderr) handler on the root logger.

    ``--debug`` beats ``--verbose``; without either the level is WARN.

    :param args: parsed options with ``verbose`` and ``debug`` booleans
    """
    formatter = logging.Formatter('%(levelname)s:%(name)s:line %(lineno)s:%(message)s')
    level = logging.WARN
    if args.verbose:
        level = logging.INFO
    if args.debug:
        level = logging.DEBUG
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(level)
    root = logging.getLogger()
    if not root.handlers:
        root.addHandler(stream_handler)
        root.setLevel(level)
    # Re-apply the format in case a handler was already registered elsewhere
    root.handlers[0].setFormatter(formatter)
    logging.getLogger(__name__)
|
Logging to console
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/scripts/cli.py#L79-L92
| null |
#! /usr/bin/env python
#
# Copyright (C) 2015 zulily, llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pudl cli"""
from __future__ import print_function
import argparse
import getpass
import logging
import os
import sys
from pudl import __version__ as pudl_version
from pudl.ad_computer import ADComputer
from pudl.ad_group import ADGroup
from pudl.ad_query import ADQuery
from pudl.ad_user import ADUser
from pudl.helper import object_filter, serialize
def main():
    """Command-line entry point.

    Parses arguments, connects to AD once, runs the selected subcommand
    (user/group/computer) and prints the serialized results.
    """
    args = parse_arguments().parse_args()
    configure_logging(args)
    logging.debug(args)
    # Prompt interactively only when no password was supplied
    if args.password:
        password = args.password
    else:
        password = getpass.getpass(prompt='Password ({0}): '.format(args.user))
    # A single ADQuery connection is shared by every AD object type
    adq = ADQuery(user=args.user, password=password, page_size=args.page_size,
                  ldap_url='ldap://{0}:{1}'.format(args.host, args.port),
                  tls_no_verify=args.tls_no_verify)
    if args.subcommand == 'user':
        records = ADUser(adq).users(base_dn=args.base_dn, attributes=args.attributes,
                                    samaccountnames=args.samaccountnames,
                                    explicit_membership_only=args.explicit_membership_only)
    elif args.subcommand == 'group':
        records = ADGroup(adq).groups(base_dn=args.base_dn, attributes=args.attributes,
                                      samaccountnames=args.samaccountnames,
                                      explicit_membership_only=args.explicit_membership_only)
    elif args.subcommand == 'computer':
        records = ADComputer(adq).computers(base_dn=args.base_dn, attributes=args.attributes,
                                            samaccountnames=args.samaccountnames)
    else:
        return
    records = object_filter(records, args.grep)
    print(serialize([record.to_dict() for record in records],
                    output_format=args.output_format, attributes_only=args.attributes_only))
def parse_arguments():
    """Build the pudl command-line argument parser.

    Let the caller run parse_args(), as sphinx-argparse requires a
    function that returns an instance of argparse.ArgumentParser.

    Defaults for several options may be overridden through the
    environment: PUDL_BASE_DN, PUDL_DOMAIN, PUDL_PAGE_SIZE and
    PUDL_TLS_NO_VERIFY.

    :return: The configured parser with user/group/computer subcommands
    :rtype: argparse.ArgumentParser
    """
    # Pull a few settings from the environment, should they exist
    base_dn = os.environ.get('PUDL_BASE_DN', 'OU=Departments,DC=example,DC=com')
    domain = os.environ.get('PUDL_DOMAIN', 'EXAMPLE').upper()
    # Convert once here so a malformed value fails fast, not at parse time
    page_size = int(os.environ.get('PUDL_PAGE_SIZE', 300))
    # BUGFIX: bool() of any non-empty string is True, so the previous
    # bool(value.lower().capitalize()) treated PUDL_TLS_NO_VERIFY=false
    # as True.  Only explicit truthy spellings enable the option now.
    tls_no_verify = os.environ.get('PUDL_TLS_NO_VERIFY',
                                   '').strip().lower() in ('1', 'true', 'yes')
    parser = argparse.ArgumentParser(prog='pudl',
                                     description='A script for interacting with Active ' + \
                                     'Directory, leveraging python-ldap')
    parser.add_argument('-V', '--version', action='version', version='pudl v' + pudl_version,
                        help="Print the version number and exit")
    subparsers = parser.add_subparsers(dest='subcommand', help='Sub-command help')
    # "common" is a hidden parent parser holding the options shared by the
    # real subcommands; it is deleted from the visible choices at the end.
    parser_common = subparsers.add_parser('common', add_help=False)
    parser_common.add_argument('--user', '-u', action='store', dest='user',
                               help='The ldap user (bind dn) to connect as. ' + \
                               'The full DN will work, or often, just the CN may be ' + \
                               'sufficient, such as "John Smith", or more commonly, ' + \
                               'specify the domain and sAMAccountName. Defaults to ' + \
                               '{0}\\username. The domain '.format(domain) + \
                               'portion may be overridden with PUDL_DOMAIN',
                               default='{0}\\{1}'.format(domain, getpass.getuser()))
    parser_common.add_argument('--password', '-p', action='store',
                               dest='password', help="The connecting user's password")
    parser_common.add_argument('--host', '-H', action='store',
                               dest='host', help='The AD/LDAP host, defaults to ldap',
                               default='ldap')
    # TYPO FIX: help text previously read "389 is is the standard port"
    parser_common.add_argument('--port', '-P', action='store', dest='port',
                               help='The ldap port, defaults to 389. 389 is ' + \
                               'the standard port', type=int, default=389)
    parser_common.add_argument('--page-size', '-s', action='store', dest='page_size',
                               help='The ldap results are paged, specify the ' + \
                               'number of results per page, defaults to ' + \
                               '{0}. May be overridden with PUDL_PAGE_SIZE'.format(page_size),
                               type=int, default=page_size)
    parser_common.add_argument('--base-dn', '-b', action='store',
                               dest='base_dn', default=base_dn,
                               help="The Base DN to use, defaults to {0}. ".format(base_dn) + \
                               "May be overridden with PUDL_BASE_DN")
    # TYPO FIX: "Maybe be used" -> "May be used"
    parser_common.add_argument('--attribute', '-a', action='append',
                               dest='attributes', metavar='ATTRIBUTE',
                               help="Attributes to include in results objects. Note that " + \
                               "any nested objects return all attributes. May be used " + \
                               "multiple times, and if not specified, all " + \
                               "attributes are included in top-level objects")
    parser_common.add_argument('--grep', '-g', action='append', dest='grep',
                               help='Filter results to only those matching the specified ' + \
                               'regular expression (compares against all attributes). ' + \
                               'May be used multiple times')
    parser_common.add_argument('--attributes-only', '-A', action='store_true',
                               dest='attributes_only', help="Only display a list of attributes " + \
                               "that are present for the object type returned by the LDAP query")
    parser_common.add_argument('--output-format', '-f', action='store', dest='output_format',
                               choices=['json', 'yaml'], default='json',
                               help="Output format, defaults to json.")
    parser_common.add_argument('--verbose', '-v', action='store_true', dest='verbose',
                               help='Turn on verbose output', default=False)
    parser_common.add_argument('--debug', '-d', action='store_true', dest='debug', default=False,
                               help="Print out debugging information, very chatty")
    parser_common.add_argument('--tls-no-verify', '-V', action='store_true',
                               dest='tls_no_verify',
                               default=tls_no_verify, help="Don't verify the authenticity " + \
                               "of the server's certificate, defaults to " + \
                               "{0} and may be overridden with ".format(tls_no_verify) + \
                               "PUDL_TLS_NO_VERIFY")
    parser_user = subparsers.add_parser('user', parents=[parser_common], conflict_handler='resolve',
                                        help='Pull user objects from AD')
    # Positional: zero or more account names
    parser_user.add_argument(nargs="*", dest='samaccountnames',
                             help='sAMAccountNames for any user objects that are to be ' + \
                             'looked up. If unspecified, returns all users under the base ' + \
                             'DN provided')
    parser_user.add_argument('--explicit-membership-only', '-e', action='store_true',
                             dest='explicit_membership_only', default=False,
                             help="Only show membership for users that is explicit, " + \
                             "not taking into account group nesting. Defaults to False")
    parser_group = subparsers.add_parser('group', parents=[parser_common],
                                         conflict_handler='resolve',
                                         help='Pull group objects from AD')
    parser_group.add_argument(nargs="*", dest='samaccountnames',
                              help="sAMAccountNames for any group objects that are to be " + \
                              'looked up. If unspecified, returns all groups under the base ' + \
                              'DN provided. sAMAccountName may not be present in group ' + \
                              'objects in modern AD schemas')
    parser_group.add_argument('--explicit-membership-only', '-e', action='store_true',
                              dest='explicit_membership_only', default=False,
                              help="Only show membership for users that is explicit, " + \
                              "not taking into account group nesting. Defaults to False")
    parser_computer = subparsers.add_parser('computer', parents=[parser_common],
                                            conflict_handler='resolve',
                                            help='Pull computer objects from AD')
    parser_computer.add_argument(nargs="*", dest='samaccountnames',
                                 help="sAMAccountNames for any computer objects that are to be " + \
                                 'looked up. If unspecified, returns all computers under ' + \
                                 'the base DN provided.')
    # sphinx is not add_help=False aware...
    del subparsers.choices['common']
    return parser
# Script entry point; main() returns None, so the process exit status is 0.
if __name__ == '__main__':
    sys.exit(main())
|
zulily/pudl
|
pudl/scripts/cli.py
|
parse_arguments
|
python
|
def parse_arguments():
    """Build the pudl command-line argument parser.

    Let the caller run parse_args(), as sphinx-argparse requires a
    function that returns an instance of argparse.ArgumentParser.

    Defaults for several options may be overridden through the
    environment: PUDL_BASE_DN, PUDL_DOMAIN, PUDL_PAGE_SIZE and
    PUDL_TLS_NO_VERIFY.

    :return: The configured parser with user/group/computer subcommands
    :rtype: argparse.ArgumentParser
    """
    # Pull a few settings from the environment, should they exist
    base_dn = os.environ.get('PUDL_BASE_DN', 'OU=Departments,DC=example,DC=com')
    domain = os.environ.get('PUDL_DOMAIN', 'EXAMPLE').upper()
    # Convert once here so a malformed value fails fast, not at parse time
    page_size = int(os.environ.get('PUDL_PAGE_SIZE', 300))
    # BUGFIX: bool() of any non-empty string is True, so the previous
    # bool(value.lower().capitalize()) treated PUDL_TLS_NO_VERIFY=false
    # as True.  Only explicit truthy spellings enable the option now.
    tls_no_verify = os.environ.get('PUDL_TLS_NO_VERIFY',
                                   '').strip().lower() in ('1', 'true', 'yes')
    parser = argparse.ArgumentParser(prog='pudl',
                                     description='A script for interacting with Active ' + \
                                     'Directory, leveraging python-ldap')
    parser.add_argument('-V', '--version', action='version', version='pudl v' + pudl_version,
                        help="Print the version number and exit")
    subparsers = parser.add_subparsers(dest='subcommand', help='Sub-command help')
    # "common" is a hidden parent parser holding the options shared by the
    # real subcommands; it is deleted from the visible choices at the end.
    parser_common = subparsers.add_parser('common', add_help=False)
    parser_common.add_argument('--user', '-u', action='store', dest='user',
                               help='The ldap user (bind dn) to connect as. ' + \
                               'The full DN will work, or often, just the CN may be ' + \
                               'sufficient, such as "John Smith", or more commonly, ' + \
                               'specify the domain and sAMAccountName. Defaults to ' + \
                               '{0}\\username. The domain '.format(domain) + \
                               'portion may be overridden with PUDL_DOMAIN',
                               default='{0}\\{1}'.format(domain, getpass.getuser()))
    parser_common.add_argument('--password', '-p', action='store',
                               dest='password', help="The connecting user's password")
    parser_common.add_argument('--host', '-H', action='store',
                               dest='host', help='The AD/LDAP host, defaults to ldap',
                               default='ldap')
    # TYPO FIX: help text previously read "389 is is the standard port"
    parser_common.add_argument('--port', '-P', action='store', dest='port',
                               help='The ldap port, defaults to 389. 389 is ' + \
                               'the standard port', type=int, default=389)
    parser_common.add_argument('--page-size', '-s', action='store', dest='page_size',
                               help='The ldap results are paged, specify the ' + \
                               'number of results per page, defaults to ' + \
                               '{0}. May be overridden with PUDL_PAGE_SIZE'.format(page_size),
                               type=int, default=page_size)
    parser_common.add_argument('--base-dn', '-b', action='store',
                               dest='base_dn', default=base_dn,
                               help="The Base DN to use, defaults to {0}. ".format(base_dn) + \
                               "May be overridden with PUDL_BASE_DN")
    # TYPO FIX: "Maybe be used" -> "May be used"
    parser_common.add_argument('--attribute', '-a', action='append',
                               dest='attributes', metavar='ATTRIBUTE',
                               help="Attributes to include in results objects. Note that " + \
                               "any nested objects return all attributes. May be used " + \
                               "multiple times, and if not specified, all " + \
                               "attributes are included in top-level objects")
    parser_common.add_argument('--grep', '-g', action='append', dest='grep',
                               help='Filter results to only those matching the specified ' + \
                               'regular expression (compares against all attributes). ' + \
                               'May be used multiple times')
    parser_common.add_argument('--attributes-only', '-A', action='store_true',
                               dest='attributes_only', help="Only display a list of attributes " + \
                               "that are present for the object type returned by the LDAP query")
    parser_common.add_argument('--output-format', '-f', action='store', dest='output_format',
                               choices=['json', 'yaml'], default='json',
                               help="Output format, defaults to json.")
    parser_common.add_argument('--verbose', '-v', action='store_true', dest='verbose',
                               help='Turn on verbose output', default=False)
    parser_common.add_argument('--debug', '-d', action='store_true', dest='debug', default=False,
                               help="Print out debugging information, very chatty")
    parser_common.add_argument('--tls-no-verify', '-V', action='store_true',
                               dest='tls_no_verify',
                               default=tls_no_verify, help="Don't verify the authenticity " + \
                               "of the server's certificate, defaults to " + \
                               "{0} and may be overridden with ".format(tls_no_verify) + \
                               "PUDL_TLS_NO_VERIFY")
    parser_user = subparsers.add_parser('user', parents=[parser_common], conflict_handler='resolve',
                                        help='Pull user objects from AD')
    # Positional: zero or more account names
    parser_user.add_argument(nargs="*", dest='samaccountnames',
                             help='sAMAccountNames for any user objects that are to be ' + \
                             'looked up. If unspecified, returns all users under the base ' + \
                             'DN provided')
    parser_user.add_argument('--explicit-membership-only', '-e', action='store_true',
                             dest='explicit_membership_only', default=False,
                             help="Only show membership for users that is explicit, " + \
                             "not taking into account group nesting. Defaults to False")
    parser_group = subparsers.add_parser('group', parents=[parser_common],
                                         conflict_handler='resolve',
                                         help='Pull group objects from AD')
    parser_group.add_argument(nargs="*", dest='samaccountnames',
                              help="sAMAccountNames for any group objects that are to be " + \
                              'looked up. If unspecified, returns all groups under the base ' + \
                              'DN provided. sAMAccountName may not be present in group ' + \
                              'objects in modern AD schemas')
    parser_group.add_argument('--explicit-membership-only', '-e', action='store_true',
                              dest='explicit_membership_only', default=False,
                              help="Only show membership for users that is explicit, " + \
                              "not taking into account group nesting. Defaults to False")
    parser_computer = subparsers.add_parser('computer', parents=[parser_common],
                                            conflict_handler='resolve',
                                            help='Pull computer objects from AD')
    parser_computer.add_argument(nargs="*", dest='samaccountnames',
                                 help="sAMAccountNames for any computer objects that are to be " + \
                                 'looked up. If unspecified, returns all computers under ' + \
                                 'the base DN provided.')
    # sphinx is not add_help=False aware...
    del subparsers.choices['common']
    return parser
|
Collect command-line arguments. Let the caller run parse_args(), as
sphinx-argparse requires a function that returns an instance of
argparse.ArgumentParser
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/scripts/cli.py#L95-L199
| null |
#! /usr/bin/env python
#
# Copyright (C) 2015 zulily, llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pudl cli"""
from __future__ import print_function
import argparse
import getpass
import logging
import os
import sys
from pudl import __version__ as pudl_version
from pudl.ad_computer import ADComputer
from pudl.ad_group import ADGroup
from pudl.ad_query import ADQuery
from pudl.ad_user import ADUser
from pudl.helper import object_filter, serialize
def main():
    """Run the pudl CLI: query AD and print serialized results.

    Reads the parsed subcommand (user/group/computer), opens one shared
    LDAP connection, fetches the matching objects, applies any --grep
    filters, and prints them in the requested output format.
    """
    options = parse_arguments().parse_args()
    configure_logging(options)
    logging.debug(options)
    # Only prompt when no password was given on the command line
    secret = options.password or getpass.getpass(
        prompt='Password ({0}): '.format(options.user))
    # One ADQuery connection is shared by every AD object type
    connection = ADQuery(user=options.user, password=secret,
                         page_size=options.page_size,
                         ldap_url='ldap://{0}:{1}'.format(options.host, options.port),
                         tls_no_verify=options.tls_no_verify)
    if options.subcommand == 'user':
        found = ADUser(connection).users(
            base_dn=options.base_dn, attributes=options.attributes,
            samaccountnames=options.samaccountnames,
            explicit_membership_only=options.explicit_membership_only)
    elif options.subcommand == 'group':
        found = ADGroup(connection).groups(
            base_dn=options.base_dn, attributes=options.attributes,
            samaccountnames=options.samaccountnames,
            explicit_membership_only=options.explicit_membership_only)
    elif options.subcommand == 'computer':
        found = ADComputer(connection).computers(
            base_dn=options.base_dn, attributes=options.attributes,
            samaccountnames=options.samaccountnames)
    else:
        return
    found = object_filter(found, options.grep)
    print(serialize([item.to_dict() for item in found],
                    output_format=options.output_format,
                    attributes_only=options.attributes_only))
def configure_logging(args):
    """Wire up console logging on the root logger for the CLI.

    The effective level is DEBUG with ``--debug``, INFO with
    ``--verbose``, WARN otherwise.

    :param args: parsed options with ``verbose`` and ``debug`` flags
    """
    console_format = logging.Formatter(
        '%(levelname)s:%(name)s:line %(lineno)s:%(message)s')
    chosen = logging.DEBUG if args.debug else (
        logging.INFO if args.verbose else logging.WARN)
    console = logging.StreamHandler()
    console.setFormatter(console_format)
    console.setLevel(chosen)
    root_logger = logging.getLogger()
    if not root_logger.handlers:
        root_logger.addHandler(console)
        root_logger.setLevel(chosen)
    # Ensure the first handler uses our format even if one already existed
    root_logger.handlers[0].setFormatter(console_format)
    logging.getLogger(__name__)
# Script entry point; main() returns None, so the process exit status is 0.
if __name__ == '__main__':
    sys.exit(main())
|
zulily/pudl
|
pudl/ad_computer.py
|
ADComputer.computer
|
python
|
def computer(self, base_dn, samaccountname, attributes=()):
    """Produce a single populated ADComputer object through the object
    factory; does not populate attributes on the caller instance.

    :param str base_dn: The base DN to search within
    :param str samaccountname: The computer's sAMAccountName
    :param list attributes: Object attributes to populate, defaults to all
    :return: The populated ADComputer, or None (after logging) when the
        account could not be found
    :rtype: ADComputer
    """
    matches = self.computers(base_dn, samaccountnames=[samaccountname],
                             attributes=attributes)
    try:
        # Normally there is exactly one match, but AD may return none
        return matches[0]
    except IndexError:
        logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
|
Produces a single, populated ADComputer object through the object factory.
Does not populate attributes for the caller instance.
:param str base_dn: The base DN to search within
:param str samaccountname: The computer's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADComputer object
:rtype: ADComputer
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_computer.py#L30-L48
|
[
"def computers(self, base_dn, samaccountnames=(), attributes=()):\n \"\"\"Gathers a list of ADComputer objects\n\n :param str base_dn: The base DN to search within\n :param list samaccountnames: A list of computer names for which objects will be\n created, defaults to all computers if unspecified\n :param list attributes: Object attributes to populate, defaults to all\n\n :return: A list of populated ADComputer objects\n :rtype: list\n \"\"\"\n ad_computers = []\n\n search_filter = '(&(objectClass=computer){0})'\n # If no samaccountnames specified, filter will pull all computer objects under\n # base_dn\n if not samaccountnames:\n search_filter = search_filter.format('(sAMAccountName=*)')\n else:\n if len(samaccountnames) == 1:\n computer_names = '(sAMAccountName={0})'.format(samaccountnames[0])\n else:\n computer_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.\\\n format(computer) for computer\n in samaccountnames]))\n\n search_filter = search_filter.format(computer_names)\n\n\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n for search_result in results:\n adc = self._object_factory(search_result)\n ad_computers.append(adc)\n\n return ad_computers\n"
] |
class ADComputer(ADObject):
    """Represents AD computer objects, with object-factory helpers.

    Instances are flat: attributes are plain strings or lists, and the
    attribute set depends on what the LDAP query returned.
    """

    def computers(self, base_dn, samaccountnames=(), attributes=()):
        """Gather a list of ADComputer objects.

        :param str base_dn: The base DN to search within
        :param list samaccountnames: Computer names to look up; all
            computers under ``base_dn`` when empty
        :param list attributes: Object attributes to populate, defaults to all
        :return: Populated ADComputer objects
        :rtype: list
        """
        # Build the sAMAccountName clause: wildcard, single name, or an
        # OR-group of every requested name.
        if not samaccountnames:
            name_clause = '(sAMAccountName=*)'
        elif len(samaccountnames) == 1:
            name_clause = '(sAMAccountName={0})'.format(samaccountnames[0])
        else:
            joined = ''.join('(sAMAccountName={0})'.format(name)
                             for name in samaccountnames)
            name_clause = '(|{0})'.format(joined)
        search_filter = '(&(objectClass=computer){0})'.format(name_clause)
        logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
        return [self._object_factory(entry)
                for entry in self.adq.search(base_dn, search_filter, attributes)]
|
zulily/pudl
|
pudl/ad_computer.py
|
ADComputer.computers
|
python
|
def computers(self, base_dn, samaccountnames=(), attributes=()):
    """Gather a list of ADComputer objects.

    :param str base_dn: The base DN to search within
    :param list samaccountnames: Computer names to look up; all computers
        under ``base_dn`` when empty
    :param list attributes: Object attributes to populate, defaults to all
    :return: Populated ADComputer objects
    :rtype: list
    """
    # Build the sAMAccountName clause: wildcard, single name, or an
    # OR-group of every requested name.
    if not samaccountnames:
        name_clause = '(sAMAccountName=*)'
    elif len(samaccountnames) == 1:
        name_clause = '(sAMAccountName={0})'.format(samaccountnames[0])
    else:
        joined = ''.join('(sAMAccountName={0})'.format(name)
                         for name in samaccountnames)
        name_clause = '(|{0})'.format(joined)
    search_filter = '(&(objectClass=computer){0})'.format(name_clause)
    logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
    return [self._object_factory(entry)
            for entry in self.adq.search(base_dn, search_filter, attributes)]
|
Gathers a list of ADComputer objects
:param str base_dn: The base DN to search within
:param list samaccountnames: A list of computer names for which objects will be
created, defaults to all computers if unspecified
:param list attributes: Object attributes to populate, defaults to all
:return: A list of populated ADComputer objects
:rtype: list
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_computer.py#L51-L88
|
[
"def _object_factory(self, search_result):\n \"\"\"Given a single search result, create and return an object\n\n :param tuple search_result: a single search result returned by an LDAP query,\n position 0 is the DN and position 1 is a dictionary of key/value pairs\n\n :return: A single AD object instance\n :rtype: Object (ADUser, ADGroup, etc.)\n\n \"\"\"\n class_name = self.__class__.__name__\n module = self.__module__\n logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])\n module = importlib.import_module('{0}'.format(module))\n class_ = getattr(module, class_name)\n ado = class_(self.adq)\n\n # A unique set of all attribute names found\n attribute_names = set()\n # A unique set\n multiples = set()\n for k in search_result[1].keys():\n if k not in attribute_names:\n attribute_names.add(k)\n else:\n multiples.add(k)\n for k, val in search_result[1].iteritems():\n if k in multiples and not hasattr(ado, k):\n setattr(ado, k.lower(), list())\n if hasattr(ado, k):\n value = getattr(ado, k)\n if len(val) == 1:\n value.append(val[0])\n else:\n value.append(val)\n else:\n if len(val) == 1:\n setattr(ado, k.lower(), val[0])\n else:\n setattr(ado, k.lower(), val)\n\n\n logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,\n search_result[0], ado.__dict__)\n\n return ado\n",
"def search(self, base_dn, search_filter, attributes=()):\n \"\"\"Perform an AD search\n\n :param str base_dn: The base DN to search within\n :param str search_filter: The search filter to apply, such as:\n *objectClass=person*\n :param list attributes: Object attributes to populate, defaults to all\n \"\"\"\n results = []\n page = 0\n while page == 0 or self.sprc.cookie:\n page += 1\n #pylint: disable=no-member\n message_id = self.ldap.search_ext(base_dn, ldap.SCOPE_SUBTREE,\n search_filter, attributes,\n serverctrls=[self.sprc])\n #pylint: enable=no-member\n data, server_controls = self.ldap.result3(message_id)[1::2]\n self.sprc.cookie = server_controls[0].cookie\n logging.debug('%s - Page %s results: %s', \\\n self.__class__.__name__, page, ', '.join(k[0] for k in data))\n results += [u for u in data]\n\n return results\n"
] |
class ADComputer(ADObject):
"""A class to represent AD computer objects. Includes a number of
helper methods, particularly object-factory related.
ADComputer objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
"""
def computer(self, base_dn, samaccountname, attributes=()):
"""Produces a single, populated ADComputer object through the object factory.
Does not populate attributes for the caller instance.
:param str base_dn: The base DN to search within
:param str samaccountname: The computer's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADComputer object
:rtype: ADComputer
"""
computers = self.computers(base_dn, samaccountnames=[samaccountname], attributes=attributes)
try:
# Usually we will find a match, but perhaps not always
return computers[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
|
zulily/pudl
|
pudl/ad_object.py
|
ADObject.to_dict
|
python
|
def to_dict(self):
o_copy = copy.copy(self)
# Remove some stuff that is not likely related to AD attributes
for attribute in dir(self):
if attribute == 'logger' or attribute == 'adq':
try:
delattr(o_copy, attribute)
except AttributeError:
pass
return o_copy.__dict__
|
Prepare a minimal dictionary with keys mapping to attributes for
the current instance.
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_object.py#L32-L46
| null |
class ADObject(object):
"""A base class for AD objects."""
def __init__(self, adq):
"""ADObject constructor"""
# Setup logging, assumes a root logger already exists with handlers
self.logger = logging.getLogger(__name__)
self.adq = adq
def samaccountname(self, base_dn, distinguished_name):
"""Retrieve the sAMAccountName for a specific DistinguishedName
:param str base_dn: The base DN to search within
:param list distinguished_name: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADUser object
:rtype: ADUser
"""
mappings = self.samaccountnames(base_dn, [distinguished_name])
try:
# Usually we will find a match, but perhaps not always
return mappings[distinguished_name]
except KeyError:
logging.info("%s - unable to retrieve object from AD by DistinguishedName",
distinguished_name)
def samaccountnames(self, base_dn, distinguished_names):
"""Retrieve the sAMAccountNames for the specified DNs
:param str base_dn: The base DN to search within
:param list distinguished_name: A list of distinguished names for which to
retrieve sAMAccountNames
:return: Key/value pairs mapping DistinguishedName to sAMAccountName
:rtype: dict
"""
attributes = ['sAMAccountName']
search_filter = '(|{0})'.format(''.join(['(DistinguishedName={0})'.format(dn)
for dn in distinguished_names]))
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
mappings = {result[0]: result[1]['sAMAccountName'][0] for result in results}
return mappings
def _object_factory(self, search_result):
"""Given a single search result, create and return an object
:param tuple search_result: a single search result returned by an LDAP query,
position 0 is the DN and position 1 is a dictionary of key/value pairs
:return: A single AD object instance
:rtype: Object (ADUser, ADGroup, etc.)
"""
class_name = self.__class__.__name__
module = self.__module__
logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])
module = importlib.import_module('{0}'.format(module))
class_ = getattr(module, class_name)
ado = class_(self.adq)
# A unique set of all attribute names found
attribute_names = set()
# A unique set
multiples = set()
for k in search_result[1].keys():
if k not in attribute_names:
attribute_names.add(k)
else:
multiples.add(k)
for k, val in search_result[1].iteritems():
if k in multiples and not hasattr(ado, k):
setattr(ado, k.lower(), list())
if hasattr(ado, k):
value = getattr(ado, k)
if len(val) == 1:
value.append(val[0])
else:
value.append(val)
else:
if len(val) == 1:
setattr(ado, k.lower(), val[0])
else:
setattr(ado, k.lower(), val)
logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,
search_result[0], ado.__dict__)
return ado
|
zulily/pudl
|
pudl/ad_object.py
|
ADObject.samaccountname
|
python
|
def samaccountname(self, base_dn, distinguished_name):
mappings = self.samaccountnames(base_dn, [distinguished_name])
try:
# Usually we will find a match, but perhaps not always
return mappings[distinguished_name]
except KeyError:
logging.info("%s - unable to retrieve object from AD by DistinguishedName",
distinguished_name)
|
Retrieve the sAMAccountName for a specific DistinguishedName
:param str base_dn: The base DN to search within
:param list distinguished_name: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADUser object
:rtype: ADUser
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_object.py#L49-L66
|
[
"def samaccountnames(self, base_dn, distinguished_names):\n \"\"\"Retrieve the sAMAccountNames for the specified DNs\n\n :param str base_dn: The base DN to search within\n :param list distinguished_name: A list of distinguished names for which to\n retrieve sAMAccountNames\n\n :return: Key/value pairs mapping DistinguishedName to sAMAccountName\n :rtype: dict\n \"\"\"\n attributes = ['sAMAccountName']\n search_filter = '(|{0})'.format(''.join(['(DistinguishedName={0})'.format(dn)\n for dn in distinguished_names]))\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n mappings = {result[0]: result[1]['sAMAccountName'][0] for result in results}\n\n return mappings\n"
] |
class ADObject(object):
"""A base class for AD objects."""
def __init__(self, adq):
"""ADObject constructor"""
# Setup logging, assumes a root logger already exists with handlers
self.logger = logging.getLogger(__name__)
self.adq = adq
def to_dict(self):
"""Prepare a minimal dictionary with keys mapping to attributes for
the current instance.
"""
o_copy = copy.copy(self)
# Remove some stuff that is not likely related to AD attributes
for attribute in dir(self):
if attribute == 'logger' or attribute == 'adq':
try:
delattr(o_copy, attribute)
except AttributeError:
pass
return o_copy.__dict__
def samaccountnames(self, base_dn, distinguished_names):
"""Retrieve the sAMAccountNames for the specified DNs
:param str base_dn: The base DN to search within
:param list distinguished_name: A list of distinguished names for which to
retrieve sAMAccountNames
:return: Key/value pairs mapping DistinguishedName to sAMAccountName
:rtype: dict
"""
attributes = ['sAMAccountName']
search_filter = '(|{0})'.format(''.join(['(DistinguishedName={0})'.format(dn)
for dn in distinguished_names]))
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
mappings = {result[0]: result[1]['sAMAccountName'][0] for result in results}
return mappings
def _object_factory(self, search_result):
"""Given a single search result, create and return an object
:param tuple search_result: a single search result returned by an LDAP query,
position 0 is the DN and position 1 is a dictionary of key/value pairs
:return: A single AD object instance
:rtype: Object (ADUser, ADGroup, etc.)
"""
class_name = self.__class__.__name__
module = self.__module__
logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])
module = importlib.import_module('{0}'.format(module))
class_ = getattr(module, class_name)
ado = class_(self.adq)
# A unique set of all attribute names found
attribute_names = set()
# A unique set
multiples = set()
for k in search_result[1].keys():
if k not in attribute_names:
attribute_names.add(k)
else:
multiples.add(k)
for k, val in search_result[1].iteritems():
if k in multiples and not hasattr(ado, k):
setattr(ado, k.lower(), list())
if hasattr(ado, k):
value = getattr(ado, k)
if len(val) == 1:
value.append(val[0])
else:
value.append(val)
else:
if len(val) == 1:
setattr(ado, k.lower(), val[0])
else:
setattr(ado, k.lower(), val)
logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,
search_result[0], ado.__dict__)
return ado
|
zulily/pudl
|
pudl/ad_object.py
|
ADObject.samaccountnames
|
python
|
def samaccountnames(self, base_dn, distinguished_names):
attributes = ['sAMAccountName']
search_filter = '(|{0})'.format(''.join(['(DistinguishedName={0})'.format(dn)
for dn in distinguished_names]))
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
mappings = {result[0]: result[1]['sAMAccountName'][0] for result in results}
return mappings
|
Retrieve the sAMAccountNames for the specified DNs
:param str base_dn: The base DN to search within
:param list distinguished_name: A list of distinguished names for which to
retrieve sAMAccountNames
:return: Key/value pairs mapping DistinguishedName to sAMAccountName
:rtype: dict
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_object.py#L69-L88
| null |
class ADObject(object):
"""A base class for AD objects."""
def __init__(self, adq):
"""ADObject constructor"""
# Setup logging, assumes a root logger already exists with handlers
self.logger = logging.getLogger(__name__)
self.adq = adq
def to_dict(self):
"""Prepare a minimal dictionary with keys mapping to attributes for
the current instance.
"""
o_copy = copy.copy(self)
# Remove some stuff that is not likely related to AD attributes
for attribute in dir(self):
if attribute == 'logger' or attribute == 'adq':
try:
delattr(o_copy, attribute)
except AttributeError:
pass
return o_copy.__dict__
def samaccountname(self, base_dn, distinguished_name):
"""Retrieve the sAMAccountName for a specific DistinguishedName
:param str base_dn: The base DN to search within
:param list distinguished_name: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADUser object
:rtype: ADUser
"""
mappings = self.samaccountnames(base_dn, [distinguished_name])
try:
# Usually we will find a match, but perhaps not always
return mappings[distinguished_name]
except KeyError:
logging.info("%s - unable to retrieve object from AD by DistinguishedName",
distinguished_name)
def _object_factory(self, search_result):
"""Given a single search result, create and return an object
:param tuple search_result: a single search result returned by an LDAP query,
position 0 is the DN and position 1 is a dictionary of key/value pairs
:return: A single AD object instance
:rtype: Object (ADUser, ADGroup, etc.)
"""
class_name = self.__class__.__name__
module = self.__module__
logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])
module = importlib.import_module('{0}'.format(module))
class_ = getattr(module, class_name)
ado = class_(self.adq)
# A unique set of all attribute names found
attribute_names = set()
# A unique set
multiples = set()
for k in search_result[1].keys():
if k not in attribute_names:
attribute_names.add(k)
else:
multiples.add(k)
for k, val in search_result[1].iteritems():
if k in multiples and not hasattr(ado, k):
setattr(ado, k.lower(), list())
if hasattr(ado, k):
value = getattr(ado, k)
if len(val) == 1:
value.append(val[0])
else:
value.append(val)
else:
if len(val) == 1:
setattr(ado, k.lower(), val[0])
else:
setattr(ado, k.lower(), val)
logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,
search_result[0], ado.__dict__)
return ado
|
zulily/pudl
|
pudl/ad_object.py
|
ADObject._object_factory
|
python
|
def _object_factory(self, search_result):
class_name = self.__class__.__name__
module = self.__module__
logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])
module = importlib.import_module('{0}'.format(module))
class_ = getattr(module, class_name)
ado = class_(self.adq)
# A unique set of all attribute names found
attribute_names = set()
# A unique set
multiples = set()
for k in search_result[1].keys():
if k not in attribute_names:
attribute_names.add(k)
else:
multiples.add(k)
for k, val in search_result[1].iteritems():
if k in multiples and not hasattr(ado, k):
setattr(ado, k.lower(), list())
if hasattr(ado, k):
value = getattr(ado, k)
if len(val) == 1:
value.append(val[0])
else:
value.append(val)
else:
if len(val) == 1:
setattr(ado, k.lower(), val[0])
else:
setattr(ado, k.lower(), val)
logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,
search_result[0], ado.__dict__)
return ado
|
Given a single search result, create and return an object
:param tuple search_result: a single search result returned by an LDAP query,
position 0 is the DN and position 1 is a dictionary of key/value pairs
:return: A single AD object instance
:rtype: Object (ADUser, ADGroup, etc.)
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_object.py#L91-L136
| null |
class ADObject(object):
"""A base class for AD objects."""
def __init__(self, adq):
"""ADObject constructor"""
# Setup logging, assumes a root logger already exists with handlers
self.logger = logging.getLogger(__name__)
self.adq = adq
def to_dict(self):
"""Prepare a minimal dictionary with keys mapping to attributes for
the current instance.
"""
o_copy = copy.copy(self)
# Remove some stuff that is not likely related to AD attributes
for attribute in dir(self):
if attribute == 'logger' or attribute == 'adq':
try:
delattr(o_copy, attribute)
except AttributeError:
pass
return o_copy.__dict__
def samaccountname(self, base_dn, distinguished_name):
"""Retrieve the sAMAccountName for a specific DistinguishedName
:param str base_dn: The base DN to search within
:param list distinguished_name: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:return: A populated ADUser object
:rtype: ADUser
"""
mappings = self.samaccountnames(base_dn, [distinguished_name])
try:
# Usually we will find a match, but perhaps not always
return mappings[distinguished_name]
except KeyError:
logging.info("%s - unable to retrieve object from AD by DistinguishedName",
distinguished_name)
def samaccountnames(self, base_dn, distinguished_names):
"""Retrieve the sAMAccountNames for the specified DNs
:param str base_dn: The base DN to search within
:param list distinguished_name: A list of distinguished names for which to
retrieve sAMAccountNames
:return: Key/value pairs mapping DistinguishedName to sAMAccountName
:rtype: dict
"""
attributes = ['sAMAccountName']
search_filter = '(|{0})'.format(''.join(['(DistinguishedName={0})'.format(dn)
for dn in distinguished_names]))
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
mappings = {result[0]: result[1]['sAMAccountName'][0] for result in results}
return mappings
|
zulily/pudl
|
pudl/ad_user.py
|
ADUser.user
|
python
|
def user(self, base_dn, samaccountname, attributes=(), explicit_membership_only=False):
users = self.users(base_dn, samaccountnames=[samaccountname],
attributes=attributes, explicit_membership_only=explicit_membership_only)
try:
# Usually we will find a match, but perhaps not always
return users[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
|
Produces a single, populated ADUser object through the object factory.
Does not populate attributes for the caller instance.
:param str base_dn: The base DN to search within
:param str samaccountname: The user's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:param bool explicit_membership_only: If set True, memberof will only
list groups for which the user is a directly referenced member
:return: A populated ADUser object
:rtype: ADUser
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_user.py#L38-L59
|
[
"def users(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):\n \"\"\"Gathers a list of ADUser objects\n\n :param str base_dn: The base DN to search within\n :param list attributes: Object attributes to populate, defaults to all\n :param list samaccountnames: A list of usernames for which objects will be\n created, defaults to all users if unspecified\n :param bool explicit_membership_only: If set True, memberof will only\n list groups for which users are directly referenced members\n\n :return: A list of populated ADUser objects\n :rtype: list\n \"\"\"\n ad_users = []\n\n search_filter = '(&(objectClass=user)(!(objectClass=group))(!(objectClass=computer)){0})'\n # If no samaccountnames specified, filter will pull all user objects under\n # base_dn\n if not samaccountnames:\n search_filter = search_filter.format('(sAMAccountName=*)')\n else:\n # Extensible filter: http://bit.ly/1Qh4eyV\n if len(samaccountnames) == 1:\n account_names = '(sAMAccountName={0})'.format(samaccountnames[0])\n else:\n account_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(username) \\\n for username in samaccountnames]))\n\n search_filter = search_filter.format(account_names)\n\n\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n for search_result in results:\n adu = self._object_factory(search_result)\n # Each results index 0 of the tuple is the DN\n if not explicit_membership_only and 'memberof' in dir(adu):\n memberof = [g[0] for g in self.adq.search(base_dn,\n '(member:1.2.840.113556.1.4.1941:={0})'.\\\n format(search_result[0]),\n attributes=['memberof'])]\n adu.memberof = memberof\n ad_users.append(adu)\n\n\n return ad_users\n"
] |
class ADUser(ADObject):
"""A class to represent AD user objects. Includes a number of
helper methods, particularly object-factory related.
ADUser objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
"""
# Some refactoring may be considered in the future that would
# involve passing the sAMAccountName to a contstructor override,
# and possibly moving users() to become static. Otherwise,
# instead of user() creating and returning a single new object,
# perhaps just populate the current ADUser instance, which
# could make a little more sense
def users(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):
"""Gathers a list of ADUser objects
:param str base_dn: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:param list samaccountnames: A list of usernames for which objects will be
created, defaults to all users if unspecified
:param bool explicit_membership_only: If set True, memberof will only
list groups for which users are directly referenced members
:return: A list of populated ADUser objects
:rtype: list
"""
ad_users = []
search_filter = '(&(objectClass=user)(!(objectClass=group))(!(objectClass=computer)){0})'
# If no samaccountnames specified, filter will pull all user objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
# Extensible filter: http://bit.ly/1Qh4eyV
if len(samaccountnames) == 1:
account_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
account_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(username) \
for username in samaccountnames]))
search_filter = search_filter.format(account_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adu = self._object_factory(search_result)
# Each results index 0 of the tuple is the DN
if not explicit_membership_only and 'memberof' in dir(adu):
memberof = [g[0] for g in self.adq.search(base_dn,
'(member:1.2.840.113556.1.4.1941:={0})'.\
format(search_result[0]),
attributes=['memberof'])]
adu.memberof = memberof
ad_users.append(adu)
return ad_users
def is_member(self, group_distinguishedname):
"""For the current ADUser instance, determine if
the user is a member of a specific group (the group DN is used).
The result may not be accurate if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str group_distinguishedname: The group DistinguishedName
:return: A boolean indicating whether or not the user is a member of the group
:rtype: bool
"""
#pylint: disable=no-member
if group_distinguishedname.lower() in [dn.lower() for dn in self.memberof]:
#pylint: enable=no-member
return True
else:
return False
def group_samaccountnames(self, base_dn):
"""For the current ADUser instance, determine which
groups the user is a member of and convert the
group DistinguishedNames to sAMAccountNames.
The resulting list of groups may not be complete
if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str base_dn: The base DN to search within
:return: A list of groups (sAMAccountNames) for which the
current ADUser instance is a member, sAMAccountNames
:rtype: list
"""
#pylint: disable=no-member
mappings = self.samaccountnames(base_dn, self.memberof)
#pylint: enable=no-member
groups = [samaccountname for samaccountname in mappings.values()]
if not groups:
logging.info("%s - unable to retrieve any groups for the current ADUser instance",
self.samaccountname)
return groups
|
zulily/pudl
|
pudl/ad_user.py
|
ADUser.users
|
python
|
def users(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):
ad_users = []
search_filter = '(&(objectClass=user)(!(objectClass=group))(!(objectClass=computer)){0})'
# If no samaccountnames specified, filter will pull all user objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
# Extensible filter: http://bit.ly/1Qh4eyV
if len(samaccountnames) == 1:
account_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
account_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(username) \
for username in samaccountnames]))
search_filter = search_filter.format(account_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adu = self._object_factory(search_result)
# Each results index 0 of the tuple is the DN
if not explicit_membership_only and 'memberof' in dir(adu):
memberof = [g[0] for g in self.adq.search(base_dn,
'(member:1.2.840.113556.1.4.1941:={0})'.\
format(search_result[0]),
attributes=['memberof'])]
adu.memberof = memberof
ad_users.append(adu)
return ad_users
|
Gathers a list of ADUser objects
:param str base_dn: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:param list samaccountnames: A list of usernames for which objects will be
created, defaults to all users if unspecified
:param bool explicit_membership_only: If set True, memberof will only
list groups for which users are directly referenced members
:return: A list of populated ADUser objects
:rtype: list
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_user.py#L63-L110
|
[
"def _object_factory(self, search_result):\n \"\"\"Given a single search result, create and return an object\n\n :param tuple search_result: a single search result returned by an LDAP query,\n position 0 is the DN and position 1 is a dictionary of key/value pairs\n\n :return: A single AD object instance\n :rtype: Object (ADUser, ADGroup, etc.)\n\n \"\"\"\n class_name = self.__class__.__name__\n module = self.__module__\n logging.debug('Creating object of type %s for DN: %s', class_name, search_result[0])\n module = importlib.import_module('{0}'.format(module))\n class_ = getattr(module, class_name)\n ado = class_(self.adq)\n\n # A unique set of all attribute names found\n attribute_names = set()\n # A unique set\n multiples = set()\n for k in search_result[1].keys():\n if k not in attribute_names:\n attribute_names.add(k)\n else:\n multiples.add(k)\n for k, val in search_result[1].iteritems():\n if k in multiples and not hasattr(ado, k):\n setattr(ado, k.lower(), list())\n if hasattr(ado, k):\n value = getattr(ado, k)\n if len(val) == 1:\n value.append(val[0])\n else:\n value.append(val)\n else:\n if len(val) == 1:\n setattr(ado, k.lower(), val[0])\n else:\n setattr(ado, k.lower(), val)\n\n\n logging.debug('Attributes and values for %s object (DN: %s): %s', class_name,\n search_result[0], ado.__dict__)\n\n return ado\n",
"def search(self, base_dn, search_filter, attributes=()):\n \"\"\"Perform an AD search\n\n :param str base_dn: The base DN to search within\n :param str search_filter: The search filter to apply, such as:\n *objectClass=person*\n :param list attributes: Object attributes to populate, defaults to all\n \"\"\"\n results = []\n page = 0\n while page == 0 or self.sprc.cookie:\n page += 1\n #pylint: disable=no-member\n message_id = self.ldap.search_ext(base_dn, ldap.SCOPE_SUBTREE,\n search_filter, attributes,\n serverctrls=[self.sprc])\n #pylint: enable=no-member\n data, server_controls = self.ldap.result3(message_id)[1::2]\n self.sprc.cookie = server_controls[0].cookie\n logging.debug('%s - Page %s results: %s', \\\n self.__class__.__name__, page, ', '.join(k[0] for k in data))\n results += [u for u in data]\n\n return results\n"
] |
class ADUser(ADObject):
"""A class to represent AD user objects. Includes a number of
helper methods, particularly object-factory related.
ADUser objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
"""
# Some refactoring may be considered in the future that would
# involve passing the sAMAccountName to a contstructor override,
# and possibly moving users() to become static. Otherwise,
# instead of user() creating and returning a single new object,
# perhaps just populate the current ADUser instance, which
# could make a little more sense
def user(self, base_dn, samaccountname, attributes=(), explicit_membership_only=False):
"""Produces a single, populated ADUser object through the object factory.
Does not populate attributes for the caller instance.
:param str base_dn: The base DN to search within
:param str samaccountname: The user's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:param bool explicit_membership_only: If set True, memberof will only
list groups for which the user is a directly referenced member
:return: A populated ADUser object
:rtype: ADUser
"""
users = self.users(base_dn, samaccountnames=[samaccountname],
attributes=attributes, explicit_membership_only=explicit_membership_only)
try:
# Usually we will find a match, but perhaps not always
return users[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
def is_member(self, group_distinguishedname):
"""For the current ADUser instance, determine if
the user is a member of a specific group (the group DN is used).
The result may not be accurate if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str group_distinguishedname: The group DistinguishedName
:return: A boolean indicating whether or not the user is a member of the group
:rtype: bool
"""
#pylint: disable=no-member
if group_distinguishedname.lower() in [dn.lower() for dn in self.memberof]:
#pylint: enable=no-member
return True
else:
return False
def group_samaccountnames(self, base_dn):
"""For the current ADUser instance, determine which
groups the user is a member of and convert the
group DistinguishedNames to sAMAccountNames.
The resulting list of groups may not be complete
if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str base_dn: The base DN to search within
:return: A list of groups (sAMAccountNames) for which the
current ADUser instance is a member, sAMAccountNames
:rtype: list
"""
#pylint: disable=no-member
mappings = self.samaccountnames(base_dn, self.memberof)
#pylint: enable=no-member
groups = [samaccountname for samaccountname in mappings.values()]
if not groups:
logging.info("%s - unable to retrieve any groups for the current ADUser instance",
self.samaccountname)
return groups
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.